diff --git a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/__init__.py b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/__init__.py
index 56799f6c..361d2a5a 100644
--- a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/__init__.py
+++ b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/__init__.py
@@ -1,77 +1,4 @@
-import json
+from .metric import MetricsHook
+from .trace import TracingHook
 
-from openfeature.exception import ErrorCode
-from openfeature.flag_evaluation import FlagEvaluationDetails, Reason
-from openfeature.hook import Hook, HookContext, HookHints
-from opentelemetry import trace
-from opentelemetry.semconv.attributes.error_attributes import ERROR_TYPE
-
-OTEL_EVENT_NAME = "feature_flag.evaluation"
-
-
-class EventAttributes:
-    KEY = "feature_flag.key"
-    RESULT_VALUE = "feature_flag.result.value"
-    RESULT_VARIANT = "feature_flag.result.variant"
-    CONTEXT_ID = "feature_flag.context.id"
-    PROVIDER_NAME = "feature_flag.provider.name"
-    RESULT_REASON = "feature_flag.result.reason"
-    SET_ID = "feature_flag.set.id"
-    VERSION = "feature_flag.version"
-
-
-class TracingHook(Hook):
-    def __init__(self, exclude_exceptions: bool = False):
-        self.exclude_exceptions = exclude_exceptions
-
-    def finally_after(
-        self,
-        hook_context: HookContext,
-        details: FlagEvaluationDetails,
-        hints: HookHints,
-    ) -> None:
-        current_span = trace.get_current_span()
-
-        event_attributes = {
-            EventAttributes.KEY: details.flag_key,
-            EventAttributes.RESULT_VALUE: json.dumps(details.value),
-            EventAttributes.RESULT_REASON: str(
-                details.reason or Reason.UNKNOWN
-            ).lower(),
-        }
-
-        if details.variant:
-            event_attributes[EventAttributes.RESULT_VARIANT] = details.variant
-
-        if details.reason == Reason.ERROR:
-            error_type = str(details.error_code or ErrorCode.GENERAL).lower()
-            event_attributes[ERROR_TYPE] = error_type
-            if details.error_message:
-                event_attributes["error.message"] = details.error_message
-
-        context = hook_context.evaluation_context
-        if context.targeting_key:
-            event_attributes[EventAttributes.CONTEXT_ID] = context.targeting_key
-
-        if hook_context.provider_metadata:
-            event_attributes[EventAttributes.PROVIDER_NAME] = (
-                hook_context.provider_metadata.name
-            )
-
-        current_span.add_event(OTEL_EVENT_NAME, event_attributes)
-
-    def error(
-        self, hook_context: HookContext, exception: Exception, hints: HookHints
-    ) -> None:
-        if self.exclude_exceptions:
-            return
-        attributes = {
-            EventAttributes.KEY: hook_context.flag_key,
-            EventAttributes.RESULT_VALUE: json.dumps(hook_context.default_value),
-        }
-        if hook_context.provider_metadata:
-            attributes[EventAttributes.PROVIDER_NAME] = (
-                hook_context.provider_metadata.name
-            )
-        current_span = trace.get_current_span()
-        current_span.record_exception(exception, attributes)
+__all__ = ["MetricsHook", "TracingHook"]
diff --git a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/constants.py b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/constants.py
new file mode 100644
index 00000000..4ebbe852
--- /dev/null
+++ b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/constants.py
@@ -0,0 +1,19 @@
+class Attributes:
+    OTEL_CONTEXT_ID = "feature_flag.context.id"
+    OTEL_EVENT_NAME = "feature_flag.evaluation"
+    OTEL_ERROR_TYPE = "error.type"
+    OTEL_ERROR_MESSAGE = "error.message"
+    OTEL_FLAG_KEY = "feature_flag.key"
+    OTEL_FLAG_VARIANT = "feature_flag.result.variant"
+    OTEL_PROVIDER_NAME = "feature_flag.provider.name"
+    OTEL_RESULT_VALUE = "feature_flag.result.value"
+    OTEL_RESULT_REASON = "feature_flag.result.reason"
+    OTEL_SET_ID = "feature_flag.set.id"
+    OTEL_VERSION = "feature_flag.version"
+
+
+class Metrics:
+    ACTIVE_COUNT = "feature_flag.evaluation.active_count"
+    SUCCESS_TOTAL = "feature_flag.evaluation.success_total"
+    REQUEST_TOTAL = "feature_flag.evaluation.request_total"
+    ERROR_TOTAL = "feature_flag.evaluation.error_total"
diff --git a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py
new file mode 100644
index 00000000..5823b38c
--- /dev/null
+++ b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/metric.py
@@ -0,0 +1,98 @@
+import typing
+
+from openfeature.flag_evaluation import FlagEvaluationDetails, FlagMetadata, Reason
+from openfeature.hook import Hook, HookContext, HookHints
+from opentelemetry import metrics
+from opentelemetry.util.types import AttributeValue
+
+from .constants import Attributes, Metrics
+
+
+class MetricsHook(Hook):
+    def __init__(self, extra_attributes: typing.Optional[list[str]] = None) -> None:
+        self.extra_attributes = extra_attributes or []
+        meter: metrics.Meter = metrics.get_meter("openfeature.hooks.opentelemetry")
+        self.evaluation_active_count = meter.create_up_down_counter(
+            Metrics.ACTIVE_COUNT, description="active flag evaluations"
+        )
+        self.evaluation_error_total = meter.create_counter(
+            Metrics.ERROR_TOTAL, description="error flag evaluations"
+        )
+        self.evaluation_success_total = meter.create_counter(
+            Metrics.SUCCESS_TOTAL, description="success flag evaluations"
+        )
+        self.evaluation_request_total = meter.create_counter(
+            Metrics.REQUEST_TOTAL, description="request flag evaluations"
+        )
+
+    def before(self, hook_context: HookContext, hints: HookHints) -> None:
+        attributes: dict[str, AttributeValue] = {
+            Attributes.OTEL_FLAG_KEY: hook_context.flag_key,
+        }
+        if hook_context.provider_metadata:
+            attributes[Attributes.OTEL_PROVIDER_NAME] = (
+                hook_context.provider_metadata.name
+            )
+        self.evaluation_active_count.add(1, attributes)
+        self.evaluation_request_total.add(1, attributes)
+
+    def after(
+        self,
+        hook_context: HookContext,
+        details: FlagEvaluationDetails,
+        hints: HookHints,
+    ) -> None:
+        attributes: dict[str, AttributeValue] = {
+            Attributes.OTEL_FLAG_KEY: details.flag_key,
+            Attributes.OTEL_RESULT_REASON: str(
+                details.reason or Reason.UNKNOWN
+            ).lower(),
+        }
+        if details.variant:
+            attributes[Attributes.OTEL_FLAG_VARIANT] = details.variant
+        if hook_context.provider_metadata:
+            attributes[Attributes.OTEL_PROVIDER_NAME] = (
+                hook_context.provider_metadata.name
+            )
+        attributes = attributes | get_extra_attributes(
+            self.extra_attributes, details.flag_metadata
+        )
+        self.evaluation_success_total.add(1, attributes)
+
+    def error(
+        self, hook_context: HookContext, exception: Exception, hints: HookHints
+    ) -> None:
+        attributes: dict[str, AttributeValue] = {
+            Attributes.OTEL_FLAG_KEY: hook_context.flag_key,
+            "exception": str(exception).lower(),
+        }
+        if hook_context.provider_metadata:
+            attributes[Attributes.OTEL_PROVIDER_NAME] = (
+                hook_context.provider_metadata.name
+            )
+        self.evaluation_error_total.add(1, attributes)
+
+    def finally_after(
+        self,
+        hook_context: HookContext,
+        details: FlagEvaluationDetails,
+        hints: HookHints,
+    ) -> None:
+        attributes: dict[str, AttributeValue] = {
+            Attributes.OTEL_FLAG_KEY: hook_context.flag_key,
+        }
+        if hook_context.provider_metadata:
+            attributes[Attributes.OTEL_PROVIDER_NAME] = (
+                hook_context.provider_metadata.name
+            )
+        self.evaluation_active_count.add(-1, attributes)
+
+
+def get_extra_attributes(
+    extra_attributes: list[str], metadata: FlagMetadata
+) -> dict[str, AttributeValue]:
+    attributes: dict[str, AttributeValue] = {}
+    for attribute in extra_attributes:
+        if (attr := metadata.get(attribute)) is not None:
+            attributes[attribute] = attr
+    return attributes
diff --git a/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/trace.py b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/trace.py
new file mode 100644
index 00000000..af2faa26
--- /dev/null
+++ b/hooks/openfeature-hooks-opentelemetry/src/openfeature/contrib/hook/opentelemetry/trace.py
@@ -0,0 +1,65 @@
+import json
+
+from openfeature.exception import ErrorCode
+from openfeature.flag_evaluation import FlagEvaluationDetails, Reason
+from openfeature.hook import Hook, HookContext, HookHints
+from opentelemetry import trace
+
+from .constants import Attributes
+
+
+class TracingHook(Hook):
+    def __init__(self, exclude_exceptions: bool = False):
+        self.exclude_exceptions = exclude_exceptions
+
+    def finally_after(
+        self,
+        hook_context: HookContext,
+        details: FlagEvaluationDetails,
+        hints: HookHints,
+    ) -> None:
+        current_span = trace.get_current_span()
+
+        event_attributes = {
+            Attributes.OTEL_FLAG_KEY: details.flag_key,
+            Attributes.OTEL_RESULT_VALUE: json.dumps(details.value),
+            Attributes.OTEL_RESULT_REASON: str(
+                details.reason or Reason.UNKNOWN
+            ).lower(),
+        }
+
+        if details.variant:
+            event_attributes[Attributes.OTEL_FLAG_VARIANT] = details.variant
+
+        if details.reason == Reason.ERROR:
+            error_type = str(details.error_code or ErrorCode.GENERAL).lower()
+            event_attributes[Attributes.OTEL_ERROR_TYPE] = error_type
+            if details.error_message:
+                event_attributes[Attributes.OTEL_ERROR_MESSAGE] = details.error_message
+
+        context = hook_context.evaluation_context
+        if context.targeting_key:
+            event_attributes[Attributes.OTEL_CONTEXT_ID] = context.targeting_key
+
+        if hook_context.provider_metadata:
+            event_attributes[Attributes.OTEL_PROVIDER_NAME] = (
+                hook_context.provider_metadata.name
+            )
+
+        current_span.add_event(Attributes.OTEL_EVENT_NAME, event_attributes)
+
+    def error(
+        self, hook_context: HookContext, exception: Exception, hints: HookHints
+    ) -> None:
+        if self.exclude_exceptions:
+            return
+        attributes = {
+            Attributes.OTEL_FLAG_KEY: hook_context.flag_key,
+            Attributes.OTEL_RESULT_VALUE: json.dumps(hook_context.default_value),
+        }
+        if hook_context.provider_metadata:
+            attributes[Attributes.OTEL_PROVIDER_NAME] = (
+                hook_context.provider_metadata.name
+            )
+        current_span = trace.get_current_span()
+        current_span.record_exception(exception, attributes)
diff --git a/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py b/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py
new file mode 100644
index 00000000..f649773f
--- /dev/null
+++ b/hooks/openfeature-hooks-opentelemetry/tests/test_metric.py
@@ -0,0 +1,213 @@
+from unittest.mock import Mock
+
+import pytest
+from opentelemetry import metrics
+
+from openfeature.contrib.hook.opentelemetry import MetricsHook
+from openfeature.evaluation_context import EvaluationContext
+from openfeature.flag_evaluation import Reason
+from openfeature.hook import FlagEvaluationDetails, FlagType, HookContext
+from openfeature.provider.metadata import Metadata
+
+
+@pytest.fixture
+def mock_get_meter(monkeypatch):
+    mock_counters = {
+        "feature_flag.evaluation.active_count": Mock(spec=metrics.UpDownCounter),
+        "feature_flag.evaluation.error_total": Mock(spec=metrics.Counter),
+        "feature_flag.evaluation.success_total": Mock(spec=metrics.Counter),
+        "feature_flag.evaluation.request_total": Mock(spec=metrics.Counter),
+    }
+
+    def side_effect(*args, **kwargs):
+        return mock_counters[args[0]]
+
+    mock_meter = Mock(
+        spec=metrics.Meter,
+        create_up_down_counter=side_effect,
+        create_counter=side_effect,
+    )
+    monkeypatch.setattr(metrics, "get_meter", lambda name: mock_meter)
+
+    return mock_meter, mock_counters
+
+
+def test_metric_before(mock_get_meter):
+    _, mock_counters = mock_get_meter
+    hook = MetricsHook()
+    hook_context = HookContext(
+        flag_key="flag_key",
+        flag_type=FlagType.BOOLEAN,
+        default_value=False,
+        evaluation_context=EvaluationContext(),
+        provider_metadata=Metadata(name="test-provider"),
+    )
+
+    hook.before(hook_context, hints={})
+    mock_counters["feature_flag.evaluation.active_count"].add.assert_called_once_with(
+        1,
+        {
+            "feature_flag.key": "flag_key",
+            "feature_flag.provider.name": "test-provider",
+        },
+    )
+    mock_counters["feature_flag.evaluation.request_total"].add.assert_called_once_with(
+        1,
+        {
+            "feature_flag.key": "flag_key",
+            "feature_flag.provider.name": "test-provider",
+        },
+    )
+    mock_counters["feature_flag.evaluation.error_total"].add.assert_not_called()
+    mock_counters["feature_flag.evaluation.success_total"].add.assert_not_called()
+
+
+def test_metric_after(mock_get_meter):
+    _, mock_counters = mock_get_meter
+    hook = MetricsHook()
+    hook_context = HookContext(
+        flag_key="flag_key",
+        flag_type=FlagType.BOOLEAN,
+        default_value=False,
+        evaluation_context=EvaluationContext(),
+        provider_metadata=Metadata(name="test-provider"),
+    )
+    details = FlagEvaluationDetails(
+        flag_key="flag_key",
+        value=True,
+        variant="enabled",
+        reason=Reason.TARGETING_MATCH,
+        error_code=None,
+        error_message=None,
+    )
+    hook.after(hook_context, details, hints={})
+    mock_counters["feature_flag.evaluation.success_total"].add.assert_called_once_with(
+        1,
+        {
+            "feature_flag.key": "flag_key",
+            "feature_flag.result.reason": "targeting_match",
+            "feature_flag.result.variant": "enabled",
+            "feature_flag.provider.name": "test-provider",
+        },
+    )
+    mock_counters["feature_flag.evaluation.error_total"].add.assert_not_called()
+    mock_counters["feature_flag.evaluation.request_total"].add.assert_not_called()
+    mock_counters["feature_flag.evaluation.active_count"].add.assert_not_called()
+
+
+def test_metric_after_with_extra_dimensions(mock_get_meter):
+    _, mock_counters = mock_get_meter
+    hook = MetricsHook(extra_attributes=["scope", "test"])
+    hook_context = HookContext(
+        flag_key="flag_key",
+        flag_type=FlagType.BOOLEAN,
+        default_value=False,
+        evaluation_context=EvaluationContext(),
+        provider_metadata=Metadata(name="test-provider"),
+    )
+    details = FlagEvaluationDetails(
+        flag_key="flag_key",
+        value=True,
+        variant="enabled",
+        reason=Reason.TARGETING_MATCH,
+        flag_metadata={"scope": "application", "test": True},
+        error_code=None,
+        error_message=None,
+    )
+    hook.after(hook_context, details, hints={})
+    mock_counters["feature_flag.evaluation.success_total"].add.assert_called_once_with(
+        1,
+        {
+            "feature_flag.key": "flag_key",
+            "feature_flag.result.reason": "targeting_match",
+            "feature_flag.result.variant": "enabled",
+            "feature_flag.provider.name": "test-provider",
+            "scope": "application",
+            "test": True,
+        },
+    )
+
+
+def test_metric_after_with_extra_dimensions_missing_attribute(mock_get_meter):
+    _, mock_counters = mock_get_meter
+    hook = MetricsHook(extra_attributes=["scope", "test"])
+    hook_context = HookContext(
+        flag_key="flag_key",
+        flag_type=FlagType.BOOLEAN,
+        default_value=False,
+        evaluation_context=EvaluationContext(),
+        provider_metadata=Metadata(name="test-provider"),
+    )
+    details = FlagEvaluationDetails(
+        flag_key="flag_key",
+        value=True,
+        variant="enabled",
+        reason=Reason.TARGETING_MATCH,
+        flag_metadata={"test": True},
+    )
+    hook.after(hook_context, details, hints={})
+    mock_counters["feature_flag.evaluation.success_total"].add.assert_called_once_with(
+        1,
+        {
+            "feature_flag.key": "flag_key",
+            "feature_flag.result.reason": "targeting_match",
+            "feature_flag.result.variant": "enabled",
+            "feature_flag.provider.name": "test-provider",
+            "test": True,
+        },
+    )
+
+
+def test_metric_error(mock_get_meter):
+    _, mock_counters = mock_get_meter
+    hook = MetricsHook()
+    hook_context = HookContext(
+        flag_key="flag_key",
+        flag_type=FlagType.BOOLEAN,
+        default_value=False,
+        evaluation_context=EvaluationContext(),
+        provider_metadata=Metadata(name="test-provider"),
+    )
+    hook.error(hook_context, Exception("test error"), hints={})
+    mock_counters["feature_flag.evaluation.error_total"].add.assert_called_once_with(
+        1,
+        {
+            "feature_flag.key": "flag_key",
+            "feature_flag.provider.name": "test-provider",
+            "exception": "test error",
+        },
+    )
+    mock_counters["feature_flag.evaluation.success_total"].add.assert_not_called()
+    mock_counters["feature_flag.evaluation.request_total"].add.assert_not_called()
+    mock_counters["feature_flag.evaluation.active_count"].add.assert_not_called()
+
+
+def test_metric_finally_after(mock_get_meter):
+    _, mock_counters = mock_get_meter
+    hook = MetricsHook()
+    hook_context = HookContext(
+        flag_key="flag_key",
+        flag_type=FlagType.BOOLEAN,
+        default_value=False,
+        evaluation_context=EvaluationContext(),
+        provider_metadata=Metadata(name="test-provider"),
+    )
+    details = FlagEvaluationDetails(
+        flag_key="flag_key",
+        value=True,
+        variant="enabled",
+        reason=Reason.TARGETING_MATCH,
+        error_code=None,
+        error_message=None,
+    )
+    hook.finally_after(hook_context, details, hints={})
+    mock_counters["feature_flag.evaluation.active_count"].add.assert_called_once_with(
+        -1,
+        {
+            "feature_flag.key": "flag_key",
+            "feature_flag.provider.name": "test-provider",
+        },
+    )
+    mock_counters["feature_flag.evaluation.success_total"].add.assert_not_called()
+    mock_counters["feature_flag.evaluation.request_total"].add.assert_not_called()
+    mock_counters["feature_flag.evaluation.error_total"].add.assert_not_called()
diff --git a/hooks/openfeature-hooks-opentelemetry/tests/test_otel.py b/hooks/openfeature-hooks-opentelemetry/tests/test_trace.py
similarity index 93%
rename from hooks/openfeature-hooks-opentelemetry/tests/test_otel.py
rename to hooks/openfeature-hooks-opentelemetry/tests/test_trace.py
index b0279f47..82b9ccbf 100644
--- a/hooks/openfeature-hooks-opentelemetry/tests/test_otel.py
+++ b/hooks/openfeature-hooks-opentelemetry/tests/test_trace.py
@@ -17,7 +17,7 @@ def mock_get_current_span(monkeypatch):
     monkeypatch.setattr(trace, "get_current_span", Mock())
 
 
-def test_finally_after(mock_get_current_span):
+def test_trace_finally_after(mock_get_current_span):
     # Given
     hook = TracingHook()
     hook_context = HookContext(
@@ -56,7 +56,7 @@ def test_finally_after(mock_get_current_span):
     )
 
 
-def test_after_evaluation_error(mock_get_current_span):
+def test_trace_after_evaluation_error(mock_get_current_span):
     # Given
     hook = TracingHook()
     hook_context = HookContext(
@@ -94,7 +94,7 @@ def test_after_evaluation_error(mock_get_current_span):
     )
 
 
-def test_error(mock_get_current_span):
+def test_trace_error(mock_get_current_span):
     # Given
     hook = TracingHook()
     hook_context = HookContext(
@@ -121,7 +121,7 @@ def test_error(mock_get_current_span):
     mock_span.record_exception.assert_called_once_with(exception, attributes)
 
 
-def test_error_exclude_exceptions(mock_get_current_span):
+def test_trace_error_exclude_exceptions(mock_get_current_span):
     # Given
     hook = TracingHook(exclude_exceptions=True)
     hook_context = HookContext(
@@ -141,7 +141,7 @@ def test_error_exclude_exceptions(mock_get_current_span):
     mock_span.record_exception.assert_not_called()
 
 
-def test_error_no_provider_metadata(mock_get_current_span):
+def test_trace_error_no_provider_metadata(mock_get_current_span):
     # Given
     hook = TracingHook()
    hook_context = HookContext(
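
A minimal usage sketch (not part of the patch), assuming the OpenFeature SDK's public entry points `openfeature.api.add_hooks` and `openfeature.api.get_client`; the flag key and the `extra_attributes` value are illustrative only:

```python
from openfeature import api
from openfeature.contrib.hook.opentelemetry import MetricsHook, TracingHook

# Register both hooks globally so they run on every flag evaluation:
# MetricsHook emits the request/success/error/active_count instruments,
# and TracingHook attaches a "feature_flag.evaluation" event (or a
# recorded exception) to the current OpenTelemetry span.
api.add_hooks([MetricsHook(extra_attributes=["scope"]), TracingHook()])

client = api.get_client()
enabled = client.get_boolean_value("my-flag", False)  # hypothetical flag key
```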