diff --git a/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/shared/chat_wrappers.py b/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/shared/chat_wrappers.py
index 151cb20f00..7e3e188183 100644
--- a/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/shared/chat_wrappers.py
+++ b/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/shared/chat_wrappers.py
@@ -41,6 +41,7 @@
 )
 from opentelemetry.instrumentation.utils import _SUPPRESS_INSTRUMENTATION_KEY
 from opentelemetry.metrics import Counter, Histogram
+from opentelemetry.semconv.attributes.error_attributes import ERROR_TYPE
 from opentelemetry.semconv_ai import (
     SUPPRESS_LANGUAGE_MODEL_INSTRUMENTATION_KEY,
     LLMRequestTypeValues,
@@ -89,7 +90,6 @@ def chat_wrapper(
     )
 
     run_async(_handle_request(span, kwargs, instance))
-
     try:
         start_time = time.time()
         response = wrapped(*args, **kwargs)
@@ -107,10 +107,12 @@ def chat_wrapper(
         if exception_counter:
             exception_counter.add(1, attributes=attributes)
 
+        span.set_attribute(ERROR_TYPE, e.__class__.__name__)
+        span.record_exception(e)
         span.set_status(Status(StatusCode.ERROR, str(e)))
         span.end()
 
-        raise e
+        raise
 
     if is_streaming_response(response):
         # span will be closed after the generator is done
@@ -204,10 +206,12 @@ async def achat_wrapper(
         if exception_counter:
             exception_counter.add(1, attributes=attributes)
 
+        span.set_attribute(ERROR_TYPE, e.__class__.__name__)
+        span.record_exception(e)
         span.set_status(Status(StatusCode.ERROR, str(e)))
         span.end()
 
-        raise e
+        raise
 
     if is_streaming_response(response):
         # span will be closed after the generator is done
@@ -637,7 +641,7 @@ def __next__(self):
         except Exception as e:
             if isinstance(e, StopIteration):
                 self._process_complete_response()
-            raise e
+            raise
         else:
             self._process_item(chunk)
             return chunk
@@ -648,7 +652,7 @@ async def __anext__(self):
         except Exception as e:
             if isinstance(e, StopAsyncIteration):
                 self._process_complete_response()
-            raise e
+            raise
         else:
             self._process_item(chunk)
             return chunk
diff --git a/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/shared/completion_wrappers.py b/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/shared/completion_wrappers.py
index 62ddd819fb..d821fecb94 100644
--- a/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/shared/completion_wrappers.py
+++ b/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/shared/completion_wrappers.py
@@ -15,6 +15,7 @@
     should_record_stream_token_usage,
 )
 from opentelemetry.instrumentation.openai.shared.config import Config
+from opentelemetry.semconv.attributes.error_attributes import ERROR_TYPE
 from opentelemetry.instrumentation.openai.shared.event_emitter import emit_event
 from opentelemetry.instrumentation.openai.shared.event_models import (
     ChoiceEvent,
@@ -61,9 +62,11 @@ def completion_wrapper(tracer, wrapped, instance, args, kwargs):
     try:
         response = wrapped(*args, **kwargs)
     except Exception as e:
+        span.set_attribute(ERROR_TYPE, e.__class__.__name__)
+        span.record_exception(e)
         span.set_status(Status(StatusCode.ERROR, str(e)))
         span.end()
-        raise e
+        raise
 
     if is_streaming_response(response):
         # span will be closed after the generator is done
@@ -93,9 +96,11 @@ async def acompletion_wrapper(tracer, wrapped, instance, args, kwargs):
     try:
         response = await wrapped(*args, **kwargs)
     except Exception as e:
+        span.set_attribute(ERROR_TYPE, e.__class__.__name__)
+        span.record_exception(e)
         span.set_status(Status(StatusCode.ERROR, str(e)))
         span.end()
-        raise e
+        raise
 
     if is_streaming_response(response):
         # span will be closed after the generator is done
diff --git a/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py b/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py
index 6e099bcdce..d45af3789a 100644
--- a/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py
+++ b/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/shared/embeddings_wrappers.py
@@ -31,6 +31,7 @@
 )
 from opentelemetry.instrumentation.utils import _SUPPRESS_INSTRUMENTATION_KEY
 from opentelemetry.metrics import Counter, Histogram
+from opentelemetry.semconv.attributes.error_attributes import ERROR_TYPE
 from opentelemetry.semconv_ai import (
     SUPPRESS_LANGUAGE_MODEL_INSTRUMENTATION_KEY,
     LLMRequestTypeValues,
@@ -89,10 +90,12 @@ def embeddings_wrapper(
         if exception_counter:
             exception_counter.add(1, attributes=attributes)
 
+        span.set_attribute(ERROR_TYPE, e.__class__.__name__)
+        span.record_exception(e)
         span.set_status(Status(StatusCode.ERROR, str(e)))
         span.end()
 
-        raise e
+        raise
 
     duration = end_time - start_time
 
@@ -152,10 +155,12 @@ async def aembeddings_wrapper(
         if exception_counter:
             exception_counter.add(1, attributes=attributes)
 
+        span.set_attribute(ERROR_TYPE, e.__class__.__name__)
+        span.record_exception(e)
         span.set_status(Status(StatusCode.ERROR, str(e)))
         span.end()
 
-        raise e
+        raise
 
     duration = end_time - start_time
 
diff --git a/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/shared/image_gen_wrappers.py b/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/shared/image_gen_wrappers.py
index c7e3e88864..eddeac50da 100644
--- a/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/shared/image_gen_wrappers.py
+++ b/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/shared/image_gen_wrappers.py
@@ -47,7 +47,7 @@ def image_gen_metrics_wrapper(
         if exception_counter:
             exception_counter.add(1, attributes=attributes)
 
-        raise e
+        raise
 
     if is_openai_v1():
         response_dict = model_as_dict(response)
diff --git a/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/v1/assistant_wrappers.py b/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/v1/assistant_wrappers.py
index f8bdd78768..203147a85b 100644
--- a/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/v1/assistant_wrappers.py
+++ b/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/v1/assistant_wrappers.py
@@ -18,8 +18,9 @@
     should_emit_events,
 )
 from opentelemetry.instrumentation.utils import _SUPPRESS_INSTRUMENTATION_KEY
+from opentelemetry.semconv.attributes.error_attributes import ERROR_TYPE
 from opentelemetry.semconv_ai import LLMRequestTypeValues, SpanAttributes
-from opentelemetry.trace import SpanKind
+from opentelemetry.trace import SpanKind, Status, StatusCode
 
 from openai._legacy_response import LegacyAPIResponse
 from openai.types.beta.threads.run import Run
@@ -53,17 +54,24 @@ def runs_create_wrapper(tracer, wrapped, instance, args, kwargs):
     thread_id = kwargs.get("thread_id")
     instructions = kwargs.get("instructions")
 
-    response = wrapped(*args, **kwargs)
-    response_dict = model_as_dict(response)
+    try:
+        response = wrapped(*args, **kwargs)
+        response_dict = model_as_dict(response)
 
-    runs[thread_id] = {
-        "start_time": time.time_ns(),
-        "assistant_id": kwargs.get("assistant_id"),
-        "instructions": instructions,
-        "run_id": response_dict.get("id"),
-    }
+        runs[thread_id] = {
+            "start_time": time.time_ns(),
+            "assistant_id": kwargs.get("assistant_id"),
+            "instructions": instructions,
+            "run_id": response_dict.get("id"),
+        }
 
-    return response
+        return response
+    except Exception as e:
+        runs[thread_id] = {
+            "exception": e,
+            "end_time": time.time_ns(),
+        }
+        raise
 
 
 @_with_tracer_wrapper
@@ -85,10 +93,16 @@ def process_response(response):
     if context_api.get_value(_SUPPRESS_INSTRUMENTATION_KEY):
         return wrapped(*args, **kwargs)
 
-    response = wrapped(*args, **kwargs)
-    process_response(response)
-
-    return response
+    try:
+        response = wrapped(*args, **kwargs)
+        process_response(response)
+        return response
+    except Exception as e:
+        thread_id = kwargs.get("thread_id")
+        if thread_id in runs:
+            runs[thread_id]["exception"] = e
+            runs[thread_id]["end_time"] = time.time_ns()
+        raise
 
 
 @_with_tracer_wrapper
@@ -113,6 +127,11 @@ def messages_list_wrapper(tracer, wrapped, instance, args, kwargs):
         attributes={SpanAttributes.LLM_REQUEST_TYPE: LLMRequestTypeValues.CHAT.value},
         start_time=run.get("start_time"),
     )
+    if exception := run.get("exception"):
+        span.set_attribute(ERROR_TYPE, exception.__class__.__name__)
+        span.record_exception(exception)
+        span.set_status(Status(StatusCode.ERROR, str(exception)))
+        span.end(run.get("end_time"))
 
     prompt_index = 0
     if assistants.get(run["assistant_id"]) is not None or Config.enrich_assistant:
@@ -288,6 +307,12 @@ def runs_create_and_stream_wrapper(tracer, wrapped, instance, args, kwargs):
         span=span,
     )
 
-    response = wrapped(*args, **kwargs)
-
-    return response
+    try:
+        response = wrapped(*args, **kwargs)
+        return response
+    except Exception as e:
+        span.set_attribute(ERROR_TYPE, e.__class__.__name__)
+        span.record_exception(e)
+        span.set_status(Status(StatusCode.ERROR, str(e)))
+        span.end()
+        raise
diff --git a/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/v1/event_handler_wrapper.py b/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/v1/event_handler_wrapper.py
index 969632da37..c2d39bbf53 100644
--- a/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/v1/event_handler_wrapper.py
+++ b/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/v1/event_handler_wrapper.py
@@ -2,7 +2,9 @@
 from opentelemetry.instrumentation.openai.shared.event_emitter import emit_event
 from opentelemetry.instrumentation.openai.shared.event_models import ChoiceEvent
 from opentelemetry.instrumentation.openai.utils import should_emit_events
+from opentelemetry.semconv.attributes.error_attributes import ERROR_TYPE
 from opentelemetry.semconv_ai import SpanAttributes
+from opentelemetry.trace import Status, StatusCode
 from typing_extensions import override
 
 from openai import AssistantEventHandler
@@ -66,6 +68,9 @@ def on_tool_call_done(self, tool_call):
 
     @override
     def on_exception(self, exception: Exception):
+        self._span.set_attribute(ERROR_TYPE, exception.__class__.__name__)
+        self._span.record_exception(exception)
+        self._span.set_status(Status(StatusCode.ERROR, str(exception)))
         self._original_handler.on_exception(exception)
 
     @override
diff --git a/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/v1/responses_wrappers.py b/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/v1/responses_wrappers.py
index 7a51a9f0ec..d0842c7267 100644
--- a/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/v1/responses_wrappers.py
+++ b/packages/opentelemetry-instrumentation-openai/opentelemetry/instrumentation/openai/v1/responses_wrappers.py
@@ -39,6 +39,7 @@
 from opentelemetry import context as context_api
 from opentelemetry.instrumentation.utils import _SUPPRESS_INSTRUMENTATION_KEY
 from opentelemetry.semconv_ai import SpanAttributes
+from opentelemetry.semconv.attributes.error_attributes import ERROR_TYPE
 from opentelemetry.semconv._incubating.attributes.gen_ai_attributes import (
     GEN_AI_COMPLETION,
     GEN_AI_PROMPT,
@@ -426,6 +427,7 @@ def responses_get_or_create_wrapper(tracer: Tracer, wrapped, instance, args, kwa
                 start_time if traced_data is None else int(traced_data.start_time)
             ),
         )
+        span.set_attribute(ERROR_TYPE, e.__class__.__name__)
         span.record_exception(e)
         span.set_status(StatusCode.ERROR, str(e))
         if traced_data:
@@ -519,6 +521,7 @@ async def async_responses_get_or_create_wrapper(
                 start_time if traced_data is None else int(traced_data.start_time)
             ),
         )
+        span.set_attribute(ERROR_TYPE, e.__class__.__name__)
        span.record_exception(e)
         span.set_status(StatusCode.ERROR, str(e))
         if traced_data:
diff --git a/packages/opentelemetry-instrumentation-openai/tests/traces/test_chat.py b/packages/opentelemetry-instrumentation-openai/tests/traces/test_chat.py
index 5101c2f02b..abd546db77 100644
--- a/packages/opentelemetry-instrumentation-openai/tests/traces/test_chat.py
+++ b/packages/opentelemetry-instrumentation-openai/tests/traces/test_chat.py
@@ -15,6 +15,7 @@
     gen_ai_attributes as GenAIAttributes,
 )
 from opentelemetry.semconv_ai import SpanAttributes
+from opentelemetry.trace import StatusCode
 
 from .utils import assert_request_contains_tracecontext, spy_decorator
 
@@ -1436,3 +1437,78 @@ def test_chat_history_message_pydantic(span_exporter, openai_client):
         == second_user_message["content"]
     )
     assert second_span.attributes[f"{SpanAttributes.LLM_PROMPTS}.2.role"] == "user"
+
+
+def test_chat_exception(instrument_legacy, span_exporter, openai_client):
+    openai_client.api_key = "invalid"
+    with pytest.raises(Exception):
+        openai_client.chat.completions.create(
+            model="gpt-3.5-turbo",
+            messages=[{"role": "user", "content": "Tell me a joke about opentelemetry"}],
+        )
+
+    spans = span_exporter.get_finished_spans()
+
+    assert [span.name for span in spans] == [
+        "openai.chat",
+    ]
+    open_ai_span = spans[0]
+    assert (
+        open_ai_span.attributes[f"{SpanAttributes.LLM_PROMPTS}.0.content"]
+        == "Tell me a joke about opentelemetry"
+    )
+    assert (
+        open_ai_span.attributes.get(SpanAttributes.LLM_OPENAI_API_BASE)
+        == "https://api.openai.com/v1/"
+    )
+    assert open_ai_span.attributes.get(SpanAttributes.LLM_IS_STREAMING) is False
+    assert open_ai_span.status.status_code == StatusCode.ERROR
+    assert open_ai_span.status.description.startswith("Error code: 401")
+    events = open_ai_span.events
+    assert len(events) == 1
+    event = events[0]
+    assert event.name == "exception"
+    assert event.attributes["exception.type"] == "openai.AuthenticationError"
+    assert event.attributes["exception.message"].startswith("Error code: 401")
+    assert open_ai_span.attributes.get("error.type") == "AuthenticationError"
+    assert "Traceback (most recent call last):" in event.attributes["exception.stacktrace"]
+    assert "openai.AuthenticationError" in event.attributes["exception.stacktrace"]
+    assert "invalid_api_key" in event.attributes["exception.stacktrace"]
+
+
+@pytest.mark.asyncio
+async def test_chat_async_exception(instrument_legacy, span_exporter, async_openai_client):
+    async_openai_client.api_key = "invalid"
+    with pytest.raises(Exception):
+        await async_openai_client.chat.completions.create(
+            model="gpt-3.5-turbo",
+            messages=[{"role": "user", "content": "Tell me a joke about opentelemetry"}],
+        )
+
+    spans = span_exporter.get_finished_spans()
+
+    assert [span.name for span in spans] == [
+        "openai.chat",
+    ]
+    open_ai_span = spans[0]
+    assert (
+        open_ai_span.attributes[f"{SpanAttributes.LLM_PROMPTS}.0.content"]
+        == "Tell me a joke about opentelemetry"
+    )
+    assert (
+        open_ai_span.attributes.get(SpanAttributes.LLM_OPENAI_API_BASE)
+        == "https://api.openai.com/v1/"
+    )
+    assert open_ai_span.attributes.get(SpanAttributes.LLM_IS_STREAMING) is False
+    assert open_ai_span.status.status_code == StatusCode.ERROR
+    assert open_ai_span.status.description.startswith("Error code: 401")
+    events = open_ai_span.events
+    assert len(events) == 1
+    event = events[0]
+    assert event.name == "exception"
+    assert event.attributes["exception.type"] == "openai.AuthenticationError"
+    assert event.attributes["exception.message"].startswith("Error code: 401")
+    assert "Traceback (most recent call last):" in event.attributes["exception.stacktrace"]
+    assert "openai.AuthenticationError" in event.attributes["exception.stacktrace"]
+    assert "invalid_api_key" in event.attributes["exception.stacktrace"]
+    assert open_ai_span.attributes.get("error.type") == "AuthenticationError"
diff --git a/packages/opentelemetry-instrumentation-openai/tests/traces/test_chat_parse.py b/packages/opentelemetry-instrumentation-openai/tests/traces/test_chat_parse.py
index 01fd8aeec6..8960b63e49 100644
--- a/packages/opentelemetry-instrumentation-openai/tests/traces/test_chat_parse.py
+++ b/packages/opentelemetry-instrumentation-openai/tests/traces/test_chat_parse.py
@@ -8,6 +8,8 @@
     gen_ai_attributes as GenAIAttributes,
 )
 from opentelemetry.semconv_ai import SpanAttributes
+from opentelemetry.sdk.trace import Span
+from opentelemetry.trace import StatusCode
 from pydantic import BaseModel
 
 
@@ -527,3 +529,72 @@ def assert_message_in_logs(log: LogData, event_name: str, expected_content: dict
     else:
         assert log.log_record.body
         assert dict(log.log_record.body) == expected_content
+
+
+def test_parsed_completion_exception(
+    instrument_legacy, span_exporter, openai_client
+):
+    openai_client.api_key = "invalid"
+    with pytest.raises(Exception):
+        openai_client.chat.completions.parse(
+            model="gpt-4o",
+            messages=[{"role": "user", "content": "Tell me a joke about opentelemetry"}],
+            response_format=StructuredAnswer,
+        )
+
+    spans = span_exporter.get_finished_spans()
+    assert len(spans) == 1
+    span: Span = spans[0]
+    assert span.name == "openai.chat"
+    assert span.attributes.get(SpanAttributes.LLM_OPENAI_API_BASE) == "https://api.openai.com/v1/"
+    assert span.attributes.get(SpanAttributes.LLM_IS_STREAMING) is False
+    assert span.attributes.get(f"{SpanAttributes.LLM_PROMPTS}.0.content") == "Tell me a joke about opentelemetry"
+    assert span.attributes.get(f"{SpanAttributes.LLM_PROMPTS}.0.role") == "user"
+
+    assert span.status.status_code == StatusCode.ERROR
+    assert span.status.description.startswith("Error code: 401")
+    events = span.events
+    assert len(events) == 1
+    event = events[0]
+    assert event.name == "exception"
+    assert event.attributes["exception.type"] == "openai.AuthenticationError"
+    assert event.attributes["exception.message"].startswith("Error code: 401")
+    assert "Traceback (most recent call last):" in event.attributes["exception.stacktrace"]
+    assert "openai.AuthenticationError" in event.attributes["exception.stacktrace"]
+    assert "invalid_api_key" in event.attributes["exception.stacktrace"]
+    assert span.attributes.get("error.type") == "AuthenticationError"
+
+
+@pytest.mark.asyncio
+async def test_async_parsed_completion_exception(
+    instrument_legacy, span_exporter, async_openai_client
+):
+    async_openai_client.api_key = "invalid"
+    with pytest.raises(Exception):
+        await async_openai_client.chat.completions.parse(
+            model="gpt-4o",
+            messages=[{"role": "user", "content": "Tell me a joke about opentelemetry"}],
+            response_format=StructuredAnswer,
+        )
+
+    spans = span_exporter.get_finished_spans()
+    assert len(spans) == 1
+    span: Span = spans[0]
+    assert span.name == "openai.chat"
+    assert span.attributes.get(SpanAttributes.LLM_OPENAI_API_BASE) == "https://api.openai.com/v1/"
+    assert span.attributes.get(SpanAttributes.LLM_IS_STREAMING) is False
+    assert span.attributes.get(f"{SpanAttributes.LLM_PROMPTS}.0.content") == "Tell me a joke about opentelemetry"
+    assert span.attributes.get(f"{SpanAttributes.LLM_PROMPTS}.0.role") == "user"
+
+    assert span.status.status_code == StatusCode.ERROR
+    assert span.status.description.startswith("Error code: 401")
+    events = span.events
+    assert len(events) == 1
+    event = events[0]
+    assert event.name == "exception"
+    assert event.attributes["exception.type"] == "openai.AuthenticationError"
+    assert event.attributes["exception.message"].startswith("Error code: 401")
+    assert "Traceback (most recent call last):" in event.attributes["exception.stacktrace"]
+    assert "openai.AuthenticationError" in event.attributes["exception.stacktrace"]
+    assert "invalid_api_key" in event.attributes["exception.stacktrace"]
+    assert span.attributes.get("error.type") == "AuthenticationError"
diff --git a/packages/opentelemetry-instrumentation-openai/tests/traces/test_completions.py b/packages/opentelemetry-instrumentation-openai/tests/traces/test_completions.py
index 53cac0dc85..02f76d6b4c 100644
--- a/packages/opentelemetry-instrumentation-openai/tests/traces/test_completions.py
+++ b/packages/opentelemetry-instrumentation-openai/tests/traces/test_completions.py
@@ -10,6 +10,7 @@
 from opentelemetry.semconv._incubating.attributes import (
     gen_ai_attributes as GenAIAttributes,
 )
+from opentelemetry.trace import StatusCode
 from opentelemetry.semconv_ai import SpanAttributes
 
 from .utils import assert_request_contains_tracecontext, spy_decorator
@@ -920,6 +921,69 @@ async def test_async_completion_context_propagation_with_events_with_no_content(
     assert_message_in_logs(logs[1], "gen_ai.choice", choice_event)
 
 
+def test_completion_exception(instrument_legacy, span_exporter, openai_client):
+    openai_client.api_key = "invalid"
+    with pytest.raises(Exception):
+        openai_client.completions.create(
+            model="gpt-3.5-turbo",
+            prompt="Tell me a joke about opentelemetry",
+        )
+
+    spans = span_exporter.get_finished_spans()
+    assert [span.name for span in spans] == [
+        "openai.completion",
+    ]
+    open_ai_span = spans[0]
+    assert (
+        open_ai_span.attributes[f"{SpanAttributes.LLM_PROMPTS}.0.user"]
+        == "Tell me a joke about opentelemetry"
+    )
+    assert open_ai_span.status.status_code == StatusCode.ERROR
+    assert open_ai_span.status.description.startswith("Error code: 401")
+    events = open_ai_span.events
+    assert len(events) == 1
+    event = events[0]
+    assert event.name == "exception"
+    assert event.attributes["exception.type"] == "openai.AuthenticationError"
+    assert event.attributes["exception.message"].startswith("Error code: 401")
+    assert "Traceback (most recent call last):" in event.attributes["exception.stacktrace"]
+    assert "openai.AuthenticationError" in event.attributes["exception.stacktrace"]
+    assert "invalid_api_key" in event.attributes["exception.stacktrace"]
+    assert open_ai_span.attributes.get("error.type") == "AuthenticationError"
+
+
+@pytest.mark.asyncio
+async def test_async_completion_exception(instrument_legacy, span_exporter, async_openai_client):
+    async_openai_client.api_key = "invalid"
+    with pytest.raises(Exception):
+        await async_openai_client.completions.create(
+            model="gpt-3.5-turbo",
+            prompt="Tell me a joke about opentelemetry",
+        )
+
+    spans = span_exporter.get_finished_spans()
+    assert [span.name for span in spans] == [
+        "openai.completion",
+    ]
+    open_ai_span = spans[0]
+    assert (
+        open_ai_span.attributes[f"{SpanAttributes.LLM_PROMPTS}.0.user"]
+        == "Tell me a joke about opentelemetry"
+    )
+    assert open_ai_span.status.status_code == StatusCode.ERROR
+    assert open_ai_span.status.description.startswith("Error code: 401")
+    events = open_ai_span.events
+    assert len(events) == 1
+    event = events[0]
+    assert event.name == "exception"
+    assert event.attributes["exception.type"] == "openai.AuthenticationError"
+    assert event.attributes["exception.message"].startswith("Error code: 401")
+    assert "Traceback (most recent call last):" in event.attributes["exception.stacktrace"]
+    assert "openai.AuthenticationError" in event.attributes["exception.stacktrace"]
+    assert "invalid_api_key" in event.attributes["exception.stacktrace"]
+    assert open_ai_span.attributes.get("error.type") == "AuthenticationError"
+
+
 def assert_message_in_logs(log: LogData, event_name: str, expected_content: dict):
     assert log.log_record.attributes.get(EventAttributes.EVENT_NAME) == event_name
     assert (
diff --git a/packages/opentelemetry-instrumentation-openai/tests/traces/test_embeddings.py b/packages/opentelemetry-instrumentation-openai/tests/traces/test_embeddings.py
index 5e6c582c7c..1040427b8a 100644
--- a/packages/opentelemetry-instrumentation-openai/tests/traces/test_embeddings.py
+++ b/packages/opentelemetry-instrumentation-openai/tests/traces/test_embeddings.py
@@ -11,6 +11,7 @@
     gen_ai_attributes as GenAIAttributes,
 )
 from opentelemetry.semconv_ai import SpanAttributes
+from opentelemetry.trace import StatusCode
 
 from .utils import assert_request_contains_tracecontext, spy_decorator
 
@@ -600,6 +601,53 @@ async def test_async_embeddings_context_propagation_with_events_with_no_content(
     assert_message_in_logs(logs[1], "gen_ai.choice", choice_event)
 
 
+def test_embeddings_exception(instrument_legacy, span_exporter, openai_client):
+    openai_client.api_key = "invalid"
+    with pytest.raises(Exception):
+        openai_client.embeddings.create(
+            input="Tell me a joke about opentelemetry",
+            model="text-embedding-ada-002",
+        )
+
+    spans = span_exporter.get_finished_spans()
+    assert [span.name for span in spans] == [
+        "openai.embeddings",
+    ]
+    open_ai_span = spans[0]
+    assert open_ai_span.status.status_code == StatusCode.ERROR
+    assert open_ai_span.status.description.startswith("Error code: 401")
+    events = open_ai_span.events
+    assert len(events) == 1
+    event = events[0]
+    assert event.name == "exception"
+    assert event.attributes["exception.type"] == "openai.AuthenticationError"
+    assert event.attributes["exception.message"].startswith("Error code: 401")
+
+
+@pytest.mark.asyncio
+async def test_async_embeddings_exception(instrument_legacy, span_exporter, async_openai_client):
+    async_openai_client.api_key = "invalid"
+    with pytest.raises(Exception):
+        await async_openai_client.embeddings.create(
+            input="Tell me a joke about opentelemetry",
+            model="text-embedding-ada-002",
+        )
+
+    spans = span_exporter.get_finished_spans()
+    assert [span.name for span in spans] == [
+        "openai.embeddings",
+    ]
+    open_ai_span = spans[0]
+    assert open_ai_span.status.status_code == StatusCode.ERROR
+    assert open_ai_span.status.description.startswith("Error code: 401")
+    events = open_ai_span.events
+    assert len(events) == 1
+    event = events[0]
+    assert event.name == "exception"
+    assert event.attributes["exception.type"] == "openai.AuthenticationError"
+    assert event.attributes["exception.message"].startswith("Error code: 401")
+
+
 def assert_message_in_logs(log: LogData, event_name: str, expected_content: dict):
     assert log.log_record.attributes.get(EventAttributes.EVENT_NAME) == event_name
     assert (