Skip to content

openai: don't crash on no-op #54

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@
# available since 1.29.0
GEN_AI_REQUEST_ENCODING_FORMATS = "gen_ai.request.encoding_formats"

from opentelemetry.metrics import Histogram
from opentelemetry.metrics import Histogram, NoOpHistogram
from opentelemetry.trace import Span
from opentelemetry.util.types import Attributes

Expand Down Expand Up @@ -191,6 +191,9 @@ def _get_attributes_if_set(span: Span, names: Iterable) -> Attributes:


def _record_token_usage_metrics(metric: Histogram, span: Span, usage: CompletionUsage):
if _is_metric_disabled(metric):
return # Avoid reading back attributes for an unrecorded metric

token_usage_metric_attrs = _get_attributes_if_set(
span,
(
Expand All @@ -208,7 +211,14 @@ def _record_token_usage_metrics(metric: Histogram, span: Span, usage: Completion
metric.record(usage.completion_tokens, {**token_usage_metric_attrs, GEN_AI_TOKEN_TYPE: "output"})


def _is_metric_disabled(metric: Histogram):
    """Return True when *metric* is a no-op instrument.

    A NoOpHistogram is what a NoOpMeterProvider hands out (e.g. when the
    SDK is disabled); recording into it is wasted work, so callers use this
    check to skip building attribute dictionaries for unrecorded metrics.
    """
    disabled = isinstance(metric, NoOpHistogram)
    return disabled


def _record_operation_duration_metric(metric: Histogram, span: Span, start: float):
if _is_metric_disabled(metric):
return # Avoid reading back attributes for an unrecorded metric

operation_duration_metric_attrs = _get_attributes_if_set(
span,
(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@

import json
import os
import re
from typing import Sequence, Tuple, Union

import openai
Expand Down Expand Up @@ -105,15 +106,12 @@ def instrument():
@pytest.fixture
def vcr_cassette_name(request):
    """Return the VCR cassette name for the current test.

    Strips `_async` and `_disabled` from the test function name so that the
    sync/async and enabled/disabled variants of a test share the same
    recorded cassette data.
    """
    # Name of the currently running test function, e.g. "test_chat_async".
    test_name = request.node.name

    # A single regex pass removes either suffix wherever it appears.
    return re.sub(r"(_async|_disabled)", "", test_name)


OPENAI_API_KEY = "test_openai_api_key"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
from opentelemetry._events import Event
from opentelemetry._logs import LogRecord
from opentelemetry.instrumentation.openai import OpenAIInstrumentor
from opentelemetry.metrics import NoOpMeterProvider
from opentelemetry.semconv._incubating.attributes.gen_ai_attributes import (
GEN_AI_OPENAI_REQUEST_RESPONSE_FORMAT,
GEN_AI_OPENAI_REQUEST_SEED,
Expand All @@ -48,7 +49,7 @@
)
from opentelemetry.semconv.attributes.error_attributes import ERROR_TYPE
from opentelemetry.semconv.attributes.server_attributes import SERVER_ADDRESS, SERVER_PORT
from opentelemetry.trace import SpanKind, StatusCode
from opentelemetry.trace import NoOpTracerProvider, SpanKind, StatusCode

from .conftest import (
address_and_port,
Expand Down Expand Up @@ -1010,6 +1011,34 @@ def test_chat_stream_with_include_usage_option(default_openai_env, trace_exporte
)


@pytest.mark.vcr()
def test_chat_stream_with_include_usage_option_disabled(default_openai_env, instrument):
    """
    When OTEL_SDK_DISABLED=true, opentelemetry-instrument still instruments
    OpenAI, just with NoOp tracer and meter providers. This ensures when NoOp,
    instrumentation doesn't crash.
    """
    # Replace the session-wide instrumentation with one backed by no-op
    # tracer/meter providers, mimicking a disabled SDK.
    instrument.uninstrument()
    noop_instrumentor = OpenAIInstrumentor()
    noop_instrumentor.instrument(tracer_provider=NoOpTracerProvider(), meter_provider=NoOpMeterProvider())
    try:
        client = openai.OpenAI()

        messages = [{"role": "user", "content": TEST_CHAT_INPUT}]

        stream = client.chat.completions.create(model=TEST_CHAT_MODEL, messages=messages, stream=True)
        received = []
        for chunk in stream:
            if chunk.choices:
                received.append(chunk.choices[0].delta.content or "")
        assert "".join(received) == "South Atlantic Ocean."
    finally:
        # Always restore a clean instrumentation state for later tests.
        noop_instrumentor.uninstrument()


@pytest.mark.skipif(OPENAI_VERSION < (1, 26, 0), reason="stream_options added in 1.26.0")
@pytest.mark.integration
def test_chat_stream_with_include_usage_option_and_capture_message_content_integration(
Expand Down
Loading