Skip to content

Commit ddc75fa

Browse files
committed
Add gen_ai.conversation.id instrumentation to OpenAI provider
## Subject

Add `gen_ai.conversation.id` instrumentation to OpenAI provider

## Description

Automatically extract and capture `gen_ai.conversation.id` from OpenAI's stateful APIs to comply with OTel GenAI semantic conventions.

### Changes

- Extract conversation ID from `/responses` endpoint request (`conversation` parameter)
- Add handler for `/conversations` endpoint
- Extract conversation ID from Response objects (both sync and streaming)
- Extract conversation ID from conversation creation responses

### Files Modified

- `logfire/_internal/integrations/llm_providers/openai.py`
- `tests/otel_integrations/test_openai.py`
1 parent 4f4bcbb commit ddc75fa

File tree

2 files changed

+162
-0
lines changed

2 files changed

+162
-0
lines changed

logfire/_internal/integrations/llm_providers/openai.py

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
from openai.types.chat.chat_completion import ChatCompletion
1111
from openai.types.chat.chat_completion_chunk import ChatCompletionChunk
1212
from openai.types.completion import Completion
13+
from openai.types.conversations import Conversation
1314
from openai.types.create_embedding_response import CreateEmbeddingResponse
1415
from openai.types.images_response import ImagesResponse
1516
from openai.types.responses import Response
@@ -19,6 +20,7 @@
1920

2021
from ...utils import handle_internal_errors, log_internal_error
2122
from .semconv import (
23+
CONVERSATION_ID,
2224
OPERATION_NAME,
2325
PROVIDER_NAME,
2426
REQUEST_FREQUENCY_PENALTY,
@@ -122,6 +124,12 @@ def get_endpoint_config(options: FinalRequestOptions) -> EndpointConfig:
122124
}
123125
_extract_request_parameters(json_data, span_data)
124126

127+
if (conversation := json_data.get('conversation')) is not None:
128+
if isinstance(conversation, str):
129+
span_data[CONVERSATION_ID] = conversation
130+
elif isinstance(conversation, dict) and 'id' in conversation:
131+
span_data[CONVERSATION_ID] = conversation['id']
132+
125133
return EndpointConfig(
126134
message_template='Responses API with {gen_ai.request.model!r}',
127135
span_data=span_data,
@@ -164,6 +172,17 @@ def get_endpoint_config(options: FinalRequestOptions) -> EndpointConfig:
164172
message_template='Image Generation with {request_data[model]!r}',
165173
span_data=span_data,
166174
)
175+
elif url == '/conversations':
176+
span_data = {
177+
'request_data': json_data,
178+
'url': url,
179+
PROVIDER_NAME: 'openai',
180+
OPERATION_NAME: 'conversation',
181+
}
182+
return EndpointConfig(
183+
message_template='OpenAI Conversation Create',
184+
span_data=span_data,
185+
)
167186
else:
168187
span_data = {
169188
'request_data': json_data,
@@ -225,6 +244,11 @@ def get_response_data(self) -> Any:
225244
def get_attributes(self, span_data: dict[str, Any]) -> dict[str, Any]:
226245
response = self.get_response_data()
227246
span_data['events'] = span_data['events'] + responses_output_events(response)
247+
248+
if (conversation := getattr(response, 'conversation', None)) is not None:
249+
if hasattr(conversation, 'id'):
250+
span_data[CONVERSATION_ID] = conversation.id
251+
228252
return span_data
229253

230254

@@ -311,6 +335,10 @@ def on_response(response: ResponseT, span: LogfireSpan) -> ResponseT:
311335
elif isinstance(response, ImagesResponse):
312336
span.set_attribute('response_data', {'images': response.data})
313337
elif isinstance(response, Response): # pragma: no branch
338+
if (conversation := getattr(response, 'conversation', None)) is not None:
339+
if hasattr(conversation, 'id'):
340+
span.set_attribute(CONVERSATION_ID, conversation.id)
341+
314342
try:
315343
events = json.loads(span.attributes['events']) # type: ignore
316344
except Exception:
@@ -319,6 +347,9 @@ def on_response(response: ResponseT, span: LogfireSpan) -> ResponseT:
319347
events += responses_output_events(response)
320348
span.set_attribute('events', events)
321349

350+
if isinstance(response, Conversation):
351+
span.set_attribute(CONVERSATION_ID, response.id)
352+
322353
return response
323354

324355

tests/otel_integrations/test_openai.py

Lines changed: 131 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -360,6 +360,23 @@ def request_handler(request: httpx.Request) -> httpx.Response:
360360
200,
361361
json={'id': 'thread_abc123', 'object': 'thread', 'created_at': 1698107661, 'metadata': {}},
362362
)
363+
elif request.url == 'https://api.openai.com/v1/responses':
364+
json_body = json.loads(request.content)
365+
conversation_data = {'id': json_body['conversation']} if json_body.get('conversation') else None
366+
return httpx.Response(
367+
200,
368+
json={
369+
'id': 'resp_test_123',
370+
'created_at': 1698107661,
371+
'model': json_body.get('model', 'gpt-4'),
372+
'object': 'response',
373+
'output': [
374+
{'type': 'message', 'role': 'assistant', 'content': [{'type': 'output_text', 'text': 'Nine'}]}
375+
],
376+
'conversation': conversation_data,
377+
'usage': {'input_tokens': 10, 'output_tokens': 5, 'total_tokens': 15},
378+
},
379+
)
363380
else: # pragma: no cover
364381
raise ValueError(f'Unexpected request to {request.url!r}')
365382

@@ -2803,3 +2820,117 @@ def test_openrouter_streaming_reasoning(exporter: TestExporter) -> None:
28032820
},
28042821
]
28052822
)
2823+
2824+
2825+
def test_get_endpoint_config_conversations() -> None:
2826+
"""Test that /conversations endpoint is properly configured."""
2827+
from unittest.mock import MagicMock
2828+
2829+
from logfire._internal.integrations.llm_providers.openai import (
2830+
get_endpoint_config, # pyright: ignore[reportPrivateUsage]
2831+
)
2832+
2833+
options = MagicMock()
2834+
options.url = '/conversations'
2835+
options.json_data = {}
2836+
2837+
config = get_endpoint_config(options)
2838+
assert config.message_template == 'OpenAI Conversation Create'
2839+
assert config.span_data['gen_ai.provider.name'] == 'openai'
2840+
assert config.span_data['gen_ai.operation.name'] == 'conversation'
2841+
2842+
2843+
def test_get_endpoint_config_responses_with_conversation_string() -> None:
2844+
"""Test that /responses endpoint extracts conversation ID when provided as string."""
2845+
from unittest.mock import MagicMock
2846+
2847+
from logfire._internal.integrations.llm_providers.openai import (
2848+
get_endpoint_config, # pyright: ignore[reportPrivateUsage]
2849+
)
2850+
2851+
options = MagicMock()
2852+
options.url = '/responses'
2853+
options.json_data = {
2854+
'model': 'gpt-4',
2855+
'input': 'Hello',
2856+
'conversation': 'conv_123456',
2857+
}
2858+
2859+
config = get_endpoint_config(options)
2860+
assert config.span_data['gen_ai.conversation.id'] == 'conv_123456'
2861+
2862+
2863+
def test_get_endpoint_config_responses_with_conversation_dict() -> None:
2864+
"""Test that /responses endpoint extracts conversation ID when provided as dict."""
2865+
from unittest.mock import MagicMock
2866+
2867+
from logfire._internal.integrations.llm_providers.openai import (
2868+
get_endpoint_config, # pyright: ignore[reportPrivateUsage]
2869+
)
2870+
2871+
options = MagicMock()
2872+
options.url = '/responses'
2873+
options.json_data = {
2874+
'model': 'gpt-4',
2875+
'input': 'Hello',
2876+
'conversation': {'id': 'conv_789012'},
2877+
}
2878+
2879+
config = get_endpoint_config(options)
2880+
assert config.span_data['gen_ai.conversation.id'] == 'conv_789012'
2881+
2882+
2883+
def test_on_response_handles_conversation_creation_response() -> None:
2884+
"""Test that on_response extracts conversation ID from /conversations response."""
2885+
from unittest.mock import MagicMock
2886+
2887+
from openai.types.conversations import Conversation
2888+
2889+
from logfire._internal.integrations.llm_providers.openai import (
2890+
on_response, # pyright: ignore[reportPrivateUsage]
2891+
)
2892+
2893+
response = Conversation.model_construct(id='conv_created_456')
2894+
mock_span = MagicMock()
2895+
mock_span.attributes = {}
2896+
2897+
on_response(response, mock_span)
2898+
2899+
mock_span.set_attribute.assert_any_call('gen_ai.conversation.id', 'conv_created_456')
2900+
2901+
2902+
def test_streaming_handler_extracts_conversation_id() -> None:
2903+
"""Test that OpenaiResponsesStreamState.get_attributes extracts conversation ID."""
2904+
from unittest.mock import MagicMock
2905+
2906+
from logfire._internal.integrations.llm_providers.openai import (
2907+
OpenaiResponsesStreamState, # pyright: ignore[reportPrivateUsage]
2908+
)
2909+
2910+
mock_response = MagicMock()
2911+
mock_response.output = []
2912+
mock_response.conversation = MagicMock(id='conv_stream_789')
2913+
2914+
stream_state = OpenaiResponsesStreamState()
2915+
stream_state._state._completed_response = mock_response
2916+
2917+
span_data: dict[str, Any] = {'events': []}
2918+
result = stream_state.get_attributes(span_data)
2919+
2920+
assert result['gen_ai.conversation.id'] == 'conv_stream_789'
2921+
2922+
2923+
def test_responses_api_with_conversation_id(instrumented_client: openai.Client, exporter: TestExporter) -> None:
2924+
"""Integration test for conversation ID extraction from Responses API."""
2925+
response = instrumented_client.responses.create(
2926+
model='gpt-4',
2927+
input='What is four plus five?',
2928+
conversation='conv_integration_test_123',
2929+
)
2930+
assert response.id == 'resp_test_123'
2931+
assert response.conversation is not None
2932+
assert response.conversation.id == 'conv_integration_test_123'
2933+
2934+
spans = exporter.exported_spans_as_dict(parse_json_attributes=True)
2935+
assert len(spans) == 1
2936+
assert spans[0]['attributes']['gen_ai.conversation.id'] == 'conv_integration_test_123'

0 commit comments

Comments (0)