Don't ascii escape unicode chars in prompts and completions #40003

Merged (5 commits, Mar 17, 2025)
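
The substance of the change: every `json.dumps` call that serializes prompt and completion content into `gen_ai.event.content` trace attributes, in both azure-ai-inference and azure-ai-projects, now passes `ensure_ascii=False`, so non-ASCII text is recorded as-is instead of as `\uXXXX` escapes. A minimal sketch of the difference in standard-library behavior (illustrative only, not part of the diff):

```python
import json

message = {"role": "user", "content": "将“hello world”翻译成中文和乌克兰语"}

# Default behavior: every non-ASCII character is escaped, which bloats the
# traced payload and makes it hard to read in a span viewer.
print(json.dumps(message))
# {"role": "user", "content": "\u5c06\u201chello world\u201d\u7ffb\u8bd1..."}

# With ensure_ascii=False the original characters survive in the event content.
print(json.dumps(message, ensure_ascii=False))
# {"role": "user", "content": "将“hello world”翻译成中文和乌克兰语"}
```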
2 changes: 1 addition & 1 deletion sdk/ai/azure-ai-inference/assets.json
@@ -2,5 +2,5 @@
"AssetsRepo": "Azure/azure-sdk-assets",
"AssetsRepoPrefixPath": "python",
"TagPrefix": "python/ai/azure-ai-inference",
"Tag": "python/ai/azure-ai-inference_3f06cee8a7"
"Tag": "python/ai/azure-ai-inference_01cf9f82e3"
}
10 changes: 5 additions & 5 deletions sdk/ai/azure-ai-inference/azure/ai/inference/tracing.py
@@ -208,7 +208,7 @@ def _add_request_chat_message_events(self, span: "AbstractSpan", **kwargs: Any)
f"gen_ai.{message.get('role')}.message",
{
"gen_ai.system": _INFERENCE_GEN_AI_SYSTEM_NAME,
"gen_ai.event.content": json.dumps(message),
"gen_ai.event.content": json.dumps(message, ensure_ascii=False),
},
timestamp,
)
@@ -300,7 +300,7 @@ def _add_response_chat_message_events(
full_response["message"]["tool_calls"] = [tool.as_dict() for tool in choice.message.tool_calls]
attributes = {
"gen_ai.system": _INFERENCE_GEN_AI_SYSTEM_NAME,
"gen_ai.event.content": json.dumps(full_response),
"gen_ai.event.content": json.dumps(full_response, ensure_ascii=False),
}
else:
response: Dict[str, Any] = {
@@ -318,7 +318,7 @@

attributes = {
"gen_ai.system": _INFERENCE_GEN_AI_SYSTEM_NAME,
"gen_ai.event.content": json.dumps(response),
"gen_ai.event.content": json.dumps(response, ensure_ascii=False),
}
last_event_timestamp_ns = self._record_event(span, "gen_ai.choice", attributes, last_event_timestamp_ns)

@@ -478,7 +478,7 @@ def __iter__( # pyright: ignore [reportIncompatibleMethodOverride]
)
attributes = {
"gen_ai.system": _INFERENCE_GEN_AI_SYSTEM_NAME,
"gen_ai.event.content": json.dumps(accumulate),
"gen_ai.event.content": json.dumps(accumulate, ensure_ascii=False),
}
self._instrumentor._record_event(span, "gen_ai.choice", attributes, previous_event_timestamp)
span.finish()
@@ -532,7 +532,7 @@ def _trace_stream_content(self) -> None:
self._accumulate["message"]["tool_calls"] = list(tools_no_recording)
attributes = {
"gen_ai.system": _INFERENCE_GEN_AI_SYSTEM_NAME,
"gen_ai.event.content": json.dumps(self._accumulate),
"gen_ai.event.content": json.dumps(self._accumulate, ensure_ascii=False),
}
self._last_event_timestamp_ns = self._instrumentor._record_event( # pylint: disable=protected-access, line-too-long # pyright: ignore [reportFunctionMemberAccess]
span, "gen_ai.choice", attributes, self._last_event_timestamp_ns
65 changes: 57 additions & 8 deletions sdk/ai/azure-ai-inference/tests/test_client_tracing.py
@@ -3,6 +3,7 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import json
import os
import azure.ai.inference as sdk
from azure.ai.inference.tracing import AIInferenceInstrumentor
@@ -322,6 +322,58 @@ def test_chat_completion_tracing_content_recording_enabled(self, **kwargs):
assert events_match == True
AIInferenceInstrumentor().uninstrument()

@ServicePreparerChatCompletions()
@recorded_by_proxy
def test_chat_completion_tracing_content_unicode(self, **kwargs):
# Make sure code is not instrumented due to a previous test exception
try:
AIInferenceInstrumentor().uninstrument()
except RuntimeError as e:
pass
self.modify_env_var(CONTENT_TRACING_ENV_VARIABLE, "True")
client = self._create_chat_client(**kwargs)
processor, exporter = self.setup_memory_trace_exporter()
AIInferenceInstrumentor().instrument()
response = client.complete(
messages=[
sdk.models.SystemMessage(content="You are a helpful assistant."),
sdk.models.UserMessage(content="将“hello world”翻译成中文和乌克兰语"),
],
)
processor.force_flush()
spans = exporter.get_spans_by_name_starts_with("chat")
assert len(spans) == 1
expected_events = [
{
"name": "gen_ai.system.message",
"attributes": {
"gen_ai.system": "az.ai.inference",
"gen_ai.event.content": '{"role": "system", "content": "You are a helpful assistant."}',
},
},
{
"name": "gen_ai.user.message",
"attributes": {
"gen_ai.system": "az.ai.inference",
"gen_ai.event.content": '{"role": "user", "content": "将“hello world”翻译成中文和乌克兰语"}',
},
},
{
"name": "gen_ai.choice",
"attributes": {
"gen_ai.system": "az.ai.inference",
"gen_ai.event.content": '{"message": {"content": "*"}, "finish_reason": "stop", "index": 0}',
},
},
]
events_match = GenAiTraceVerifier().check_span_events(spans[0], expected_events)
assert events_match == True

completion_event_content = json.loads(spans[0].events[2].attributes["gen_ai.event.content"])
assert False == completion_event_content["message"]["content"].isascii()
assert response.choices[0].message.content == completion_event_content["message"]["content"]
AIInferenceInstrumentor().uninstrument()

@ServicePreparerChatCompletions()
@recorded_by_proxy
def test_chat_completion_streaming_tracing_content_recording_disabled(self, **kwargs):
@@ -344,14 +397,12 @@ def test_chat_completion_streaming_tracing_content_recording_disabled(self, **kw
)
response_content = ""
for update in response:
if update.choices:
if update.choices and update.choices[0].delta.content:
response_content = response_content + update.choices[0].delta.content
client.close()

processor.force_flush()
spans = exporter.get_spans_by_name_starts_with("chat ")
if len(spans) == 0:
spans = exporter.get_spans_by_name("chat")
spans = exporter.get_spans_by_name_starts_with("chat")
assert len(spans) == 1
span = spans[0]
expected_attributes = [
@@ -403,7 +454,7 @@ def test_chat_completion_streaming_tracing_content_recording_enabled(self, **kwa
)
response_content = ""
for update in response:
if update.choices:
if update.choices and update.choices[0].delta.content:
response_content = response_content + update.choices[0].delta.content
client.close()

@@ -527,9 +578,7 @@ def get_weather(city: str) -> str:
# With the additional tools information on hand, get another response from the model
response = client.complete(messages=messages, tools=[weather_description])
processor.force_flush()
spans = exporter.get_spans_by_name_starts_with("chat ")
if len(spans) == 0:
spans = exporter.get_spans_by_name("chat")
spans = exporter.get_spans_by_name_starts_with("chat")
assert len(spans) == 2
expected_attributes = [
("gen_ai.operation.name", "chat"),
12 changes: 7 additions & 5 deletions sdk/ai/azure-ai-projects/README.md
@@ -580,7 +580,7 @@ agent = project_client.agents.create_agent(
Currently, the Azure Function integration for the AI Agent has the following limitations:

- Azure Functions integration is available **only for non-streaming scenarios**.
- Supported trigger for Azure Function is currently limited to **Queue triggers** only.
- Supported trigger for Azure Function is currently limited to **Queue triggers** only.
HTTP or other trigger types and streaming responses are not supported at this time.

---
Expand All @@ -601,8 +601,8 @@ app = func.FunctionApp()
@app.get_weather(arg_name="inputQueue",
queue_name="input",
connection="AzureWebJobsStorage")
@app.queue_output(arg_name="outputQueue",
queue_name="output",
@app.queue_output(arg_name="outputQueue",
queue_name="output",
connection="AzureWebJobsStorage")
def get_weather(inputQueue: func.QueueMessage, outputQueue: func.Out[str]):
try:
@@ -852,7 +852,7 @@ message = project_client.agents.create_message(

#### Create Message with Code Interpreter Attachment

To attach a file to a message for data analysis, use `MessageAttachment` and `CodeInterpreterTool` classes. You must pass `CodeInterpreterTool` as `tools` or `toolset` in `create_agent` call or the file attachment cannot be opened for code interpreter.
To attach a file to a message for data analysis, use `MessageAttachment` and `CodeInterpreterTool` classes. You must pass `CodeInterpreterTool` as `tools` or `toolset` in `create_agent` call or the file attachment cannot be opened for code interpreter.

Here is an example to pass `CodeInterpreterTool` as tool:
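
The README's actual snippet is collapsed in this diff view. For orientation only, here is a hedged sketch of passing `CodeInterpreterTool` as `tools` at agent creation and then attaching a file to a message; the environment variables, uploaded file name, and message text are assumptions, not taken from this PR:

```python
import os
from azure.ai.projects import AIProjectClient
from azure.ai.projects.models import CodeInterpreterTool, FilePurpose, MessageAttachment
from azure.identity import DefaultAzureCredential

project_client = AIProjectClient.from_connection_string(
    credential=DefaultAzureCredential(),
    conn_str=os.environ["PROJECT_CONNECTION_STRING"],
)

with project_client:
    # Upload the data file that the code interpreter should be allowed to open.
    file = project_client.agents.upload_file_and_poll(file_path="data.csv", purpose=FilePurpose.AGENTS)

    code_interpreter = CodeInterpreterTool()

    # CodeInterpreterTool must be passed as `tools` (or via a toolset) when the agent
    # is created; otherwise the attachment below cannot be opened by the interpreter.
    agent = project_client.agents.create_agent(
        model=os.environ["MODEL_DEPLOYMENT_NAME"],
        name="my-assistant",
        instructions="You are a helpful assistant",
        tools=code_interpreter.definitions,
    )

    thread = project_client.agents.create_thread()
    attachment = MessageAttachment(file_id=file.id, tools=code_interpreter.definitions)
    message = project_client.agents.create_message(
        thread_id=thread.id,
        role="user",
        content="Analyze the attached file and summarize it.",
        attachments=[attachment],
    )
```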

Expand Down Expand Up @@ -1288,12 +1288,14 @@ if not application_insights_connection_string:
exit()
configure_azure_monitor(connection_string=application_insights_connection_string)

# enable additional instrumentations
project_client.telemetry.enable()

scenario = os.path.basename(__file__)
tracer = trace.get_tracer(__name__)

with tracer.start_as_current_span(scenario):
with project_client:
project_client.telemetry.enable()
```

<!-- END SNIPPET -->
2 changes: 1 addition & 1 deletion sdk/ai/azure-ai-projects/assets.json
@@ -2,5 +2,5 @@
"AssetsRepo": "Azure/azure-sdk-assets",
"AssetsRepoPrefixPath": "python",
"TagPrefix": "python/ai/azure-ai-projects",
"Tag": "python/ai/azure-ai-projects_04dc35e78c"
"Tag": "python/ai/azure-ai-projects_e7dc31a23f"
}
@@ -343,7 +343,7 @@ def _add_message_event(
message_status=message_status,
usage=usage,
)
attributes[GEN_AI_EVENT_CONTENT] = json.dumps(event_body)
attributes[GEN_AI_EVENT_CONTENT] = json.dumps(event_body, ensure_ascii=False)
span.span_instance.add_event(name=f"gen_ai.{role}.message", attributes=attributes)

def _get_field(self, obj: Any, field: str) -> Any:
Expand Down Expand Up @@ -374,7 +374,7 @@ def _add_instructions_event(
event_body["content"] = instructions or additional_instructions

attributes = self._create_event_attributes(agent_id=agent_id, thread_id=thread_id)
attributes[GEN_AI_EVENT_CONTENT] = json.dumps(event_body)
attributes[GEN_AI_EVENT_CONTENT] = json.dumps(event_body, ensure_ascii=False)
span.span_instance.add_event(name=GEN_AI_SYSTEM_MESSAGE, attributes=attributes)

def _get_role(self, role: Optional[Union[str, MessageRole]]) -> str:
Expand Down Expand Up @@ -413,10 +413,10 @@ def _add_tool_assistant_message_event(self, span, step: RunStep) -> None:
)

if _trace_agents_content:
attributes[GEN_AI_EVENT_CONTENT] = json.dumps({"tool_calls": tool_calls})
attributes[GEN_AI_EVENT_CONTENT] = json.dumps({"tool_calls": tool_calls}, ensure_ascii=False)
else:
tool_calls_non_recording = self._remove_function_call_names_and_arguments(tool_calls=tool_calls)
attributes[GEN_AI_EVENT_CONTENT] = json.dumps({"tool_calls": tool_calls_non_recording})
attributes[GEN_AI_EVENT_CONTENT] = json.dumps({"tool_calls": tool_calls_non_recording}, ensure_ascii=False)
span.span_instance.add_event(name="gen_ai.assistant.message", attributes=attributes)

def set_end_run(self, span: "AbstractSpan", run: Optional[ThreadRun]) -> None:
Expand Down Expand Up @@ -518,7 +518,9 @@ def _add_tool_message_events(
body = {"content": tool_output["output"], "id": tool_output["tool_call_id"]}
else:
body = {"content": "", "id": tool_output["tool_call_id"]}
span.span_instance.add_event("gen_ai.tool.message", {"gen_ai.event.content": json.dumps(body)})
span.span_instance.add_event(
"gen_ai.tool.message", {"gen_ai.event.content": json.dumps(body, ensure_ascii=False)}
)
return True

return False
@@ -1330,33 +1332,33 @@ def inner(*args, **kwargs): # pylint: disable=R0911
class_function_name = function.__qualname__

if class_function_name.startswith("AgentsOperations.create_agent"):
kwargs.setdefault('merge_span', True)
kwargs.setdefault("merge_span", True)
return self.trace_create_agent(function, *args, **kwargs)
if class_function_name.startswith("AgentsOperations.create_thread"):
kwargs.setdefault('merge_span', True)
kwargs.setdefault("merge_span", True)
return self.trace_create_thread(function, *args, **kwargs)
if class_function_name.startswith("AgentsOperations.create_message"):
kwargs.setdefault('merge_span', True)
kwargs.setdefault("merge_span", True)
return self.trace_create_message(function, *args, **kwargs)
if class_function_name.startswith("AgentsOperations.create_run"):
kwargs.setdefault('merge_span', True)
kwargs.setdefault("merge_span", True)
return self.trace_create_run(OperationName.START_THREAD_RUN, function, *args, **kwargs)
if class_function_name.startswith("AgentsOperations.create_and_process_run"):
kwargs.setdefault('merge_span', True)
kwargs.setdefault("merge_span", True)
return self.trace_create_run(OperationName.PROCESS_THREAD_RUN, function, *args, **kwargs)
if class_function_name.startswith("AgentsOperations.submit_tool_outputs_to_run"):
kwargs.setdefault('merge_span', True)
kwargs.setdefault("merge_span", True)
return self.trace_submit_tool_outputs(False, function, *args, **kwargs)
if class_function_name.startswith("AgentsOperations.submit_tool_outputs_to_stream"):
kwargs.setdefault('merge_span', True)
kwargs.setdefault("merge_span", True)
return self.trace_submit_tool_outputs(True, function, *args, **kwargs)
if class_function_name.startswith("AgentsOperations._handle_submit_tool_outputs"):
return self.trace_handle_submit_tool_outputs(function, *args, **kwargs)
if class_function_name.startswith("AgentsOperations.create_stream"):
kwargs.setdefault('merge_span', True)
kwargs.setdefault("merge_span", True)
return self.trace_create_stream(function, *args, **kwargs)
if class_function_name.startswith("AgentsOperations.list_messages"):
kwargs.setdefault('merge_span', True)
kwargs.setdefault("merge_span", True)
return self.trace_list_messages(function, *args, **kwargs)
if class_function_name.startswith("AgentRunStream.__exit__"):
return self.handle_run_stream_exit(function, *args, **kwargs)
@@ -1398,33 +1400,33 @@ async def inner(*args, **kwargs): # pylint: disable=R0911
class_function_name = function.__qualname__

if class_function_name.startswith("AgentsOperations.create_agent"):
kwargs.setdefault('merge_span', True)
kwargs.setdefault("merge_span", True)
return await self.trace_create_agent_async(function, *args, **kwargs)
if class_function_name.startswith("AgentsOperations.create_thread"):
kwargs.setdefault('merge_span', True)
kwargs.setdefault("merge_span", True)
return await self.trace_create_thread_async(function, *args, **kwargs)
if class_function_name.startswith("AgentsOperations.create_message"):
kwargs.setdefault('merge_span', True)
kwargs.setdefault("merge_span", True)
return await self.trace_create_message_async(function, *args, **kwargs)
if class_function_name.startswith("AgentsOperations.create_run"):
kwargs.setdefault('merge_span', True)
kwargs.setdefault("merge_span", True)
return await self.trace_create_run_async(OperationName.START_THREAD_RUN, function, *args, **kwargs)
if class_function_name.startswith("AgentsOperations.create_and_process_run"):
kwargs.setdefault('merge_span', True)
kwargs.setdefault("merge_span", True)
return await self.trace_create_run_async(OperationName.PROCESS_THREAD_RUN, function, *args, **kwargs)
if class_function_name.startswith("AgentsOperations.submit_tool_outputs_to_run"):
kwargs.setdefault('merge_span', True)
kwargs.setdefault("merge_span", True)
return await self.trace_submit_tool_outputs_async(False, function, *args, **kwargs)
if class_function_name.startswith("AgentsOperations.submit_tool_outputs_to_stream"):
kwargs.setdefault('merge_span', True)
kwargs.setdefault("merge_span", True)
return await self.trace_submit_tool_outputs_async(True, function, *args, **kwargs)
if class_function_name.startswith("AgentsOperations._handle_submit_tool_outputs"):
return await self.trace_handle_submit_tool_outputs_async(function, *args, **kwargs)
if class_function_name.startswith("AgentsOperations.create_stream"):
kwargs.setdefault('merge_span', True)
kwargs.setdefault("merge_span", True)
return await self.trace_create_stream_async(function, *args, **kwargs)
if class_function_name.startswith("AgentsOperations.list_messages"):
kwargs.setdefault('merge_span', True)
kwargs.setdefault("merge_span", True)
return await self.trace_list_messages_async(function, *args, **kwargs)
if class_function_name.startswith("AsyncAgentRunStream.__aexit__"):
return self.handle_run_stream_exit(function, *args, **kwargs)
@@ -32,6 +32,7 @@
scenario = os.path.basename(__file__)
tracer = trace.get_tracer(__name__)


async def main() -> None:

async with DefaultAzureCredential() as creds:
@@ -47,11 +48,15 @@ async def main() -> None:
exit()
configure_azure_monitor(connection_string=application_insights_connection_string)

# enable additional instrumentations
project_client.telemetry.enable()

with tracer.start_as_current_span(scenario):
async with project_client:
project_client.telemetry.enable()
agent = await project_client.agents.create_agent(
model=os.environ["MODEL_DEPLOYMENT_NAME"], name="my-assistant", instructions="You are helpful assistant"
model=os.environ["MODEL_DEPLOYMENT_NAME"],
name="my-assistant",
instructions="You are helpful assistant",
)
print(f"Created agent, agent ID: {agent.id}")
