
Commit e4a621b

migrating tracing module to samples folder (#214)
* migrating tracing module to samples folder
* changes were made based on pre-commit failure
* pre-commit errors were resolved
1 parent 063cb63 commit e4a621b

9 files changed: +950 -0 lines changed
sample_agents_basics_async_with_azure_monitor_tracing.py: 90 additions & 0 deletions
@@ -0,0 +1,90 @@
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------

"""
FILE: sample_agents_basics_async_with_azure_monitor_tracing.py

DESCRIPTION:
    This sample demonstrates how to use basic agent operations from
    the Azure Agents service using an asynchronous client with Azure Monitor tracing.
    View the results in the "Tracing" tab in your Azure AI Studio project page.

USAGE:
    python sample_agents_basics_async_with_azure_monitor_tracing.py

    Before running the sample:

    pip install azure-ai-projects azure-identity opentelemetry-sdk azure-monitor-opentelemetry

    Set these environment variables with your own values:
    * PROJECT_CONNECTION_STRING - The Azure AI Project connection string, as found in your AI Studio Project.
    * AZURE_TRACING_GEN_AI_CONTENT_RECORDING_ENABLED - Optional. Set to `true` to trace the content of chat
      messages, which may contain personal data. False by default.
"""
import asyncio
import os

from azure.ai.projects.aio import AIProjectClient
from azure.identity.aio import DefaultAzureCredential
from azure.monitor.opentelemetry import configure_azure_monitor
from opentelemetry import trace

tracer = trace.get_tracer(__name__)


@tracer.start_as_current_span(__file__)
async def main() -> None:
    # Create an Azure AI Project Client from a connection string, copied from your AI Studio project.
    # At the moment, it should be in the format "<HostName>;<AzureSubscriptionId>;<ResourceGroup>;<HubName>".
    # The customer needs to log in to the Azure subscription via the Azure CLI and set the environment variables.
    project_client = AIProjectClient.from_connection_string(
        credential=DefaultAzureCredential(), conn_str=os.environ["PROJECT_CONNECTION_STRING"]
    )

    # Enable Azure Monitor tracing
    application_insights_connection_string = project_client.telemetry.get_connection_string()
    if not application_insights_connection_string:
        print("Application Insights was not enabled for this project.")
        print("Enable it via the 'Tracing' tab in your AI Studio project page.")
        exit()
    configure_azure_monitor(connection_string=application_insights_connection_string)

    async with project_client:
        agent = await project_client.agents.create_agent(
            model="gpt-4o", name="my-assistant", instructions="You are a helpful assistant"
        )
        print(f"Created agent, agent ID: {agent.id}")

        thread = await project_client.agents.create_thread()
        print(f"Created thread, thread ID: {thread.id}")

        message = await project_client.agents.create_message(
            thread_id=thread.id, role="user", content="Hello, tell me a joke"
        )
        print(f"Created message, message ID: {message.id}")

        run = await project_client.agents.create_run(thread_id=thread.id, assistant_id=agent.id)

        # Poll the run as long as the run status is queued or in progress
        while run.status in ["queued", "in_progress", "requires_action"]:
            # Wait for a second without blocking the event loop
            await asyncio.sleep(1)
            run = await project_client.agents.get_run(thread_id=thread.id, run_id=run.id)

            print(f"Run status: {run.status}")

        print(f"Run completed with status: {run.status}")

        await project_client.agents.delete_agent(agent.id)
        print("Deleted agent")

        messages = await project_client.agents.list_messages(thread_id=thread.id)
        print(f"Messages: {messages}")


if __name__ == "__main__":
    asyncio.run(main())
sample_agents_basics_async_with_console_tracing.py: 92 additions & 0 deletions
@@ -0,0 +1,92 @@
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------

"""
FILE: sample_agents_basics_async_with_console_tracing.py

DESCRIPTION:
    This sample demonstrates how to use basic agent operations from
    the Azure Agents service using an asynchronous client with tracing to the console.

USAGE:
    python sample_agents_basics_async_with_console_tracing.py

    Before running the sample:

    pip install azure-ai-projects azure-identity opentelemetry-sdk azure-core-tracing-opentelemetry

    If you want to export telemetry to an OTLP endpoint (such as the Aspire dashboard,
    https://learn.microsoft.com/dotnet/aspire/fundamentals/dashboard/standalone?tabs=bash)
    install:

    pip install opentelemetry-exporter-otlp-proto-grpc

    Set these environment variables with your own values:
    * PROJECT_CONNECTION_STRING - The Azure AI Project connection string, as found in your AI Studio Project.
    * AZURE_TRACING_GEN_AI_CONTENT_RECORDING_ENABLED - Optional. Set to `true` to trace the content of chat
      messages, which may contain personal data. False by default.
"""
import asyncio
import os
import sys

from azure.ai.projects.aio import AIProjectClient
from azure.identity.aio import DefaultAzureCredential
from opentelemetry import trace

tracer = trace.get_tracer(__name__)


@tracer.start_as_current_span(__file__)
async def main() -> None:
    # Create an Azure AI Project Client from a connection string, copied from your AI Studio project.
    # At the moment, it should be in the format "<HostName>;<AzureSubscriptionId>;<ResourceGroup>;<HubName>".
    # The customer needs to log in to the Azure subscription via the Azure CLI and set the environment variables.
    project_client = AIProjectClient.from_connection_string(
        credential=DefaultAzureCredential(), conn_str=os.environ["PROJECT_CONNECTION_STRING"]
    )

    # Enable console tracing
    # or, if you have a local OTLP endpoint running, change it to
    # project_client.telemetry.enable(destination="http://localhost:4317")
    await project_client.telemetry.enable(destination=sys.stdout)

    async with project_client:
        agent = await project_client.agents.create_agent(
            model="gpt-4o", name="my-assistant", instructions="You are a helpful assistant"
        )
        print(f"Created agent, agent ID: {agent.id}")

        thread = await project_client.agents.create_thread()
        print(f"Created thread, thread ID: {thread.id}")

        message = await project_client.agents.create_message(
            thread_id=thread.id, role="user", content="Hello, tell me a joke"
        )
        print(f"Created message, message ID: {message.id}")

        run = await project_client.agents.create_run(thread_id=thread.id, assistant_id=agent.id)

        # Poll the run as long as the run status is queued or in progress
        while run.status in ["queued", "in_progress", "requires_action"]:
            # Wait for a second without blocking the event loop
            await asyncio.sleep(1)
            run = await project_client.agents.get_run(thread_id=thread.id, run_id=run.id)

            print(f"Run status: {run.status}")

        print(f"Run completed with status: {run.status}")

        await project_client.agents.delete_agent(agent.id)
        print("Deleted agent")

        messages = await project_client.agents.list_messages(thread_id=thread.id)
        print(f"Messages: {messages}")


if __name__ == "__main__":
    asyncio.run(main())
Lines changed: 32 additions & 0 deletions
@@ -0,0 +1,32 @@
### Tracing using Application Insights

Reasoning about your agent executions is important for troubleshooting and debugging. However, it can be difficult for complex agents for a number of reasons:
* There could be a high number of steps, making it hard to keep track of all of them.
* The sequence of steps could vary based on user input.
* The inputs/outputs at each stage may be long and deserve more detailed inspection.
* Each step of an agent might also involve nesting: an agent might invoke a tool, which uses another process, which then invokes another tool. If you notice strange or incorrect output from a top-level agent run, it is difficult to determine exactly where in the execution it was introduced.

Tracing solves this by allowing you to clearly see the inputs and outputs of each primitive involved in a particular agent run, in the order in which they were invoked.

Tracing lets you analyze your agent's performance and behavior by using OpenTelemetry and adding an Application Insights resource to your Azure AI Studio project. Check the Tracing tab in your [AI Studio](https://ai.azure.com/) project page: if Application Insights is enabled there, you can get its connection string, configure your agents to use it, and observe the full execution path through Azure Monitor. Typically, you want to start tracing before you create an agent.
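As a condensed sketch, the Azure Monitor samples in this commit wire tracing up roughly as below (the full synchronous and asynchronous samples are part of this commit; `configure_azure_monitor` comes from the `azure-monitor-opentelemetry` package listed in their install instructions):

```python
# Condensed sketch of the Azure Monitor tracing setup used by the samples in this commit.
import os

from azure.ai.projects import AIProjectClient
from azure.identity import DefaultAzureCredential
from azure.monitor.opentelemetry import configure_azure_monitor

project_client = AIProjectClient.from_connection_string(
    credential=DefaultAzureCredential(), conn_str=os.environ["PROJECT_CONNECTION_STRING"]
)

# The connection string is only available once Application Insights is enabled
# for the project (see the 'Tracing' tab in your AI Studio project page).
app_insights_connection_string = project_client.telemetry.get_connection_string()
configure_azure_monitor(connection_string=app_insights_connection_string)

# From here on, agent operations made with project_client are traced to Azure Monitor.
```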
#### Installation

Make sure to install the OpenTelemetry SDK and the Azure SDK tracing plugin:

```bash
pip install opentelemetry-sdk
pip install azure-core-tracing-opentelemetry
```
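For reference, the tracing plugin is activated through `azure.core.settings`; the minimal sketch below shows that wiring on its own. The samples in this commit do not call it directly, since they rely on `project_client.telemetry.enable()` or `configure_azure_monitor()` for their tracing setup.

```python
# Minimal sketch: route Azure SDK client tracing through OpenTelemetry.
from azure.core.settings import settings
from azure.core.tracing.ext.opentelemetry_span import OpenTelemetrySpan

settings.tracing_implementation = OpenTelemetrySpan
```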
You will also need an exporter to send telemetry to your observability backend. You can print traces to the console or use a local viewer such as the [Aspire Dashboard](https://learn.microsoft.com/dotnet/aspire/fundamentals/dashboard/standalone?tabs=bash).

To connect to the Aspire Dashboard or another OpenTelemetry-compatible backend, install the OTLP exporter:

```bash
pip install opentelemetry-exporter-otlp
```
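The samples themselves pass a destination to `project_client.telemetry.enable(...)` (the console sample uses `sys.stdout`, and its comments show an OTLP endpoint URL such as `http://localhost:4317` as an alternative). If you prefer to wire exporters up yourself, a minimal OpenTelemetry SDK sketch looks like this:

```python
# Minimal sketch of manual exporter wiring with the OpenTelemetry SDK.
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter

# Requires opentelemetry-exporter-otlp (or opentelemetry-exporter-otlp-proto-grpc):
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter

provider = TracerProvider()
# Print spans to the console:
provider.add_span_processor(BatchSpanProcessor(ConsoleSpanExporter()))
# And/or send them to a local OTLP endpoint such as the Aspire Dashboard (gRPC, default port 4317):
provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter(endpoint="http://localhost:4317")))
trace.set_tracer_provider(provider)
```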
These samples are split into asynchronous and synchronous versions. Each comes in two variants: one that traces and displays the results locally in the console, and one that sends the traces to Azure Monitor in AI Studio. To use the second set of samples, enable tracing from the 'Tracing' tab in your AI Studio project page.

Note: the initial release of Azure AI Projects has a bug in the agents tracing functionality. The bug causes agent function tool call information (function names and parameter values, which could contain sensitive information) to be included in the traces even when content recording is not enabled. We are working to fix this issue.
sample_agents_basics_with_azure_monitor_tracing.py: 91 additions & 0 deletions
@@ -0,0 +1,91 @@
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------

"""
FILE: sample_agents_basics_with_azure_monitor_tracing.py

DESCRIPTION:
    This sample demonstrates how to use basic agent operations from
    the Azure Agents service using a synchronous client with Azure Monitor tracing.
    View the results in the "Tracing" tab in your Azure AI Studio project page.

USAGE:
    python sample_agents_basics_with_azure_monitor_tracing.py

    Before running the sample:

    pip install azure-ai-projects azure-identity azure-monitor-opentelemetry python-dotenv

    Set these environment variables with your own values:
    * PROJECT_CONNECTION_STRING - The Azure AI Project connection string, as found in your AI Studio Project.
    * AZURE_TRACING_GEN_AI_CONTENT_RECORDING_ENABLED - Optional. Set to `true` to trace the content of chat
      messages, which may contain personal data. False by default.
"""

import os
import time
from pathlib import Path

from azure.ai.projects import AIProjectClient
from azure.identity import DefaultAzureCredential
from azure.monitor.opentelemetry import configure_azure_monitor
from dotenv import load_dotenv
from opentelemetry import trace

# Create an AI Project Client from a connection string, copied from your AI Studio project.
# At the moment, it should be in the format "<HostName>;<AzureSubscriptionId>;<ResourceGroup>;<HubName>".
# The customer needs to log in to the Azure subscription via the Azure CLI and set the environment variables.

load_dotenv()

project_client = AIProjectClient.from_connection_string(
    credential=DefaultAzureCredential(),
    conn_str=os.environ["PROJECT_CONNECTION_STRING"],
)

# [START enable_tracing]

# Enable Azure Monitor tracing
application_insights_connection_string = project_client.telemetry.get_connection_string()
if not application_insights_connection_string:
    print("Application Insights was not enabled for this project.")
    print("Enable it via the 'Tracing' tab in your AI Studio project page.")
    exit()
configure_azure_monitor(connection_string=application_insights_connection_string)


scenario = Path(__file__).name
tracer = trace.get_tracer(__name__)

with tracer.start_as_current_span(scenario), project_client:
    # Code logic here

    # [END enable_tracing]
    agent = project_client.agents.create_agent(
        model="gpt-4o", name="my-assistant", instructions="You are a helpful assistant"
    )
    print(f"Created agent, agent ID: {agent.id}")

    thread = project_client.agents.create_thread()
    print(f"Created thread, thread ID: {thread.id}")

    message = project_client.agents.create_message(thread_id=thread.id, role="user", content="Hello, tell me a joke")
    print(f"Created message, message ID: {message.id}")

    run = project_client.agents.create_run(thread_id=thread.id, assistant_id=agent.id)

    # Poll the run as long as the run status is queued or in progress
    while run.status in ["queued", "in_progress", "requires_action"]:
        # Wait for a second
        time.sleep(1)
        run = project_client.agents.get_run(thread_id=thread.id, run_id=run.id)

    print(f"Run status: {run.status}")

    project_client.agents.delete_agent(agent.id)
    print("Deleted agent")

    messages = project_client.agents.list_messages(thread_id=thread.id)
    print(f"Messages: {messages}")
