diff --git a/.github/.linkspector.yml b/.github/.linkspector.yml
index eb365c2982..4b0ce3bd77 100644
--- a/.github/.linkspector.yml
+++ b/.github/.linkspector.yml
@@ -18,6 +18,7 @@ ignorePatterns:
- pattern: "0001-madr-architecture-decisions.md"
- pattern: "https://api.powerplatform.com/.default"
- pattern: "https://your-resource.openai.azure.com/"
+ - pattern: "https://your-project.services.ai.azure.com"
- pattern: "http://host.docker.internal"
- pattern: "https://openai.github.io/openai-agents-js/openai/agents/classes/"
# excludedDirs:
diff --git a/dotnet/agent-framework-dotnet.slnx b/dotnet/agent-framework-dotnet.slnx
index b96b891b00..ec6da01736 100644
--- a/dotnet/agent-framework-dotnet.slnx
+++ b/dotnet/agent-framework-dotnet.slnx
@@ -124,19 +124,6 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/dotnet/samples/01-get-started/01_hello_agent/01_hello_agent.csproj b/dotnet/samples/01-get-started/01_hello_agent/01_hello_agent.csproj
index b32de63906..fdce858e37 100644
--- a/dotnet/samples/01-get-started/01_hello_agent/01_hello_agent.csproj
+++ b/dotnet/samples/01-get-started/01_hello_agent/01_hello_agent.csproj
@@ -9,13 +9,13 @@
-
+
-
+
diff --git a/dotnet/samples/01-get-started/01_hello_agent/Program.cs b/dotnet/samples/01-get-started/01_hello_agent/Program.cs
index e461f9ba75..64721b0210 100644
--- a/dotnet/samples/01-get-started/01_hello_agent/Program.cs
+++ b/dotnet/samples/01-get-started/01_hello_agent/Program.cs
@@ -1,29 +1,40 @@
// Copyright (c) Microsoft. All rights reserved.
-// This sample shows how to create and use a simple AI agent with Azure OpenAI as the backend.
+// This sample shows how to create and use a simple AI agent with Azure AI Foundry as the backend.
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
-using OpenAI.Chat;
+using Microsoft.Extensions.AI;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT")
+ ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIAgent agent = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName)
- .AsAIAgent(instructions: "You are good at telling jokes.", name: "Joker");
+//
+// Create a Foundry project Responses API client.
+IChatClient chatClient = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient();
+// Create the agent with model specified in chat options.
+ChatClientAgent agent = new(chatClient, new ChatClientAgentOptions
+{
+ Name = "Joker",
+ ChatOptions = new() { ModelId = deploymentName, Instructions = "You are good at telling jokes." },
+});
+//
+
+//
// Invoke the agent and output the text result.
Console.WriteLine(await agent.RunAsync("Tell me a joke about a pirate."));
+//
+//
// Invoke the agent with streaming support.
await foreach (var update in agent.RunStreamingAsync("Tell me a joke about a pirate."))
{
Console.WriteLine(update);
}
+//
diff --git a/dotnet/samples/01-get-started/02_add_tools/02_add_tools.csproj b/dotnet/samples/01-get-started/02_add_tools/02_add_tools.csproj
index b32de63906..fdce858e37 100644
--- a/dotnet/samples/01-get-started/02_add_tools/02_add_tools.csproj
+++ b/dotnet/samples/01-get-started/02_add_tools/02_add_tools.csproj
@@ -9,13 +9,13 @@
-
+
-
+
diff --git a/dotnet/samples/01-get-started/02_add_tools/Program.cs b/dotnet/samples/01-get-started/02_add_tools/Program.cs
index da0b638562..833152ab25 100644
--- a/dotnet/samples/01-get-started/02_add_tools/Program.cs
+++ b/dotnet/samples/01-get-started/02_add_tools/Program.cs
@@ -4,29 +4,43 @@
// It shows both non-streaming and streaming agent interactions using menu-related tools.
using System.ComponentModel;
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
-using OpenAI.Chat;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT")
+ ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+//
[Description("Get the weather for a given location.")]
static string GetWeather([Description("The location to get the weather for.")] string location)
=> $"The weather in {location} is cloudy with a high of 15°C.";
+//
-// Create the chat client and agent, and provide the function tool to the agent.
+//
+// Create a Foundry project Responses API client and agent with a function tool.
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIAgent agent = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName)
- .AsAIAgent(instructions: "You are a helpful assistant", tools: [AIFunctionFactory.Create(GetWeather)]);
+IChatClient chatClient = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient();
+ChatClientAgent agent = new(chatClient, new ChatClientAgentOptions
+{
+ ChatOptions = new()
+ {
+ ModelId = deploymentName,
+ Instructions = "You are a helpful assistant",
+ Tools = [AIFunctionFactory.Create(GetWeather)]
+ },
+});
+//
+
+//
// Non-streaming agent interaction with function tools.
Console.WriteLine(await agent.RunAsync("What is the weather like in Amsterdam?"));
@@ -35,3 +49,4 @@ static string GetWeather([Description("The location to get the weather for.")] s
{
Console.WriteLine(update);
}
+//
diff --git a/dotnet/samples/01-get-started/03_multi_turn/03_multi_turn.csproj b/dotnet/samples/01-get-started/03_multi_turn/03_multi_turn.csproj
index b32de63906..fdce858e37 100644
--- a/dotnet/samples/01-get-started/03_multi_turn/03_multi_turn.csproj
+++ b/dotnet/samples/01-get-started/03_multi_turn/03_multi_turn.csproj
@@ -9,13 +9,13 @@
-
+
-
+
diff --git a/dotnet/samples/01-get-started/03_multi_turn/Program.cs b/dotnet/samples/01-get-started/03_multi_turn/Program.cs
index 5d49e806ed..f8f35496fc 100644
--- a/dotnet/samples/01-get-started/03_multi_turn/Program.cs
+++ b/dotnet/samples/01-get-started/03_multi_turn/Program.cs
@@ -2,23 +2,33 @@
// This sample shows how to create and use a simple AI agent with a multi-turn conversation.
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
-using OpenAI.Chat;
+using Microsoft.Extensions.AI;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT")
+ ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+//
+// Create a Foundry project Responses API client and agent.
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIAgent agent = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName)
- .AsAIAgent(instructions: "You are good at telling jokes.", name: "Joker");
+IChatClient chatClient = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient();
+ChatClientAgent agent = new(chatClient, new ChatClientAgentOptions
+{
+ Name = "Joker",
+ ChatOptions = new() { ModelId = deploymentName, Instructions = "You are good at telling jokes." },
+});
+//
+
+//
// Invoke the agent with a multi-turn conversation, where the context is preserved in the session object.
AgentSession session = await agent.CreateSessionAsync();
Console.WriteLine(await agent.RunAsync("Tell me a joke about a pirate.", session));
@@ -34,3 +44,4 @@
{
Console.WriteLine(update);
}
+//
diff --git a/dotnet/samples/01-get-started/04_memory/04_memory.csproj b/dotnet/samples/01-get-started/04_memory/04_memory.csproj
index b32de63906..fdce858e37 100644
--- a/dotnet/samples/01-get-started/04_memory/04_memory.csproj
+++ b/dotnet/samples/01-get-started/04_memory/04_memory.csproj
@@ -9,13 +9,13 @@
-
+
-
+
diff --git a/dotnet/samples/01-get-started/04_memory/Program.cs b/dotnet/samples/01-get-started/04_memory/Program.cs
index fa6940f5fd..9f5e29b7d7 100644
--- a/dotnet/samples/01-get-started/04_memory/Program.cs
+++ b/dotnet/samples/01-get-started/04_memory/Program.cs
@@ -8,23 +8,25 @@
using System.Text;
using System.Text.Json;
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
-using OpenAI.Chat;
using SampleApp;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT")
+ ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+//
+// Create a Foundry project Responses API chat client.
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-ChatClient chatClient = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName);
+IChatClient chatClient = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient();
// Create the agent and provide a factory to add our custom memory component to
// all sessions created by the agent. Here each new memory component will have its own
@@ -33,13 +35,14 @@
// and preferably shared between multiple sessions used by the same user, ensure that the
// factory reads the user id from the current context and scopes the memory component
// and its storage to that user id.
-AIAgent agent = chatClient.AsAIAgent(new ChatClientAgentOptions()
+AIAgent agent = new ChatClientAgent(chatClient, new ChatClientAgentOptions()
{
- ChatOptions = new() { Instructions = "You are a friendly assistant. Always address the user by their name." },
- AIContextProviders = [new UserInfoMemory(chatClient.AsIChatClient())]
+ ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a friendly assistant. Always address the user by their name." },
+ AIContextProviders = [new UserInfoMemory(chatClient)]
});
+//
-// Create a new session for the conversation.
+//
AgentSession session = await agent.CreateSessionAsync();
Console.WriteLine(">> Use session with blank memory\n");
@@ -80,7 +83,9 @@
// Invoke the agent and output the text result.
// This time the agent should remember the user's name and use it in the response.
Console.WriteLine(await agent.RunAsync("What is my name and age?", newSession));
+//
+//
namespace SampleApp
{
///
@@ -159,4 +164,5 @@ internal sealed class UserInfo
public string? UserName { get; set; }
public int? UserAge { get; set; }
}
+ //
}
diff --git a/dotnet/samples/01-get-started/05_first_workflow/Program.cs b/dotnet/samples/01-get-started/05_first_workflow/Program.cs
index af1dcb50d9..3364d12a59 100644
--- a/dotnet/samples/01-get-started/05_first_workflow/Program.cs
+++ b/dotnet/samples/01-get-started/05_first_workflow/Program.cs
@@ -19,6 +19,7 @@ public static class Program
{
private static async Task Main()
{
+ //
// Create the executors
Func uppercaseFunc = s => s.ToUpperInvariant();
var uppercase = uppercaseFunc.BindAsExecutor("UppercaseExecutor");
@@ -29,7 +30,9 @@ private static async Task Main()
WorkflowBuilder builder = new(uppercase);
builder.AddEdge(uppercase, reverse).WithOutputFrom(reverse);
var workflow = builder.Build();
+ //
+ //
// Execute the workflow with input data
await using Run run = await InProcessExecution.RunAsync(workflow, "Hello, World!");
foreach (WorkflowEvent evt in run.NewEvents)
@@ -39,6 +42,7 @@ private static async Task Main()
Console.WriteLine($"{executorComplete.ExecutorId}: {executorComplete.Data}");
}
}
+ //
}
}
diff --git a/dotnet/samples/01-get-started/06_host_your_agent/06_host_your_agent.csproj b/dotnet/samples/01-get-started/06_host_your_agent/06_host_your_agent.csproj
index 2f0efd7b3a..bdd486e136 100644
--- a/dotnet/samples/01-get-started/06_host_your_agent/06_host_your_agent.csproj
+++ b/dotnet/samples/01-get-started/06_host_your_agent/06_host_your_agent.csproj
@@ -21,11 +21,12 @@
-
+
+
-
+
diff --git a/dotnet/samples/01-get-started/06_host_your_agent/Program.cs b/dotnet/samples/01-get-started/06_host_your_agent/Program.cs
index 6012119b25..24c595923f 100644
--- a/dotnet/samples/01-get-started/06_host_your_agent/Program.cs
+++ b/dotnet/samples/01-get-started/06_host_your_agent/Program.cs
@@ -4,37 +4,45 @@
//
// Prerequisites:
// - Azure Functions Core Tools
-// - Azure OpenAI resource
+// - Azure AI Foundry project
//
// Environment variables:
-// AZURE_OPENAI_ENDPOINT
-// AZURE_OPENAI_DEPLOYMENT_NAME (defaults to "gpt-4o-mini")
+// AZURE_AI_PROJECT_ENDPOINT
+// AZURE_AI_MODEL_DEPLOYMENT_NAME (defaults to "gpt-4o-mini")
//
// Run with: func start
// Then call: POST http://localhost:7071/api/agents/HostedAgent/run
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Agents.AI.Hosting.AzureFunctions;
using Microsoft.Azure.Functions.Worker.Builder;
+using Microsoft.Extensions.AI;
using Microsoft.Extensions.Hosting;
-using OpenAI.Chat;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT")
- ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT")
+ ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+//
// Set up an AI agent following the standard Microsoft Agent Framework pattern.
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIAgent agent = new AzureOpenAIClient(new Uri(endpoint), new DefaultAzureCredential())
- .GetChatClient(deploymentName)
- .AsAIAgent(
- instructions: "You are a helpful assistant hosted in Azure Functions.",
- name: "HostedAgent");
+IChatClient chatClient = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient();
+ChatClientAgent agent = new(chatClient, new ChatClientAgentOptions
+{
+ Name = "HostedAgent",
+ ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a helpful assistant hosted in Azure Functions." },
+});
+//
+
+//
// Configure the function app to host the AI agent.
// This will automatically generate HTTP API endpoints for the agent.
using IHost app = FunctionsApplication
@@ -43,3 +51,4 @@
.ConfigureDurableAgents(options => options.AddAIAgent(agent, timeToLive: TimeSpan.FromHours(1)))
.Build();
app.Run();
+//
diff --git a/dotnet/samples/02-agents/AgentOpenTelemetry/AgentOpenTelemetry.csproj b/dotnet/samples/02-agents/AgentOpenTelemetry/AgentOpenTelemetry.csproj
index e194fec9c2..4024ab687d 100644
--- a/dotnet/samples/02-agents/AgentOpenTelemetry/AgentOpenTelemetry.csproj
+++ b/dotnet/samples/02-agents/AgentOpenTelemetry/AgentOpenTelemetry.csproj
@@ -9,13 +9,12 @@
-
+
-
@@ -25,7 +24,6 @@
-
diff --git a/dotnet/samples/02-agents/AgentOpenTelemetry/Program.cs b/dotnet/samples/02-agents/AgentOpenTelemetry/Program.cs
index 69d71e7b88..784b3a0031 100644
--- a/dotnet/samples/02-agents/AgentOpenTelemetry/Program.cs
+++ b/dotnet/samples/02-agents/AgentOpenTelemetry/Program.cs
@@ -3,7 +3,7 @@
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.Metrics;
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Azure.Monitor.OpenTelemetry.Exporter;
using Microsoft.Agents.AI;
@@ -97,8 +97,8 @@ You can view the telemetry data in the Aspire Dashboard.
Type your message and press Enter. Type 'exit' or empty message to quit.
""");
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT environment variable is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT environment variable is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
// Log application startup
appLogger.LogInformation("OpenTelemetry Aspire Demo application started");
@@ -113,9 +113,10 @@ static async Task GetWeatherAsync([Description("The location to get the
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-using var instrumentedChatClient = new AzureOpenAIClient(new Uri(endpoint), new DefaultAzureCredential())
- .GetChatClient(deploymentName)
- .AsIChatClient() // Converts a native OpenAI SDK ChatClient into a Microsoft.Extensions.AI.IChatClient
+using var instrumentedChatClient = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
.AsBuilder()
.UseFunctionInvocation()
.UseOpenTelemetry(sourceName: SourceName, configure: (cfg) => cfg.EnableSensitiveData = true) // enable telemetry at the chat client level
@@ -123,10 +124,16 @@ static async Task GetWeatherAsync([Description("The location to get the
appLogger.LogInformation("Creating Agent with OpenTelemetry instrumentation");
// Create the agent with the instrumented chat client
-var agent = new ChatClientAgent(instrumentedChatClient,
- name: "OpenTelemetryDemoAgent",
- instructions: "You are a helpful assistant that provides concise and informative responses.",
- tools: [AIFunctionFactory.Create(GetWeatherAsync)])
+var agent = new ChatClientAgent(instrumentedChatClient, new ChatClientAgentOptions
+{
+ Name = "OpenTelemetryDemoAgent",
+ ChatOptions = new()
+ {
+ ModelId = deploymentName,
+ Instructions = "You are a helpful assistant that provides concise and informative responses.",
+ Tools = [AIFunctionFactory.Create(GetWeatherAsync)]
+ },
+})
.AsBuilder()
.UseOpenTelemetry(SourceName, configure: (cfg) => cfg.EnableSensitiveData = true) // enable telemetry at the agent level
.Build();
diff --git a/dotnet/samples/02-agents/AgentOpenTelemetry/README.md b/dotnet/samples/02-agents/AgentOpenTelemetry/README.md
index 229d37dca6..de6e2d1248 100644
--- a/dotnet/samples/02-agents/AgentOpenTelemetry/README.md
+++ b/dotnet/samples/02-agents/AgentOpenTelemetry/README.md
@@ -1,6 +1,6 @@
-# OpenTelemetry Aspire Demo with Azure OpenAI
+# OpenTelemetry Aspire Demo with Azure AI Foundry
-This demo showcases the integration of OpenTelemetry with the Microsoft Agent Framework using Azure OpenAI and .NET Aspire Dashboard for telemetry visualization.
+This demo showcases the integration of OpenTelemetry with the Microsoft Agent Framework using Azure AI Foundry and .NET Aspire Dashboard for telemetry visualization.
## Overview
@@ -15,7 +15,7 @@ The demo consists of three main components:
```mermaid
graph TD
A["Console App (Interactive)"] --> B["Agent Framework with OpenTel Instrumentation"]
- B --> C["Azure OpenAI Service"]
+ B --> C["Azure AI Foundry Service"]
A --> D["Aspire Dashboard (OpenTelemetry Visualization)"]
B --> D
```
@@ -23,21 +23,21 @@ graph TD
## Prerequisites
- .NET 10 SDK or later
-- Azure OpenAI service endpoint and deployment configured
+- Azure AI Foundry project endpoint and model deployment configured
- Azure CLI installed and authenticated (for Azure credential authentication)
- Docker installed (for running Aspire Dashboard)
- [Optional] Application Insights and Grafana
## Configuration
-### Azure OpenAI Setup
+### Azure AI Foundry Setup
Set the following environment variables:
```powershell
-$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/"
-$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
+$env:AZURE_AI_PROJECT_ENDPOINT="https://your-project.services.ai.azure.com"
+$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
```
-**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure OpenAI resource.
+**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure AI Foundry project.
### [Optional] Application Insights Setup
Set the following environment variables:
@@ -56,7 +56,7 @@ The easiest way to run the demo is using the provided PowerShell script:
```
This script will automatically:
-- ✅ Check prerequisites (Docker, Azure OpenAI configuration)
+- ✅ Check prerequisites (Docker, Azure AI Foundry configuration)
- 🔨 Build the console application
- 🐳 Start the Aspire Dashboard via Docker (with anonymous access)
- ⏳ Wait for dashboard to be ready (polls port until listening)
@@ -158,7 +158,7 @@ Open dashboard in Azure portal:
- **Telemetry correlation** across the entire request flow
### Agent Framework Features
-- **ChatClientAgent** with Azure OpenAI integration
+- **ChatClientAgent** with Azure AI Foundry integration
- **OpenTelemetry wrapper** using `.WithOpenTelemetry()`
- **Conversation threading** for multi-turn conversations
- **Error handling** with telemetry correlation
@@ -182,7 +182,7 @@ Complete demo startup script that handles everything automatically.
```
**Features:**
-- **Automatic configuration detection** - Checks for Azure OpenAI configuration
+- **Automatic configuration detection** - Checks for Azure AI Foundry configuration
- **Project building** - Automatically builds projects before running
- **Error handling** - Provides clear error messages if something goes wrong
- **Multi-window support** - Opens dashboard in separate window for better experience
@@ -201,10 +201,10 @@ If you encounter port binding errors, try:
2. Or kill any processes using the conflicting ports
### Authentication Issues
-- Ensure your Azure OpenAI endpoint is correctly configured
+- Ensure your Azure AI Foundry endpoint is correctly configured
- Check that the environment variables are set in the correct terminal session
-- Verify you're logged in with Azure CLI (`az login`) and have access to the Azure OpenAI resource
-- Ensure the Azure OpenAI deployment name matches your actual deployment
+- Verify you're logged in with Azure CLI (`az login`) and have access to the Azure AI Foundry project
+- Ensure the model deployment name matches your actual deployment
### Build Issues
- Ensure you're using .NET 10.0 SDK
@@ -216,7 +216,7 @@ If you encounter port binding errors, try:
```
AgentOpenTelemetry/
├── AgentOpenTelemetry.csproj # Project file with dependencies
-├── Program.cs # Main application with Azure OpenAI agent integration
+├── Program.cs # Main application with Azure AI Foundry agent integration
├── start-demo.ps1 # PowerShell script to start the demo
└── README.md # This file
```
diff --git a/dotnet/samples/02-agents/AgentOpenTelemetry/start-demo.ps1 b/dotnet/samples/02-agents/AgentOpenTelemetry/start-demo.ps1
index 7af1c9d8ae..e5ffb4b088 100644
--- a/dotnet/samples/02-agents/AgentOpenTelemetry/start-demo.ps1
+++ b/dotnet/samples/02-agents/AgentOpenTelemetry/start-demo.ps1
@@ -21,18 +21,18 @@ try {
exit 1
}
-# Check for Azure OpenAI configuration
-if ($env:AZURE_OPENAI_ENDPOINT) {
- Write-Host "Found Azure OpenAI endpoint: $($env:AZURE_OPENAI_ENDPOINT)" -ForegroundColor Green
- if ($env:AZURE_OPENAI_DEPLOYMENT_NAME) {
- Write-Host "Using deployment: $($env:AZURE_OPENAI_DEPLOYMENT_NAME)" -ForegroundColor Green
+# Check for Azure AI Foundry configuration
+if ($env:AZURE_AI_PROJECT_ENDPOINT) {
+ Write-Host "Found Azure AI Foundry endpoint: $($env:AZURE_AI_PROJECT_ENDPOINT)" -ForegroundColor Green
+ if ($env:AZURE_AI_MODEL_DEPLOYMENT_NAME) {
+ Write-Host "Using deployment: $($env:AZURE_AI_MODEL_DEPLOYMENT_NAME)" -ForegroundColor Green
} else {
Write-Host "Using default deployment: gpt-4o-mini" -ForegroundColor Cyan
}
} else {
- Write-Host "Warning: AZURE_OPENAI_ENDPOINT not found!" -ForegroundColor Yellow
- Write-Host "Please set the AZURE_OPENAI_ENDPOINT environment variable" -ForegroundColor Yellow
- Write-Host "Example: `$env:AZURE_OPENAI_ENDPOINT='https://your-resource.openai.azure.com/'" -ForegroundColor Yellow
+ Write-Host "Warning: AZURE_AI_PROJECT_ENDPOINT not found!" -ForegroundColor Yellow
+ Write-Host "Please set the AZURE_AI_PROJECT_ENDPOINT environment variable" -ForegroundColor Yellow
+ Write-Host "Example: `$env:AZURE_AI_PROJECT_ENDPOINT='https://your-project.services.ai.azure.com'" -ForegroundColor Yellow
Write-Host ""
}
diff --git a/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/Agent_Step01_BasicSkills.csproj b/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/Agent_Step01_BasicSkills.csproj
index 2a503bbfb2..15c7e85504 100644
--- a/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/Agent_Step01_BasicSkills.csproj
+++ b/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/Agent_Step01_BasicSkills.csproj
@@ -10,12 +10,13 @@
-
+
+
-
+
diff --git a/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/Program.cs b/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/Program.cs
index 290c3f9b6b..2b686a5fa9 100644
--- a/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/Program.cs
+++ b/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/Program.cs
@@ -7,28 +7,31 @@
// This sample includes the expense-report skill:
// - Policy-based expense filing with references and assets
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
-using OpenAI.Responses;
+using Microsoft.Extensions.AI;
// --- Configuration ---
-string endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT")
- ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-string deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT")
+ ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
// --- Skills Provider ---
// Discovers skills from the 'skills' directory and makes them available to the agent
var skillsProvider = new FileAgentSkillsProvider(skillPath: Path.Combine(AppContext.BaseDirectory, "skills"));
// --- Agent Setup ---
-AIAgent agent = new AzureOpenAIClient(new Uri(endpoint), new DefaultAzureCredential())
- .GetResponsesClient(deploymentName)
+AIAgent agent = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
.AsAIAgent(new ChatClientAgentOptions
{
Name = "SkillsAgent",
ChatOptions = new()
{
+ ModelId = deploymentName,
Instructions = "You are a helpful assistant.",
},
AIContextProviders = [skillsProvider],
diff --git a/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/README.md b/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/README.md
index 78099fa8a5..d735f9a419 100644
--- a/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/README.md
+++ b/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/README.md
@@ -36,13 +36,13 @@ Agent_Step01_BasicSkills/
### Prerequisites
- .NET 10.0 SDK
-- Azure OpenAI endpoint with a deployed model
+- Azure AI Foundry project endpoint with a deployed model
### Setup
1. Set environment variables:
```bash
- export AZURE_OPENAI_ENDPOINT="https://your-endpoint.openai.azure.com/"
- export AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o-mini"
+ export AZURE_AI_PROJECT_ENDPOINT="https://your-project.services.ai.azure.com"
+ export AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini"
```
2. Run the sample:
diff --git a/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step01_ChatHistoryMemory/AgentWithMemory_Step01_ChatHistoryMemory.csproj b/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step01_ChatHistoryMemory/AgentWithMemory_Step01_ChatHistoryMemory.csproj
index 860089b621..1d2fb75ebc 100644
--- a/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step01_ChatHistoryMemory/AgentWithMemory_Step01_ChatHistoryMemory.csproj
+++ b/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step01_ChatHistoryMemory/AgentWithMemory_Step01_ChatHistoryMemory.csproj
@@ -10,13 +10,14 @@
+
-
+
diff --git a/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step01_ChatHistoryMemory/Program.cs b/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step01_ChatHistoryMemory/Program.cs
index ff4628ef7a..b6da6b951f 100644
--- a/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step01_ChatHistoryMemory/Program.cs
+++ b/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step01_ChatHistoryMemory/Program.cs
@@ -4,15 +4,16 @@
// It can then use the chat history from prior conversations to inform responses in new conversations.
using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.VectorData;
using Microsoft.SemanticKernel.Connectors.InMemory;
-using OpenAI.Chat;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var embeddingEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
var embeddingDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME") ?? "text-embedding-3-large";
// Create a vector store to store the chat messages in.
@@ -23,19 +24,19 @@
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
- EmbeddingGenerator = new AzureOpenAIClient(new Uri(endpoint), new DefaultAzureCredential())
+ EmbeddingGenerator = new AzureOpenAIClient(new Uri(embeddingEndpoint), new DefaultAzureCredential())
.GetEmbeddingClient(embeddingDeploymentName)
.AsIEmbeddingGenerator()
});
// Create the agent and add the ChatHistoryMemoryProvider to store chat messages in the vector store.
-AIAgent agent = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName)
+AIAgent agent = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
.AsAIAgent(new ChatClientAgentOptions
{
- ChatOptions = new() { Instructions = "You are good at telling jokes." },
+ ChatOptions = new() { ModelId = deploymentName, Instructions = "You are good at telling jokes." },
Name = "Joker",
AIContextProviders = [new ChatHistoryMemoryProvider(
vectorStore,
diff --git a/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step02_MemoryUsingMem0/AgentWithMemory_Step02_MemoryUsingMem0.csproj b/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step02_MemoryUsingMem0/AgentWithMemory_Step02_MemoryUsingMem0.csproj
index 1e0863d66f..8dda238192 100644
--- a/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step02_MemoryUsingMem0/AgentWithMemory_Step02_MemoryUsingMem0.csproj
+++ b/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step02_MemoryUsingMem0/AgentWithMemory_Step02_MemoryUsingMem0.csproj
@@ -9,13 +9,13 @@
-
+
-
+
diff --git a/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step02_MemoryUsingMem0/Program.cs b/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step02_MemoryUsingMem0/Program.cs
index f1842eb634..4db73dc8c1 100644
--- a/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step02_MemoryUsingMem0/Program.cs
+++ b/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step02_MemoryUsingMem0/Program.cs
@@ -6,15 +6,14 @@
using System.Net.Http.Headers;
using System.Text.Json;
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Agents.AI.Mem0;
using Microsoft.Extensions.AI;
-using OpenAI.Chat;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
var mem0ServiceUri = Environment.GetEnvironmentVariable("MEM0_ENDPOINT") ?? throw new InvalidOperationException("MEM0_ENDPOINT is not set.");
var mem0ApiKey = Environment.GetEnvironmentVariable("MEM0_API_KEY") ?? throw new InvalidOperationException("MEM0_API_KEY is not set.");
@@ -27,13 +26,13 @@
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIAgent agent = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName)
+AIAgent agent = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
.AsAIAgent(new ChatClientAgentOptions()
{
- ChatOptions = new() { Instructions = "You are a friendly travel assistant. Use known memories about the user when responding, and do not invent details." },
+ ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a friendly travel assistant. Use known memories about the user when responding, and do not invent details." },
// The stateInitializer can be used to customize the Mem0 scope per session and it will be called each time a session
// is encountered by the Mem0Provider that does not already have Mem0Provider state stored on the session.
// If each session should have its own Mem0 scope, you can create a new id per session via the stateInitializer, e.g.:
diff --git a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step01_BasicTextRAG/AgentWithRAG_Step01_BasicTextRAG.csproj b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step01_BasicTextRAG/AgentWithRAG_Step01_BasicTextRAG.csproj
index 860089b621..1d2fb75ebc 100644
--- a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step01_BasicTextRAG/AgentWithRAG_Step01_BasicTextRAG.csproj
+++ b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step01_BasicTextRAG/AgentWithRAG_Step01_BasicTextRAG.csproj
@@ -10,13 +10,14 @@
+
-
+
diff --git a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step01_BasicTextRAG/Program.cs b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step01_BasicTextRAG/Program.cs
index c04601d940..1d14f7ce3e 100644
--- a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step01_BasicTextRAG/Program.cs
+++ b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step01_BasicTextRAG/Program.cs
@@ -6,23 +6,24 @@
// The TextSearchStore is a sample store implementation that hardcodes a storage schema and uses the vector store to store and retrieve documents.
using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Agents.AI.Samples;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.VectorData;
using Microsoft.SemanticKernel.Connectors.InMemory;
-using OpenAI.Chat;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var embeddingEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
var embeddingDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME") ?? "text-embedding-3-large";
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
AzureOpenAIClient azureOpenAIClient = new(
- new Uri(endpoint),
+ new Uri(embeddingEndpoint),
new DefaultAzureCredential());
// Create an In-Memory vector store that uses the Azure OpenAI embedding model to generate embeddings.
@@ -60,11 +61,13 @@
};
// Create the AI agent with the TextSearchProvider as the AI context provider.
-AIAgent agent = azureOpenAIClient
- .GetChatClient(deploymentName)
+AIAgent agent = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
.AsAIAgent(new ChatClientAgentOptions
{
- ChatOptions = new() { Instructions = "You are a helpful support specialist for Contoso Outdoors. Answer questions using the provided context and cite the source document when available." },
+ ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a helpful support specialist for Contoso Outdoors. Answer questions using the provided context and cite the source document when available." },
AIContextProviders = [new TextSearchProvider(SearchAdapter, textSearchOptions)],
// Since we are using ChatCompletion which stores chat history locally, we can also add a message filter
// that removes messages produced by the TextSearchProvider before they are added to the chat history, so that
diff --git a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/AgentWithRAG_Step02_CustomVectorStoreRAG.csproj b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/AgentWithRAG_Step02_CustomVectorStoreRAG.csproj
index 33029395dd..e2fc3ac0fb 100644
--- a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/AgentWithRAG_Step02_CustomVectorStoreRAG.csproj
+++ b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/AgentWithRAG_Step02_CustomVectorStoreRAG.csproj
@@ -10,13 +10,14 @@
+
-
+
diff --git a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/Program.cs b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/Program.cs
index 0c299a1445..8410083c07 100644
--- a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/Program.cs
+++ b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/Program.cs
@@ -5,16 +5,17 @@
// The TextSearchProvider runs a search against the vector store before each model invocation and injects the results into the model context.
using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.VectorData;
using Microsoft.SemanticKernel.Connectors.Qdrant;
-using OpenAI.Chat;
using Qdrant.Client;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var embeddingEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
var embeddingDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME") ?? "text-embedding-3-large";
var afOverviewUrl = "https://github.com/MicrosoftDocs/semantic-kernel-docs/blob/main/agent-framework/overview/agent-framework-overview.md";
var afMigrationUrl = "https://raw.githubusercontent.com/MicrosoftDocs/semantic-kernel-docs/refs/heads/main/agent-framework/migration-guide/from-semantic-kernel/index.md";
@@ -23,7 +24,7 @@
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
AzureOpenAIClient azureOpenAIClient = new(
- new Uri(endpoint),
+ new Uri(embeddingEndpoint),
new DefaultAzureCredential());
// Create a Qdrant vector store that uses the Azure OpenAI embedding model to generate embeddings.
@@ -69,11 +70,13 @@
};
// Create the AI agent with the TextSearchProvider as the AI context provider.
-AIAgent agent = azureOpenAIClient
- .GetChatClient(deploymentName)
+AIAgent agent = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
.AsAIAgent(new ChatClientAgentOptions
{
- ChatOptions = new() { Instructions = "You are a helpful support specialist for the Microsoft Agent Framework. Answer questions using the provided context and cite the source document when available. Keep responses brief." },
+ ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a helpful support specialist for the Microsoft Agent Framework. Answer questions using the provided context and cite the source document when available. Keep responses brief." },
AIContextProviders = [new TextSearchProvider(SearchAdapter, textSearchOptions)],
// Configure a filter on the InMemoryChatHistoryProvider so that we don't persist the messages produced by the TextSearchProvider in chat history.
// The default is to persist all messages except those that came from chat history in the first place.
diff --git a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/README.md b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/README.md
index 131adde82b..15c243ddc7 100644
--- a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/README.md
+++ b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/README.md
@@ -7,23 +7,21 @@ This sample uses Qdrant for the vector store, but this can easily be swapped out
## Prerequisites
- .NET 10 SDK or later
-- Azure OpenAI service endpoint
-- Both a chat completion and embedding deployment configured in the Azure OpenAI resource
+- Azure AI Foundry project endpoint
+- An embedding deployment configured in an Azure OpenAI resource
- Azure CLI installed and authenticated (for Azure credential authentication)
-- User has the `Cognitive Services OpenAI Contributor` role for the Azure OpenAI resource.
- An existing Qdrant instance. You can use a managed service or run a local instance using Docker, but the sample assumes the instance is running locally.
-**Note**: These samples use Azure OpenAI models. For more information, see [how to deploy Azure OpenAI models with Azure AI Foundry](https://learn.microsoft.com/en-us/azure/ai-foundry/how-to/deploy-models-openai).
-
-**Note**: These samples use Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure OpenAI resource and have the `Cognitive Services OpenAI Contributor` role. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
+**Note**: These samples use Azure AI Foundry for agent chat and Azure OpenAI for embeddings. Make sure you're logged in with `az login` and have access to both resources.
## Running the sample from the console
Set the following environment variables:
```powershell
-$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" # Replace with your Azure OpenAI resource endpoint
-$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
+$env:AZURE_AI_PROJECT_ENDPOINT="https://your-project.services.ai.azure.com" # Azure AI Foundry project endpoint
+$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
+$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" # Azure OpenAI endpoint for embeddings
$env:AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME="text-embedding-3-large" # Optional, defaults to text-embedding-3-large
```
diff --git a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step03_CustomRAGDataSource/AgentWithRAG_Step03_CustomRAGDataSource.csproj b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step03_CustomRAGDataSource/AgentWithRAG_Step03_CustomRAGDataSource.csproj
index 0f9de7c359..ede4e4ca18 100644
--- a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step03_CustomRAGDataSource/AgentWithRAG_Step03_CustomRAGDataSource.csproj
+++ b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step03_CustomRAGDataSource/AgentWithRAG_Step03_CustomRAGDataSource.csproj
@@ -9,13 +9,13 @@
-
+
-
+
diff --git a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step03_CustomRAGDataSource/Program.cs b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step03_CustomRAGDataSource/Program.cs
index d4e3a40756..6aed669feb 100644
--- a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step03_CustomRAGDataSource/Program.cs
+++ b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step03_CustomRAGDataSource/Program.cs
@@ -6,14 +6,13 @@
// The provider invokes the custom search function
// before each model invocation and injects the results into the model context.
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
-using OpenAI.Chat;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
TextSearchProviderOptions textSearchOptions = new()
{
@@ -25,13 +24,13 @@
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIAgent agent = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName)
+AIAgent agent = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
.AsAIAgent(new ChatClientAgentOptions
{
- ChatOptions = new() { Instructions = "You are a helpful support specialist for Contoso Outdoors. Answer questions using the provided context and cite the source document when available." },
+ ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a helpful support specialist for Contoso Outdoors. Answer questions using the provided context and cite the source document when available." },
AIContextProviders = [new TextSearchProvider(MockSearchAsync, textSearchOptions)]
});
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step01_UsingFunctionToolsWithApprovals/Agent_Step01_UsingFunctionToolsWithApprovals.csproj b/dotnet/samples/02-agents/Agents/Agent_Step01_UsingFunctionToolsWithApprovals/Agent_Step01_UsingFunctionToolsWithApprovals.csproj
index 0f9de7c359..ede4e4ca18 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step01_UsingFunctionToolsWithApprovals/Agent_Step01_UsingFunctionToolsWithApprovals.csproj
+++ b/dotnet/samples/02-agents/Agents/Agent_Step01_UsingFunctionToolsWithApprovals/Agent_Step01_UsingFunctionToolsWithApprovals.csproj
@@ -9,13 +9,13 @@
-
+
-
+
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step01_UsingFunctionToolsWithApprovals/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step01_UsingFunctionToolsWithApprovals/Program.cs
index 5bdfc9421c..6bb9bd0dbd 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step01_UsingFunctionToolsWithApprovals/Program.cs
+++ b/dotnet/samples/02-agents/Agents/Agent_Step01_UsingFunctionToolsWithApprovals/Program.cs
@@ -6,15 +6,14 @@
// while the agent is waiting for user input.
using System.ComponentModel;
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
-using OpenAI.Chat;
using ChatMessage = Microsoft.Extensions.AI.ChatMessage;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
// Create a sample function tool that the agent can use.
[Description("Get the weather for a given location.")]
@@ -26,11 +25,19 @@ static string GetWeather([Description("The location to get the weather for.")] s
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIAgent agent = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName)
- .AsAIAgent(instructions: "You are a helpful assistant", tools: [new ApprovalRequiredAIFunction(AIFunctionFactory.Create(GetWeather))]);
+AIAgent agent = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
+ .AsAIAgent(new ChatClientAgentOptions
+ {
+ ChatOptions = new()
+ {
+ ModelId = deploymentName,
+ Instructions = "You are a helpful assistant",
+ Tools = [new ApprovalRequiredAIFunction(AIFunctionFactory.Create(GetWeather))]
+ },
+ });
// Call the agent and check if there are any function approval requests to handle.
// For simplicity, we are assuming here that only function approvals are pending.
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/Agent_Step02_StructuredOutput.csproj b/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/Agent_Step02_StructuredOutput.csproj
index 0f9de7c359..ede4e4ca18 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/Agent_Step02_StructuredOutput.csproj
+++ b/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/Agent_Step02_StructuredOutput.csproj
@@ -9,13 +9,13 @@
-
+
-
+
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/Program.cs
index 7e74315e7d..b16864e58a 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/Program.cs
+++ b/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/Program.cs
@@ -5,25 +5,24 @@
using System.ComponentModel;
using System.Text.Json;
using System.Text.Json.Serialization;
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
-using OpenAI.Chat;
using SampleApp;
using ChatMessage = Microsoft.Extensions.AI.ChatMessage;
-string endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-string deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
// Create chat client to be used by chat client agents.
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-ChatClient chatClient = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName);
+IChatClient chatClient = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient();
// Demonstrates how to work with structured output via ResponseFormat with the non-generic RunAsync method.
// This approach is useful when:
@@ -47,7 +46,7 @@
// the text output from the agent into structured data using a chat client.
await UseStructuredOutputWithMiddlewareAsync(chatClient);
-static async Task UseStructuredOutputWithResponseFormatAsync(ChatClient chatClient)
+async Task UseStructuredOutputWithResponseFormatAsync(IChatClient chatClient)
{
Console.WriteLine("=== Structured Output with ResponseFormat ===");
@@ -57,9 +56,10 @@ static async Task UseStructuredOutputWithResponseFormatAsync(ChatClient chatClie
Name = "HelpfulAssistant",
ChatOptions = new()
{
+ ModelId = deploymentName,
Instructions = "You are a helpful assistant.",
// Specify CityInfo as the type parameter of ForJsonSchema to indicate the expected structured output from the agent.
- ResponseFormat = Microsoft.Extensions.AI.ChatResponseFormat.ForJsonSchema()
+ ResponseFormat = ChatResponseFormat.ForJsonSchema()
}
});
@@ -81,12 +81,16 @@ static async Task UseStructuredOutputWithResponseFormatAsync(ChatClient chatClie
Console.WriteLine();
}
-static async Task UseStructuredOutputWithRunAsync(ChatClient chatClient)
+async Task UseStructuredOutputWithRunAsync(IChatClient chatClient)
{
Console.WriteLine("=== Structured Output with RunAsync ===");
// Create the agent
- AIAgent agent = chatClient.AsAIAgent(name: "HelpfulAssistant", instructions: "You are a helpful assistant.");
+ AIAgent agent = chatClient.AsAIAgent(new ChatClientAgentOptions
+ {
+ Name = "HelpfulAssistant",
+ ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a helpful assistant." },
+ });
// Set CityInfo as the type parameter of RunAsync method to specify the expected structured output from the agent and invoke it with some unstructured input.
AgentResponse response = await agent.RunAsync("Provide information about the capital of France.");
@@ -99,7 +103,7 @@ static async Task UseStructuredOutputWithRunAsync(ChatClient chatClient)
Console.WriteLine();
}
-static async Task UseStructuredOutputWithRunStreamingAsync(ChatClient chatClient)
+async Task UseStructuredOutputWithRunStreamingAsync(IChatClient chatClient)
{
Console.WriteLine("=== Structured Output with RunStreamingAsync ===");
@@ -109,9 +113,10 @@ static async Task UseStructuredOutputWithRunStreamingAsync(ChatClient chatClient
Name = "HelpfulAssistant",
ChatOptions = new()
{
+ ModelId = deploymentName,
Instructions = "You are a helpful assistant.",
// Specify CityInfo as the type parameter of ForJsonSchema to indicate the expected structured output from the agent.
- ResponseFormat = Microsoft.Extensions.AI.ChatResponseFormat.ForJsonSchema()
+ ResponseFormat = ChatResponseFormat.ForJsonSchema()
}
});
@@ -129,15 +134,16 @@ static async Task UseStructuredOutputWithRunStreamingAsync(ChatClient chatClient
Console.WriteLine();
}
-static async Task UseStructuredOutputWithMiddlewareAsync(ChatClient chatClient)
+async Task UseStructuredOutputWithMiddlewareAsync(IChatClient chatClient)
{
Console.WriteLine("=== Structured Output with UseStructuredOutput Middleware ===");
- // Create chat client that will transform the agent text response into structured output.
- IChatClient meaiChatClient = chatClient.AsIChatClient();
-
// Create the agent
- AIAgent agent = meaiChatClient.AsAIAgent(name: "HelpfulAssistant", instructions: "You are a helpful assistant.");
+ AIAgent agent = chatClient.AsAIAgent(new ChatClientAgentOptions
+ {
+ Name = "HelpfulAssistant",
+ ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a helpful assistant." },
+ });
// Add structured output middleware via UseStructuredOutput method to add structured output support to the agent.
// This middleware transforms the agent's text response into structured data using a chat client.
@@ -145,7 +151,7 @@ static async Task UseStructuredOutputWithMiddlewareAsync(ChatClient chatClient)
// from the AgentRunOptions to emulate an agent that doesn't support structured output natively
agent = agent
.AsBuilder()
- .UseStructuredOutput(meaiChatClient)
+ .UseStructuredOutput(chatClient)
.Use(ResponseFormatRemovalMiddleware, null)
.Build();
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/README.md b/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/README.md
index 5652fe9b0a..2d7f899337 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/README.md
+++ b/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/README.md
@@ -14,21 +14,21 @@ This sample demonstrates how to configure ChatClientAgent to produce structured
Before you begin, ensure you have the following prerequisites:
- .NET 10 SDK or later
-- Azure OpenAI service endpoint and deployment configured
+- Azure AI Foundry project endpoint and model deployment configured
- Azure CLI installed and authenticated (for Azure credential authentication)
-- User has the `Cognitive Services OpenAI Contributor` role for the Azure OpenAI resource
+- User has the `Cognitive Services OpenAI Contributor` role for the Azure AI Foundry resource
-**Note**: This sample uses Azure OpenAI models. For more information, see [how to deploy Azure OpenAI models with Azure AI Foundry](https://learn.microsoft.com/en-us/azure/ai-foundry/how-to/deploy-models-openai).
+**Note**: This sample uses Azure AI Foundry models. For more information, see [how to deploy Azure OpenAI models with Azure AI Foundry](https://learn.microsoft.com/en-us/azure/ai-foundry/how-to/deploy-models-openai).
-**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure OpenAI resource and have the `Cognitive Services OpenAI Contributor` role. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
+**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure AI Foundry resource and have the `Cognitive Services OpenAI Contributor` role. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
## Environment Variables
Set the following environment variables:
```powershell
-$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" # Replace with your Azure OpenAI resource endpoint
-$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
+$env:AZURE_AI_PROJECT_ENDPOINT="https://your-project.services.ai.azure.com" # Replace with your Azure AI Foundry project endpoint
+$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
```
## Run the sample
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step03_PersistedConversations/Agent_Step03_PersistedConversations.csproj b/dotnet/samples/02-agents/Agents/Agent_Step03_PersistedConversations/Agent_Step03_PersistedConversations.csproj
index 0f9de7c359..ede4e4ca18 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step03_PersistedConversations/Agent_Step03_PersistedConversations.csproj
+++ b/dotnet/samples/02-agents/Agents/Agent_Step03_PersistedConversations/Agent_Step03_PersistedConversations.csproj
@@ -9,13 +9,13 @@
-
+
-
+
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step03_PersistedConversations/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step03_PersistedConversations/Program.cs
index d3331cb2b8..5c55ef1b32 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step03_PersistedConversations/Program.cs
+++ b/dotnet/samples/02-agents/Agents/Agent_Step03_PersistedConversations/Program.cs
@@ -5,23 +5,27 @@
// This sample shows how to create and use a simple AI agent with a conversation that can be persisted to disk.
using System.Text.Json;
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
-using OpenAI.Chat;
+using Microsoft.Extensions.AI;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
// Create the agent
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIAgent agent = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName)
- .AsAIAgent(instructions: "You are good at telling jokes.", name: "Joker");
+AIAgent agent = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
+ .AsAIAgent(new ChatClientAgentOptions
+ {
+ Name = "Joker",
+ ChatOptions = new() { ModelId = deploymentName, Instructions = "You are good at telling jokes." },
+ });
// Start a new session for the agent conversation.
AgentSession session = await agent.CreateSessionAsync();
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step04_3rdPartyChatHistoryStorage/Agent_Step04_3rdPartyChatHistoryStorage.csproj b/dotnet/samples/02-agents/Agents/Agent_Step04_3rdPartyChatHistoryStorage/Agent_Step04_3rdPartyChatHistoryStorage.csproj
index 860089b621..1a0e2c0be1 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step04_3rdPartyChatHistoryStorage/Agent_Step04_3rdPartyChatHistoryStorage.csproj
+++ b/dotnet/samples/02-agents/Agents/Agent_Step04_3rdPartyChatHistoryStorage/Agent_Step04_3rdPartyChatHistoryStorage.csproj
@@ -9,14 +9,14 @@
-
+
-
+
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step04_3rdPartyChatHistoryStorage/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step04_3rdPartyChatHistoryStorage/Program.cs
index cbcf14157e..28ee2d1a65 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step04_3rdPartyChatHistoryStorage/Program.cs
+++ b/dotnet/samples/02-agents/Agents/Agent_Step04_3rdPartyChatHistoryStorage/Program.cs
@@ -7,18 +7,17 @@
// the chat history can be retrieved from the custom storage location.
using System.Text.Json;
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.VectorData;
using Microsoft.SemanticKernel.Connectors.InMemory;
-using OpenAI.Chat;
using SampleApp;
using ChatMessage = Microsoft.Extensions.AI.ChatMessage;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
// Create a vector store to store the chat messages in.
// Replace this with a vector store implementation of your choice if you want to persist the chat history to disk.
@@ -28,13 +27,13 @@
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIAgent agent = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName)
+AIAgent agent = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
.AsAIAgent(new ChatClientAgentOptions
{
- ChatOptions = new() { Instructions = "You are good at telling jokes." },
+ ChatOptions = new() { ModelId = deploymentName, Instructions = "You are good at telling jokes." },
Name = "Joker",
// Create a new ChatHistoryProvider for this agent that stores chat history in a vector store.
ChatHistoryProvider = new VectorChatHistoryProvider(vectorStore)
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step05_Observability/Agent_Step05_Observability.csproj b/dotnet/samples/02-agents/Agents/Agent_Step05_Observability/Agent_Step05_Observability.csproj
index 1a618d660a..e86cc51346 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step05_Observability/Agent_Step05_Observability.csproj
+++ b/dotnet/samples/02-agents/Agents/Agent_Step05_Observability/Agent_Step05_Observability.csproj
@@ -9,7 +9,7 @@
-
+
@@ -18,7 +18,7 @@
-
+
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step05_Observability/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step05_Observability/Program.cs
index 20a0c252a2..f84f2191e4 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step05_Observability/Program.cs
+++ b/dotnet/samples/02-agents/Agents/Agent_Step05_Observability/Program.cs
@@ -2,16 +2,16 @@
// This sample shows how to create and use a simple AI agent with Azure OpenAI as the backend that logs telemetry using OpenTelemetry.
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Azure.Monitor.OpenTelemetry.Exporter;
using Microsoft.Agents.AI;
-using OpenAI.Chat;
+using Microsoft.Extensions.AI;
using OpenTelemetry;
using OpenTelemetry.Trace;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
var applicationInsightsConnectionString = Environment.GetEnvironmentVariable("APPLICATIONINSIGHTS_CONNECTION_STRING");
// Create TracerProvider with console exporter
@@ -30,9 +30,15 @@
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIAgent agent = new AzureOpenAIClient(new Uri(endpoint), new DefaultAzureCredential())
- .GetChatClient(deploymentName)
- .AsAIAgent(instructions: "You are good at telling jokes.", name: "Joker")
+AIAgent agent = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
+ .AsAIAgent(new ChatClientAgentOptions
+ {
+ Name = "Joker",
+ ChatOptions = new() { ModelId = deploymentName, Instructions = "You are good at telling jokes." },
+ })
.AsBuilder()
.UseOpenTelemetry(sourceName: sourceName)
.Build();
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step06_DependencyInjection/Agent_Step06_DependencyInjection.csproj b/dotnet/samples/02-agents/Agents/Agent_Step06_DependencyInjection/Agent_Step06_DependencyInjection.csproj
index 0aaa471260..c73468be3e 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step06_DependencyInjection/Agent_Step06_DependencyInjection.csproj
+++ b/dotnet/samples/02-agents/Agents/Agent_Step06_DependencyInjection/Agent_Step06_DependencyInjection.csproj
@@ -9,14 +9,14 @@
-
+
-
+
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step06_DependencyInjection/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step06_DependencyInjection/Program.cs
index 218ab1a10e..75fcb539d9 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step06_DependencyInjection/Program.cs
+++ b/dotnet/samples/02-agents/Agents/Agent_Step06_DependencyInjection/Program.cs
@@ -4,31 +4,30 @@
// This sample shows how to use dependency injection to register an AIAgent and use it from a hosted service with a user input chat loop.
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
// Create a host builder that we will register services with and then run.
HostApplicationBuilder builder = Host.CreateApplicationBuilder(args);
// Add agent options to the service collection.
-builder.Services.AddSingleton(new ChatClientAgentOptions() { Name = "Joker", ChatOptions = new() { Instructions = "You are good at telling jokes." } });
+builder.Services.AddSingleton(new ChatClientAgentOptions() { Name = "Joker", ChatOptions = new() { ModelId = deploymentName, Instructions = "You are good at telling jokes." } });
// Add a chat client to the service collection.
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-builder.Services.AddKeyedChatClient("AzureOpenAI", (sp) => new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName)
- .AsIChatClient());
+builder.Services.AddKeyedChatClient("AzureOpenAI", (sp) => new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient());
// Add the AI agent to the service collection.
builder.Services.AddSingleton((sp) => new ChatClientAgent(
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/Agent_Step07_AsMcpTool.csproj b/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/Agent_Step07_AsMcpTool.csproj
index db776afd1e..1df2f9ce52 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/Agent_Step07_AsMcpTool.csproj
+++ b/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/Agent_Step07_AsMcpTool.csproj
@@ -10,14 +10,15 @@
-
+
+
-
+
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/Program.cs
index d621227ea0..deff1045f0 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/Program.cs
+++ b/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/Program.cs
@@ -2,9 +2,10 @@
// This sample shows how to expose an AI agent as an MCP tool.
-using Azure.AI.Agents.Persistent;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
+using Microsoft.Extensions.AI;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using ModelContextProtocol.Server;
@@ -12,20 +13,24 @@
var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+// Create a code-first agent using ProjectResponsesClient.
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-var persistentAgentsClient = new PersistentAgentsClient(endpoint, new DefaultAzureCredential());
-
-// Create a server side persistent agent
-var agentMetadata = await persistentAgentsClient.Administration.CreateAgentAsync(
- model: deploymentName,
- instructions: "You are good at telling jokes, and you always start each joke with 'Aye aye, captain!'.",
- name: "Joker",
- description: "An agent that tells jokes.");
-
-// Retrieve the server side persistent agent as an AIAgent.
-AIAgent agent = await persistentAgentsClient.GetAIAgentAsync(agentMetadata.Value.Id);
+AIAgent agent = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
+ .AsAIAgent(new ChatClientAgentOptions
+ {
+ Name = "Joker",
+ Description = "An agent that tells jokes.",
+ ChatOptions = new()
+ {
+ ModelId = deploymentName,
+ Instructions = "You are good at telling jokes, and you always start each joke with 'Aye aye, captain!'.",
+ },
+ });
// Convert the agent to an AIFunction and then to an MCP tool.
// The agent name and description will be used as the mcp tool name and description.
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/README.md b/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/README.md
index e35cf01e90..b0e5e6e6db 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/README.md
+++ b/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/README.md
@@ -21,7 +21,7 @@ To use the [MCP Inspector](https://modelcontextprotocol.io/docs/tools/inspector)
```
1. Open a web browser and navigate to the URL displayed in the terminal. If not opened automatically, this will open the MCP Inspector interface.
1. In the MCP Inspector interface, add the following environment variables to allow your MCP server to access Azure AI Foundry Project to create and run the agent:
- - AZURE_AI_PROJECT_ENDPOINT = https://your-resource.openai.azure.com/ # Replace with your Azure AI Foundry Project endpoint
+ - AZURE_AI_PROJECT_ENDPOINT = https://your-project.services.ai.azure.com # Replace with your Azure AI Foundry Project endpoint
- AZURE_AI_MODEL_DEPLOYMENT_NAME = gpt-4o-mini # Replace with your model deployment name
1. Find and click the `Connect` button in the MCP Inspector interface to connect to the MCP server.
1. As soon as the connection is established, open the `Tools` tab in the MCP Inspector interface and select the `Joker` tool from the list.
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/Agent_Step08_UsingImages.csproj b/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/Agent_Step08_UsingImages.csproj
index 73a41005f1..8d2dbc66b3 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/Agent_Step08_UsingImages.csproj
+++ b/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/Agent_Step08_UsingImages.csproj
@@ -9,12 +9,13 @@
-
+
+
-
+
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/Program.cs
index 984a9e3b5c..4bc39839cd 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/Program.cs
+++ b/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/Program.cs
@@ -2,23 +2,27 @@
// This sample shows how to use Image Multi-Modality with an AI agent.
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
+using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
-using OpenAI.Chat;
using ChatMessage = Microsoft.Extensions.AI.ChatMessage;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = System.Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = System.Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o";
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-var agent = new AzureOpenAIClient(new Uri(endpoint), new DefaultAzureCredential())
- .GetChatClient(deploymentName)
- .AsAIAgent(
- name: "VisionAgent",
- instructions: "You are a helpful agent that can analyze images");
+var agent = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
+ .AsAIAgent(new ChatClientAgentOptions
+ {
+ Name = "VisionAgent",
+ ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a helpful agent that can analyze images" },
+ });
ChatMessage message = new(ChatRole.User, [
new TextContent("What do you see in this image?"),
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/README.md b/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/README.md
index e70c09f513..9d9c316441 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/README.md
+++ b/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/README.md
@@ -1,6 +1,6 @@
# Using Images with AI Agents
-This sample demonstrates how to use image multi-modality with an AI agent. It shows how to create a vision-enabled agent that can analyze and describe images using Azure OpenAI.
+This sample demonstrates how to use image multi-modality with an AI agent. It shows how to create a vision-enabled agent that can analyze and describe images using Azure AI Foundry.
## What this sample demonstrates
@@ -13,13 +13,13 @@ This sample demonstrates how to use image multi-modality with an AI agent. It sh
- **Vision Agent**: Creates an agent specifically instructed to analyze images
- **Multimodal Input**: Combines text questions with image uri in a single message
-- **Azure OpenAI Integration**: Uses AzureOpenAI LLM agents
+- **Azure AI Foundry Integration**: Uses Azure AI Foundry LLM agents
## Prerequisites
Before running this sample, ensure you have:
-1. An Azure OpenAI project set up
+1. An Azure AI Foundry project set up
2. A compatible model deployment (e.g., gpt-4o)
3. Azure CLI installed and authenticated
@@ -28,8 +28,8 @@ Before running this sample, ensure you have:
Set the following environment variables:
```powershell
-$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" # Replace with your Azure OpenAI endpoint
-$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o" # Replace with your model deployment name (optional, defaults to gpt-4o)
+$env:AZURE_AI_PROJECT_ENDPOINT="https://your-project.services.ai.azure.com" # Replace with your Azure AI Foundry project endpoint
+$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o" # Replace with your model deployment name (optional, defaults to gpt-4o)
```
## Run the sample
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step09_AsFunctionTool/Agent_Step09_AsFunctionTool.csproj b/dotnet/samples/02-agents/Agents/Agent_Step09_AsFunctionTool/Agent_Step09_AsFunctionTool.csproj
index 2660090404..bbd4449ca8 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step09_AsFunctionTool/Agent_Step09_AsFunctionTool.csproj
+++ b/dotnet/samples/02-agents/Agents/Agent_Step09_AsFunctionTool/Agent_Step09_AsFunctionTool.csproj
@@ -10,13 +10,14 @@
-
+
+
-
+
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step09_AsFunctionTool/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step09_AsFunctionTool/Program.cs
index aca1a95ce4..94683b01de 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step09_AsFunctionTool/Program.cs
+++ b/dotnet/samples/02-agents/Agents/Agent_Step09_AsFunctionTool/Program.cs
@@ -3,14 +3,13 @@
// This sample shows how to create and use a Azure OpenAI AI agent as a function tool.
using System.ComponentModel;
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
-using OpenAI.Chat;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
[Description("Get the weather for a given location.")]
static string GetWeather([Description("The location to get the weather for.")] string location)
@@ -20,22 +19,36 @@ static string GetWeather([Description("The location to get the weather for.")] s
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIAgent weatherAgent = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName)
- .AsAIAgent(
- instructions: "You answer questions about the weather.",
- name: "WeatherAgent",
- description: "An agent that answers questions about the weather.",
- tools: [AIFunctionFactory.Create(GetWeather)]);
+AIAgent weatherAgent = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
+ .AsAIAgent(new ChatClientAgentOptions
+ {
+ Name = "WeatherAgent",
+ Description = "An agent that answers questions about the weather.",
+ ChatOptions = new()
+ {
+ ModelId = deploymentName,
+ Instructions = "You answer questions about the weather.",
+ Tools = [AIFunctionFactory.Create(GetWeather)]
+ },
+ });
// Create the main agent, and provide the weather agent as a function tool.
-AIAgent agent = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName)
- .AsAIAgent(instructions: "You are a helpful assistant who responds in French.", tools: [weatherAgent.AsAIFunction()]);
+AIAgent agent = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
+ .AsAIAgent(new ChatClientAgentOptions
+ {
+ ChatOptions = new()
+ {
+ ModelId = deploymentName,
+ Instructions = "You are a helpful assistant who responds in French.",
+ Tools = [weatherAgent.AsAIFunction()]
+ },
+ });
// Invoke the agent and output the text result.
Console.WriteLine(await agent.RunAsync("What is the weather like in Amsterdam?"));
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/Agent_Step10_BackgroundResponsesWithToolsAndPersistence.csproj b/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/Agent_Step10_BackgroundResponsesWithToolsAndPersistence.csproj
index 29fab5f992..39f5c6ed62 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/Agent_Step10_BackgroundResponsesWithToolsAndPersistence.csproj
+++ b/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/Agent_Step10_BackgroundResponsesWithToolsAndPersistence.csproj
@@ -9,12 +9,13 @@
-
+
+
-
+
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/Program.cs
index 5d9c70a5fd..891584617b 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/Program.cs
+++ b/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/Program.cs
@@ -8,29 +8,34 @@
using System.ComponentModel;
using System.Text.Json;
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
-using OpenAI.Responses;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-5";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-5";
var stateStore = new Dictionary();
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIAgent agent = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetResponsesClient(deploymentName)
- .AsAIAgent(
- name: "SpaceNovelWriter",
- instructions: "You are a space novel writer. Always research relevant facts and generate character profiles for the main characters before writing novels." +
- "Write complete chapters without asking for approval or feedback. Do not ask the user about tone, style, pace, or format preferences - just write the novel based on the request.",
- tools: [AIFunctionFactory.Create(ResearchSpaceFactsAsync), AIFunctionFactory.Create(GenerateCharacterProfilesAsync)]);
+AIAgent agent = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
+ .AsAIAgent(new ChatClientAgentOptions
+ {
+ Name = "SpaceNovelWriter",
+ ChatOptions = new()
+ {
+ ModelId = deploymentName,
+ Instructions = "You are a space novel writer. Always research relevant facts and generate character profiles for the main characters before writing novels. " +
+ "Write complete chapters without asking for approval or feedback. Do not ask the user about tone, style, pace, or format preferences - just write the novel based on the request.",
+ Tools = [AIFunctionFactory.Create(ResearchSpaceFactsAsync), AIFunctionFactory.Create(GenerateCharacterProfilesAsync)]
+ },
+ });
// Enable background responses (only supported by {Azure}OpenAI Responses at this time).
AgentRunOptions options = new() { AllowBackgroundResponses = true };
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/README.md b/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/README.md
index ca52e8afa3..b10a541cff 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/README.md
+++ b/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/README.md
@@ -1,6 +1,6 @@
# What This Sample Shows
-This sample demonstrates how to use background responses with ChatCompletionAgent and Azure OpenAI Responses for long-running operations. Background responses support:
+This sample demonstrates how to use background responses with ChatCompletionAgent and Azure AI Foundry Responses for long-running operations. Background responses support:
- **Polling for completion** - Non-streaming APIs can start a background operation and return a continuation token. Poll with the token until the response completes.
- **Function calling** - Functions can be called during background operations.
@@ -15,14 +15,14 @@ For more information, see the [official documentation](https://learn.microsoft.c
Before you begin, ensure you have the following prerequisites:
- .NET 10 SDK or later
-- Azure OpenAI service endpoint and deployment configured
+- Azure AI Foundry service endpoint and deployment configured
- Azure CLI installed and authenticated (for Azure credential authentication)
-**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure OpenAI resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
+**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure AI Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
Set the following environment variables:
```powershell
-$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" # Replace with your Azure OpenAI resource endpoint
-$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-5" # Optional, defaults to gpt-5
+$env:AZURE_AI_PROJECT_ENDPOINT="https://your-project.services.ai.azure.com" # Replace with your Azure AI Foundry project endpoint
+$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-5" # Optional, defaults to gpt-5
```
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/Agent_Step11_Middleware.csproj b/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/Agent_Step11_Middleware.csproj
index 6582c30cd5..1696973a7c 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/Agent_Step11_Middleware.csproj
+++ b/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/Agent_Step11_Middleware.csproj
@@ -11,12 +11,12 @@
-
+
+
-
-
+
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/Program.cs
index 09cd540378..da73862f20 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/Program.cs
+++ b/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/Program.cs
@@ -8,21 +8,23 @@
using System.ComponentModel;
using System.Text.RegularExpressions;
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
// Get Azure AI Foundry configuration from environment variables
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = System.Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = System.Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o";
-// Get a client to create/retrieve server side agents with
+// Create a chat client.
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-var azureOpenAIClient = new AzureOpenAIClient(new Uri(endpoint), new DefaultAzureCredential())
- .GetChatClient(deploymentName);
+var chatClient = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient();
[Description("Get the weather for a given location.")]
static string GetWeather([Description("The location to get the weather for.")] string location)
@@ -33,12 +35,18 @@ static string GetDateTime()
=> DateTimeOffset.Now.ToString();
// Adding middleware to the chat client level and building an agent on top of it
-var originalAgent = azureOpenAIClient.AsIChatClient()
+var originalAgent = chatClient
.AsBuilder()
.Use(getResponseFunc: ChatClientMiddleware, getStreamingResponseFunc: null)
- .BuildAIAgent(
- instructions: "You are an AI assistant that helps people find information.",
- tools: [AIFunctionFactory.Create(GetDateTime, name: nameof(GetDateTime))]);
+ .BuildAIAgent(new ChatClientAgentOptions
+ {
+ ChatOptions = new()
+ {
+ ModelId = deploymentName,
+ Instructions = "You are an AI assistant that helps people find information.",
+ Tools = [AIFunctionFactory.Create(GetDateTime, name: nameof(GetDateTime))]
+ },
+ });
// Adding middleware to the agent level
var middlewareEnabledAgent = originalAgent
@@ -117,11 +125,17 @@ static string GetDateTime()
// In this case we are attaching an AIContextProvider that only adds messages.
Console.WriteLine("\n\n=== Example 6: AIContextProvider on chat client pipeline ===");
-var chatClientProviderAgent = azureOpenAIClient.AsIChatClient()
+var chatClientProviderAgent = chatClient
.AsBuilder()
.UseAIContextProviders(new DateTimeContextProvider())
- .BuildAIAgent(
- instructions: "You are an AI assistant that helps people find information.");
+ .BuildAIAgent(new ChatClientAgentOptions
+ {
+ ChatOptions = new()
+ {
+ ModelId = deploymentName,
+ Instructions = "You are an AI assistant that helps people find information.",
+ },
+ });
var chatClientContextResponse = await chatClientProviderAgent.RunAsync("Is it almost time for lunch?");
Console.WriteLine($"Chat client context-enriched response: {chatClientContextResponse}");
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/README.md b/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/README.md
index 74895e0cdf..c744267372 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/README.md
+++ b/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/README.md
@@ -7,7 +7,7 @@ This sample demonstrates how to add middleware to intercept:
## What This Sample Shows
-1. Azure OpenAI integration via `AzureOpenAIClient` and `DefaultAzureCredential`
+1. Azure AI Foundry integration via `ProjectResponsesClient` and `DefaultAzureCredential`
2. Chat client middleware using `ChatClientBuilder.Use(...)`
3. Agent run middleware (PII redaction and wording guardrails)
4. Function invocation middleware (logging and overriding a tool result)
@@ -26,8 +26,8 @@ Attempting to use function middleware on agents that do not wrap a ChatClientAge
## Prerequisites
1. Environment variables:
- - `AZURE_OPENAI_ENDPOINT`: Your Azure OpenAI endpoint
- - `AZURE_OPENAI_DEPLOYMENT_NAME`: Chat deployment name (optional; defaults to `gpt-4o`)
+ - `AZURE_AI_PROJECT_ENDPOINT`: Your Azure AI Foundry project endpoint
+ - `AZURE_AI_MODEL_DEPLOYMENT_NAME`: Chat deployment name (optional; defaults to `gpt-4o`)
2. Sign in with Azure CLI (PowerShell):
```powershell
az login
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step12_Plugins/Agent_Step12_Plugins.csproj b/dotnet/samples/02-agents/Agents/Agent_Step12_Plugins/Agent_Step12_Plugins.csproj
index 122c2e77a4..c88c4fc605 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step12_Plugins/Agent_Step12_Plugins.csproj
+++ b/dotnet/samples/02-agents/Agents/Agent_Step12_Plugins/Agent_Step12_Plugins.csproj
@@ -13,11 +13,11 @@
-
+
+
-
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step12_Plugins/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step12_Plugins/Program.cs
index 2e9b405183..73a88135b8 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step12_Plugins/Program.cs
+++ b/dotnet/samples/02-agents/Agents/Agent_Step12_Plugins/Program.cs
@@ -9,15 +9,14 @@
// as AI functions. The AsAITools method of the plugin class shows how to specify
// which methods should be exposed to the AI agent.
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.DependencyInjection;
-using OpenAI.Chat;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
// Create a service collection to hold the agent plugin and its dependencies.
ServiceCollection services = new();
@@ -30,15 +29,20 @@
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIAgent agent = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName)
- .AsAIAgent(
- instructions: "You are a helpful assistant that helps people find information.",
- name: "Assistant",
- tools: [.. serviceProvider.GetRequiredService().AsAITools()],
- services: serviceProvider); // Pass the service provider to the agent so it will be available to plugin functions to resolve dependencies.
+AIAgent agent = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
+ .AsAIAgent(new ChatClientAgentOptions
+ {
+ Name = "Assistant",
+ ChatOptions = new()
+ {
+ ModelId = deploymentName,
+ Instructions = "You are a helpful assistant that helps people find information.",
+ Tools = [.. serviceProvider.GetRequiredService().AsAITools()]
+ },
+ }, services: serviceProvider); // Pass the service provider to the agent so it will be available to plugin functions to resolve dependencies.
Console.WriteLine(await agent.RunAsync("Tell me current time and weather in Seattle."));
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step13_ChatReduction/Agent_Step13_ChatReduction.csproj b/dotnet/samples/02-agents/Agents/Agent_Step13_ChatReduction/Agent_Step13_ChatReduction.csproj
index 0f9de7c359..ede4e4ca18 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step13_ChatReduction/Agent_Step13_ChatReduction.csproj
+++ b/dotnet/samples/02-agents/Agents/Agent_Step13_ChatReduction/Agent_Step13_ChatReduction.csproj
@@ -9,13 +9,13 @@
-
+
-
+
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step13_ChatReduction/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step13_ChatReduction/Program.cs
index fe93ed785c..253e1fd0a7 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step13_ChatReduction/Program.cs
+++ b/dotnet/samples/02-agents/Agents/Agent_Step13_ChatReduction/Program.cs
@@ -5,26 +5,25 @@
// NOTE: this feature is only supported where the chat history is stored locally, such as with OpenAI Chat Completion.
// Where the chat history is stored server side, such as with Azure Foundry Agents, the service must manage the chat history size.
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
-using OpenAI.Chat;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
// Construct the agent, and provide a factory to create an in-memory chat message store with a reducer that keeps only the last 2 non-system messages.
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIAgent agent = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName)
+AIAgent agent = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
.AsAIAgent(new ChatClientAgentOptions
{
- ChatOptions = new() { Instructions = "You are good at telling jokes." },
+ ChatOptions = new() { ModelId = deploymentName, Instructions = "You are good at telling jokes." },
Name = "Joker",
ChatHistoryProvider = new InMemoryChatHistoryProvider(new() { ChatReducer = new MessageCountingChatReducer(2) })
});
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/Agent_Step14_BackgroundResponses.csproj b/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/Agent_Step14_BackgroundResponses.csproj
index 1c95b4af25..621600228d 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/Agent_Step14_BackgroundResponses.csproj
+++ b/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/Agent_Step14_BackgroundResponses.csproj
@@ -9,12 +9,13 @@
-
+
+
-
+
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/Program.cs
index 62db550556..83560e80da 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/Program.cs
+++ b/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/Program.cs
@@ -2,22 +2,25 @@
// This sample shows how to use background responses with ChatClientAgent and Azure OpenAI Responses.
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
-using OpenAI.Responses;
+using Microsoft.Extensions.AI;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIAgent agent = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetResponsesClient(deploymentName)
- .AsAIAgent();
+AIAgent agent = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
+ .AsAIAgent(new ChatClientAgentOptions
+ {
+ ChatOptions = new() { ModelId = deploymentName },
+ });
// Enable background responses (only supported by OpenAI Responses at this time).
AgentRunOptions options = new() { AllowBackgroundResponses = true };
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/README.md b/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/README.md
index e898733bc3..59a93885c0 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/README.md
+++ b/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/README.md
@@ -1,6 +1,6 @@
-# What This Sample Shows
+# What This Sample Shows
-This sample demonstrates how to use background responses with ChatCompletionAgent and Azure OpenAI Responses for long-running operations. Background responses support:
+This sample demonstrates how to use background responses with ChatClientAgent and Azure AI Foundry Responses for long-running operations. Background responses support:
- **Polling for completion** - Non-streaming APIs can start a background operation and return a continuation token. Poll with the token until the response completes.
- **Resuming after interruption** - Streaming APIs can be interrupted and resumed from the last update using the continuation token.
@@ -14,14 +14,14 @@ For more information, see the [official documentation](https://learn.microsoft.c
Before you begin, ensure you have the following prerequisites:
- .NET 10 SDK or later
-- Azure OpenAI service endpoint and deployment configured
+- Azure AI Foundry service endpoint and deployment configured
- Azure CLI installed and authenticated (for Azure credential authentication)
-**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure OpenAI resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
+**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure AI Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
Set the following environment variables:
```powershell
-$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" # Replace with your Azure OpenAI resource endpoint
-$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
+$env:AZURE_AI_PROJECT_ENDPOINT="https://your-project.services.ai.azure.com" # Replace with your Azure AI Foundry project endpoint
+$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
```
\ No newline at end of file
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step16_Declarative/Agent_Step16_Declarative.csproj b/dotnet/samples/02-agents/Agents/Agent_Step16_Declarative/Agent_Step16_Declarative.csproj
index 99073874ee..d4521b560b 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step16_Declarative/Agent_Step16_Declarative.csproj
+++ b/dotnet/samples/02-agents/Agents/Agent_Step16_Declarative/Agent_Step16_Declarative.csproj
@@ -9,7 +9,7 @@
-
+
@@ -19,7 +19,7 @@
-
+
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step16_Declarative/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step16_Declarative/Program.cs
index 215833c795..a6d7f9ed25 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step16_Declarative/Program.cs
+++ b/dotnet/samples/02-agents/Agents/Agent_Step16_Declarative/Program.cs
@@ -2,23 +2,22 @@
// This sample shows how to create an agent from a YAML based declarative representation.
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var _ = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
// Create the chat client
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-IChatClient chatClient = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName)
- .AsIChatClient();
+IChatClient chatClient = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient();
// Define the agent using a YAML definition.
var text =
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Agent_Step17_AdditionalAIContext.csproj b/dotnet/samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Agent_Step17_AdditionalAIContext.csproj
index 99073874ee..d4521b560b 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Agent_Step17_AdditionalAIContext.csproj
+++ b/dotnet/samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Agent_Step17_AdditionalAIContext.csproj
@@ -9,7 +9,7 @@
-
+
@@ -19,7 +19,7 @@
-
+
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Program.cs
index a341abe8cd..87834af317 100644
--- a/dotnet/samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Program.cs
+++ b/dotnet/samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Program.cs
@@ -9,16 +9,14 @@
using System.Text;
using System.Text.Json;
-using Azure.AI.OpenAI;
+using Azure.AI.Projects.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
-using OpenAI.Chat;
using SampleApp;
-using MEAI = Microsoft.Extensions.AI;
-var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-5-mini";
+var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-5-mini";
// A sample function to load the next three calendar events for the user.
Func> loadNextThreeCalendarEvents = async () =>
@@ -36,13 +34,13 @@
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIAgent agent = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName)
+AIAgent agent = new ProjectResponsesClient(
+ projectEndpoint: new Uri(endpoint),
+ tokenProvider: new DefaultAzureCredential())
+ .AsIChatClient()
.AsAIAgent(new ChatClientAgentOptions()
{
- ChatOptions = new() { Instructions = """
+ ChatOptions = new() { ModelId = deploymentName, Instructions = """
You are a helpful personal assistant.
You manage a TODO list for the user. When the user has completed one of the tasks it can be removed from the TODO list. Only provide the list of TODO items if asked.
You remind users of upcoming calendar events when the user interacts with you.
@@ -120,7 +118,7 @@ protected override ValueTask ProvideAIContextAsync(InvokingContext co
],
Messages =
[
- new MEAI.ChatMessage(ChatRole.User, outputMessageBuilder.ToString())
+ new ChatMessage(ChatRole.User, outputMessageBuilder.ToString())
]
});
}
@@ -150,7 +148,7 @@ private static void AddTodoItem(AgentSession? session, string item)
///
internal sealed class CalendarSearchAIContextProvider(Func> loadNextThreeCalendarEvents) : MessageAIContextProvider
{
- protected override async ValueTask> ProvideMessagesAsync(InvokingContext context, CancellationToken cancellationToken = default)
+ protected override async ValueTask> ProvideMessagesAsync(InvokingContext context, CancellationToken cancellationToken = default)
{
var events = await loadNextThreeCalendarEvents();
@@ -161,7 +159,7 @@ internal sealed class CalendarSearchAIContextProvider(Func> loadN
outputMessageBuilder.AppendLine($" - {calendarEvent}");
}
- return [new MEAI.ChatMessage(ChatRole.User, outputMessageBuilder.ToString())];
+ return [new ChatMessage(ChatRole.User, outputMessageBuilder.ToString())];
}
}
}
diff --git a/dotnet/samples/02-agents/Agents/README.md b/dotnet/samples/02-agents/Agents/README.md
index 116cbfc06b..db0b29f4c8 100644
--- a/dotnet/samples/02-agents/Agents/README.md
+++ b/dotnet/samples/02-agents/Agents/README.md
@@ -1,10 +1,10 @@
# Getting started with agents
The getting started with agents samples demonstrate the fundamental concepts and functionalities
-of single agents and can be used with any agent type.
+of single agents using Azure AI Foundry as the default provider.
-While the functionality can be used with any agent type, these samples use Azure OpenAI as the AI provider
-and use ChatCompletion as the type of service.
+These samples use the `ProjectResponsesClient` from `Azure.AI.Projects.OpenAI` to connect to
+Azure AI Foundry via the Responses API. This is the recommended approach for new development.
For other samples that demonstrate how to create and configure each type of agent that come with the agent framework,
see the [How to create an agent for each provider](../AgentProviders/README.md) samples.
@@ -14,13 +14,12 @@ see the [How to create an agent for each provider](../AgentProviders/README.md)
Before you begin, ensure you have the following prerequisites:
- .NET 10 SDK or later
-- Azure OpenAI service endpoint and deployment configured
+- Azure AI Foundry project endpoint configured
- Azure CLI installed and authenticated (for Azure credential authentication)
-- User has the `Cognitive Services OpenAI Contributor` role for the Azure OpenAI resource.
-**Note**: These samples use Azure OpenAI models. For more information, see [how to deploy Azure OpenAI models with Azure AI Foundry](https://learn.microsoft.com/en-us/azure/ai-foundry/how-to/deploy-models-openai).
+**Note**: These samples use Azure AI Foundry. For more information, see [Azure AI Foundry documentation](https://learn.microsoft.com/en-us/azure/ai-foundry/).
-**Note**: These samples use Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure OpenAI resource and have the `Cognitive Services OpenAI Contributor` role. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
+**Note**: These samples use Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure AI Foundry project. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
## Samples
@@ -56,8 +55,8 @@ cd Agent_Step01_UsingFunctionToolsWithApprovals
Set the following environment variables:
```powershell
-$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" # Replace with your Azure OpenAI resource endpoint
-$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
+$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure AI Foundry project endpoint
+$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
```
If the variables are not set, you will be prompted for the values when running the samples.
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/FoundryAgents_Step01.2_Running.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/FoundryAgents_Step01.2_Running.csproj
deleted file mode 100644
index daf7e24494..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/FoundryAgents_Step01.2_Running.csproj
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-
- Exe
- net10.0
-
- enable
- enable
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/Program.cs
deleted file mode 100644
index dd5db03b15..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/Program.cs
+++ /dev/null
@@ -1,39 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-// This sample shows how to create and use a simple AI agent with Azure Foundry Agents as the backend.
-
-using Azure.AI.Projects;
-using Azure.AI.Projects.OpenAI;
-using Azure.Identity;
-using Microsoft.Agents.AI;
-
-string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
-string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
-
-const string JokerInstructions = "You are good at telling jokes.";
-const string JokerName = "JokerAgent";
-
-// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents.
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential());
-
-// Define the agent you want to create. (Prompt Agent in this case)
-AgentVersionCreationOptions options = new(new PromptAgentDefinition(model: deploymentName) { Instructions = JokerInstructions });
-
-// Azure.AI.Agents SDK creates and manages agent by name and versions.
-// You can create a server side agent version with the Azure.AI.Agents SDK client below.
-AgentVersion agentVersion = aiProjectClient.Agents.CreateAgentVersion(agentName: JokerName, options);
-
-// You can use an AIAgent with an already created server side agent version.
-AIAgent jokerAgent = aiProjectClient.AsAIAgent(agentVersion);
-
-// Invoke the agent with streaming support.
-await foreach (AgentResponseUpdate update in jokerAgent.RunStreamingAsync("Tell me a joke about a pirate."))
-{
- Console.WriteLine(update);
-}
-
-// Cleanup by agent name removes the agent version created.
-await aiProjectClient.Agents.DeleteAgentAsync(jokerAgent.Name);
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/README.md
deleted file mode 100644
index 40cb5e107d..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/README.md
+++ /dev/null
@@ -1,46 +0,0 @@
-# Running a Simple AI Agent with Streaming
-
-This sample demonstrates how to create and run a simple AI agent with Azure Foundry Agents, including both text and streaming responses.
-
-## What this sample demonstrates
-
-- Creating a simple AI agent with instructions
-- Running an agent with text output
-- Running an agent with streaming output
-- Managing agent lifecycle (creation and deletion)
-
-## Prerequisites
-
-Before you begin, ensure you have the following prerequisites:
-
-- .NET 10 SDK or later
-- Azure Foundry service endpoint and deployment configured
-- Azure CLI installed and authenticated (for Azure credential authentication)
-
-**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
-
-Set the following environment variables:
-
-```powershell
-$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint
-$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
-```
-
-## Run the sample
-
-Navigate to the FoundryAgents sample directory and run:
-
-```powershell
-cd dotnet/samples/02-agents/FoundryAgents
-dotnet run --project .\FoundryAgents_Step01.2_Running
-```
-
-## Expected behavior
-
-The sample will:
-
-1. Create an agent named "JokerAgent" with instructions to tell jokes
-2. Run the agent with a text prompt and display the response
-3. Run the agent again with streaming to display the response as it's generated
-4. Clean up resources by deleting the agent
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/FoundryAgents_Step02_MultiturnConversation.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/FoundryAgents_Step02_MultiturnConversation.csproj
deleted file mode 100644
index daf7e24494..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/FoundryAgents_Step02_MultiturnConversation.csproj
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-
- Exe
- net10.0
-
- enable
- enable
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/Program.cs
deleted file mode 100644
index 1ac51c30ad..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/Program.cs
+++ /dev/null
@@ -1,55 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-// This sample shows how to create and use a simple AI agent with a multi-turn conversation.
-
-using Azure.AI.Projects;
-using Azure.AI.Projects.OpenAI;
-using Azure.Identity;
-using Microsoft.Agents.AI;
-
-string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
-string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
-
-const string JokerInstructions = "You are good at telling jokes.";
-const string JokerName = "JokerAgent";
-
-// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents.
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential());
-
-// Define the agent you want to create. (Prompt Agent in this case)
-AgentVersionCreationOptions options = new(new PromptAgentDefinition(model: deploymentName) { Instructions = JokerInstructions });
-
-// Retrieve an AIAgent for the created server side agent version.
-ChatClientAgent jokerAgent = await aiProjectClient.CreateAIAgentAsync(name: JokerName, options);
-
-// Invoke the agent with a multi-turn conversation, where the context is preserved in the session object.
-// Create a conversation in the server
-ProjectConversationsClient conversationsClient = aiProjectClient.GetProjectOpenAIClient().GetProjectConversationsClient();
-ProjectConversation conversation = await conversationsClient.CreateProjectConversationAsync();
-
-// Providing the conversation Id is not strictly necessary, but by not providing it no information will show up in the Foundry Project UI as conversations.
-// Sessions that don't have a conversation Id will work based on the `PreviousResponseId`.
-AgentSession session = await jokerAgent.CreateSessionAsync(conversation.Id);
-
-Console.WriteLine(await jokerAgent.RunAsync("Tell me a joke about a pirate.", session));
-Console.WriteLine(await jokerAgent.RunAsync("Now add some emojis to the joke and tell it in the voice of a pirate's parrot.", session));
-
-// Invoke the agent with a multi-turn conversation and streaming, where the context is preserved in the session object.
-session = await jokerAgent.CreateSessionAsync(conversation.Id);
-await foreach (AgentResponseUpdate update in jokerAgent.RunStreamingAsync("Tell me a joke about a pirate.", session))
-{
- Console.WriteLine(update);
-}
-await foreach (AgentResponseUpdate update in jokerAgent.RunStreamingAsync("Now add some emojis to the joke and tell it in the voice of a pirate's parrot.", session))
-{
- Console.WriteLine(update);
-}
-
-// Cleanup by agent name removes the agent version created.
-await aiProjectClient.Agents.DeleteAgentAsync(jokerAgent.Name);
-
-// Cleanup the conversation created.
-await conversationsClient.DeleteConversationAsync(conversation.Id);
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/README.md
deleted file mode 100644
index 86721bf960..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/README.md
+++ /dev/null
@@ -1,59 +0,0 @@
-# Multi-turn Conversation with AI Agents
-
-This sample demonstrates how to implement multi-turn conversations with AI agents, where context is preserved across multiple agent runs using threads and conversation IDs.
-
-## What this sample demonstrates
-
-- Creating an AI agent with instructions
-- Creating a project conversation to track conversations in the Foundry UI
-- Using threads with conversation IDs to maintain conversation context
-- Running multi-turn conversations with text output
-- Running multi-turn conversations with streaming output
-- Managing agent and conversation lifecycle (creation and deletion)
-
-## Prerequisites
-
-Before you begin, ensure you have the following prerequisites:
-
-- .NET 10 SDK or later
-- Azure Foundry service endpoint and deployment configured
-- Azure CLI installed and authenticated (for Azure credential authentication)
-
-**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
-
-Set the following environment variables:
-
-```powershell
-$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint
-$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
-```
-
-## Run the sample
-
-Navigate to the FoundryAgents sample directory and run:
-
-```powershell
-cd dotnet/samples/02-agents/FoundryAgents
-dotnet run --project .\FoundryAgents_Step02_MultiturnConversation
-```
-
-## Expected behavior
-
-The sample will:
-
-1. Create an agent named "JokerAgent" with instructions to tell jokes
-2. Create a project conversation to enable visibility in the Azure Foundry UI
-3. Create a thread linked to the conversation ID for context tracking
-4. Run the agent with a text prompt and display the response
-5. Send a follow-up message to the same thread, demonstrating context preservation
-6. Create a new thread sharing the same conversation ID and run the agent with streaming
-7. Send a follow-up streaming message to demonstrate multi-turn streaming
-8. Clean up resources by deleting the agent and conversation
-
-## Conversation ID vs PreviousResponseId
-
-When working with multi-turn conversations, there are two approaches:
-
-- **With Conversation ID**: By passing a `conversation.Id` to `CreateSessionAsync()`, the conversation will be visible in the Azure Foundry Project UI. This is useful for tracking and debugging conversations.
-- **Without Conversation ID**: Sessions created without a conversation ID still work correctly, maintaining context via `PreviousResponseId`. However, these conversations may not appear in the Foundry UI.
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/FoundryAgents_Step03_UsingFunctionTools.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/FoundryAgents_Step03_UsingFunctionTools.csproj
deleted file mode 100644
index daf7e24494..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/FoundryAgents_Step03_UsingFunctionTools.csproj
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-
- Exe
- net10.0
-
- enable
- enable
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/Program.cs
deleted file mode 100644
index cfd74000a6..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/Program.cs
+++ /dev/null
@@ -1,54 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-// This sample demonstrates how to use an agent with function tools.
-// It shows both non-streaming and streaming agent interactions using weather-related tools.
-
-using System.ComponentModel;
-using Azure.AI.Projects;
-using Azure.Identity;
-using Microsoft.Agents.AI;
-using Microsoft.Extensions.AI;
-
-string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
-string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
-
-[Description("Get the weather for a given location.")]
-static string GetWeather([Description("The location to get the weather for.")] string location)
- => $"The weather in {location} is cloudy with a high of 15°C.";
-
-const string AssistantInstructions = "You are a helpful assistant that can get weather information.";
-const string AssistantName = "WeatherAssistant";
-
-// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents.
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential());
-
-// Define the agent with function tools.
-AITool tool = AIFunctionFactory.Create(GetWeather);
-
-// Create AIAgent directly
-var newAgent = await aiProjectClient.CreateAIAgentAsync(name: AssistantName, model: deploymentName, instructions: AssistantInstructions, tools: [tool]);
-
-// Getting an already existing agent by name with tools.
-/*
- * IMPORTANT: Since agents that are stored in the server only know the definition of the function tools (JSON Schema),
- * you need to provided all invocable function tools when retrieving the agent so it can invoke them automatically.
- * If no invocable tools are provided, the function calling needs to handled manually.
- */
-var existingAgent = await aiProjectClient.GetAIAgentAsync(name: AssistantName, tools: [tool]);
-
-// Non-streaming agent interaction with function tools.
-AgentSession session = await existingAgent.CreateSessionAsync();
-Console.WriteLine(await existingAgent.RunAsync("What is the weather like in Amsterdam?", session));
-
-// Streaming agent interaction with function tools.
-session = await existingAgent.CreateSessionAsync();
-await foreach (AgentResponseUpdate update in existingAgent.RunStreamingAsync("What is the weather like in Amsterdam?", session))
-{
- Console.WriteLine(update);
-}
-
-// Cleanup by agent name removes the agent version created.
-await aiProjectClient.Agents.DeleteAgentAsync(existingAgent.Name);
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/README.md
deleted file mode 100644
index fa9b5baf21..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/README.md
+++ /dev/null
@@ -1,48 +0,0 @@
-# Using Function Tools with AI Agents
-
-This sample demonstrates how to use function tools with AI agents, allowing agents to call custom functions to retrieve information.
-
-## What this sample demonstrates
-
-- Creating function tools using AIFunctionFactory
-- Passing function tools to an AI agent
-- Running agents with function tools (text output)
-- Running agents with function tools (streaming output)
-- Managing agent lifecycle (creation and deletion)
-
-## Prerequisites
-
-Before you begin, ensure you have the following prerequisites:
-
-- .NET 10 SDK or later
-- Azure Foundry service endpoint and deployment configured
-- Azure CLI installed and authenticated (for Azure credential authentication)
-
-**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
-
-Set the following environment variables:
-
-```powershell
-$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint
-$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
-```
-
-## Run the sample
-
-Navigate to the FoundryAgents sample directory and run:
-
-```powershell
-cd dotnet/samples/02-agents/FoundryAgents
-dotnet run --project .\FoundryAgents_Step03.1_UsingFunctionTools
-```
-
-## Expected behavior
-
-The sample will:
-
-1. Create an agent named "WeatherAssistant" with a GetWeather function tool
-2. Run the agent with a text prompt asking about weather
-3. The agent will invoke the GetWeather function tool to retrieve weather information
-4. Run the agent again with streaming to display the response as it's generated
-5. Clean up resources by deleting the agent
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/FoundryAgents_Step04_UsingFunctionToolsWithApprovals.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/FoundryAgents_Step04_UsingFunctionToolsWithApprovals.csproj
deleted file mode 100644
index daf7e24494..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/FoundryAgents_Step04_UsingFunctionToolsWithApprovals.csproj
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-
- Exe
- net10.0
-
- enable
- enable
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/Program.cs
deleted file mode 100644
index f33fae35f4..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/Program.cs
+++ /dev/null
@@ -1,65 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-// This sample demonstrates how to use an agent with function tools that require a human in the loop for approvals.
-// It shows both non-streaming and streaming agent interactions using weather-related tools.
-// If the agent is hosted in a service, with a remote user, combine this sample with the Persisted Conversations sample to persist the chat history
-// while the agent is waiting for user input.
-
-using System.ComponentModel;
-using Azure.AI.Projects;
-using Azure.Identity;
-using Microsoft.Agents.AI;
-using Microsoft.Extensions.AI;
-
-string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
-string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
-
-// Create a sample function tool that the agent can use.
-[Description("Get the weather for a given location.")]
-static string GetWeather([Description("The location to get the weather for.")] string location)
- => $"The weather in {location} is cloudy with a high of 15°C.";
-
-const string AssistantInstructions = "You are a helpful assistant that can get weather information.";
-const string AssistantName = "WeatherAssistant";
-
-// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents.
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential());
-
-ApprovalRequiredAIFunction approvalTool = new(AIFunctionFactory.Create(GetWeather, name: nameof(GetWeather)));
-
-// Create AIAgent directly
-AIAgent agent = await aiProjectClient.CreateAIAgentAsync(name: AssistantName, model: deploymentName, instructions: AssistantInstructions, tools: [approvalTool]);
-
-// Call the agent with approval-required function tools.
-// The agent will request approval before invoking the function.
-AgentSession session = await agent.CreateSessionAsync();
-AgentResponse response = await agent.RunAsync("What is the weather like in Amsterdam?", session);
-
-// Check if there are any approval requests.
-// For simplicity, we are assuming here that only function approvals are pending.
-List approvalRequests = response.Messages.SelectMany(m => m.Contents).OfType().ToList();
-
-while (approvalRequests.Count > 0)
-{
- // Ask the user to approve each function call request.
- List userInputMessages = approvalRequests
- .ConvertAll(functionApprovalRequest =>
- {
- Console.WriteLine($"The agent would like to invoke the following function, please reply Y to approve: Name {functionApprovalRequest.FunctionCall.Name}");
- bool approved = Console.ReadLine()?.Equals("Y", StringComparison.OrdinalIgnoreCase) ?? false;
- return new ChatMessage(ChatRole.User, [functionApprovalRequest.CreateResponse(approved)]);
- });
-
- // Pass the user input responses back to the agent for further processing.
- response = await agent.RunAsync(userInputMessages, session);
-
- approvalRequests = response.Messages.SelectMany(m => m.Contents).OfType().ToList();
-}
-
-Console.WriteLine($"\nAgent: {response}");
-
-// Cleanup by agent name removes the agent version created.
-await aiProjectClient.Agents.DeleteAgentAsync(agent.Name);
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/README.md
deleted file mode 100644
index 42cbd6ba32..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/README.md
+++ /dev/null
@@ -1,51 +0,0 @@
-# Using Function Tools with Approvals (Human-in-the-Loop)
-
-This sample demonstrates how to use function tools that require human approval before execution, implementing a human-in-the-loop workflow.
-
-## What this sample demonstrates
-
-- Creating approval-required function tools using ApprovalRequiredAIFunction
-- Handling user input requests for function approvals
-- Implementing human-in-the-loop approval workflows
-- Processing agent responses with pending approvals
-- Managing agent lifecycle (creation and deletion)
-
-## Prerequisites
-
-Before you begin, ensure you have the following prerequisites:
-
-- .NET 10 SDK or later
-- Azure Foundry service endpoint and deployment configured
-- Azure CLI installed and authenticated (for Azure credential authentication)
-
-**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
-
-Set the following environment variables:
-
-```powershell
-$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint
-$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
-```
-
-## Run the sample
-
-Navigate to the FoundryAgents sample directory and run:
-
-```powershell
-cd dotnet/samples/02-agents/FoundryAgents
-dotnet run --project .\FoundryAgents_Step04_UsingFunctionToolsWithApprovals
-```
-
-## Expected behavior
-
-The sample will:
-
-1. Create an agent named "WeatherAssistant" with an approval-required GetWeather function tool
-2. Run the agent with a prompt asking about weather
-3. The agent will request approval before invoking the GetWeather function
-4. The sample will prompt the user to approve or deny the function call (enter 'Y' to approve)
-5. After approval, the function will be executed and the result returned to the agent
-6. Clean up resources by deleting the agent
-
-**Note**: For hosted agents with remote users, combine this sample with the Persisted Conversations sample to persist chat history while waiting for user approval.
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/FoundryAgents_Step05_StructuredOutput.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/FoundryAgents_Step05_StructuredOutput.csproj
deleted file mode 100644
index daf7e24494..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/FoundryAgents_Step05_StructuredOutput.csproj
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-
- Exe
- net10.0
-
- enable
- enable
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/Program.cs
deleted file mode 100644
index 3c02a4cec2..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/Program.cs
+++ /dev/null
@@ -1,95 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-// This sample shows how to configure an agent to produce structured output.
-
-using System.ComponentModel;
-using System.Text.Json;
-using System.Text.Json.Serialization;
-using Azure.AI.Projects;
-using Azure.Identity;
-using Microsoft.Agents.AI;
-using SampleApp;
-
-#pragma warning disable CA5399
-
-string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
-string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
-
-const string AssistantInstructions = "You are a helpful assistant that extracts structured information about people.";
-const string AssistantName = "StructuredOutputAssistant";
-
-// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents.
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential());
-
-// Create ChatClientAgent directly
-ChatClientAgent agent = await aiProjectClient.CreateAIAgentAsync(
- model: deploymentName,
- new ChatClientAgentOptions()
- {
- Name = AssistantName,
- ChatOptions = new()
- {
- Instructions = AssistantInstructions,
- ResponseFormat = Microsoft.Extensions.AI.ChatResponseFormat.ForJsonSchema()
- }
- });
-
-// Set PersonInfo as the type parameter of RunAsync method to specify the expected structured output from the agent and invoke the agent with some unstructured input.
-AgentResponse response = await agent.RunAsync("Please provide information about John Smith, who is a 35-year-old software engineer.");
-
-// Access the structured output via the Result property of the agent response.
-Console.WriteLine("Assistant Output:");
-Console.WriteLine($"Name: {response.Result.Name}");
-Console.WriteLine($"Age: {response.Result.Age}");
-Console.WriteLine($"Occupation: {response.Result.Occupation}");
-
-// Create the ChatClientAgent with the specified name, instructions, and expected structured output the agent should produce.
-ChatClientAgent agentWithPersonInfo = await aiProjectClient.CreateAIAgentAsync(
- model: deploymentName,
- new ChatClientAgentOptions()
- {
- Name = AssistantName,
- ChatOptions = new()
- {
- Instructions = AssistantInstructions,
- ResponseFormat = Microsoft.Extensions.AI.ChatResponseFormat.ForJsonSchema()
- }
- });
-
-// Invoke the agent with some unstructured input while streaming, to extract the structured information from.
-IAsyncEnumerable updates = agentWithPersonInfo.RunStreamingAsync("Please provide information about John Smith, who is a 35-year-old software engineer.");
-
-// Assemble all the parts of the streamed output, since we can only deserialize once we have the full json,
-// then deserialize the response into the PersonInfo class.
-PersonInfo personInfo = JsonSerializer.Deserialize((await updates.ToAgentResponseAsync()).Text, JsonSerializerOptions.Web)
- ?? throw new InvalidOperationException("Failed to deserialize the streamed response into PersonInfo.");
-
-Console.WriteLine("Assistant Output:");
-Console.WriteLine($"Name: {personInfo.Name}");
-Console.WriteLine($"Age: {personInfo.Age}");
-Console.WriteLine($"Occupation: {personInfo.Occupation}");
-
-// Cleanup by agent name removes the agent version created.
-await aiProjectClient.Agents.DeleteAgentAsync(agent.Name);
-
-namespace SampleApp
-{
- ///
- /// Represents information about a person, including their name, age, and occupation, matched to the JSON schema used in the agent.
- ///
- [Description("Information about a person including their name, age, and occupation")]
- public class PersonInfo
- {
- [JsonPropertyName("name")]
- public string? Name { get; set; }
-
- [JsonPropertyName("age")]
- public int? Age { get; set; }
-
- [JsonPropertyName("occupation")]
- public string? Occupation { get; set; }
- }
-}
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/README.md
deleted file mode 100644
index 4c44230e18..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/README.md
+++ /dev/null
@@ -1,49 +0,0 @@
-# Structured Output with AI Agents
-
-This sample demonstrates how to configure AI agents to produce structured output in JSON format using JSON schemas.
-
-## What this sample demonstrates
-
-- Configuring agents with JSON schema response formats
-- Using generic RunAsync method for structured output
-- Deserializing structured responses into typed objects
-- Running agents with streaming and structured output
-- Managing agent lifecycle (creation and deletion)
-
-## Prerequisites
-
-Before you begin, ensure you have the following prerequisites:
-
-- .NET 10 SDK or later
-- Azure Foundry service endpoint and deployment configured
-- Azure CLI installed and authenticated (for Azure credential authentication)
-
-**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
-
-Set the following environment variables:
-
-```powershell
-$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint
-$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
-```
-
-## Run the sample
-
-Navigate to the FoundryAgents sample directory and run:
-
-```powershell
-cd dotnet/samples/02-agents/FoundryAgents
-dotnet run --project .\FoundryAgents_Step05_StructuredOutput
-```
-
-## Expected behavior
-
-The sample will:
-
-1. Create an agent named "StructuredOutputAssistant" configured to produce JSON output
-2. Run the agent with a prompt to extract person information
-3. Deserialize the JSON response into a PersonInfo object
-4. Display the structured data (Name, Age, Occupation)
-5. Run the agent again with streaming and deserialize the streamed JSON response
-6. Clean up resources by deleting the agent
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/FoundryAgents_Step06_PersistedConversations.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/FoundryAgents_Step06_PersistedConversations.csproj
deleted file mode 100644
index daf7e24494..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/FoundryAgents_Step06_PersistedConversations.csproj
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-
- Exe
- net10.0
-
- enable
- enable
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/Program.cs
deleted file mode 100644
index d8a5a7cd35..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/Program.cs
+++ /dev/null
@@ -1,47 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-// This sample shows how to create and use a simple AI agent with a conversation that can be persisted to disk.
-
-using System.Text.Json;
-using Azure.AI.Projects;
-using Azure.Identity;
-using Microsoft.Agents.AI;
-
-string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
-string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
-
-const string JokerInstructions = "You are good at telling jokes.";
-const string JokerName = "JokerAgent";
-
-// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents.
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential());
-
-AIAgent agent = await aiProjectClient.CreateAIAgentAsync(name: JokerName, model: deploymentName, instructions: JokerInstructions);
-
-// Start a new session for the agent conversation.
-AgentSession session = await agent.CreateSessionAsync();
-
-// Run the agent with a new session.
-Console.WriteLine(await agent.RunAsync("Tell me a joke about a pirate.", session));
-
-// Serialize the session state to a JsonElement, so it can be stored for later use.
-JsonElement serializedSession = await agent.SerializeSessionAsync(session);
-
-// Save the serialized session to a temporary file (for demonstration purposes).
-string tempFilePath = Path.GetTempFileName();
-await File.WriteAllTextAsync(tempFilePath, JsonSerializer.Serialize(serializedSession));
-
-// Load the serialized session from the temporary file (for demonstration purposes).
-JsonElement reloadedSerializedSession = JsonElement.Parse(await File.ReadAllTextAsync(tempFilePath))!;
-
-// Deserialize the session state after loading from storage.
-AgentSession resumedSession = await agent.DeserializeSessionAsync(reloadedSerializedSession);
-
-// Run the agent again with the resumed session.
-Console.WriteLine(await agent.RunAsync("Now tell the same joke in the voice of a pirate, and add some emojis to the joke.", resumedSession));
-
-// Cleanup by agent name removes the agent version created.
-await aiProjectClient.Agents.DeleteAgentAsync(agent.Name);
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/README.md
deleted file mode 100644
index 57a032e9ec..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/README.md
+++ /dev/null
@@ -1,50 +0,0 @@
-# Persisted Conversations with AI Agents
-
-This sample demonstrates how to serialize and persist agent conversation threads to storage, allowing conversations to be resumed later.
-
-## What this sample demonstrates
-
-- Serializing agent threads to JSON
-- Persisting thread state to disk
-- Loading and deserializing thread state from storage
-- Resuming conversations with persisted threads
-- Managing agent lifecycle (creation and deletion)
-
-## Prerequisites
-
-Before you begin, ensure you have the following prerequisites:
-
-- .NET 10 SDK or later
-- Azure Foundry service endpoint and deployment configured
-- Azure CLI installed and authenticated (for Azure credential authentication)
-
-**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
-
-Set the following environment variables:
-
-```powershell
-$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint
-$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
-```
-
-## Run the sample
-
-Navigate to the FoundryAgents sample directory and run:
-
-```powershell
-cd dotnet/samples/02-agents/FoundryAgents
-dotnet run --project .\FoundryAgents_Step06_PersistedConversations
-```
-
-## Expected behavior
-
-The sample will:
-
-1. Create an agent named "JokerAgent" with instructions to tell jokes
-2. Create a thread and run the agent with an initial prompt
-3. Serialize the thread state to JSON
-4. Save the serialized thread to a temporary file
-5. Load the thread from the file and deserialize it
-6. Resume the conversation with the same thread using a follow-up prompt
-7. Clean up resources by deleting the agent
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/FoundryAgents_Step07_Observability.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/FoundryAgents_Step07_Observability.csproj
deleted file mode 100644
index 5ceeabb204..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/FoundryAgents_Step07_Observability.csproj
+++ /dev/null
@@ -1,23 +0,0 @@
-
-
-
- Exe
- net10.0
-
- enable
- enable
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/Program.cs
deleted file mode 100644
index 257e24859f..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/Program.cs
+++ /dev/null
@@ -1,55 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-// This sample shows how to create and use a simple AI agent with Azure Foundry Agents as the backend that logs telemetry using OpenTelemetry.
-
-using Azure.AI.Projects;
-using Azure.Identity;
-using Azure.Monitor.OpenTelemetry.Exporter;
-using Microsoft.Agents.AI;
-using OpenTelemetry;
-using OpenTelemetry.Trace;
-
-string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
-string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
-string? applicationInsightsConnectionString = Environment.GetEnvironmentVariable("APPLICATIONINSIGHTS_CONNECTION_STRING");
-
-const string JokerInstructions = "You are good at telling jokes.";
-const string JokerName = "JokerAgent";
-
-// Create TracerProvider with console exporter
-// This will output the telemetry data to the console.
-string sourceName = Guid.NewGuid().ToString("N");
-TracerProviderBuilder tracerProviderBuilder = Sdk.CreateTracerProviderBuilder()
- .AddSource(sourceName)
- .AddConsoleExporter();
-if (!string.IsNullOrWhiteSpace(applicationInsightsConnectionString))
-{
- tracerProviderBuilder.AddAzureMonitorTraceExporter(options => options.ConnectionString = applicationInsightsConnectionString);
-}
-using var tracerProvider = tracerProviderBuilder.Build();
-
-// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents.
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential());
-
-// Define the agent you want to create. (Prompt Agent in this case)
-AIAgent agent = (await aiProjectClient.CreateAIAgentAsync(name: JokerName, model: deploymentName, instructions: JokerInstructions))
- .AsBuilder()
- .UseOpenTelemetry(sourceName: sourceName)
- .Build();
-
-// Invoke the agent and output the text result.
-AgentSession session = await agent.CreateSessionAsync();
-Console.WriteLine(await agent.RunAsync("Tell me a joke about a pirate.", session));
-
-// Invoke the agent with streaming support.
-session = await agent.CreateSessionAsync();
-await foreach (AgentResponseUpdate update in agent.RunStreamingAsync("Tell me a joke about a pirate.", session))
-{
- Console.WriteLine(update);
-}
-
-// Cleanup by agent name removes the agent version created.
-await aiProjectClient.Agents.DeleteAgentAsync(agent.Name);
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/README.md
deleted file mode 100644
index 459434bce2..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/README.md
+++ /dev/null
@@ -1,51 +0,0 @@
-# Observability with OpenTelemetry
-
-This sample demonstrates how to add observability to AI agents using OpenTelemetry for tracing and monitoring.
-
-## What this sample demonstrates
-
-- Setting up OpenTelemetry TracerProvider
-- Configuring console exporter for telemetry output
-- Configuring Azure Monitor exporter for Application Insights
-- Adding OpenTelemetry middleware to agents
-- Running agents with telemetry collection (text and streaming)
-- Managing agent lifecycle (creation and deletion)
-
-## Prerequisites
-
-Before you begin, ensure you have the following prerequisites:
-
-- .NET 10 SDK or later
-- Azure Foundry service endpoint and deployment configured
-- Azure CLI installed and authenticated (for Azure credential authentication)
-- (Optional) Application Insights connection string for Azure Monitor integration
-
-**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
-
-Set the following environment variables:
-
-```powershell
-$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint
-$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
-$env:APPLICATIONINSIGHTS_CONNECTION_STRING="your-connection-string" # Optional, for Azure Monitor integration
-```
-
-## Run the sample
-
-Navigate to the FoundryAgents sample directory and run:
-
-```powershell
-cd dotnet/samples/02-agents/FoundryAgents
-dotnet run --project .\FoundryAgents_Step07_Observability
-```
-
-## Expected behavior
-
-The sample will:
-
-1. Create a TracerProvider with console exporter (and optionally Azure Monitor exporter)
-2. Create an agent named "JokerAgent" with OpenTelemetry middleware
-3. Run the agent with a text prompt and display telemetry traces to console
-4. Run the agent again with streaming and display telemetry traces
-5. Clean up resources by deleting the agent
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/FoundryAgents_Step08_DependencyInjection.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/FoundryAgents_Step08_DependencyInjection.csproj
deleted file mode 100644
index f1812befeb..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/FoundryAgents_Step08_DependencyInjection.csproj
+++ /dev/null
@@ -1,23 +0,0 @@
-
-
-
- Exe
- net10.0
-
- enable
- enable
-
- $(NoWarn);CA1812
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/Program.cs
deleted file mode 100644
index b7a9874e7b..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/Program.cs
+++ /dev/null
@@ -1,97 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-// This sample shows how to use dependency injection to register an AIAgent and use it from a hosted service with a user input chat loop.
-
-using System.ClientModel;
-using Azure.AI.Projects;
-using Azure.Identity;
-using Microsoft.Agents.AI;
-using Microsoft.Extensions.DependencyInjection;
-using Microsoft.Extensions.Hosting;
-
-string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
-string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
-
-const string JokerInstructions = "You are good at telling jokes.";
-const string JokerName = "JokerAgent";
-
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIProjectClient aIProjectClient = new(new Uri(endpoint), new DefaultAzureCredential());
-
-// Create a new agent if one doesn't exist already.
-ChatClientAgent agent;
-try
-{
- agent = await aIProjectClient.GetAIAgentAsync(name: JokerName);
-}
-catch (ClientResultException ex) when (ex.Status == 404)
-{
- agent = await aIProjectClient.CreateAIAgentAsync(name: JokerName, model: deploymentName, instructions: JokerInstructions);
-}
-
-// Create a host builder that we will register services with and then run.
-HostApplicationBuilder builder = Host.CreateApplicationBuilder(args);
-
-// Add the agents client to the service collection.
-builder.Services.AddSingleton((sp) => aIProjectClient);
-
-// Add the AI agent to the service collection.
-builder.Services.AddSingleton((sp) => agent);
-
-// Add a sample service that will use the agent to respond to user input.
-builder.Services.AddHostedService();
-
-// Build and run the host.
-using IHost host = builder.Build();
-await host.RunAsync().ConfigureAwait(false);
-
-///
-/// A sample service that uses an AI agent to respond to user input.
-///
-internal sealed class SampleService(AIProjectClient client, AIAgent agent, IHostApplicationLifetime appLifetime) : IHostedService
-{
- private AgentSession? _session;
-
- public async Task StartAsync(CancellationToken cancellationToken)
- {
- // Create a session that will be used for the entirety of the service lifetime so that the user can ask follow up questions.
- this._session = await agent.CreateSessionAsync(cancellationToken);
- _ = this.RunAsync(appLifetime.ApplicationStopping);
- }
-
- public async Task RunAsync(CancellationToken cancellationToken)
- {
- // Delay a little to allow the service to finish starting.
- await Task.Delay(100, cancellationToken);
-
- while (!cancellationToken.IsCancellationRequested)
- {
- Console.WriteLine("\nAgent: Ask me to tell you a joke about a specific topic. To exit just press Ctrl+C or enter without any input.\n");
- Console.Write("> ");
- string? input = Console.ReadLine();
-
- // If the user enters no input, signal the application to shut down.
- if (string.IsNullOrWhiteSpace(input))
- {
- appLifetime.StopApplication();
- break;
- }
-
- // Stream the output to the console as it is generated.
- await foreach (AgentResponseUpdate update in agent.RunStreamingAsync(input, this._session, cancellationToken: cancellationToken))
- {
- Console.Write(update);
- }
-
- Console.WriteLine();
- }
- }
-
- public async Task StopAsync(CancellationToken cancellationToken)
- {
- Console.WriteLine("\nDeleting agent ...");
- await client.Agents.DeleteAgentAsync(agent.Name, cancellationToken).ConfigureAwait(false);
- }
-}
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/README.md
deleted file mode 100644
index 12760e736f..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/README.md
+++ /dev/null
@@ -1,51 +0,0 @@
-# Dependency Injection with AI Agents
-
-This sample demonstrates how to use dependency injection to register and manage AI agents within a hosted service application.
-
-## What this sample demonstrates
-
-- Setting up dependency injection with HostApplicationBuilder
-- Registering AIProjectClient as a singleton service
-- Registering AIAgent as a singleton service
-- Using agents in hosted services
-- Interactive chat loop with streaming responses
-- Managing agent lifecycle (creation and deletion)
-
-## Prerequisites
-
-Before you begin, ensure you have the following prerequisites:
-
-- .NET 10 SDK or later
-- Azure Foundry service endpoint and deployment configured
-- Azure CLI installed and authenticated (for Azure credential authentication)
-
-**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
-
-Set the following environment variables:
-
-```powershell
-$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint
-$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
-```
-
-## Run the sample
-
-Navigate to the FoundryAgents sample directory and run:
-
-```powershell
-cd dotnet/samples/02-agents/FoundryAgents
-dotnet run --project .\FoundryAgents_Step08_DependencyInjection
-```
-
-## Expected behavior
-
-The sample will:
-
-1. Create a host with dependency injection configured
-2. Register AIProjectClient and AIAgent as services
-3. Create an agent named "JokerAgent" with instructions to tell jokes
-4. Start an interactive chat loop where you can ask the agent questions
-5. The agent will respond with streaming output
-6. Enter an empty line or press Ctrl+C to exit
-7. Clean up resources by deleting the agent
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/FoundryAgents_Step09_UsingMcpClientAsTools.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/FoundryAgents_Step09_UsingMcpClientAsTools.csproj
deleted file mode 100644
index a6d96cb3db..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/FoundryAgents_Step09_UsingMcpClientAsTools.csproj
+++ /dev/null
@@ -1,23 +0,0 @@
-
-
-
- Exe
- net10.0
-
- enable
- enable
- 3afc9b74-af74-4d8e-ae96-fa1c511d11ac
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/Program.cs
deleted file mode 100644
index e1968122a4..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/Program.cs
+++ /dev/null
@@ -1,50 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-// This sample shows how to expose an AI agent as an MCP tool.
-
-using Azure.AI.Projects;
-using Azure.Identity;
-using Microsoft.Agents.AI;
-using Microsoft.Extensions.AI;
-using ModelContextProtocol.Client;
-
-string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
-string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
-
-Console.WriteLine("Starting MCP Stdio for @modelcontextprotocol/server-github ... ");
-
-// Create an MCPClient for the GitHub server
-await using var mcpClient = await McpClient.CreateAsync(new StdioClientTransport(new()
-{
- Name = "MCPServer",
- Command = "npx",
- Arguments = ["-y", "--verbose", "@modelcontextprotocol/server-github"],
-}));
-
-// Retrieve the list of tools available on the GitHub server
-IList mcpTools = await mcpClient.ListToolsAsync();
-string agentName = "AgentWithMCP";
-// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents.
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential());
-
-Console.WriteLine($"Creating the agent '{agentName}' ...");
-
-// Define the agent you want to create. (Prompt Agent in this case)
-AIAgent agent = await aiProjectClient.CreateAIAgentAsync(
- name: agentName,
- model: deploymentName,
- instructions: "You answer questions related to GitHub repositories only.",
- tools: [.. mcpTools.Cast()]);
-
-string prompt = "Summarize the last four commits to the microsoft/semantic-kernel repository?";
-
-Console.WriteLine($"Invoking agent '{agent.Name}' with prompt: {prompt} ...");
-
-// Invoke the agent and output the text result.
-Console.WriteLine(await agent.RunAsync(prompt));
-
-// Clean up the agent after use.
-await aiProjectClient.Agents.DeleteAgentAsync(agent.Name);
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/README.md
deleted file mode 100644
index e4e3fe537a..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/README.md
+++ /dev/null
@@ -1,50 +0,0 @@
-# Using MCP Client Tools with AI Agents
-
-This sample demonstrates how to use Model Context Protocol (MCP) client tools with AI agents, allowing agents to access tools provided by MCP servers. This sample uses the GitHub MCP server to provide tools for querying GitHub repositories.
-
-## What this sample demonstrates
-
-- Creating MCP clients to connect to MCP servers (GitHub server)
-- Retrieving tools from MCP servers
-- Using MCP tools with AI agents
-- Running agents with MCP-provided function tools
-- Managing agent lifecycle (creation and deletion)
-
-## Prerequisites
-
-Before you begin, ensure you have the following prerequisites:
-
-- .NET 10 SDK or later
-- Azure Foundry service endpoint and deployment configured
-- Azure CLI installed and authenticated (for Azure credential authentication)
-- Node.js and npm installed (for running the GitHub MCP server)
-
-**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
-
-Set the following environment variables:
-
-```powershell
-$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint
-$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
-```
-
-## Run the sample
-
-Navigate to the FoundryAgents sample directory and run:
-
-```powershell
-cd dotnet/samples/02-agents/FoundryAgents
-dotnet run --project .\FoundryAgents_Step09_UsingMcpClientAsTools
-```
-
-## Expected behavior
-
-The sample will:
-
-1. Start the GitHub MCP server using `@modelcontextprotocol/server-github`
-2. Create an MCP client to connect to the GitHub server
-3. Retrieve the available tools from the GitHub MCP server
-4. Create an agent named "AgentWithMCP" with the GitHub tools
-5. Run the agent with a prompt to summarize the last four commits to the microsoft/semantic-kernel repository
-6. The agent will use the GitHub MCP tools to query the repository information
-7. Clean up resources by deleting the agent
\ No newline at end of file
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/Assets/walkway.jpg b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/Assets/walkway.jpg
deleted file mode 100644
index 13ef1e1840..0000000000
Binary files a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/Assets/walkway.jpg and /dev/null differ
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/FoundryAgents_Step10_UsingImages.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/FoundryAgents_Step10_UsingImages.csproj
deleted file mode 100644
index 53661ff199..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/FoundryAgents_Step10_UsingImages.csproj
+++ /dev/null
@@ -1,26 +0,0 @@
-
-
-
- Exe
- net10.0
-
- enable
- enable
-
-
-
-
-
-
-
-
-
-
-
-
-
- Always
-
-
-
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/Program.cs
deleted file mode 100644
index d44d62df51..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/Program.cs
+++ /dev/null
@@ -1,38 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-// This sample shows how to use Image Multi-Modality with an AI agent.
-
-using Azure.AI.Projects;
-using Azure.Identity;
-using Microsoft.Agents.AI;
-using Microsoft.Extensions.AI;
-
-string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
-string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o";
-
-const string VisionInstructions = "You are a helpful agent that can analyze images";
-const string VisionName = "VisionAgent";
-
-// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents.
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential());
-
-// Define the agent you want to create. (Prompt Agent in this case)
-AIAgent agent = await aiProjectClient.CreateAIAgentAsync(name: VisionName, model: deploymentName, instructions: VisionInstructions);
-
-ChatMessage message = new(ChatRole.User, [
- new TextContent("What do you see in this image?"),
- await DataContent.LoadFromAsync("assets/walkway.jpg"),
-]);
-
-AgentSession session = await agent.CreateSessionAsync();
-
-await foreach (AgentResponseUpdate update in agent.RunStreamingAsync(message, session))
-{
- Console.WriteLine(update);
-}
-
-// Cleanup by agent name removes the agent version created.
-await aiProjectClient.Agents.DeleteAgentAsync(agent.Name);
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/README.md
deleted file mode 100644
index 220104a291..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/README.md
+++ /dev/null
@@ -1,53 +0,0 @@
-# Using Images with AI Agents
-
-This sample demonstrates how to use image multi-modality with an AI agent. It shows how to create a vision-enabled agent that can analyze and describe images using Azure Foundry Agents.
-
-## What this sample demonstrates
-
-- Creating a vision-enabled AI agent with image analysis capabilities
-- Sending both text and image content to an agent in a single message
-- Using `UriContent` for URI-referenced images
-- Processing multimodal input (text + image) with an AI agent
-- Managing agent lifecycle (creation and deletion)
-
-## Key features
-
-- **Vision Agent**: Creates an agent specifically instructed to analyze images
-- **Multimodal Input**: Combines text questions with image URI in a single message
-- **Azure Foundry Agents Integration**: Uses Azure Foundry Agents with vision capabilities
-
-## Prerequisites
-
-Before running this sample, ensure you have:
-
-1. An Azure OpenAI project set up
-2. A compatible model deployment (e.g., gpt-4o)
-3. Azure CLI installed and authenticated
-
-## Environment Variables
-
-Set the following environment variables:
-
-```powershell
-$env:AZURE_AI_PROJECT_ENDPOINT="https://your-resource.openai.azure.com/" # Replace with your Azure Foundry Project endpoint
-$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o" # Replace with your model deployment name (optional, defaults to gpt-4o)
-```
-
-## Run the sample
-
-Navigate to the FoundryAgents sample directory and run:
-
-```powershell
-cd dotnet/samples/02-agents/FoundryAgents
-dotnet run --project .\FoundryAgents_Step10_UsingImages
-```
-
-## Expected behavior
-
-The sample will:
-
-1. Create a vision-enabled agent named "VisionAgent"
-2. Send a message containing both text ("What do you see in this image?") and a URI-referenced image of a green walkway (nature boardwalk)
-3. The agent will analyze the image and provide a description
-4. Clean up resources by deleting the agent
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/FoundryAgents_Step11_AsFunctionTool.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/FoundryAgents_Step11_AsFunctionTool.csproj
deleted file mode 100644
index 54f37f1aa6..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/FoundryAgents_Step11_AsFunctionTool.csproj
+++ /dev/null
@@ -1,21 +0,0 @@
-
-
-
- Exe
- net10.0
-
- enable
- enable
- 3afc9b74-af74-4d8e-ae96-fa1c511d11ac
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/Program.cs
deleted file mode 100644
index 585725322e..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/Program.cs
+++ /dev/null
@@ -1,50 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-// This sample shows how to create and use an Azure Foundry Agents AI agent as a function tool.
-
-using System.ComponentModel;
-using Azure.AI.Projects;
-using Azure.Identity;
-using Microsoft.Agents.AI;
-using Microsoft.Extensions.AI;
-
-string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
-string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini";
-
-const string WeatherInstructions = "You answer questions about the weather.";
-const string WeatherName = "WeatherAgent";
-const string MainInstructions = "You are a helpful assistant who responds in French.";
-const string MainName = "MainAgent";
-
-[Description("Get the weather for a given location.")]
-static string GetWeather([Description("The location to get the weather for.")] string location)
- => $"The weather in {location} is cloudy with a high of 15°C.";
-
-// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents.
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential());
-
-// Create the weather agent with function tools.
-AITool weatherTool = AIFunctionFactory.Create(GetWeather);
-AIAgent weatherAgent = await aiProjectClient.CreateAIAgentAsync(
- name: WeatherName,
- model: deploymentName,
- instructions: WeatherInstructions,
- tools: [weatherTool]);
-
-// Create the main agent, and provide the weather agent as a function tool.
-AIAgent agent = await aiProjectClient.CreateAIAgentAsync(
- name: MainName,
- model: deploymentName,
- instructions: MainInstructions,
- tools: [weatherAgent.AsAIFunction()]);
-
-// Invoke the agent and output the text result.
-AgentSession session = await agent.CreateSessionAsync();
-Console.WriteLine(await agent.RunAsync("What is the weather like in Amsterdam?", session));
-
-// Cleanup by agent name removes the agent versions created.
-await aiProjectClient.Agents.DeleteAgentAsync(agent.Name);
-await aiProjectClient.Agents.DeleteAgentAsync(weatherAgent.Name);
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/README.md
deleted file mode 100644
index 5da59b6edb..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/README.md
+++ /dev/null
@@ -1,49 +0,0 @@
-# Using AI Agents as Function Tools (Nested Agents)
-
-This sample demonstrates how to expose an AI agent as a function tool, enabling nested agent scenarios where one agent can invoke another agent as a tool.
-
-## What this sample demonstrates
-
-- Creating an AI agent that can be used as a function tool
-- Wrapping an agent as an AIFunction
-- Using nested agents where one agent calls another
-- Managing multiple agent instances
-- Managing agent lifecycle (creation and deletion)
-
-## Prerequisites
-
-Before you begin, ensure you have the following prerequisites:
-
-- .NET 10 SDK or later
-- Azure Foundry service endpoint and deployment configured
-- Azure CLI installed and authenticated (for Azure credential authentication)
-
-**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
-
-Set the following environment variables:
-
-```powershell
-$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint
-$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini
-```
-
-## Run the sample
-
-Navigate to the FoundryAgents sample directory and run:
-
-```powershell
-cd dotnet/samples/02-agents/FoundryAgents
-dotnet run --project .\FoundryAgents_Step11_AsFunctionTool
-```
-
-## Expected behavior
-
-The sample will:
-
-1. Create a "JokerAgent" that tells jokes
-2. Wrap the JokerAgent as a function tool
-3. Create a "CoordinatorAgent" that has the JokerAgent as a function tool
-4. Run the CoordinatorAgent with a prompt that triggers it to call the JokerAgent
-5. The CoordinatorAgent will invoke the JokerAgent as a function tool
-6. Clean up resources by deleting both agents
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step12_Middleware/FoundryAgents_Step12_Middleware.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step12_Middleware/FoundryAgents_Step12_Middleware.csproj
deleted file mode 100644
index 9f29a8d7e6..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step12_Middleware/FoundryAgents_Step12_Middleware.csproj
+++ /dev/null
@@ -1,21 +0,0 @@
-
-
-
- Exe
- net10.0
-
- enable
- enable
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step12_Middleware/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step12_Middleware/Program.cs
deleted file mode 100644
index 7ea6bc88a3..0000000000
--- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step12_Middleware/Program.cs
+++ /dev/null
@@ -1,223 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-// This sample shows multiple middleware layers working together with Azure Foundry Agents:
-// agent run (PII filtering and guardrails),
-// function invocation (logging and result overrides), and human-in-the-loop
-// approval workflows for sensitive function calls.
-
-using System.ComponentModel;
-using System.Text.RegularExpressions;
-using Azure.AI.Projects;
-using Azure.Identity;
-using Microsoft.Agents.AI;
-using Microsoft.Extensions.AI;
-
-// Get Azure AI Foundry configuration from environment variables
-string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
-string deploymentName = System.Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o";
-
-const string AssistantInstructions = "You are an AI assistant that helps people find information.";
-const string AssistantName = "InformationAssistant";
-
-// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents.
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential());
-
-[Description("Get the weather for a given location.")]
-static string GetWeather([Description("The location to get the weather for.")] string location)
- => $"The weather in {location} is cloudy with a high of 15°C.";
-
-[Description("The current datetime offset.")]
-static string GetDateTime()
- => DateTimeOffset.Now.ToString();
-
-AITool dateTimeTool = AIFunctionFactory.Create(GetDateTime, name: nameof(GetDateTime));
-AITool getWeatherTool = AIFunctionFactory.Create(GetWeather, name: nameof(GetWeather));
-
-// Define the agent you want to create. (Prompt Agent in this case)
-AIAgent originalAgent = await aiProjectClient.CreateAIAgentAsync(
- name: AssistantName,
- model: deploymentName,
- instructions: AssistantInstructions,
- tools: [getWeatherTool, dateTimeTool]);
-
-// Adding middleware to the agent level
-AIAgent middlewareEnabledAgent = originalAgent
- .AsBuilder()
- .Use(FunctionCallMiddleware)
- .Use(FunctionCallOverrideWeather)
- .Use(PIIMiddleware, null)
- .Use(GuardrailMiddleware, null)
- .Build();
-
-AgentSession session = await middlewareEnabledAgent.CreateSessionAsync();
-
-Console.WriteLine("\n\n=== Example 1: Wording Guardrail ===");
-AgentResponse guardRailedResponse = await middlewareEnabledAgent.RunAsync("Tell me something harmful.");
-Console.WriteLine($"Guard railed response: {guardRailedResponse}");
-
-Console.WriteLine("\n\n=== Example 2: PII detection ===");
-AgentResponse piiResponse = await middlewareEnabledAgent.RunAsync("My name is John Doe, call me at 123-456-7890 or email me at john@something.com");
-Console.WriteLine($"Pii filtered response: {piiResponse}");
-
-Console.WriteLine("\n\n=== Example 3: Agent function middleware ===");
-
-// Agent function middleware support is limited to agents that wraps a upstream ChatClientAgent or derived from it.
-
-AgentResponse functionCallResponse = await middlewareEnabledAgent.RunAsync("What's the current time and the weather in Seattle?", session);
-Console.WriteLine($"Function calling response: {functionCallResponse}");
-
-// Special per-request middleware agent.
-Console.WriteLine("\n\n=== Example 4: Middleware with human in the loop function approval ===");
-
-AIAgent humanInTheLoopAgent = await aiProjectClient.CreateAIAgentAsync(
- name: "HumanInTheLoopAgent",
- model: deploymentName,
- instructions: "You are an Human in the loop testing AI assistant that helps people find information.",
-
- // Adding a function with approval required
- tools: [new ApprovalRequiredAIFunction(AIFunctionFactory.Create(GetWeather, name: nameof(GetWeather)))]);
-
-// Using the ConsolePromptingApprovalMiddleware for a specific request to handle user approval during function calls.
-AgentResponse response = await humanInTheLoopAgent
- .AsBuilder()
- .Use(ConsolePromptingApprovalMiddleware, null)
- .Build()
- .RunAsync("What's the current time and the weather in Seattle?");
-
-Console.WriteLine($"HumanInTheLoopAgent agent middleware response: {response}");
-
-// Function invocation middleware that logs before and after function calls.
-async ValueTask