From 74975ede9f0fd25b074b2a70959773ba28e9dc50 Mon Sep 17 00:00:00 2001 From: alliscode Date: Mon, 2 Mar 2026 08:05:47 -0800 Subject: [PATCH 1/3] Migrate .NET samples to Foundry-first ProjectResponsesClient Switch all .NET samples from model: prefix on AgentReference (which sends 'placeholder' over the wire via MEAI adapter) to ChatOptions.ModelId pattern. Changes: - All 36 sample Program.cs files updated to use ChatClientAgentOptions with ChatOptions.ModelId = deploymentName instead of defaultAgent: new AgentReference(model:deploymentName) - AGENTS.md and SAMPLE_GUIDELINES.md updated with correct code patterns - dotnet format applied to all changed projects Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- dotnet/agent-framework-dotnet.slnx | 13 - .../01_hello_agent/01_hello_agent.csproj | 4 +- .../01-get-started/01_hello_agent/Program.cs | 37 ++- .../02_add_tools/02_add_tools.csproj | 4 +- .../01-get-started/02_add_tools/Program.cs | 35 ++- .../03_multi_turn/03_multi_turn.csproj | 4 +- .../01-get-started/03_multi_turn/Program.cs | 29 ++- .../01-get-started/04_memory/04_memory.csproj | 4 +- .../01-get-started/04_memory/Program.cs | 30 ++- .../05_first_workflow/Program.cs | 4 + .../06_host_your_agent.csproj | 5 +- .../06_host_your_agent/Program.cs | 35 ++- .../AgentOpenTelemetry.csproj | 4 +- .../02-agents/AgentOpenTelemetry/Program.cs | 27 ++- .../02-agents/AgentOpenTelemetry/README.md | 30 +-- .../AgentOpenTelemetry/start-demo.ps1 | 16 +- .../Agent_Step01_BasicSkills.csproj | 5 +- .../Agent_Step01_BasicSkills/Program.cs | 17 +- .../Agent_Step01_BasicSkills/README.md | 6 +- ...WithMemory_Step01_ChatHistoryMemory.csproj | 3 +- .../Program.cs | 19 +- ...ntWithMemory_Step02_MemoryUsingMem0.csproj | 4 +- .../Program.cs | 17 +- .../AgentWithRAG_Step01_BasicTextRAG.csproj | 3 +- .../Program.cs | 17 +- ...WithRAG_Step02_CustomVectorStoreRAG.csproj | 3 +- .../Program.cs | 17 +- .../README.md | 14 +- ...tWithRAG_Step03_CustomRAGDataSource.csproj | 4 
+- .../Program.cs | 17 +- ...p01_UsingFunctionToolsWithApprovals.csproj | 4 +- .../Program.cs | 25 +- .../Agent_Step02_StructuredOutput.csproj | 4 +- .../Agent_Step02_StructuredOutput/Program.cs | 46 ++-- .../Agent_Step02_StructuredOutput/README.md | 12 +- ...Agent_Step03_PersistedConversations.csproj | 4 +- .../Program.cs | 22 +- ...t_Step04_3rdPartyChatHistoryStorage.csproj | 4 +- .../Program.cs | 17 +- .../Agent_Step05_Observability.csproj | 4 +- .../Agent_Step05_Observability/Program.cs | 20 +- .../Agent_Step06_DependencyInjection.csproj | 4 +- .../Program.cs | 17 +- .../Agent_Step07_AsMcpTool.csproj | 5 +- .../Agents/Agent_Step07_AsMcpTool/Program.cs | 29 ++- .../Agents/Agent_Step07_AsMcpTool/README.md | 2 +- .../Agent_Step08_UsingImages.csproj | 5 +- .../Agent_Step08_UsingImages/Program.cs | 22 +- .../Agents/Agent_Step08_UsingImages/README.md | 10 +- .../Agent_Step09_AsFunctionTool.csproj | 5 +- .../Agent_Step09_AsFunctionTool/Program.cs | 49 ++-- ...undResponsesWithToolsAndPersistence.csproj | 5 +- .../Program.cs | 31 ++- .../README.md | 10 +- .../Agent_Step11_Middleware.csproj | 6 +- .../Agents/Agent_Step11_Middleware/Program.cs | 40 +++- .../Agents/Agent_Step11_Middleware/README.md | 6 +- .../Agent_Step12_Plugins.csproj | 4 +- .../Agents/Agent_Step12_Plugins/Program.cs | 30 ++- .../Agent_Step13_ChatReduction.csproj | 4 +- .../Agent_Step13_ChatReduction/Program.cs | 17 +- .../Agent_Step14_BackgroundResponses.csproj | 5 +- .../Program.cs | 21 +- .../README.md | 12 +- .../Agent_Step16_Declarative.csproj | 4 +- .../Agent_Step16_Declarative/Program.cs | 15 +- .../Agent_Step17_AdditionalAIContext.csproj | 4 +- .../Program.cs | 24 +- dotnet/samples/02-agents/Agents/README.md | 17 +- .../FoundryAgents_Step01.2_Running.csproj | 20 -- .../FoundryAgents_Step01.2_Running/Program.cs | 39 --- .../FoundryAgents_Step01.2_Running/README.md | 46 ---- ...Agents_Step02_MultiturnConversation.csproj | 20 -- .../Program.cs | 55 ----- .../README.md | 59 ----- 
...dryAgents_Step03_UsingFunctionTools.csproj | 20 -- .../Program.cs | 54 ----- .../README.md | 48 ---- ...p04_UsingFunctionToolsWithApprovals.csproj | 20 -- .../Program.cs | 65 ----- .../README.md | 51 ---- ...undryAgents_Step05_StructuredOutput.csproj | 20 -- .../Program.cs | 95 -------- .../README.md | 49 ---- ...gents_Step06_PersistedConversations.csproj | 20 -- .../Program.cs | 47 ---- .../README.md | 50 ---- .../FoundryAgents_Step07_Observability.csproj | 23 -- .../Program.cs | 55 ----- .../README.md | 51 ---- ...ryAgents_Step08_DependencyInjection.csproj | 23 -- .../Program.cs | 97 -------- .../README.md | 51 ---- ...Agents_Step09_UsingMcpClientAsTools.csproj | 23 -- .../Program.cs | 50 ---- .../README.md | 50 ---- .../Assets/walkway.jpg | Bin 37970 -> 0 bytes .../FoundryAgents_Step10_UsingImages.csproj | 26 -- .../Program.cs | 38 --- .../README.md | 53 ----- ...FoundryAgents_Step11_AsFunctionTool.csproj | 21 -- .../Program.cs | 50 ---- .../README.md | 49 ---- .../FoundryAgents_Step12_Middleware.csproj | 21 -- .../Program.cs | 223 ------------------ .../FoundryAgents_Step12_Middleware/README.md | 58 ----- .../FoundryAgents_Step13_Plugins.csproj | 22 -- .../FoundryAgents_Step13_Plugins/Program.cs | 142 ----------- .../FoundryAgents_Step13_Plugins/README.md | 49 ---- ...oundryAgents_Step14_CodeInterpreter.csproj | 6 +- .../Program.cs | 63 +++-- .../FoundryAgents_Step16_FileSearch.csproj | 7 +- .../Program.cs | 57 +++-- .../FoundryAgents_Step17_OpenAPITools.csproj | 6 +- .../Program.cs | 57 +++-- ...undryAgents_Step18_BingCustomSearch.csproj | 6 +- .../Program.cs | 60 ++--- .../FoundryAgents_Step19_SharePoint.csproj | 6 +- .../Program.cs | 62 ++--- ...oundryAgents_Step20_MicrosoftFabric.csproj | 6 +- .../Program.cs | 65 ++--- .../FoundryAgents_Step21_WebSearch.csproj | 6 +- .../FoundryAgents_Step21_WebSearch/Program.cs | 57 +++-- .../FoundryAgents_Step23_LocalMCP.csproj | 5 +- .../FoundryAgents_Step23_LocalMCP/Program.cs | 56 +++-- 
.../samples/02-agents/FoundryAgents/README.md | 66 ++---- .../Agents/FoundryAgent/FoundryAgent.csproj | 4 +- .../Agents/FoundryAgent/Program.cs | 57 ++--- dotnet/samples/AGENTS.md | 38 +-- dotnet/samples/SAMPLE_GUIDELINES.md | 121 ++++++++++ 130 files changed, 1030 insertions(+), 2726 deletions(-) delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/FoundryAgents_Step01.2_Running.csproj delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/Program.cs delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/README.md delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/FoundryAgents_Step02_MultiturnConversation.csproj delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/Program.cs delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/README.md delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/FoundryAgents_Step03_UsingFunctionTools.csproj delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/Program.cs delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/README.md delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/FoundryAgents_Step04_UsingFunctionToolsWithApprovals.csproj delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/Program.cs delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/README.md delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/FoundryAgents_Step05_StructuredOutput.csproj delete mode 100644 
dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/Program.cs delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/README.md delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/FoundryAgents_Step06_PersistedConversations.csproj delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/Program.cs delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/README.md delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/FoundryAgents_Step07_Observability.csproj delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/Program.cs delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/README.md delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/FoundryAgents_Step08_DependencyInjection.csproj delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/Program.cs delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/README.md delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/FoundryAgents_Step09_UsingMcpClientAsTools.csproj delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/Program.cs delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/README.md delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/Assets/walkway.jpg delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/FoundryAgents_Step10_UsingImages.csproj delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/Program.cs 
delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/README.md delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/FoundryAgents_Step11_AsFunctionTool.csproj delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/Program.cs delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/README.md delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step12_Middleware/FoundryAgents_Step12_Middleware.csproj delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step12_Middleware/Program.cs delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step12_Middleware/README.md delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step13_Plugins/FoundryAgents_Step13_Plugins.csproj delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step13_Plugins/Program.cs delete mode 100644 dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step13_Plugins/README.md create mode 100644 dotnet/samples/SAMPLE_GUIDELINES.md diff --git a/dotnet/agent-framework-dotnet.slnx b/dotnet/agent-framework-dotnet.slnx index b96b891b00..ec6da01736 100644 --- a/dotnet/agent-framework-dotnet.slnx +++ b/dotnet/agent-framework-dotnet.slnx @@ -124,19 +124,6 @@ - - - - - - - - - - - - - diff --git a/dotnet/samples/01-get-started/01_hello_agent/01_hello_agent.csproj b/dotnet/samples/01-get-started/01_hello_agent/01_hello_agent.csproj index b32de63906..fdce858e37 100644 --- a/dotnet/samples/01-get-started/01_hello_agent/01_hello_agent.csproj +++ b/dotnet/samples/01-get-started/01_hello_agent/01_hello_agent.csproj @@ -9,13 +9,13 @@ - + - + diff --git a/dotnet/samples/01-get-started/01_hello_agent/Program.cs b/dotnet/samples/01-get-started/01_hello_agent/Program.cs index e461f9ba75..64721b0210 100644 --- a/dotnet/samples/01-get-started/01_hello_agent/Program.cs +++ 
b/dotnet/samples/01-get-started/01_hello_agent/Program.cs @@ -1,29 +1,40 @@ // Copyright (c) Microsoft. All rights reserved. -// This sample shows how to create and use a simple AI agent with Azure OpenAI as the backend. +// This sample shows how to create and use a simple AI agent with Azure AI Foundry as the backend. -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; -using OpenAI.Chat; +using Microsoft.Extensions.AI; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") + ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; -// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. -// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid -// latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIAgent agent = new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetChatClient(deploymentName) - .AsAIAgent(instructions: "You are good at telling jokes.", name: "Joker"); +// +// Create a Foundry project Responses API client. +IChatClient chatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient(); +// Create the agent with model specified in chat options. +ChatClientAgent agent = new(chatClient, new ChatClientAgentOptions +{ + Name = "Joker", + ChatOptions = new() { ModelId = deploymentName, Instructions = "You are good at telling jokes." 
}, +}); +// + +// // Invoke the agent and output the text result. Console.WriteLine(await agent.RunAsync("Tell me a joke about a pirate.")); +// +// // Invoke the agent with streaming support. await foreach (var update in agent.RunStreamingAsync("Tell me a joke about a pirate.")) { Console.WriteLine(update); } +// diff --git a/dotnet/samples/01-get-started/02_add_tools/02_add_tools.csproj b/dotnet/samples/01-get-started/02_add_tools/02_add_tools.csproj index b32de63906..fdce858e37 100644 --- a/dotnet/samples/01-get-started/02_add_tools/02_add_tools.csproj +++ b/dotnet/samples/01-get-started/02_add_tools/02_add_tools.csproj @@ -9,13 +9,13 @@ - + - + diff --git a/dotnet/samples/01-get-started/02_add_tools/Program.cs b/dotnet/samples/01-get-started/02_add_tools/Program.cs index da0b638562..833152ab25 100644 --- a/dotnet/samples/01-get-started/02_add_tools/Program.cs +++ b/dotnet/samples/01-get-started/02_add_tools/Program.cs @@ -4,29 +4,43 @@ // It shows both non-streaming and streaming agent interactions using menu-related tools. using System.ComponentModel; -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Extensions.AI; -using OpenAI.Chat; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") + ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? 
"gpt-4o-mini"; +// [Description("Get the weather for a given location.")] static string GetWeather([Description("The location to get the weather for.")] string location) => $"The weather in {location} is cloudy with a high of 15°C."; +// -// Create the chat client and agent, and provide the function tool to the agent. +// +// Create a Foundry project Responses API client and agent with a function tool. // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIAgent agent = new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetChatClient(deploymentName) - .AsAIAgent(instructions: "You are a helpful assistant", tools: [AIFunctionFactory.Create(GetWeather)]); +IChatClient chatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient(); +ChatClientAgent agent = new(chatClient, new ChatClientAgentOptions +{ + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = "You are a helpful assistant", + Tools = [AIFunctionFactory.Create(GetWeather)] + }, +}); +// + +// // Non-streaming agent interaction with function tools. 
Console.WriteLine(await agent.RunAsync("What is the weather like in Amsterdam?")); @@ -35,3 +49,4 @@ static string GetWeather([Description("The location to get the weather for.")] s { Console.WriteLine(update); } +// diff --git a/dotnet/samples/01-get-started/03_multi_turn/03_multi_turn.csproj b/dotnet/samples/01-get-started/03_multi_turn/03_multi_turn.csproj index b32de63906..fdce858e37 100644 --- a/dotnet/samples/01-get-started/03_multi_turn/03_multi_turn.csproj +++ b/dotnet/samples/01-get-started/03_multi_turn/03_multi_turn.csproj @@ -9,13 +9,13 @@ - + - + diff --git a/dotnet/samples/01-get-started/03_multi_turn/Program.cs b/dotnet/samples/01-get-started/03_multi_turn/Program.cs index 5d49e806ed..f8f35496fc 100644 --- a/dotnet/samples/01-get-started/03_multi_turn/Program.cs +++ b/dotnet/samples/01-get-started/03_multi_turn/Program.cs @@ -2,23 +2,33 @@ // This sample shows how to create and use a simple AI agent with a multi-turn conversation. -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; -using OpenAI.Chat; +using Microsoft.Extensions.AI; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") + ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +// +// Create a Foundry project Responses API client and agent. // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. 
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIAgent agent = new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetChatClient(deploymentName) - .AsAIAgent(instructions: "You are good at telling jokes.", name: "Joker"); +IChatClient chatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient(); +ChatClientAgent agent = new(chatClient, new ChatClientAgentOptions +{ + Name = "Joker", + ChatOptions = new() { ModelId = deploymentName, Instructions = "You are good at telling jokes." }, +}); +// + +// // Invoke the agent with a multi-turn conversation, where the context is preserved in the session object. AgentSession session = await agent.CreateSessionAsync(); Console.WriteLine(await agent.RunAsync("Tell me a joke about a pirate.", session)); @@ -34,3 +44,4 @@ { Console.WriteLine(update); } +// diff --git a/dotnet/samples/01-get-started/04_memory/04_memory.csproj b/dotnet/samples/01-get-started/04_memory/04_memory.csproj index b32de63906..fdce858e37 100644 --- a/dotnet/samples/01-get-started/04_memory/04_memory.csproj +++ b/dotnet/samples/01-get-started/04_memory/04_memory.csproj @@ -9,13 +9,13 @@ - + - + diff --git a/dotnet/samples/01-get-started/04_memory/Program.cs b/dotnet/samples/01-get-started/04_memory/Program.cs index fa6940f5fd..9f5e29b7d7 100644 --- a/dotnet/samples/01-get-started/04_memory/Program.cs +++ b/dotnet/samples/01-get-started/04_memory/Program.cs @@ -8,23 +8,25 @@ using System.Text; using System.Text.Json; -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Extensions.AI; -using OpenAI.Chat; using SampleApp; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? 
throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") + ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +// +// Create a Foundry project Responses API chat client. // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -ChatClient chatClient = new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetChatClient(deploymentName); +IChatClient chatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient(); // Create the agent and provide a factory to add our custom memory component to // all sessions created by the agent. Here each new memory component will have its own @@ -33,13 +35,14 @@ // and preferably shared between multiple sessions used by the same user, ensure that the // factory reads the user id from the current context and scopes the memory component // and its storage to that user id. -AIAgent agent = chatClient.AsAIAgent(new ChatClientAgentOptions() +AIAgent agent = new ChatClientAgent(chatClient, new ChatClientAgentOptions() { - ChatOptions = new() { Instructions = "You are a friendly assistant. Always address the user by their name." }, - AIContextProviders = [new UserInfoMemory(chatClient.AsIChatClient())] + ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a friendly assistant. Always address the user by their name." 
}, + AIContextProviders = [new UserInfoMemory(chatClient)] }); +// -// Create a new session for the conversation. +// AgentSession session = await agent.CreateSessionAsync(); Console.WriteLine(">> Use session with blank memory\n"); @@ -80,7 +83,9 @@ // Invoke the agent and output the text result. // This time the agent should remember the user's name and use it in the response. Console.WriteLine(await agent.RunAsync("What is my name and age?", newSession)); +// +// namespace SampleApp { /// @@ -159,4 +164,5 @@ internal sealed class UserInfo public string? UserName { get; set; } public int? UserAge { get; set; } } + // } diff --git a/dotnet/samples/01-get-started/05_first_workflow/Program.cs b/dotnet/samples/01-get-started/05_first_workflow/Program.cs index af1dcb50d9..3364d12a59 100644 --- a/dotnet/samples/01-get-started/05_first_workflow/Program.cs +++ b/dotnet/samples/01-get-started/05_first_workflow/Program.cs @@ -19,6 +19,7 @@ public static class Program { private static async Task Main() { + // // Create the executors Func uppercaseFunc = s => s.ToUpperInvariant(); var uppercase = uppercaseFunc.BindAsExecutor("UppercaseExecutor"); @@ -29,7 +30,9 @@ private static async Task Main() WorkflowBuilder builder = new(uppercase); builder.AddEdge(uppercase, reverse).WithOutputFrom(reverse); var workflow = builder.Build(); + // + // // Execute the workflow with input data await using Run run = await InProcessExecution.RunAsync(workflow, "Hello, World!"); foreach (WorkflowEvent evt in run.NewEvents) @@ -39,6 +42,7 @@ private static async Task Main() Console.WriteLine($"{executorComplete.ExecutorId}: {executorComplete.Data}"); } } + // } } diff --git a/dotnet/samples/01-get-started/06_host_your_agent/06_host_your_agent.csproj b/dotnet/samples/01-get-started/06_host_your_agent/06_host_your_agent.csproj index 2f0efd7b3a..bdd486e136 100644 --- a/dotnet/samples/01-get-started/06_host_your_agent/06_host_your_agent.csproj +++ 
b/dotnet/samples/01-get-started/06_host_your_agent/06_host_your_agent.csproj @@ -21,11 +21,12 @@ - + + - + diff --git a/dotnet/samples/01-get-started/06_host_your_agent/Program.cs b/dotnet/samples/01-get-started/06_host_your_agent/Program.cs index 6012119b25..24c595923f 100644 --- a/dotnet/samples/01-get-started/06_host_your_agent/Program.cs +++ b/dotnet/samples/01-get-started/06_host_your_agent/Program.cs @@ -4,37 +4,45 @@ // // Prerequisites: // - Azure Functions Core Tools -// - Azure OpenAI resource +// - Azure AI Foundry project // // Environment variables: -// AZURE_OPENAI_ENDPOINT -// AZURE_OPENAI_DEPLOYMENT_NAME (defaults to "gpt-4o-mini") +// AZURE_AI_PROJECT_ENDPOINT +// AZURE_AI_MODEL_DEPLOYMENT_NAME (defaults to "gpt-4o-mini") // // Run with: func start // Then call: POST http://localhost:7071/api/agents/HostedAgent/run -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Agents.AI.Hosting.AzureFunctions; using Microsoft.Azure.Functions.Worker.Builder; +using Microsoft.Extensions.AI; using Microsoft.Extensions.Hosting; -using OpenAI.Chat; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") - ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") + ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +// // Set up an AI agent following the standard Microsoft Agent Framework pattern. // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. 
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIAgent agent = new AzureOpenAIClient(new Uri(endpoint), new DefaultAzureCredential()) - .GetChatClient(deploymentName) - .AsAIAgent( - instructions: "You are a helpful assistant hosted in Azure Functions.", - name: "HostedAgent"); +IChatClient chatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient(); +ChatClientAgent agent = new(chatClient, new ChatClientAgentOptions +{ + Name = "HostedAgent", + ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a helpful assistant hosted in Azure Functions." }, +}); +// + +// // Configure the function app to host the AI agent. // This will automatically generate HTTP API endpoints for the agent. using IHost app = FunctionsApplication @@ -43,3 +51,4 @@ .ConfigureDurableAgents(options => options.AddAIAgent(agent, timeToLive: TimeSpan.FromHours(1))) .Build(); app.Run(); +// diff --git a/dotnet/samples/02-agents/AgentOpenTelemetry/AgentOpenTelemetry.csproj b/dotnet/samples/02-agents/AgentOpenTelemetry/AgentOpenTelemetry.csproj index e194fec9c2..4024ab687d 100644 --- a/dotnet/samples/02-agents/AgentOpenTelemetry/AgentOpenTelemetry.csproj +++ b/dotnet/samples/02-agents/AgentOpenTelemetry/AgentOpenTelemetry.csproj @@ -9,13 +9,12 @@ - + - @@ -25,7 +24,6 @@ - diff --git a/dotnet/samples/02-agents/AgentOpenTelemetry/Program.cs b/dotnet/samples/02-agents/AgentOpenTelemetry/Program.cs index 69d71e7b88..784b3a0031 100644 --- a/dotnet/samples/02-agents/AgentOpenTelemetry/Program.cs +++ b/dotnet/samples/02-agents/AgentOpenTelemetry/Program.cs @@ -3,7 +3,7 @@ using System.ComponentModel; using System.Diagnostics; using System.Diagnostics.Metrics; -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using 
Azure.Monitor.OpenTelemetry.Exporter; using Microsoft.Agents.AI; @@ -97,8 +97,8 @@ You can view the telemetry data in the Aspire Dashboard. Type your message and press Enter. Type 'exit' or empty message to quit. """); -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT environment variable is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT environment variable is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; // Log application startup appLogger.LogInformation("OpenTelemetry Aspire Demo application started"); @@ -113,9 +113,10 @@ static async Task GetWeatherAsync([Description("The location to get the // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. 
-using var instrumentedChatClient = new AzureOpenAIClient(new Uri(endpoint), new DefaultAzureCredential()) - .GetChatClient(deploymentName) - .AsIChatClient() // Converts a native OpenAI SDK ChatClient into a Microsoft.Extensions.AI.IChatClient +using var instrumentedChatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() .AsBuilder() .UseFunctionInvocation() .UseOpenTelemetry(sourceName: SourceName, configure: (cfg) => cfg.EnableSensitiveData = true) // enable telemetry at the chat client level @@ -123,10 +124,16 @@ static async Task GetWeatherAsync([Description("The location to get the appLogger.LogInformation("Creating Agent with OpenTelemetry instrumentation"); // Create the agent with the instrumented chat client -var agent = new ChatClientAgent(instrumentedChatClient, - name: "OpenTelemetryDemoAgent", - instructions: "You are a helpful assistant that provides concise and informative responses.", - tools: [AIFunctionFactory.Create(GetWeatherAsync)]) +var agent = new ChatClientAgent(instrumentedChatClient, new ChatClientAgentOptions +{ + Name = "OpenTelemetryDemoAgent", + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = "You are a helpful assistant that provides concise and informative responses.", + Tools = [AIFunctionFactory.Create(GetWeatherAsync)] + }, +}) .AsBuilder() .UseOpenTelemetry(SourceName, configure: (cfg) => cfg.EnableSensitiveData = true) // enable telemetry at the agent level .Build(); diff --git a/dotnet/samples/02-agents/AgentOpenTelemetry/README.md b/dotnet/samples/02-agents/AgentOpenTelemetry/README.md index 229d37dca6..de6e2d1248 100644 --- a/dotnet/samples/02-agents/AgentOpenTelemetry/README.md +++ b/dotnet/samples/02-agents/AgentOpenTelemetry/README.md @@ -1,6 +1,6 @@ -# OpenTelemetry Aspire Demo with Azure OpenAI +# OpenTelemetry Aspire Demo with Azure AI Foundry -This demo showcases the integration of OpenTelemetry with the 
Microsoft Agent Framework using Azure OpenAI and .NET Aspire Dashboard for telemetry visualization. +This demo showcases the integration of OpenTelemetry with the Microsoft Agent Framework using Azure AI Foundry and .NET Aspire Dashboard for telemetry visualization. ## Overview @@ -15,7 +15,7 @@ The demo consists of three main components: ```mermaid graph TD A["Console App
(Interactive)"] --> B["Agent Framework
with OpenTel
Instrumentation"] - B --> C["Azure OpenAI
Service"] + B --> C["Azure AI Foundry
Service"] A --> D["Aspire Dashboard
(OpenTelemetry Visualization)"] B --> D ``` @@ -23,21 +23,21 @@ graph TD ## Prerequisites - .NET 10 SDK or later -- Azure OpenAI service endpoint and deployment configured +- Azure AI Foundry project endpoint and model deployment configured - Azure CLI installed and authenticated (for Azure credential authentication) - Docker installed (for running Aspire Dashboard) - [Optional] Application Insights and Grafana ## Configuration -### Azure OpenAI Setup +### Azure AI Foundry Setup Set the following environment variables: ```powershell -$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" -$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini +$env:AZURE_AI_PROJECT_ENDPOINT="https://your-project.services.ai.azure.com" +$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini ``` -**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure OpenAI resource. +**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure AI Foundry project. 
### [Optional] Application Insights Setup Set the following environment variables: @@ -56,7 +56,7 @@ The easiest way to run the demo is using the provided PowerShell script: ``` This script will automatically: -- ✅ Check prerequisites (Docker, Azure OpenAI configuration) +- ✅ Check prerequisites (Docker, Azure AI Foundry configuration) - 🔨 Build the console application - 🐳 Start the Aspire Dashboard via Docker (with anonymous access) - ⏳ Wait for dashboard to be ready (polls port until listening) @@ -158,7 +158,7 @@ Open dashboard in Azure portal: - **Telemetry correlation** across the entire request flow ### Agent Framework Features -- **ChatClientAgent** with Azure OpenAI integration +- **ChatClientAgent** with Azure AI Foundry integration - **OpenTelemetry wrapper** using `.WithOpenTelemetry()` - **Conversation threading** for multi-turn conversations - **Error handling** with telemetry correlation @@ -182,7 +182,7 @@ Complete demo startup script that handles everything automatically. ``` **Features:** -- **Automatic configuration detection** - Checks for Azure OpenAI configuration +- **Automatic configuration detection** - Checks for Azure AI Foundry configuration - **Project building** - Automatically builds projects before running - **Error handling** - Provides clear error messages if something goes wrong - **Multi-window support** - Opens dashboard in separate window for better experience @@ -201,10 +201,10 @@ If you encounter port binding errors, try: 2. 
Or kill any processes using the conflicting ports ### Authentication Issues -- Ensure your Azure OpenAI endpoint is correctly configured +- Ensure your Azure AI Foundry endpoint is correctly configured - Check that the environment variables are set in the correct terminal session -- Verify you're logged in with Azure CLI (`az login`) and have access to the Azure OpenAI resource -- Ensure the Azure OpenAI deployment name matches your actual deployment +- Verify you're logged in with Azure CLI (`az login`) and have access to the Azure AI Foundry project +- Ensure the model deployment name matches your actual deployment ### Build Issues - Ensure you're using .NET 10.0 SDK @@ -216,7 +216,7 @@ If you encounter port binding errors, try: ``` AgentOpenTelemetry/ ├── AgentOpenTelemetry.csproj # Project file with dependencies -├── Program.cs # Main application with Azure OpenAI agent integration +├── Program.cs # Main application with Azure AI Foundry agent integration ├── start-demo.ps1 # PowerShell script to start the demo └── README.md # This file ``` diff --git a/dotnet/samples/02-agents/AgentOpenTelemetry/start-demo.ps1 b/dotnet/samples/02-agents/AgentOpenTelemetry/start-demo.ps1 index 7af1c9d8ae..e5ffb4b088 100644 --- a/dotnet/samples/02-agents/AgentOpenTelemetry/start-demo.ps1 +++ b/dotnet/samples/02-agents/AgentOpenTelemetry/start-demo.ps1 @@ -21,18 +21,18 @@ try { exit 1 } -# Check for Azure OpenAI configuration -if ($env:AZURE_OPENAI_ENDPOINT) { - Write-Host "Found Azure OpenAI endpoint: $($env:AZURE_OPENAI_ENDPOINT)" -ForegroundColor Green - if ($env:AZURE_OPENAI_DEPLOYMENT_NAME) { - Write-Host "Using deployment: $($env:AZURE_OPENAI_DEPLOYMENT_NAME)" -ForegroundColor Green +# Check for Azure AI Foundry configuration +if ($env:AZURE_AI_PROJECT_ENDPOINT) { + Write-Host "Found Azure AI Foundry endpoint: $($env:AZURE_AI_PROJECT_ENDPOINT)" -ForegroundColor Green + if ($env:AZURE_AI_MODEL_DEPLOYMENT_NAME) { + Write-Host "Using deployment: 
$($env:AZURE_AI_MODEL_DEPLOYMENT_NAME)" -ForegroundColor Green } else { Write-Host "Using default deployment: gpt-4o-mini" -ForegroundColor Cyan } } else { - Write-Host "Warning: AZURE_OPENAI_ENDPOINT not found!" -ForegroundColor Yellow - Write-Host "Please set the AZURE_OPENAI_ENDPOINT environment variable" -ForegroundColor Yellow - Write-Host "Example: `$env:AZURE_OPENAI_ENDPOINT='https://your-resource.openai.azure.com/'" -ForegroundColor Yellow + Write-Host "Warning: AZURE_AI_PROJECT_ENDPOINT not found!" -ForegroundColor Yellow + Write-Host "Please set the AZURE_AI_PROJECT_ENDPOINT environment variable" -ForegroundColor Yellow + Write-Host "Example: `$env:AZURE_AI_PROJECT_ENDPOINT='https://your-project.services.ai.azure.com'" -ForegroundColor Yellow Write-Host "" } diff --git a/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/Agent_Step01_BasicSkills.csproj b/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/Agent_Step01_BasicSkills.csproj index 2a503bbfb2..15c7e85504 100644 --- a/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/Agent_Step01_BasicSkills.csproj +++ b/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/Agent_Step01_BasicSkills.csproj @@ -10,12 +10,13 @@ - + + - + diff --git a/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/Program.cs b/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/Program.cs index 290c3f9b6b..2b686a5fa9 100644 --- a/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/Program.cs +++ b/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/Program.cs @@ -7,28 +7,31 @@ // This sample includes the expense-report skill: // - Policy-based expense filing with references and assets -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; -using OpenAI.Responses; +using Microsoft.Extensions.AI; // --- Configuration --- -string endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") - 
?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -string deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") + ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; // --- Skills Provider --- // Discovers skills from the 'skills' directory and makes them available to the agent var skillsProvider = new FileAgentSkillsProvider(skillPath: Path.Combine(AppContext.BaseDirectory, "skills")); // --- Agent Setup --- -AIAgent agent = new AzureOpenAIClient(new Uri(endpoint), new DefaultAzureCredential()) - .GetResponsesClient(deploymentName) +AIAgent agent = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() .AsAIAgent(new ChatClientAgentOptions { Name = "SkillsAgent", ChatOptions = new() { + ModelId = deploymentName, Instructions = "You are a helpful assistant.", }, AIContextProviders = [skillsProvider], diff --git a/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/README.md b/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/README.md index 78099fa8a5..d735f9a419 100644 --- a/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/README.md +++ b/dotnet/samples/02-agents/AgentSkills/Agent_Step01_BasicSkills/README.md @@ -36,13 +36,13 @@ Agent_Step01_BasicSkills/ ### Prerequisites - .NET 10.0 SDK -- Azure OpenAI endpoint with a deployed model +- Azure AI Foundry project endpoint with a deployed model ### Setup 1. 
Set environment variables: ```bash - export AZURE_OPENAI_ENDPOINT="https://your-endpoint.openai.azure.com/" - export AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o-mini" + export AZURE_AI_PROJECT_ENDPOINT="https://your-project.services.ai.azure.com" + export AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" ``` 2. Run the sample: diff --git a/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step01_ChatHistoryMemory/AgentWithMemory_Step01_ChatHistoryMemory.csproj b/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step01_ChatHistoryMemory/AgentWithMemory_Step01_ChatHistoryMemory.csproj index 860089b621..1d2fb75ebc 100644 --- a/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step01_ChatHistoryMemory/AgentWithMemory_Step01_ChatHistoryMemory.csproj +++ b/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step01_ChatHistoryMemory/AgentWithMemory_Step01_ChatHistoryMemory.csproj @@ -10,13 +10,14 @@ + - + diff --git a/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step01_ChatHistoryMemory/Program.cs b/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step01_ChatHistoryMemory/Program.cs index ff4628ef7a..b6da6b951f 100644 --- a/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step01_ChatHistoryMemory/Program.cs +++ b/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step01_ChatHistoryMemory/Program.cs @@ -4,15 +4,16 @@ // It can then use the chat history from prior conversations to inform responses in new conversations. using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.InMemory; -using OpenAI.Chat; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? 
"gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var embeddingEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); var embeddingDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME") ?? "text-embedding-3-large"; // Create a vector store to store the chat messages in. @@ -23,19 +24,19 @@ // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. - EmbeddingGenerator = new AzureOpenAIClient(new Uri(endpoint), new DefaultAzureCredential()) + EmbeddingGenerator = new AzureOpenAIClient(new Uri(embeddingEndpoint), new DefaultAzureCredential()) .GetEmbeddingClient(embeddingDeploymentName) .AsIEmbeddingGenerator() }); // Create the agent and add the ChatHistoryMemoryProvider to store chat messages in the vector store. -AIAgent agent = new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetChatClient(deploymentName) +AIAgent agent = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() .AsAIAgent(new ChatClientAgentOptions { - ChatOptions = new() { Instructions = "You are good at telling jokes." }, + ChatOptions = new() { ModelId = deploymentName, Instructions = "You are good at telling jokes." 
}, Name = "Joker", AIContextProviders = [new ChatHistoryMemoryProvider( vectorStore, diff --git a/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step02_MemoryUsingMem0/AgentWithMemory_Step02_MemoryUsingMem0.csproj b/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step02_MemoryUsingMem0/AgentWithMemory_Step02_MemoryUsingMem0.csproj index 1e0863d66f..8dda238192 100644 --- a/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step02_MemoryUsingMem0/AgentWithMemory_Step02_MemoryUsingMem0.csproj +++ b/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step02_MemoryUsingMem0/AgentWithMemory_Step02_MemoryUsingMem0.csproj @@ -9,13 +9,13 @@ - + - + diff --git a/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step02_MemoryUsingMem0/Program.cs b/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step02_MemoryUsingMem0/Program.cs index f1842eb634..4db73dc8c1 100644 --- a/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step02_MemoryUsingMem0/Program.cs +++ b/dotnet/samples/02-agents/AgentWithMemory/AgentWithMemory_Step02_MemoryUsingMem0/Program.cs @@ -6,15 +6,14 @@ using System.Net.Http.Headers; using System.Text.Json; -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Agents.AI.Mem0; using Microsoft.Extensions.AI; -using OpenAI.Chat; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; var mem0ServiceUri = Environment.GetEnvironmentVariable("MEM0_ENDPOINT") ?? 
throw new InvalidOperationException("MEM0_ENDPOINT is not set."); var mem0ApiKey = Environment.GetEnvironmentVariable("MEM0_API_KEY") ?? throw new InvalidOperationException("MEM0_API_KEY is not set."); @@ -27,13 +26,13 @@ // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIAgent agent = new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetChatClient(deploymentName) +AIAgent agent = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() .AsAIAgent(new ChatClientAgentOptions() { - ChatOptions = new() { Instructions = "You are a friendly travel assistant. Use known memories about the user when responding, and do not invent details." }, + ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a friendly travel assistant. Use known memories about the user when responding, and do not invent details." }, // The stateInitializer can be used to customize the Mem0 scope per session and it will be called each time a session // is encountered by the Mem0Provider that does not already have Mem0Provider state stored on the session. 
// If each session should have its own Mem0 scope, you can create a new id per session via the stateInitializer, e.g.: diff --git a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step01_BasicTextRAG/AgentWithRAG_Step01_BasicTextRAG.csproj b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step01_BasicTextRAG/AgentWithRAG_Step01_BasicTextRAG.csproj index 860089b621..1d2fb75ebc 100644 --- a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step01_BasicTextRAG/AgentWithRAG_Step01_BasicTextRAG.csproj +++ b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step01_BasicTextRAG/AgentWithRAG_Step01_BasicTextRAG.csproj @@ -10,13 +10,14 @@ + - + diff --git a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step01_BasicTextRAG/Program.cs b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step01_BasicTextRAG/Program.cs index c04601d940..1d14f7ce3e 100644 --- a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step01_BasicTextRAG/Program.cs +++ b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step01_BasicTextRAG/Program.cs @@ -6,23 +6,24 @@ // The TextSearchStore is a sample store implementation that hardcodes a storage schema and uses the vector store to store and retrieve documents. using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Agents.AI.Samples; using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.InMemory; -using OpenAI.Chat; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? 
throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var embeddingEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); var embeddingDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME") ?? "text-embedding-3-large"; // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. AzureOpenAIClient azureOpenAIClient = new( - new Uri(endpoint), + new Uri(embeddingEndpoint), new DefaultAzureCredential()); // Create an In-Memory vector store that uses the Azure OpenAI embedding model to generate embeddings. @@ -60,11 +61,13 @@ }; // Create the AI agent with the TextSearchProvider as the AI context provider. -AIAgent agent = azureOpenAIClient - .GetChatClient(deploymentName) +AIAgent agent = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() .AsAIAgent(new ChatClientAgentOptions { - ChatOptions = new() { Instructions = "You are a helpful support specialist for Contoso Outdoors. Answer questions using the provided context and cite the source document when available." }, + ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a helpful support specialist for Contoso Outdoors. Answer questions using the provided context and cite the source document when available." 
}, AIContextProviders = [new TextSearchProvider(SearchAdapter, textSearchOptions)], // Since we are using ChatCompletion which stores chat history locally, we can also add a message filter // that removes messages produced by the TextSearchProvider before they are added to the chat history, so that diff --git a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/AgentWithRAG_Step02_CustomVectorStoreRAG.csproj b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/AgentWithRAG_Step02_CustomVectorStoreRAG.csproj index 33029395dd..e2fc3ac0fb 100644 --- a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/AgentWithRAG_Step02_CustomVectorStoreRAG.csproj +++ b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/AgentWithRAG_Step02_CustomVectorStoreRAG.csproj @@ -10,13 +10,14 @@ + - + diff --git a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/Program.cs b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/Program.cs index 0c299a1445..8410083c07 100644 --- a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/Program.cs +++ b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/Program.cs @@ -5,16 +5,17 @@ // The TextSearchProvider runs a search against the vector store before each model invocation and injects the results into the model context. using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.Qdrant; -using OpenAI.Chat; using Qdrant.Client; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? 
"gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var embeddingEndpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); var embeddingDeploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME") ?? "text-embedding-3-large"; var afOverviewUrl = "https://github.com/MicrosoftDocs/semantic-kernel-docs/blob/main/agent-framework/overview/agent-framework-overview.md"; var afMigrationUrl = "https://raw.githubusercontent.com/MicrosoftDocs/semantic-kernel-docs/refs/heads/main/agent-framework/migration-guide/from-semantic-kernel/index.md"; @@ -23,7 +24,7 @@ // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. AzureOpenAIClient azureOpenAIClient = new( - new Uri(endpoint), + new Uri(embeddingEndpoint), new DefaultAzureCredential()); // Create a Qdrant vector store that uses the Azure OpenAI embedding model to generate embeddings. @@ -69,11 +70,13 @@ }; // Create the AI agent with the TextSearchProvider as the AI context provider. -AIAgent agent = azureOpenAIClient - .GetChatClient(deploymentName) +AIAgent agent = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() .AsAIAgent(new ChatClientAgentOptions { - ChatOptions = new() { Instructions = "You are a helpful support specialist for the Microsoft Agent Framework. Answer questions using the provided context and cite the source document when available. Keep responses brief." 
}, + ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a helpful support specialist for the Microsoft Agent Framework. Answer questions using the provided context and cite the source document when available. Keep responses brief." }, AIContextProviders = [new TextSearchProvider(SearchAdapter, textSearchOptions)], // Configure a filter on the InMemoryChatHistoryProvider so that we don't persist the messages produced by the TextSearchProvider in chat history. // The default is to persist all messages except those that came from chat history in the first place. diff --git a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/README.md b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/README.md index 131adde82b..15c243ddc7 100644 --- a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/README.md +++ b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step02_CustomVectorStoreRAG/README.md @@ -7,23 +7,21 @@ This sample uses Qdrant for the vector store, but this can easily be swapped out ## Prerequisites - .NET 10 SDK or later -- Azure OpenAI service endpoint -- Both a chat completion and embedding deployment configured in the Azure OpenAI resource +- Azure AI Foundry project endpoint +- An embedding deployment configured in an Azure OpenAI resource - Azure CLI installed and authenticated (for Azure credential authentication) -- User has the `Cognitive Services OpenAI Contributor` role for the Azure OpenAI resource. - An existing Qdrant instance. You can use a managed service or run a local instance using Docker, but the sample assumes the instance is running locally. -**Note**: These samples use Azure OpenAI models. For more information, see [how to deploy Azure OpenAI models with Azure AI Foundry](https://learn.microsoft.com/en-us/azure/ai-foundry/how-to/deploy-models-openai). - -**Note**: These samples use Azure CLI credentials for authentication. 
Make sure you're logged in with `az login` and have access to the Azure OpenAI resource and have the `Cognitive Services OpenAI Contributor` role. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). +**Note**: These samples use Azure AI Foundry for agent chat and Azure OpenAI for embeddings. Make sure you're logged in with `az login` and have access to both resources. ## Running the sample from the console Set the following environment variables: ```powershell -$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" # Replace with your Azure OpenAI resource endpoint -$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini +$env:AZURE_AI_PROJECT_ENDPOINT="https://your-project.services.ai.azure.com" # Azure AI Foundry project endpoint +$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini +$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" # Azure OpenAI endpoint for embeddings $env:AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME="text-embedding-3-large" # Optional, defaults to text-embedding-3-large ``` diff --git a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step03_CustomRAGDataSource/AgentWithRAG_Step03_CustomRAGDataSource.csproj b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step03_CustomRAGDataSource/AgentWithRAG_Step03_CustomRAGDataSource.csproj index 0f9de7c359..ede4e4ca18 100644 --- a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step03_CustomRAGDataSource/AgentWithRAG_Step03_CustomRAGDataSource.csproj +++ b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step03_CustomRAGDataSource/AgentWithRAG_Step03_CustomRAGDataSource.csproj @@ -9,13 +9,13 @@ - + - + diff --git a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step03_CustomRAGDataSource/Program.cs b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step03_CustomRAGDataSource/Program.cs index d4e3a40756..6aed669feb 100644 
--- a/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step03_CustomRAGDataSource/Program.cs +++ b/dotnet/samples/02-agents/AgentWithRAG/AgentWithRAG_Step03_CustomRAGDataSource/Program.cs @@ -6,14 +6,13 @@ // The provider invokes the custom search function // before each model invocation and injects the results into the model context. -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Extensions.AI; -using OpenAI.Chat; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; TextSearchProviderOptions textSearchOptions = new() { @@ -25,13 +24,13 @@ // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIAgent agent = new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetChatClient(deploymentName) +AIAgent agent = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() .AsAIAgent(new ChatClientAgentOptions { - ChatOptions = new() { Instructions = "You are a helpful support specialist for Contoso Outdoors. Answer questions using the provided context and cite the source document when available." 
}, + ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a helpful support specialist for Contoso Outdoors. Answer questions using the provided context and cite the source document when available." }, AIContextProviders = [new TextSearchProvider(MockSearchAsync, textSearchOptions)] }); diff --git a/dotnet/samples/02-agents/Agents/Agent_Step01_UsingFunctionToolsWithApprovals/Agent_Step01_UsingFunctionToolsWithApprovals.csproj b/dotnet/samples/02-agents/Agents/Agent_Step01_UsingFunctionToolsWithApprovals/Agent_Step01_UsingFunctionToolsWithApprovals.csproj index 0f9de7c359..ede4e4ca18 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step01_UsingFunctionToolsWithApprovals/Agent_Step01_UsingFunctionToolsWithApprovals.csproj +++ b/dotnet/samples/02-agents/Agents/Agent_Step01_UsingFunctionToolsWithApprovals/Agent_Step01_UsingFunctionToolsWithApprovals.csproj @@ -9,13 +9,13 @@ - + - + diff --git a/dotnet/samples/02-agents/Agents/Agent_Step01_UsingFunctionToolsWithApprovals/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step01_UsingFunctionToolsWithApprovals/Program.cs index 5bdfc9421c..6bb9bd0dbd 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step01_UsingFunctionToolsWithApprovals/Program.cs +++ b/dotnet/samples/02-agents/Agents/Agent_Step01_UsingFunctionToolsWithApprovals/Program.cs @@ -6,15 +6,14 @@ // while the agent is waiting for user input. using System.ComponentModel; -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Extensions.AI; -using OpenAI.Chat; using ChatMessage = Microsoft.Extensions.AI.ChatMessage; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? 
throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; // Create a sample function tool that the agent can use. [Description("Get the weather for a given location.")] @@ -26,11 +25,19 @@ static string GetWeather([Description("The location to get the weather for.")] s // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIAgent agent = new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetChatClient(deploymentName) - .AsAIAgent(instructions: "You are a helpful assistant", tools: [new ApprovalRequiredAIFunction(AIFunctionFactory.Create(GetWeather))]); +AIAgent agent = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() + .AsAIAgent(new ChatClientAgentOptions + { + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = "You are a helpful assistant", + Tools = [new ApprovalRequiredAIFunction(AIFunctionFactory.Create(GetWeather))] + }, + }); // Call the agent and check if there are any function approval requests to handle. // For simplicity, we are assuming here that only function approvals are pending. 
diff --git a/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/Agent_Step02_StructuredOutput.csproj b/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/Agent_Step02_StructuredOutput.csproj index 0f9de7c359..ede4e4ca18 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/Agent_Step02_StructuredOutput.csproj +++ b/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/Agent_Step02_StructuredOutput.csproj @@ -9,13 +9,13 @@ - + - + diff --git a/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/Program.cs index 7e74315e7d..b16864e58a 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/Program.cs +++ b/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/Program.cs @@ -5,25 +5,24 @@ using System.ComponentModel; using System.Text.Json; using System.Text.Json.Serialization; -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Extensions.AI; -using OpenAI.Chat; using SampleApp; using ChatMessage = Microsoft.Extensions.AI.ChatMessage; -string endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -string deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; // Create chat client to be used by chat client agents. // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. 
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -ChatClient chatClient = new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetChatClient(deploymentName); +IChatClient chatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient(); // Demonstrates how to work with structured output via ResponseFormat with the non-generic RunAsync method. // This approach is useful when: @@ -47,7 +46,7 @@ // the text output from the agent into structured data using a chat client. await UseStructuredOutputWithMiddlewareAsync(chatClient); -static async Task UseStructuredOutputWithResponseFormatAsync(ChatClient chatClient) +async Task UseStructuredOutputWithResponseFormatAsync(IChatClient chatClient) { Console.WriteLine("=== Structured Output with ResponseFormat ==="); @@ -57,9 +56,10 @@ static async Task UseStructuredOutputWithResponseFormatAsync(ChatClient chatClie Name = "HelpfulAssistant", ChatOptions = new() { + ModelId = deploymentName, Instructions = "You are a helpful assistant.", // Specify CityInfo as the type parameter of ForJsonSchema to indicate the expected structured output from the agent. 
- ResponseFormat = Microsoft.Extensions.AI.ChatResponseFormat.ForJsonSchema() + ResponseFormat = ChatResponseFormat.ForJsonSchema() } }); @@ -81,12 +81,16 @@ static async Task UseStructuredOutputWithResponseFormatAsync(ChatClient chatClie Console.WriteLine(); } -static async Task UseStructuredOutputWithRunAsync(ChatClient chatClient) +async Task UseStructuredOutputWithRunAsync(IChatClient chatClient) { Console.WriteLine("=== Structured Output with RunAsync ==="); // Create the agent - AIAgent agent = chatClient.AsAIAgent(name: "HelpfulAssistant", instructions: "You are a helpful assistant."); + AIAgent agent = chatClient.AsAIAgent(new ChatClientAgentOptions + { + Name = "HelpfulAssistant", + ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a helpful assistant." }, + }); // Set CityInfo as the type parameter of RunAsync method to specify the expected structured output from the agent and invoke it with some unstructured input. AgentResponse response = await agent.RunAsync("Provide information about the capital of France."); @@ -99,7 +103,7 @@ static async Task UseStructuredOutputWithRunAsync(ChatClient chatClient) Console.WriteLine(); } -static async Task UseStructuredOutputWithRunStreamingAsync(ChatClient chatClient) +async Task UseStructuredOutputWithRunStreamingAsync(IChatClient chatClient) { Console.WriteLine("=== Structured Output with RunStreamingAsync ==="); @@ -109,9 +113,10 @@ static async Task UseStructuredOutputWithRunStreamingAsync(ChatClient chatClient Name = "HelpfulAssistant", ChatOptions = new() { + ModelId = deploymentName, Instructions = "You are a helpful assistant.", // Specify CityInfo as the type parameter of ForJsonSchema to indicate the expected structured output from the agent. 
- ResponseFormat = Microsoft.Extensions.AI.ChatResponseFormat.ForJsonSchema() + ResponseFormat = ChatResponseFormat.ForJsonSchema() } }); @@ -129,15 +134,16 @@ static async Task UseStructuredOutputWithRunStreamingAsync(ChatClient chatClient Console.WriteLine(); } -static async Task UseStructuredOutputWithMiddlewareAsync(ChatClient chatClient) +async Task UseStructuredOutputWithMiddlewareAsync(IChatClient chatClient) { Console.WriteLine("=== Structured Output with UseStructuredOutput Middleware ==="); - // Create chat client that will transform the agent text response into structured output. - IChatClient meaiChatClient = chatClient.AsIChatClient(); - // Create the agent - AIAgent agent = meaiChatClient.AsAIAgent(name: "HelpfulAssistant", instructions: "You are a helpful assistant."); + AIAgent agent = chatClient.AsAIAgent(new ChatClientAgentOptions + { + Name = "HelpfulAssistant", + ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a helpful assistant." }, + }); // Add structured output middleware via UseStructuredOutput method to add structured output support to the agent. // This middleware transforms the agent's text response into structured data using a chat client. 
@@ -145,7 +151,7 @@ static async Task UseStructuredOutputWithMiddlewareAsync(ChatClient chatClient) // from the AgentRunOptions to emulate an agent that doesn't support structured output natively agent = agent .AsBuilder() - .UseStructuredOutput(meaiChatClient) + .UseStructuredOutput(chatClient) .Use(ResponseFormatRemovalMiddleware, null) .Build(); diff --git a/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/README.md b/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/README.md index 5652fe9b0a..2d7f899337 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/README.md +++ b/dotnet/samples/02-agents/Agents/Agent_Step02_StructuredOutput/README.md @@ -14,21 +14,21 @@ This sample demonstrates how to configure ChatClientAgent to produce structured Before you begin, ensure you have the following prerequisites: - .NET 10 SDK or later -- Azure OpenAI service endpoint and deployment configured +- Azure AI Foundry service endpoint and deployment configured - Azure CLI installed and authenticated (for Azure credential authentication) -- User has the `Cognitive Services OpenAI Contributor` role for the Azure OpenAI resource +- User has the `Cognitive Services OpenAI Contributor` role for the Azure AI Foundry resource -**Note**: This sample uses Azure OpenAI models. For more information, see [how to deploy Azure OpenAI models with Azure AI Foundry](https://learn.microsoft.com/en-us/azure/ai-foundry/how-to/deploy-models-openai). +**Note**: This sample uses Azure AI Foundry models. For more information, see [how to deploy Azure AI Foundry models with Azure AI Foundry](https://learn.microsoft.com/en-us/azure/ai-foundry/how-to/deploy-models-openai). -**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure OpenAI resource and have the `Cognitive Services OpenAI Contributor` role. 
For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). +**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure AI Foundry resource and have the `Cognitive Services OpenAI Contributor` role. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). ## Environment Variables Set the following environment variables: ```powershell -$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" # Replace with your Azure OpenAI resource endpoint -$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini +$env:AZURE_AI_PROJECT_ENDPOINT="https://your-project.services.ai.azure.com" # Replace with your Azure AI Foundry project endpoint +$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini ``` ## Run the sample diff --git a/dotnet/samples/02-agents/Agents/Agent_Step03_PersistedConversations/Agent_Step03_PersistedConversations.csproj b/dotnet/samples/02-agents/Agents/Agent_Step03_PersistedConversations/Agent_Step03_PersistedConversations.csproj index 0f9de7c359..ede4e4ca18 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step03_PersistedConversations/Agent_Step03_PersistedConversations.csproj +++ b/dotnet/samples/02-agents/Agents/Agent_Step03_PersistedConversations/Agent_Step03_PersistedConversations.csproj @@ -9,13 +9,13 @@ - + - + diff --git a/dotnet/samples/02-agents/Agents/Agent_Step03_PersistedConversations/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step03_PersistedConversations/Program.cs index d3331cb2b8..5c55ef1b32 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step03_PersistedConversations/Program.cs +++ b/dotnet/samples/02-agents/Agents/Agent_Step03_PersistedConversations/Program.cs @@ -5,23 +5,27 @@ // This sample shows how to create and use a simple AI 
agent with a conversation that can be persisted to disk. using System.Text.Json; -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; -using OpenAI.Chat; +using Microsoft.Extensions.AI; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; // Create the agent // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIAgent agent = new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetChatClient(deploymentName) - .AsAIAgent(instructions: "You are good at telling jokes.", name: "Joker"); +AIAgent agent = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() + .AsAIAgent(new ChatClientAgentOptions + { + Name = "Joker", + ChatOptions = new() { ModelId = deploymentName, Instructions = "You are good at telling jokes." }, + }); // Start a new session for the agent conversation. 
AgentSession session = await agent.CreateSessionAsync(); diff --git a/dotnet/samples/02-agents/Agents/Agent_Step04_3rdPartyChatHistoryStorage/Agent_Step04_3rdPartyChatHistoryStorage.csproj b/dotnet/samples/02-agents/Agents/Agent_Step04_3rdPartyChatHistoryStorage/Agent_Step04_3rdPartyChatHistoryStorage.csproj index 860089b621..1a0e2c0be1 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step04_3rdPartyChatHistoryStorage/Agent_Step04_3rdPartyChatHistoryStorage.csproj +++ b/dotnet/samples/02-agents/Agents/Agent_Step04_3rdPartyChatHistoryStorage/Agent_Step04_3rdPartyChatHistoryStorage.csproj @@ -9,14 +9,14 @@ - + - + diff --git a/dotnet/samples/02-agents/Agents/Agent_Step04_3rdPartyChatHistoryStorage/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step04_3rdPartyChatHistoryStorage/Program.cs index cbcf14157e..28ee2d1a65 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step04_3rdPartyChatHistoryStorage/Program.cs +++ b/dotnet/samples/02-agents/Agents/Agent_Step04_3rdPartyChatHistoryStorage/Program.cs @@ -7,18 +7,17 @@ // the chat history can be retrieved from the custom storage location. using System.Text.Json; -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Extensions.AI; using Microsoft.Extensions.VectorData; using Microsoft.SemanticKernel.Connectors.InMemory; -using OpenAI.Chat; using SampleApp; using ChatMessage = Microsoft.Extensions.AI.ChatMessage; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? 
"gpt-4o-mini"; // Create a vector store to store the chat messages in. // Replace this with a vector store implementation of your choice if you want to persist the chat history to disk. @@ -28,13 +27,13 @@ // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIAgent agent = new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetChatClient(deploymentName) +AIAgent agent = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() .AsAIAgent(new ChatClientAgentOptions { - ChatOptions = new() { Instructions = "You are good at telling jokes." }, + ChatOptions = new() { ModelId = deploymentName, Instructions = "You are good at telling jokes." }, Name = "Joker", // Create a new ChatHistoryProvider for this agent that stores chat history in a vector store. 
ChatHistoryProvider = new VectorChatHistoryProvider(vectorStore) diff --git a/dotnet/samples/02-agents/Agents/Agent_Step05_Observability/Agent_Step05_Observability.csproj b/dotnet/samples/02-agents/Agents/Agent_Step05_Observability/Agent_Step05_Observability.csproj index 1a618d660a..e86cc51346 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step05_Observability/Agent_Step05_Observability.csproj +++ b/dotnet/samples/02-agents/Agents/Agent_Step05_Observability/Agent_Step05_Observability.csproj @@ -9,7 +9,7 @@ - + @@ -18,7 +18,7 @@ - + diff --git a/dotnet/samples/02-agents/Agents/Agent_Step05_Observability/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step05_Observability/Program.cs index 20a0c252a2..f84f2191e4 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step05_Observability/Program.cs +++ b/dotnet/samples/02-agents/Agents/Agent_Step05_Observability/Program.cs @@ -2,16 +2,16 @@ // This sample shows how to create and use a simple AI agent with Azure OpenAI as the backend that logs telemetry using OpenTelemetry. -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Azure.Monitor.OpenTelemetry.Exporter; using Microsoft.Agents.AI; -using OpenAI.Chat; +using Microsoft.Extensions.AI; using OpenTelemetry; using OpenTelemetry.Trace; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? 
"gpt-4o-mini"; var applicationInsightsConnectionString = Environment.GetEnvironmentVariable("APPLICATIONINSIGHTS_CONNECTION_STRING"); // Create TracerProvider with console exporter @@ -30,9 +30,15 @@ // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIAgent agent = new AzureOpenAIClient(new Uri(endpoint), new DefaultAzureCredential()) - .GetChatClient(deploymentName) - .AsAIAgent(instructions: "You are good at telling jokes.", name: "Joker") +AIAgent agent = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() + .AsAIAgent(new ChatClientAgentOptions + { + Name = "Joker", + ChatOptions = new() { ModelId = deploymentName, Instructions = "You are good at telling jokes." 
}, + }) .AsBuilder() .UseOpenTelemetry(sourceName: sourceName) .Build(); diff --git a/dotnet/samples/02-agents/Agents/Agent_Step06_DependencyInjection/Agent_Step06_DependencyInjection.csproj b/dotnet/samples/02-agents/Agents/Agent_Step06_DependencyInjection/Agent_Step06_DependencyInjection.csproj index 0aaa471260..c73468be3e 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step06_DependencyInjection/Agent_Step06_DependencyInjection.csproj +++ b/dotnet/samples/02-agents/Agents/Agent_Step06_DependencyInjection/Agent_Step06_DependencyInjection.csproj @@ -9,14 +9,14 @@ - + - + diff --git a/dotnet/samples/02-agents/Agents/Agent_Step06_DependencyInjection/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step06_DependencyInjection/Program.cs index 218ab1a10e..75fcb539d9 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step06_DependencyInjection/Program.cs +++ b/dotnet/samples/02-agents/Agents/Agent_Step06_DependencyInjection/Program.cs @@ -4,31 +4,30 @@ // This sample shows how to use dependency injection to register an AIAgent and use it from a hosted service with a user input chat loop. -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Extensions.AI; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; // Create a host builder that we will register services with and then run. 
HostApplicationBuilder builder = Host.CreateApplicationBuilder(args); // Add agent options to the service collection. -builder.Services.AddSingleton(new ChatClientAgentOptions() { Name = "Joker", ChatOptions = new() { Instructions = "You are good at telling jokes." } }); +builder.Services.AddSingleton(new ChatClientAgentOptions() { Name = "Joker", ChatOptions = new() { ModelId = deploymentName, Instructions = "You are good at telling jokes." } }); // Add a chat client to the service collection. // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -builder.Services.AddKeyedChatClient("AzureOpenAI", (sp) => new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetChatClient(deploymentName) - .AsIChatClient()); +builder.Services.AddKeyedChatClient("AzureOpenAI", (sp) => new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient()); // Add the AI agent to the service collection. 
builder.Services.AddSingleton((sp) => new ChatClientAgent( diff --git a/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/Agent_Step07_AsMcpTool.csproj b/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/Agent_Step07_AsMcpTool.csproj index db776afd1e..1df2f9ce52 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/Agent_Step07_AsMcpTool.csproj +++ b/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/Agent_Step07_AsMcpTool.csproj @@ -10,14 +10,15 @@ - + + - + diff --git a/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/Program.cs index d621227ea0..deff1045f0 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/Program.cs +++ b/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/Program.cs @@ -2,9 +2,10 @@ // This sample shows how to expose an AI agent as an MCP tool. -using Azure.AI.Agents.Persistent; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; +using Microsoft.Extensions.AI; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; using ModelContextProtocol.Server; @@ -12,20 +13,24 @@ var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +// Create a code-first agent using ProjectResponsesClient. // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. 
-var persistentAgentsClient = new PersistentAgentsClient(endpoint, new DefaultAzureCredential()); - -// Create a server side persistent agent -var agentMetadata = await persistentAgentsClient.Administration.CreateAgentAsync( - model: deploymentName, - instructions: "You are good at telling jokes, and you always start each joke with 'Aye aye, captain!'.", - name: "Joker", - description: "An agent that tells jokes."); - -// Retrieve the server side persistent agent as an AIAgent. -AIAgent agent = await persistentAgentsClient.GetAIAgentAsync(agentMetadata.Value.Id); +AIAgent agent = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() + .AsAIAgent(new ChatClientAgentOptions + { + Name = "Joker", + Description = "An agent that tells jokes.", + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = "You are good at telling jokes, and you always start each joke with 'Aye aye, captain!'.", + }, + }); // Convert the agent to an AIFunction and then to an MCP tool. // The agent name and description will be used as the mcp tool name and description. diff --git a/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/README.md b/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/README.md index e35cf01e90..b0e5e6e6db 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/README.md +++ b/dotnet/samples/02-agents/Agents/Agent_Step07_AsMcpTool/README.md @@ -21,7 +21,7 @@ To use the [MCP Inspector](https://modelcontextprotocol.io/docs/tools/inspector) ``` 1. Open a web browser and navigate to the URL displayed in the terminal. If not opened automatically, this will open the MCP Inspector interface. 1. 
In the MCP Inspector interface, add the following environment variables to allow your MCP server to access Azure AI Foundry Project to create and run the agent: - - AZURE_AI_PROJECT_ENDPOINT = https://your-resource.openai.azure.com/ # Replace with your Azure AI Foundry Project endpoint + - AZURE_AI_PROJECT_ENDPOINT = https://your-project.services.ai.azure.com # Replace with your Azure AI Foundry Project endpoint - AZURE_AI_MODEL_DEPLOYMENT_NAME = gpt-4o-mini # Replace with your model deployment name 1. Find and click the `Connect` button in the MCP Inspector interface to connect to the MCP server. 1. As soon as the connection is established, open the `Tools` tab in the MCP Inspector interface and select the `Joker` tool from the list. diff --git a/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/Agent_Step08_UsingImages.csproj b/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/Agent_Step08_UsingImages.csproj index 73a41005f1..8d2dbc66b3 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/Agent_Step08_UsingImages.csproj +++ b/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/Agent_Step08_UsingImages.csproj @@ -9,12 +9,13 @@ - + + - + diff --git a/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/Program.cs index 984a9e3b5c..4bc39839cd 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/Program.cs +++ b/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/Program.cs @@ -2,23 +2,27 @@ // This sample shows how to use Image Multi-Modality with an AI agent. -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; +using Microsoft.Agents.AI; using Microsoft.Extensions.AI; -using OpenAI.Chat; using ChatMessage = Microsoft.Extensions.AI.ChatMessage; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? 
throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = System.Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = System.Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o"; // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -var agent = new AzureOpenAIClient(new Uri(endpoint), new DefaultAzureCredential()) - .GetChatClient(deploymentName) - .AsAIAgent( - name: "VisionAgent", - instructions: "You are a helpful agent that can analyze images"); +var agent = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() + .AsAIAgent(new ChatClientAgentOptions + { + Name = "VisionAgent", + ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a helpful agent that can analyze images" }, + }); ChatMessage message = new(ChatRole.User, [ new TextContent("What do you see in this image?"), diff --git a/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/README.md b/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/README.md index e70c09f513..9d9c316441 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/README.md +++ b/dotnet/samples/02-agents/Agents/Agent_Step08_UsingImages/README.md @@ -1,6 +1,6 @@ # Using Images with AI Agents -This sample demonstrates how to use image multi-modality with an AI agent. It shows how to create a vision-enabled agent that can analyze and describe images using Azure OpenAI. 
+This sample demonstrates how to use image multi-modality with an AI agent. It shows how to create a vision-enabled agent that can analyze and describe images using Azure AI Foundry. ## What this sample demonstrates @@ -13,13 +13,13 @@ This sample demonstrates how to use image multi-modality with an AI agent. It sh - **Vision Agent**: Creates an agent specifically instructed to analyze images - **Multimodal Input**: Combines text questions with image uri in a single message -- **Azure OpenAI Integration**: Uses AzureOpenAI LLM agents +- **Azure AI Foundry Integration**: Uses AzureOpenAI LLM agents ## Prerequisites Before running this sample, ensure you have: -1. An Azure OpenAI project set up +1. An Azure AI Foundry project set up 2. A compatible model deployment (e.g., gpt-4o) 3. Azure CLI installed and authenticated @@ -28,8 +28,8 @@ Before running this sample, ensure you have: Set the following environment variables: ```powershell -$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" # Replace with your Azure OpenAI endpoint -$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o" # Replace with your model deployment name (optional, defaults to gpt-4o) +$env:AZURE_AI_PROJECT_ENDPOINT="https://your-project.services.ai.azure.com" # Replace with your Azure AI Foundry endpoint +$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o" # Replace with your model deployment name (optional, defaults to gpt-4o) ``` ## Run the sample diff --git a/dotnet/samples/02-agents/Agents/Agent_Step09_AsFunctionTool/Agent_Step09_AsFunctionTool.csproj b/dotnet/samples/02-agents/Agents/Agent_Step09_AsFunctionTool/Agent_Step09_AsFunctionTool.csproj index 2660090404..bbd4449ca8 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step09_AsFunctionTool/Agent_Step09_AsFunctionTool.csproj +++ b/dotnet/samples/02-agents/Agents/Agent_Step09_AsFunctionTool/Agent_Step09_AsFunctionTool.csproj @@ -10,13 +10,14 @@ - + + - + diff --git a/dotnet/samples/02-agents/Agents/Agent_Step09_AsFunctionTool/Program.cs 
b/dotnet/samples/02-agents/Agents/Agent_Step09_AsFunctionTool/Program.cs index aca1a95ce4..94683b01de 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step09_AsFunctionTool/Program.cs +++ b/dotnet/samples/02-agents/Agents/Agent_Step09_AsFunctionTool/Program.cs @@ -3,14 +3,13 @@ // This sample shows how to create and use a Azure OpenAI AI agent as a function tool. using System.ComponentModel; -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Extensions.AI; -using OpenAI.Chat; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; [Description("Get the weather for a given location.")] static string GetWeather([Description("The location to get the weather for.")] string location) @@ -20,22 +19,36 @@ static string GetWeather([Description("The location to get the weather for.")] s // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. 
-AIAgent weatherAgent = new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetChatClient(deploymentName) - .AsAIAgent( - instructions: "You answer questions about the weather.", - name: "WeatherAgent", - description: "An agent that answers questions about the weather.", - tools: [AIFunctionFactory.Create(GetWeather)]); +AIAgent weatherAgent = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() + .AsAIAgent(new ChatClientAgentOptions + { + Name = "WeatherAgent", + Description = "An agent that answers questions about the weather.", + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = "You answer questions about the weather.", + Tools = [AIFunctionFactory.Create(GetWeather)] + }, + }); // Create the main agent, and provide the weather agent as a function tool. -AIAgent agent = new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetChatClient(deploymentName) - .AsAIAgent(instructions: "You are a helpful assistant who responds in French.", tools: [weatherAgent.AsAIFunction()]); +AIAgent agent = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() + .AsAIAgent(new ChatClientAgentOptions + { + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = "You are a helpful assistant who responds in French.", + Tools = [weatherAgent.AsAIFunction()] + }, + }); // Invoke the agent and output the text result. 
Console.WriteLine(await agent.RunAsync("What is the weather like in Amsterdam?")); diff --git a/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/Agent_Step10_BackgroundResponsesWithToolsAndPersistence.csproj b/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/Agent_Step10_BackgroundResponsesWithToolsAndPersistence.csproj index 29fab5f992..39f5c6ed62 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/Agent_Step10_BackgroundResponsesWithToolsAndPersistence.csproj +++ b/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/Agent_Step10_BackgroundResponsesWithToolsAndPersistence.csproj @@ -9,12 +9,13 @@ - + + - + diff --git a/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/Program.cs index 5d9c70a5fd..891584617b 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/Program.cs +++ b/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/Program.cs @@ -8,29 +8,34 @@ using System.ComponentModel; using System.Text.Json; -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Extensions.AI; -using OpenAI.Responses; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-5"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? 
"gpt-5"; var stateStore = new Dictionary(); // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIAgent agent = new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetResponsesClient(deploymentName) - .AsAIAgent( - name: "SpaceNovelWriter", - instructions: "You are a space novel writer. Always research relevant facts and generate character profiles for the main characters before writing novels." + - "Write complete chapters without asking for approval or feedback. Do not ask the user about tone, style, pace, or format preferences - just write the novel based on the request.", - tools: [AIFunctionFactory.Create(ResearchSpaceFactsAsync), AIFunctionFactory.Create(GenerateCharacterProfilesAsync)]); +AIAgent agent = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() + .AsAIAgent(new ChatClientAgentOptions + { + Name = "SpaceNovelWriter", + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = "You are a space novel writer. Always research relevant facts and generate character profiles for the main characters before writing novels." + + "Write complete chapters without asking for approval or feedback. Do not ask the user about tone, style, pace, or format preferences - just write the novel based on the request.", + Tools = [AIFunctionFactory.Create(ResearchSpaceFactsAsync), AIFunctionFactory.Create(GenerateCharacterProfilesAsync)] + }, + }); // Enable background responses (only supported by {Azure}OpenAI Responses at this time). 
AgentRunOptions options = new() { AllowBackgroundResponses = true }; diff --git a/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/README.md b/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/README.md index ca52e8afa3..b10a541cff 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/README.md +++ b/dotnet/samples/02-agents/Agents/Agent_Step10_BackgroundResponsesWithToolsAndPersistence/README.md @@ -1,6 +1,6 @@ # What This Sample Shows -This sample demonstrates how to use background responses with ChatCompletionAgent and Azure OpenAI Responses for long-running operations. Background responses support: +This sample demonstrates how to use background responses with ChatClientAgent and Azure AI Foundry Responses for long-running operations. Background responses support: - **Polling for completion** - Non-streaming APIs can start a background operation and return a continuation token. Poll with the token until the response completes. - **Function calling** - Functions can be called during background operations. @@ -15,14 +15,14 @@ For more information, see the [official documentation](https://learn.microsoft.c Before you begin, ensure you have the following prerequisites: - .NET 10 SDK or later -- Azure OpenAI service endpoint and deployment configured +- Azure AI Foundry service endpoint and deployment configured - Azure CLI installed and authenticated (for Azure credential authentication) -**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure OpenAI resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). +**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure AI Foundry resource.
For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). Set the following environment variables: ```powershell -$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" # Replace with your Azure OpenAI resource endpoint -$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-5" # Optional, defaults to gpt-5 +$env:AZURE_AI_PROJECT_ENDPOINT="https://your-project.services.ai.azure.com" # Replace with your Azure AI Foundry project endpoint +$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-5" # Optional, defaults to gpt-5 ``` diff --git a/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/Agent_Step11_Middleware.csproj b/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/Agent_Step11_Middleware.csproj index 6582c30cd5..1696973a7c 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/Agent_Step11_Middleware.csproj +++ b/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/Agent_Step11_Middleware.csproj @@ -11,12 +11,12 @@ - + + - - + diff --git a/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/Program.cs index 09cd540378..da73862f20 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/Program.cs +++ b/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/Program.cs @@ -8,21 +8,23 @@ using System.ComponentModel; using System.Text.RegularExpressions; -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Extensions.AI; // Get Azure AI Foundry configuration from environment variables -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = System.Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? 
throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = System.Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o"; -// Get a client to create/retrieve server side agents with +// Create a chat client. // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -var azureOpenAIClient = new AzureOpenAIClient(new Uri(endpoint), new DefaultAzureCredential()) - .GetChatClient(deploymentName); +var chatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient(); [Description("Get the weather for a given location.")] static string GetWeather([Description("The location to get the weather for.")] string location) @@ -33,12 +35,18 @@ static string GetDateTime() => DateTimeOffset.Now.ToString(); // Adding middleware to the chat client level and building an agent on top of it -var originalAgent = azureOpenAIClient.AsIChatClient() +var originalAgent = chatClient .AsBuilder() .Use(getResponseFunc: ChatClientMiddleware, getStreamingResponseFunc: null) - .BuildAIAgent( - instructions: "You are an AI assistant that helps people find information.", - tools: [AIFunctionFactory.Create(GetDateTime, name: nameof(GetDateTime))]); + .BuildAIAgent(new ChatClientAgentOptions + { + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = "You are an AI assistant that helps people find information.", + Tools = [AIFunctionFactory.Create(GetDateTime, name: nameof(GetDateTime))] + }, + }); // Adding middleware to the agent level var middlewareEnabledAgent = originalAgent @@ -117,11 +125,17 @@ static string GetDateTime() // In this case we are attaching an AIContextProvider that 
only adds messages. Console.WriteLine("\n\n=== Example 6: AIContextProvider on chat client pipeline ==="); -var chatClientProviderAgent = azureOpenAIClient.AsIChatClient() +var chatClientProviderAgent = chatClient .AsBuilder() .UseAIContextProviders(new DateTimeContextProvider()) - .BuildAIAgent( - instructions: "You are an AI assistant that helps people find information."); + .BuildAIAgent(new ChatClientAgentOptions + { + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = "You are an AI assistant that helps people find information.", + }, + }); var chatClientContextResponse = await chatClientProviderAgent.RunAsync("Is it almost time for lunch?"); Console.WriteLine($"Chat client context-enriched response: {chatClientContextResponse}"); diff --git a/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/README.md b/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/README.md index 74895e0cdf..c744267372 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/README.md +++ b/dotnet/samples/02-agents/Agents/Agent_Step11_Middleware/README.md @@ -7,7 +7,7 @@ This sample demonstrates how to add middleware to intercept: ## What This Sample Shows -1. Azure OpenAI integration via `AzureOpenAIClient` and `DefaultAzureCredential` +1. Azure AI Foundry integration via `ProjectResponsesClient` and `DefaultAzureCredential` 2. Chat client middleware using `ChatClientBuilder.Use(...)` 3. Agent run middleware (PII redaction and wording guardrails) 4. Function invocation middleware (logging and overriding a tool result) @@ -26,8 +26,8 @@ Attempting to use function middleware on agents that do not wrap a ChatClientAge ## Prerequisites 1. 
Environment variables: - - `AZURE_OPENAI_ENDPOINT`: Your Azure OpenAI endpoint - - `AZURE_OPENAI_DEPLOYMENT_NAME`: Chat deployment name (optional; defaults to `gpt-4o`) + - `AZURE_AI_PROJECT_ENDPOINT`: Your Azure AI Foundry endpoint + - `AZURE_AI_MODEL_DEPLOYMENT_NAME`: Chat deployment name (optional; defaults to `gpt-4o`) 2. Sign in with Azure CLI (PowerShell): ```powershell az login diff --git a/dotnet/samples/02-agents/Agents/Agent_Step12_Plugins/Agent_Step12_Plugins.csproj b/dotnet/samples/02-agents/Agents/Agent_Step12_Plugins/Agent_Step12_Plugins.csproj index 122c2e77a4..c88c4fc605 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step12_Plugins/Agent_Step12_Plugins.csproj +++ b/dotnet/samples/02-agents/Agents/Agent_Step12_Plugins/Agent_Step12_Plugins.csproj @@ -13,11 +13,11 @@ - + + - diff --git a/dotnet/samples/02-agents/Agents/Agent_Step12_Plugins/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step12_Plugins/Program.cs index 2e9b405183..73a88135b8 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step12_Plugins/Program.cs +++ b/dotnet/samples/02-agents/Agents/Agent_Step12_Plugins/Program.cs @@ -9,15 +9,14 @@ // as AI functions. The AsAITools method of the plugin class shows how to specify // which methods should be exposed to the AI agent. -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Extensions.AI; using Microsoft.Extensions.DependencyInjection; -using OpenAI.Chat; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? 
"gpt-4o-mini"; // Create a service collection to hold the agent plugin and its dependencies. ServiceCollection services = new(); @@ -30,15 +29,20 @@ // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIAgent agent = new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetChatClient(deploymentName) - .AsAIAgent( - instructions: "You are a helpful assistant that helps people find information.", - name: "Assistant", - tools: [.. serviceProvider.GetRequiredService().AsAITools()], - services: serviceProvider); // Pass the service provider to the agent so it will be available to plugin functions to resolve dependencies. +AIAgent agent = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() + .AsAIAgent(new ChatClientAgentOptions + { + Name = "Assistant", + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = "You are a helpful assistant that helps people find information.", + Tools = [.. serviceProvider.GetRequiredService().AsAITools()] + }, + }, services: serviceProvider); // Pass the service provider to the agent so it will be available to plugin functions to resolve dependencies. 
Console.WriteLine(await agent.RunAsync("Tell me current time and weather in Seattle.")); diff --git a/dotnet/samples/02-agents/Agents/Agent_Step13_ChatReduction/Agent_Step13_ChatReduction.csproj b/dotnet/samples/02-agents/Agents/Agent_Step13_ChatReduction/Agent_Step13_ChatReduction.csproj index 0f9de7c359..ede4e4ca18 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step13_ChatReduction/Agent_Step13_ChatReduction.csproj +++ b/dotnet/samples/02-agents/Agents/Agent_Step13_ChatReduction/Agent_Step13_ChatReduction.csproj @@ -9,13 +9,13 @@ - + - + diff --git a/dotnet/samples/02-agents/Agents/Agent_Step13_ChatReduction/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step13_ChatReduction/Program.cs index fe93ed785c..253e1fd0a7 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step13_ChatReduction/Program.cs +++ b/dotnet/samples/02-agents/Agents/Agent_Step13_ChatReduction/Program.cs @@ -5,26 +5,25 @@ // NOTE: this feature is only supported where the chat history is stored locally, such as with OpenAI Chat Completion. // Where the chat history is stored server side, such as with Azure Foundry Agents, the service must manage the chat history size. -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Extensions.AI; -using OpenAI.Chat; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; // Construct the agent, and provide a factory to create an in-memory chat message store with a reducer that keeps only the last 2 non-system messages. 
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIAgent agent = new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetChatClient(deploymentName) +AIAgent agent = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() .AsAIAgent(new ChatClientAgentOptions { - ChatOptions = new() { Instructions = "You are good at telling jokes." }, + ChatOptions = new() { ModelId = deploymentName, Instructions = "You are good at telling jokes." }, Name = "Joker", ChatHistoryProvider = new InMemoryChatHistoryProvider(new() { ChatReducer = new MessageCountingChatReducer(2) }) }); diff --git a/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/Agent_Step14_BackgroundResponses.csproj b/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/Agent_Step14_BackgroundResponses.csproj index 1c95b4af25..621600228d 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/Agent_Step14_BackgroundResponses.csproj +++ b/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/Agent_Step14_BackgroundResponses.csproj @@ -9,12 +9,13 @@ - + + - + diff --git a/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/Program.cs index 62db550556..83560e80da 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/Program.cs +++ b/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/Program.cs @@ -2,22 +2,25 @@ // This sample shows how to use background responses with ChatClientAgent and Azure OpenAI Responses. 
-using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; -using OpenAI.Responses; +using Microsoft.Extensions.AI; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIAgent agent = new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetResponsesClient(deploymentName) - .AsAIAgent(); +AIAgent agent = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() + .AsAIAgent(new ChatClientAgentOptions + { + ChatOptions = new() { ModelId = deploymentName }, + }); // Enable background responses (only supported by OpenAI Responses at this time). 
AgentRunOptions options = new() { AllowBackgroundResponses = true }; diff --git a/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/README.md b/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/README.md index e898733bc3..59a93885c0 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/README.md +++ b/dotnet/samples/02-agents/Agents/Agent_Step14_BackgroundResponses/README.md @@ -1,6 +1,6 @@ -# What This Sample Shows +# What This Sample Shows -This sample demonstrates how to use background responses with ChatCompletionAgent and Azure OpenAI Responses for long-running operations. Background responses support: +This sample demonstrates how to use background responses with ChatClientAgent and Azure AI Foundry Responses for long-running operations. Background responses support: - **Polling for completion** - Non-streaming APIs can start a background operation and return a continuation token. Poll with the token until the response completes. - **Resuming after interruption** - Streaming APIs can be interrupted and resumed from the last update using the continuation token. @@ -14,14 +14,14 @@ For more information, see the [official documentation](https://learn.microsoft.c Before you begin, ensure you have the following prerequisites: - .NET 10 SDK or later -- Azure OpenAI service endpoint and deployment configured +- Azure AI Foundry service endpoint and deployment configured - Azure CLI installed and authenticated (for Azure credential authentication) -**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure OpenAI resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). +**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure AI Foundry resource.
For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). Set the following environment variables: ```powershell -$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" # Replace with your Azure OpenAI resource endpoint -$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini +$env:AZURE_AI_PROJECT_ENDPOINT="https://your-project.services.ai.azure.com" # Replace with your Azure AI Foundry project endpoint +$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini ``` \ No newline at end of file diff --git a/dotnet/samples/02-agents/Agents/Agent_Step16_Declarative/Agent_Step16_Declarative.csproj b/dotnet/samples/02-agents/Agents/Agent_Step16_Declarative/Agent_Step16_Declarative.csproj index 99073874ee..d4521b560b 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step16_Declarative/Agent_Step16_Declarative.csproj +++ b/dotnet/samples/02-agents/Agents/Agent_Step16_Declarative/Agent_Step16_Declarative.csproj @@ -9,7 +9,7 @@ - + @@ -19,7 +19,7 @@ - + diff --git a/dotnet/samples/02-agents/Agents/Agent_Step16_Declarative/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step16_Declarative/Program.cs index 215833c795..a6d7f9ed25 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step16_Declarative/Program.cs +++ b/dotnet/samples/02-agents/Agents/Agent_Step16_Declarative/Program.cs @@ -2,23 +2,22 @@ // This sample shows how to create an agent from a YAML based declarative representation. -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Extensions.AI; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? 
"gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var _ = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; // Create the chat client // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -IChatClient chatClient = new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetChatClient(deploymentName) - .AsIChatClient(); +IChatClient chatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient(); // Define the agent using a YAML definition. var text = diff --git a/dotnet/samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Agent_Step17_AdditionalAIContext.csproj b/dotnet/samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Agent_Step17_AdditionalAIContext.csproj index 99073874ee..d4521b560b 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Agent_Step17_AdditionalAIContext.csproj +++ b/dotnet/samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Agent_Step17_AdditionalAIContext.csproj @@ -9,7 +9,7 @@ - + @@ -19,7 +19,7 @@ - + diff --git a/dotnet/samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Program.cs b/dotnet/samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Program.cs index a341abe8cd..87834af317 100644 --- a/dotnet/samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Program.cs +++ b/dotnet/samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Program.cs @@ -9,16 +9,14 @@ using System.Text; using System.Text.Json; -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; 
using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Extensions.AI; -using OpenAI.Chat; using SampleApp; -using MEAI = Microsoft.Extensions.AI; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-5-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-5-mini"; // A sample function to load the next three calendar events for the user. Func> loadNextThreeCalendarEvents = async () => @@ -36,13 +34,13 @@ // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIAgent agent = new AzureOpenAIClient( - new Uri(endpoint), - new DefaultAzureCredential()) - .GetChatClient(deploymentName) +AIAgent agent = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient() .AsAIAgent(new ChatClientAgentOptions() { - ChatOptions = new() { Instructions = """ + ChatOptions = new() { ModelId = deploymentName, Instructions = """ You are a helpful personal assistant. You manage a TODO list for the user. When the user has completed one of the tasks it can be removed from the TODO list. Only provide the list of TODO items if asked. You remind users of upcoming calendar events when the user interacts with you. 
@@ -120,7 +118,7 @@ protected override ValueTask ProvideAIContextAsync(InvokingContext co ], Messages = [ - new MEAI.ChatMessage(ChatRole.User, outputMessageBuilder.ToString()) + new ChatMessage(ChatRole.User, outputMessageBuilder.ToString()) ] }); } @@ -150,7 +148,7 @@ private static void AddTodoItem(AgentSession? session, string item) ///
internal sealed class CalendarSearchAIContextProvider(Func> loadNextThreeCalendarEvents) : MessageAIContextProvider { - protected override async ValueTask> ProvideMessagesAsync(InvokingContext context, CancellationToken cancellationToken = default) + protected override async ValueTask> ProvideMessagesAsync(InvokingContext context, CancellationToken cancellationToken = default) { var events = await loadNextThreeCalendarEvents(); @@ -161,7 +159,7 @@ internal sealed class CalendarSearchAIContextProvider(Func> loadN outputMessageBuilder.AppendLine($" - {calendarEvent}"); } - return [new MEAI.ChatMessage(ChatRole.User, outputMessageBuilder.ToString())]; + return [new ChatMessage(ChatRole.User, outputMessageBuilder.ToString())]; } } } diff --git a/dotnet/samples/02-agents/Agents/README.md b/dotnet/samples/02-agents/Agents/README.md index 116cbfc06b..db0b29f4c8 100644 --- a/dotnet/samples/02-agents/Agents/README.md +++ b/dotnet/samples/02-agents/Agents/README.md @@ -1,10 +1,10 @@ # Getting started with agents The getting started with agents samples demonstrate the fundamental concepts and functionalities -of single agents and can be used with any agent type. +of single agents using Azure AI Foundry as the default provider. -While the functionality can be used with any agent type, these samples use Azure OpenAI as the AI provider -and use ChatCompletion as the type of service. +These samples use the `ProjectResponsesClient` from `Azure.AI.Projects.OpenAI` to connect to +Azure AI Foundry via the Responses API. This is the recommended approach for new development. For other samples that demonstrate how to create and configure each type of agent that come with the agent framework, see the [How to create an agent for each provider](../AgentProviders/README.md) samples. 
@@ -14,13 +14,12 @@ see the [How to create an agent for each provider](../AgentProviders/README.md) Before you begin, ensure you have the following prerequisites: - .NET 10 SDK or later -- Azure OpenAI service endpoint and deployment configured +- Azure AI Foundry project endpoint configured - Azure CLI installed and authenticated (for Azure credential authentication) -- User has the `Cognitive Services OpenAI Contributor` role for the Azure OpenAI resource. -**Note**: These samples use Azure OpenAI models. For more information, see [how to deploy Azure OpenAI models with Azure AI Foundry](https://learn.microsoft.com/en-us/azure/ai-foundry/how-to/deploy-models-openai). +**Note**: These samples use Azure AI Foundry. For more information, see [Azure AI Foundry documentation](https://learn.microsoft.com/en-us/azure/ai-foundry/). -**Note**: These samples use Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure OpenAI resource and have the `Cognitive Services OpenAI Contributor` role. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). +**Note**: These samples use Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure AI Foundry project. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). 
## Samples @@ -56,8 +55,8 @@ cd Agent_Step01_UsingFunctionToolsWithApprovals Set the following environment variables: ```powershell -$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" # Replace with your Azure OpenAI resource endpoint -$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini +$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry project endpoint +$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini ``` If the variables are not set, you will be prompted for the values when running the samples. diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/FoundryAgents_Step01.2_Running.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/FoundryAgents_Step01.2_Running.csproj deleted file mode 100644 index daf7e24494..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/FoundryAgents_Step01.2_Running.csproj +++ /dev/null @@ -1,20 +0,0 @@ - - - - Exe - net10.0 - - enable - enable - - - - - - - - - - - - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/Program.cs deleted file mode 100644 index dd5db03b15..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/Program.cs +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -// This sample shows how to create and use a simple AI agent with Azure Foundry Agents as the backend. - -using Azure.AI.Projects; -using Azure.AI.Projects.OpenAI; -using Azure.Identity; -using Microsoft.Agents.AI; - -string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? 
throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); -string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; - -const string JokerInstructions = "You are good at telling jokes."; -const string JokerName = "JokerAgent"; - -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. -// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. -// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid -// latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); - -// Define the agent you want to create. (Prompt Agent in this case) -AgentVersionCreationOptions options = new(new PromptAgentDefinition(model: deploymentName) { Instructions = JokerInstructions }); - -// Azure.AI.Agents SDK creates and manages agent by name and versions. -// You can create a server side agent version with the Azure.AI.Agents SDK client below. -AgentVersion agentVersion = aiProjectClient.Agents.CreateAgentVersion(agentName: JokerName, options); - -// You can use an AIAgent with an already created server side agent version. -AIAgent jokerAgent = aiProjectClient.AsAIAgent(agentVersion); - -// Invoke the agent with streaming support. -await foreach (AgentResponseUpdate update in jokerAgent.RunStreamingAsync("Tell me a joke about a pirate.")) -{ - Console.WriteLine(update); -} - -// Cleanup by agent name removes the agent version created. 
-await aiProjectClient.Agents.DeleteAgentAsync(jokerAgent.Name); diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/README.md deleted file mode 100644 index 40cb5e107d..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step01.2_Running/README.md +++ /dev/null @@ -1,46 +0,0 @@ -# Running a Simple AI Agent with Streaming - -This sample demonstrates how to create and run a simple AI agent with Azure Foundry Agents, including both text and streaming responses. - -## What this sample demonstrates - -- Creating a simple AI agent with instructions -- Running an agent with text output -- Running an agent with streaming output -- Managing agent lifecycle (creation and deletion) - -## Prerequisites - -Before you begin, ensure you have the following prerequisites: - -- .NET 10 SDK or later -- Azure Foundry service endpoint and deployment configured -- Azure CLI installed and authenticated (for Azure credential authentication) - -**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). - -Set the following environment variables: - -```powershell -$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint -$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini -``` - -## Run the sample - -Navigate to the FoundryAgents sample directory and run: - -```powershell -cd dotnet/samples/02-agents/FoundryAgents -dotnet run --project .\FoundryAgents_Step01.2_Running -``` - -## Expected behavior - -The sample will: - -1. 
Create an agent named "JokerAgent" with instructions to tell jokes -2. Run the agent with a text prompt and display the response -3. Run the agent again with streaming to display the response as it's generated -4. Clean up resources by deleting the agent - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/FoundryAgents_Step02_MultiturnConversation.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/FoundryAgents_Step02_MultiturnConversation.csproj deleted file mode 100644 index daf7e24494..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/FoundryAgents_Step02_MultiturnConversation.csproj +++ /dev/null @@ -1,20 +0,0 @@ - - - - Exe - net10.0 - - enable - enable - - - - - - - - - - - - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/Program.cs deleted file mode 100644 index 1ac51c30ad..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/Program.cs +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -// This sample shows how to create and use a simple AI agent with a multi-turn conversation. - -using Azure.AI.Projects; -using Azure.AI.Projects.OpenAI; -using Azure.Identity; -using Microsoft.Agents.AI; - -string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); -string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; - -const string JokerInstructions = "You are good at telling jokes."; -const string JokerName = "JokerAgent"; - -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. 
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. -// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid -// latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); - -// Define the agent you want to create. (Prompt Agent in this case) -AgentVersionCreationOptions options = new(new PromptAgentDefinition(model: deploymentName) { Instructions = JokerInstructions }); - -// Retrieve an AIAgent for the created server side agent version. -ChatClientAgent jokerAgent = await aiProjectClient.CreateAIAgentAsync(name: JokerName, options); - -// Invoke the agent with a multi-turn conversation, where the context is preserved in the session object. -// Create a conversation in the server -ProjectConversationsClient conversationsClient = aiProjectClient.GetProjectOpenAIClient().GetProjectConversationsClient(); -ProjectConversation conversation = await conversationsClient.CreateProjectConversationAsync(); - -// Providing the conversation Id is not strictly necessary, but by not providing it no information will show up in the Foundry Project UI as conversations. -// Sessions that don't have a conversation Id will work based on the `PreviousResponseId`. -AgentSession session = await jokerAgent.CreateSessionAsync(conversation.Id); - -Console.WriteLine(await jokerAgent.RunAsync("Tell me a joke about a pirate.", session)); -Console.WriteLine(await jokerAgent.RunAsync("Now add some emojis to the joke and tell it in the voice of a pirate's parrot.", session)); - -// Invoke the agent with a multi-turn conversation and streaming, where the context is preserved in the session object. 
-session = await jokerAgent.CreateSessionAsync(conversation.Id); -await foreach (AgentResponseUpdate update in jokerAgent.RunStreamingAsync("Tell me a joke about a pirate.", session)) -{ - Console.WriteLine(update); -} -await foreach (AgentResponseUpdate update in jokerAgent.RunStreamingAsync("Now add some emojis to the joke and tell it in the voice of a pirate's parrot.", session)) -{ - Console.WriteLine(update); -} - -// Cleanup by agent name removes the agent version created. -await aiProjectClient.Agents.DeleteAgentAsync(jokerAgent.Name); - -// Cleanup the conversation created. -await conversationsClient.DeleteConversationAsync(conversation.Id); diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/README.md deleted file mode 100644 index 86721bf960..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step02_MultiturnConversation/README.md +++ /dev/null @@ -1,59 +0,0 @@ -# Multi-turn Conversation with AI Agents - -This sample demonstrates how to implement multi-turn conversations with AI agents, where context is preserved across multiple agent runs using threads and conversation IDs. - -## What this sample demonstrates - -- Creating an AI agent with instructions -- Creating a project conversation to track conversations in the Foundry UI -- Using threads with conversation IDs to maintain conversation context -- Running multi-turn conversations with text output -- Running multi-turn conversations with streaming output -- Managing agent and conversation lifecycle (creation and deletion) - -## Prerequisites - -Before you begin, ensure you have the following prerequisites: - -- .NET 10 SDK or later -- Azure Foundry service endpoint and deployment configured -- Azure CLI installed and authenticated (for Azure credential authentication) - -**Note**: This demo uses Azure CLI credentials for authentication. 
Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). - -Set the following environment variables: - -```powershell -$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint -$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini -``` - -## Run the sample - -Navigate to the FoundryAgents sample directory and run: - -```powershell -cd dotnet/samples/02-agents/FoundryAgents -dotnet run --project .\FoundryAgents_Step02_MultiturnConversation -``` - -## Expected behavior - -The sample will: - -1. Create an agent named "JokerAgent" with instructions to tell jokes -2. Create a project conversation to enable visibility in the Azure Foundry UI -3. Create a thread linked to the conversation ID for context tracking -4. Run the agent with a text prompt and display the response -5. Send a follow-up message to the same thread, demonstrating context preservation -6. Create a new thread sharing the same conversation ID and run the agent with streaming -7. Send a follow-up streaming message to demonstrate multi-turn streaming -8. Clean up resources by deleting the agent and conversation - -## Conversation ID vs PreviousResponseId - -When working with multi-turn conversations, there are two approaches: - -- **With Conversation ID**: By passing a `conversation.Id` to `CreateSessionAsync()`, the conversation will be visible in the Azure Foundry Project UI. This is useful for tracking and debugging conversations. -- **Without Conversation ID**: Sessions created without a conversation ID still work correctly, maintaining context via `PreviousResponseId`. However, these conversations may not appear in the Foundry UI. 
- diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/FoundryAgents_Step03_UsingFunctionTools.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/FoundryAgents_Step03_UsingFunctionTools.csproj deleted file mode 100644 index daf7e24494..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/FoundryAgents_Step03_UsingFunctionTools.csproj +++ /dev/null @@ -1,20 +0,0 @@ - - - - Exe - net10.0 - - enable - enable - - - - - - - - - - - - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/Program.cs deleted file mode 100644 index cfd74000a6..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/Program.cs +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -// This sample demonstrates how to use an agent with function tools. -// It shows both non-streaming and streaming agent interactions using weather-related tools. - -using System.ComponentModel; -using Azure.AI.Projects; -using Azure.Identity; -using Microsoft.Agents.AI; -using Microsoft.Extensions.AI; - -string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); -string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? 
"gpt-4o-mini"; - -[Description("Get the weather for a given location.")] -static string GetWeather([Description("The location to get the weather for.")] string location) - => $"The weather in {location} is cloudy with a high of 15°C."; - -const string AssistantInstructions = "You are a helpful assistant that can get weather information."; -const string AssistantName = "WeatherAssistant"; - -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. -// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. -// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid -// latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); - -// Define the agent with function tools. -AITool tool = AIFunctionFactory.Create(GetWeather); - -// Create AIAgent directly -var newAgent = await aiProjectClient.CreateAIAgentAsync(name: AssistantName, model: deploymentName, instructions: AssistantInstructions, tools: [tool]); - -// Getting an already existing agent by name with tools. -/* - * IMPORTANT: Since agents that are stored in the server only know the definition of the function tools (JSON Schema), - * you need to provided all invocable function tools when retrieving the agent so it can invoke them automatically. - * If no invocable tools are provided, the function calling needs to handled manually. - */ -var existingAgent = await aiProjectClient.GetAIAgentAsync(name: AssistantName, tools: [tool]); - -// Non-streaming agent interaction with function tools. -AgentSession session = await existingAgent.CreateSessionAsync(); -Console.WriteLine(await existingAgent.RunAsync("What is the weather like in Amsterdam?", session)); - -// Streaming agent interaction with function tools. 
-session = await existingAgent.CreateSessionAsync(); -await foreach (AgentResponseUpdate update in existingAgent.RunStreamingAsync("What is the weather like in Amsterdam?", session)) -{ - Console.WriteLine(update); -} - -// Cleanup by agent name removes the agent version created. -await aiProjectClient.Agents.DeleteAgentAsync(existingAgent.Name); diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/README.md deleted file mode 100644 index fa9b5baf21..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step03_UsingFunctionTools/README.md +++ /dev/null @@ -1,48 +0,0 @@ -# Using Function Tools with AI Agents - -This sample demonstrates how to use function tools with AI agents, allowing agents to call custom functions to retrieve information. - -## What this sample demonstrates - -- Creating function tools using AIFunctionFactory -- Passing function tools to an AI agent -- Running agents with function tools (text output) -- Running agents with function tools (streaming output) -- Managing agent lifecycle (creation and deletion) - -## Prerequisites - -Before you begin, ensure you have the following prerequisites: - -- .NET 10 SDK or later -- Azure Foundry service endpoint and deployment configured -- Azure CLI installed and authenticated (for Azure credential authentication) - -**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). 
- -Set the following environment variables: - -```powershell -$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint -$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini -``` - -## Run the sample - -Navigate to the FoundryAgents sample directory and run: - -```powershell -cd dotnet/samples/02-agents/FoundryAgents -dotnet run --project .\FoundryAgents_Step03.1_UsingFunctionTools -``` - -## Expected behavior - -The sample will: - -1. Create an agent named "WeatherAssistant" with a GetWeather function tool -2. Run the agent with a text prompt asking about weather -3. The agent will invoke the GetWeather function tool to retrieve weather information -4. Run the agent again with streaming to display the response as it's generated -5. Clean up resources by deleting the agent - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/FoundryAgents_Step04_UsingFunctionToolsWithApprovals.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/FoundryAgents_Step04_UsingFunctionToolsWithApprovals.csproj deleted file mode 100644 index daf7e24494..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/FoundryAgents_Step04_UsingFunctionToolsWithApprovals.csproj +++ /dev/null @@ -1,20 +0,0 @@ - - - - Exe - net10.0 - - enable - enable - - - - - - - - - - - - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/Program.cs deleted file mode 100644 index f33fae35f4..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/Program.cs +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright (c) 
Microsoft. All rights reserved. - -// This sample demonstrates how to use an agent with function tools that require a human in the loop for approvals. -// It shows both non-streaming and streaming agent interactions using weather-related tools. -// If the agent is hosted in a service, with a remote user, combine this sample with the Persisted Conversations sample to persist the chat history -// while the agent is waiting for user input. - -using System.ComponentModel; -using Azure.AI.Projects; -using Azure.Identity; -using Microsoft.Agents.AI; -using Microsoft.Extensions.AI; - -string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); -string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; - -// Create a sample function tool that the agent can use. -[Description("Get the weather for a given location.")] -static string GetWeather([Description("The location to get the weather for.")] string location) - => $"The weather in {location} is cloudy with a high of 15°C."; - -const string AssistantInstructions = "You are a helpful assistant that can get weather information."; -const string AssistantName = "WeatherAssistant"; - -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. -// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. -// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid -// latency issues, unintended credential probing, and potential security risks from fallback mechanisms. 
-AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); - -ApprovalRequiredAIFunction approvalTool = new(AIFunctionFactory.Create(GetWeather, name: nameof(GetWeather))); - -// Create AIAgent directly -AIAgent agent = await aiProjectClient.CreateAIAgentAsync(name: AssistantName, model: deploymentName, instructions: AssistantInstructions, tools: [approvalTool]); - -// Call the agent with approval-required function tools. -// The agent will request approval before invoking the function. -AgentSession session = await agent.CreateSessionAsync(); -AgentResponse response = await agent.RunAsync("What is the weather like in Amsterdam?", session); - -// Check if there are any approval requests. -// For simplicity, we are assuming here that only function approvals are pending. -List approvalRequests = response.Messages.SelectMany(m => m.Contents).OfType().ToList(); - -while (approvalRequests.Count > 0) -{ - // Ask the user to approve each function call request. - List userInputMessages = approvalRequests - .ConvertAll(functionApprovalRequest => - { - Console.WriteLine($"The agent would like to invoke the following function, please reply Y to approve: Name {functionApprovalRequest.FunctionCall.Name}"); - bool approved = Console.ReadLine()?.Equals("Y", StringComparison.OrdinalIgnoreCase) ?? false; - return new ChatMessage(ChatRole.User, [functionApprovalRequest.CreateResponse(approved)]); - }); - - // Pass the user input responses back to the agent for further processing. - response = await agent.RunAsync(userInputMessages, session); - - approvalRequests = response.Messages.SelectMany(m => m.Contents).OfType().ToList(); -} - -Console.WriteLine($"\nAgent: {response}"); - -// Cleanup by agent name removes the agent version created. 
-await aiProjectClient.Agents.DeleteAgentAsync(agent.Name); diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/README.md deleted file mode 100644 index 42cbd6ba32..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step04_UsingFunctionToolsWithApprovals/README.md +++ /dev/null @@ -1,51 +0,0 @@ -# Using Function Tools with Approvals (Human-in-the-Loop) - -This sample demonstrates how to use function tools that require human approval before execution, implementing a human-in-the-loop workflow. - -## What this sample demonstrates - -- Creating approval-required function tools using ApprovalRequiredAIFunction -- Handling user input requests for function approvals -- Implementing human-in-the-loop approval workflows -- Processing agent responses with pending approvals -- Managing agent lifecycle (creation and deletion) - -## Prerequisites - -Before you begin, ensure you have the following prerequisites: - -- .NET 10 SDK or later -- Azure Foundry service endpoint and deployment configured -- Azure CLI installed and authenticated (for Azure credential authentication) - -**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). 
- -Set the following environment variables: - -```powershell -$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint -$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini -``` - -## Run the sample - -Navigate to the FoundryAgents sample directory and run: - -```powershell -cd dotnet/samples/02-agents/FoundryAgents -dotnet run --project .\FoundryAgents_Step04_UsingFunctionToolsWithApprovals -``` - -## Expected behavior - -The sample will: - -1. Create an agent named "WeatherAssistant" with an approval-required GetWeather function tool -2. Run the agent with a prompt asking about weather -3. The agent will request approval before invoking the GetWeather function -4. The sample will prompt the user to approve or deny the function call (enter 'Y' to approve) -5. After approval, the function will be executed and the result returned to the agent -6. Clean up resources by deleting the agent - -**Note**: For hosted agents with remote users, combine this sample with the Persisted Conversations sample to persist chat history while waiting for user approval. 
- diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/FoundryAgents_Step05_StructuredOutput.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/FoundryAgents_Step05_StructuredOutput.csproj deleted file mode 100644 index daf7e24494..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/FoundryAgents_Step05_StructuredOutput.csproj +++ /dev/null @@ -1,20 +0,0 @@ - - - - Exe - net10.0 - - enable - enable - - - - - - - - - - - - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/Program.cs deleted file mode 100644 index 3c02a4cec2..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/Program.cs +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -// This sample shows how to configure an agent to produce structured output. - -using System.ComponentModel; -using System.Text.Json; -using System.Text.Json.Serialization; -using Azure.AI.Projects; -using Azure.Identity; -using Microsoft.Agents.AI; -using SampleApp; - -#pragma warning disable CA5399 - -string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); -string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; - -const string AssistantInstructions = "You are a helpful assistant that extracts structured information about people."; -const string AssistantName = "StructuredOutputAssistant"; - -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. -// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. 
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid -// latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); - -// Create ChatClientAgent directly -ChatClientAgent agent = await aiProjectClient.CreateAIAgentAsync( - model: deploymentName, - new ChatClientAgentOptions() - { - Name = AssistantName, - ChatOptions = new() - { - Instructions = AssistantInstructions, - ResponseFormat = Microsoft.Extensions.AI.ChatResponseFormat.ForJsonSchema() - } - }); - -// Set PersonInfo as the type parameter of RunAsync method to specify the expected structured output from the agent and invoke the agent with some unstructured input. -AgentResponse response = await agent.RunAsync("Please provide information about John Smith, who is a 35-year-old software engineer."); - -// Access the structured output via the Result property of the agent response. -Console.WriteLine("Assistant Output:"); -Console.WriteLine($"Name: {response.Result.Name}"); -Console.WriteLine($"Age: {response.Result.Age}"); -Console.WriteLine($"Occupation: {response.Result.Occupation}"); - -// Create the ChatClientAgent with the specified name, instructions, and expected structured output the agent should produce. -ChatClientAgent agentWithPersonInfo = await aiProjectClient.CreateAIAgentAsync( - model: deploymentName, - new ChatClientAgentOptions() - { - Name = AssistantName, - ChatOptions = new() - { - Instructions = AssistantInstructions, - ResponseFormat = Microsoft.Extensions.AI.ChatResponseFormat.ForJsonSchema() - } - }); - -// Invoke the agent with some unstructured input while streaming, to extract the structured information from. 
-IAsyncEnumerable updates = agentWithPersonInfo.RunStreamingAsync("Please provide information about John Smith, who is a 35-year-old software engineer."); - -// Assemble all the parts of the streamed output, since we can only deserialize once we have the full json, -// then deserialize the response into the PersonInfo class. -PersonInfo personInfo = JsonSerializer.Deserialize((await updates.ToAgentResponseAsync()).Text, JsonSerializerOptions.Web) - ?? throw new InvalidOperationException("Failed to deserialize the streamed response into PersonInfo."); - -Console.WriteLine("Assistant Output:"); -Console.WriteLine($"Name: {personInfo.Name}"); -Console.WriteLine($"Age: {personInfo.Age}"); -Console.WriteLine($"Occupation: {personInfo.Occupation}"); - -// Cleanup by agent name removes the agent version created. -await aiProjectClient.Agents.DeleteAgentAsync(agent.Name); - -namespace SampleApp -{ - /// - /// Represents information about a person, including their name, age, and occupation, matched to the JSON schema used in the agent. - /// - [Description("Information about a person including their name, age, and occupation")] - public class PersonInfo - { - [JsonPropertyName("name")] - public string? Name { get; set; } - - [JsonPropertyName("age")] - public int? Age { get; set; } - - [JsonPropertyName("occupation")] - public string? Occupation { get; set; } - } -} diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/README.md deleted file mode 100644 index 4c44230e18..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step05_StructuredOutput/README.md +++ /dev/null @@ -1,49 +0,0 @@ -# Structured Output with AI Agents - -This sample demonstrates how to configure AI agents to produce structured output in JSON format using JSON schemas. 
- -## What this sample demonstrates - -- Configuring agents with JSON schema response formats -- Using generic RunAsync method for structured output -- Deserializing structured responses into typed objects -- Running agents with streaming and structured output -- Managing agent lifecycle (creation and deletion) - -## Prerequisites - -Before you begin, ensure you have the following prerequisites: - -- .NET 10 SDK or later -- Azure Foundry service endpoint and deployment configured -- Azure CLI installed and authenticated (for Azure credential authentication) - -**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). - -Set the following environment variables: - -```powershell -$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint -$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini -``` - -## Run the sample - -Navigate to the FoundryAgents sample directory and run: - -```powershell -cd dotnet/samples/02-agents/FoundryAgents -dotnet run --project .\FoundryAgents_Step05_StructuredOutput -``` - -## Expected behavior - -The sample will: - -1. Create an agent named "StructuredOutputAssistant" configured to produce JSON output -2. Run the agent with a prompt to extract person information -3. Deserialize the JSON response into a PersonInfo object -4. Display the structured data (Name, Age, Occupation) -5. Run the agent again with streaming and deserialize the streamed JSON response -6. 
Clean up resources by deleting the agent - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/FoundryAgents_Step06_PersistedConversations.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/FoundryAgents_Step06_PersistedConversations.csproj deleted file mode 100644 index daf7e24494..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/FoundryAgents_Step06_PersistedConversations.csproj +++ /dev/null @@ -1,20 +0,0 @@ - - - - Exe - net10.0 - - enable - enable - - - - - - - - - - - - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/Program.cs deleted file mode 100644 index d8a5a7cd35..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/Program.cs +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -// This sample shows how to create and use a simple AI agent with a conversation that can be persisted to disk. - -using System.Text.Json; -using Azure.AI.Projects; -using Azure.Identity; -using Microsoft.Agents.AI; - -string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); -string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; - -const string JokerInstructions = "You are good at telling jokes."; -const string JokerName = "JokerAgent"; - -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. -// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. 
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid -// latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); - -AIAgent agent = await aiProjectClient.CreateAIAgentAsync(name: JokerName, model: deploymentName, instructions: JokerInstructions); - -// Start a new session for the agent conversation. -AgentSession session = await agent.CreateSessionAsync(); - -// Run the agent with a new session. -Console.WriteLine(await agent.RunAsync("Tell me a joke about a pirate.", session)); - -// Serialize the session state to a JsonElement, so it can be stored for later use. -JsonElement serializedSession = await agent.SerializeSessionAsync(session); - -// Save the serialized session to a temporary file (for demonstration purposes). -string tempFilePath = Path.GetTempFileName(); -await File.WriteAllTextAsync(tempFilePath, JsonSerializer.Serialize(serializedSession)); - -// Load the serialized session from the temporary file (for demonstration purposes). -JsonElement reloadedSerializedSession = JsonElement.Parse(await File.ReadAllTextAsync(tempFilePath))!; - -// Deserialize the session state after loading from storage. -AgentSession resumedSession = await agent.DeserializeSessionAsync(reloadedSerializedSession); - -// Run the agent again with the resumed session. -Console.WriteLine(await agent.RunAsync("Now tell the same joke in the voice of a pirate, and add some emojis to the joke.", resumedSession)); - -// Cleanup by agent name removes the agent version created. 
-await aiProjectClient.Agents.DeleteAgentAsync(agent.Name); diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/README.md deleted file mode 100644 index 57a032e9ec..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step06_PersistedConversations/README.md +++ /dev/null @@ -1,50 +0,0 @@ -# Persisted Conversations with AI Agents - -This sample demonstrates how to serialize and persist agent conversation threads to storage, allowing conversations to be resumed later. - -## What this sample demonstrates - -- Serializing agent threads to JSON -- Persisting thread state to disk -- Loading and deserializing thread state from storage -- Resuming conversations with persisted threads -- Managing agent lifecycle (creation and deletion) - -## Prerequisites - -Before you begin, ensure you have the following prerequisites: - -- .NET 10 SDK or later -- Azure Foundry service endpoint and deployment configured -- Azure CLI installed and authenticated (for Azure credential authentication) - -**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). 
- -Set the following environment variables: - -```powershell -$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint -$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini -``` - -## Run the sample - -Navigate to the FoundryAgents sample directory and run: - -```powershell -cd dotnet/samples/02-agents/FoundryAgents -dotnet run --project .\FoundryAgents_Step06_PersistedConversations -``` - -## Expected behavior - -The sample will: - -1. Create an agent named "JokerAgent" with instructions to tell jokes -2. Create a thread and run the agent with an initial prompt -3. Serialize the thread state to JSON -4. Save the serialized thread to a temporary file -5. Load the thread from the file and deserialize it -6. Resume the conversation with the same thread using a follow-up prompt -7. Clean up resources by deleting the agent - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/FoundryAgents_Step07_Observability.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/FoundryAgents_Step07_Observability.csproj deleted file mode 100644 index 5ceeabb204..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/FoundryAgents_Step07_Observability.csproj +++ /dev/null @@ -1,23 +0,0 @@ - - - - Exe - net10.0 - - enable - enable - - - - - - - - - - - - - - - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/Program.cs deleted file mode 100644 index 257e24859f..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/Program.cs +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -// This sample shows how to create and use a simple AI agent with Azure Foundry Agents as the backend that logs telemetry using OpenTelemetry. - -using Azure.AI.Projects; -using Azure.Identity; -using Azure.Monitor.OpenTelemetry.Exporter; -using Microsoft.Agents.AI; -using OpenTelemetry; -using OpenTelemetry.Trace; - -string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); -string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; -string? applicationInsightsConnectionString = Environment.GetEnvironmentVariable("APPLICATIONINSIGHTS_CONNECTION_STRING"); - -const string JokerInstructions = "You are good at telling jokes."; -const string JokerName = "JokerAgent"; - -// Create TracerProvider with console exporter -// This will output the telemetry data to the console. -string sourceName = Guid.NewGuid().ToString("N"); -TracerProviderBuilder tracerProviderBuilder = Sdk.CreateTracerProviderBuilder() - .AddSource(sourceName) - .AddConsoleExporter(); -if (!string.IsNullOrWhiteSpace(applicationInsightsConnectionString)) -{ - tracerProviderBuilder.AddAzureMonitorTraceExporter(options => options.ConnectionString = applicationInsightsConnectionString); -} -using var tracerProvider = tracerProviderBuilder.Build(); - -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. -// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. -// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid -// latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); - -// Define the agent you want to create. 
(Prompt Agent in this case) -AIAgent agent = (await aiProjectClient.CreateAIAgentAsync(name: JokerName, model: deploymentName, instructions: JokerInstructions)) - .AsBuilder() - .UseOpenTelemetry(sourceName: sourceName) - .Build(); - -// Invoke the agent and output the text result. -AgentSession session = await agent.CreateSessionAsync(); -Console.WriteLine(await agent.RunAsync("Tell me a joke about a pirate.", session)); - -// Invoke the agent with streaming support. -session = await agent.CreateSessionAsync(); -await foreach (AgentResponseUpdate update in agent.RunStreamingAsync("Tell me a joke about a pirate.", session)) -{ - Console.WriteLine(update); -} - -// Cleanup by agent name removes the agent version created. -await aiProjectClient.Agents.DeleteAgentAsync(agent.Name); diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/README.md deleted file mode 100644 index 459434bce2..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step07_Observability/README.md +++ /dev/null @@ -1,51 +0,0 @@ -# Observability with OpenTelemetry - -This sample demonstrates how to add observability to AI agents using OpenTelemetry for tracing and monitoring. 
- -## What this sample demonstrates - -- Setting up OpenTelemetry TracerProvider -- Configuring console exporter for telemetry output -- Configuring Azure Monitor exporter for Application Insights -- Adding OpenTelemetry middleware to agents -- Running agents with telemetry collection (text and streaming) -- Managing agent lifecycle (creation and deletion) - -## Prerequisites - -Before you begin, ensure you have the following prerequisites: - -- .NET 10 SDK or later -- Azure Foundry service endpoint and deployment configured -- Azure CLI installed and authenticated (for Azure credential authentication) -- (Optional) Application Insights connection string for Azure Monitor integration - -**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). - -Set the following environment variables: - -```powershell -$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint -$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini -$env:APPLICATIONINSIGHTS_CONNECTION_STRING="your-connection-string" # Optional, for Azure Monitor integration -``` - -## Run the sample - -Navigate to the FoundryAgents sample directory and run: - -```powershell -cd dotnet/samples/02-agents/FoundryAgents -dotnet run --project .\FoundryAgents_Step07_Observability -``` - -## Expected behavior - -The sample will: - -1. Create a TracerProvider with console exporter (and optionally Azure Monitor exporter) -2. Create an agent named "JokerAgent" with OpenTelemetry middleware -3. Run the agent with a text prompt and display telemetry traces to console -4. Run the agent again with streaming and display telemetry traces -5. 
Clean up resources by deleting the agent - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/FoundryAgents_Step08_DependencyInjection.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/FoundryAgents_Step08_DependencyInjection.csproj deleted file mode 100644 index f1812befeb..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/FoundryAgents_Step08_DependencyInjection.csproj +++ /dev/null @@ -1,23 +0,0 @@ - - - - Exe - net10.0 - - enable - enable - - $(NoWarn);CA1812 - - - - - - - - - - - - - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/Program.cs deleted file mode 100644 index b7a9874e7b..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/Program.cs +++ /dev/null @@ -1,97 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -// This sample shows how to use dependency injection to register an AIAgent and use it from a hosted service with a user input chat loop. - -using System.ClientModel; -using Azure.AI.Projects; -using Azure.Identity; -using Microsoft.Agents.AI; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Hosting; - -string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); -string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; - -const string JokerInstructions = "You are good at telling jokes."; -const string JokerName = "JokerAgent"; - -// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. 
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid -// latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIProjectClient aIProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); - -// Create a new agent if one doesn't exist already. -ChatClientAgent agent; -try -{ - agent = await aIProjectClient.GetAIAgentAsync(name: JokerName); -} -catch (ClientResultException ex) when (ex.Status == 404) -{ - agent = await aIProjectClient.CreateAIAgentAsync(name: JokerName, model: deploymentName, instructions: JokerInstructions); -} - -// Create a host builder that we will register services with and then run. -HostApplicationBuilder builder = Host.CreateApplicationBuilder(args); - -// Add the agents client to the service collection. -builder.Services.AddSingleton((sp) => aIProjectClient); - -// Add the AI agent to the service collection. -builder.Services.AddSingleton((sp) => agent); - -// Add a sample service that will use the agent to respond to user input. -builder.Services.AddHostedService(); - -// Build and run the host. -using IHost host = builder.Build(); -await host.RunAsync().ConfigureAwait(false); - -/// -/// A sample service that uses an AI agent to respond to user input. -/// -internal sealed class SampleService(AIProjectClient client, AIAgent agent, IHostApplicationLifetime appLifetime) : IHostedService -{ - private AgentSession? _session; - - public async Task StartAsync(CancellationToken cancellationToken) - { - // Create a session that will be used for the entirety of the service lifetime so that the user can ask follow up questions. - this._session = await agent.CreateSessionAsync(cancellationToken); - _ = this.RunAsync(appLifetime.ApplicationStopping); - } - - public async Task RunAsync(CancellationToken cancellationToken) - { - // Delay a little to allow the service to finish starting. 
- await Task.Delay(100, cancellationToken); - - while (!cancellationToken.IsCancellationRequested) - { - Console.WriteLine("\nAgent: Ask me to tell you a joke about a specific topic. To exit just press Ctrl+C or enter without any input.\n"); - Console.Write("> "); - string? input = Console.ReadLine(); - - // If the user enters no input, signal the application to shut down. - if (string.IsNullOrWhiteSpace(input)) - { - appLifetime.StopApplication(); - break; - } - - // Stream the output to the console as it is generated. - await foreach (AgentResponseUpdate update in agent.RunStreamingAsync(input, this._session, cancellationToken: cancellationToken)) - { - Console.Write(update); - } - - Console.WriteLine(); - } - } - - public async Task StopAsync(CancellationToken cancellationToken) - { - Console.WriteLine("\nDeleting agent ..."); - await client.Agents.DeleteAgentAsync(agent.Name, cancellationToken).ConfigureAwait(false); - } -} diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/README.md deleted file mode 100644 index 12760e736f..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step08_DependencyInjection/README.md +++ /dev/null @@ -1,51 +0,0 @@ -# Dependency Injection with AI Agents - -This sample demonstrates how to use dependency injection to register and manage AI agents within a hosted service application. 
- -## What this sample demonstrates - -- Setting up dependency injection with HostApplicationBuilder -- Registering AIProjectClient as a singleton service -- Registering AIAgent as a singleton service -- Using agents in hosted services -- Interactive chat loop with streaming responses -- Managing agent lifecycle (creation and deletion) - -## Prerequisites - -Before you begin, ensure you have the following prerequisites: - -- .NET 10 SDK or later -- Azure Foundry service endpoint and deployment configured -- Azure CLI installed and authenticated (for Azure credential authentication) - -**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). - -Set the following environment variables: - -```powershell -$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint -$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini -``` - -## Run the sample - -Navigate to the FoundryAgents sample directory and run: - -```powershell -cd dotnet/samples/02-agents/FoundryAgents -dotnet run --project .\FoundryAgents_Step08_DependencyInjection -``` - -## Expected behavior - -The sample will: - -1. Create a host with dependency injection configured -2. Register AIProjectClient and AIAgent as services -3. Create an agent named "JokerAgent" with instructions to tell jokes -4. Start an interactive chat loop where you can ask the agent questions -5. The agent will respond with streaming output -6. Enter an empty line or press Ctrl+C to exit -7. 
Clean up resources by deleting the agent - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/FoundryAgents_Step09_UsingMcpClientAsTools.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/FoundryAgents_Step09_UsingMcpClientAsTools.csproj deleted file mode 100644 index a6d96cb3db..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/FoundryAgents_Step09_UsingMcpClientAsTools.csproj +++ /dev/null @@ -1,23 +0,0 @@ - - - - Exe - net10.0 - - enable - enable - 3afc9b74-af74-4d8e-ae96-fa1c511d11ac - - - - - - - - - - - - - - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/Program.cs deleted file mode 100644 index e1968122a4..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/Program.cs +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -// This sample shows how to expose an AI agent as an MCP tool. - -using Azure.AI.Projects; -using Azure.Identity; -using Microsoft.Agents.AI; -using Microsoft.Extensions.AI; -using ModelContextProtocol.Client; - -string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); -string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; - -Console.WriteLine("Starting MCP Stdio for @modelcontextprotocol/server-github ... 
"); - -// Create an MCPClient for the GitHub server -await using var mcpClient = await McpClient.CreateAsync(new StdioClientTransport(new() -{ - Name = "MCPServer", - Command = "npx", - Arguments = ["-y", "--verbose", "@modelcontextprotocol/server-github"], -})); - -// Retrieve the list of tools available on the GitHub server -IList mcpTools = await mcpClient.ListToolsAsync(); -string agentName = "AgentWithMCP"; -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. -// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. -// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid -// latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); - -Console.WriteLine($"Creating the agent '{agentName}' ..."); - -// Define the agent you want to create. (Prompt Agent in this case) -AIAgent agent = await aiProjectClient.CreateAIAgentAsync( - name: agentName, - model: deploymentName, - instructions: "You answer questions related to GitHub repositories only.", - tools: [.. mcpTools.Cast()]); - -string prompt = "Summarize the last four commits to the microsoft/semantic-kernel repository?"; - -Console.WriteLine($"Invoking agent '{agent.Name}' with prompt: {prompt} ..."); - -// Invoke the agent and output the text result. -Console.WriteLine(await agent.RunAsync(prompt)); - -// Clean up the agent after use. 
-await aiProjectClient.Agents.DeleteAgentAsync(agent.Name); diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/README.md deleted file mode 100644 index e4e3fe537a..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step09_UsingMcpClientAsTools/README.md +++ /dev/null @@ -1,50 +0,0 @@ -# Using MCP Client Tools with AI Agents - -This sample demonstrates how to use Model Context Protocol (MCP) client tools with AI agents, allowing agents to access tools provided by MCP servers. This sample uses the GitHub MCP server to provide tools for querying GitHub repositories. - -## What this sample demonstrates - -- Creating MCP clients to connect to MCP servers (GitHub server) -- Retrieving tools from MCP servers -- Using MCP tools with AI agents -- Running agents with MCP-provided function tools -- Managing agent lifecycle (creation and deletion) - -## Prerequisites - -Before you begin, ensure you have the following prerequisites: - -- .NET 10 SDK or later -- Azure Foundry service endpoint and deployment configured -- Azure CLI installed and authenticated (for Azure credential authentication) -- Node.js and npm installed (for running the GitHub MCP server) - -**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). 
- -Set the following environment variables: - -```powershell -$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint -$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini -``` - -## Run the sample - -Navigate to the FoundryAgents sample directory and run: - -```powershell -cd dotnet/samples/02-agents/FoundryAgents -dotnet run --project .\FoundryAgents_Step09_UsingMcpClientAsTools -``` - -## Expected behavior - -The sample will: - -1. Start the GitHub MCP server using `@modelcontextprotocol/server-github` -2. Create an MCP client to connect to the GitHub server -3. Retrieve the available tools from the GitHub MCP server -4. Create an agent named "AgentWithMCP" with the GitHub tools -5. Run the agent with a prompt to summarize the last four commits to the microsoft/semantic-kernel repository -6. The agent will use the GitHub MCP tools to query the repository information -7. Clean up resources by deleting the agent \ No newline at end of file diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/Assets/walkway.jpg b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/Assets/walkway.jpg deleted file mode 100644 index 13ef1e184087e7ac9fdb086d523a2bac486ce3f4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 37970 zcmbTdbx<8o7$tgfhv06(-Q6KTaCdjPNN_G52pS}~210NT?(XgvclV1sEWh2Y-KzK3 zd+(d>s;QdpsqeH*cc1Uf`{Mg1;FG+JoD2X83JPHQaRJ^}08#*GD5(F^|27ynnEwnO z78V8$0UiP2zl?~4f`o{OjEI1MjE0Pig8E?yNaz@7sObOQ|M!yrs{dE(BcUQ9ApTe4 z|C_yc0Wc8(+fZIGP@e(Nm{2g7Q15*JasU7d?!(*v9RA-11q}lW2mj#`GRjAR`cEJ3 z!@xj)I12|0`%&8e<2e8p6Ap`#LjoRK%?#nQ3l8V^_*_IP$?9%g^=Sw-m$_>&5;7h> z0U;3$Egd}rBR3B(AHRU0)E8+P*{^c)8k$<#I=XuL7M51lHnw*5ZtfnQUfv*|kRPF8 z;SrHh35iL`DXD4c8GrNg3kr*hOG;~M>*^aCo0?mCdi(kZ28V`6W@hK+!3&E^%Ujz! 
zyLJz)SpNsv{{t812QKK3K7d8|4;K`)*GGoIgoUHz zfX9+hLojo}{>=Fu5l1pUx4IjNic1}WYwkLYj7QDAMRWNdwEv0h{~fU4|6j=d53v7> zYZ-tF1NG5)Fqi-_z`29RJAfg$j*(-CzZ>Ty?vpspDtP*!*ul+!>ri#)hlx*Qoh@(B zlquPZ&W)x3;p1*r^z?QKW-q0G-k&r5CzCijRp=oNvY4pib*xYwKg*q_d@>^Tm|jm2 z#4?uxYWbuEjr{Nd9enQ%+IN6Mto>R0WzPN(;SJBEuI)It_gCeuBYoZstvT;?WT>Nl zw~=VPfy*+MrFvkoVhuofZ$eNzpXld6`eSmq{m{P}OUtonF=Q~2Vl8{TCCo99PM%KD zeq`WxY%LRjm;{`maF?{!6h?YNG=e023)|qYlW|mDS7ggMBtU}QUB0JGznrxioq1UE zBPT68dF&}T^%&}ru_1s+nZB!(k z(}D|lkTQ=$dS51^%AkmdTqUumZN5gMUy9Vqp}vatWX_Z;i=$WohnGN%i_8`MnyGR zshL_euaGIJ2|SdRngly34l4LJ&vJ1dT$Y%l(eE)kx~wCx_p>ABx4Rg2;p~>}j&1}D zadk%SI{+zqc@*>WnYmAFvgq%>dgFDRk3YyFhj3?Bym@wO;R#%_y)b8D0WQIEv16I0ao;#qwL$pQtyCC1yfkLs~i4tdlTEM zm0b2*z&qfRVa9$v=@+}Go~`@gY);UVd6VgZwvN+*VzXTtwQL#i(~3)7$8?ukS0#2o zn{Sy>aB(%|`Fe7FvAvFda8g5qH|TkcqxHJfE)R$v4yZ|-@X5F-kx(~WHry^zoAW$$ zTXydCjnO`w6zPt(Ns7iBdedrt2OtKd13&i>K_*rKo2YLDm6@`&VA>j0>!>wBQ&wdr zYc`~?$91z-k_Jv-aDy1`BuI!gQtSc2h?$4X!~?kBxtZmZvq^u8W=$|d3&Zc}B==Ib z>_-V1m7P7yV{`fv1!c{kM@HNlCmVO~t=i_q(3G(dHz)@M%Um^S3TIl;T~T> zB0{7zqW?QP@lJ%}PJg8@PxzmrR@nuPw7shy``;#^D$;S`z!vFf-$tGZdt>_6W&|&i zCyz?{O~bW)_feuea`&6joQRRWxVCl(6Ol;-152-si}LEVv9s^CLPaY~khFynaFW<> zMu08_x*b(i1HF8^B5gGH=>AQ#H#xGwW^Nw;GNymWQFW&JRABs@eqhv}dQ(N{eQk2a zhOguj>y5%Z+uC+0Dx@>DXPn9?PRYoW}d@a^<5W-B$A)BGOZ0r+Y z^)u%abx3-|9Vwc6AZAZcB0IrFM$bH~pwlggwZOrz)r`LASmi-Sgk~;rM3O;3*v0q_ zA5Ok>KX$`wAEb3Mfj;S0-F#`(Cj08!I8&;!2eIrsfN7eRv3ajA;Zi;mZc09 zaer_Xw1R;sVNtr&dN`XU>P;^vE!o(}2d9sTslcq+3O(8Ean?IQlpNe{%HdU&=D*Ep zXHGOE^Z*~!X(J#^IMoRc`pi05j0sL_unLzt1%;kCnH|HazS?HfBQ%2MEk+w5Tn!1H z)7QOf@U`5B>wHbXK-2A_P%_G;`-b#H)Sr9ArMZjl&wi`QDT=%JS1so%9%^m8;=#Sg za1X=zfto{=T7PA-1-Wd5*V(U#=RV+uRJaM(odCgeVetG!mRAldEz8Mrs>-y}?k_V$ z(o5}Qx-vMWqCc-9Sn91o(Q)?RhUu3pRLAa@(!02=PA=lR^OsA5X>UoM2-4j*+))jD z*wff$cj|kNQ*Je9&LPipDT7Z`*jLdo3!keN4^;;`g+r(cl?=Z+{;~*EvER@Be4uEz zI3{Qro6lkBi#$5#CnV&w#75e5WTX;Lvf&Z|Q=!@3c$M_>cwZc2ogJKnt)C67d~n)^ z?n`koOv&>XO=VbZf|nhte5fM`X{D;|4n%IA%1g*hQNA*MMN&zxk-7$6Buh(ZXk(nSN7PE7=Q?9*hUd|lR(Bn(Txf^qhCTOm 
zfro3rqj@dg<&6>&dfV_d!tu0=42Vuj3seZQTpQFP!8U#@@(vJ@*>Eo(f!@6|yer%E z1XnIfVJ`EdMF-R$n$_|^CUeJ45bq;1AsySWmb3=$w|XQY?|_9WKM0qE3xNy=r$w+d z2q8Rx$nMEl|8~CMIM0)J!8hMKPXRKYaEd|k+^X}7X=E%3sBq6`I zmR-YhW1=mvSo!THL7`Q|q;fFz>#?VPPIRT zzq3cg#7J>L7C5 zDH-0>Lh?ZRn;40()$haJHK3potJ^Gn=ym_bqDL%h!A6#>@rG5Wy{~O{WGFZ1w7X@P zc#@H>emQ%=g?(hC)-CO1#;lns#NVcw=xK4JoT!hr3+LqTy8uU<>2noWk&H1nS8aR$ zCy13+48nLAYog$r?(gIuyY$1k)64S(j&%10*87%a( zisTr-FDb!FWt-wsjW6<`abQ>zd*h)~aC86iOXauBPQeP=W;Z7PB{Yw~k~AG$f_`b% zjgrZ?8f~TF)=sYa_W7cD1<=@7+V1)8n+IRm`pkf5VAerg3rc`>6CT)E)5681LiV-R z^K7hQupY>YU%7cCS;cJfC3+`YV_zeQ^|^LwV;eF<>20=#g*!L`A4sA@TIGF=CbOAN znXp4ACh!hG-qwEuVLS~?&ZOD2I6neu{Q>(`P~E!1-0gpj?{z$@?lExtnldhTos|d1 z_iHT8&A;c#2mq@4aesgtHL{?Xy#97%5b*cAu8~Rg4Yw{nQG17vIlxL=tfFpb3N$wd z-d2tTw(xFel9f3ZG^k<9HscmpY#XR*Wpl6kuvZ;C^Pdpp6kV4H@mi_m4m~he<*2*q z)UDB4DpFFD+t-%ai`oC;wvxZ;f&qXV!(LWl7+qHWm6YfkW~3@Jj0`C1YZki7n?7wf zINYRIX_@+?Gvzi^N2#I=jQEnxmssUX##ZJ#-Cr0R8QLAH=kBxp5Qc(#W{M;R_$^jA z8uVh?9ZN<(#Ve3CzozBpEmr;>Bz&4Nez*faq1ArQ|2@RBJ5d3eIMMWRanB<&F5>Yo zVpO_zFV*?MP0Z$y3Xx3>S zqpZNV+f!Ujk-Nj!z9$`N-z|`n(T_o71fRY7PI#8+H}kOo<8RWw4{}pJ3T}ga&sL?2 z{ooDNjz&&KjyS@dl1ef4>8e{*N!hv^>RT}lqt}b5gcX|%jeCD~c%oX`%ZhxWxMhIa z057_m@g8#h=k}p&Y46!Yuq=7u3*Q`FmP~)L(58;j)<)<%VCn3+?@?|yK17bLUyBRx z%hy3=c!owXT>UNKro~S_+YP@Jjg(j{5MT9)&c}}&&fIV4jrkY&ClGiP)SAQCi({xl zx-^9YlEWRhn^>K+oM^-`$mxg4_1DRmrf-sny_^P75on8+G(!}U8_?_Myxil_leYvs zeg>C0T*1t`CJ*iyU4#Xoto-!wO(w2wh23wS#CXx4rOu*EyB_e3Iy1ja9CS{D`X(cl zXzXMbF)+B=$gmU=OZYr|&Fv_)`E2`nF}@h!_FY}y+jG4;B#zCi2)fzMPXol1*cP;A z-#*bPFkpjn?qY0drmZhPdVvX7#jt2k=D5g6{WWzFtaK1=o<(}^Tf0C9-OTh3!1w>H zd^BoN$%LYM*%Rbh9)67Gxdp*B>6y@9eDKfrY9zy+o#EP7f`g~=w<+XXhP@-F38bBsB~swSta3;m)T26IpAbbe`avTC z?kHWzxfAhTXv6opU=DSI2lQ<_aZlaUx$CIISiIo1v^FK{8=1Lx048OOYi1)^`f#bn zi{fvcJ#(=&g5nE5Dl0RrlBNU@hHBAwtvtvKcgSeu&<$OM$4r6bLUoM%t#eb2pLA=A zs_WN#>ZoZWZIpIFIodf39a3Bifmgeqn?Fa#n0dDlYW6FaUZ`5r|2@8xXvLCN(2O2A zUzsU;bf&yDa6()zMV6mV%74Xw#5q)UI-|J`+y`;lU4eeUb6PY_pebR5 z+oYkD4vO|I{3C2*F=eNey+2VK-XX|u&J*tZOD?BpUb$^c# 
zVR^8En~TsJBb|l8a^?S|<0S0waNyw5Ib|9s?~qORY_K!x`40G=(U`WK4XGv@L{A-b zOaHm$*b}$TS@k>~l|dff0I?i(&+;?|>TlofA>3oWvD)*yx3)_t(4cQZuy{F=ZsaVU`6ix_^11q;0H=!!S6ybk+sy z@m`HJQGjtT897=L1G#)STs`HeVD<37VM;=Ca`L_d7@3?dWPJyyv&^FobTcdTX8t@8 zr7oK%(*b>h8FQBOsaD8>+l90t1lo*hN#s5MJTMRx@H_$HM-#v+W>E`yIi?(E6AX1W zi}Zgn=~to}v)~pH&c_N1quT1OqhJoLiZ(j-EG+q_%cv7>3 zn8OI}3Xgm!y~Q|}nHC|W{AQ(G6by?H4ZR1>emxoGcYrFFkym&d6*T!;nHZ^y2~Tvt zdQ^+ZDGO1;bZxvd#<3r(S(r zs!{UDqOkJSsxn*;D@V*O`^EvzZ)XqJhT##nj)%RrJZr$aU-DyWn&|WWzzpRBei-|94~|0GHwTUVYz>tsDqJl}e5{yi^uOg@kAf31IV!cU5h# zwOW)M)jrdf<@t(m*9tZrEA`iaUx3%J~1!Gw?FeBVGf>q%>0__)3Wcz z^Qb;P!@5_cX3s*|4%{a_o!qBEo<#+TLjcgexv%N9aU2jbvAy*@g(ZkAov^}JEbOui z#ZeljWQV`OM9wP?Bbt;k`2B5sIPu$nH|{K&mr6t0PuE6_V{#9<*IF;vqR|@CN2;jD zTkWz`p7M?zaJ{GjU0~n!5jU+TnN}h@%mw=;eRDqv3>TzH9ox%-naZ-Rzhyf6Y%|*4*E^lhljug9PW5&?)Jz&V^yYD-;hkP4 zul}6CZ=?Mq04JO`v0#M~Q#Kb9lW~vC7Z{m|87{Fu_RnqQ9>S{?9Y0xfTE{B1l`bjoj z{pww;TuCJ9x$D_y6sUim8wxz_?0kco)W8iWG`CNI@b1q1%x;daRyH9t>e*oNdf}csxCh&{G)lHBH?qoP zKTB9Vy7uA=Mz@K!G zGb7O$NYwiJwCqr`0G(-RC#1d{HxgCi9pHv@|Ii9#2Cxi>)JE$}ZZjV6;I{89=L(l{ zw%9h8y)e&YU{x@ey9}U<$?x8>h5DF}KmY5BEm)j?-iWwMfY!IPxP1qlfJO5z(Z|ij zcnua|j0g~|r`Cqk%`XKU8q}oXg6l@04#5L?*TC!cMJJgN4YF!+A4mEVqS2LNtzy#b!N(9L8!T4??c?8yqXj zFIaxWuPrBuq-}NTzkE*NTJo(CQ!0^K7V6uxno`UvlgPTUD%Ank+F5eIkd^XR_U!at zjfLihl1e-uyXB1J)s)$-7(SCb-xA;uh5ote#_Kg6j zUAhPC8_Ft&Q7vIpPZ}#U1EV~rNel+AFvOGPPqvvC6eVi?i<%Ymy zXy+HO>R#-hnjWjm;=j~sPb#vQuk%|!?~r!&yFbPLc9`~yHymXRY{7MT+^;2;NDfmf zqbvLdTp7J=((BO72yg*j%f`jzMFh9nJ#oJSXsz^9PDnVv$_4<+E=5aR>eb961D8{a z_SE^pS#>RCBo6s2(1^RAes@1Ajbx8dt>sL#+s{L8HVp%INHFB%IyaI;6M6(p9lPQOr^HFewHs5=KNbvWRN)vYeg#Sk zOJP!-wxHF-uMM2)`Hp+F`po41J76omSI&;7OQ8AV%XlUJ3KYFo zy4s=mWiVf`8vxx-d(v%8G@Ji=o=3A%1?j)Gxhh)rw(iwRlQqj*9}&W8m8dMpoQ(!0 zc%ILv?pPpvPs(BA?6kaiG`w=jjWo$mAi4R%lG7KjP8VvGwB^8RatIx_t8!u@_29Kt zG4&mB*70o6bVM)wK>uWuIG@^qY)cgY9H<&hvXdKN5X*TaDumpw9slY<${*mF)K8ou`vzuJ^<^Zo{~T%GS*e)K?@=S|E2l8i!%- zHcuFVik-ftA9Wt;kDspmNIZ)I(}xBj*kAq){{D7lQH%<2o7vStI5UYCLusO6MBPD# 
z+Uvl_nPa7Df1bHXye8LRV0%?dzMUeDunn}z)COkMdpT>7&&8F@KG>=0+&JIxGi9>S zic-w(kfrm$tOR$fBks`sAq`CS@#{paZx`sT=*TKx3$cvr1ILAnVjMrso!4$0Ejus` zGq$F8gouF~{iH*R(7m*{JKHw4N=Ajt@CB|I9r<48I)%t++zQ$*?g{_71R?cfdaElG zLYaBeO#=@(AgM|w7?kqZAd0{G=6|-^v1hU$1 zH25aS^txlBDpz{+?lKPk?5A8)7yZ`>WhGv>@q>}i zP5V{ehwc(}SkNy}uM)<maEKbebH|c8F2_a`2kXr_vnH0U+ny*lP8og~Yj=aVYbI`i%OE zvcTq?M|2>}WYr6kg96}Jkn*>(Slh~N6Ex}1=fHm$e@z4GbaF|KkqokSL5{Ljl*6{U z7OAg{>sy@c`b^=50l0_n0F14=^l8(%a|_s)VcMBMRk=z|4KJ=OmIn5BKz7itz)yB& z2~Vpp;D)M6Eo)evZGxyK{pklo@|3}&Ix^a%4*qeUW1qM7ZJ-#2RP|nzWXC4?5b_RB zC%J@wJ0&WDe{S4~F5bP4+h~ifMHh+HPs|5Q{ghNbb^0$oandoeHs8|` zH~iI)F?MpjR(n;)5{3P=CGNqK@~y4uu~44;bhlL!Q*T7ac7RNssF@WChbUpL?-%h= z1CYWT&xPsEA!I&jrDKvh-+%S{cMZNi5eA)meJ5X6 z!I{IOErNVh#CBB>N^{UlSsi4(eyi1{zu}h!Grvs;)iqCygA+z-x_XJ2j@4tTd@P5k zp5_m>C8KwM30HU^xni$XVVWjckc@Jmyw6DhZMs)s%3*kXT2G+h&6-MogAWL>A`|69 zQVR1pul}jnF3_C-zQ&Ejyj6q6({PnqJ!b+(jI7Q1bfY0!;&k zegfD$O8STi8LROJw^eQ`vkwMZVmhElt?rQbTYNm=6ygLhq4z& zKqay2C0RuP@z{LjUG));xHjq8kNfa+4*|LJMd;=-wLuG-$>=_li^jv)DBV>uBH<=| zyywo%+L`k& zv`e=Ye&}Rjbo9)&V_xhr7cJae*eLzZm~&Rbw&H&n%7xVjZF3hBo&rzbm`cL8mH>D` ztYRHw_R9}Wlgd!WFieSYYb9MTa#i}-LU7UnXCIwyZ1bC$m$SV6KDT!;&q!eG&&OWG z$3ZhPPxKU{_WMy&y*n8Nq(9%eh2OdG_R|`{zu3a5z^v3-57Op-EYSc2u zgUbOs1FfmwAXxI5=40#nV^$qJ_t~!Lkd_32vK_cnUquW&1YOdl)647^piVAUzW3@^UtuzO*>!h3tTGo*f0kc-vRDAg39^LS)}8yEk$Hn+~!8M zKGcoI7ILK{?b~)6HoF|!_y%#xq2>*-S~2hdbKN(~xm2P)n{&y)i;M!70MgjKFBD`7 z-(P_ZNoGpEBkSe6CB`1SUXR#f(bQ6jfB+GJVynx&MObNPBFw4yoS711LkiOlODC0= zYsbZ>IdU$AI)Yx0ZkKrD-vnl*EG+lg{_^uj?audjiGxI=9B8{ZHz;*cGMC1Gtk8D# zhriEAekFV8=VcKDg}D(-mPKW-VJOk*nUV!{0P~K$ZOCM9ZaLnjs|ejHUQshoDxCIj zi(;SXEuV4zJ%yi2{a8LH|YG1D5yp@v_e?v4!GUx+61N(fCf zn^Xm1ziVM^>RuXWbfQ&~TouIq{Wp$Jj+#vf{q--OSkJ}eo}v4{W@WM>9s!y?w@Ofo zopBEH7Tt0Bfmf}4f@EvemW^5+3?g#gVbhztzL6YD8O?kjW(`JLv6R@B(Ofo672Jd4$*;$8?*Qm15&pe;{WiNYAGx$MAo48*d4)7f z8-4yRKBQ3IEg5cjcp&o&k<>V1q18%Z#lCHq^gjdJw)!S-4 zX+~bxl!J3rHQAgtj*In*Uh5zyHiDH21t^oq&VM#9IM*f*BHNC}N*0=PJZGYoDY`eq 
ztYTiw6C<+g`y!=CtGSI9&LkPHQm<*9PGZn+)9d81`-3{wvmThAyLmHxxm^sY^KFn( z{C0{72sa^2$6MLM`nV!XqM2`kBvYeya?AIg?JYSHb{S(0PQqo2{G@y#ufWP_Khwe$ zY#H>e-o9PyW;6jJ4+Q&vVljGG4$2pTaKB>z*oe4vk=@49;meoaFC6;l%Hz^^x+<@y z3eHv<2uEABG=_^1NmIYk;cBS0;;Y4@=qn0w-k0;vaQ`sLn0i_Dzk3G|m4NbX!$i)A zWFk1_Sb{|4Xqy>yqOE*eKQ{R1#Ywr2taiNFR?b=dH(mWwqDhh0pVQ*ph$tubnUHuU zk&d23zy){})+u-jEuft?GQ+@&Pm2geWZP}m2Je92pcfv7VkHAi7VkezS`q6?Yal-A23^tJi z$gjWX#nN$093ctoU;!+YlUDOJ`HuzjEFV(UkAeDd{$}7v*XLtg<$~}uRPjL(&lXWL zVF|{K>B#XE6pOpKE}d42RjAyRTUizaeo^n$Y~-mpp0JUFR7`}F5@M^hG&9QZtuUy? z-G4>6#9iv%7D@|FiAUK++TZq$|_aozM0N*2XC2p<+!ku_tKPG+AIX2Im^L~ zjYq7h0XDfolW!s_u{~;U4PO^Az7C0%5rG9lsq1CAA@xrR5P?O9gOnZjlwV2{5}zEX zHmEbEq&pK)c57n0^DAWWR58}!#4ZG&%j8P*%QsqMhn~-=-?UuDDFZ#p7uHR$j;pqF zZtroOxL0~gNlm5VF5Kksf8?cbTDGbWCPak5T%RbnnE8{<1uun5UH^MajtAu})?)J% zjkWWB-LKeJx8J~6)pT8?1Bp6!-U!Bs8aN6Lt^BmzBcNy|0rR{NvAgOok3JW<(Th#| zTajwW7M6CnEIDZ~?~G*(%iV1CZA-09Pz?N(gcT^br$sqFw)g6PpwSY?tUdT;`8+wl zPo3cA5=jAi3OG_e1|?V)XNAT%c3-B5;jz_X$WdAQ0oVouh- zNn~D*Fi+abX(^s`wpV&zhv77%nyhE%@2NRBNRbN>OTCAR{=S4P%2ik{jJZEa_oXwh zbo@Y$@b=MGckK&opD8pr@y^`C0{EBo8eb8da^Z@e#2pyOvL{TFRIw=?zXPynAXzgc zj80d>GngMTi$J$Xw9da=VOU8< ziM~gGZ#MZ9un>R^xrk(CW_7M)sWc5_HM!Bo?DKtDq=?JqIoKyrk%b``#non4UmB64 zeW`ReyOPs!jyGOe;`J&6GVK{^MR$Rh+v_@10f zJY0ah;--4a65W#&vDw+l)x4FdykB^$eendt(tF+khg!cK+%G@H=#(2}*fmsaeDT_Z zOy$ubDwur9Dcond4XgVeo>@Hbb?4rjJe)_|md}wby_i%V;tHL=6{t=3s0ECR$YA!u z{<3^q!_yMxc;BiDEsoaIu%7RAE@EHUdD$upxy$64n%mAfJ=@!6=qop{bIQNz5{}v_ z%dwhcXr8FUZS*pMyrM^Jx$w~Djcg3R1A4LKwm`$ynr)4UZr7{uR5N934b}EVZ?sUK z%;XX-05*jIofUq!+c^*P=y_;65;%C1OvChQ3{ZOL$kn};APeDag7~hgoHn;-xpzR% z;ALzD-(AiVDl5KPs_*BOWEz+ccFTR!G6=!V1R?#QGbB%$4M}d^c{ky9e*KAN?#9lN zrE!wgINRn>QD3>FSW+>yD(D-uP0mcz%Lc^xthLk+*j!s_ir4W*Ow7pgyf8(@ly8n~OIseI~6;jjs-LJy|EE$ z7V!h;Bu@@kEtM(GZC>t8hI<$(`=aCuvea!@U;AxFlSoE?U3w9>#2ps4ZV$i!JXH0G z(!9-0wb5*=@Z`&+9KXV~$x$G<<^4mZTFoypJlKU1z9CEIto&Ug)-8C?p|6XJ>;i&L3`k_mg>OwR!zrT2Xzgo5-Dqg);Zt-9gmha*%^dXcr8gxv^i+dPn;$ug@tro=xfX75EB?L% 
zx_JU{bwfVOdAWQZ-NR3-l~F;f%N5eKAZ@wb)rce9n=84R&pN%hGw$@ob%3YBxye#D z`XElFo`oOQ55ybiohtJA_*{ET=eDam6SkSsmXfF#vnBRo3H4$z6!#m1^)BPS#L46+ zy-hXNcm9Hl5~?gemD{0m%|azzbrlLRPA`1x-axNgtfHIs_^m!%m8Wa-jGeZN429gR zk!1=MGDxz(@D4y9d{8OQ-&WG;yBbH8QHXq-X2h48q!W%Nv2+UoH?O=y8$`L;9|SCUSW&_V(3WyP@6qkDf|J=ZcL^8X+ zZnBfWPTU_-V4R&@uoM{bxSKkHIkZU@|}i|I;ED_Cucg@((3~ z%9(SSd*X<<8Ibk&7iS(^xT&t+tJn)R+h(2DROgb1SY1SD3)>2*WHBH%$m!@subVDb z$-NU%%uFmy48G{6`m8mWBAeaf=PTt>{xH8Xyo#aw#kgUcbeCF@z*L#K22k0S$r!B9 zf~9>}&lzhH(eh&t4sNNP=T$}ff&Amp5*e=Z@FCVmT!^fY`|8-`+o~&tL_Ja|z6wA< z2MrQ*?c@MIx+6H(8`-?$izxMP(CUs7N2yEJ?^97jXtyaS6QLzPd`SKBu+Rz7Mn-1t z;7UI1OYc9D_-OaDs9fh~(Ff5d{B)iVsmm}{8LnFCc>M$jZ2(MhLm&)xf6R3_$dcO}~2>%j{&X#uP>x8{gIArCnI?KTxWlWfKPrc&$BECEV4}Cy#K)9Q=AZoUq zQ&&eX_wlELfzOh`ucu$XaMsjIdCdu6MAWG2wW$98c+wuJyhl-o#-> zKDeXA737_*g;45-2yU-49OYw6zRHNPL6+9)u1%OJQ}p1XIX~1icywn#$Iw-tG7Q2jL+vR=L(5y z&ga3|60n=%tzMd~Rq3-_XBp{t{#Lchlby)=BvsQXrr8 zXRZCb#G|3b1h}>`_aU<<=yWX&3_P$Ha2DIF5+iJ*snUIX!=I~f^{#|o?S73mx4Wcb z>c1-cv}jnOEg<>15#5kx#mY$$Z{3vETZ^@?p)#IR8mCxeJ=9K%GpfurOZDPfShPbni zlIy1m`LCC9U2qbN#ITzlR8-(rtL=3LH#W zml(vDg;0~_)%)E8A$;9LYc)OYWqiRKE=-ZSG9w_WgU!I(7{mFGa1+w{vcZ=Q@>wub z+!|d5c>p_7_gbv?Q|zm~+4j6H1H<_TC0RH7V#3wFheO_$U*2~AA}X`cx3SVVsMF%n z_C_+zy~3wQcsFW4ewU9fs>m^9o)3`WT)M*rKak%IEqJ^G!tObgwjO8uPF!{kMtPcb zX=KE=6`@|}TsJ)!Io;w7(c>Vql91AZwogY#c4Sk(81Z_-ERqQ+V4^>cJlYFwLV+b4 zvM%UNls z%veYH#>j8J?e>ky{~8MXLDmbfDcQ}qdXmnu*z811QT<=Tb?yb^5c{C7XkF`)xBf+h0x$giWoJGjLUJvQ zAoGN6#7kts&{bS-m|85vAy|yeD*FRNlPv#eX$8KQ$xf+GsHDB*y@@USY3i!`?C|mz z;DU#d9P6@lO`2YD&glXimblO##;?N%R0&N-##-BPSVvy?O0ZW5olP0C7J}!RupAUz zBwT$!tzEcEdL%D~+FR0Z4`ol7S)x+#*aZjEUkrtT`+afP(Gwt*uUm)boD`@i1O%l zc_VlGK=*o|Tci0F2N15A3bq}+v;r_cv&A<+Xk*gtFbDUTmnb<}qa#Jl2#bv#Crs9Q zt1+us?HctU7G73O*O(6S`{L)B`EauPEMe#}_~>PE8eL+Q@Eh_y;Fbh4>I$h%%hv`! 
zv266N)j+7KwlPBr4(luNd6?BAN35$9ycY~BN1okhy*4CuMez@P#tZ7_=TALl zroQfs3cXF0^Cr18r7*TR4GmAoN*HJx<3R8-^H$qyu*wIVtGo6z>&(0u6aJj7hNg>e zUTh{H?O^mRI{As-WsqbILjQ|rC0<(wi-vMY3 z2t8t$Rqo^FB18o1*c4{F{a8UC!Qm1IdgLZ5VbmQQZMJSlWtgquia$veZC*k)**MsS zpS02y(+w@-=Up*G&~UH$OBV9fW;7BM9Pb{k=HlpX>3CSIpo!Vjil-FOaFTwD4arw@Uu1x}97Y$6hl2|!Z5djWWbx?Ji zCQ$0q>NSL!FvYdF%u-k;PD)Hx$ZufRf0-^<8&k`VEe~+m?3i#n8XgpZ7jd}-QmCve zT!>+SmUnB97H@&XWyA{4-i8LF!Zw_S_#mXUY_7IAi3D^c{<3Aa2QOJLS`yyfr5snl z(-Ec)p;6QDfeUd^&;2o{5(wJ6=W61vUtf;e#!D8nNM?X*=oYYh|D?9P=CNx=OU);0 z&p{uuPuk*65|X1lmx`gRqNgXuNtK~zMDq&Dt}Qq*CYjmG0P|Ct)1uFQT^VCWB!o-a zby~T1nGEP@NG`R#6ZE@WS?r~%w3KM-A;>b5!C#0!JLIn-oxW|&jL{j2`SM*(lkqD< zERw*<+*^GS!cW>+l66DDpaWY;sKF7pg+^6@o7)Ow@g$H&Yk+yI>uGXAWBfT%`7(DI z&sc_ri3AlYvrrTV0OD(!nB{YOq<7{0rWC!PQULNiKOr$C7$pmE_&y*mQk}R^0WuV< z=zocE*(cDp!(kVBK*iAbKIQ9Kyw~)rA7$8mfFP^&{HV@ipY9}X%%Z}AjG%C#%+W(lPy#Rno&U9vc?jmrVAZ3dTtLC-f7n-6Cz5<^Ms7m(i^62*lNQNig>wX z|3>$5=h^*tJc`5Q)NSK#_PaNiX;LKUX!5Gq$E<>#)YB30aT9QB+a0?k0-XZwl;iN9-GA!>#oN_z( zUa%%0rs!v*uj7$}!u*)5IDgXA}Zu}nr zr9fK0ZhUQVZm3PPo@B*=-n(*7VUOV%=sN!ZFDiq?v&%Zi8AZAT{Gj~72x2qGTrW>? 
zRyB_sT-#hlZtodBYZi0WesPif{{S;v&mn_JsG=5kIK{qg4@>b6j4rRVRY+jDha0CV zue7dNPD>1v(4WNgsk|EDFC>z7p6C&^RqmNKAY;cl*jFCEEsyK^X{gP8%6L^S!a|O=bOu6-m=pS1?)I=jYt^D_wHiOw}#j ztl&sxV$Ud>%uvU0IL;5uO8ej*L8;G*EwvVnmK~Q9=ges4U~&iriT)g)PhU#$yPp;d zZ!p5(%!*NlNL+@(`(rrtP$x;F9_p1ry5o+synbREg?3d(v z=ZN*IEhA3*A+lK`eB2O|5;$?UCq3{o3Gdq{Hf%gtm)c9Eu!_nQI|{MqXaj&r$;bfl zfH|)hyYX<=br7;my5153hGX**KTLqw@A8vbn#YV}wVq+-6qa(#s!v{e`*G9jM0jfI z(|3E{U&z1h&R2G{@;%|aZKubn%KPJyrA?a^$pvx;0Q4JF44#=i>mFYmTWPmflLDK} zN>#w*?cKCw9tS+*9c#d+@x=CW%<^2s0bS*{50SXa{W1^Nky!WN6IR~G&6asMQqh9I zanO^`(}ULrx23}=H>rx^88xPd(pH}lt*stLdBM7q0p<;yXXQM5j(TKd)^4ToE+w@| zBApWMBE%NKEO6Q9sL9*u)1`dKJ}EuMu^cf=A|jGD?j28G!{#SGn63}vm&T1xL!L1K zE@x*dV2T@QQ=8cno-G0z8s$Q*V)_2jxHr+wo+N(a6rqb_3H>{X#< zATr<_a!x*x~}F+qf&aaCtL6 z-^RagtH3!_@XC!!5O!9(iof4u(V-}$zcUNr*X<3aYn~AcaV5Q;s~$|jzEaG1&If+P z^ZjdX>*9=lEWfj}h?|$dGWldHp~wfPOp<+i8u`cLsE@(kErROK&28=EB4)UavXYRw z;{^2>>yPSnG*5|k*0)-IqYJ8-q1`MotH*^416s-cNa1}*6|8PVQAk3V;RbyP{W?5KD{e=nm;N#p(W&|M{*%W zJfEGI;E%ij3P0dP_UFrVKiWP$W+^t9mQ_W^?*{zjWMp8HK;!BwBgOv!6K`*4jue{R zB7tN(M5S0U0I4VNCyw><)G~}r>Zr|H#T-IaTtGieUhS-EJwcj}^c{@hXdVBsJVJ5kMr(LvBYEno9MU<%< zgARHg`St0FiZ2`L)|V_BQh6e~eb(%!A#fMw&N=VcpO|&&Uh2s!>f#4%t*})=mPck# z7#RQ<<0A!0>6{#Qt&9CJXIU3f)eV9t&zTtKDt7H4oRUZ3(*$JKgy?=C@idxk<15IQ ziX;fL$W;N#oDMl85>MgHboxX(wfBbQeQITz;#CfcqjuxY2qbgclS;lvh_dFWthCc} zw+Y19oIbRq*XVjSi({f)MlFNhUCHH0QKeP#<5mE%Q_y|hG1HEjHN$HkvnPdaTT_}n zQu-TaL=xM@uQl1{IyX)KV5A~|Pu5VZ5d(VLX0A~*dcrMP} zX48b?I10^|!jexd+dYm?CkxyP=q;=?ZC_a*+3#s#bsH+i_WMh?7#KNIg(EA+9PxrH z%Dgi+tKxk&IVGM{v@082ySB~0InGbfJ9_(8HPp8{9;w?9{J>5Xw_E-+k@X>F#Oc0I2`_#>vnGMM#kOYCuOGYPIsiQKy*a>sX5 z(C~K;N{v1M{65nzWs>D)k*+P?CY9%JGDR#wAOJ>A(aGln(-q~=c!yJx0AEs(gzgp2 za37tgp1lWtc279~;0I!~zbkqYKOFx6_3>34n;zwPVp>nJ$6q{^U24Yu zr=j?BNbuK&Zf`ujIty5@^51Hf^zU)FxC83jc)-SevTN#|5z?md1b5Lzrb%n2#IP)H zCh;&kK~@A{^dRSh&j&pD#-*uT*$dmTBHSgk1IdVZv|any!SHP zDlkPugnyBU?Va1O4+nxV>U|1X9alF$Z2a!u`Gp*7lS(ORuT3qf?OGMqpP~JxJvw+T zyzQn@0|>Ez2F?yx0yCTr-|b|tj(khvO?SeYrP3_-SVkz940fqt-1fj2BxIgX0*dq@ 
z!PO?Yp;0vYwW-%bj4`xh$xlV(?0S8K#=}6Cd8L+cjVGFtHXH%DRB_z5)C_$qt<$vl zd|J>rSx4GsSt3Rol|ft$-2lNS9-BvAmCyJKQnivBSmzGb`mNMsGb)vkOiBWM@~q$R zAaFP*uJ|Pc;`Yx{zw=Iju|UAMzixUd`)w z`kZY3BN_$V$vwzFC_sSZGcg%Hxfp)Fp5nE%OQ|)jP{PZ+#-?vC%M38ByoU7$ZZYaW z_NHHWI@?>(>o#@+cK^$XLcW%xnjCAYWMZm(@tPwd?`Mp+~Q zGVJPukM7_V4cwLkrUi4wu4t`Quk+Y+FDj`$wfr|QZMB9Pwb>E8Qh}10*&rY&DtnAz zdj9|qmML)!ee;&CqS;7prR?}WLnt#>Ah_aS* zoS)%7-8%7H9iPQ<9nI*TA@&_E(n;aYHI~hpWv8%tBIT`0F6!gg~3FtU(qOtF_PwflYMcI)ijUR9vXCF3m+paorN$b|K zwQm$&2Gi}98^x0I7gr^dafNXp3$%Z-Mh18!4CHaT+CArsmA=bq`axycXcx zTfl(-0HUD?2JXd_g$f2q5*%~}xaB%?ZFzr^9241Ya=O019;@Y<8VI2C7nO0oPu;dy zbS!zuW84mE)|IJw)_SWbCJ3X7*r~_LKvr$1gVf``32NQ(f5XihElKT6(fzYgkrpV5 zhC~ceH!y6Ff-(jUPsxrjb3j<>R$5inwY)EJ7n}Bz7bT-m@seAdmLqV$?3<^g80EDWJ*+6|T$0v?4E_KB4d5$Sx zGM5Gg3Q*ujFc`pTzVyi2P)+c%J7^yPkQW(>DoFE+c6% zv>@-ak(~D&@m#lvJ|YR%Y`OF0V(lD5az=XX&QCcV2XIF?tKKQKx3ja7@CZ%K)X%9q zKm>04v*oGkPE@JKe4Z=K{7-lP00{E`0HMs+Q6h(CMd29yqvmWJl20HKaC&$4u-IBv z9pH{merD7Qx_`ubtBclRJhJRrPR-KYOXSu4=ESMQ3SZs8Z>jfJ7=| zVgnMOdjaXw0=<9sPxwc1WvCraNuzxHnGyqmp19{5x$ZN8#c}@t2!0T^jdkdxT|LZ> z%I&%LEO30FsRR4`9P^SrRM*)501o~U>3SBC40f6=poUaav~Z`tLVulpkHP#sR+LsI zvesJbznA|xRYy%Sx+;s>1YvpBWyd|$aq>*_}A8W_7uFb4>4oA!N$>W|giu>cm)`t5@nl;jN z>seh1$d+bc2cQ7t^V+^<__^@H_8Z$|x{lQi^xHE5c#=Xv^4diy_#6)1I@jjhag$TV zP-@nFAD>V1^k$W5dssO2JcjQ``vuf`m8dHKMKY%13lwZ_RREAM%f?4vYLiOwBkNMl zF;bJO3?nxMvUwTphR>j_IBjRr^}}PRu~QymnidR9P4g4CILOWhag&kjT;GiEUf0Dd z<=aUVC^s`J5+uOdGoMx&IOsa?EAS?qr|jgdC8ykYv%B|RjU85jpi48EE@vNU*rG6U z4oK%dxyV2Mvt168;r%|}OO83(S>=WxSTV}yX%~uy)pEzO0%xztGA$|RmB}`?xm-A zZXdUW*%%6-JLChM!F}>`_#V~JcwfaM#20pRNI{NP^43)#ENt0Oc=aPc@E>aNj}ctz zz9at2xv-0TL8mUrV>$11rW%#ee!^5|a73#(W63C>Lz$B(W zkjLTgRCRxeb6wf!w$R5TM92Yg)PnAJA8gNrAs{{X}O7CdUfuRKOKTiVa&LlVm@l8?NkjDS1ga&yn!KZd>K zkkWiPqE0NKjw5W*I6rz^J21)WaEyZ_^v`a-^Wjf}5a~;+qRVe(5(bOPU{n_JLzJHyU?&O0ai zGau~Ak%XIS_G|8F@y%<*(`wT;ma)7sMy)K!TW;YR$aZ~B4^FwJ-e|Jwx^vuI;bOXx z3<$Xl%M@zO$3QtF*E#8m>89|UR(}!|-rTcXymKo^et#}jXxnpk!?=%*dLG<&dY^{$ 
z+lxDCBv^#1E@S{P7+|}cV4su{&4GYFtCl(xd3~IBGNT!57q9i9^WViwnY=S)a>}v~ zv!sSLVXzhl3JC4br1_PM28JT#U1ocYk@(+b(eESI-d(#URDeDTWJK2Tq%`GDs*>sO)i zmDJWNaRsTkHt@tGNdmH?Nx8Cfkf30m*x;Ifhx~D;Xx<>XFoxFl8;aTZ+DhM(qLT2wUTd*9@i z-!l&X0L4}sQL?$XwJkN=5;eeN&PbDK&#@s-UZdKI-q$=Wr)qi(aq1E!hH)ISlmy$x z*pzR=@=I)9Iupp}Ir0UG#b+x*vvgi#g``t&$d+NZ(|W6lN+@sW`zTaxf1dXC4#aFA%=9 zt*xbv!!b)`xD7OcRSU=#OpN1l$XpEc>z>Efqu1|#C&S^bV#-*qE?lBr0De$5#;!?V zHlZ8X9zOGoan4xBT9l(3>8sbjPrlFQeEcOd3iC>X#$FpnR~hV2Y@URWI;(vvOtaLi^$40N zZSLll`60KAF_a9(n~pQzp~gcNB&znO8Kd=NQwAX>IGOZ}Bzdlp}Wg9#Qanz}Mav)xOble9}P$u)DaHRw#pWNOvl5 z3mlF~?kh6! zTw0hdkV*1ECh1W(5)Ui)iN-imIR_Q(o&h?3i=b-WDv?rqONgFW!=NfHODb}D@TeO+ z1CTiZg*?kKsNc1yz3r|401a*b07Ggtr%@?A62JBMA3A&|xx60}E_EA=OE`3UDCUZ3 zOYV8*a$z#EmgjWwM#FlYss8B)dh_8{tE2oq@txP1Zyu?txU{tf7XJW5irOh>QWuey zXjFrY4Y})qUg7Z1_I>c@hCUZhd9O`pThLpepjcizL!V0 zgu*orYPw!B+s^C$82N8a_(5@f@k8vQ@^F)A4+oPw2ik(>-6w7^3CUr=86;KvFWF|+ z_xpNHHqOG`wF_^s*}cZqfF!OzHfh?i+k9KrM`_H6;0snGVUuY)Gcuxjlj0ucKeseB=YL1 zw1OEpI0JS!1CU+tr$fBbJb!T%Qb{eGZ7O;DalP6#l?dmIbH+d&a5x8`__1wq;_nkY zFzGk8S`CcRt)RDGH6UPB5i-9kIVWiwi{zFg0|N)ZM^^Cuy=Wv$%|rbt!FMH`W9*jE zuKloiz?5R4z#&1&87^>Z>D2KRsL9KAw%^b1TJ7C>pED>_a(2^GJH?*`wGSD1<41E9 z{F2>X#AQh1L?|9iWVr;h1DyP$jB*ZZsJ8IBH;W{{lHtXcp==amCH=ree2ab$;;Kh7&uTGnKHTQpCk#UTYaeJdrSnw5| zk*DZeZQkAWN#1!9X*U8D(f4iINN4j8T=YFTu3uEMlU&lSufUQfywoF|o5&GJ+TkbF8&7$5$-EQ{vP!PsF4WHBfXoRB4@_5` zUHF4p)UWi&VwO1d1c)eVbreY#u)N&bG1PO zKX~-0d{g37@gIn+v^KjRWr3iA>fkUY-Z2`70~{^_md~pm*{?hJvEo}>JIzK*No=h| z*E@fI6OByk2lNa%%dkHjQ;>1YXZ{d)(!B>G>ji1u_XL2 z%mL?a0qc=~J;B29UYBoo;7h5kKYxF(O!m=4u_!k2p$17?00YYb+mPqdw*DI4HnFN| zTCM!nbH{56tWXtKmRTfVt0NwvS0#uady}5ke?;&+$#G$9s|2@ZB+RisV@2G~#IYWa zgPeBbn)(WIjSV}0U*vRCYM!j-ei?XHO<%&caLqKWV;LmpDtXS`pz(rF zU1h8dS6wzywa~P-g^Ro@NMg7uHy)YEI2k8_ft(J9!`f6b&;6UGz>9Nm&AvccqD9^N z!ghdg3LX!r^}+T30D`^-u<+T$J21e zQ1e_&u@$t8cJ==NfxA2Ku=cZcX0+-4XLI{Cd^np&(S(xEBvZTY*kmD9P71C8>@nA= z$?snMqu5S{HCdqrR0R(tVE$!APvA>CZ5rALBGcvaq8MDukUjEbuHM602?;-7m^1PK 
zx$D#Zn)+AmSbRKb!6x-RHKxetEp#nK+?5uMCNfcOAkWjMTDhnAQ&GB9Eet9M`>Qk` zpB2=upk#?z?PgSAgZ`JC55jIKE$(8L+C8rnd=6x0e%%QBKp&kGQ0o5x6IiEXn7i;| zAu-5Tc*8K*t^ho9!LK&>+3>GP@g9LPPknTZxn+1siICuq8zUWi8uu@>tM3hSYQT&K z5s~;0-mtD~#%&&>rbwZDhw^+I_>pprV)qtk zMdgIE0p(mq@;kgI1364!`l(KNujh_B$bF6@>-cLmqLsV!e?1fPIdg0plzy>K=5Shn zgC@}YMQ&}~Zb%IYc5;M*7w_`0B zB;e<9KAdoQ73j7$w*Dl!vzpL(G2En*LmL)(BXx|*0+KlyUY$7^t8#c}QShgMbv-`9 zHn>RUNNx-OF_DWaFB?ZXV;IDdnG+e{a=FMS zfuE&t@55uD@CluW@EQ` z#MajhAqw+Nh77=xdPt~qk@EE8f#^23HogNyKjK3on+? z#8O5W&g>aS80YE7xkbft^DiTkHCs+G+{W?U+}66~(H6H>yV^KnR2%%g@uNEvlbqx( zdS$+q&Q7PDdOH_U#(R_FHf5L2hm=mN!`3 zJ`b77g3J6QvuR?5B$-UJ5IF>Lq0g^Bg?bpAW5mMTsV%>+yySATR{p=OPRjQ8 zPPrF43Z=A`u*il3F9vbtAF{E-`+n=8azBk%=pPICiVL3x>$-N9wkA^%({%V-WN$0S36RKGg)$J4vfv)A#(LL7@b~t7 zyV7-;Z?z(q*1ut%Xd<>REzYGIzUF2>Du!Z4Gv8<&DRt^J{{Ri?7k)m_;(Z|7Sm}Oc z#mv@hhT7fag5j7F>Q3(=U|jcSP@sh>I`a(9jv~D5wBGA&H~g)qSKQ~77NxJt_5FV{ zjlZ_>F00}V2f~p^kVzh$1lpyQa7!n^gGNcfC;{fFPM}U3?S$t3gy?vTV}#rZqLSD=&K=lfYz={d({T04<@^I~_mVuCj;lVV zkeh2rq>+rVKys{hhR-MFVo4iSPB0HWABy}-tJvz+FlqC_t3AcH*aVzmSOVY?*CqCz z_y}?cCqF-$W={~+J+!~w*YP!{QW2#Y)LMUA`rGkWI(=uudQ|q8Iyk$xx3kr>_C=Y< zi_Dql49>BSnUC(HJF~{lNg!vQ_>XrNhJF(1Ha6EVy`|>5?<>y;SiK`>CQ`^+qO&M}Y)89DMWlzAbjs+W>3=^~+tMY&wEHc?7jgJ@WKiXPb*<3{wu^U)M z3|V9>RKCeLQVx0y2EzXUc!A}<6c&r)TOn^GMtPx>u?%q&5{$~l#q&zn`7g-Q7__Om z)!O#5>-hZ2J}U6#w0bqo)U6%e!Y`8mRNWP@RtkPz7?vTpZwX176rqSV&cqNdS!FgFx6#|{C%C4+Q$s1Rk zjMv*`v#E%~^G05lx1zVtt@Z4AxcY95enqXG_U-zf+oRZNdfv5lVS6<3T&Ab?)pXd6 z6w&#_$tVL1xRZrp^OEdx3W~Rt9Pm%g+If6)28FKK-br&RTH5)J zkvYs}IM67_9Woux(!rRy$T&FBVkTht4na+9n)ck|*6=vPFPIxaG_`cf5!lKhoh}zkuf@#`u*co7k5xPmr zo*9^y$;YM%$ra~+5PmVu;yLBIf?K~gkj7|R5uub4n}^Ck!TC!Mx(?nenY{6}r;9Fb zY{lKl8hMTYH*Jz5`?q#t9P)4rdk#ADS$<1Wlf3rc%gyiS)8&5W46##<(%8T8TgR5` z_K|NS(dq^$qH%QPoeHjGB=NBnMo8RpaK|_#*NvydNVWd}?QK%fL~UX-I1H_cau{q} z@RXr(lePDqauNyn*d~z+2~rhYl-b(@O#}o zq?6dS-<)HHMlB>M9@P(?Ig!ZD0O3jbl#2Vz4qTy4D|Y_B$f!b6_pSY0>Lb0=EcAQ5 zZqZr`i`&BylHo(W;My6&p#Uxh(oRkUgbeocuO9q2*MH#-xbaQoLdagipJZuQdi;_3 
zvNjJG1Q1B)a6EBcgi>l>6y$;nZTdEoVP$N;XSaiL+p4sUHd8+_^PW?Za!h#6GgA1o z;)D3F;r*&>B0NQ@-OUB02h3%+NRbi{>z|pG3KDX60g=uRu}(6jB;vJgU%Fim>QGBm zdH(?H+2FN@ReAMm%ak|vsVf7LCL=7tTRA+C2LN&gbDH&gZ9Q~y=w&M@^j}gAMvJ6ccy~y!xzuk`DWid=B`}Er`>cgG9k|*~7lF7}(0{St!tEo) z+9CLr;+SP!3&XL?CY>bX%LH;{xx(WZBG^UF265B zE5Tn2Eeh;5a@cdJyowTiV zOzkXb{=JyxpYbO@jd_Q|WN(O`G`;aw29;~5!Y%DBZmy9Y*4XZ28v;(&+~+$ocp3Kk zXTUmj+`3#!lO?<{F2pe8#)<|28AanAfXBUl2M%M`4aU!I-r+3UEd%aY~W z9Xj@P5vG$4KI?y@?#Hh=^sT7$T|uE9UX5-O7%Ep8`~sT0rdUkCldBMlG0bFh-#mRQ zTFXWfs)g0zW6>)4VBmYH~bR@9T!}R07ipg%7C(vImf z0-`pXWdq<2ZIQFb)C$y_!%QNO4NezlQ-JZZgXz#!xx5Vtg<5;7U^CAzocnQEaz~~9 z^HnEpPI5gvU7sl;=_srZK2j(@`1!?VYq}1owvJkBAh8lymQo1&#Ag}DJ-sWV)Vwus zcNq~it1@IWi@8G&s3-YXC*w~G#x(P=?r<^zydV-dJns5?*FAWySIc(&O{072cz?%T z3A{<6${SRP00cUucmQM6At(Iwu2;pHG@l=TXPc{du3uO2e}u599?(`*MLP#Q45@*h z?t>!>TE7x}8E|HuXSt5*(_uS4(7V*+^N>bHI}!NTm1(*)=Y~9CcI*A44x6Z1rK3D9 z?HqCk202Oe8DRM*3d~1J{O>iuO1zq@9Veu$Xu_>JHXR zG?B0cHy8+Qk|8+Y@IlEO6Oq_^oEs&#)NVASk_V1?ZrquXT1ed6K1$~qTsa;7QaP^_ z@ibPN7mRQ3;JyOGS#5^aB6ir_h|ifDjmK`|k~am(-COz(j^xv3HqSovx}WS(0&vI7 zRmnLx-S=^mjFHyAo%oL@tAoc%f?IvfJ8R~A%|!6^94vMJ02RDJZT|oZFG<$4`=J_X zfm-(66l4{Pft~kZUc}{9laOWSIvG8__2SW z>pEmMGeMc2Sa<9ykt&BQcmx5wat|LN?bp)g@gr(>FkR|b&{-|E5uq%yNE_x>007JK z~)6yaq!c+<3x}272IjB>iiVwecK!B=Nn}XHlBvRT@TQRE0?cb7wgN zVK~oHKqon`Nbnx1{{RU#oh6BhZ7mi#?XBmJt2vq{XWT|zul@xdg8tbnkZc8?t(HRy-4X7msYU8JJ&NGQuT~ z_kw6b##u@Yst)ap#~I;HeJhjk3yW1X>edj&dU8}WN1Vh6H&zZttu>(0B zYp(sBJW(f#ylFm#Xg+7O=9NE4H;ywe0@@ zU-)CM4~Ltmmwi2Y1nD+`q|a=Sl(N>0HsK{8EQ=@wayM`iRde){9qX9*Q{cldiY&{P zjcp8%BFP|m83HkHl=2W@sqQjQ*VleC_&N(q%{?q_?loy*xwVQIRey6bNSmO2!Aord zJh8%n@*aJ2v=0kicq>uyHmf9e7STs%9E__OQv*&Sn`l+TkPg+xSzR5lrg0VHk79V^FvEJ1yzT4}azrpte;eX=;k%IV@7A@w1uygYRo!uPgnkyRLQ}Gl0N7Ccg9nr2KZN~Q8tN;{t2!&W+i~>pf$DrdN ztHoCK4l$^%+P;pJUymB`nI=eYj>whCGFf1pE60Le1_cWF}sih zDEV?mc*SQ)u4y{V7WdQZZ62wpDVp6{Hdx_}$R<)Tg2e4&%sCC7Fqr4>tn|%G;yUR% zeeAb0?E7uxFhr{bsgaei#rZy5UohT2UBiM~k39H`@e@k$)|0AhH`f;t%Pqu_*$*%o 
zr4c91AQ4*L<4eT*3&(REm82~m76{rwg?1+khnPV*C!_&5AN5|9v1k|VW#-sPZxLgcG`BCcRh`lk_*i&(|wtD z@NvL-asAQL9M_YWWFF3HP>tl1(RaPJ>#o60PIFXh%E|tgOJD1u>|YSPWe3A=gSXm+ z)x@i*lLJ_!ZmlV4N0|=qmzMqPWSkF}_g*W2)c!DP7akct zben>v%kqEa)6n34ScGJVr+G=D+s7m*Xz2z?&qv+tk`PiO!trO<%j~ch~fO zr=RLtZCDo>lE%fse zc^_(xu2kjQ$Y50LBLEOvsOSg=x$R5g?yIQ$T+^hIb8Vq`Vo$OoJ{fg4RSXzlf_`|| zpHk!uWr*)QfACh{OIy!jp_b`2D2%@YPHyJTOx5}Xh~uU`>T zljTYDzMVDwRlPqhT9bve)aHB@b9X0-EL_-6d8O$hTiZBg%SU+PJ@E+~03Bqw2RR)X zMn@v6Us~I0zButErMv0cg6XDDLlm>H_D|6$j>UEXOcY%_d&ANb?qYJSmL<4v9#LQU4hKsy_t_2 zfFv;kIr+H*HO%XmI@YnN={9yI-u^kRAdEt>sryio@_FLHR*FM;T!2)RjdT zJ@>y|(KudaRpJbNqIXlB%i!%-~Rw< zPl$dW(RFP<;d0AsYo%#0!#(tNkQjuDp#7fLJ4)kng55Ln05OqWwv;q~*su0(32k|4 zsCb`LAtCdTq8gR_g8;J`4tgf zXHhH0s~|Y}hs;g^;DA3PUq$#&OYm&AG0NJulKX%p#_5CWjMpFGkAxl`J|VSud|{_~ zkt(l}RnME(kM)7Of-pxQcE~=fW$@Icg2pwwXrd>k`P5^-{I@-EkH)`1%doX!Pm`W9 zx0?R|HTfKr-L#ee03Y%t({wKetZJ*M#?tj~CP2Uw$pZtPwY{czGsBmSnC~Z;CmX!L zj6XxgW5Mv`JChc-^1= zi+z6Q{zpx7;B69kh+RtLFGf$cg&E!Ju>Sxr#-q6KXNRq=fR9p)P0$%NJdelZ6~$`C z^G%6Ny0CQxcQKXt1cBb{eSZq6d8xw$3p03<*92fBlDhZngLo@92D&f5{t2vY=l2_$ z&x24(rY&zsX#0VA7~A+C@D+otcrqkikZM*hD{xoLfagA)`u_m>>MP$DT0#4{d_$zL zWgqg~kPb6~e%fmrQTT~wp`(vq(gQd5E*+1rQFof>n@M#0{{Y~d#xKm`J|g%dP_$_h z8Lg4qDz5%P2h*N^op`5>JPD{-O(NXg+cM-ki*31lADK?o=Oc`J*S+}9$F{=M=J!vy zj~Lvu%Z47~&prKX%e-ZKYvLJvn59`G+%PSJgZPsn=ReB4JY@wvn!nz5QqxwCJJvi6 zZ}AS>P)jMihFI3t=1jYJa)tfOs-S$$+~gCGdo_6Wt8J|4x6ZfXD=k;;2^z*C<|kPr zjY^Z$XABQ;M@seWUs#L4^IX^jku3GeR^IvMjbacELe8$FjnTf+bDXF>PAkWLAnRK8 ztFB6Q6;D4)Eo-SqaM@rZ+|kOU0!oJY8-hTQSbfU;-;45Ztl|B-w>>TUxc>ma{{Se4x#}UOX&9t`rSAOM)v*3aNQl$MyOjmQD=s)p4GMJ!dxR_r^ z3vTmO_Gfkn%IC}+dM~|kT3?B$zS07#<`_f36+-7bk$}lw2h0aK1Pt@VYw4ajd3<{Y zog6b5?Bk3~bCApeXWX{f!u?eB>VF8U?PczNE&lBfhpj$YdNa;^Mc|!X{9SIA4DPn~ zH#1Hmh_G$aJE+M#0@7~iN6U)5b=#i`_WQ#0(w-DI{}(F<$iw>ZdBJvfcde{d|p~U*5cqheYu_8o!2gndGy* zxsDh@#Iq^iCA-K*81flbPE>=Puhd{?=uJz zJitls?54agOz`cNnW$>l*AIQ=TWOXzdSgOI*|iYy!m^-5Kp$$m;Z6=?Ag@Z6%U_>Z z@gIlpE+m|_-R;e(`$PP;>hm#GZ$ejoF^&lQJdD!^3e&n?>c6Ghzx+R~j+xYyTW_)J 
z`o@#tkBHtL*K~KemTwV5r8V3mLRJWo<(>H_~!TFkL?>izCW_1I-F8pL7e7mP;gO? z8!+V*jAxcTF`q!XgG|z)7Q%S#;PBk;k~|0fBI8V%cU!jN`SU(Q7|RpBB6}M1B`TC@ zs(Z#sKSygczI>^5yFP<1$HJ24sRY{l_T4?Mp1W^j=?hO2!L0Z~`E`55j!0v9j4V}V zxPSu@AUHVM$Qek_&77{<;q}d1S@BJ*O>?IR?5<>VYX#biEKM90C zXU%nwjQ4&m@ibp%3}l|?%CKv7SmH1sj1*!rLaU?T<&IPg9)s8XPo~*;E5kaBPjvIm zbc-d0yikcJiZILpUJ;qwa}UF@$*-8jDbVCoUi6-eT{>y$roLYzyA_yyVw_X^I=9~K zzguc;S?k^w(|l1LhZou5)-EmNfo2UPXm>ZxrHe9SaT(o?0HjM~GM5krn6XwyJ4sdt8+ia{(>#BRp|kKukF0gQLUfN$xU&$+B!Q%sAsdE& zkxBWmhZ)XTC>;%UzYhK)L#g=o*I1KV`&Es>`&W__7c7id_m8{F3*{2TG3RUGHvn`` z4Jr^-la#KLzP4>Ww)N_)a!(T)mV;KdnpTFiNs-%N=&Fi;gIpe zMn_y^u9#yfRf?3X{aev?>-~M_qOjED+@&{uw!Zyd$ISl#9eygy;Qs&=>X7JHc5kE2 zaJC{%Iojgp?%_yg-yjSK@@>kEuE%PIZQvX+Ue}F zE5mGv*+;~!^PWK5*xbqHA5_=vJRR`Q;Qh=`Wqt7rSJQ*oO{-c?EHwLhW<-a}wO|@q zh-Hl$L{wqB?*qQRTk%$az8~=|{*eZW6{fLgh5o}1>jsjs63q89NaW>P1@|yOcsN{V z>oRP~ZDGAuJx=;X%(6(wg+n2k*^YRSWr+mWRq#gC_>;lus`!#^Hs4X#wM&J03{1;m zquP1W0x&^EV*zrV7$On#5nPU!@X9X}{64zzKB0g8rKhrgWm_eqNeovfA!S5zLou0z zizKQ5jtgtd5*!oUe%ZIzUI6`i4Y%w{@e9Lsv3B`^`$^xR^jm&zEf!j!U~^d+b#V zA}qRNrl{16F!5V*rM}6w`}O(n)wSo+pzFEfX1&;N(*B;V{{W@kk<56z#4!9c@nlx^ zH&a|oAK6wZWl;yPQN-?VS>*mt` z05^a52;-*}S*1NU(S7LkPlG-Q*0isI{w{l2E;ij>%M_B!!pVtk$Y4qNmoKn-htGPz z@rR2czl%^?Th+8p9^oaPG$bK}Fipp)$YU3Coz2@A={^wrSch8uo$jN!W@TkeJH!*o z2kgY=H6uB2p;T}_na7O$MbfS`{X)xDxSh27Ul3_?tJ^zen`^kq4jml3y^B6BNCxm$upQ4hY* zdf?Yh@ay7z=t<@Ip||tOv;{`#wg9SBW97jv?0aM$0T}S>7P*e{YnE|oVLC0acZtku zzDmZW6lI&M^cc#VoY!}x>$*OZplSLAt;VS%UM?;tkeJTE0_`9d3R$>j7$<>@oE&Kow*YvBYwHtdoyEoKh^ASuiOA(IV6^i5yxWG(;gl&*youeLWp?Hqw`^OVpPh$?3 zXDqiKRqXM{G^iy9pE0u0uyUlLg*#X1YuWx2_^mDc6?v}S-bEx*pi-czhE!=?;{Xg` z6A~dFkBsn5cvVxOWS2#MU+enR=bf!(y|nfIzarP{;h^bF;m;7otiXZ`JBwHnNngnG zgmbi#Ha7u*xZ`&Pim~g8o>7vrj*Wh1pNXv^ z@Q3z3i*Mw~uUKi)S(}+vw(oQqTp+;$RfnAN3lW?yIQ?5fw(v#LhnC1qi5AvaHwfU3 z-IoR;xsis?EDQ!a&JROJjyz?k>)tWAx6{!5w%j$UOwq5IY|hZ2%Mpax8Y$%DIVZWz z{{ReVxB88|vg+ZjE#hXemZH!r5GnSzU$ZY)Ycr(S8T4n zA&=qqpKo#IL2m+yzt+gaAl^)^AKlJZEn^m?=>5B 
zn#MT^4QmSq0ruqJu-%VD=ss>d&-Qcuoa}#UPaZO}NiKomdEuJ!DJ|YBOSbaaR^N6G z?idCD9svY`MSV;0!%&;V{tnc<9i=t3rlG6aeUun33de4kW#O<0z+ifMbDI0E7s?ux zYsxLoZ%h7c`;3b%rx?6dRMovtF`ce|X}^wb;$bbUo*K|IWunGpBg|~#OM}KfW5SS6 z$}%yy*U>%(@SN8AdheFX*<0^!zd%MmKDEO95B;8X4+v;BI&5j85pNJw#oWkL0geih zfnK|yd^6W=A`fo1ib&cSH3oD40FSvf`<`Ecl?qBKKE9^-n%>rKQf8ivZR}ML>N-4S zSA+JjcMS4!P}Zb+^Z>}xYkDrl0aSgiJbItt=;Szt1OjO~`(pRRX`^iry}kLGhI!@u?S7WF@fns%5u*R&YU(lEDik)Kby zRblbfg~i6?@g9&o!TdLn{<%X=ZwPq0AU4>}Hu1YKl7I3gLvi818N~}Uc2l=QX#W5bpNe#=c`QIy z(`0Cg7)?Q>=r}wK=NSC!gYg%Kjm4CTERx|iX99U$LwCmI$KY{W-YoI{t#7M@eLm>I z50@}Hh9gtJ1Q7gkKs@8-6@lXqh!1b3>~#D7l%)9Hg;Pl$XQ14kC2c5SX?vb1Y$NoSKDXaRyd zZQYZzVU4;yPXx|{|xHJmRi2?U7K6B`4=eaw1}r}3{h)_g%{CcE|<#Zd*y z`8N%)tZygqk_gTL!0E;tv9HazgEp2DOWo$?+g_U9>&x_IR>jH@T3tPl7?Z}A9t-%n zH-+^LEp2R|xr^+QNat+Ox-eP6`AIodh=GzAg&Yx{s@%Va{7+=}Qna^AJ4x~k1H3y( zJ~>={8;{(-U8k((Y|f!uJOC;qrvbCAzY+fyg-u1d4IDC;-mt=e!em z-xDLXf;s>x_wF^j^B@9W5Fj<(U&ZLqPKFU6oFT~Sh3mqnT1J4DNaZCwN zVKQZp9A|neu08NElbhNGsRx5^=P?+kD5b9VMxsbI4ax}|5J>~K-p@6|d`8yc*M2wa zLdMkGOL=i;9A-&OL6sqoFZWN%rBrS>11FmKTr_1;tYI6ZmG9=PzgBn=T+Z?7`IfA; zNVP8j>N2_|{_L_P}6mmuuXzmotCH-+nMMa4-o3 z;;2un_?N^U3Go%}j@uaKyRx*3>Na^=WN7Eh7L)E`ijL zr1@_yuOvd|+C{f*!ZZWsJDh;L0%SNNk~X{xuc}O(KA*%*@XFq-M$x##IS`bDZM0zC7w;{{Z36n|L(MTU5Sy(mP!y#n}UvD(dn~ zMoel$IdT2wAg=zebW|L#H1>lLuA|1&T`@y%E>HJx5|EPtCkFVkO$tcpT^)EYE#B z{SK;iT8r#*-?XL1xvJ>;MgFI$$z^;ZFpFI}2Dy$h3`LqXKxAQ(!zADXj`SoqMqu1 zEdVF1!Z%nuNZn})I-A#YuJ1DH)DWu$0fkZ`Y4(}{S1mI_k zE=cUn=~BbtDbZ5B+ge+?dU`AB_1~$jEoYlpxsLwkdw|e2)v|et(upNf`*0Mh3xEK? 
z#EaaKMSLTxPLuX9inMg=_x#VSqnRkvin81K?s!k^r8EBkZ)ostZz9jbT3NS`KO_Xh zl2mkDpGx@S_KKb0@F#)U9g(%2LsFYiDsXR|8c?k-{Pbn)TO?6MezB2%$j+y#{ZE+3 ze-Gy`{U!&n#!Mdve`i}1J1nlX{W)g0ULu%1%!R`*L%8yCIqy|}Zu#C%ixByR5zue{j0B-*P=_fn!g4QSfr+yXK z7~?h~>q>EKRCz(3;nmLGnPo%-H)3; z7w~|DNyOuG9je>>GKs#fl zeH-FK_CJRgaYOs4*2=&HPnAGeu8QzI2n2c>D6c0S{7C-*i5z&ZjelK+c+XV2gWxsv zsdG9?n#P$Ly2wK+x0AU+^f@^l^HX?57VpJ6Ig|+u@yH}DGv)@G^;jJB$W;eDKpvD) zT(oxk{{SRReh-)V4ASl-7fm2{4=;yrE}gd=yxGT?Pj|orcU}c#&v`eS;i2Zqg|&uN z8;=f5V7J!tY`dx>$4sc_`lbg_fPB#V+NJ`B+hMNirBf-Wp610ZgD)|F~@Kf zKs|5=rFgH4FQ&TvsXSL4vr8m)wPEA^ba__Y53z>m}A{{SNB z{{Y*+lRk;iY~hbjLa|1qfszyq@IS=YSK-@u<%xGjjrCA_QAK`@gZ>2x#DK^KJ-yeuIEiJh~lrjXRLp21^(95wuuPG z+MgJ}_pXl1#TuL14cd+I$>%o@&p%oytd~=xsv$bv?3eaMuP&J4PO`D|_pJ*ZUhVEk zH&+B_p^*ChXrh!#UkTDkp{*Ur1-*OrsIAm}xdsLgQR_t&Q#qr#jjF1?Wss?uK) z0Khnq;-S5PBD!*jigI}a9q6LGtUvpmKk%X`{0QJaAV|w+a>1B3$6V)}Z9G?n`0jT) z#J@XyvPcI#2Ghk9SL0dd{EQ*%`V}vNKjQrlOZZJAU3gPS^I4?dV`|N~=W+4}CpF-I zvre0DqThJl+Rg}VE#UE$@<$XXBS_RnKmZN^9@J4@haG>&o?rIw96`7r{4AIdivK>s@ukXDaZS?`Igi-$~gi>13yd;$WcXpH80|PpZ?l> zeD{CXVjtO7(ks6c&ho_^j~IlbI)*AurZy$6cfXJu$@;S9K5f>3XlqN3_3%FY$lvtf!+&&GGKuBFtiy;8#L^R|Z*f2=8A& z`0h2+JU`-ewoz`8=#$8c5ZD!#E7Ghn(1NN#>{NPDMRI3f9WO=t9TdJ7{{U{DH{ttu z?~a_4qD-^JeDa4-!a}jY6t;ShI2q_G+5R2M>!SD~V9X^t9f2+n%t?R~&t6C$l@wP# zbNmSU%km~44E=voweh92bNIi))~^~ptZO4AETG1uGLS;_1XJu`O+rm0?FzH*)YP9a zZpm0JiuLp$5$S*~D3{_5c6? 
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/FoundryAgents_Step10_UsingImages.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/FoundryAgents_Step10_UsingImages.csproj deleted file mode 100644 index 53661ff199..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/FoundryAgents_Step10_UsingImages.csproj +++ /dev/null @@ -1,26 +0,0 @@ - - - - Exe - net10.0 - - enable - enable - - - - - - - - - - - - - - Always - - - - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/Program.cs deleted file mode 100644 index d44d62df51..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/Program.cs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -// This sample shows how to use Image Multi-Modality with an AI agent. - -using Azure.AI.Projects; -using Azure.Identity; -using Microsoft.Agents.AI; -using Microsoft.Extensions.AI; - -string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); -string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o"; - -const string VisionInstructions = "You are a helpful agent that can analyze images"; -const string VisionName = "VisionAgent"; - -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. -// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. -// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid -// latency issues, unintended credential probing, and potential security risks from fallback mechanisms. 
-AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); - -// Define the agent you want to create. (Prompt Agent in this case) -AIAgent agent = await aiProjectClient.CreateAIAgentAsync(name: VisionName, model: deploymentName, instructions: VisionInstructions); - -ChatMessage message = new(ChatRole.User, [ - new TextContent("What do you see in this image?"), - await DataContent.LoadFromAsync("assets/walkway.jpg"), -]); - -AgentSession session = await agent.CreateSessionAsync(); - -await foreach (AgentResponseUpdate update in agent.RunStreamingAsync(message, session)) -{ - Console.WriteLine(update); -} - -// Cleanup by agent name removes the agent version created. -await aiProjectClient.Agents.DeleteAgentAsync(agent.Name); diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/README.md deleted file mode 100644 index 220104a291..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step10_UsingImages/README.md +++ /dev/null @@ -1,53 +0,0 @@ -# Using Images with AI Agents - -This sample demonstrates how to use image multi-modality with an AI agent. It shows how to create a vision-enabled agent that can analyze and describe images using Azure Foundry Agents. 
- -## What this sample demonstrates - -- Creating a vision-enabled AI agent with image analysis capabilities -- Sending both text and image content to an agent in a single message -- Using `UriContent` for URI-referenced images -- Processing multimodal input (text + image) with an AI agent -- Managing agent lifecycle (creation and deletion) - -## Key features - -- **Vision Agent**: Creates an agent specifically instructed to analyze images -- **Multimodal Input**: Combines text questions with image URI in a single message -- **Azure Foundry Agents Integration**: Uses Azure Foundry Agents with vision capabilities - -## Prerequisites - -Before running this sample, ensure you have: - -1. An Azure OpenAI project set up -2. A compatible model deployment (e.g., gpt-4o) -3. Azure CLI installed and authenticated - -## Environment Variables - -Set the following environment variables: - -```powershell -$env:AZURE_AI_PROJECT_ENDPOINT="https://your-resource.openai.azure.com/" # Replace with your Azure Foundry Project endpoint -$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o" # Replace with your model deployment name (optional, defaults to gpt-4o) -``` - -## Run the sample - -Navigate to the FoundryAgents sample directory and run: - -```powershell -cd dotnet/samples/02-agents/FoundryAgents -dotnet run --project .\FoundryAgents_Step10_UsingImages -``` - -## Expected behavior - -The sample will: - -1. Create a vision-enabled agent named "VisionAgent" -2. Send a message containing both text ("What do you see in this image?") and a URI-referenced image of a green walkway (nature boardwalk) -3. The agent will analyze the image and provide a description -4. 
Clean up resources by deleting the agent - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/FoundryAgents_Step11_AsFunctionTool.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/FoundryAgents_Step11_AsFunctionTool.csproj deleted file mode 100644 index 54f37f1aa6..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/FoundryAgents_Step11_AsFunctionTool.csproj +++ /dev/null @@ -1,21 +0,0 @@ - - - - Exe - net10.0 - - enable - enable - 3afc9b74-af74-4d8e-ae96-fa1c511d11ac - - - - - - - - - - - - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/Program.cs deleted file mode 100644 index 585725322e..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/Program.cs +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -// This sample shows how to create and use an Azure Foundry Agents AI agent as a function tool. - -using System.ComponentModel; -using Azure.AI.Projects; -using Azure.Identity; -using Microsoft.Agents.AI; -using Microsoft.Extensions.AI; - -string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); -string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? 
"gpt-4o-mini"; - -const string WeatherInstructions = "You answer questions about the weather."; -const string WeatherName = "WeatherAgent"; -const string MainInstructions = "You are a helpful assistant who responds in French."; -const string MainName = "MainAgent"; - -[Description("Get the weather for a given location.")] -static string GetWeather([Description("The location to get the weather for.")] string location) - => $"The weather in {location} is cloudy with a high of 15°C."; - -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. -// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. -// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid -// latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); - -// Create the weather agent with function tools. -AITool weatherTool = AIFunctionFactory.Create(GetWeather); -AIAgent weatherAgent = await aiProjectClient.CreateAIAgentAsync( - name: WeatherName, - model: deploymentName, - instructions: WeatherInstructions, - tools: [weatherTool]); - -// Create the main agent, and provide the weather agent as a function tool. -AIAgent agent = await aiProjectClient.CreateAIAgentAsync( - name: MainName, - model: deploymentName, - instructions: MainInstructions, - tools: [weatherAgent.AsAIFunction()]); - -// Invoke the agent and output the text result. -AgentSession session = await agent.CreateSessionAsync(); -Console.WriteLine(await agent.RunAsync("What is the weather like in Amsterdam?", session)); - -// Cleanup by agent name removes the agent versions created. 
-await aiProjectClient.Agents.DeleteAgentAsync(agent.Name); -await aiProjectClient.Agents.DeleteAgentAsync(weatherAgent.Name); diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/README.md deleted file mode 100644 index 5da59b6edb..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step11_AsFunctionTool/README.md +++ /dev/null @@ -1,49 +0,0 @@ -# Using AI Agents as Function Tools (Nested Agents) - -This sample demonstrates how to expose an AI agent as a function tool, enabling nested agent scenarios where one agent can invoke another agent as a tool. - -## What this sample demonstrates - -- Creating an AI agent that can be used as a function tool -- Wrapping an agent as an AIFunction -- Using nested agents where one agent calls another -- Managing multiple agent instances -- Managing agent lifecycle (creation and deletion) - -## Prerequisites - -Before you begin, ensure you have the following prerequisites: - -- .NET 10 SDK or later -- Azure Foundry service endpoint and deployment configured -- Azure CLI installed and authenticated (for Azure credential authentication) - -**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). 
- -Set the following environment variables: - -```powershell -$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint -$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini -``` - -## Run the sample - -Navigate to the FoundryAgents sample directory and run: - -```powershell -cd dotnet/samples/02-agents/FoundryAgents -dotnet run --project .\FoundryAgents_Step11_AsFunctionTool -``` - -## Expected behavior - -The sample will: - -1. Create a "JokerAgent" that tells jokes -2. Wrap the JokerAgent as a function tool -3. Create a "CoordinatorAgent" that has the JokerAgent as a function tool -4. Run the CoordinatorAgent with a prompt that triggers it to call the JokerAgent -5. The CoordinatorAgent will invoke the JokerAgent as a function tool -6. Clean up resources by deleting both agents - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step12_Middleware/FoundryAgents_Step12_Middleware.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step12_Middleware/FoundryAgents_Step12_Middleware.csproj deleted file mode 100644 index 9f29a8d7e6..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step12_Middleware/FoundryAgents_Step12_Middleware.csproj +++ /dev/null @@ -1,21 +0,0 @@ - - - - Exe - net10.0 - - enable - enable - - - - - - - - - - - - - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step12_Middleware/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step12_Middleware/Program.cs deleted file mode 100644 index 7ea6bc88a3..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step12_Middleware/Program.cs +++ /dev/null @@ -1,223 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. 
- -// This sample shows multiple middleware layers working together with Azure Foundry Agents: -// agent run (PII filtering and guardrails), -// function invocation (logging and result overrides), and human-in-the-loop -// approval workflows for sensitive function calls. - -using System.ComponentModel; -using System.Text.RegularExpressions; -using Azure.AI.Projects; -using Azure.Identity; -using Microsoft.Agents.AI; -using Microsoft.Extensions.AI; - -// Get Azure AI Foundry configuration from environment variables -string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); -string deploymentName = System.Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o"; - -const string AssistantInstructions = "You are an AI assistant that helps people find information."; -const string AssistantName = "InformationAssistant"; - -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. -// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. -// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid -// latency issues, unintended credential probing, and potential security risks from fallback mechanisms. 
-AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); - -[Description("Get the weather for a given location.")] -static string GetWeather([Description("The location to get the weather for.")] string location) - => $"The weather in {location} is cloudy with a high of 15°C."; - -[Description("The current datetime offset.")] -static string GetDateTime() - => DateTimeOffset.Now.ToString(); - -AITool dateTimeTool = AIFunctionFactory.Create(GetDateTime, name: nameof(GetDateTime)); -AITool getWeatherTool = AIFunctionFactory.Create(GetWeather, name: nameof(GetWeather)); - -// Define the agent you want to create. (Prompt Agent in this case) -AIAgent originalAgent = await aiProjectClient.CreateAIAgentAsync( - name: AssistantName, - model: deploymentName, - instructions: AssistantInstructions, - tools: [getWeatherTool, dateTimeTool]); - -// Adding middleware to the agent level -AIAgent middlewareEnabledAgent = originalAgent - .AsBuilder() - .Use(FunctionCallMiddleware) - .Use(FunctionCallOverrideWeather) - .Use(PIIMiddleware, null) - .Use(GuardrailMiddleware, null) - .Build(); - -AgentSession session = await middlewareEnabledAgent.CreateSessionAsync(); - -Console.WriteLine("\n\n=== Example 1: Wording Guardrail ==="); -AgentResponse guardRailedResponse = await middlewareEnabledAgent.RunAsync("Tell me something harmful."); -Console.WriteLine($"Guard railed response: {guardRailedResponse}"); - -Console.WriteLine("\n\n=== Example 2: PII detection ==="); -AgentResponse piiResponse = await middlewareEnabledAgent.RunAsync("My name is John Doe, call me at 123-456-7890 or email me at john@something.com"); -Console.WriteLine($"Pii filtered response: {piiResponse}"); - -Console.WriteLine("\n\n=== Example 3: Agent function middleware ==="); - -// Agent function middleware support is limited to agents that wraps a upstream ChatClientAgent or derived from it. 
- -AgentResponse functionCallResponse = await middlewareEnabledAgent.RunAsync("What's the current time and the weather in Seattle?", session); -Console.WriteLine($"Function calling response: {functionCallResponse}"); - -// Special per-request middleware agent. -Console.WriteLine("\n\n=== Example 4: Middleware with human in the loop function approval ==="); - -AIAgent humanInTheLoopAgent = await aiProjectClient.CreateAIAgentAsync( - name: "HumanInTheLoopAgent", - model: deploymentName, - instructions: "You are an Human in the loop testing AI assistant that helps people find information.", - - // Adding a function with approval required - tools: [new ApprovalRequiredAIFunction(AIFunctionFactory.Create(GetWeather, name: nameof(GetWeather)))]); - -// Using the ConsolePromptingApprovalMiddleware for a specific request to handle user approval during function calls. -AgentResponse response = await humanInTheLoopAgent - .AsBuilder() - .Use(ConsolePromptingApprovalMiddleware, null) - .Build() - .RunAsync("What's the current time and the weather in Seattle?"); - -Console.WriteLine($"HumanInTheLoopAgent agent middleware response: {response}"); - -// Function invocation middleware that logs before and after function calls. -async ValueTask FunctionCallMiddleware(AIAgent agent, FunctionInvocationContext context, Func> next, CancellationToken cancellationToken) -{ - Console.WriteLine($"Function Name: {context!.Function.Name} - Middleware 1 Pre-Invoke"); - var result = await next(context, cancellationToken); - Console.WriteLine($"Function Name: {context!.Function.Name} - Middleware 1 Post-Invoke"); - - return result; -} - -// Function invocation middleware that overrides the result of the GetWeather function. 
-async ValueTask FunctionCallOverrideWeather(AIAgent agent, FunctionInvocationContext context, Func> next, CancellationToken cancellationToken) -{ - Console.WriteLine($"Function Name: {context!.Function.Name} - Middleware 2 Pre-Invoke"); - - var result = await next(context, cancellationToken); - - if (context.Function.Name == nameof(GetWeather)) - { - // Override the result of the GetWeather function - result = "The weather is sunny with a high of 25°C."; - } - Console.WriteLine($"Function Name: {context!.Function.Name} - Middleware 2 Post-Invoke"); - return result; -} - -// This middleware redacts PII information from input and output messages. -async Task PIIMiddleware(IEnumerable messages, AgentSession? session, AgentRunOptions? options, AIAgent innerAgent, CancellationToken cancellationToken) -{ - // Redact PII information from input messages - var filteredMessages = FilterMessages(messages); - Console.WriteLine("Pii Middleware - Filtered Messages Pre-Run"); - - var response = await innerAgent.RunAsync(filteredMessages, session, options, cancellationToken).ConfigureAwait(false); - - // Redact PII information from output messages - response.Messages = FilterMessages(response.Messages); - - Console.WriteLine("Pii Middleware - Filtered Messages Post-Run"); - - return response; - - static IList FilterMessages(IEnumerable messages) - { - return messages.Select(m => new ChatMessage(m.Role, FilterPii(m.Text))).ToList(); - } - - static string FilterPii(string content) - { - // Regex patterns for PII detection (simplified for demonstration) - Regex[] piiPatterns = [ - new(@"\b\d{3}-\d{3}-\d{4}\b", RegexOptions.Compiled), // Phone number (e.g., 123-456-7890) - new(@"\b[\w\.-]+@[\w\.-]+\.\w+\b", RegexOptions.Compiled), // Email address - new(@"\b[A-Z][a-z]+\s[A-Z][a-z]+\b", RegexOptions.Compiled) // Full name (e.g., John Doe) - ]; - - foreach (var pattern in piiPatterns) - { - content = pattern.Replace(content, "[REDACTED: PII]"); - } - - return content; - } -} - -// This 
middleware enforces guardrails by redacting certain keywords from input and output messages. -async Task GuardrailMiddleware(IEnumerable messages, AgentSession? session, AgentRunOptions? options, AIAgent innerAgent, CancellationToken cancellationToken) -{ - // Redact keywords from input messages - var filteredMessages = FilterMessages(messages); - - Console.WriteLine("Guardrail Middleware - Filtered messages Pre-Run"); - - // Proceed with the agent run - var response = await innerAgent.RunAsync(filteredMessages, session, options, cancellationToken); - - // Redact keywords from output messages - response.Messages = FilterMessages(response.Messages); - - Console.WriteLine("Guardrail Middleware - Filtered messages Post-Run"); - - return response; - - List FilterMessages(IEnumerable messages) - { - return messages.Select(m => new ChatMessage(m.Role, FilterContent(m.Text))).ToList(); - } - - static string FilterContent(string content) - { - foreach (var keyword in new[] { "harmful", "illegal", "violence" }) - { - if (content.Contains(keyword, StringComparison.OrdinalIgnoreCase)) - { - return "[REDACTED: Forbidden content]"; - } - } - - return content; - } -} - -// This middleware handles Human in the loop console interaction for any user approval required during function calling. -async Task ConsolePromptingApprovalMiddleware(IEnumerable messages, AgentSession? session, AgentRunOptions? options, AIAgent innerAgent, CancellationToken cancellationToken) -{ - AgentResponse response = await innerAgent.RunAsync(messages, session, options, cancellationToken); - - // For simplicity, we are assuming here that only function approvals are pending. - List approvalRequests = response.Messages.SelectMany(m => m.Contents).OfType().ToList(); - - while (approvalRequests.Count > 0) - { - // Ask the user to approve each function call request. - // Pass the user input responses back to the agent for further processing. 
- response.Messages = approvalRequests - .ConvertAll(functionApprovalRequest => - { - Console.WriteLine($"The agent would like to invoke the following function, please reply Y to approve: Name {functionApprovalRequest.FunctionCall.Name}"); - bool approved = Console.ReadLine()?.Equals("Y", StringComparison.OrdinalIgnoreCase) ?? false; - return new ChatMessage(ChatRole.User, [functionApprovalRequest.CreateResponse(approved)]); - }); - - response = await innerAgent.RunAsync(response.Messages, session, options, cancellationToken); - - approvalRequests = response.Messages.SelectMany(m => m.Contents).OfType().ToList(); - } - - return response; -} - -// Cleanup by agent name removes the agent version created. -await aiProjectClient.Agents.DeleteAgentAsync(middlewareEnabledAgent.Name); diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step12_Middleware/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step12_Middleware/README.md deleted file mode 100644 index 96d12d9828..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step12_Middleware/README.md +++ /dev/null @@ -1,58 +0,0 @@ -# Agent Middleware - -This sample demonstrates how to add middleware to intercept agent runs and function calls to implement cross-cutting concerns like logging, validation, and guardrails. - -## What This Sample Shows - -1. Azure Foundry Agents integration via `AIProjectClient` and `DefaultAzureCredential` -2. Agent run middleware (logging and monitoring) -3. Function invocation middleware (logging and overriding tool results) -4. Per-request agent run middleware -5. Per-request function pipeline with approval -6. Combining agent-level and per-request middleware - -## Function Invocation Middleware - -Not all agents support function invocation middleware. - -Attempting to use function middleware on agents that do not wrap a ChatClientAgent or derives from it will throw an InvalidOperationException. 
- -## Prerequisites - -Before you begin, ensure you have the following prerequisites: - -- .NET 10 SDK or later -- Azure Foundry service endpoint and deployment configured -- Azure CLI installed and authenticated (for Azure credential authentication) - -**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). - -Set the following environment variables: - -```powershell -$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint -$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini -``` - -## Running the Sample - -Navigate to the FoundryAgents sample directory and run: - -```powershell -cd dotnet/samples/02-agents/FoundryAgents -dotnet run --project .\FoundryAgents_Step12_Middleware -``` - -## Expected Behavior - -When you run this sample, you will see the following demonstrations: - -1. **Example 1: Wording Guardrail** - The agent receives a request for harmful content. The guardrail middleware intercepts the request and prevents the agent from responding to harmful prompts, returning a safe response instead. - -2. **Example 2: PII Detection** - The agent receives a message containing personally identifiable information (name, phone number, email). The PII middleware detects and filters this sensitive information before processing. - -3. **Example 3: Agent Function Middleware** - The agent uses function tools (GetDateTime and GetWeather) to answer a question about the current time and weather in Seattle. The function middleware logs the function calls and can override results if needed. - -4. 
**Example 4: Human-in-the-Loop Function Approval** - The agent attempts to call a weather function, but the approval middleware intercepts the call and prompts the user to approve or deny the function invocation before it executes. The user can respond with "Y" to approve or any other input to deny. - -Each example demonstrates how middleware can be used to implement cross-cutting concerns and control agent behavior at different levels (agent-level and per-request). diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step13_Plugins/FoundryAgents_Step13_Plugins.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step13_Plugins/FoundryAgents_Step13_Plugins.csproj deleted file mode 100644 index 4a34560946..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step13_Plugins/FoundryAgents_Step13_Plugins.csproj +++ /dev/null @@ -1,22 +0,0 @@ - - - - Exe - net10.0 - - enable - enable - $(NoWarn);CA1812 - - - - - - - - - - - - - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step13_Plugins/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step13_Plugins/Program.cs deleted file mode 100644 index 244d83d632..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step13_Plugins/Program.cs +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -// This sample shows how to use plugins with an AI agent. Plugin classes can -// depend on other services that need to be injected. In this sample, the -// AgentPlugin class uses the WeatherProvider and CurrentTimeProvider classes -// to get weather and current time information. Both services are registered -// in the service collection and injected into the plugin. -// Plugin classes may have many methods, but only some are intended to be used -// as AI functions. The AsAITools method of the plugin class shows how to specify -// which methods should be exposed to the AI agent. 
- -using Azure.AI.Projects; -using Azure.Identity; -using Microsoft.Agents.AI; -using Microsoft.Extensions.AI; -using Microsoft.Extensions.DependencyInjection; - -string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); -string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; - -const string AssistantInstructions = "You are a helpful assistant that helps people find information."; -const string AssistantName = "PluginAssistant"; - -// Create a service collection to hold the agent plugin and its dependencies. -ServiceCollection services = new(); -services.AddSingleton(); -services.AddSingleton(); -services.AddSingleton(); // The plugin depends on WeatherProvider and CurrentTimeProvider registered above. - -IServiceProvider serviceProvider = services.BuildServiceProvider(); - -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. -// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. -// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid -// latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); - -// Define the agent with plugin tools -// Define the agent you want to create. (Prompt Agent in this case) -AIAgent agent = await aiProjectClient.CreateAIAgentAsync( - name: AssistantName, - model: deploymentName, - instructions: AssistantInstructions, - tools: serviceProvider.GetRequiredService().AsAITools().ToList(), - services: serviceProvider); - -// Invoke the agent and output the text result. 
-AgentSession session = await agent.CreateSessionAsync(); -Console.WriteLine(await agent.RunAsync("Tell me current time and weather in Seattle.", session)); - -// Cleanup by agent name removes the agent version created. -await aiProjectClient.Agents.DeleteAgentAsync(agent.Name); - -/// -/// The agent plugin that provides weather and current time information. -/// -/// The weather provider to get weather information. -internal sealed class AgentPlugin(WeatherProvider weatherProvider) -{ - /// - /// Gets the weather information for the specified location. - /// - /// - /// This method demonstrates how to use the dependency that was injected into the plugin class. - /// - /// The location to get the weather for. - /// The weather information for the specified location. - public string GetWeather(string location) - { - return weatherProvider.GetWeather(location); - } - - /// - /// Gets the current date and time for the specified location. - /// - /// - /// This method demonstrates how to resolve a dependency using the service provider passed to the method. - /// - /// The service provider to resolve the . - /// The location to get the current time for. - /// The current date and time as a . - public DateTimeOffset GetCurrentTime(IServiceProvider sp, string location) - { - // Resolve the CurrentTimeProvider from the service provider - CurrentTimeProvider currentTimeProvider = sp.GetRequiredService(); - - return currentTimeProvider.GetCurrentTime(location); - } - - /// - /// Returns the functions provided by this plugin. - /// - /// - /// In real world scenarios, a class may have many methods and only a subset of them may be intended to be exposed as AI functions. - /// This method demonstrates how to explicitly specify which methods should be exposed to the AI agent. - /// - /// The functions provided by this plugin. 
- public IEnumerable AsAITools() - { - yield return AIFunctionFactory.Create(this.GetWeather); - yield return AIFunctionFactory.Create(this.GetCurrentTime); - } -} - -/// -/// The weather provider that returns weather information. -/// -internal sealed class WeatherProvider -{ - /// - /// Gets the weather information for the specified location. - /// - /// - /// The weather information is hardcoded for demonstration purposes. - /// In a real application, this could call a weather API to get actual weather data. - /// - /// The location to get the weather for. - /// The weather information for the specified location. - public string GetWeather(string location) - { - return $"The weather in {location} is cloudy with a high of 15°C."; - } -} - -/// -/// Provides the current date and time. -/// -/// -/// This class returns the current date and time using the system's clock. -/// -internal sealed class CurrentTimeProvider -{ - /// - /// Gets the current date and time. - /// - /// The location to get the current time for (not used in this implementation). - /// The current date and time as a . - public DateTimeOffset GetCurrentTime(string location) - { - return DateTimeOffset.Now; - } -} diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step13_Plugins/README.md b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step13_Plugins/README.md deleted file mode 100644 index 5c52ffcd1c..0000000000 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step13_Plugins/README.md +++ /dev/null @@ -1,49 +0,0 @@ -# Using Plugins with AI Agents - -This sample demonstrates how to use plugins with AI agents, where plugins are services registered in dependency injection that expose methods as AI function tools. 
- -## What this sample demonstrates - -- Creating plugin services with methods to expose as tools -- Using AsAITools() to selectively expose plugin methods -- Registering plugins in dependency injection -- Using plugins with AI agents -- Managing agent lifecycle (creation and deletion) - -## Prerequisites - -Before you begin, ensure you have the following prerequisites: - -- .NET 10 SDK or later -- Azure Foundry service endpoint and deployment configured -- Azure CLI installed and authenticated (for Azure credential authentication) - -**Note**: This demo uses Azure CLI credentials for authentication. Make sure you're logged in with `az login` and have access to the Azure Foundry resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively). - -Set the following environment variables: - -```powershell -$env:AZURE_AI_PROJECT_ENDPOINT="https://your-foundry-service.services.ai.azure.com/api/projects/your-foundry-project" # Replace with your Azure Foundry resource endpoint -$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" # Optional, defaults to gpt-4o-mini -``` - -## Run the sample - -Navigate to the FoundryAgents sample directory and run: - -```powershell -cd dotnet/samples/02-agents/FoundryAgents -dotnet run --project .\FoundryAgents_Step13_Plugins -``` - -## Expected behavior - -The sample will: - -1. Create a plugin service with methods to expose as tools -2. Register the plugin in dependency injection -3. Create an agent named "PluginAgent" with the plugin methods as function tools -4. Run the agent with a prompt that triggers it to call plugin methods -5. The agent will invoke the plugin methods to retrieve information -6. 
Clean up resources by deleting the agent - diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step14_CodeInterpreter/FoundryAgents_Step14_CodeInterpreter.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step14_CodeInterpreter/FoundryAgents_Step14_CodeInterpreter.csproj index 4a34560946..0e43c26a91 100644 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step14_CodeInterpreter/FoundryAgents_Step14_CodeInterpreter.csproj +++ b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step14_CodeInterpreter/FoundryAgents_Step14_CodeInterpreter.csproj @@ -10,13 +10,13 @@ - + - + - +
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step14_CodeInterpreter/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step14_CodeInterpreter/Program.cs index 854d317495..c350859de5 100644 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step14_CodeInterpreter/Program.cs +++ b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step14_CodeInterpreter/Program.cs @@ -3,7 +3,6 @@ // This sample shows how to use Code Interpreter Tool with AI Agents. using System.Text; -using Azure.AI.Projects; using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; @@ -15,40 +14,42 @@ string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; const string AgentInstructions = "You are a personal math tutor. When asked a math question, write and run code using the python tool to answer the question."; -const string AgentNameMEAI = "CoderAgent-MEAI"; -const string AgentNameNative = "CoderAgent-NATIVE"; -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. +// Create a Foundry project Responses API client. // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. 
-AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); +IChatClient chatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient(); -// Option 1 - Using HostedCodeInterpreterTool + AgentOptions (MEAI + AgentFramework) -// Create the server side agent version -AIAgent agentOption1 = await aiProjectClient.CreateAIAgentAsync( - model: deploymentName, - name: AgentNameMEAI, - instructions: AgentInstructions, - tools: [new HostedCodeInterpreterTool() { Inputs = [] }]); +// Option 1 - Using HostedCodeInterpreterTool (MEAI + AgentFramework) +ChatClientAgent agentOption1 = new(chatClient, new ChatClientAgentOptions +{ + Name = "CoderAgent-MEAI", + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = AgentInstructions, + Tools = [new HostedCodeInterpreterTool() { Inputs = [] }] + }, +}); -// Option 2 - Using PromptAgentDefinition SDK native type -// Create the server side agent version -AIAgent agentOption2 = await aiProjectClient.CreateAIAgentAsync( - name: AgentNameNative, - creationOptions: new AgentVersionCreationOptions( - new PromptAgentDefinition(model: deploymentName) - { - Instructions = AgentInstructions, - Tools = { - ResponseTool.CreateCodeInterpreterTool( - new CodeInterpreterToolContainer( - CodeInterpreterToolContainerConfiguration.CreateAutomaticContainerConfiguration(fileIds: []) - ) - ), - } - }) -); +// Option 2 - Using ResponseTool.CreateCodeInterpreterTool converted via AsAITool (Native SDK type) +ChatClientAgent agentOption2 = new(chatClient, new ChatClientAgentOptions +{ + Name = "CoderAgent-NATIVE", + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = AgentInstructions, + Tools = [ResponseTool.CreateCodeInterpreterTool( + new CodeInterpreterToolContainer( + CodeInterpreterToolContainerConfiguration.CreateAutomaticContainerConfiguration(fileIds: []) + )).AsAITool()] + }, +}); // Either invoke option1 or 
option2 agent, should have same result // Option 1 @@ -87,7 +88,3 @@ """); } } - -// Cleanup by agent name removes the agent version created. -await aiProjectClient.Agents.DeleteAgentAsync(agentOption1.Name); -await aiProjectClient.Agents.DeleteAgentAsync(agentOption2.Name); diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step16_FileSearch/FoundryAgents_Step16_FileSearch.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step16_FileSearch/FoundryAgents_Step16_FileSearch.csproj index 4a34560946..593b41389e 100644 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step16_FileSearch/FoundryAgents_Step16_FileSearch.csproj +++ b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step16_FileSearch/FoundryAgents_Step16_FileSearch.csproj @@ -10,13 +10,14 @@ - - + + + - +
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step16_FileSearch/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step16_FileSearch/Program.cs index 36f28c2387..20ccf69af3 100644 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step16_FileSearch/Program.cs +++ b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step16_FileSearch/Program.cs @@ -16,7 +16,7 @@ const string AgentInstructions = "You are a helpful assistant that can search through uploaded files to answer questions."; -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. +// Create a project client for file and vector store management. // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. @@ -52,8 +52,14 @@ string vectorStoreId = vectorStoreResult.Value.Id; Console.WriteLine($"Created vector store, vector store ID: {vectorStoreId}"); -AIAgent agent = await CreateAgentWithMEAI(); -// AIAgent agent = await CreateAgentWithNativeSDK(); +// Create a Foundry project Responses API client. +IChatClient chatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient(); + +ChatClientAgent agent = CreateAgentWithMEAI(); +// ChatClientAgent agent = CreateAgentWithNativeSDK(); // Run the agent Console.WriteLine("\n--- Running File Search Agent ---"); @@ -73,9 +79,8 @@ } } -// Cleanup. +// Cleanup file and vector store resources (no agent cleanup needed). 
Console.WriteLine("\n--- Cleanup ---"); -await aiProjectClient.Agents.DeleteAgentAsync(agent.Name); await vectorStoresClient.DeleteVectorStoreAsync(vectorStoreId); await filesClient.DeleteFileAsync(uploadedFile.Id); File.Delete(searchFilePath); @@ -85,27 +90,31 @@ #pragma warning disable CS8321 // Local function is declared but never used // Option 1 - Using HostedFileSearchTool (MEAI + AgentFramework) -async Task CreateAgentWithMEAI() +ChatClientAgent CreateAgentWithMEAI() { - return await aiProjectClient.CreateAIAgentAsync( - model: deploymentName, - name: "FileSearchAgent-MEAI", - instructions: AgentInstructions, - tools: [new HostedFileSearchTool() { Inputs = [new HostedVectorStoreContent(vectorStoreId)] }]); + return new ChatClientAgent(chatClient, new ChatClientAgentOptions + { + Name = "FileSearchAgent-MEAI", + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = AgentInstructions, + Tools = [new HostedFileSearchTool() { Inputs = [new HostedVectorStoreContent(vectorStoreId)] }] + }, + }); } -// Option 2 - Using PromptAgentDefinition with ResponseTool.CreateFileSearchTool (Native SDK) -async Task CreateAgentWithNativeSDK() +// Option 2 - Using ResponseTool.CreateFileSearchTool converted via AsAITool (Native SDK type) +ChatClientAgent CreateAgentWithNativeSDK() { - return await aiProjectClient.CreateAIAgentAsync( - name: "FileSearchAgent-NATIVE", - creationOptions: new AgentVersionCreationOptions( - new PromptAgentDefinition(model: deploymentName) - { - Instructions = AgentInstructions, - Tools = { - ResponseTool.CreateFileSearchTool(vectorStoreIds: [vectorStoreId]) - } - }) - ); + return new ChatClientAgent(chatClient, new ChatClientAgentOptions + { + Name = "FileSearchAgent-NATIVE", + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = AgentInstructions, + Tools = [ResponseTool.CreateFileSearchTool(vectorStoreIds: [vectorStoreId]).AsAITool()] + }, + }); } diff --git 
a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step17_OpenAPITools/FoundryAgents_Step17_OpenAPITools.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step17_OpenAPITools/FoundryAgents_Step17_OpenAPITools.csproj index 77b76acfa0..396d293084 100644 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step17_OpenAPITools/FoundryAgents_Step17_OpenAPITools.csproj +++ b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step17_OpenAPITools/FoundryAgents_Step17_OpenAPITools.csproj @@ -10,13 +10,13 @@ - + - + - +
\ No newline at end of file diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step17_OpenAPITools/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step17_OpenAPITools/Program.cs index 2ee5a94458..1b3a06d793 100644 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step17_OpenAPITools/Program.cs +++ b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step17_OpenAPITools/Program.cs @@ -2,10 +2,10 @@ // This sample shows how to use OpenAPI Tools with AI Agents. -using Azure.AI.Projects; using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; +using Microsoft.Extensions.AI; using OpenAI.Responses; // Warning: DefaultAzureCredential is intended for simplicity in development. For production scenarios, consider using a more specific credential. @@ -68,8 +68,11 @@ } """; -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. -AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); +// Create a Foundry project Responses API client. +IChatClient chatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient(); // Create the OpenAPI function definition var openApiFunction = new OpenAPIFunctionDefinition( @@ -80,37 +83,41 @@ Description = "Retrieve information about countries by currency code" }; -AIAgent agent = await CreateAgentWithMEAI(); -// AIAgent agent = await CreateAgentWithNativeSDK(); +ChatClientAgent agent = CreateAgentWithMEAI(); +// ChatClientAgent agent = CreateAgentWithNativeSDK(); // Run the agent with a question about countries Console.WriteLine(await agent.RunAsync("What countries use the Euro (EUR) as their currency? 
Please list them.")); -// Cleanup by deleting the agent -await aiProjectClient.Agents.DeleteAgentAsync(agent.Name); - // --- Agent Creation Options --- +#pragma warning disable CS8321 // Local function is declared but never used // Option 1 - Using AsAITool wrapping for OpenApiTool (MEAI + AgentFramework) -async Task CreateAgentWithMEAI() +ChatClientAgent CreateAgentWithMEAI() { - return await aiProjectClient.CreateAIAgentAsync( - model: deploymentName, - name: "OpenAPIToolsAgent-MEAI", - instructions: AgentInstructions, - tools: [((ResponseTool)AgentTool.CreateOpenApiTool(openApiFunction)).AsAITool()]); + return new ChatClientAgent(chatClient, new ChatClientAgentOptions + { + Name = "OpenAPIToolsAgent-MEAI", + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = AgentInstructions, + Tools = [((ResponseTool)AgentTool.CreateOpenApiTool(openApiFunction)).AsAITool()] + }, + }); } -// Option 2 - Using PromptAgentDefinition with AgentTool.CreateOpenApiTool (Native SDK) -async Task CreateAgentWithNativeSDK() +// Option 2 - Using ResponseTool via AsAITool (Native SDK type) +ChatClientAgent CreateAgentWithNativeSDK() { - return await aiProjectClient.CreateAIAgentAsync( - name: "OpenAPIToolsAgent-NATIVE", - creationOptions: new AgentVersionCreationOptions( - new PromptAgentDefinition(model: deploymentName) - { - Instructions = AgentInstructions, - Tools = { (ResponseTool)AgentTool.CreateOpenApiTool(openApiFunction) } - }) - ); + return new ChatClientAgent(chatClient, new ChatClientAgentOptions + { + Name = "OpenAPIToolsAgent-NATIVE", + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = AgentInstructions, + Tools = [((ResponseTool)AgentTool.CreateOpenApiTool(openApiFunction)).AsAITool()] + }, + }); } diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step18_BingCustomSearch/FoundryAgents_Step18_BingCustomSearch.csproj 
b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step18_BingCustomSearch/FoundryAgents_Step18_BingCustomSearch.csproj index 730d284bd9..0b1eaa04e5 100644 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step18_BingCustomSearch/FoundryAgents_Step18_BingCustomSearch.csproj +++ b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step18_BingCustomSearch/FoundryAgents_Step18_BingCustomSearch.csproj @@ -10,13 +10,13 @@ - + - + - +
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step18_BingCustomSearch/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step18_BingCustomSearch/Program.cs index 365bf6ed08..5b924c77d8 100644 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step18_BingCustomSearch/Program.cs +++ b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step18_BingCustomSearch/Program.cs @@ -2,10 +2,10 @@ // This sample shows how to use Bing Custom Search Tool with AI Agents. -using Azure.AI.Projects; using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; +using Microsoft.Extensions.AI; using OpenAI.Responses; string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); @@ -18,19 +18,22 @@ You are a helpful agent that can use Bing Custom Search tools to assist users. Use the available Bing Custom Search tools to answer questions and perform tasks. """; -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. +// Create a Foundry project Responses API client. // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. 
-AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); +IChatClient chatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient(); // Bing Custom Search tool parameters shared by both options BingCustomSearchToolParameters bingCustomSearchToolParameters = new([ new BingCustomSearchConfiguration(connectionId, instanceName) ]); -AIAgent agent = await CreateAgentWithMEAIAsync(); -// AIAgent agent = await CreateAgentWithNativeSDKAsync(); +ChatClientAgent agent = CreateAgentWithMEAI(); +// ChatClientAgent agent = CreateAgentWithNativeSDK(); Console.WriteLine($"Created agent: {agent.Name}"); @@ -43,34 +46,35 @@ Use the available Bing Custom Search tools to answer questions and perform tasks Console.WriteLine(message.Text); } -// Cleanup by deleting the agent -await aiProjectClient.Agents.DeleteAgentAsync(agent.Name); -Console.WriteLine($"\nDeleted agent: {agent.Name}"); - // --- Agent Creation Options --- +#pragma warning disable CS8321 // Local function is declared but never used // Option 1 - Using AsAITool wrapping for the ResponseTool returned by AgentTool.CreateBingCustomSearchTool (MEAI + AgentFramework) -async Task CreateAgentWithMEAIAsync() +ChatClientAgent CreateAgentWithMEAI() { - return await aiProjectClient.CreateAIAgentAsync( - model: deploymentName, - name: "BingCustomSearchAgent-MEAI", - instructions: AgentInstructions, - tools: [((ResponseTool)AgentTool.CreateBingCustomSearchTool(bingCustomSearchToolParameters)).AsAITool()]); + return new ChatClientAgent(chatClient, new ChatClientAgentOptions + { + Name = "BingCustomSearchAgent-MEAI", + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = AgentInstructions, + Tools = [((ResponseTool)AgentTool.CreateBingCustomSearchTool(bingCustomSearchToolParameters)).AsAITool()] + }, + }); } -// Option 2 - Using PromptAgentDefinition with AgentTool.CreateBingCustomSearchTool (Native SDK) 
-async Task CreateAgentWithNativeSDKAsync() +// Option 2 - Using ResponseTool via AsAITool (Native SDK type) +ChatClientAgent CreateAgentWithNativeSDK() { - return await aiProjectClient.CreateAIAgentAsync( - name: "BingCustomSearchAgent-NATIVE", - creationOptions: new AgentVersionCreationOptions( - new PromptAgentDefinition(model: deploymentName) - { - Instructions = AgentInstructions, - Tools = { - (ResponseTool)AgentTool.CreateBingCustomSearchTool(bingCustomSearchToolParameters), - } - }) - ); + return new ChatClientAgent(chatClient, new ChatClientAgentOptions + { + Name = "BingCustomSearchAgent-NATIVE", + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = AgentInstructions, + Tools = [((ResponseTool)AgentTool.CreateBingCustomSearchTool(bingCustomSearchToolParameters)).AsAITool()] + }, + }); } diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step19_SharePoint/FoundryAgents_Step19_SharePoint.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step19_SharePoint/FoundryAgents_Step19_SharePoint.csproj index 4d17fe06bb..1aa35f927f 100644 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step19_SharePoint/FoundryAgents_Step19_SharePoint.csproj +++ b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step19_SharePoint/FoundryAgents_Step19_SharePoint.csproj @@ -10,13 +10,13 @@ - + - + - +
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step19_SharePoint/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step19_SharePoint/Program.cs index 6d1daf85df..bb74952e90 100644 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step19_SharePoint/Program.cs +++ b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step19_SharePoint/Program.cs @@ -2,14 +2,14 @@ // This sample shows how to use SharePoint Grounding Tool with AI Agents. -using Azure.AI.Projects; using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; +using Microsoft.Extensions.AI; using OpenAI.Responses; -string endpoint = Environment.GetEnvironmentVariable("AZURE_FOUNDRY_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_FOUNDRY_PROJECT_ENDPOINT is not set."); -string deploymentName = Environment.GetEnvironmentVariable("AZURE_FOUNDRY_PROJECT_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; string sharepointConnectionId = Environment.GetEnvironmentVariable("SHAREPOINT_PROJECT_CONNECTION_ID") ?? throw new InvalidOperationException("SHAREPOINT_PROJECT_CONNECTION_ID is not set."); const string AgentInstructions = """ @@ -17,18 +17,21 @@ You are a helpful agent that can use SharePoint tools to assist users. Use the available SharePoint tools to answer questions and perform tasks. """; -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. +// Create a Foundry project Responses API client. // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. 
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); +IChatClient chatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient(); // Create SharePoint tool options with project connection var sharepointOptions = new SharePointGroundingToolOptions(); sharepointOptions.ProjectConnections.Add(new ToolProjectConnection(sharepointConnectionId)); -AIAgent agent = await CreateAgentWithMEAIAsync(); -// AIAgent agent = await CreateAgentWithNativeSDKAsync(); +ChatClientAgent agent = CreateAgentWithMEAI(); +// ChatClientAgent agent = CreateAgentWithNativeSDK(); Console.WriteLine($"Created agent: {agent.Name}"); @@ -53,32 +56,35 @@ Use the available SharePoint tools to answer questions and perform tasks. } } -// Cleanup by agent name removes the agent version created. 
-await aiProjectClient.Agents.DeleteAgentAsync(agent.Name); -Console.WriteLine($"\nDeleted agent: {agent.Name}"); - // --- Agent Creation Options --- +#pragma warning disable CS8321 // Local function is declared but never used // Option 1 - Using AgentTool.CreateSharepointTool + AsAITool() (MEAI + AgentFramework) -async Task CreateAgentWithMEAIAsync() +ChatClientAgent CreateAgentWithMEAI() { - return await aiProjectClient.CreateAIAgentAsync( - model: deploymentName, - name: "SharePointAgent-MEAI", - instructions: AgentInstructions, - tools: [((ResponseTool)AgentTool.CreateSharepointTool(sharepointOptions)).AsAITool()]); + return new ChatClientAgent(chatClient, new ChatClientAgentOptions + { + Name = "SharePointAgent-MEAI", + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = AgentInstructions, + Tools = [((ResponseTool)AgentTool.CreateSharepointTool(sharepointOptions)).AsAITool()] + }, + }); } -// Option 2 - Using PromptAgentDefinition SDK native type -async Task CreateAgentWithNativeSDKAsync() +// Option 2 - Using ResponseTool via AsAITool (Native SDK type) +ChatClientAgent CreateAgentWithNativeSDK() { - return await aiProjectClient.CreateAIAgentAsync( - name: "SharePointAgent-NATIVE", - creationOptions: new AgentVersionCreationOptions( - new PromptAgentDefinition(model: deploymentName) - { - Instructions = AgentInstructions, - Tools = { AgentTool.CreateSharepointTool(sharepointOptions) } - }) - ); + return new ChatClientAgent(chatClient, new ChatClientAgentOptions + { + Name = "SharePointAgent-NATIVE", + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = AgentInstructions, + Tools = [((ResponseTool)AgentTool.CreateSharepointTool(sharepointOptions)).AsAITool()] + }, + }); } diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step20_MicrosoftFabric/FoundryAgents_Step20_MicrosoftFabric.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step20_MicrosoftFabric/FoundryAgents_Step20_MicrosoftFabric.csproj 
index 4d17fe06bb..1aa35f927f 100644 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step20_MicrosoftFabric/FoundryAgents_Step20_MicrosoftFabric.csproj +++ b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step20_MicrosoftFabric/FoundryAgents_Step20_MicrosoftFabric.csproj @@ -10,13 +10,13 @@ - + - + - +
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step20_MicrosoftFabric/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step20_MicrosoftFabric/Program.cs index 2f13c2c30c..789522e5f5 100644 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step20_MicrosoftFabric/Program.cs +++ b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step20_MicrosoftFabric/Program.cs @@ -2,30 +2,33 @@ // This sample shows how to use Microsoft Fabric Tool with AI Agents. -using Azure.AI.Projects; using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; +using Microsoft.Extensions.AI; using OpenAI.Responses; -string endpoint = Environment.GetEnvironmentVariable("AZURE_FOUNDRY_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_FOUNDRY_PROJECT_ENDPOINT is not set."); -string deploymentName = Environment.GetEnvironmentVariable("AZURE_FOUNDRY_PROJECT_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; string fabricConnectionId = Environment.GetEnvironmentVariable("FABRIC_PROJECT_CONNECTION_ID") ?? throw new InvalidOperationException("FABRIC_PROJECT_CONNECTION_ID is not set."); const string AgentInstructions = "You are a helpful assistant with access to Microsoft Fabric data. Answer questions based on data available through your Fabric connection."; -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. +// Create a Foundry project Responses API client. // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. 
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); +IChatClient chatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient(); // Configure Microsoft Fabric tool options with project connection var fabricToolOptions = new FabricDataAgentToolOptions(); fabricToolOptions.ProjectConnections.Add(new ToolProjectConnection(fabricConnectionId)); -AIAgent agent = await CreateAgentWithMEAIAsync(); -// AIAgent agent = await CreateAgentWithNativeSDKAsync(); +ChatClientAgent agent = CreateAgentWithMEAI(); +// ChatClientAgent agent = CreateAgentWithNativeSDK(); Console.WriteLine($"Created agent: {agent.Name}"); @@ -38,35 +41,35 @@ Console.WriteLine(message.Text); } -// Cleanup by deleting the agent -await aiProjectClient.Agents.DeleteAgentAsync(agent.Name); -Console.WriteLine($"\nDeleted agent: {agent.Name}"); - // --- Agent Creation Options --- +#pragma warning disable CS8321 // Local function is declared but never used // Option 1 - Using AsAITool wrapping for the ResponseTool returned by AgentTool.CreateMicrosoftFabricTool (MEAI + AgentFramework) -async Task CreateAgentWithMEAIAsync() +ChatClientAgent CreateAgentWithMEAI() { - return await aiProjectClient.CreateAIAgentAsync( - model: deploymentName, - name: "FabricAgent-MEAI", - instructions: AgentInstructions, - tools: [((ResponseTool)AgentTool.CreateMicrosoftFabricTool(fabricToolOptions)).AsAITool()]); + return new ChatClientAgent(chatClient, new ChatClientAgentOptions + { + Name = "FabricAgent-MEAI", + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = AgentInstructions, + Tools = [((ResponseTool)AgentTool.CreateMicrosoftFabricTool(fabricToolOptions)).AsAITool()] + }, + }); } -// Option 
2 - Using PromptAgentDefinition with AgentTool.CreateMicrosoftFabricTool (Native SDK) -async Task CreateAgentWithNativeSDKAsync() +// Option 2 - Using ResponseTool via AsAITool (Native SDK type) +ChatClientAgent CreateAgentWithNativeSDK() { - return await aiProjectClient.CreateAIAgentAsync( - name: "FabricAgent-NATIVE", - creationOptions: new AgentVersionCreationOptions( - new PromptAgentDefinition(model: deploymentName) - { - Instructions = AgentInstructions, - Tools = - { - AgentTool.CreateMicrosoftFabricTool(fabricToolOptions), - } - }) - ); + return new ChatClientAgent(chatClient, new ChatClientAgentOptions + { + Name = "FabricAgent-NATIVE", + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = AgentInstructions, + Tools = [((ResponseTool)AgentTool.CreateMicrosoftFabricTool(fabricToolOptions)).AsAITool()] + }, + }); } diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step21_WebSearch/FoundryAgents_Step21_WebSearch.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step21_WebSearch/FoundryAgents_Step21_WebSearch.csproj index 4d17fe06bb..1aa35f927f 100644 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step21_WebSearch/FoundryAgents_Step21_WebSearch.csproj +++ b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step21_WebSearch/FoundryAgents_Step21_WebSearch.csproj @@ -10,13 +10,13 @@ - + - + - +
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step21_WebSearch/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step21_WebSearch/Program.cs index 1ac312ddae..87cc2aa6ee 100644 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step21_WebSearch/Program.cs +++ b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step21_WebSearch/Program.cs @@ -2,7 +2,6 @@ // This sample shows how to use the Responses API Web Search Tool with AI Agents. -using Azure.AI.Projects; using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; @@ -13,16 +12,36 @@ string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; const string AgentInstructions = "You are a helpful assistant that can search the web to find current information and answer questions accurately."; -const string AgentName = "WebSearchAgent"; -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. -AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); +// Create a Foundry project Responses API client. 
+IChatClient chatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient(); // Option 1 - Using HostedWebSearchTool (MEAI + AgentFramework) -AIAgent agent = await CreateAgentWithMEAIAsync(); - -// Option 2 - Using PromptAgentDefinition with the Responses API native type -// AIAgent agent = await CreateAgentWithNativeSDKAsync(); +ChatClientAgent agent = new(chatClient, new ChatClientAgentOptions +{ + Name = "WebSearchAgent", + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = AgentInstructions, + Tools = [new HostedWebSearchTool()] + }, +}); + +// Option 2 - Using ResponseTool.CreateWebSearchTool converted via AsAITool (Native SDK type) +// ChatClientAgent agent = new(chatClient, new ChatClientAgentOptions +// { +// Name = "WebSearchAgent", +// ChatOptions = new() +// { +// ModelId = deploymentName, +// Instructions = AgentInstructions, +// Tools = [ResponseTool.CreateWebSearchTool().AsAITool()] +// }, +// }); AgentResponse response = await agent.RunAsync("What's the weather today in Seattle?"); @@ -41,25 +60,3 @@ """); } } - -// Cleanup by agent name removes the agent version created. -await aiProjectClient.Agents.DeleteAgentAsync(agent.Name); - -// Creates the agent using the HostedWebSearchTool MEAI abstraction that maps to the built-in Responses API web search tool. -async Task CreateAgentWithMEAIAsync() - => await aiProjectClient.CreateAIAgentAsync( - name: AgentName, - model: deploymentName, - instructions: AgentInstructions, - tools: [new HostedWebSearchTool()]); - -// Creates the agent using the PromptAgentDefinition with the Responses API native ResponseTool.CreateWebSearchTool(). 
-async Task CreateAgentWithNativeSDKAsync() - => await aiProjectClient.CreateAIAgentAsync( - AgentName, - new AgentVersionCreationOptions( - new PromptAgentDefinition(model: deploymentName) - { - Instructions = AgentInstructions, - Tools = { ResponseTool.CreateWebSearchTool() } - })); diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step23_LocalMCP/FoundryAgents_Step23_LocalMCP.csproj b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step23_LocalMCP/FoundryAgents_Step23_LocalMCP.csproj index 1e3e6f57e3..68f7791da5 100644 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step23_LocalMCP/FoundryAgents_Step23_LocalMCP.csproj +++ b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step23_LocalMCP/FoundryAgents_Step23_LocalMCP.csproj @@ -10,13 +10,14 @@ + - + - +
diff --git a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step23_LocalMCP/Program.cs b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step23_LocalMCP/Program.cs index d41771ef37..6b96729e24 100644 --- a/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step23_LocalMCP/Program.cs +++ b/dotnet/samples/02-agents/FoundryAgents/FoundryAgents_Step23_LocalMCP/Program.cs @@ -5,7 +5,7 @@ // and then passed to the Foundry agent as client-side tools. // This sample uses the Microsoft Learn MCP endpoint to search documentation. -using Azure.AI.Projects; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Extensions.AI; @@ -15,7 +15,6 @@ string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; const string AgentInstructions = "You are a helpful assistant that can help with Microsoft documentation questions. Use the Microsoft Learn MCP tool to search for documentation."; -const string AgentName = "DocsAgent"; // Connect to the MCP server locally via HTTP (Streamable HTTP transport). // The MCP server is hosted at Microsoft Learn and provides documentation search capabilities. @@ -34,43 +33,42 @@ // Wrap each MCP tool with a DelegatingAIFunction to log local invocations. List wrappedTools = mcpTools.Select(tool => (AITool)new LoggingMcpTool(tool)).ToList(); -// Get a client to create/retrieve/delete server side agents with Azure Foundry Agents. +// Create a Foundry project Responses API client. // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. 
-AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential()); +IChatClient chatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient(); // Create the agent with the locally-resolved MCP tools. -AIAgent agent = await aiProjectClient.CreateAIAgentAsync( - model: deploymentName, - name: AgentName, - instructions: AgentInstructions, - tools: wrappedTools); +ChatClientAgent agent = new(chatClient, new ChatClientAgentOptions +{ + Name = "DocsAgent", + ChatOptions = new() + { + ModelId = deploymentName, + Instructions = AgentInstructions, + Tools = wrappedTools + }, +}); Console.WriteLine($"Agent '{agent.Name}' created successfully."); -try -{ - // First query - const string Prompt1 = "How does one create an Azure storage account using az cli?"; - Console.WriteLine($"\nUser: {Prompt1}\n"); - AgentResponse response1 = await agent.RunAsync(Prompt1); - Console.WriteLine($"Agent: {response1}"); +// First query +const string Prompt1 = "How does one create an Azure storage account using az cli?"; +Console.WriteLine($"\nUser: {Prompt1}\n"); +AgentResponse response1 = await agent.RunAsync(Prompt1); +Console.WriteLine($"Agent: {response1}"); - Console.WriteLine("\n=======================================\n"); +Console.WriteLine("\n=======================================\n"); - // Second query - const string Prompt2 = "What is Microsoft Agent Framework?"; - Console.WriteLine($"User: {Prompt2}\n"); - AgentResponse response2 = await agent.RunAsync(Prompt2); - Console.WriteLine($"Agent: {response2}"); -} -finally -{ - // Cleanup by removing the agent when done - await aiProjectClient.Agents.DeleteAgentAsync(agent.Name); - Console.WriteLine($"\nAgent '{agent.Name}' deleted."); -} +// Second query +const string Prompt2 = "What is Microsoft Agent Framework?"; +Console.WriteLine($"User: {Prompt2}\n"); +AgentResponse response2 = await agent.RunAsync(Prompt2); 
+Console.WriteLine($"Agent: {response2}"); /// /// Wraps an MCP tool to log when it is invoked locally, diff --git a/dotnet/samples/02-agents/FoundryAgents/README.md b/dotnet/samples/02-agents/FoundryAgents/README.md index 426a8cdad5..228b58be0e 100644 --- a/dotnet/samples/02-agents/FoundryAgents/README.md +++ b/dotnet/samples/02-agents/FoundryAgents/README.md @@ -1,32 +1,22 @@ -# Getting started with Foundry Agents +# Foundry-Specific Features -The getting started with Foundry Agents samples demonstrate the fundamental concepts and functionalities -of Azure Foundry Agents and can be used with Azure Foundry as the AI provider. +These samples demonstrate features that are specific to Azure AI Foundry, including +CRUD agent lifecycle management, server-side tools (code interpreter, file search, web search), +and evaluation capabilities. -These samples showcase how to work with agents managed through Azure Foundry, including agent creation, -versioning, multi-turn conversations, and advanced features like code interpretation and computer use. - -## Classic vs New Foundry Agents - -> [!NOTE] -> Recently, Azure Foundry introduced a new and improved experience for creating and managing AI agents, which is the target of these samples. - -For more information about the previous classic agents and for what's new in Foundry Agents, see the [Foundry Agents migration documentation](https://learn.microsoft.com/en-us/azure/ai-foundry/agents/how-to/migrate?view=foundry). - -For a sample demonstrating how to use classic Foundry Agents, see the following: [Agent with Azure AI Persistent](../AgentProviders/Agent_With_AzureAIAgentsPersistent/README.md). +For general-purpose agent samples (function tools, middleware, plugins, observability, etc.), +see the [Agents](../Agents/README.md) samples, which now use the Foundry `ProjectResponsesClient` by default. 
## Agent Versioning and Static Definitions -One of the key architectural changes in the new Foundry Agents compared to the classic experience is how agent definitions are handled. In the new architecture, agents have **versions** and their definitions are established at creation time. This means that the agent's configuration—including instructions, tools, and options—is fixed when the agent version is created. +A key architectural option in Foundry is managing agents with **versions**, whose definitions are established at creation time. This means that the agent's configuration—including instructions, tools, and options—is fixed when the agent version is created. > [!IMPORTANT] > Agent versions are static and strictly adhere to their original definition. Any attempt to provide or override tools, instructions, or options during an agent run or request will be ignored by the agent, as the API does not support runtime configuration changes. All agent behavior must be defined at agent creation time. This design ensures consistency and predictability in agent behavior across all interactions with a specific agent version. -The Agent Framework intentionally ignores unsupported runtime parameters rather than throwing exceptions. This abstraction-first approach ensures that code written against the unified agent abstraction remains portable across providers (OpenAI, Azure OpenAI, Foundry Agents). It removes the need for provider-specific conditional logic. Teams can adopt Foundry Agents without rewriting existing orchestration code. Configurations that work with other providers will gracefully degrade, rather than fail, when the underlying API does not support them.
- -## Getting started with Foundry Agents prerequisites +## Prerequisites Before you begin, ensure you have the following prerequisites: @@ -40,32 +30,26 @@ Before you begin, ensure you have the following prerequisites: ## Samples +### CRUD Agent Lifecycle + |Sample|Description| |---|---| |[Basics](./FoundryAgents_Step01.1_Basics/)|This sample demonstrates how to create and manage AI agents with versioning| -|[Running a simple agent](./FoundryAgents_Step01.2_Running/)|This sample demonstrates how to create and run a basic Foundry agent| -|[Multi-turn conversation](./FoundryAgents_Step02_MultiturnConversation/)|This sample demonstrates how to implement a multi-turn conversation with a Foundry agent| -|[Using function tools](./FoundryAgents_Step03_UsingFunctionTools/)|This sample demonstrates how to use function tools with a Foundry agent| -|[Using function tools with approvals](./FoundryAgents_Step04_UsingFunctionToolsWithApprovals/)|This sample demonstrates how to use function tools where approvals require human in the loop approvals before execution| -|[Structured output](./FoundryAgents_Step05_StructuredOutput/)|This sample demonstrates how to use structured output with a Foundry agent| -|[Persisted conversations](./FoundryAgents_Step06_PersistedConversations/)|This sample demonstrates how to persist conversations and reload them later| -|[Observability](./FoundryAgents_Step07_Observability/)|This sample demonstrates how to add telemetry to a Foundry agent| -|[Dependency injection](./FoundryAgents_Step08_DependencyInjection/)|This sample demonstrates how to add and resolve a Foundry agent with a dependency injection container| -|[Using MCP client as tools](./FoundryAgents_Step09_UsingMcpClientAsTools/)|This sample demonstrates how to use MCP clients as tools with a Foundry agent| -|[Using images](./FoundryAgents_Step10_UsingImages/)|This sample demonstrates how to use image multi-modality with a Foundry agent| -|[Exposing as a function 
tool](./FoundryAgents_Step11_AsFunctionTool/)|This sample demonstrates how to expose a Foundry agent as a function tool| -|[Using middleware](./FoundryAgents_Step12_Middleware/)|This sample demonstrates how to use middleware with a Foundry agent| -|[Using plugins](./FoundryAgents_Step13_Plugins/)|This sample demonstrates how to use plugins with a Foundry agent| -|[Code interpreter](./FoundryAgents_Step14_CodeInterpreter/)|This sample demonstrates how to use the code interpreter tool with a Foundry agent| -|[Computer use](./FoundryAgents_Step15_ComputerUse/)|This sample demonstrates how to use computer use capabilities with a Foundry agent| -|[File search](./FoundryAgents_Step16_FileSearch/)|This sample demonstrates how to use the file search tool with a Foundry agent| -|[OpenAPI tools](./FoundryAgents_Step17_OpenAPITools/)|This sample demonstrates how to use OpenAPI tools with a Foundry agent| -|[Bing Custom Search](./FoundryAgents_Step18_BingCustomSearch/)|This sample demonstrates how to use Bing Custom Search tool with a Foundry agent| -|[SharePoint grounding](./FoundryAgents_Step19_SharePoint/)|This sample demonstrates how to use the SharePoint grounding tool with a Foundry agent| -|[Microsoft Fabric](./FoundryAgents_Step20_MicrosoftFabric/)|This sample demonstrates how to use Microsoft Fabric tool with a Foundry agent| -|[Web search](./FoundryAgents_Step21_WebSearch/)|This sample demonstrates how to use the Responses API web search tool with a Foundry agent| -|[Memory search](./FoundryAgents_Step22_MemorySearch/)|This sample demonstrates how to use memory search tool with a Foundry agent| -|[Local MCP](./FoundryAgents_Step23_LocalMCP/)|This sample demonstrates how to use a local MCP client with a Foundry agent| + +### Server-Side Tools (Responses API) + +|Sample|Description| +|---|---| +|[Code interpreter](./FoundryAgents_Step14_CodeInterpreter/)|This sample demonstrates how to use the code interpreter tool| +|[Computer 
use](./FoundryAgents_Step15_ComputerUse/)|This sample demonstrates how to use computer use capabilities| +|[File search](./FoundryAgents_Step16_FileSearch/)|This sample demonstrates how to use the file search tool| +|[OpenAPI tools](./FoundryAgents_Step17_OpenAPITools/)|This sample demonstrates how to use OpenAPI tools| +|[Bing Custom Search](./FoundryAgents_Step18_BingCustomSearch/)|This sample demonstrates how to use the Bing Custom Search tool| +|[SharePoint grounding](./FoundryAgents_Step19_SharePoint/)|This sample demonstrates how to use the SharePoint grounding tool| +|[Microsoft Fabric](./FoundryAgents_Step20_MicrosoftFabric/)|This sample demonstrates how to use the Microsoft Fabric tool| +|[Web search](./FoundryAgents_Step21_WebSearch/)|This sample demonstrates how to use the web search tool| +|[Memory search](./FoundryAgents_Step22_MemorySearch/)|This sample demonstrates how to use the memory search tool| +|[Local MCP](./FoundryAgents_Step23_LocalMCP/)|This sample demonstrates how to use a local MCP client| ## Evaluation Samples diff --git a/dotnet/samples/03-workflows/Agents/FoundryAgent/FoundryAgent.csproj b/dotnet/samples/03-workflows/Agents/FoundryAgent/FoundryAgent.csproj index 30227d3f20..537165637b 100644 --- a/dotnet/samples/03-workflows/Agents/FoundryAgent/FoundryAgent.csproj +++ b/dotnet/samples/03-workflows/Agents/FoundryAgent/FoundryAgent.csproj @@ -9,13 +9,13 @@ - + + - diff --git a/dotnet/samples/03-workflows/Agents/FoundryAgent/Program.cs b/dotnet/samples/03-workflows/Agents/FoundryAgent/Program.cs index f322bb882d..6ba7630e01 100644 --- a/dotnet/samples/03-workflows/Agents/FoundryAgent/Program.cs +++ b/dotnet/samples/03-workflows/Agents/FoundryAgent/Program.cs @@ -1,6 +1,6 @@ // Copyright (c) Microsoft. All rights reserved. 
-using Azure.AI.Agents.Persistent; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; using Microsoft.Agents.AI.Workflows; @@ -9,7 +9,7 @@ namespace WorkflowFoundryAgentSample; /// -/// This sample shows how to use Azure Foundry Agents within a workflow. +/// This sample shows how to use code-first agents within a workflow. /// /// /// Pre-requisites: @@ -20,16 +20,32 @@ public static class Program { private static async Task Main() { - // Set up the Azure OpenAI client + // Set up the Foundry project client var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; - var persistentAgentsClient = new PersistentAgentsClient(endpoint, new AzureCliCredential()); - // Create agents - AIAgent frenchAgent = await GetTranslationAgentAsync("French", persistentAgentsClient, deploymentName); - AIAgent spanishAgent = await GetTranslationAgentAsync("Spanish", persistentAgentsClient, deploymentName); - AIAgent englishAgent = await GetTranslationAgentAsync("English", persistentAgentsClient, deploymentName); + // Create agents using code-first pattern (no server-side agent registration) + IChatClient chatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient(); + + AIAgent frenchAgent = chatClient.AsAIAgent(new ChatClientAgentOptions + { + Name = "French Translator", + ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a translation assistant that translates the provided text to French." 
}, + }); + AIAgent spanishAgent = chatClient.AsAIAgent(new ChatClientAgentOptions + { + Name = "Spanish Translator", + ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a translation assistant that translates the provided text to Spanish." }, + }); + AIAgent englishAgent = chatClient.AsAIAgent(new ChatClientAgentOptions + { + Name = "English Translator", + ChatOptions = new() { ModelId = deploymentName, Instructions = "You are a translation assistant that translates the provided text to English." }, + }); // Build the workflow by adding executors and connecting them var workflow = new WorkflowBuilder(frenchAgent) @@ -50,30 +66,5 @@ private static async Task Main() Console.WriteLine($"{executorComplete.ExecutorId}: {executorComplete.Data}"); } } - - // Cleanup the agents created for the sample. - await persistentAgentsClient.Administration.DeleteAgentAsync(frenchAgent.Id); - await persistentAgentsClient.Administration.DeleteAgentAsync(spanishAgent.Id); - await persistentAgentsClient.Administration.DeleteAgentAsync(englishAgent.Id); - } - - /// - /// Creates a translation agent for the specified target language. 
- /// - /// The target language for translation - /// The PersistentAgentsClient to create the agent - /// The model to use for the agent - /// A ChatClientAgent configured for the specified language - private static async Task GetTranslationAgentAsync( - string targetLanguage, - PersistentAgentsClient persistentAgentsClient, - string model) - { - var agentMetadata = await persistentAgentsClient.Administration.CreateAgentAsync( - model: model, - name: $"{targetLanguage} Translator", - instructions: $"You are a translation assistant that translates the provided text to {targetLanguage}."); - - return await persistentAgentsClient.GetAIAgentAsync(agentMetadata.Value.Id); } } diff --git a/dotnet/samples/AGENTS.md b/dotnet/samples/AGENTS.md index 1578b39a26..7ec87a81d3 100644 --- a/dotnet/samples/AGENTS.md +++ b/dotnet/samples/AGENTS.md @@ -74,30 +74,38 @@ dotnet/samples/ ## Default provider -All canonical samples (01-get-started) use **Azure OpenAI** via `AzureOpenAIClient` -with `DefaultAzureCredential`: +All canonical samples (01-get-started) use **Azure AI Foundry** via +`ProjectResponsesClient` from the `Azure.AI.Projects.OpenAI` package with +`DefaultAzureCredential`: ```csharp -using Azure.AI.OpenAI; +using Azure.AI.Projects.OpenAI; using Azure.Identity; using Microsoft.Agents.AI; -using OpenAI.Chat; +using Microsoft.Extensions.AI; -var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") - ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); -var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") + ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? 
"gpt-4o-mini"; // WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production. // In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid // latency issues, unintended credential probing, and potential security risks from fallback mechanisms. -AIAgent agent = new AzureOpenAIClient(new Uri(endpoint), new DefaultAzureCredential()) - .GetChatClient(deploymentName) - .AsAIAgent(instructions: "...", name: "..."); +IChatClient chatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient(); + +ChatClientAgent agent = new(chatClient, new ChatClientAgentOptions +{ + Name = "...", + ChatOptions = new() { ModelId = deploymentName, Instructions = "..." }, +}); ``` Environment variables: -- `AZURE_OPENAI_ENDPOINT` — Your Azure OpenAI endpoint -- `AZURE_OPENAI_DEPLOYMENT_NAME` — Model deployment name (defaults to `gpt-4o-mini`) +- `AZURE_AI_PROJECT_ENDPOINT` — Your Azure AI Foundry project endpoint +- `AZURE_AI_MODEL_DEPLOYMENT_NAME` — Model deployment name (defaults to `gpt-4o-mini`) For authentication, run `az login` before running samples. 
@@ -122,9 +130,11 @@ dotnet run ## Current API notes -- `AIAgent` is the primary agent abstraction (created via `ChatClient.AsAIAgent(...)`) +- `ChatClientAgent` is the primary agent type — construct with `new ChatClientAgent(chatClient, options)` +- `ProjectResponsesClient` provides a Foundry-aware Responses API client +- Model deployment name is specified via `ChatOptions.ModelId` on the agent options - `AgentSession` manages multi-turn conversation state - `AIContextProvider` injects memory and context -- Prefer `client.GetChatClient(deployment).AsAIAgent(...)` extension method pattern +- `.AsAIAgent(...)` extension methods exist as convenience but samples prefer explicit `ChatClientAgent` construction - Azure Functions hosting uses `ConfigureDurableAgents(options => options.AddAIAgent(agent))` - Workflows use `WorkflowBuilder` with `Executor` and edge connections diff --git a/dotnet/samples/SAMPLE_GUIDELINES.md b/dotnet/samples/SAMPLE_GUIDELINES.md new file mode 100644 index 0000000000..5a997a540e --- /dev/null +++ b/dotnet/samples/SAMPLE_GUIDELINES.md @@ -0,0 +1,121 @@ +# Sample Guidelines — .NET + +Samples are extremely important for developers to get started with Agent Framework. We strive to provide a wide range of samples that demonstrate the capabilities of Agent Framework with consistency and quality. This document outlines the guidelines for creating .NET samples. + +## Project Structure + +Every sample is a standalone C# project with the following structure: + +``` +/ +├── .csproj # Project file +├── Program.cs # Main entry point +└── README.md # (optional) Sample-specific docs +``` + +### Getting Started Samples (01-get-started) + +Named as `NN_snake_case/` (e.g., `01_hello_agent/`, `02_add_tools/`). Each step builds on the previous and demonstrates exactly one concept. + +### Concept Samples (02-agents through 05-end-to-end) + +Named as `Category_StepNN_PascalCase/` (e.g., `Agent_Step01_UsingFunctionToolsWithApprovals/`). 
+ +## .csproj Conventions + +- Target `net10.0` +- Use central package management (`Directory.Build.props` / `Directory.Packages.props`) +- Use `ProjectReference` to framework source (not NuGet packages) +- Do not add `Version` attributes — versions are centrally managed + +```xml + + + Exe + net10.0 + enable + enable + + + + + + + + + + +``` + +## Default Provider Pattern + +All get-started samples use **Azure AI Foundry** via `ProjectResponsesClient`: + +```csharp +using Azure.AI.Projects.OpenAI; +using Azure.Identity; +using Microsoft.Agents.AI; +using Microsoft.Extensions.AI; + +var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") + ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."); +var deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o-mini"; + +IChatClient chatClient = new ProjectResponsesClient( + projectEndpoint: new Uri(endpoint), + tokenProvider: new DefaultAzureCredential()) + .AsIChatClient(); + +ChatClientAgent agent = new(chatClient, new ChatClientAgentOptions +{ + Name = "...", + ChatOptions = new() { ModelId = deploymentName, Instructions = "..." }, +}); +``` + +## Environment Variables + +| Variable | Description | Default | +|----------|-------------|---------| +| `AZURE_AI_PROJECT_ENDPOINT` | Azure AI Foundry project endpoint | _(required)_ | +| `AZURE_AI_MODEL_DEPLOYMENT_NAME` | Model deployment name | `gpt-4o-mini` | + +For authentication, run `az login` before running samples. 
+ +## Snippet Tags + +All get-started samples must include named snippet regions for `:::code` doc integration: + +```csharp +// +code here +// +``` + +Standard snippet IDs by sample: + +| Sample | Snippet IDs | +|--------|-------------| +| 01_hello_agent | `create_agent`, `run_agent`, `run_agent_streaming` | +| 02_add_tools | `define_tool`, `create_agent_with_tools`, `run_agent` | +| 03_multi_turn | `create_agent`, `multi_turn` | +| 04_memory | `context_provider`, `create_agent`, `run_with_memory` | +| 05_first_workflow | `create_workflow`, `run_workflow` | +| 06_host_your_agent | `create_agent`, `host_agent` | + +## General Guidelines + +- **Clear and Concise**: Demonstrate a specific feature or capability. The fewer concepts per sample, the better. +- **Consistent Structure**: Follow naming conventions and project layout. +- **Incremental Complexity**: Start simple and gradually increase. Each getting-started step should build on the previous. +- **Prefer explicit construction**: Use `new ChatClientAgent(chatClient, ...)` rather than `.AsAIAgent(...)` extension methods for clarity. +- **Documentation**: Include a copyright header, descriptive comments, and a file-level comment explaining the sample's purpose. + +## Building and Running + +All samples use project references to the framework source: + +```bash +cd dotnet/samples/01-get-started/01_hello_agent +dotnet run +``` From 1b9bc7a0d25c09f3283f6dfc3027099d3c245664 Mon Sep 17 00:00:00 2001 From: alliscode Date: Mon, 2 Mar 2026 13:49:43 -0800 Subject: [PATCH 2/3] Formatting fixes. 
--- .../AnthropicChatCompletionFixture.cs | 2 +- .../AnthropicSkillsIntegrationTests.cs | 2 +- .../CopilotStudio.IntegrationTests/CopilotStudioFixture.cs | 2 +- .../Framework/IntegrationTest.cs | 1 - 4 files changed, 3 insertions(+), 4 deletions(-) diff --git a/dotnet/tests/AnthropicChatCompletion.IntegrationTests/AnthropicChatCompletionFixture.cs b/dotnet/tests/AnthropicChatCompletion.IntegrationTests/AnthropicChatCompletionFixture.cs index 16bb97d218..bdaaeb85f6 100644 --- a/dotnet/tests/AnthropicChatCompletion.IntegrationTests/AnthropicChatCompletionFixture.cs +++ b/dotnet/tests/AnthropicChatCompletion.IntegrationTests/AnthropicChatCompletionFixture.cs @@ -1,4 +1,4 @@ -// Copyright (c) Microsoft. All rights reserved. +// Copyright (c) Microsoft. All rights reserved. using System.Collections.Generic; using System.Linq; diff --git a/dotnet/tests/AnthropicChatCompletion.IntegrationTests/AnthropicSkillsIntegrationTests.cs b/dotnet/tests/AnthropicChatCompletion.IntegrationTests/AnthropicSkillsIntegrationTests.cs index 50474a1eeb..aada9025fe 100644 --- a/dotnet/tests/AnthropicChatCompletion.IntegrationTests/AnthropicSkillsIntegrationTests.cs +++ b/dotnet/tests/AnthropicChatCompletion.IntegrationTests/AnthropicSkillsIntegrationTests.cs @@ -1,4 +1,4 @@ -// Copyright (c) Microsoft. All rights reserved. +// Copyright (c) Microsoft. All rights reserved. using System.Threading.Tasks; using AgentConformance.IntegrationTests.Support; diff --git a/dotnet/tests/CopilotStudio.IntegrationTests/CopilotStudioFixture.cs b/dotnet/tests/CopilotStudio.IntegrationTests/CopilotStudioFixture.cs index 8dfeba1972..f2f0ce5eb3 100644 --- a/dotnet/tests/CopilotStudio.IntegrationTests/CopilotStudioFixture.cs +++ b/dotnet/tests/CopilotStudio.IntegrationTests/CopilotStudioFixture.cs @@ -1,4 +1,4 @@ -// Copyright (c) Microsoft. All rights reserved. +// Copyright (c) Microsoft. All rights reserved. 
using System; using System.Collections.Generic; diff --git a/dotnet/tests/Microsoft.Agents.AI.Workflows.Declarative.IntegrationTests/Framework/IntegrationTest.cs b/dotnet/tests/Microsoft.Agents.AI.Workflows.Declarative.IntegrationTests/Framework/IntegrationTest.cs index 470de21166..517dba9e4e 100644 --- a/dotnet/tests/Microsoft.Agents.AI.Workflows.Declarative.IntegrationTests/Framework/IntegrationTest.cs +++ b/dotnet/tests/Microsoft.Agents.AI.Workflows.Declarative.IntegrationTests/Framework/IntegrationTest.cs @@ -5,7 +5,6 @@ using System.Reflection; using System.Threading.Tasks; using Azure.Identity; -using Microsoft.Agents.AI.Workflows.Declarative.IntegrationTests.Agents; using Microsoft.Agents.AI.Workflows.Declarative.PowerFx; using Microsoft.Agents.ObjectModel; using Microsoft.Extensions.AI; From 6455ed4147b47eda78522cb20872185f57ed7309 Mon Sep 17 00:00:00 2001 From: alliscode Date: Mon, 2 Mar 2026 17:07:16 -0800 Subject: [PATCH 3/3] Ignoring unreachable link in build pipeline. --- .github/.linkspector.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/.linkspector.yml b/.github/.linkspector.yml index eb365c2982..4b0ce3bd77 100644 --- a/.github/.linkspector.yml +++ b/.github/.linkspector.yml @@ -18,6 +18,7 @@ ignorePatterns: - pattern: "0001-madr-architecture-decisions.md" - pattern: "https://api.powerplatform.com/.default" - pattern: "https://your-resource.openai.azure.com/" + - pattern: "https://your-project.services.ai.azure.com" - pattern: "http://host.docker.internal" - pattern: "https://openai.github.io/openai-agents-js/openai/agents/classes/" # excludedDirs: