Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions dotnet/agent-framework-dotnet.slnx
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@
<Project Path="samples/02-agents/Agents/Agent_Step15_DeepResearch/Agent_Step15_DeepResearch.csproj" />
<Project Path="samples/02-agents/Agents/Agent_Step16_Declarative/Agent_Step16_Declarative.csproj" />
<Project Path="samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Agent_Step17_AdditionalAIContext.csproj" />
<Project Path="samples/02-agents/Agents/Agent_Step18_CompactionPipeline/Agent_Step18_CompactionPipeline.csproj" />
</Folder>
<Folder Name="/Samples/02-agents/DeclarativeAgents/">
<Project Path="samples/02-agents/DeclarativeAgents/ChatClient/DeclarativeChatClientAgents.csproj" />
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<!-- Console sample: executable output targeting .NET 10 only. -->
<OutputType>Exe</OutputType>
<TargetFrameworks>net10.0</TargetFrameworks>

<!-- Nullable reference type analysis and implicit usings are both enabled. -->
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
</PropertyGroup>

<ItemGroup>
<!-- Azure OpenAI client, Azure credential support, and the Microsoft.Extensions.AI
     OpenAI adapter used by Program.cs (versions presumably come from central package
     management — TODO confirm Directory.Packages.props). -->
<PackageReference Include="Azure.AI.OpenAI" />
<PackageReference Include="Azure.Identity" />
<PackageReference Include="Microsoft.Extensions.AI.OpenAI" />
</ItemGroup>

<ItemGroup>
<!-- Direct project reference to the agent framework's OpenAI integration. -->
<ProjectReference Include="..\..\..\..\src\Microsoft.Agents.AI.OpenAI\Microsoft.Agents.AI.OpenAI.csproj" />
</ItemGroup>

</Project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,112 @@
// Copyright (c) Microsoft. All rights reserved.

// This sample demonstrates how to use a ChatHistoryCompactionPipeline as the ChatReducer for an agent's
// in-memory chat history. The pipeline chains multiple compaction strategies from gentle to aggressive:
// 1. ToolResultCompactionStrategy - Collapses old tool-call groups into concise summaries
// 2. SummarizationCompactionStrategy - LLM-compresses older conversation spans
// 3. SlidingWindowCompactionStrategy - Keeps only the most recent N user turns
// 4. TruncationCompactionStrategy - Emergency token-budget backstop

using System.ComponentModel;
using Azure.AI.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Agents.AI.Compaction;
using Microsoft.Extensions.AI;

// Fail fast when the required endpoint is missing; the deployment name has a default.
var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";

// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
AzureOpenAIClient openAIClient = new(new Uri(endpoint), new DefaultAzureCredential());

// Create a chat client for the agent and a separate one for the summarization strategy.
// Using the same model for simplicity; in production, use a smaller/cheaper model for summarization.
IChatClient agentChatClient = openAIClient.GetChatClient(deploymentName).AsIChatClient();
IChatClient summarizerChatClient = openAIClient.GetChatClient(deploymentName).AsIChatClient();

// Define a tool the agent can use, so we can see tool-result compaction in action.
[Description("Look up the current price of a product by name.")]
static string LookupPrice([Description("The product name to look up.")] string productName) =>
    productName.ToUpperInvariant() switch
    {
        "LAPTOP" => "The laptop costs $999.99.",
        "KEYBOARD" => "The keyboard costs $79.99.",
        "MOUSE" => "The mouse costs $29.99.",
        _ => $"Sorry, I don't have pricing for '{productName}'."
    };

// Configure the compaction pipeline with one of each strategy, ordered least to most aggressive.
// The limits are intentionally small so compaction actually triggers during this short conversation.
const int MaxTokens = 512;
const int MaxTurns = 4;

ChatHistoryCompactionPipeline compactionPipeline =
    new(
        // 1. Gentle: collapse old tool-call groups into short summaries like "[Tool calls: LookupPrice]"
        new ToolResultCompactionStrategy(MaxTokens, preserveRecentGroups: 2),

        // 2. Moderate: use an LLM to summarize older conversation spans into a concise message
        new SummarizationCompactionStrategy(summarizerChatClient, MaxTokens, preserveRecentGroups: 2),

        // 3. Aggressive: keep only the last N user turns and their responses
        new SlidingWindowCompactionStrategy(MaxTurns),

        // 4. Emergency: drop oldest groups until under the token budget
        new TruncationCompactionStrategy(MaxTokens, preserveRecentGroups: 1));

// TODO: once the preconfigured factory ships, this hand-assembled pipeline can be replaced with
// ChatHistoryCompactionPipeline.Create(Approach.Balanced, Size.Compact, summarizerChatClient).

// Create the agent with an in-memory chat history provider whose reducer is the compaction pipeline.
AIAgent agent =
    agentChatClient.AsAIAgent(
        new ChatClientAgentOptions
        {
            Name = "ShoppingAssistant",
            ChatOptions = new()
            {
                Instructions =
                    """
                    You are a helpful, but long winded, shopping assistant.
                    Help the user look up prices and compare products.
                    When responding, Be sure to be extra descriptive and use as
                    many words as possible without sounding ridiculous.
                    """,
                Tools = [AIFunctionFactory.Create(LookupPrice)],
            },
            ChatHistoryProvider = new InMemoryChatHistoryProvider(new() { ChatReducer = compactionPipeline }),
        });

AgentSession session = await agent.CreateSessionAsync();

// Helper to print the current in-memory chat history size, making the compaction visible per turn.
void PrintChatHistory()
{
    if (session.TryGetInMemoryChatHistory(out var history))
    {
        Console.WriteLine($" [Chat history: {history.Count} messages]\n");
    }
}

// Run a multi-turn conversation with tool calls to exercise the pipeline.
string[] prompts =
[
    "What's the price of a laptop?",
    "How about a keyboard?",
    "And a mouse?",
    "Which product is the cheapest?",
    "Can you compare the laptop and the keyboard for me?",
    "What was the first product I asked about?",
    "Thank you!",
];

foreach (string prompt in prompts)
{
    Console.WriteLine($"User: {prompt}");
    Console.WriteLine($"Agent: {await agent.RunAsync(prompt, session)}");
    PrintChatHistory();
}
1 change: 1 addition & 0 deletions dotnet/samples/02-agents/Agents/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ Before you begin, ensure you have the following prerequisites:
|[Deep research with an agent](./Agent_Step15_DeepResearch/)|This sample demonstrates how to use the Deep Research Tool to perform comprehensive research on complex topics|
|[Declarative agent](./Agent_Step16_Declarative/)|This sample demonstrates how to declaratively define an agent.|
|[Providing additional AI Context to an agent using multiple AIContextProviders](./Agent_Step17_AdditionalAIContext/)|This sample demonstrates how to inject additional AI context into a ChatClientAgent using multiple custom AIContextProvider components that are attached to the agent.|
|[Using compaction pipeline with an agent](./Agent_Step18_CompactionPipeline/)|This sample demonstrates how to use a compaction pipeline to efficiently limit the size of the conversation history for an agent.|

## Running the samples from the console

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,102 @@
// Copyright (c) Microsoft. All rights reserved.

using System;
using Microsoft.Extensions.AI;

namespace Microsoft.Agents.AI.Compaction;

public partial class ChatHistoryCompactionPipeline
{
    /// <summary>
    /// The relative history budget used by pipelines produced by
    /// <see cref="Create(Approach, Size, IChatClient)"/>. Each value maps to a fixed
    /// token limit and user-turn limit (see <see cref="MaxTokens"/> and <see cref="MaxTurns"/>).
    /// </summary>
    public enum Size
    {
        /// <summary>Smallest budget: 500 tokens / 10 user turns.</summary>
        Compact,

        /// <summary>Medium budget: 1000 tokens / 50 user turns.</summary>
        Adequate,

        /// <summary>
        /// Largest budget: 2000 tokens / 100 user turns.
        /// NOTE(review): name is misspelled; prefer <see cref="Accommodating"/>.
        /// </summary>
        Accomodating,

        /// <summary>Correctly spelled alias for <see cref="Accomodating"/> (same underlying value).</summary>
        Accommodating = Accomodating,
    }

    /// <summary>
    /// Controls how aggressively a preconfigured pipeline compacts the history, i.e. which
    /// strategies <see cref="Create(Approach, Size, IChatClient)"/> chains together.
    /// </summary>
    public enum Approach
    {
        /// <summary>Full chain: tool-result collapse, LLM summarization, sliding window, and truncation backstop.</summary>
        Aggressive,

        /// <summary>Tool-result collapse plus a sliding window; no LLM summarization.</summary>
        Balanced,

        /// <summary>Tool-result collapse only.</summary>
        Gentle,
    }

    /// <summary>
    /// Creates a preconfigured <see cref="ChatHistoryCompactionPipeline"/> for the given
    /// compaction <paramref name="approach"/> and history <paramref name="size"/> budget.
    /// </summary>
    /// <param name="approach">How aggressively the resulting pipeline compacts the history.</param>
    /// <param name="size">The token/turn budget applied by the selected strategies.</param>
    /// <param name="chatClient">The chat client used for LLM summarization; only exercised by <see cref="Approach.Aggressive"/>.</param>
    /// <returns>A pipeline composed of the strategies implied by <paramref name="approach"/>.</returns>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="approach"/> is not a defined <see cref="Approach"/> value.</exception>
    public static ChatHistoryCompactionPipeline Create(Approach approach, Size size, IChatClient chatClient) =>
        approach switch
        {
            Approach.Aggressive => CreateAggressive(size, chatClient),
            Approach.Balanced => CreateBalanced(size),
            Approach.Gentle => CreateGentle(size),
            _ => throw new ArgumentOutOfRangeException(nameof(approach), approach, "Unknown compaction approach."),
        };

    // Full chain, ordered least to most aggressive.
    private static ChatHistoryCompactionPipeline CreateAggressive(Size size, IChatClient chatClient) =>
        new(
            // 1. Gentle: collapse old tool-call groups into short summaries like "[Tool calls: LookupPrice]"
            new ToolResultCompactionStrategy(MaxTokens(size), preserveRecentGroups: 2),
            // 2. Moderate: use an LLM to summarize older conversation spans into a concise message
            new SummarizationCompactionStrategy(chatClient, MaxTokens(size), preserveRecentGroups: 2),
            // 3. Aggressive: keep only the last N user turns and their responses
            new SlidingWindowCompactionStrategy(MaxTurns(size)),
            // 4. Emergency: drop oldest groups until under the token budget
            new TruncationCompactionStrategy(MaxTokens(size), preserveRecentGroups: 1));

    // Tool-result collapse plus a sliding window; avoids the cost of LLM summarization.
    private static ChatHistoryCompactionPipeline CreateBalanced(Size size) =>
        new(
            // 1. Gentle: collapse old tool-call groups into short summaries like "[Tool calls: LookupPrice]"
            new ToolResultCompactionStrategy(MaxTokens(size), preserveRecentGroups: 2),
            // 2. Aggressive: keep only the last N user turns and their responses
            new SlidingWindowCompactionStrategy(MaxTurns(size)));

    // Tool-result collapse only; never drops conversational content.
    private static ChatHistoryCompactionPipeline CreateGentle(Size size) =>
        new(
            // 1. Gentle: collapse old tool-call groups into short summaries like "[Tool calls: LookupPrice]"
            new ToolResultCompactionStrategy(MaxTokens(size), preserveRecentGroups: 2));

    // Maps a size budget to the token threshold shared by the token-based strategies.
    private static int MaxTokens(Size size) =>
        size switch
        {
            Size.Compact => 500,
            Size.Adequate => 1000,
            Size.Accomodating => 2000,
            _ => throw new ArgumentOutOfRangeException(nameof(size), size, "Unknown size."),
        };

    // Maps a size budget to the user-turn count kept by the sliding-window strategy.
    private static int MaxTurns(Size size) =>
        size switch
        {
            Size.Compact => 10,
            Size.Adequate => 50,
            Size.Accomodating => 100,
            _ => throw new ArgumentOutOfRangeException(nameof(size), size, "Unknown size."),
        };
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,114 @@
// Copyright (c) Microsoft. All rights reserved.

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.AI;
using Microsoft.Shared.Diagnostics;

namespace Microsoft.Agents.AI.Compaction;

/// <summary>
/// Executes a chain of <see cref="ChatHistoryCompactionStrategy"/> instances in order
/// against a mutable message list.
/// </summary>
/// <remarks>
/// <para>
/// Each strategy's trigger is evaluated against the metrics <em>as they stand after prior strategies</em>,
/// so earlier strategies can bring the conversation within thresholds that cause later strategies to skip.
/// </para>
/// <para>
/// The pipeline is fully standalone — it can be used without any agent, session, or context provider.
/// It also implements <see cref="IChatReducer"/> so it can be used directly anywhere a reducer is
/// accepted (e.g., <see cref="InMemoryChatHistoryProviderOptions.ChatReducer"/>).
/// </para>
/// </remarks>
public partial class ChatHistoryCompactionPipeline : IChatReducer
{
    // The ordered strategies to execute; materialized once at construction.
    private readonly ChatHistoryCompactionStrategy[] _strategies;

    // Computes message/turn/token metrics before and after each strategy runs.
    private readonly IChatHistoryMetricsCalculator _metricsCalculator;

    /// <summary>
    /// Initializes a new instance of the <see cref="ChatHistoryCompactionPipeline"/> class.
    /// </summary>
    /// <param name="strategies">The ordered list of compaction strategies to execute.</param>
    /// <remarks>
    /// By default, <see cref="DefaultChatHistoryMetricsCalculator"/> is used.
    /// </remarks>
    public ChatHistoryCompactionPipeline(
        params IEnumerable<ChatHistoryCompactionStrategy> strategies)
        : this(metricsCalculator: null, strategies) { }

    /// <summary>
    /// Initializes a new instance of the <see cref="ChatHistoryCompactionPipeline"/> class.
    /// </summary>
    /// <param name="metricsCalculator">
    /// An optional metrics calculator. When <see langword="null"/>, a
    /// <see cref="DefaultChatHistoryMetricsCalculator"/> is used.
    /// </param>
    /// <param name="strategies">The ordered list of compaction strategies to execute.</param>
    public ChatHistoryCompactionPipeline(
        IChatHistoryMetricsCalculator? metricsCalculator,
        params IEnumerable<ChatHistoryCompactionStrategy> strategies)
    {
        this._strategies = [.. Throw.IfNull(strategies)];
        this._metricsCalculator = metricsCalculator ?? DefaultChatHistoryMetricsCalculator.Instance;
    }

    /// <summary>
    /// Reduces the given messages by running all strategies in sequence.
    /// </summary>
    /// <param name="messages">The messages to reduce.</param>
    /// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests.</param>
    /// <returns>The reduced set of messages.</returns>
    public virtual async Task<IEnumerable<ChatMessage>> ReduceAsync(
        IEnumerable<ChatMessage> messages,
        CancellationToken cancellationToken = default)
    {
        // Reuse the caller's list when possible to avoid a copy; CompactAsync mutates it in place.
        List<ChatMessage> messageBuffer = messages is List<ChatMessage> messageList ? messageList : [.. messages];
        await this.CompactAsync(messageBuffer, cancellationToken).ConfigureAwait(false);
        return messageBuffer;
    }

    /// <summary>
    /// Run all strategies in sequence against the given messages.
    /// </summary>
    /// <param name="messages">The mutable message list to compact.</param>
    /// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests.</param>
    /// <returns>A <see cref="CompactionPipelineResult"/> with aggregate and per-strategy metrics.</returns>
    public async ValueTask<CompactionPipelineResult> CompactAsync(
        List<ChatMessage> messages,
        CancellationToken cancellationToken = default)
    {
        Throw.IfNull(messages);

        ChatHistoryMetric overallBefore = this._metricsCalculator.Calculate(messages);

        Debug.WriteLine($"COMPACTION: BEGIN x{overallBefore.MessageCount}/#{overallBefore.UserTurnCount} ({overallBefore.TokenCount} tokens)");

        List<CompactionResult> compactionResults = new(this._strategies.Length);

        Stopwatch timer = new();
        ChatHistoryMetric overallAfter = overallBefore;
        ChatHistoryMetric currentBefore = overallBefore;
        foreach (ChatHistoryCompactionStrategy strategy in this._strategies)
        {
            // %%% VERBOSE - Debug.WriteLine($"COMPACTION: {strategy.Name} START");
            // Snapshot the cumulative elapsed time before running this strategy so the
            // per-strategy delta can be computed afterwards. (BUGFIX: the snapshot was
            // previously taken once outside the loop, so every strategy's reported time
            // was cumulative rather than individual.)
            TimeSpan startTime = timer.Elapsed;
            timer.Start();
            // Flow this strategy's "before" metrics via the ambient slot.
            // NOTE(review): s_currentMetrics appears to be a static side channel consumed by
            // the strategy during CompactAsync — confirm against ChatHistoryCompactionStrategy.
            ChatHistoryCompactionStrategy.s_currentMetrics.Value = currentBefore;
            CompactionResult strategyResult = await strategy.CompactAsync(messages, this._metricsCalculator, cancellationToken).ConfigureAwait(false);
            timer.Stop();
            TimeSpan elapsedTime = timer.Elapsed - startTime;
            // %%% VERBOSE - Debug.WriteLine($"COMPACTION: {strategy.Name} FINISH [{elapsedTime}]");
            compactionResults.Add(strategyResult);

            // The next strategy's trigger is evaluated against the metrics this one produced.
            overallAfter = currentBefore = strategyResult.After;
        }

        Debug.WriteLineIf(overallBefore.TokenCount != overallAfter.TokenCount, $"COMPACTION: TOTAL [{timer.Elapsed}] {overallBefore.TokenCount} => {overallAfter.TokenCount} tokens");

        return new(overallBefore, overallAfter, compactionResults);
    }
}
Loading
Loading