diff --git a/.gitignore b/.gitignore
index 4994e9e2fe..514b0d5e31 100644
--- a/.gitignore
+++ b/.gitignore
@@ -136,6 +136,10 @@ celerybeat.pid
.venv
env/
venv/
+
+# Foundry agent CLI (contains secrets, auto-generated)
+.foundry-agent.json
+.foundry-agent-build.log
ENV/
env.bak/
venv.bak/
diff --git a/dotnet/.gitignore b/dotnet/.gitignore
index ce1409abe9..572680831e 100644
--- a/dotnet/.gitignore
+++ b/dotnet/.gitignore
@@ -402,4 +402,11 @@ FodyWeavers.xsd
*.msp
# JetBrains Rider
-*.sln.iml
\ No newline at end of file
+*.sln.iml
+
+# Foundry agent CLI config (contains secrets, auto-generated)
+.foundry-agent.json
+.foundry-agent-build.log
+
+# Pre-published output for Docker builds
+out/
\ No newline at end of file
diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props
index 0270f0e38b..ce1bbffa0a 100644
--- a/dotnet/Directory.Packages.props
+++ b/dotnet/Directory.Packages.props
@@ -19,11 +19,16 @@
+
+
+
-
-
+
+
+
+
@@ -48,15 +53,15 @@
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
diff --git a/dotnet/agent-framework-dotnet.slnx b/dotnet/agent-framework-dotnet.slnx
index 24b596509e..5cc3327b8d 100644
--- a/dotnet/agent-framework-dotnet.slnx
+++ b/dotnet/agent-framework-dotnet.slnx
@@ -1,4 +1,4 @@
-
+
@@ -260,7 +260,41 @@
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -310,15 +344,6 @@
-
-
-
-
-
-
-
-
-
@@ -480,13 +505,12 @@
-
-
+
@@ -508,11 +532,10 @@
-
+
-
@@ -528,12 +551,11 @@
-
-
+
diff --git a/dotnet/samples/04-hosting/.gitignore b/dotnet/samples/04-hosting/.gitignore
new file mode 100644
index 0000000000..324c8dcfb3
--- /dev/null
+++ b/dotnet/samples/04-hosting/.gitignore
@@ -0,0 +1 @@
+**/Properties/launchSettings.json
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/.env.example b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/.env.example
new file mode 100644
index 0000000000..46a6ae748c
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/.env.example
@@ -0,0 +1,2 @@
+ASPNETCORE_URLS=http://+:8088
+ASPNETCORE_ENVIRONMENT=Development
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/Dockerfile b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/Dockerfile
new file mode 100644
index 0000000000..24585dec12
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/Dockerfile
@@ -0,0 +1,17 @@
+# Use the official .NET 10.0 ASP.NET runtime as a parent image
+FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS base
+WORKDIR /app
+
+FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
+WORKDIR /src
+COPY . .
+RUN dotnet restore
+RUN dotnet publish -c Release -o /app/publish
+
+# Final stage
+FROM base AS final
+WORKDIR /app
+COPY --from=build /app/publish .
+EXPOSE 8088
+ENV ASPNETCORE_URLS=http://+:8088
+ENTRYPOINT ["dotnet", "HostedInvocationsEchoAgent.dll"]
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/Dockerfile.contributor b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/Dockerfile.contributor
new file mode 100644
index 0000000000..91a403c26c
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/Dockerfile.contributor
@@ -0,0 +1,19 @@
+# Dockerfile for contributors building from the agent-framework repository source.
+#
+# This project uses ProjectReference to the local Microsoft.Agents.AI.Abstractions source,
+# which means a standard multi-stage Docker build cannot resolve dependencies outside
+# this folder. Instead, pre-publish the app targeting the container runtime and copy
+# the output into the container:
+#
+# dotnet publish -c Debug -f net10.0 -r linux-musl-x64 --self-contained false -o out
+# docker build -f Dockerfile.contributor -t hosted-invocations-echo-agent .
+# docker run --rm -p 8088:8088 hosted-invocations-echo-agent
+#
+# For end-users consuming the NuGet package (not ProjectReference), use the standard
+# Dockerfile which performs a full dotnet restore + publish inside the container.
+FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
+WORKDIR /app
+COPY out/ .
+EXPOSE 8088
+ENV ASPNETCORE_URLS=http://+:8088
+ENTRYPOINT ["dotnet", "HostedInvocationsEchoAgent.dll"]
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/EchoAIAgent.cs b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/EchoAIAgent.cs
new file mode 100644
index 0000000000..ccbfe72781
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/EchoAIAgent.cs
@@ -0,0 +1,85 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Runtime.CompilerServices;
+using System.Text.Json;
+using Microsoft.Extensions.AI;
+
+namespace Microsoft.Agents.AI;
+
+/// <summary>
+/// A minimal <see cref="AIAgent"/> that echoes the user's input text back as the response.
+/// No LLM or external service is required.
+/// </summary>
+public sealed class EchoAIAgent : AIAgent
+{
+ ///
+ public override string Name => "echo-agent";
+
+ ///
+ public override string Description => "An agent that echoes back the input message.";
+
+ ///
+ protected override Task RunCoreAsync(
+ IEnumerable messages,
+ AgentSession? session = null,
+ AgentRunOptions? options = null,
+ CancellationToken cancellationToken = default)
+ {
+ var inputText = GetInputText(messages);
+ var response = new AgentResponse(new ChatMessage(ChatRole.Assistant, $"Echo: {inputText}"));
+ return Task.FromResult(response);
+ }
+
+ ///
+ protected override async IAsyncEnumerable RunCoreStreamingAsync(
+ IEnumerable messages,
+ AgentSession? session = null,
+ AgentRunOptions? options = null,
+ [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ {
+ var inputText = GetInputText(messages);
+ yield return new AgentResponseUpdate
+ {
+ Role = ChatRole.Assistant,
+ Contents = [new TextContent($"Echo: {inputText}")],
+ };
+
+ await Task.CompletedTask;
+ }
+
+ ///
+ protected override ValueTask CreateSessionCoreAsync(CancellationToken cancellationToken = default)
+ => new(new EchoAgentSession());
+
+ ///
+ protected override ValueTask SerializeSessionCoreAsync(
+ AgentSession session,
+ JsonSerializerOptions? jsonSerializerOptions = null,
+ CancellationToken cancellationToken = default)
+ => new(JsonSerializer.SerializeToElement(new { }, jsonSerializerOptions));
+
+ ///
+ protected override ValueTask DeserializeSessionCoreAsync(
+ JsonElement serializedState,
+ JsonSerializerOptions? jsonSerializerOptions = null,
+ CancellationToken cancellationToken = default)
+ => new(new EchoAgentSession());
+
+ private static string GetInputText(IEnumerable messages)
+ {
+ foreach (var message in messages)
+ {
+ if (message.Role == ChatRole.User)
+ {
+ return message.Text ?? string.Empty;
+ }
+ }
+
+ return string.Empty;
+ }
+
+ ///
+ /// Minimal session for the echo agent. No state is persisted.
+ ///
+ private sealed class EchoAgentSession : AgentSession;
+}
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/EchoInvocationHandler.cs b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/EchoInvocationHandler.cs
new file mode 100644
index 0000000000..f0101a57f4
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/EchoInvocationHandler.cs
@@ -0,0 +1,32 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Azure.AI.AgentServer.Invocations;
+using Microsoft.Agents.AI;
+
+namespace HostedInvocationsEchoAgent;
+
+/// <summary>
+/// An <see cref="InvocationHandler"/> that reads the request body as plain text,
+/// passes it to the <see cref="EchoAIAgent"/>, and writes the response back.
+/// </summary>
+public sealed class EchoInvocationHandler(EchoAIAgent agent) : InvocationHandler
+{
+ ///
+ public override async Task HandleAsync(
+ HttpRequest request,
+ HttpResponse response,
+ InvocationContext context,
+ CancellationToken cancellationToken)
+ {
+ // Read the raw text from the request body.
+ using var reader = new StreamReader(request.Body);
+ var input = await reader.ReadToEndAsync(cancellationToken);
+
+ // Run the echo agent with the input text.
+ var agentResponse = await agent.RunAsync(input, cancellationToken: cancellationToken);
+
+ // Write the agent response text back to the HTTP response.
+ response.ContentType = "text/plain";
+ await response.WriteAsync(agentResponse.Text, cancellationToken);
+ }
+}
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/Hosted-Invocations-EchoAgent.csproj b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/Hosted-Invocations-EchoAgent.csproj
new file mode 100644
index 0000000000..d925172007
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/Hosted-Invocations-EchoAgent.csproj
@@ -0,0 +1,30 @@
+
+
+
+ net10.0
+ enable
+ enable
+ false
+ HostedInvocationsEchoAgent
+ HostedInvocationsEchoAgent
+ $(NoWarn);
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/Program.cs b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/Program.cs
new file mode 100644
index 0000000000..d5944560ae
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/Program.cs
@@ -0,0 +1,28 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Azure.AI.AgentServer.Invocations;
+using DotNetEnv;
+using HostedInvocationsEchoAgent;
+using Microsoft.Agents.AI;
+
+// Load .env file if present (for local development)
+Env.TraversePath().Load();
+
+var builder = WebApplication.CreateBuilder(args);
+
+// Register the echo agent as a singleton (no LLM needed).
+builder.Services.AddSingleton();
+
+// Register the Invocations SDK services and wire the handler.
+builder.Services.AddInvocationsServer();
+builder.Services.AddScoped();
+
+var app = builder.Build();
+
+// Map the Invocations protocol endpoints:
+// POST /invocations — invoke the agent
+// GET /invocations/{id} — get result (not used by this sample)
+// POST /invocations/{id}/cancel — cancel (not used by this sample)
+app.MapInvocationsServer();
+
+app.Run();
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/README.md b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/README.md
new file mode 100644
index 0000000000..5fcfddab22
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/README.md
@@ -0,0 +1,76 @@
+# Hosted-Invocations-EchoAgent
+
+A minimal echo agent hosted as a Foundry Hosted Agent using the **Invocations protocol**. The agent reads the request body as plain text, passes it through a custom `EchoAIAgent`, and writes the echoed text back in the response. No LLM or Azure credentials are required.
+
+## Prerequisites
+
+- [.NET 10 SDK](https://dotnet.microsoft.com/download/dotnet/10.0)
+
+## Configuration
+
+Copy the template:
+
+```bash
+cp .env.example .env
+```
+
+> **Note:** `.env` is gitignored. The `.env.example` template is checked in as a reference.
+
+## Running directly (contributors)
+
+This project uses `ProjectReference` to build against the local Agent Framework source.
+
+```bash
+cd dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent
+dotnet run
+```
+
+The agent will start on `http://localhost:8088`.
+
+### Test it
+
+```bash
+curl -X POST http://localhost:8088/invocations \
+ -H "Content-Type: text/plain" \
+ -d "Hello, world!"
+```
+
+Expected response:
+
+```
+Echo: Hello, world!
+```
+
+## Running with Docker
+
+Since this project uses `ProjectReference`, the standard `Dockerfile` cannot resolve dependencies outside this folder. Use `Dockerfile.contributor` which takes a pre-published output.
+
+### 1. Publish for the container runtime (Linux Alpine)
+
+```bash
+dotnet publish -c Debug -f net10.0 -r linux-musl-x64 --self-contained false -o out
+```
+
+### 2. Build the Docker image
+
+```bash
+docker build -f Dockerfile.contributor -t hosted-invocations-echo-agent .
+```
+
+### 3. Run the container
+
+```bash
+docker run --rm -p 8088:8088 hosted-invocations-echo-agent
+```
+
+### 4. Test it
+
+```bash
+curl -X POST http://localhost:8088/invocations \
+ -H "Content-Type: text/plain" \
+ -d "Hello from Docker!"
+```
+
+## NuGet package users
+
+If you are consuming the Agent Framework as a NuGet package (not building from source), use the standard `Dockerfile` instead of `Dockerfile.contributor`. See the commented section in `Hosted-Invocations-EchoAgent.csproj` for the `PackageReference` alternative.
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/agent.manifest.yaml b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/agent.manifest.yaml
new file mode 100644
index 0000000000..09e4b0f885
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/agent.manifest.yaml
@@ -0,0 +1,27 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/AgentManifest.yaml
+name: hosted-invocations-echo-agent
+displayName: "Hosted Invocations Echo Agent"
+
+description: >
+ A minimal echo agent hosted as a Foundry Hosted Agent using the Invocations
+ protocol. Reads the request body as plain text, echoes it back in the response.
+
+metadata:
+ tags:
+ - AI Agent Hosting
+ - Azure AI AgentServer
+ - Invocations Protocol
+ - Agent Framework
+
+template:
+ name: hosted-invocations-echo-agent
+ kind: hosted
+ protocols:
+ - protocol: invocations
+ version: 1.0.0
+ resources:
+ cpu: "0.25"
+ memory: 0.5Gi
+parameters:
+ properties: []
+resources: []
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/agent.yaml b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/agent.yaml
new file mode 100644
index 0000000000..001a19f0ac
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Hosted-Invocations-EchoAgent/agent.yaml
@@ -0,0 +1,9 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/ContainerAgent.yaml
+kind: hosted
+name: hosted-invocations-echo-agent
+protocols:
+ - protocol: invocations
+ version: 1.0.0
+resources:
+ cpu: "0.25"
+ memory: 0.5Gi
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Using-Samples/SimpleInvocationsAgent/InvocationsAIAgent.cs b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Using-Samples/SimpleInvocationsAgent/InvocationsAIAgent.cs
new file mode 100644
index 0000000000..db291458c2
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Using-Samples/SimpleInvocationsAgent/InvocationsAIAgent.cs
@@ -0,0 +1,129 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Runtime.CompilerServices;
+using System.Text.Json;
+using Microsoft.Extensions.AI;
+
+namespace Microsoft.Agents.AI;
+
+/// <summary>
+/// An <see cref="AIAgent"/> that invokes a remote agent hosted with the Invocations protocol
+/// by sending plain-text HTTP POST requests to the /invocations endpoint.
+/// </summary>
+public sealed class InvocationsAIAgent : AIAgent
+{
+ private readonly HttpClient _httpClient;
+ private readonly Uri _invocationsUri;
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ ///
+ /// The base URI of the hosted agent (e.g., http://localhost:8089).
+ /// The /invocations path is appended automatically.
+ ///
+ /// Optional to use. If , a new instance is created.
+ /// Optional name for the agent.
+ /// Optional description for the agent.
+ public InvocationsAIAgent(
+ Uri agentEndpoint,
+ HttpClient? httpClient = null,
+ string? name = null,
+ string? description = null)
+ {
+ ArgumentNullException.ThrowIfNull(agentEndpoint);
+
+ this._httpClient = httpClient ?? new HttpClient();
+
+ // Ensure the base URI ends with a slash so that combining works correctly.
+ var baseUri = agentEndpoint.AbsoluteUri.EndsWith('/')
+ ? agentEndpoint
+ : new Uri(agentEndpoint.AbsoluteUri + "/");
+ this._invocationsUri = new Uri(baseUri, "invocations");
+
+ this.Name = name ?? "invocations-agent";
+ this.Description = description ?? "An agent that calls a remote Invocations protocol endpoint.";
+ }
+
+ ///
+ public override string? Name { get; }
+
+ ///
+ public override string? Description { get; }
+
+ ///
+ protected override async Task RunCoreAsync(
+ IEnumerable messages,
+ AgentSession? session = null,
+ AgentRunOptions? options = null,
+ CancellationToken cancellationToken = default)
+ {
+ var inputText = GetLastUserText(messages);
+ var responseText = await this.SendInvocationAsync(inputText, cancellationToken).ConfigureAwait(false);
+ return new AgentResponse(new ChatMessage(ChatRole.Assistant, responseText));
+ }
+
+ ///
+ protected override async IAsyncEnumerable RunCoreStreamingAsync(
+ IEnumerable messages,
+ AgentSession? session = null,
+ AgentRunOptions? options = null,
+ [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ {
+ // The Invocations protocol returns a complete response (no SSE streaming),
+ // so we yield a single update with the full text.
+ var inputText = GetLastUserText(messages);
+ var responseText = await this.SendInvocationAsync(inputText, cancellationToken).ConfigureAwait(false);
+
+ yield return new AgentResponseUpdate
+ {
+ Role = ChatRole.Assistant,
+ Contents = [new TextContent(responseText)],
+ };
+ }
+
+ ///
+ protected override ValueTask CreateSessionCoreAsync(CancellationToken cancellationToken = default)
+ => new(new InvocationsAgentSession());
+
+ ///
+ protected override ValueTask SerializeSessionCoreAsync(
+ AgentSession session,
+ JsonSerializerOptions? jsonSerializerOptions = null,
+ CancellationToken cancellationToken = default)
+ => new(JsonSerializer.SerializeToElement(new { }, jsonSerializerOptions));
+
+ ///
+ protected override ValueTask DeserializeSessionCoreAsync(
+ JsonElement serializedState,
+ JsonSerializerOptions? jsonSerializerOptions = null,
+ CancellationToken cancellationToken = default)
+ => new(new InvocationsAgentSession());
+
+ private async Task SendInvocationAsync(string input, CancellationToken cancellationToken)
+ {
+ using var content = new StringContent(input, System.Text.Encoding.UTF8, "text/plain");
+ using var response = await this._httpClient.PostAsync(this._invocationsUri, content, cancellationToken).ConfigureAwait(false);
+ response.EnsureSuccessStatusCode();
+ return await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+ }
+
+ private static string GetLastUserText(IEnumerable messages)
+ {
+ string? lastUserText = null;
+ foreach (var message in messages)
+ {
+ if (message.Role == ChatRole.User)
+ {
+ lastUserText = message.Text;
+ }
+ }
+
+ return lastUserText ?? string.Empty;
+ }
+
+ ///
+ /// Minimal session for the invocations agent. No state is persisted.
+ ///
+ private sealed class InvocationsAgentSession : AgentSession;
+}
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Using-Samples/SimpleInvocationsAgent/Program.cs b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Using-Samples/SimpleInvocationsAgent/Program.cs
new file mode 100644
index 0000000000..915e73737d
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Using-Samples/SimpleInvocationsAgent/Program.cs
@@ -0,0 +1,61 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using DotNetEnv;
+using Microsoft.Agents.AI;
+
+// Load .env file if present (for local development)
+Env.TraversePath().Load();
+
+Uri agentEndpoint = new(Environment.GetEnvironmentVariable("AGENT_ENDPOINT")
+ ?? "http://localhost:8088");
+
+// Create an agent that calls the remote Invocations endpoint.
+InvocationsAIAgent agent = new(agentEndpoint);
+
+// REPL
+Console.ForegroundColor = ConsoleColor.Cyan;
+Console.WriteLine($"""
+ ══════════════════════════════════════════════════════════
+ Simple Invocations Agent Sample
+ Connected to: {agentEndpoint}
+ Type a message or 'quit' to exit
+ ══════════════════════════════════════════════════════════
+ """);
+Console.ResetColor();
+Console.WriteLine();
+
+while (true)
+{
+ Console.ForegroundColor = ConsoleColor.Green;
+ Console.Write("You> ");
+ Console.ResetColor();
+
+ string? input = Console.ReadLine();
+
+ if (string.IsNullOrWhiteSpace(input)) { continue; }
+ if (input.Equals("quit", StringComparison.OrdinalIgnoreCase)) { break; }
+
+ try
+ {
+ Console.ForegroundColor = ConsoleColor.Yellow;
+ Console.Write("Agent> ");
+ Console.ResetColor();
+
+ await foreach (var update in agent.RunStreamingAsync(input))
+ {
+ Console.Write(update);
+ }
+
+ Console.WriteLine();
+ }
+ catch (Exception ex)
+ {
+ Console.ForegroundColor = ConsoleColor.Red;
+ Console.WriteLine($"Error: {ex.Message}");
+ Console.ResetColor();
+ }
+
+ Console.WriteLine();
+}
+
+Console.WriteLine("Goodbye!");
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Using-Samples/SimpleInvocationsAgent/SimpleInvocationsAgent.csproj b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Using-Samples/SimpleInvocationsAgent/SimpleInvocationsAgent.csproj
new file mode 100644
index 0000000000..d509bd2c70
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/invocations/Using-Samples/SimpleInvocationsAgent/SimpleInvocationsAgent.csproj
@@ -0,0 +1,22 @@
+
+
+
+ Exe
+ net10.0
+ enable
+ enable
+ false
+ SimpleInvocationsAgentClient
+ simple-invocations-agent-client
+ $(NoWarn);NU1903;NU1605
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/.env.example b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/.env.example
new file mode 100644
index 0000000000..984e8625cf
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/.env.example
@@ -0,0 +1,6 @@
+AZURE_AI_PROJECT_ENDPOINT=
+ASPNETCORE_URLS=http://+:8088
+ASPNETCORE_ENVIRONMENT=Development
+AZURE_AI_MODEL_DEPLOYMENT_NAME=gpt-4o
+AGENT_NAME=hosted-chat-client-agent
+AZURE_BEARER_TOKEN=DefaultAzureCredential
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/Dockerfile b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/Dockerfile
new file mode 100644
index 0000000000..6f1be8ee8e
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/Dockerfile
@@ -0,0 +1,17 @@
+# Use the official .NET 10.0 ASP.NET runtime as a parent image
+FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS base
+WORKDIR /app
+
+FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
+WORKDIR /src
+COPY . .
+RUN dotnet restore
+RUN dotnet publish -c Release -o /app/publish
+
+# Final stage
+FROM base AS final
+WORKDIR /app
+COPY --from=build /app/publish .
+EXPOSE 8088
+ENV ASPNETCORE_URLS=http://+:8088
+ENTRYPOINT ["dotnet", "HostedChatClientAgent.dll"]
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/Dockerfile.contributor b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/Dockerfile.contributor
new file mode 100644
index 0000000000..200f674bdd
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/Dockerfile.contributor
@@ -0,0 +1,19 @@
+# Dockerfile for contributors building from the agent-framework repository source.
+#
+# This project uses ProjectReference to the local Microsoft.Agents.AI.Foundry source,
+# which means a standard multi-stage Docker build cannot resolve dependencies outside
+# this folder. Instead, pre-publish the app targeting the container runtime and copy
+# the output into the container:
+#
+# dotnet publish -c Debug -f net10.0 -r linux-musl-x64 --self-contained false -o out
+# docker build -f Dockerfile.contributor -t hosted-chat-client-agent .
+# docker run --rm -p 8088:8088 -e AGENT_NAME=hosted-chat-client-agent --env-file .env hosted-chat-client-agent
+#
+# For end-users consuming the NuGet package (not ProjectReference), use the standard
+# Dockerfile which performs a full dotnet restore + publish inside the container.
+FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
+WORKDIR /app
+COPY out/ .
+EXPOSE 8088
+ENV ASPNETCORE_URLS=http://+:8088
+ENTRYPOINT ["dotnet", "HostedChatClientAgent.dll"]
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/HostedChatClientAgent.csproj b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/HostedChatClientAgent.csproj
new file mode 100644
index 0000000000..b1fe8d3e3c
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/HostedChatClientAgent.csproj
@@ -0,0 +1,30 @@
+
+
+
+ net10.0
+ enable
+ enable
+ false
+ HostedChatClientAgent
+ HostedChatClientAgent
+ $(NoWarn);
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/Program.cs b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/Program.cs
new file mode 100644
index 0000000000..ee86bbae1b
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/Program.cs
@@ -0,0 +1,98 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Azure.AI.Projects;
+using Azure.Core;
+using Azure.Identity;
+using DotNetEnv;
+using Microsoft.Agents.AI;
+using Microsoft.Agents.AI.Foundry.Hosting;
+
+// Load .env file if present (for local development)
+Env.TraversePath().Load();
+
+var projectEndpoint = new Uri(Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT")
+ ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."));
+
+var agentName = Environment.GetEnvironmentVariable("AGENT_NAME")
+ ?? throw new InvalidOperationException("AGENT_NAME is not set.");
+
+var deployment = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o";
+
+// Use a chained credential: try a temporary dev token first (for local Docker debugging),
+// then fall back to DefaultAzureCredential (for local dev via dotnet run / managed identity running in foundry).
+TokenCredential credential = new ChainedTokenCredential(
+ new DevTemporaryTokenCredential(),
+ new DefaultAzureCredential());
+
+// Create the agent via the AI project client using the Responses API.
+AIAgent agent = new AIProjectClient(projectEndpoint, credential)
+ .AsAIAgent(
+ model: deployment,
+ instructions: """
+ You are a helpful AI assistant hosted as a Foundry Hosted Agent.
+ You can help with a wide range of tasks including answering questions,
+ providing explanations, brainstorming ideas, and offering guidance.
+ Be concise, clear, and helpful in your responses.
+ """,
+ name: agentName,
+ description: "A simple general-purpose AI assistant");
+
+// Host the agent as a Foundry Hosted Agent using the Responses API.
+var builder = WebApplication.CreateBuilder(args);
+builder.Services.AddFoundryResponses(agent);
+
+var app = builder.Build();
+app.MapFoundryResponses();
+
+// In Development, also map the OpenAI-compatible route that AIProjectClient uses.
+if (app.Environment.IsDevelopment())
+{
+ app.MapFoundryResponses("openai/v1");
+}
+
+app.Run();
+
+/// <summary>
+/// A <see cref="TokenCredential"/> for local Docker debugging only.
+/// </summary>
+/// <remarks>When debugging and testing a hosted agent in a local Docker container, Azure CLI
+/// and other interactive credentials are not available. This credential reads a
+/// pre-fetched bearer token from the AZURE_BEARER_TOKEN environment variable.
+///
+/// This should NOT be used in production — tokens expire (~1 hour) and cannot be refreshed.
+/// In production, the Foundry platform injects a managed identity automatically.
+///
+/// Generate a token on your host and pass it to the container:
+/// export AZURE_BEARER_TOKEN=$(az account get-access-token --resource https://ai.azure.com --query accessToken -o tsv)
+/// docker run -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN ...
+/// </remarks>
+internal sealed class DevTemporaryTokenCredential : TokenCredential
+{
+ private const string EnvironmentVariable = "AZURE_BEARER_TOKEN";
+ private readonly string? _token;
+
+ public DevTemporaryTokenCredential()
+ {
+ this._token = Environment.GetEnvironmentVariable(EnvironmentVariable);
+ }
+
+ public override AccessToken GetToken(TokenRequestContext requestContext, CancellationToken cancellationToken)
+ {
+ return this.GetAccessToken();
+ }
+
+ public override ValueTask GetTokenAsync(TokenRequestContext requestContext, CancellationToken cancellationToken)
+ {
+ return new ValueTask(this.GetAccessToken());
+ }
+
+ private AccessToken GetAccessToken()
+ {
+ if (string.IsNullOrEmpty(this._token) || this._token == "DefaultAzureCredential")
+ {
+ throw new CredentialUnavailableException($"{EnvironmentVariable} environment variable is not set.");
+ }
+
+ return new AccessToken(this._token, DateTimeOffset.UtcNow.AddHours(1));
+ }
+}
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/README.md b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/README.md
new file mode 100644
index 0000000000..ace8892572
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/README.md
@@ -0,0 +1,109 @@
+# Hosted-ChatClientAgent
+
+A simple general-purpose AI assistant hosted as a Foundry Hosted Agent using the Agent Framework instance hosting pattern. The agent is created inline via `AIProjectClient.AsAIAgent(model, instructions)` and served using the Responses protocol.
+
+## Prerequisites
+
+- [.NET 10 SDK](https://dotnet.microsoft.com/download/dotnet/10.0)
+- An Azure AI Foundry project with a deployed model (e.g., `gpt-4o`)
+- Azure CLI logged in (`az login`)
+
+## Configuration
+
+Copy the template and fill in your project endpoint:
+
+```bash
+cp .env.example .env
+```
+
+Edit `.env` and set your Azure AI Foundry project endpoint:
+
+```env
+AZURE_AI_PROJECT_ENDPOINT=https://.services.ai.azure.com/api/projects/
+ASPNETCORE_URLS=http://+:8088
+ASPNETCORE_ENVIRONMENT=Development
+AZURE_AI_MODEL_DEPLOYMENT_NAME=gpt-4o
+```
+
+> **Note:** `.env` is gitignored. The `.env.example` template is checked in as a reference.
+
+## Running directly (contributors)
+
+This project uses `ProjectReference` to build against the local Agent Framework source.
+
+```bash
+cd dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent
+dotnet run
+```
+
+The agent will start on `http://localhost:8088`.
+
+### Test it
+
+Using the Azure Developer CLI:
+
+```bash
+azd ai agent invoke --local "Hello!"
+```
+
+Or with curl (specifying the agent name explicitly):
+
+```bash
+curl -X POST http://localhost:8088/responses \
+ -H "Content-Type: application/json" \
+ -d '{"input": "Hello!", "model": "hosted-chat-client-agent"}'
+```
+
+## Running with Docker
+
+Since this project uses `ProjectReference`, the standard `Dockerfile` cannot resolve dependencies outside this folder. Use `Dockerfile.contributor` which takes a pre-published output.
+
+### 1. Publish for the container runtime (Linux Alpine)
+
+```bash
+dotnet publish -c Debug -f net10.0 -r linux-musl-x64 --self-contained false -o out
+```
+
+### 2. Build the Docker image
+
+```bash
+docker build -f Dockerfile.contributor -t hosted-chat-client-agent .
+```
+
+### 3. Run the container
+
+Generate a bearer token on your host and pass it to the container:
+
+```bash
+# Generate token (expires in ~1 hour)
+export AZURE_BEARER_TOKEN=$(az account get-access-token --resource https://ai.azure.com --query accessToken -o tsv)
+
+# Run with token
+docker run --rm -p 8088:8088 \
+ -e AGENT_NAME=hosted-chat-client-agent \
+ -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN \
+ --env-file .env \
+ hosted-chat-client-agent
+```
+
+> **Note:** `AGENT_NAME` is passed via `-e` to simulate the platform injection. `AZURE_BEARER_TOKEN` provides Azure credentials to the container (tokens expire after ~1 hour). The `.env` file provides the remaining configuration.
+
+### 4. Test it
+
+Using the Azure Developer CLI:
+
+```bash
+azd ai agent invoke --local "Hello!"
+```
+
+Or with curl (specifying the agent name explicitly):
+
+```bash
+curl -X POST http://localhost:8088/responses \
+ -H "Content-Type: application/json" \
+ -d '{"input": "Hello!", "model": "hosted-chat-client-agent"}'
+```
+
+## NuGet package users
+
+If you are consuming the Agent Framework as a NuGet package (not building from source), use the standard `Dockerfile` instead of `Dockerfile.contributor` — it performs a full `dotnet restore` and `dotnet publish` inside the container. See the commented section in `HostedChatClientAgent.csproj` for the `PackageReference` alternative.
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/agent.manifest.yaml b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/agent.manifest.yaml
new file mode 100644
index 0000000000..58a07d8bb3
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/agent.manifest.yaml
@@ -0,0 +1,28 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/AgentManifest.yaml
+name: hosted-chat-client-agent
+displayName: "Hosted Chat Client Agent"
+
+description: >
+ A simple general-purpose AI assistant hosted as a Foundry Hosted Agent
+ using the Agent Framework instance hosting pattern.
+
+metadata:
+ tags:
+ - AI Agent Hosting
+ - Azure AI AgentServer
+ - Responses Protocol
+ - Streaming
+ - Agent Framework
+
+template:
+ name: hosted-chat-client-agent
+ kind: hosted
+ protocols:
+ - protocol: responses
+ version: 1.0.0
+ resources:
+ cpu: "0.25"
+ memory: 0.5Gi
+parameters:
+ properties: []
+resources: []
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/agent.yaml b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/agent.yaml
new file mode 100644
index 0000000000..0a97abc35a
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-ChatClientAgent/agent.yaml
@@ -0,0 +1,9 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/ContainerAgent.yaml
+kind: hosted
+name: hosted-chat-client-agent
+protocols:
+ - protocol: responses
+ version: 1.0.0
+resources:
+ cpu: "0.25"
+ memory: 0.5Gi
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/.env.example b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/.env.example
new file mode 100644
index 0000000000..c72380d125
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/.env.example
@@ -0,0 +1,5 @@
+AZURE_AI_PROJECT_ENDPOINT=
+ASPNETCORE_URLS=http://+:8088
+ASPNETCORE_ENVIRONMENT=Development
+AGENT_NAME=
+AZURE_BEARER_TOKEN=DefaultAzureCredential
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/Dockerfile b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/Dockerfile
new file mode 100644
index 0000000000..eda1f7e1e9
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/Dockerfile
@@ -0,0 +1,17 @@
+# Use the official .NET 10.0 ASP.NET runtime as a parent image
+FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS base
+WORKDIR /app
+
+FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
+WORKDIR /src
+COPY . .
+RUN dotnet restore
+RUN dotnet publish -c Release -o /app/publish
+
+# Final stage
+FROM base AS final
+WORKDIR /app
+COPY --from=build /app/publish .
+EXPOSE 8088
+ENV ASPNETCORE_URLS=http://+:8088
+ENTRYPOINT ["dotnet", "HostedFoundryAgent.dll"]
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/Dockerfile.contributor b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/Dockerfile.contributor
new file mode 100644
index 0000000000..2b6a2dbbc4
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/Dockerfile.contributor
@@ -0,0 +1,19 @@
+# Dockerfile for contributors building from the agent-framework repository source.
+#
+# This project uses ProjectReference to the local Microsoft.Agents.AI.Foundry source,
+# which means a standard multi-stage Docker build cannot resolve dependencies outside
+# this folder. Instead, pre-publish the app targeting the container runtime and copy
+# the output into the container:
+#
+# dotnet publish -c Debug -f net10.0 -r linux-musl-x64 --self-contained false -o out
+# docker build -f Dockerfile.contributor -t hosted-foundry-agent .
+#   docker run --rm -p 8088:8088 -e AGENT_NAME=<your-agent-name> -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN --env-file .env hosted-foundry-agent
+#
+# For end-users consuming the NuGet package (not ProjectReference), use the standard
+# Dockerfile which performs a full dotnet restore + publish inside the container.
+FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
+WORKDIR /app
+COPY out/ .
+EXPOSE 8088
+ENV ASPNETCORE_URLS=http://+:8088
+ENTRYPOINT ["dotnet", "HostedFoundryAgent.dll"]
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/HostedFoundryAgent.csproj b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/HostedFoundryAgent.csproj
new file mode 100644
index 0000000000..e49a15769f
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/HostedFoundryAgent.csproj
@@ -0,0 +1,30 @@
+<Project Sdk="Microsoft.NET.Sdk.Web">
+
+  <PropertyGroup>
+    <TargetFramework>net10.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+    <IsPackable>false</IsPackable>
+    <AssemblyName>HostedFoundryAgent</AssemblyName>
+    <RootNamespace>HostedFoundryAgent</RootNamespace>
+    <NoWarn>$(NoWarn);</NoWarn>
+  </PropertyGroup>
+
+  <!-- NOTE(review): the ItemGroup entries of this project file were lost when
+       the patch text was mangled (all angle-bracketed content was stripped).
+       Per the README and Program.cs, this project references the local
+       Microsoft.Agents.AI.Foundry / Foundry.Hosting projects via
+       ProjectReference and uses packages such as DotNetEnv, Azure.Identity,
+       and Azure.AI.Projects. Restore the exact entries from the repository
+       before applying this patch. -->
+  <ItemGroup>
+  </ItemGroup>
+
+</Project>
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/Program.cs b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/Program.cs
new file mode 100644
index 0000000000..b593b16671
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/Program.cs
@@ -0,0 +1,91 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Azure.AI.Projects;
+using Azure.AI.Projects.Agents;
+using Azure.Core;
+using Azure.Identity;
+using DotNetEnv;
+using Microsoft.Agents.AI.Foundry;
+using Microsoft.Agents.AI.Foundry.Hosting;
+
+// Load .env file if present (for local development)
+Env.TraversePath().Load();
+
+var projectEndpoint = new Uri(Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT")
+ ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."));
+var agentName = Environment.GetEnvironmentVariable("AGENT_NAME")
+ ?? throw new InvalidOperationException("AGENT_NAME is not set.");
+
+// Use a chained credential: try a temporary dev token first (for local Docker debugging),
+// then fall back to DefaultAzureCredential (for local dev via dotnet run / managed identity running in foundry).
+TokenCredential credential = new ChainedTokenCredential(
+ new DevTemporaryTokenCredential(),
+ new DefaultAzureCredential());
+
+var aiProjectClient = new AIProjectClient(projectEndpoint, credential);
+
+// Retrieve the Foundry-managed agent by name (latest version).
+ProjectsAgentRecord agentRecord = await aiProjectClient
+ .AgentAdministrationClient.GetAgentAsync(agentName);
+
+FoundryAgent agent = aiProjectClient.AsAIAgent(agentRecord);
+
+// Host the agent as a Foundry Hosted Agent using the Responses API.
+var builder = WebApplication.CreateBuilder(args);
+builder.Services.AddFoundryResponses(agent);
+
+var app = builder.Build();
+app.MapFoundryResponses();
+
+// In Development, also map the OpenAI-compatible route that AIProjectClient uses.
+if (app.Environment.IsDevelopment())
+{
+ app.MapFoundryResponses("openai/v1");
+}
+
+app.Run();
+
+/// <summary>
+/// A <see cref="TokenCredential"/> for local Docker debugging only.
+///
+/// When debugging and testing a hosted agent in a local Docker container, Azure CLI
+/// and other interactive credentials are not available. This credential reads a
+/// pre-fetched bearer token from the AZURE_BEARER_TOKEN environment variable.
+///
+/// This should NOT be used in production — tokens expire (~1 hour) and cannot be refreshed.
+/// In production, the Foundry platform injects a managed identity automatically.
+///
+/// Generate a token on your host and pass it to the container:
+/// export AZURE_BEARER_TOKEN=$(az account get-access-token --resource https://ai.azure.com --query accessToken -o tsv)
+/// docker run -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN ...
+/// </summary>
+internal sealed class DevTemporaryTokenCredential : TokenCredential
+{
+ private const string EnvironmentVariable = "AZURE_BEARER_TOKEN";
+ private readonly string? _token;
+
+ public DevTemporaryTokenCredential()
+ {
+ this._token = Environment.GetEnvironmentVariable(EnvironmentVariable);
+ }
+
+ public override AccessToken GetToken(TokenRequestContext requestContext, CancellationToken cancellationToken)
+ {
+ return this.GetAccessToken();
+ }
+
+    public override ValueTask<AccessToken> GetTokenAsync(TokenRequestContext requestContext, CancellationToken cancellationToken)
+    {
+        return new ValueTask<AccessToken>(this.GetAccessToken());
+    }
+
+ private AccessToken GetAccessToken()
+ {
+ if (string.IsNullOrEmpty(this._token) || this._token == "DefaultAzureCredential")
+ {
+ throw new CredentialUnavailableException($"{EnvironmentVariable} environment variable is not set.");
+ }
+
+ return new AccessToken(this._token, DateTimeOffset.UtcNow.AddHours(1));
+ }
+}
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/README.md b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/README.md
new file mode 100644
index 0000000000..8265a80632
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/README.md
@@ -0,0 +1,121 @@
+# Hosted-FoundryAgent
+
+A hosted agent that delegates to a **Foundry-managed agent definition**. Instead of defining the model, instructions, and tools inline in code, this sample retrieves an existing agent registered in the Foundry platform via `AIProjectClient.AsAIAgent(agentRecord)` and hosts it using the Responses protocol.
+
+This is the **Foundry hosting** pattern — the agent's behavior is configured in the platform (via Foundry UI, CLI, or API), and this server simply wraps and serves it.
+
+## Prerequisites
+
+- [.NET 10 SDK](https://dotnet.microsoft.com/download/dotnet/10.0)
+- An Azure AI Foundry project with a **registered agent** (created via Foundry UI, CLI, or API)
+- Azure CLI logged in (`az login`)
+
+## Configuration
+
+Copy the template and fill in your project endpoint:
+
+```bash
+cp .env.example .env
+```
+
+Edit `.env` and set your Azure AI Foundry project endpoint:
+
+```env
+AZURE_AI_PROJECT_ENDPOINT=https://<your-resource>.services.ai.azure.com/api/projects/<your-project>
+ASPNETCORE_URLS=http://+:8088
+ASPNETCORE_ENVIRONMENT=Development
+```
+
+> **Note:** `.env` is gitignored. The `.env.example` template is checked in as a reference.
+
+You also need to set `AGENT_NAME` — the name of the Foundry-managed agent to host. This is injected automatically by the Foundry platform when deployed. For local development, pass it as an environment variable.
+
+## Running directly (contributors)
+
+This project uses `ProjectReference` to build against the local Agent Framework source.
+
+```bash
+cd dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent
+AGENT_NAME=<your-agent-name> dotnet run
+```
+
+The agent will start on `http://localhost:8088`.
+
+### Test it
+
+Using the Azure Developer CLI:
+
+```bash
+azd ai agent invoke --local "Hello!"
+```
+
+Or with curl (specifying the agent name explicitly):
+
+```bash
+curl -X POST http://localhost:8088/responses \
+ -H "Content-Type: application/json" \
+  -d '{"input": "Hello!", "model": "<your-agent-name>"}'
+```
+
+## Running with Docker
+
+Since this project uses `ProjectReference`, the standard `Dockerfile` cannot resolve dependencies outside this folder. Use `Dockerfile.contributor` which takes a pre-published output.
+
+### 1. Publish for the container runtime (Linux Alpine)
+
+```bash
+dotnet publish -c Debug -f net10.0 -r linux-musl-x64 --self-contained false -o out
+```
+
+### 2. Build the Docker image
+
+```bash
+docker build -f Dockerfile.contributor -t hosted-foundry-agent .
+```
+
+### 3. Run the container
+
+Generate a bearer token on your host and pass it to the container:
+
+```bash
+# Generate token (expires in ~1 hour)
+export AZURE_BEARER_TOKEN=$(az account get-access-token --resource https://ai.azure.com --query accessToken -o tsv)
+
+# Run with token
+docker run --rm -p 8088:8088 \
+  -e AGENT_NAME=<your-agent-name> \
+ -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN \
+ --env-file .env \
+ hosted-foundry-agent
+```
+
+> **Note:** `AGENT_NAME` is passed via `-e` to simulate the platform injection. `AZURE_BEARER_TOKEN` provides Azure credentials to the container (tokens expire after ~1 hour). The `.env` file provides the remaining configuration.
+
+### 4. Test it
+
+Using the Azure Developer CLI:
+
+```bash
+azd ai agent invoke --local "Hello!"
+```
+
+Or with curl (specifying the agent name explicitly):
+
+```bash
+curl -X POST http://localhost:8088/responses \
+ -H "Content-Type: application/json" \
+  -d '{"input": "Hello!", "model": "<your-agent-name>"}'
+```
+
+## NuGet package users
+
+If you are consuming the Agent Framework as a NuGet package (not building from source), use the standard `Dockerfile` instead of `Dockerfile.contributor` — it performs a full `dotnet restore` and `dotnet publish` inside the container. See the commented section in `HostedFoundryAgent.csproj` for the `PackageReference` alternative.
+
+## How it differs from Hosted-ChatClientAgent
+
+| | Hosted-ChatClientAgent | Hosted-FoundryAgent |
+|---|---|---|
+| **Agent definition** | Inline in code (`AsAIAgent(model, instructions)`) | Managed in Foundry platform (`AsAIAgent(agentRecord)`) |
+| **Model/instructions** | Set in `Program.cs` | Set in Foundry UI/CLI/API |
+| **Tools** | Defined in code | Configured in the platform |
+| **Use case** | Full control over agent behavior | Platform-managed agent with centralized config |
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/agent.manifest.yaml b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/agent.manifest.yaml
new file mode 100644
index 0000000000..9b33646c8a
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/agent.manifest.yaml
@@ -0,0 +1,28 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/AgentManifest.yaml
+name: hosted-foundry-agent
+displayName: "Hosted Foundry Agent"
+
+description: >
+ A simple general-purpose AI assistant hosted as a Foundry Hosted Agent,
+ backed by a Foundry-managed agent definition.
+
+metadata:
+ tags:
+ - AI Agent Hosting
+ - Azure AI AgentServer
+ - Responses Protocol
+ - Streaming
+ - Agent Framework
+
+template:
+ name: hosted-foundry-agent
+ kind: hosted
+ protocols:
+ - protocol: responses
+ version: 1.0.0
+ resources:
+ cpu: "0.25"
+ memory: 0.5Gi
+parameters:
+ properties: []
+resources: []
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/agent.yaml b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/agent.yaml
new file mode 100644
index 0000000000..74223e72fe
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-FoundryAgent/agent.yaml
@@ -0,0 +1,9 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/ContainerAgent.yaml
+kind: hosted
+name: hosted-foundry-agent
+protocols:
+ - protocol: responses
+ version: 1.0.0
+resources:
+ cpu: "0.25"
+ memory: 0.5Gi
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/.env.example b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/.env.example
new file mode 100644
index 0000000000..b8fe9e8e7a
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/.env.example
@@ -0,0 +1,5 @@
+AZURE_AI_PROJECT_ENDPOINT=
+ASPNETCORE_URLS=http://+:8088
+ASPNETCORE_ENVIRONMENT=Development
+AZURE_AI_MODEL_DEPLOYMENT_NAME=gpt-4o
+AZURE_BEARER_TOKEN=DefaultAzureCredential
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/Dockerfile b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/Dockerfile
new file mode 100644
index 0000000000..1b72fcd93f
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/Dockerfile
@@ -0,0 +1,17 @@
+# Use the official .NET 10.0 ASP.NET runtime as a parent image
+FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS base
+WORKDIR /app
+
+FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
+WORKDIR /src
+COPY . .
+RUN dotnet restore
+RUN dotnet publish -c Release -o /app/publish
+
+# Final stage
+FROM base AS final
+WORKDIR /app
+COPY --from=build /app/publish .
+EXPOSE 8088
+ENV ASPNETCORE_URLS=http://+:8088
+ENTRYPOINT ["dotnet", "HostedLocalTools.dll"]
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/Dockerfile.contributor b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/Dockerfile.contributor
new file mode 100644
index 0000000000..65f920824a
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/Dockerfile.contributor
@@ -0,0 +1,19 @@
+# Dockerfile for contributors building from the agent-framework repository source.
+#
+# This project uses ProjectReference to the local Microsoft.Agents.AI.Foundry source,
+# which means a standard multi-stage Docker build cannot resolve dependencies outside
+# this folder. Instead, pre-publish the app targeting the container runtime and copy
+# the output into the container:
+#
+# dotnet publish -c Debug -f net10.0 -r linux-musl-x64 --self-contained false -o out
+# docker build -f Dockerfile.contributor -t hosted-local-tools .
+# docker run --rm -p 8088:8088 -e AGENT_NAME=hosted-local-tools -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN --env-file .env hosted-local-tools
+#
+# For end-users consuming the NuGet package (not ProjectReference), use the standard
+# Dockerfile which performs a full dotnet restore + publish inside the container.
+FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
+WORKDIR /app
+COPY out/ .
+EXPOSE 8088
+ENV ASPNETCORE_URLS=http://+:8088
+ENTRYPOINT ["dotnet", "HostedLocalTools.dll"]
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/HostedLocalTools.csproj b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/HostedLocalTools.csproj
new file mode 100644
index 0000000000..b0d39d8cee
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/HostedLocalTools.csproj
@@ -0,0 +1,30 @@
+<Project Sdk="Microsoft.NET.Sdk.Web">
+
+  <PropertyGroup>
+    <TargetFramework>net10.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+    <IsPackable>false</IsPackable>
+    <AssemblyName>HostedLocalTools</AssemblyName>
+    <RootNamespace>HostedLocalTools</RootNamespace>
+    <NoWarn>$(NoWarn);</NoWarn>
+  </PropertyGroup>
+
+  <!-- NOTE(review): the ItemGroup entries of this project file were lost when
+       the patch text was mangled (all angle-bracketed content was stripped).
+       Per the README and Program.cs, this project references the local
+       Microsoft.Agents.AI.Foundry / Foundry.Hosting projects via
+       ProjectReference and uses packages such as DotNetEnv, Azure.Identity,
+       and Azure.AI.Projects. Restore the exact entries from the repository
+       before applying this patch. -->
+  <ItemGroup>
+  </ItemGroup>
+
+</Project>
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/Program.cs b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/Program.cs
new file mode 100644
index 0000000000..f0e3566fe1
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/Program.cs
@@ -0,0 +1,164 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+// Seattle Hotel Agent - A hosted agent with local C# function tools.
+// Demonstrates how to define and wire local tools that the LLM can invoke,
+// a key advantage of code-based hosted agents over prompt agents.
+
+using System.ComponentModel;
+using System.Globalization;
+using System.Text;
+using Azure.AI.Projects;
+using Azure.Core;
+using Azure.Identity;
+using DotNetEnv;
+using Microsoft.Agents.AI;
+using Microsoft.Agents.AI.Foundry.Hosting;
+using Microsoft.Extensions.AI;
+
+// Load .env file if present (for local development)
+Env.TraversePath().Load();
+
+string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT")
+ ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o";
+
+// Use a chained credential: try a temporary dev token first (for local Docker debugging),
+// then fall back to DefaultAzureCredential (for local dev via dotnet run / managed identity in production).
+TokenCredential credential = new ChainedTokenCredential(
+ new DevTemporaryTokenCredential(),
+ new DefaultAzureCredential());
+
+// ── Hotel data ───────────────────────────────────────────────────────────────
+
+Hotel[] seattleHotels =
+[
+ new("Contoso Suites", 189, 4.5, "Downtown"),
+ new("Fabrikam Residences", 159, 4.2, "Pike Place Market"),
+ new("Alpine Ski House", 249, 4.7, "Seattle Center"),
+ new("Margie's Travel Lodge", 219, 4.4, "Waterfront"),
+ new("Northwind Inn", 139, 4.0, "Capitol Hill"),
+ new("Relecloud Hotel", 99, 3.8, "University District"),
+];
+
+// ── Tool: GetAvailableHotels ─────────────────────────────────────────────────
+
+[Description("Get available hotels in Seattle for the specified dates.")]
+string GetAvailableHotels(
+ [Description("Check-in date in YYYY-MM-DD format")] string checkInDate,
+ [Description("Check-out date in YYYY-MM-DD format")] string checkOutDate,
+ [Description("Maximum price per night in USD (optional, defaults to 500)")] int maxPrice = 500)
+{
+ if (!DateTime.TryParseExact(checkInDate, "yyyy-MM-dd", CultureInfo.InvariantCulture, DateTimeStyles.None, out var checkIn))
+ {
+ return "Error parsing check-in date. Please use YYYY-MM-DD format.";
+ }
+
+ if (!DateTime.TryParseExact(checkOutDate, "yyyy-MM-dd", CultureInfo.InvariantCulture, DateTimeStyles.None, out var checkOut))
+ {
+ return "Error parsing check-out date. Please use YYYY-MM-DD format.";
+ }
+
+ if (checkOut <= checkIn)
+ {
+ return "Error: Check-out date must be after check-in date.";
+ }
+
+ int nights = (checkOut - checkIn).Days;
+    List<Hotel> availableHotels = seattleHotels.Where(h => h.PricePerNight <= maxPrice).ToList();
+
+ if (availableHotels.Count == 0)
+ {
+ return $"No hotels found in Seattle within your budget of ${maxPrice}/night.";
+ }
+
+ StringBuilder result = new();
+ result.AppendLine($"Available hotels in Seattle from {checkInDate} to {checkOutDate} ({nights} nights):");
+ result.AppendLine();
+
+ foreach (Hotel hotel in availableHotels)
+ {
+ int totalCost = hotel.PricePerNight * nights;
+ result.AppendLine($"**{hotel.Name}**");
+ result.AppendLine($" Location: {hotel.Location}");
+ result.AppendLine($" Rating: {hotel.Rating}/5");
+ result.AppendLine($" ${hotel.PricePerNight}/night (Total: ${totalCost})");
+ result.AppendLine();
+ }
+
+ return result.ToString();
+}
+
+// ── Create and host the agent ────────────────────────────────────────────────
+
+AIAgent agent = new AIProjectClient(new Uri(endpoint), credential)
+ .AsAIAgent(
+ model: deploymentName,
+ instructions: """
+ You are a helpful travel assistant specializing in finding hotels in Seattle, Washington.
+
+ When a user asks about hotels in Seattle:
+ 1. Ask for their check-in and check-out dates if not provided
+ 2. Ask about their budget preferences if not mentioned
+ 3. Use the GetAvailableHotels tool to find available options
+ 4. Present the results in a friendly, informative way
+ 5. Offer to help with additional questions about the hotels or Seattle
+
+ Be conversational and helpful. If users ask about things outside of Seattle hotels,
+ politely let them know you specialize in Seattle hotel recommendations.
+ """,
+ name: Environment.GetEnvironmentVariable("AGENT_NAME") ?? "hosted-local-tools",
+ description: "Seattle hotel search agent with local function tools",
+ tools: [AIFunctionFactory.Create(GetAvailableHotels)]);
+
+var builder = WebApplication.CreateBuilder(args);
+builder.Services.AddFoundryResponses(agent);
+
+var app = builder.Build();
+app.MapFoundryResponses();
+
+if (app.Environment.IsDevelopment())
+{
+ app.MapFoundryResponses("openai/v1");
+}
+
+app.Run();
+
+// ── Types ────────────────────────────────────────────────────────────────────
+
+internal sealed record Hotel(string Name, int PricePerNight, double Rating, string Location);
+
+/// <summary>
+/// A <see cref="TokenCredential"/> for local Docker debugging only.
+/// Reads a pre-fetched bearer token from the AZURE_BEARER_TOKEN environment variable
+/// once at startup. This should NOT be used in production.
+///
+/// Generate a token on your host and pass it to the container:
+/// export AZURE_BEARER_TOKEN=$(az account get-access-token --resource https://ai.azure.com --query accessToken -o tsv)
+/// docker run -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN ...
+/// </summary>
+internal sealed class DevTemporaryTokenCredential : TokenCredential
+{
+ private const string EnvironmentVariable = "AZURE_BEARER_TOKEN";
+ private readonly string? _token;
+
+ public DevTemporaryTokenCredential()
+ {
+ this._token = Environment.GetEnvironmentVariable(EnvironmentVariable);
+ }
+
+ public override AccessToken GetToken(TokenRequestContext requestContext, CancellationToken cancellationToken)
+ => this.GetAccessToken();
+
+    public override ValueTask<AccessToken> GetTokenAsync(TokenRequestContext requestContext, CancellationToken cancellationToken)
+        => new(this.GetAccessToken());
+
+ private AccessToken GetAccessToken()
+ {
+ if (string.IsNullOrEmpty(this._token) || this._token == "DefaultAzureCredential")
+ {
+ throw new CredentialUnavailableException($"{EnvironmentVariable} environment variable is not set.");
+ }
+
+ return new AccessToken(this._token, DateTimeOffset.UtcNow.AddHours(1));
+ }
+}
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/README.md b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/README.md
new file mode 100644
index 0000000000..8016ff7ae9
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/README.md
@@ -0,0 +1,113 @@
+# Hosted-LocalTools
+
+A hosted agent with **local C# function tools** for hotel search. Demonstrates how to define and wire local tools that the LLM can invoke — a key advantage of code-based hosted agents over prompt agents.
+
+The agent specializes in finding hotels in Seattle, with a `GetAvailableHotels` tool that searches a mock hotel database by dates and budget.
+
+## Prerequisites
+
+- [.NET 10 SDK](https://dotnet.microsoft.com/download/dotnet/10.0)
+- An Azure AI Foundry project with a deployed model (e.g., `gpt-4o`)
+- Azure CLI logged in (`az login`)
+
+## Configuration
+
+Copy the template and fill in your project endpoint:
+
+```bash
+cp .env.example .env
+```
+
+Edit `.env` and set your Azure AI Foundry project endpoint:
+
+```env
+AZURE_AI_PROJECT_ENDPOINT=https://<your-resource>.services.ai.azure.com/api/projects/<your-project>
+ASPNETCORE_URLS=http://+:8088
+ASPNETCORE_ENVIRONMENT=Development
+AZURE_AI_MODEL_DEPLOYMENT_NAME=gpt-4o
+```
+
+> **Note:** `.env` is gitignored. The `.env.example` template is checked in as a reference.
+
+## Running directly (contributors)
+
+This project uses `ProjectReference` to build against the local Agent Framework source.
+
+```bash
+cd dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools
+AGENT_NAME=hosted-local-tools dotnet run
+```
+
+The agent will start on `http://localhost:8088`.
+
+### Test it
+
+Using the Azure Developer CLI:
+
+```bash
+azd ai agent invoke --local "Find me a hotel in Seattle for Dec 20-25 under $200/night"
+```
+
+Or with curl:
+
+```bash
+curl -X POST http://localhost:8088/responses \
+ -H "Content-Type: application/json" \
+ -d '{"input": "Find me a hotel in Seattle for Dec 20-25 under $200/night", "model": "hosted-local-tools"}'
+```
+
+## Running with Docker
+
+Since this project uses `ProjectReference`, use `Dockerfile.contributor` which takes a pre-published output.
+
+### 1. Publish for the container runtime (Linux Alpine)
+
+```bash
+dotnet publish -c Debug -f net10.0 -r linux-musl-x64 --self-contained false -o out
+```
+
+### 2. Build the Docker image
+
+```bash
+docker build -f Dockerfile.contributor -t hosted-local-tools .
+```
+
+### 3. Run the container
+
+Generate a bearer token on your host and pass it to the container:
+
+```bash
+# Generate token (expires in ~1 hour)
+export AZURE_BEARER_TOKEN=$(az account get-access-token --resource https://ai.azure.com --query accessToken -o tsv)
+
+# Run with token
+docker run --rm -p 8088:8088 \
+ -e AGENT_NAME=hosted-local-tools \
+ -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN \
+ --env-file .env \
+ hosted-local-tools
+```
+
+### 4. Test it
+
+Using the Azure Developer CLI:
+
+```bash
+azd ai agent invoke --local "What hotels are available in Seattle for next weekend?"
+```
+
+## How local tools work
+
+The agent has a single tool `GetAvailableHotels` defined as a C# method with `[Description]` attributes. The LLM decides when to call it based on the user's request:
+
+| Parameter | Type | Description |
+|-----------|------|-------------|
+| `checkInDate` | string | Check-in date (YYYY-MM-DD) |
+| `checkOutDate` | string | Check-out date (YYYY-MM-DD) |
+| `maxPrice` | int | Max price per night in USD (default: 500) |
+
+The tool searches a mock database of 6 Seattle hotels and returns formatted results with name, location, rating, and pricing.
+
+## NuGet package users
+
+If you are consuming the Agent Framework as a NuGet package (not building from source), use the standard `Dockerfile` instead of `Dockerfile.contributor`. See the commented section in `HostedLocalTools.csproj` for the `PackageReference` alternative.
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/agent.manifest.yaml b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/agent.manifest.yaml
new file mode 100644
index 0000000000..a056b51649
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/agent.manifest.yaml
@@ -0,0 +1,29 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/AgentManifest.yaml
+name: hosted-local-tools
+displayName: "Seattle Hotel Agent with Local Tools"
+
+description: >
+ A travel assistant agent that helps users find hotels in Seattle.
+ Demonstrates local C# tool execution — a key advantage of code-based
+ hosted agents over prompt agents.
+
+metadata:
+ tags:
+ - AI Agent Hosting
+ - Azure AI AgentServer
+ - Responses Protocol
+ - Local Tools
+ - Agent Framework
+
+template:
+ name: hosted-local-tools
+ kind: hosted
+ protocols:
+ - protocol: responses
+ version: 1.0.0
+ resources:
+ cpu: "0.25"
+ memory: 0.5Gi
+parameters:
+ properties: []
+resources: []
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/agent.yaml b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/agent.yaml
new file mode 100644
index 0000000000..18ecc4a9f7
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-LocalTools/agent.yaml
@@ -0,0 +1,9 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/ContainerAgent.yaml
+kind: hosted
+name: hosted-local-tools
+protocols:
+ - protocol: responses
+ version: 1.0.0
+resources:
+ cpu: "0.25"
+ memory: 0.5Gi
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/.env.example b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/.env.example
new file mode 100644
index 0000000000..b8fe9e8e7a
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/.env.example
@@ -0,0 +1,5 @@
+AZURE_AI_PROJECT_ENDPOINT=
+ASPNETCORE_URLS=http://+:8088
+ASPNETCORE_ENVIRONMENT=Development
+AZURE_AI_MODEL_DEPLOYMENT_NAME=gpt-4o
+AZURE_BEARER_TOKEN=DefaultAzureCredential
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/Dockerfile b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/Dockerfile
new file mode 100644
index 0000000000..fe7fceb685
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/Dockerfile
@@ -0,0 +1,17 @@
+# Use the official .NET 10.0 ASP.NET runtime as a parent image
+FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS base
+WORKDIR /app
+
+FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
+WORKDIR /src
+COPY . .
+RUN dotnet restore
+RUN dotnet publish -c Release -o /app/publish
+
+# Final stage
+FROM base AS final
+WORKDIR /app
+COPY --from=build /app/publish .
+EXPOSE 8088
+ENV ASPNETCORE_URLS=http://+:8088
+ENTRYPOINT ["dotnet", "HostedMcpTools.dll"]
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/Dockerfile.contributor b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/Dockerfile.contributor
new file mode 100644
index 0000000000..51c8c347d8
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/Dockerfile.contributor
@@ -0,0 +1,18 @@
+# Dockerfile for contributors building from the agent-framework repository source.
+#
+# This project uses ProjectReference to the local source, which means a standard
+# multi-stage Docker build cannot resolve dependencies outside this folder.
+# Pre-publish the app targeting the container runtime and copy the output:
+#
+# dotnet publish -c Debug -f net10.0 -r linux-musl-x64 --self-contained false -o out
+# docker build -f Dockerfile.contributor -t hosted-mcp-tools .
+# docker run --rm -p 8088:8088 -e AGENT_NAME=mcp-tools -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN --env-file .env hosted-mcp-tools
+#
+# For end-users consuming the NuGet package (not ProjectReference), use the standard
+# Dockerfile which performs a full dotnet restore + publish inside the container.
+FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
+WORKDIR /app
+COPY out/ .
+EXPOSE 8088
+ENV ASPNETCORE_URLS=http://+:8088
+ENTRYPOINT ["dotnet", "HostedMcpTools.dll"]
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/HostedMcpTools.csproj b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/HostedMcpTools.csproj
new file mode 100644
index 0000000000..9ce19dd540
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/HostedMcpTools.csproj
@@ -0,0 +1,31 @@
+
+
+
+ net10.0
+ enable
+ enable
+ false
+ HostedMcpTools
+ HostedMcpTools
+ $(NoWarn);
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/Program.cs b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/Program.cs
new file mode 100644
index 0000000000..a027047a1d
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/Program.cs
@@ -0,0 +1,130 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+// This sample demonstrates a hosted agent with two layers of MCP (Model Context Protocol) tools:
+//
+// 1. CLIENT-SIDE MCP: The agent connects to the Microsoft Learn MCP server directly via
+// McpClient, discovers tools, and handles tool invocations locally within the agent process.
+//
+// 2. SERVER-SIDE MCP: The agent declares a HostedMcpServerTool for the same MCP server which
+// delegates tool discovery and invocation to the LLM provider (Azure OpenAI Responses API).
+// The provider calls the MCP server on behalf of the agent — no local connection needed.
+//
+// Both patterns use the Microsoft Learn MCP server to illustrate the architectural difference:
+// client-side tools are resolved and invoked by the agent, while server-side tools are resolved
+// and invoked by the LLM provider.
+
+#pragma warning disable MEAI001 // HostedMcpServerTool is experimental
+
+using Azure.AI.Projects;
+using Azure.Core;
+using Azure.Identity;
+using DotNetEnv;
+using Microsoft.Agents.AI;
+using Microsoft.Agents.AI.Foundry.Hosting;
+using Microsoft.Extensions.AI;
+using ModelContextProtocol.Client;
+
+// Load .env file if present (for local development)
+Env.TraversePath().Load();
+
+var projectEndpoint = new Uri(Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT")
+ ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set."));
+var deployment = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o";
+
+// Use a chained credential: try a temporary dev token first (for local Docker debugging),
+// then fall back to DefaultAzureCredential (for local dev via dotnet run / managed identity in production).
+TokenCredential credential = new ChainedTokenCredential(
+ new DevTemporaryTokenCredential(),
+ new DefaultAzureCredential());
+
+// ── Client-side MCP: Microsoft Learn (local resolution) ──────────────────────
+// Connect directly to the MCP server. The agent discovers and invokes tools locally.
+Console.WriteLine("Connecting to Microsoft Learn MCP server (client-side)...");
+
+await using var learnMcp = await McpClient.CreateAsync(new HttpClientTransport(new()
+{
+ Endpoint = new Uri("https://learn.microsoft.com/api/mcp"),
+ Name = "Microsoft Learn (client)",
+}));
+
+var clientTools = await learnMcp.ListToolsAsync();
+Console.WriteLine($"Client-side MCP tools: {string.Join(", ", clientTools.Select(t => t.Name))}");
+
+// ── Server-side MCP: Microsoft Learn (provider resolution) ───────────────────
+// Declare a HostedMcpServerTool — the LLM provider (Responses API) handles tool
+// invocations directly. No local MCP connection needed for this pattern.
+AITool serverTool = new HostedMcpServerTool(
+ serverName: "microsoft_learn_hosted",
+ serverAddress: "https://learn.microsoft.com/api/mcp")
+{
+ AllowedTools = ["microsoft_docs_search"],
+ ApprovalMode = HostedMcpServerToolApprovalMode.NeverRequire
+};
+Console.WriteLine("Server-side MCP tool: microsoft_docs_search (via HostedMcpServerTool)");
+
+// ── Combine both tool types into a single agent ──────────────────────────────
+// The agent has access to tools from both MCP patterns simultaneously.
+List allTools = [.. clientTools.Cast(), serverTool];
+
+AIAgent agent = new AIProjectClient(projectEndpoint, credential)
+ .AsAIAgent(
+ model: deployment,
+ instructions: """
+ You are a helpful developer assistant with access to Microsoft Learn documentation.
+ Use the available tools to search and retrieve documentation.
+ Be concise and provide direct answers with relevant links.
+ """,
+ name: "mcp-tools",
+ description: "Developer assistant with dual-layer MCP tools (client-side and server-side)",
+ tools: allTools);
+
+// Host the agent as a Foundry Hosted Agent using the Responses API.
+var builder = WebApplication.CreateBuilder(args);
+builder.Services.AddFoundryResponses(agent);
+
+var app = builder.Build();
+app.MapFoundryResponses();
+
+// In Development, also map the OpenAI-compatible route that AIProjectClient uses.
+if (app.Environment.IsDevelopment())
+{
+ app.MapFoundryResponses("openai/v1");
+}
+
+app.Run();
+
+///
+/// A for local Docker debugging only.
+/// Reads a pre-fetched bearer token from the AZURE_BEARER_TOKEN environment variable
+/// once at startup. This should NOT be used in production.
+///
+/// Generate a token on your host and pass it to the container:
+/// export AZURE_BEARER_TOKEN=$(az account get-access-token --resource https://ai.azure.com --query accessToken -o tsv)
+/// docker run -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN ...
+///
+internal sealed class DevTemporaryTokenCredential : TokenCredential
+{
+ private const string EnvironmentVariable = "AZURE_BEARER_TOKEN";
+ private readonly string? _token;
+
+ public DevTemporaryTokenCredential()
+ {
+ this._token = Environment.GetEnvironmentVariable(EnvironmentVariable);
+ }
+
+ public override AccessToken GetToken(TokenRequestContext requestContext, CancellationToken cancellationToken)
+ => this.GetAccessToken();
+
+ public override ValueTask GetTokenAsync(TokenRequestContext requestContext, CancellationToken cancellationToken)
+ => new(this.GetAccessToken());
+
+ private AccessToken GetAccessToken()
+ {
+ if (string.IsNullOrEmpty(this._token) || this._token == "DefaultAzureCredential")
+ {
+ throw new CredentialUnavailableException($"{EnvironmentVariable} environment variable is not set.");
+ }
+
+ return new AccessToken(this._token, DateTimeOffset.UtcNow.AddHours(1));
+ }
+}
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/README.md b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/README.md
new file mode 100644
index 0000000000..3313633b0f
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/README.md
@@ -0,0 +1,86 @@
+# Hosted-McpTools
+
+A hosted agent demonstrating **two layers of MCP (Model Context Protocol) tool integration**:
+
+1. **Client-side MCP (Microsoft Learn)** — The agent connects directly to the Microsoft Learn MCP server via `McpClient`, discovers tools, and handles tool invocations locally within the agent process.
+
+2. **Server-side MCP (Microsoft Learn)** — The agent declares a `HostedMcpServerTool` which delegates tool discovery and invocation to the LLM provider (Azure OpenAI Responses API). The provider calls the MCP server on behalf of the agent with no local connection needed.
+
+## How the two MCP patterns differ
+
+| | Client-side MCP | Server-side MCP |
+|---|---|---|
+| **Connection** | Agent connects to MCP server directly | LLM provider connects to MCP server |
+| **Tool invocation** | Handled by the agent process | Handled by the Responses API |
+| **Auth** | Agent manages the connection and any credentials | Provider manages credentials |
+| **Use case** | Custom/private MCP servers, fine-grained control | Public MCP servers, simpler setup |
+| **Example** | Microsoft Learn (`McpClient` + `HttpClientTransport`) | Microsoft Learn (`HostedMcpServerTool`) |
+
+## Prerequisites
+
+- [.NET 10 SDK](https://dotnet.microsoft.com/download/dotnet/10.0)
+- An Azure AI Foundry project with a deployed model (e.g., `gpt-4o`)
+- Azure CLI logged in (`az login`)
+- Docker (optional, for running the agent in a container)
+
+## Configuration
+
+Copy the template and fill in your values:
+
+```bash
+cp .env.example .env
+```
+
+Edit `.env`:
+
+```env
+AZURE_AI_PROJECT_ENDPOINT=https://.services.ai.azure.com/api/projects/
+AZURE_AI_MODEL_DEPLOYMENT_NAME=gpt-4o
+AZURE_BEARER_TOKEN=DefaultAzureCredential
+```
+
+## Running directly (contributors)
+
+```bash
+cd dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools
+dotnet run
+```
+
+### Test it
+
+Using the Azure Developer CLI:
+
+```bash
+# Uses Microsoft Learn MCP (client-side, tools invoked locally by the agent)
+azd ai agent invoke --local "Search Microsoft Learn for documentation on the Agent Framework"
+
+# Uses Microsoft Learn MCP (server-side)
+azd ai agent invoke --local "How do I create an Azure storage account using az cli?"
+```
+
+## Running with Docker
+
+### 1. Publish for the container runtime
+
+```bash
+dotnet publish -c Debug -f net10.0 -r linux-musl-x64 --self-contained false -o out
+```
+
+### 2. Build and run
+
+```bash
+docker build -f Dockerfile.contributor -t hosted-mcp-tools .
+
+export AZURE_BEARER_TOKEN=$(az account get-access-token --resource https://ai.azure.com --query accessToken -o tsv)
+
+docker run --rm -p 8088:8088 \
+ -e AGENT_NAME=mcp-tools \
+  -e ASPNETCORE_ENVIRONMENT=Development \
+ -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN \
+ --env-file .env \
+ hosted-mcp-tools
+```
+
+## NuGet package users
+
+Use the standard `Dockerfile` instead of `Dockerfile.contributor`. See the commented section in `HostedMcpTools.csproj` for the `PackageReference` alternative.
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/agent.manifest.yaml b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/agent.manifest.yaml
new file mode 100644
index 0000000000..d5952940b0
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/agent.manifest.yaml
@@ -0,0 +1,30 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/AgentManifest.yaml
+name: mcp-tools
+displayName: "MCP Tools Agent"
+
+description: >
+ A developer assistant demonstrating dual-layer MCP integration:
+  client-side Microsoft Learn MCP tools handled by the agent and
+  server-side Microsoft Learn MCP tools delegated to the LLM provider.
+
+metadata:
+ tags:
+ - AI Agent Hosting
+ - Azure AI AgentServer
+ - Responses Protocol
+ - Agent Framework
+ - MCP
+ - Model Context Protocol
+
+template:
+ name: mcp-tools
+ kind: hosted
+ protocols:
+ - protocol: responses
+ version: 1.0.0
+ resources:
+ cpu: "0.25"
+ memory: 0.5Gi
+parameters:
+ properties: []
+resources: []
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/agent.yaml b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/agent.yaml
new file mode 100644
index 0000000000..34beb3e2c9
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/agent.yaml
@@ -0,0 +1,9 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/ContainerAgent.yaml
+kind: hosted
+name: mcp-tools
+protocols:
+ - protocol: responses
+ version: 1.0.0
+resources:
+ cpu: "0.25"
+ memory: 0.5Gi
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/.env.example b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/.env.example
new file mode 100644
index 0000000000..b8fe9e8e7a
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/.env.example
@@ -0,0 +1,5 @@
+AZURE_AI_PROJECT_ENDPOINT=
+ASPNETCORE_URLS=http://+:8088
+ASPNETCORE_ENVIRONMENT=Development
+AZURE_AI_MODEL_DEPLOYMENT_NAME=gpt-4o
+AZURE_BEARER_TOKEN=DefaultAzureCredential
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/Dockerfile b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/Dockerfile
new file mode 100644
index 0000000000..062d0f4f7e
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/Dockerfile
@@ -0,0 +1,17 @@
+# Use the official .NET 10.0 ASP.NET runtime as a parent image
+FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS base
+WORKDIR /app
+
+FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
+WORKDIR /src
+COPY . .
+RUN dotnet restore
+RUN dotnet publish -c Release -o /app/publish
+
+# Final stage
+FROM base AS final
+WORKDIR /app
+COPY --from=build /app/publish .
+EXPOSE 8088
+ENV ASPNETCORE_URLS=http://+:8088
+ENTRYPOINT ["dotnet", "HostedTextRag.dll"]
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/Dockerfile.contributor b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/Dockerfile.contributor
new file mode 100644
index 0000000000..9a90c74335
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/Dockerfile.contributor
@@ -0,0 +1,19 @@
+# Dockerfile for contributors building from the agent-framework repository source.
+#
+# This project uses ProjectReference to the local Microsoft.Agents.AI.Foundry source,
+# which means a standard multi-stage Docker build cannot resolve dependencies outside
+# this folder. Instead, pre-publish the app targeting the container runtime and copy
+# the output into the container:
+#
+# dotnet publish -c Debug -f net10.0 -r linux-musl-x64 --self-contained false -o out
+# docker build -f Dockerfile.contributor -t hosted-text-rag .
+# docker run --rm -p 8088:8088 -e AGENT_NAME=hosted-text-rag -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN --env-file .env hosted-text-rag
+#
+# For end-users consuming the NuGet package (not ProjectReference), use the standard
+# Dockerfile which performs a full dotnet restore + publish inside the container.
+FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
+WORKDIR /app
+COPY out/ .
+EXPOSE 8088
+ENV ASPNETCORE_URLS=http://+:8088
+ENTRYPOINT ["dotnet", "HostedTextRag.dll"]
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/HostedTextRag.csproj b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/HostedTextRag.csproj
new file mode 100644
index 0000000000..9a22108c7b
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/HostedTextRag.csproj
@@ -0,0 +1,32 @@
+
+
+
+ net10.0
+ enable
+ enable
+ false
+ HostedTextRag
+ HostedTextRag
+ $(NoWarn);
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/Program.cs b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/Program.cs
new file mode 100644
index 0000000000..33edb58901
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/Program.cs
@@ -0,0 +1,130 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+// This sample shows how to use TextSearchProvider to add retrieval augmented generation (RAG)
+// capabilities to a hosted agent. The provider runs a search against an external knowledge base
+// before each model invocation and injects the results into the model context.
+
+using Azure.AI.Projects;
+using Azure.Core;
+using Azure.Identity;
+using DotNetEnv;
+using Microsoft.Agents.AI;
+using Microsoft.Agents.AI.Foundry.Hosting;
+using Microsoft.Extensions.AI;
+using OpenAI.Chat;
+
+// Load .env file if present (for local development)
+Env.TraversePath().Load();
+
+string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT")
+ ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o";
+
+// Use a chained credential: try a temporary dev token first (for local Docker debugging),
+// then fall back to DefaultAzureCredential (for local dev via dotnet run / managed identity in production).
+TokenCredential credential = new ChainedTokenCredential(
+ new DevTemporaryTokenCredential(),
+ new DefaultAzureCredential());
+
+TextSearchProviderOptions textSearchOptions = new()
+{
+ SearchTime = TextSearchProviderOptions.TextSearchBehavior.BeforeAIInvoke,
+ RecentMessageMemoryLimit = 6,
+};
+
+AIAgent agent = new AIProjectClient(new Uri(endpoint), credential)
+ .AsAIAgent(new ChatClientAgentOptions
+ {
+ Name = Environment.GetEnvironmentVariable("AGENT_NAME") ?? "hosted-text-rag",
+ ChatOptions = new ChatOptions
+ {
+ ModelId = deploymentName,
+ Instructions = "You are a helpful support specialist for Contoso Outdoors. Answer questions using the provided context and cite the source document when available.",
+ },
+ AIContextProviders = [new TextSearchProvider(MockSearchAsync, textSearchOptions)]
+ });
+
+// Host the agent as a Foundry Hosted Agent using the Responses API.
+var builder = WebApplication.CreateBuilder(args);
+builder.Services.AddFoundryResponses(agent);
+
+var app = builder.Build();
+app.MapFoundryResponses();
+
+if (app.Environment.IsDevelopment())
+{
+ app.MapFoundryResponses("openai/v1");
+}
+
+app.Run();
+
+// ── Mock search function ─────────────────────────────────────────────────────
+// In production, replace this with a real search provider (e.g., Azure AI Search).
+
+static Task> MockSearchAsync(string query, CancellationToken cancellationToken)
+{
+ List results = [];
+
+ if (query.Contains("return", StringComparison.OrdinalIgnoreCase) || query.Contains("refund", StringComparison.OrdinalIgnoreCase))
+ {
+ results.Add(new()
+ {
+ SourceName = "Contoso Outdoors Return Policy",
+ SourceLink = "https://contoso.com/policies/returns",
+ Text = "Customers may return any item within 30 days of delivery. Items should be unused and include original packaging. Refunds are issued to the original payment method within 5 business days of inspection."
+ });
+ }
+
+ if (query.Contains("shipping", StringComparison.OrdinalIgnoreCase))
+ {
+ results.Add(new()
+ {
+ SourceName = "Contoso Outdoors Shipping Guide",
+ SourceLink = "https://contoso.com/help/shipping",
+ Text = "Standard shipping is free on orders over $50 and typically arrives in 3-5 business days within the continental United States. Expedited options are available at checkout."
+ });
+ }
+
+ if (query.Contains("tent", StringComparison.OrdinalIgnoreCase) || query.Contains("fabric", StringComparison.OrdinalIgnoreCase))
+ {
+ results.Add(new()
+ {
+ SourceName = "TrailRunner Tent Care Instructions",
+ SourceLink = "https://contoso.com/manuals/trailrunner-tent",
+ Text = "Clean the tent fabric with lukewarm water and a non-detergent soap. Allow it to air dry completely before storage and avoid prolonged UV exposure to extend the lifespan of the waterproof coating."
+ });
+ }
+
+ return Task.FromResult>(results);
+}
+
+///
+/// A for local Docker debugging only.
+/// Reads a pre-fetched bearer token from the AZURE_BEARER_TOKEN environment variable.
+/// This should NOT be used in production — tokens expire (~1 hour) and cannot be refreshed.
+///
+/// Generate a token on your host and pass it to the container:
+/// export AZURE_BEARER_TOKEN=$(az account get-access-token --resource https://ai.azure.com --query accessToken -o tsv)
+/// docker run -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN ...
+///
+internal sealed class DevTemporaryTokenCredential : TokenCredential
+{
+ private const string EnvironmentVariable = "AZURE_BEARER_TOKEN";
+
+ public override AccessToken GetToken(TokenRequestContext requestContext, CancellationToken cancellationToken)
+ => GetAccessToken();
+
+ public override ValueTask GetTokenAsync(TokenRequestContext requestContext, CancellationToken cancellationToken)
+ => new(GetAccessToken());
+
+ private static AccessToken GetAccessToken()
+ {
+ var token = Environment.GetEnvironmentVariable(EnvironmentVariable);
+ if (string.IsNullOrEmpty(token) || token == "DefaultAzureCredential")
+ {
+ throw new CredentialUnavailableException($"{EnvironmentVariable} environment variable is not set.");
+ }
+
+ return new AccessToken(token, DateTimeOffset.UtcNow.AddHours(1));
+ }
+}
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/README.md b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/README.md
new file mode 100644
index 0000000000..5e4e5140c0
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/README.md
@@ -0,0 +1,116 @@
+# Hosted-TextRag
+
+A hosted agent with **Retrieval Augmented Generation (RAG)** capabilities using `TextSearchProvider`. The agent grounds its answers in product documentation by running a search before each model invocation, then citing the source in its response.
+
+This sample demonstrates how to add knowledge grounding to a hosted agent without requiring an external search index — using a mock search function that can be replaced with Azure AI Search or any other provider.
+
+## Prerequisites
+
+- [.NET 10 SDK](https://dotnet.microsoft.com/download/dotnet/10.0)
+- An Azure AI Foundry project with a deployed model (e.g., `gpt-4o`)
+- Azure CLI logged in (`az login`)
+
+## Configuration
+
+Copy the template and fill in your project endpoint:
+
+```bash
+cp .env.example .env
+```
+
+Edit `.env` and set your Azure AI Foundry project endpoint:
+
+```env
+AZURE_AI_PROJECT_ENDPOINT=https://.services.ai.azure.com/api/projects/
+ASPNETCORE_URLS=http://+:8088
+ASPNETCORE_ENVIRONMENT=Development
+AZURE_AI_MODEL_DEPLOYMENT_NAME=gpt-4o
+AZURE_BEARER_TOKEN=
+```
+
+> **Note:** `.env` is gitignored. The `.env.example` template is checked in as a reference.
+
+## Running directly (contributors)
+
+This project uses `ProjectReference` to build against the local Agent Framework source.
+
+```bash
+cd dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag
+AGENT_NAME=hosted-text-rag dotnet run
+```
+
+The agent will start on `http://localhost:8088`.
+
+### Test it
+
+Using the Azure Developer CLI:
+
+```bash
+azd ai agent invoke --local "What is your return policy?"
+azd ai agent invoke --local "How long does shipping take?"
+azd ai agent invoke --local "How do I clean my tent?"
+```
+
+Or with curl:
+
+```bash
+curl -X POST http://localhost:8088/responses \
+ -H "Content-Type: application/json" \
+ -d '{"input": "What is your return policy?", "model": "hosted-text-rag"}'
+```
+
+## Running with Docker
+
+Since this project uses `ProjectReference`, use `Dockerfile.contributor` which takes a pre-published output.
+
+### 1. Publish for the container runtime (Linux Alpine)
+
+```bash
+dotnet publish -c Debug -f net10.0 -r linux-musl-x64 --self-contained false -o out
+```
+
+### 2. Build the Docker image
+
+```bash
+docker build -f Dockerfile.contributor -t hosted-text-rag .
+```
+
+### 3. Run the container
+
+Generate a bearer token on your host and pass it to the container:
+
+```bash
+# Generate token (expires in ~1 hour)
+export AZURE_BEARER_TOKEN=$(az account get-access-token --resource https://ai.azure.com --query accessToken -o tsv)
+
+# Run with token
+docker run --rm -p 8088:8088 \
+ -e AGENT_NAME=hosted-text-rag \
+ -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN \
+ --env-file .env \
+ hosted-text-rag
+```
+
+### 4. Test it
+
+Using the Azure Developer CLI:
+
+```bash
+azd ai agent invoke --local "What is your return policy?"
+```
+
+## How RAG works in this sample
+
+The `TextSearchProvider` runs a mock search **before each model invocation**:
+
+| User query contains | Search result injected |
+|---|---|
+| "return" or "refund" | Contoso Outdoors Return Policy |
+| "shipping" | Contoso Outdoors Shipping Guide |
+| "tent" or "fabric" | TrailRunner Tent Care Instructions |
+
+The model receives the search results as additional context and cites the source in its response. In production, replace `MockSearchAsync` with a call to Azure AI Search or your preferred search provider.
+
+## NuGet package users
+
+If you are consuming the Agent Framework as a NuGet package (not building from source), use the standard `Dockerfile` instead of `Dockerfile.contributor`. See the commented section in `HostedTextRag.csproj` for the `PackageReference` alternative.
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/agent.manifest.yaml b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/agent.manifest.yaml
new file mode 100644
index 0000000000..1459925136
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/agent.manifest.yaml
@@ -0,0 +1,30 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/AgentManifest.yaml
+name: hosted-text-rag
+displayName: "Hosted Text RAG Agent"
+
+description: >
+ A support specialist agent for Contoso Outdoors with RAG capabilities.
+ Uses TextSearchProvider to ground answers in product documentation
+ before each model invocation.
+
+metadata:
+ tags:
+ - AI Agent Hosting
+ - Azure AI AgentServer
+ - Responses Protocol
+ - RAG
+ - Text Search
+ - Agent Framework
+
+template:
+ name: hosted-text-rag
+ kind: hosted
+ protocols:
+ - protocol: responses
+ version: 1.0.0
+ resources:
+ cpu: "0.25"
+ memory: 0.5Gi
+parameters:
+ properties: []
+resources: []
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/agent.yaml b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/agent.yaml
new file mode 100644
index 0000000000..c8d6928e2e
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-TextRag/agent.yaml
@@ -0,0 +1,9 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/ContainerAgent.yaml
+kind: hosted
+name: hosted-text-rag
+protocols:
+ - protocol: responses
+ version: 1.0.0
+resources:
+ cpu: "0.25"
+ memory: 0.5Gi
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/.env.example b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/.env.example
new file mode 100644
index 0000000000..bfb3c97208
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/.env.example
@@ -0,0 +1,5 @@
+AZURE_OPENAI_ENDPOINT=https://.openai.azure.com/
+AZURE_OPENAI_DEPLOYMENT=gpt-4o
+AZURE_BEARER_TOKEN=DefaultAzureCredential
+ASPNETCORE_URLS=http://+:8088
+ASPNETCORE_ENVIRONMENT=Development
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/Dockerfile b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/Dockerfile
new file mode 100644
index 0000000000..14b356ad98
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/Dockerfile
@@ -0,0 +1,17 @@
+# Use the official .NET 10.0 ASP.NET runtime as a parent image
+FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS base
+WORKDIR /app
+
+FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
+WORKDIR /src
+COPY . .
+RUN dotnet restore
+RUN dotnet publish -c Release -o /app/publish
+
+# Final stage
+FROM base AS final
+WORKDIR /app
+COPY --from=build /app/publish .
+EXPOSE 8088
+ENV ASPNETCORE_URLS=http://+:8088
+ENTRYPOINT ["dotnet", "HostedWorkflowHandoff.dll"]
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/Dockerfile.contributor b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/Dockerfile.contributor
new file mode 100644
index 0000000000..4cc047c8bc
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/Dockerfile.contributor
@@ -0,0 +1,19 @@
+# Dockerfile for contributors building from the agent-framework repository source.
+#
+# This project uses ProjectReference to the local Microsoft.Agents.AI.Foundry source,
+# which means a standard multi-stage Docker build cannot resolve dependencies outside
+# this folder. Instead, pre-publish the app targeting the container runtime and copy
+# the output into the container:
+#
+# dotnet publish -c Debug -f net10.0 -r linux-musl-x64 --self-contained false -o out
+# docker build -f Dockerfile.contributor -t hosted-workflow-handoff .
+# docker run --rm -p 8088:8088 -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN --env-file .env hosted-workflow-handoff
+#
+# For end-users consuming the NuGet package (not ProjectReference), use the standard
+# Dockerfile which performs a full dotnet restore + publish inside the container.
+FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
+WORKDIR /app
+COPY out/ .
+EXPOSE 8088
+ENV ASPNETCORE_URLS=http://+:8088
+ENTRYPOINT ["dotnet", "HostedWorkflowHandoff.dll"]
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/HostedWorkflowHandoff.csproj b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/HostedWorkflowHandoff.csproj
new file mode 100644
index 0000000000..3c0df909c9
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/HostedWorkflowHandoff.csproj
@@ -0,0 +1,40 @@
+
+
+
+ Exe
+ net10.0
+ enable
+ enable
+ HostedWorkflowHandoff
+ HostedWorkflowHandoff
+ false
+ $(NoWarn);NU1903;NU1605;MAAIW001
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/Pages.cs b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/Pages.cs
new file mode 100644
index 0000000000..916b0fdf17
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/Pages.cs
@@ -0,0 +1,470 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+///
+/// Static HTML pages served by the sample application.
+///
+internal static class Pages
+{
+ // ═══════════════════════════════════════════════════════════════════════
+ // Homepage
+ // ═══════════════════════════════════════════════════════════════════════
+
+ internal const string Home = """
+
+
+
+
+ Foundry Responses Hosting — Demos
+
+
+
+
+
🚀 Foundry Responses Hosting
+
+ Agent-framework agents hosted via the Azure AI Responses Server SDK.
+ Each demo registers a different agent and serves it through POST /responses.
+
';
+ }
+
+ h += `Event log (${this.events.length})
`;
+ this.events.forEach((e, i) => {
+ h += `
${i}${esc(e.eventType)}
`;
+ });
+ h += '
';
+
+ el.innerHTML = h;
+ return el;
+ }
+}
+""";
+}
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/Program.cs b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/Program.cs
new file mode 100644
index 0000000000..9783aca8f3
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/Program.cs
@@ -0,0 +1,221 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+// This sample demonstrates hosting agent-framework agents as Foundry Hosted Agents
+// using the Azure AI Responses Server SDK.
+//
+// Demos:
+// / - Homepage listing all demos
+// /tool-demo - Agent with local tools + remote MCP tools
+// /workflow-demo - Triage workflow routing to specialist agents
+//
+// Prerequisites:
+// - Azure OpenAI resource with a deployed model
+//
+// Environment variables:
+// - AZURE_OPENAI_ENDPOINT - your Azure OpenAI endpoint
+// - AZURE_OPENAI_DEPLOYMENT - the model deployment name (default: "gpt-4o")
+
+using System.ComponentModel;
+using Azure.AI.OpenAI;
+using Azure.Core;
+using Azure.Identity;
+using DotNetEnv;
+using Microsoft.Agents.AI;
+using Microsoft.Agents.AI.Foundry.Hosting;
+using Microsoft.Agents.AI.Hosting;
+using Microsoft.Agents.AI.Workflows;
+using Microsoft.Extensions.AI;
+using ModelContextProtocol.Client;
+
+// Load .env file if present (for local development)
+Env.TraversePath().Load();
+
+var builder = WebApplication.CreateBuilder(args);
+
+// ---------------------------------------------------------------------------
+// 1. Create the shared Azure OpenAI chat client
+// ---------------------------------------------------------------------------
+var endpoint = new Uri(Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."));
+var deployment = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT") ?? "gpt-4o";
+
+var azureClient = new AzureOpenAIClient(endpoint, new ChainedTokenCredential(
+ new DevTemporaryTokenCredential(),
+ new DefaultAzureCredential()));
+IChatClient chatClient = azureClient.GetResponsesClient().AsIChatClient(deployment);
+
+// ---------------------------------------------------------------------------
+// 2. DEMO 1: Tool Agent — local tools + Microsoft Learn MCP
+// ---------------------------------------------------------------------------
+Console.WriteLine("Connecting to Microsoft Learn MCP server...");
+McpClient mcpClient = await McpClient.CreateAsync(new HttpClientTransport(new()
+{
+ Endpoint = new Uri("https://learn.microsoft.com/api/mcp"),
+ Name = "Microsoft Learn MCP",
+}));
+var mcpTools = await mcpClient.ListToolsAsync();
+Console.WriteLine($"MCP tools available: {string.Join(", ", mcpTools.Select(t => t.Name))}");
+
+builder.AddAIAgent(
+ name: "tool-agent",
+ instructions: """
+ You are a helpful assistant hosted as a Foundry Hosted Agent.
+ You have access to several tools - use them proactively:
+ - GetCurrentTime: Returns the current date/time in any timezone.
+ - GetWeather: Returns weather conditions for any location.
+ - Microsoft Learn MCP tools: Search and fetch Microsoft documentation.
+ When a user asks a technical question about Microsoft products, use the
+ documentation search tools to give accurate, up-to-date answers.
+ """,
+ chatClient: chatClient)
+ .WithAITool(AIFunctionFactory.Create(GetCurrentTime))
+ .WithAITool(AIFunctionFactory.Create(GetWeather))
+ .WithAITools(mcpTools.Cast<AITool>().ToArray());
+
+// ---------------------------------------------------------------------------
+// 3. DEMO 2: Triage Workflow — routes to specialist agents
+// ---------------------------------------------------------------------------
+ChatClientAgent triageAgent = new(
+ chatClient,
+ instructions: """
+ You are a triage agent that determines which specialist to hand off to.
+ Based on the user's question, ALWAYS hand off to one of the available agents.
+ Do NOT answer the question yourself - just route it.
+ """,
+ name: "triage_agent",
+ description: "Routes messages to the appropriate specialist agent");
+
+ChatClientAgent codeExpert = new(
+ chatClient,
+ instructions: """
+ You are a coding and technology expert. You help with programming questions,
+ explain technical concepts, debug code, and suggest best practices.
+ Provide clear, well-structured answers with code examples when appropriate.
+ """,
+ name: "code_expert",
+ description: "Specialist agent for programming and technology questions");
+
+ChatClientAgent creativeWriter = new(
+ chatClient,
+ instructions: """
+ You are a creative writing specialist. You help write stories, poems,
+ marketing copy, emails, and other creative content. You have a flair
+ for engaging language and vivid descriptions.
+ """,
+ name: "creative_writer",
+ description: "Specialist agent for creative writing and content tasks");
+
+Workflow triageWorkflow = AgentWorkflowBuilder.CreateHandoffBuilderWith(triageAgent)
+ .WithHandoffs(triageAgent, [codeExpert, creativeWriter])
+ .WithHandoffs([codeExpert, creativeWriter], triageAgent)
+ .Build();
+
+builder.AddAIAgent("triage-workflow", (_, key) =>
+ triageWorkflow.AsAIAgent(name: key));
+
+// Register triage-workflow as the non-keyed default so azd invoke (no model) works
+builder.Services.AddSingleton<AIAgent>(sp =>
+ sp.GetRequiredKeyedService<AIAgent>("triage-workflow"));
+
+// ---------------------------------------------------------------------------
+// 4. Wire up the agent-framework handler and Responses Server SDK
+// ---------------------------------------------------------------------------
+builder.Services.AddFoundryResponses();
+
+var app = builder.Build();
+
+// Dispose the MCP client on shutdown
+app.Lifetime.ApplicationStopping.Register(() =>
+ mcpClient.DisposeAsync().AsTask().GetAwaiter().GetResult());
+
+// ---------------------------------------------------------------------------
+// 5. Routes
+// ---------------------------------------------------------------------------
+app.MapGet("/ready", () => Results.Ok("ready"));
+app.MapFoundryResponses();
+
+app.MapGet("/", () => Results.Content(Pages.Home, "text/html"));
+app.MapGet("/tool-demo", () => Results.Content(Pages.ToolDemo, "text/html"));
+app.MapGet("/workflow-demo", () => Results.Content(Pages.WorkflowDemo, "text/html"));
+app.MapGet("/js/sse-validator.js", () => Results.Content(Pages.ValidationScript, "application/javascript"));
+
+// Validation endpoint: accepts captured SSE lines and validates them
+app.MapPost("/api/validate", (HostedWorkflowHandoff.CapturedSseStream captured) =>
+{
+ var validator = new HostedWorkflowHandoff.ResponseStreamValidator();
+ foreach (var evt in captured.Events)
+ {
+ validator.ProcessEvent(evt.EventType, evt.Data);
+ }
+
+ validator.Complete();
+ return Results.Json(validator.GetResult());
+});
+
+app.Run();
+
+// ---------------------------------------------------------------------------
+// Local tool definitions
+// ---------------------------------------------------------------------------
+
+// ---------------------------------------------------------------------------
+// Dev-only credential: reads a pre-fetched bearer token from AZURE_BEARER_TOKEN.
+// When the value is missing or set to "DefaultAzureCredential", this credential
+// throws CredentialUnavailableException so the ChainedTokenCredential falls
+// through to DefaultAzureCredential.
+// ---------------------------------------------------------------------------
+
+[Description("Gets the current date and time in the specified timezone.")]
+static string GetCurrentTime(
+ [Description("IANA timezone (e.g. 'America/New_York', 'Europe/London', 'UTC'). Defaults to UTC.")]
+ string timezone = "UTC")
+{
+ try
+ {
+ var tz = TimeZoneInfo.FindSystemTimeZoneById(timezone);
+ return TimeZoneInfo.ConvertTimeFromUtc(DateTime.UtcNow, tz).ToString("F");
+ }
+ catch
+ {
+ return DateTime.UtcNow.ToString("F") + " (UTC - unknown timezone: " + timezone + ")";
+ }
+}
+
+[Description("Gets the current weather for a location. Returns temperature, conditions, and humidity.")]
+static string GetWeather(
+ [Description("The city or location (e.g. 'Seattle', 'London, UK').")]
+ string location)
+{
+ // Simulated weather - seeded per location so results are stable within one process run (note: string.GetHashCode is randomized across .NET runs)
+ var rng = new Random(location.ToUpperInvariant().GetHashCode());
+ var temp = rng.Next(-5, 35);
+ string[] conditions = ["sunny", "partly cloudy", "overcast", "rainy", "snowy", "windy", "foggy"];
+ var condition = conditions[rng.Next(conditions.Length)];
+ return $"Weather in {location}: {temp}C, {condition}. Humidity: {rng.Next(30, 90)}%. Wind: {rng.Next(5, 30)} km/h.";
+}
+
+internal sealed class DevTemporaryTokenCredential : TokenCredential
+{
+ private const string EnvironmentVariable = "AZURE_BEARER_TOKEN";
+ private readonly string? _token;
+
+ public DevTemporaryTokenCredential()
+ {
+ this._token = Environment.GetEnvironmentVariable(EnvironmentVariable);
+ }
+
+ public override AccessToken GetToken(TokenRequestContext requestContext, CancellationToken cancellationToken)
+ => this.GetAccessToken();
+
+ public override ValueTask<AccessToken> GetTokenAsync(TokenRequestContext requestContext, CancellationToken cancellationToken)
+ => new(this.GetAccessToken());
+
+ private AccessToken GetAccessToken()
+ {
+ if (string.IsNullOrEmpty(this._token) || this._token == "DefaultAzureCredential")
+ {
+ throw new CredentialUnavailableException($"{EnvironmentVariable} environment variable is not set.");
+ }
+
+ return new AccessToken(this._token, DateTimeOffset.UtcNow.AddHours(1));
+ }
+}
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/README.md b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/README.md
new file mode 100644
index 0000000000..643af74551
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/README.md
@@ -0,0 +1,126 @@
+# Hosted-Workflow-Handoff
+
+A hosted agent server demonstrating two patterns in a single app:
+
+- **`tool-agent`** — an agent with local tools (time, weather) plus remote Microsoft Learn MCP tools
+- **`triage-workflow`** — a handoff workflow that routes conversations to specialist agents (code expert or creative writer) using `AgentWorkflowBuilder`
+
+Both agents are served over the Responses protocol. The server also exposes interactive web demos at `/tool-demo` and `/workflow-demo`.
+
+> Unlike the other samples in this folder, this one connects to an **Azure OpenAI** resource directly (not an Azure AI Foundry project endpoint).
+
+## Prerequisites
+
+- [.NET 10 SDK](https://dotnet.microsoft.com/download/dotnet/10.0)
+- An Azure OpenAI resource with a deployed model (e.g., `gpt-4o`)
+- Azure CLI logged in (`az login`)
+
+## Configuration
+
+Copy the template and fill in your values:
+
+```bash
+cp .env.example .env
+```
+
+Edit `.env`:
+
+```env
+AZURE_OPENAI_ENDPOINT=https://.openai.azure.com/
+AZURE_OPENAI_DEPLOYMENT=gpt-4o
+AZURE_BEARER_TOKEN=DefaultAzureCredential
+ASPNETCORE_URLS=http://+:8088
+ASPNETCORE_ENVIRONMENT=Development
+```
+
+`AZURE_BEARER_TOKEN=DefaultAzureCredential` is a sentinel value that tells the app to skip the bearer token and fall through to `DefaultAzureCredential` (requires `az login`). Set it to a real token only when running in Docker.
+
+> **Note:** `.env` is gitignored. The `.env.example` template is checked in as a reference.
+
+## Running directly (contributors)
+
+```bash
+cd dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff
+dotnet run
+```
+
+The server starts on `http://localhost:8088`. Open `http://localhost:8088` to see the demo index page.
+
+### Test it
+
+Using the Azure Developer CLI (invokes `triage-workflow` — the primary/default agent):
+
+```bash
+azd ai agent invoke --local "Write me a short poem about coding"
+```
+
+To target a specific agent by name, use curl:
+
+```bash
+# Invoke triage-workflow explicitly
+curl -X POST http://localhost:8088/responses \
+ -H "Content-Type: application/json" \
+ -d '{"input": "Write me a haiku about autumn", "model": "triage-workflow"}'
+```
+
+```bash
+# Invoke tool-agent (local tools + MCP)
+curl -X POST http://localhost:8088/responses \
+ -H "Content-Type: application/json" \
+ -d '{"input": "What time is it in Tokyo?", "model": "tool-agent"}'
+```
+
+## Running with Docker
+
+### 1. Publish for the container runtime
+
+```bash
+dotnet publish -c Debug -f net10.0 -r linux-musl-x64 --self-contained false -o out
+```
+
+### 2. Build the Docker image
+
+```bash
+docker build -f Dockerfile.contributor -t hosted-workflow-handoff .
+```
+
+### 3. Run the container
+
+```bash
+export AZURE_BEARER_TOKEN=$(az account get-access-token --resource https://ai.azure.com --query accessToken -o tsv)
+
+docker run --rm -p 8088:8088 \
+ -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN \
+ --env-file .env \
+ hosted-workflow-handoff
+```
+
+### 4. Test it
+
+```bash
+azd ai agent invoke --local "Explain async/await in C#"
+```
+
+## How the triage workflow works
+
+```
+User message
+ │
+ ▼
+┌──────────────┐
+│ Triage Agent │ ──routes──▶ ┌─────────────┐
+│ (router) │ │ Code Expert │
+└──────────────┘ └─────────────┘
+ ▲ │
+ │◀──────────────────────────────┘
+ │
+ └──routes──▶ ┌─────────────────┐
+ │ Creative Writer │
+ └─────────────────┘
+```
+
+The triage agent receives every message and hands off to the appropriate specialist. Specialists route back to the triage agent after responding, allowing for multi-turn conversations.
+
+## NuGet package users
+
+Use the standard `Dockerfile` instead of `Dockerfile.contributor`. See the commented section in `HostedWorkflowHandoff.csproj` for the `PackageReference` alternative.
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/ResponseStreamValidator.cs b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/ResponseStreamValidator.cs
new file mode 100644
index 0000000000..75822608e5
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/ResponseStreamValidator.cs
@@ -0,0 +1,601 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Text.Json;
+using System.Text.Json.Serialization;
+
+namespace HostedWorkflowHandoff;
+
+/// <summary>Captured SSE event for validation.</summary>
+[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1812:AvoidUninstantiatedInternalClasses", Justification = "Instantiated by JSON deserialization")]
+internal sealed record CapturedSseEvent(
+ [property: JsonPropertyName("eventType")] string EventType,
+ [property: JsonPropertyName("data")] string Data);
+
+/// <summary>Captured SSE stream sent from the client for server-side validation.</summary>
+[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1812:AvoidUninstantiatedInternalClasses", Justification = "Instantiated by JSON deserialization")]
+internal sealed record CapturedSseStream(
+ [property: JsonPropertyName("events")] List<CapturedSseEvent> Events);
+
+/// <summary>
+/// Validates an SSE event stream from the Azure AI Responses Server SDK against
+/// the API behaviour contract. Feed events sequentially via <see cref="ProcessEvent"/>
+/// and call <see cref="Complete"/> when the stream ends.
+/// </summary>
+internal sealed class ResponseStreamValidator
+{
+ private readonly List<ValidationViolation> _violations = [];
+ private int _eventCount;
+ private int _expectedSequenceNumber;
+ private StreamState _state = StreamState.Initial;
+ private string? _responseId;
+ private readonly HashSet<int> _addedItemIndices = [];
+ private readonly HashSet<int> _doneItemIndices = [];
+ private readonly HashSet<string> _addedContentParts = []; // "outputIdx:partIdx"
+ private readonly HashSet<string> _doneContentParts = [];
+ private readonly Dictionary<string, string> _textAccumulators = []; // "outputIdx:contentIdx" → accumulated text
+ private bool _hasTerminal;
+
+ /// <summary>All violations found so far.</summary>
+ internal IReadOnlyList<ValidationViolation> Violations => this._violations;
+
+ /// <summary>
+ /// Processes a single SSE event line pair (event type + JSON data).
+ /// </summary>
+ /// <param name="eventType">The SSE event type (e.g. "response.created").</param>
+ /// <param name="jsonData">The raw JSON data payload.</param>
+ internal void ProcessEvent(string eventType, string jsonData)
+ {
+ JsonElement data;
+ try
+ {
+ data = JsonDocument.Parse(jsonData).RootElement;
+ }
+ catch (JsonException ex)
+ {
+ this.Fail("PARSE-01", $"Invalid JSON in event data: {ex.Message}");
+ return;
+ }
+
+ this._eventCount++;
+
+ // ── Sequence number validation ──────────────────────────────────
+ if (data.TryGetProperty("sequence_number", out var seqProp) && seqProp.ValueKind == JsonValueKind.Number)
+ {
+ int seq = seqProp.GetInt32();
+ if (seq != this._expectedSequenceNumber)
+ {
+ this.Fail("SEQ-01", $"Expected sequence_number {this._expectedSequenceNumber}, got {seq}");
+ }
+
+ this._expectedSequenceNumber = seq + 1;
+ }
+ else if (this._state != StreamState.Initial || eventType != "error")
+ {
+ // Pre-creation error events may not have sequence_number
+ this.Fail("SEQ-02", $"Missing sequence_number on event '{eventType}'");
+ }
+
+ // ── Post-terminal guard ─────────────────────────────────────────
+ if (this._hasTerminal)
+ {
+ this.Fail("TERM-01", $"Event '{eventType}' received after terminal event");
+ return;
+ }
+
+ // ── Dispatch by event type ──────────────────────────────────────
+ switch (eventType)
+ {
+ case "response.created":
+ this.ValidateResponseCreated(data);
+ break;
+
+ case "response.queued":
+ this.ValidateStateTransition(eventType, StreamState.Created, StreamState.Queued);
+ this.ValidateResponseEnvelope(data, eventType);
+ break;
+
+ case "response.in_progress":
+ if (this._state is StreamState.Created or StreamState.Queued)
+ {
+ this._state = StreamState.InProgress;
+ }
+ else
+ {
+ this.Fail("ORDER-02", $"'response.in_progress' received in state {this._state} (expected Created or Queued)");
+ }
+
+ this.ValidateResponseEnvelope(data, eventType);
+ break;
+
+ case "response.output_item.added":
+ case "output_item.added":
+ this.ValidateInProgress(eventType);
+ this.ValidateOutputItemAdded(data);
+ break;
+
+ case "response.output_item.done":
+ case "output_item.done":
+ this.ValidateInProgress(eventType);
+ this.ValidateOutputItemDone(data);
+ break;
+
+ case "response.content_part.added":
+ case "content_part.added":
+ this.ValidateInProgress(eventType);
+ this.ValidateContentPartAdded(data);
+ break;
+
+ case "response.content_part.done":
+ case "content_part.done":
+ this.ValidateInProgress(eventType);
+ this.ValidateContentPartDone(data);
+ break;
+
+ case "response.output_text.delta":
+ case "output_text.delta":
+ this.ValidateInProgress(eventType);
+ this.ValidateTextDelta(data);
+ break;
+
+ case "response.output_text.done":
+ case "output_text.done":
+ this.ValidateInProgress(eventType);
+ this.ValidateTextDone(data);
+ break;
+
+ case "response.function_call_arguments.delta":
+ case "function_call_arguments.delta":
+ this.ValidateInProgress(eventType);
+ break;
+
+ case "response.function_call_arguments.done":
+ case "function_call_arguments.done":
+ this.ValidateInProgress(eventType);
+ break;
+
+ case "response.completed":
+ this.ValidateTerminal(data, "completed");
+ break;
+
+ case "response.failed":
+ this.ValidateTerminal(data, "failed");
+ break;
+
+ case "response.incomplete":
+ this.ValidateTerminal(data, "incomplete");
+ break;
+
+ case "error":
+ // Pre-creation error — standalone, no response.created precedes it
+ if (this._state != StreamState.Initial)
+ {
+ this.Fail("ERR-01", "'error' event received after response.created — should use response.failed instead");
+ }
+
+ this._hasTerminal = true;
+ break;
+
+ default:
+ // Unknown events are not violations — the spec may evolve
+ break;
+ }
+ }
+
+ /// <summary>
+ /// Call after the stream ends. Checks that a terminal event was received.
+ /// </summary>
+ internal void Complete()
+ {
+ if (!this._hasTerminal && this._state != StreamState.Initial)
+ {
+ this.Fail("TERM-02", "Stream ended without a terminal event (response.completed, response.failed, or response.incomplete)");
+ }
+
+ if (this._state == StreamState.Initial && this._eventCount == 0)
+ {
+ this.Fail("EMPTY-01", "No events received in the stream");
+ }
+
+ // Check for output items that were added but never completed
+ foreach (int idx in this._addedItemIndices)
+ {
+ if (!this._doneItemIndices.Contains(idx))
+ {
+ this.Fail("ITEM-03", $"Output item at index {idx} was added but never received output_item.done");
+ }
+ }
+
+ // Check for content parts that were added but never completed
+ foreach (string key in this._addedContentParts)
+ {
+ if (!this._doneContentParts.Contains(key))
+ {
+ this.Fail("CONTENT-03", $"Content part '{key}' was added but never received content_part.done");
+ }
+ }
+ }
+
+ /// <summary>
+ /// Returns a summary of all validation results.
+ /// </summary>
+ internal ValidationResult GetResult()
+ {
+ return new ValidationResult(
+ EventCount: this._eventCount,
+ IsValid: this._violations.Count == 0,
+ Violations: [.. this._violations]);
+ }
+
+ // ═══════════════════════════════════════════════════════════════════════
+ // Event-specific validators
+ // ═══════════════════════════════════════════════════════════════════════
+
+ private void ValidateResponseCreated(JsonElement data)
+ {
+ if (this._state != StreamState.Initial)
+ {
+ this.Fail("ORDER-01", $"'response.created' received in state {this._state} (expected Initial — must be first event)");
+ return;
+ }
+
+ this._state = StreamState.Created;
+
+ // Must have a response envelope
+ if (!data.TryGetProperty("response", out var resp))
+ {
+ this.Fail("FIELD-01", "'response.created' missing 'response' object");
+ return;
+ }
+
+ // Required response fields
+ this.ValidateRequiredResponseFields(resp, "response.created");
+
+ // Capture response ID for cross-event checks
+ if (resp.TryGetProperty("id", out var idProp))
+ {
+ this._responseId = idProp.GetString();
+ }
+
+ // Status must be non-terminal
+ if (resp.TryGetProperty("status", out var statusProp))
+ {
+ string? status = statusProp.GetString();
+ if (status is "completed" or "failed" or "incomplete" or "cancelled")
+ {
+ this.Fail("STATUS-01", $"'response.created' has terminal status '{status}' — must be 'queued' or 'in_progress'");
+ }
+ }
+ }
+
+ private void ValidateTerminal(JsonElement data, string expectedKind)
+ {
+ if (this._state is StreamState.Initial or StreamState.Created)
+ {
+ this.Fail("ORDER-03", $"Terminal event 'response.{expectedKind}' received before 'response.in_progress'");
+ }
+
+ this._hasTerminal = true;
+ this._state = StreamState.Terminal;
+
+ if (!data.TryGetProperty("response", out var resp))
+ {
+ this.Fail("FIELD-01", $"'response.{expectedKind}' missing 'response' object");
+ return;
+ }
+
+ this.ValidateRequiredResponseFields(resp, $"response.{expectedKind}");
+
+ if (resp.TryGetProperty("status", out var statusProp))
+ {
+ string? status = statusProp.GetString();
+
+ // completed_at validation (B6)
+ bool hasCompletedAt = resp.TryGetProperty("completed_at", out var catProp)
+ && catProp.ValueKind != JsonValueKind.Null;
+
+ if (status == "completed" && !hasCompletedAt)
+ {
+ this.Fail("FIELD-02", "'completed_at' must be non-null when status is 'completed'");
+ }
+
+ if (status != "completed" && hasCompletedAt)
+ {
+ this.Fail("FIELD-03", $"'completed_at' must be null when status is '{status}'");
+ }
+
+ // error field validation
+ bool hasError = resp.TryGetProperty("error", out var errProp)
+ && errProp.ValueKind != JsonValueKind.Null;
+
+ if (status == "failed" && !hasError)
+ {
+ this.Fail("FIELD-04", "'error' must be non-null when status is 'failed'");
+ }
+
+ if (status is "completed" or "incomplete" && hasError)
+ {
+ this.Fail("FIELD-05", $"'error' must be null when status is '{status}'");
+ }
+
+ // error structure validation
+ if (hasError)
+ {
+ this.ValidateErrorObject(errProp, $"response.{expectedKind}");
+ }
+
+ // cancelled output must be empty (B11)
+ if (status == "cancelled" && resp.TryGetProperty("output", out var outputProp)
+ && outputProp.ValueKind == JsonValueKind.Array && outputProp.GetArrayLength() > 0)
+ {
+ this.Fail("CANCEL-01", "Cancelled response must have empty output array (B11)");
+ }
+
+ // response ID consistency
+ if (this._responseId is not null && resp.TryGetProperty("id", out var idProp)
+ && idProp.GetString() != this._responseId)
+ {
+ this.Fail("ID-01", $"Response ID changed: was '{this._responseId}', now '{idProp.GetString()}'");
+ }
+ }
+
+ // Usage validation (optional, but if present must be structured correctly)
+ if (resp.TryGetProperty("usage", out var usageProp) && usageProp.ValueKind == JsonValueKind.Object)
+ {
+ this.ValidateUsage(usageProp, $"response.{expectedKind}");
+ }
+ }
+
+ private void ValidateOutputItemAdded(JsonElement data)
+ {
+ if (data.TryGetProperty("output_index", out var idxProp) && idxProp.ValueKind == JsonValueKind.Number)
+ {
+ int index = idxProp.GetInt32();
+ if (!this._addedItemIndices.Add(index))
+ {
+ this.Fail("ITEM-01", $"Duplicate output_item.added for output_index {index}");
+ }
+ }
+ else
+ {
+ this.Fail("FIELD-06", "output_item.added missing 'output_index' field");
+ }
+
+ if (!data.TryGetProperty("item", out _))
+ {
+ this.Fail("FIELD-07", "output_item.added missing 'item' object");
+ }
+ }
+
+ private void ValidateOutputItemDone(JsonElement data)
+ {
+ if (data.TryGetProperty("output_index", out var idxProp) && idxProp.ValueKind == JsonValueKind.Number)
+ {
+ int index = idxProp.GetInt32();
+ if (!this._addedItemIndices.Contains(index))
+ {
+ this.Fail("ITEM-02", $"output_item.done for output_index {index} without preceding output_item.added");
+ }
+
+ this._doneItemIndices.Add(index);
+ }
+ else
+ {
+ this.Fail("FIELD-06", "output_item.done missing 'output_index' field");
+ }
+ }
+
+ private void ValidateContentPartAdded(JsonElement data)
+ {
+ string key = GetContentPartKey(data);
+ if (!this._addedContentParts.Add(key))
+ {
+ this.Fail("CONTENT-01", $"Duplicate content_part.added for {key}");
+ }
+ }
+
+ private void ValidateContentPartDone(JsonElement data)
+ {
+ string key = GetContentPartKey(data);
+ if (!this._addedContentParts.Contains(key))
+ {
+ this.Fail("CONTENT-02", $"content_part.done for {key} without preceding content_part.added");
+ }
+
+ this._doneContentParts.Add(key);
+ }
+
+ private void ValidateTextDelta(JsonElement data)
+ {
+ string key = GetTextKey(data);
+ string delta = data.TryGetProperty("delta", out var deltaProp)
+ ? deltaProp.GetString() ?? string.Empty
+ : string.Empty;
+
+ if (!this._textAccumulators.TryGetValue(key, out string? existing))
+ {
+ this._textAccumulators[key] = delta;
+ }
+ else
+ {
+ this._textAccumulators[key] = existing + delta;
+ }
+ }
+
+ private void ValidateTextDone(JsonElement data)
+ {
+ string key = GetTextKey(data);
+ string? finalText = data.TryGetProperty("text", out var textProp)
+ ? textProp.GetString()
+ : null;
+
+ if (finalText is null)
+ {
+ this.Fail("TEXT-01", $"output_text.done for {key} missing 'text' field");
+ return;
+ }
+
+ if (this._textAccumulators.TryGetValue(key, out string? accumulated) && accumulated != finalText)
+ {
+ this.Fail("TEXT-02", $"output_text.done text for {key} does not match accumulated deltas (accumulated {accumulated.Length} chars, done has {finalText.Length} chars)");
+ }
+ }
+
+ // ═══════════════════════════════════════════════════════════════════════
+ // Shared field validators
+ // ═══════════════════════════════════════════════════════════════════════
+
+ private void ValidateRequiredResponseFields(JsonElement resp, string context)
+ {
+ if (!HasNonNullString(resp, "id"))
+ {
+ this.Fail("FIELD-01", $"{context}: response missing 'id'");
+ }
+
+ if (resp.TryGetProperty("object", out var objProp))
+ {
+ if (objProp.GetString() != "response")
+ {
+ this.Fail("FIELD-08", $"{context}: response.object must be 'response', got '{objProp.GetString()}'");
+ }
+ }
+ else
+ {
+ this.Fail("FIELD-08", $"{context}: response missing 'object' field");
+ }
+
+ if (!resp.TryGetProperty("created_at", out var catProp) || catProp.ValueKind == JsonValueKind.Null)
+ {
+ this.Fail("FIELD-09", $"{context}: response missing 'created_at'");
+ }
+
+ if (!resp.TryGetProperty("status", out _))
+ {
+ this.Fail("FIELD-10", $"{context}: response missing 'status'");
+ }
+
+ if (!resp.TryGetProperty("output", out var outputProp) || outputProp.ValueKind != JsonValueKind.Array)
+ {
+ this.Fail("FIELD-11", $"{context}: response missing 'output' array");
+ }
+ }
+
+ private void ValidateErrorObject(JsonElement error, string context)
+ {
+ if (!HasNonNullString(error, "code"))
+ {
+ this.Fail("ERR-02", $"{context}: error object missing 'code' field");
+ }
+
+ if (!HasNonNullString(error, "message"))
+ {
+ this.Fail("ERR-03", $"{context}: error object missing 'message' field");
+ }
+ }
+
+ private void ValidateUsage(JsonElement usage, string context)
+ {
+ if (!usage.TryGetProperty("input_tokens", out _))
+ {
+ this.Fail("USAGE-01", $"{context}: usage missing 'input_tokens'");
+ }
+
+ if (!usage.TryGetProperty("output_tokens", out _))
+ {
+ this.Fail("USAGE-02", $"{context}: usage missing 'output_tokens'");
+ }
+
+ if (!usage.TryGetProperty("total_tokens", out _))
+ {
+ this.Fail("USAGE-03", $"{context}: usage missing 'total_tokens'");
+ }
+ }
+
+ private void ValidateResponseEnvelope(JsonElement data, string eventType)
+ {
+ if (!data.TryGetProperty("response", out var resp))
+ {
+ this.Fail("FIELD-01", $"'{eventType}' missing 'response' object");
+ return;
+ }
+
+ this.ValidateRequiredResponseFields(resp, eventType);
+
+ // Response ID consistency
+ if (this._responseId is not null && resp.TryGetProperty("id", out var idProp)
+ && idProp.GetString() != this._responseId)
+ {
+ this.Fail("ID-01", $"Response ID changed: was '{this._responseId}', now '{idProp.GetString()}'");
+ }
+ }
+
+ // ═══════════════════════════════════════════════════════════════════════
+ // Helpers
+ // ═══════════════════════════════════════════════════════════════════════
+
+ private void ValidateInProgress(string eventType)
+ {
+ if (this._state != StreamState.InProgress)
+ {
+ this.Fail("ORDER-04", $"'{eventType}' received in state {this._state} (expected InProgress)");
+ }
+ }
+
+ private void ValidateStateTransition(string eventType, StreamState expected, StreamState next)
+ {
+ if (this._state != expected)
+ {
+ this.Fail("ORDER-05", $"'{eventType}' received in state {this._state} (expected {expected})");
+ }
+ else
+ {
+ this._state = next;
+ }
+ }
+
+ private void Fail(string ruleId, string message)
+ {
+ this._violations.Add(new ValidationViolation(ruleId, message, this._eventCount));
+ }
+
+ private static bool HasNonNullString(JsonElement obj, string property)
+ {
+ return obj.TryGetProperty(property, out var prop)
+ && prop.ValueKind == JsonValueKind.String
+ && !string.IsNullOrEmpty(prop.GetString());
+ }
+
+ private static string GetContentPartKey(JsonElement data)
+ {
+ int outputIdx = data.TryGetProperty("output_index", out var oi) ? oi.GetInt32() : -1;
+ int partIdx = data.TryGetProperty("content_index", out var pi) ? pi.GetInt32() : -1;
+ return $"{outputIdx}:{partIdx}";
+ }
+
+ private static string GetTextKey(JsonElement data)
+ {
+ int outputIdx = data.TryGetProperty("output_index", out var oi) ? oi.GetInt32() : -1;
+ int contentIdx = data.TryGetProperty("content_index", out var ci) ? ci.GetInt32() : -1;
+ return $"{outputIdx}:{contentIdx}";
+ }
+
+ private enum StreamState
+ {
+ Initial,
+ Created,
+ Queued,
+ InProgress,
+ Terminal,
+ }
+}
+
+/// <summary>A single validation violation.</summary>
+/// <param name="RuleId">The rule identifier (e.g. SEQ-01, FIELD-02).</param>
+/// <param name="Message">Human-readable description of the violation.</param>
+/// <param name="EventIndex">1-based index of the event that triggered this violation.</param>
+internal sealed record ValidationViolation(string RuleId, string Message, int EventIndex);
+
+/// <summary>Overall validation result.</summary>
+/// <param name="EventCount">Total number of events processed.</param>
+/// <param name="IsValid">True if no violations were found.</param>
+/// <param name="Violations">List of all violations.</param>
+internal sealed record ValidationResult(int EventCount, bool IsValid, IReadOnlyList<ValidationViolation> Violations);
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/agent.manifest.yaml b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/agent.manifest.yaml
new file mode 100644
index 0000000000..7909463901
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/agent.manifest.yaml
@@ -0,0 +1,30 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/AgentManifest.yaml
+name: triage-workflow
+displayName: "Triage Handoff Workflow Agent"
+
+description: >
+ A hosted agent demonstrating two patterns in a single server: a tool-equipped agent
+ with local tools and remote MCP tools, and a triage workflow that routes conversations
+ to specialist agents (code expert or creative writer) via handoff orchestration.
+
+metadata:
+ tags:
+ - AI Agent Hosting
+ - Azure AI AgentServer
+ - Responses Protocol
+ - Workflows
+ - Handoff
+ - Agent Framework
+
+template:
+ name: triage-workflow
+ kind: hosted
+ protocols:
+ - protocol: responses
+ version: 1.0.0
+ resources:
+ cpu: "0.25"
+ memory: 0.5Gi
+parameters:
+ properties: []
+resources: []
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/agent.yaml b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/agent.yaml
new file mode 100644
index 0000000000..6b192c4eb6
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Handoff/agent.yaml
@@ -0,0 +1,9 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/ContainerAgent.yaml
+kind: hosted
+name: triage-workflow
+protocols:
+ - protocol: responses
+ version: 1.0.0
+resources:
+ cpu: "0.25"
+ memory: 0.5Gi
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/.env.example b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/.env.example
new file mode 100644
index 0000000000..b8fe9e8e7a
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/.env.example
@@ -0,0 +1,5 @@
+AZURE_AI_PROJECT_ENDPOINT=
+ASPNETCORE_URLS=http://+:8088
+ASPNETCORE_ENVIRONMENT=Development
+AZURE_AI_MODEL_DEPLOYMENT_NAME=gpt-4o
+AZURE_BEARER_TOKEN=DefaultAzureCredential
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/Dockerfile b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/Dockerfile
new file mode 100644
index 0000000000..e770ec172b
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/Dockerfile
@@ -0,0 +1,17 @@
+# Use the official .NET 10.0 ASP.NET runtime as a parent image
+FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS base
+WORKDIR /app
+
+FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
+WORKDIR /src
+COPY . .
+RUN dotnet restore
+RUN dotnet publish -c Release -o /app/publish
+
+# Final stage
+FROM base AS final
+WORKDIR /app
+COPY --from=build /app/publish .
+EXPOSE 8088
+ENV ASPNETCORE_URLS=http://+:8088
+ENTRYPOINT ["dotnet", "HostedWorkflowSimple.dll"]
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/Dockerfile.contributor b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/Dockerfile.contributor
new file mode 100644
index 0000000000..17a924237f
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/Dockerfile.contributor
@@ -0,0 +1,18 @@
+# Dockerfile for contributors building from the agent-framework repository source.
+#
+# This project uses ProjectReference to the local source, which means a standard
+# multi-stage Docker build cannot resolve dependencies outside this folder.
+# Pre-publish the app targeting the container runtime and copy the output:
+#
+# dotnet publish -c Debug -f net10.0 -r linux-musl-x64 --self-contained false -o out
+# docker build -f Dockerfile.contributor -t hosted-workflow-simple .
+# docker run --rm -p 8088:8088 -e AGENT_NAME=hosted-workflow-simple -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN --env-file .env hosted-workflow-simple
+#
+# For end-users consuming the NuGet package (not ProjectReference), use the standard
+# Dockerfile which performs a full dotnet restore + publish inside the container.
+FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
+WORKDIR /app
+COPY out/ .
+EXPOSE 8088
+ENV ASPNETCORE_URLS=http://+:8088
+ENTRYPOINT ["dotnet", "HostedWorkflowSimple.dll"]
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/HostedWorkflowSimple.csproj b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/HostedWorkflowSimple.csproj
new file mode 100644
index 0000000000..a5460564a9
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/HostedWorkflowSimple.csproj
@@ -0,0 +1,34 @@
+
+
+
+ net10.0
+ enable
+ enable
+ false
+ HostedWorkflowSimple
+ HostedWorkflowSimple
+ $(NoWarn);
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/Program.cs b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/Program.cs
new file mode 100644
index 0000000000..558aef11d4
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/Program.cs
@@ -0,0 +1,97 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+// Translation Chain Workflow Agent — demonstrates how to compose multiple AI agents
+// into a sequential workflow pipeline. Three translation agents are connected:
+// English → French → Spanish → English, showing how agents can be orchestrated
+// as workflow executors in a hosted agent.
+
+using Azure.AI.Projects;
+using Azure.Core;
+using Azure.Identity;
+using DotNetEnv;
+using Microsoft.Agents.AI;
+using Microsoft.Agents.AI.Foundry.Hosting;
+using Microsoft.Agents.AI.Workflows;
+using Microsoft.Extensions.AI;
+
+// Load .env file if present (for local development)
+Env.TraversePath().Load();
+
+string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT")
+ ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
+string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o";
+
+// Use a chained credential: try a temporary dev token first (for local Docker debugging),
+// then fall back to DefaultAzureCredential (for local dev via dotnet run / managed identity in production).
+TokenCredential credential = new ChainedTokenCredential(
+ new DevTemporaryTokenCredential(),
+ new DefaultAzureCredential());
+
+// Create a chat client from the Foundry project
+IChatClient chatClient = new AIProjectClient(new Uri(endpoint), credential)
+ .GetProjectOpenAIClient()
+ .GetChatClient(deploymentName)
+ .AsIChatClient();
+
+// Create translation agents
+AIAgent frenchAgent = chatClient.AsAIAgent("You are a translation assistant that translates the provided text to French.");
+AIAgent spanishAgent = chatClient.AsAIAgent("You are a translation assistant that translates the provided text to Spanish.");
+AIAgent englishAgent = chatClient.AsAIAgent("You are a translation assistant that translates the provided text to English.");
+
+// Build the sequential workflow: French → Spanish → English
+AIAgent agent = new WorkflowBuilder(frenchAgent)
+ .AddEdge(frenchAgent, spanishAgent)
+ .AddEdge(spanishAgent, englishAgent)
+ .Build()
+ .AsAIAgent(
+ name: Environment.GetEnvironmentVariable("AGENT_NAME") ?? "hosted-workflows");
+
+// Host the workflow agent as a Foundry Hosted Agent using the Responses API.
+var builder = WebApplication.CreateBuilder(args);
+builder.Services.AddFoundryResponses(agent);
+
+var app = builder.Build();
+app.MapFoundryResponses();
+
+if (app.Environment.IsDevelopment())
+{
+ app.MapFoundryResponses("openai/v1");
+}
+
+app.Run();
+
+///
+/// A <see cref="TokenCredential"/> for local Docker debugging only.
+/// Reads a pre-fetched bearer token from the AZURE_BEARER_TOKEN environment variable
+/// once at startup. This should NOT be used in production.
+///
+/// Generate a token on your host and pass it to the container:
+/// export AZURE_BEARER_TOKEN=$(az account get-access-token --resource https://ai.azure.com --query accessToken -o tsv)
+/// docker run -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN ...
+///
+internal sealed class DevTemporaryTokenCredential : TokenCredential
+{
+ private const string EnvironmentVariable = "AZURE_BEARER_TOKEN";
+ private readonly string? _token;
+
+ public DevTemporaryTokenCredential()
+ {
+ this._token = Environment.GetEnvironmentVariable(EnvironmentVariable);
+ }
+
+ public override AccessToken GetToken(TokenRequestContext requestContext, CancellationToken cancellationToken)
+ => this.GetAccessToken();
+
+ public override ValueTask GetTokenAsync(TokenRequestContext requestContext, CancellationToken cancellationToken)
+ => new(this.GetAccessToken());
+
+ private AccessToken GetAccessToken()
+ {
+ if (string.IsNullOrEmpty(this._token) || this._token == "DefaultAzureCredential")
+ {
+ throw new CredentialUnavailableException($"{EnvironmentVariable} environment variable is not set.");
+ }
+
+ return new AccessToken(this._token, DateTimeOffset.UtcNow.AddHours(1));
+ }
+}
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/README.md b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/README.md
new file mode 100644
index 0000000000..0cd438f5f1
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/README.md
@@ -0,0 +1,109 @@
+# Hosted-Workflow-Simple
+
+A hosted agent that demonstrates **multi-agent workflow orchestration**. Three translation agents are composed into a sequential pipeline: English → French → Spanish → English, showing how agents can be chained as workflow executors using `WorkflowBuilder`.
+
+## Prerequisites
+
+- [.NET 10 SDK](https://dotnet.microsoft.com/download/dotnet/10.0)
+- An Azure AI Foundry project with a deployed model (e.g., `gpt-4o`)
+- Azure CLI logged in (`az login`)
+
+## Configuration
+
+Copy the template and fill in your project endpoint:
+
+```bash
+cp .env.example .env
+```
+
+Edit `.env` and set your Azure AI Foundry project endpoint:
+
+```env
+AZURE_AI_PROJECT_ENDPOINT=https://.services.ai.azure.com/api/projects/
+ASPNETCORE_URLS=http://+:8088
+ASPNETCORE_ENVIRONMENT=Development
+AZURE_AI_MODEL_DEPLOYMENT_NAME=gpt-4o
+```
+
+> **Note:** `.env` is gitignored. The `.env.example` template is checked in as a reference.
+
+## Running directly (contributors)
+
+```bash
+cd dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple
+AGENT_NAME=hosted-workflows dotnet run
+```
+
+The agent will start on `http://localhost:8088`.
+
+### Test it
+
+Using the Azure Developer CLI:
+
+```bash
+azd ai agent invoke --local "The quick brown fox jumps over the lazy dog"
+```
+
+Or with curl:
+
+```bash
+curl -X POST http://localhost:8088/responses \
+ -H "Content-Type: application/json" \
+  -d '{"input": "The quick brown fox jumps over the lazy dog", "model": "hosted-workflows"}'
+```
+
+The text will be translated through the chain: English → French → Spanish → English.
+
+## Running with Docker
+
+### 1. Publish for the container runtime
+
+```bash
+dotnet publish -c Debug -f net10.0 -r linux-musl-x64 --self-contained false -o out
+```
+
+### 2. Build the Docker image
+
+```bash
+docker build -f Dockerfile.contributor -t hosted-workflow-simple .
+```
+
+### 3. Run the container
+
+```bash
+export AZURE_BEARER_TOKEN=$(az account get-access-token --resource https://ai.azure.com --query accessToken -o tsv)
+
+docker run --rm -p 8088:8088 \
+ -e AGENT_NAME=hosted-workflow-simple \
+ -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN \
+ --env-file .env \
+ hosted-workflow-simple
+```
+
+### 4. Test it
+
+```bash
+azd ai agent invoke --local "Hello, how are you today?"
+```
+
+## How the workflow works
+
+```
+Input text
+ │
+ ▼
+┌─────────────┐ ┌──────────────┐ ┌──────────────┐
+│ French Agent │ → │ Spanish Agent │ → │ English Agent │
+│ (translate) │ │ (translate) │ │ (translate) │
+└─────────────┘ └──────────────┘ └──────────────┘
+ │
+ ▼
+ Final output
+ (back in English)
+```
+
+Each agent in the chain receives the output of the previous agent. The final result demonstrates how meaning is preserved (or subtly shifted) through multiple translation hops.
+
+## NuGet package users
+
+Use the standard `Dockerfile` instead of `Dockerfile.contributor`. See the commented section in `HostedWorkflowSimple.csproj` for the `PackageReference` alternative.
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/agent.manifest.yaml b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/agent.manifest.yaml
new file mode 100644
index 0000000000..e902b6232f
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/agent.manifest.yaml
@@ -0,0 +1,29 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/AgentManifest.yaml
+name: hosted-workflows
+displayName: "Translation Chain Workflow Agent"
+
+description: >
+ A workflow agent that performs sequential translation through multiple languages.
+ Translates text from English to French, then to Spanish, and finally back to English,
+ demonstrating how AI agents can be composed as workflow executors.
+
+metadata:
+ tags:
+ - AI Agent Hosting
+ - Azure AI AgentServer
+ - Responses Protocol
+ - Workflows
+ - Agent Framework
+
+template:
+ name: hosted-workflows
+ kind: hosted
+ protocols:
+ - protocol: responses
+ version: 1.0.0
+ resources:
+ cpu: "0.25"
+ memory: 0.5Gi
+parameters:
+ properties: []
+resources: []
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/agent.yaml b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/agent.yaml
new file mode 100644
index 0000000000..ab138939b4
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Workflow-Simple/agent.yaml
@@ -0,0 +1,9 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/ContainerAgent.yaml
+kind: hosted
+name: hosted-workflows
+protocols:
+ - protocol: responses
+ version: 1.0.0
+resources:
+ cpu: "0.25"
+ memory: 0.5Gi
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Using-Samples/SimpleAgent/Program.cs b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Using-Samples/SimpleAgent/Program.cs
new file mode 100644
index 0000000000..5d9c003dfa
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Using-Samples/SimpleAgent/Program.cs
@@ -0,0 +1,115 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.ClientModel.Primitives;
+using Azure.AI.Extensions.OpenAI;
+using Azure.AI.Projects;
+using Azure.Identity;
+using DotNetEnv;
+using Microsoft.Agents.AI;
+using Microsoft.Agents.AI.Foundry;
+
+// Load .env file if present (for local development)
+Env.TraversePath().Load();
+
+Uri agentEndpoint = new(Environment.GetEnvironmentVariable("AGENT_ENDPOINT")
+ ?? "http://localhost:8088");
+
+var agentName = Environment.GetEnvironmentVariable("AGENT_NAME")
+ ?? throw new InvalidOperationException("AGENT_NAME is not set.");
+
+// ── Create an agent-framework agent backed by the remote agent endpoint ──────
+
+var options = new AIProjectClientOptions();
+
+if (agentEndpoint.Scheme == "http")
+{
+ // For local HTTP dev: tell AIProjectClient the endpoint is HTTPS (to satisfy
+ // BearerTokenPolicy's TLS check), then swap the scheme back to HTTP right
+ // before the request hits the wire.
+
+ agentEndpoint = new UriBuilder(agentEndpoint) { Scheme = "https" }.Uri;
+ options.AddPolicy(new HttpSchemeRewritePolicy(), PipelinePosition.BeforeTransport);
+}
+
+var aiProjectClient = new AIProjectClient(agentEndpoint, new AzureCliCredential(), options);
+FoundryAgent agent = aiProjectClient.AsAIAgent(new AgentReference(agentName));
+
+AgentSession session = await agent.CreateSessionAsync();
+
+// ── REPL ──────────────────────────────────────────────────────────────────────
+
+Console.ForegroundColor = ConsoleColor.Cyan;
+Console.WriteLine($"""
+ ══════════════════════════════════════════════════════════
+ Simple Agent Sample
+ Connected to: {agentEndpoint}
+ Type a message or 'quit' to exit
+ ══════════════════════════════════════════════════════════
+ """);
+Console.ResetColor();
+Console.WriteLine();
+
+while (true)
+{
+ Console.ForegroundColor = ConsoleColor.Green;
+ Console.Write("You> ");
+ Console.ResetColor();
+
+ string? input = Console.ReadLine();
+
+ if (string.IsNullOrWhiteSpace(input)) { continue; }
+ if (input.Equals("quit", StringComparison.OrdinalIgnoreCase)) { break; }
+
+ try
+ {
+ Console.ForegroundColor = ConsoleColor.Yellow;
+ Console.Write("Agent> ");
+ Console.ResetColor();
+
+ await foreach (var update in agent.RunStreamingAsync(input, session))
+ {
+ Console.Write(update);
+ }
+
+ Console.WriteLine();
+ }
+ catch (Exception ex)
+ {
+ Console.ForegroundColor = ConsoleColor.Red;
+ Console.WriteLine($"Error: {ex.Message}");
+ Console.ResetColor();
+ }
+
+ Console.WriteLine();
+}
+
+Console.WriteLine("Goodbye!");
+
+///
+/// For Local Development Only
+/// Rewrites HTTPS URIs to HTTP right before transport, allowing AIProjectClient
+/// to target a local HTTP dev server while satisfying BearerTokenPolicy's TLS check.
+///
+internal sealed class HttpSchemeRewritePolicy : PipelinePolicy
+{
+ public override void Process(PipelineMessage message, IReadOnlyList pipeline, int currentIndex)
+ {
+ RewriteScheme(message);
+ ProcessNext(message, pipeline, currentIndex);
+ }
+
+ public override async ValueTask ProcessAsync(PipelineMessage message, IReadOnlyList pipeline, int currentIndex)
+ {
+ RewriteScheme(message);
+ await ProcessNextAsync(message, pipeline, currentIndex).ConfigureAwait(false);
+ }
+
+ private static void RewriteScheme(PipelineMessage message)
+ {
+ var uri = message.Request.Uri!;
+ if (uri.Scheme == Uri.UriSchemeHttps)
+ {
+ message.Request.Uri = new UriBuilder(uri) { Scheme = "http" }.Uri;
+ }
+ }
+}
diff --git a/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Using-Samples/SimpleAgent/SimpleAgent.csproj b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Using-Samples/SimpleAgent/SimpleAgent.csproj
new file mode 100644
index 0000000000..05e150880e
--- /dev/null
+++ b/dotnet/samples/04-hosting/FoundryHostedAgents/responses/Using-Samples/SimpleAgent/SimpleAgent.csproj
@@ -0,0 +1,24 @@
+
+
+
+ Exe
+ net10.0
+ enable
+ enable
+ false
+ SimpleAgentClient
+ simple-agent-client
+ $(NoWarn);NU1903;NU1605;OPENAI001
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj
deleted file mode 100644
index a56157fe9d..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/AgentThreadAndHITL.csproj
+++ /dev/null
@@ -1,69 +0,0 @@
-
-
-
- Exe
- net10.0
-
- enable
- enable
- $(NoWarn);MEAI001
-
-
- false
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
-
-
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Dockerfile b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Dockerfile
deleted file mode 100644
index 004bd49fa8..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Dockerfile
+++ /dev/null
@@ -1,20 +0,0 @@
-# Build the application
-FROM mcr.microsoft.com/dotnet/sdk:10.0-alpine AS build
-WORKDIR /src
-
-# Copy files from the current directory on the host to the working directory in the container
-COPY . .
-
-RUN dotnet restore
-RUN dotnet build -c Release --no-restore
-RUN dotnet publish -c Release --no-build -o /app -f net10.0
-
-# Run the application
-FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
-WORKDIR /app
-
-# Copy everything needed to run the app from the "build" stage.
-COPY --from=build /app .
-
-EXPOSE 8088
-ENTRYPOINT ["dotnet", "AgentThreadAndHITL.dll"]
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Program.cs b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Program.cs
deleted file mode 100644
index fee781d660..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/Program.cs
+++ /dev/null
@@ -1,41 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-// This sample demonstrates Human-in-the-Loop (HITL) capabilities with thread persistence.
-// The agent wraps function tools with ApprovalRequiredAIFunction to require user approval
-// before invoking them. Users respond with 'approve' or 'reject' when prompted.
-
-using System.ComponentModel;
-using Azure.AI.AgentServer.AgentFramework.Extensions;
-using Azure.AI.AgentServer.AgentFramework.Persistence;
-using Azure.AI.OpenAI;
-using Azure.Identity;
-using Microsoft.Agents.AI;
-using Microsoft.Extensions.AI;
-using OpenAI.Chat;
-
-string endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-string deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-5.4-mini";
-
-[Description("Get the weather for a given location.")]
-static string GetWeather([Description("The location to get the weather for.")] string location)
- => $"The weather in {location} is cloudy with a high of 15°C.";
-
-// Create the chat client and agent.
-// Note: ApprovalRequiredAIFunction wraps the tool to require user approval before invocation.
-// User should reply with 'approve' or 'reject' when prompted.
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-#pragma warning disable MEAI001 // Type is for evaluation purposes only
-AIAgent agent = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName)
- .AsAIAgent(
- instructions: "You are a helpful assistant",
- tools: [new ApprovalRequiredAIFunction(AIFunctionFactory.Create(GetWeather))]
- );
-#pragma warning restore MEAI001
-
-InMemoryAgentThreadRepository threadRepository = new(agent);
-await agent.RunAIAgentAsync(telemetrySourceName: "Agents", threadRepository: threadRepository);
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/README.md
deleted file mode 100644
index 465dfacbf0..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/README.md
+++ /dev/null
@@ -1,46 +0,0 @@
-# What this sample demonstrates
-
-This sample demonstrates Human-in-the-Loop (HITL) capabilities with thread persistence. The agent wraps function tools with `ApprovalRequiredAIFunction` so that every tool invocation requires explicit user approval before execution. Thread state is maintained across requests using `InMemoryAgentThreadRepository`.
-
-Key features:
-- Requiring human approval before executing function calls
-- Persisting conversation threads across multiple requests
-- Approving or rejecting tool invocations at runtime
-
-> For common prerequisites and setup instructions, see the [Hosted Agent Samples README](../README.md).
-
-## Prerequisites
-
-Before running this sample, ensure you have:
-
-1. .NET 10 SDK installed
-2. An Azure OpenAI endpoint configured
-3. A deployment of a chat model (e.g., gpt-5.4-mini)
-4. Azure CLI installed and authenticated (`az login`)
-
-## Environment Variables
-
-Set the following environment variables:
-
-```powershell
-# Replace with your Azure OpenAI endpoint
-$env:AZURE_OPENAI_ENDPOINT="https://your-openai-resource.openai.azure.com/"
-
-# Optional, defaults to gpt-5.4-mini
-$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-5.4-mini"
-```
-
-## How It Works
-
-The sample uses `ApprovalRequiredAIFunction` to wrap standard AI function tools. When the model decides to call a tool, the wrapper intercepts the invocation and returns a HITL approval request to the caller instead of executing the function immediately.
-
-1. The user sends a message (e.g., "What is the weather in Vancouver?")
-2. The model determines a function call is needed and selects the `GetWeather` tool
-3. `ApprovalRequiredAIFunction` intercepts the call and returns an approval request containing the function name and arguments
-4. The user responds with `approve` or `reject`
-5. If approved, the function executes and the model generates a response using the result
-6. If rejected, the model generates a response without the function result
-
-Thread persistence is handled by `InMemoryAgentThreadRepository`, which stores conversation history keyed by `conversation.id`. This means the HITL flow works across multiple HTTP requests as long as each request includes the same `conversation.id`.
-
-> **Note:** HITL requires a stable `conversation.id` in every request so the agent can correlate the approval response with the original function call. Use the `run-requests.http` file in this directory to test the full approval flow.
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/agent.yaml b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/agent.yaml
deleted file mode 100644
index c7e67b3d4e..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/agent.yaml
+++ /dev/null
@@ -1,28 +0,0 @@
-name: AgentThreadAndHITL
-displayName: "Weather Assistant Agent"
-description: >
- A Weather Assistant Agent that provides weather information and forecasts. It
- demonstrates how to use Azure AI AgentServer with Human-in-the-Loop (HITL)
- capabilities to get human approval for functional calls.
-metadata:
- authors:
- - Microsoft Agent Framework Team
- tags:
- - Azure AI AgentServer
- - Microsoft Agent Framework
- - Human-in-the-Loop
-template:
- kind: hosted
- name: AgentThreadAndHITL
- protocols:
- - protocol: responses
- version: v1
- environment_variables:
- - name: AZURE_OPENAI_ENDPOINT
- value: ${AZURE_OPENAI_ENDPOINT}
- - name: AZURE_OPENAI_DEPLOYMENT_NAME
- value: gpt-5.4-mini
-resources:
- - name: "gpt-5.4-mini"
- kind: model
- id: gpt-5.4-mini
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/run-requests.http b/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/run-requests.http
deleted file mode 100644
index 196a30a542..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentThreadAndHITL/run-requests.http
+++ /dev/null
@@ -1,70 +0,0 @@
-@host = http://localhost:8088
-@endpoint = {{host}}/responses
-
-### Health Check
-GET {{host}}/readiness
-
-###
-# HITL (Human-in-the-Loop) Flow
-#
-# This sample requires a multi-turn conversation to demonstrate the approval flow:
-# 1. Send a request that triggers a tool call (e.g., asking about the weather)
-# 2. The agent responds with a function_call named "__hosted_agent_adapter_hitl__"
-# containing the call_id and the tool details
-# 3. Send a follow-up request with a function_call_output to approve or reject
-#
-# IMPORTANT: You must use the same conversation.id across all requests in a flow,
-# and update the call_id from step 2 into step 3.
-###
-
-### Step 1: Send initial request (triggers HITL approval)
-# @name initialRequest
-POST {{endpoint}}
-Content-Type: application/json
-
-{
- "input": "What is the weather like in Vancouver?",
- "stream": false,
- "conversation": {
- "id": "conv_test0000000000000000000000000000000000000000000000"
- }
-}
-
-### Step 2: Approve the function call
-# Copy the call_id from the Step 1 response output and replace below.
-# The response will contain: "name": "__hosted_agent_adapter_hitl__" with a "call_id" value.
-POST {{endpoint}}
-Content-Type: application/json
-
-{
- "input": [
- {
- "type": "function_call_output",
- "call_id": "REPLACE_WITH_CALL_ID_FROM_STEP_1",
- "output": "approve"
- }
- ],
- "stream": false,
- "conversation": {
- "id": "conv_test0000000000000000000000000000000000000000000000"
- }
-}
-
-### Step 3 (alternative): Reject the function call
-# Use this instead of Step 2 to deny the tool execution.
-POST {{endpoint}}
-Content-Type: application/json
-
-{
- "input": [
- {
- "type": "function_call_output",
- "call_id": "REPLACE_WITH_CALL_ID_FROM_STEP_1",
- "output": "reject"
- }
- ],
- "stream": false,
- "conversation": {
- "id": "conv_test0000000000000000000000000000000000000000000000"
- }
-}
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/AgentWithHostedMCP.csproj b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/AgentWithHostedMCP.csproj
deleted file mode 100644
index 4e46f10c11..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/AgentWithHostedMCP.csproj
+++ /dev/null
@@ -1,68 +0,0 @@
-
-
-
- Exe
- net10.0
-
- enable
- enable
-
-
- false
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
-
-
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/Dockerfile b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/Dockerfile
deleted file mode 100644
index a2590fc112..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/Dockerfile
+++ /dev/null
@@ -1,20 +0,0 @@
-# Build the application
-FROM mcr.microsoft.com/dotnet/sdk:10.0-alpine AS build
-WORKDIR /src
-
-# Copy files from the current directory on the host to the working directory in the container
-COPY . .
-
-RUN dotnet restore
-RUN dotnet build -c Release --no-restore
-RUN dotnet publish -c Release --no-build -o /app -f net10.0
-
-# Run the application
-FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
-WORKDIR /app
-
-# Copy everything needed to run the app from the "build" stage.
-COPY --from=build /app .
-
-EXPOSE 8088
-ENTRYPOINT ["dotnet", "AgentWithHostedMCP.dll"]
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/Program.cs b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/Program.cs
deleted file mode 100644
index 4178946604..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/Program.cs
+++ /dev/null
@@ -1,40 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-// This sample shows how to create and use a simple AI agent with OpenAI Responses as the backend, that uses a Hosted MCP Tool.
-// In this case the OpenAI responses service will invoke any MCP tools as required. MCP tools are not invoked by the Agent Framework.
-// The sample demonstrates how to use MCP tools with auto approval by setting ApprovalMode to NeverRequire.
-
-#pragma warning disable MEAI001 // HostedMcpServerTool, HostedMcpServerToolApprovalMode are experimental
-#pragma warning disable OPENAI001 // GetResponsesClient is experimental
-
-using Azure.AI.AgentServer.AgentFramework.Extensions;
-using Azure.AI.OpenAI;
-using Azure.Identity;
-using Microsoft.Agents.AI;
-using Microsoft.Extensions.AI;
-using OpenAI.Responses;
-
-string endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-string deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-5.4-mini";
-
-// Create an MCP tool that can be called without approval.
-AITool mcpTool = new HostedMcpServerTool(serverName: "microsoft_learn", serverAddress: "https://learn.microsoft.com/api/mcp")
-{
- AllowedTools = ["microsoft_docs_search"],
- ApprovalMode = HostedMcpServerToolApprovalMode.NeverRequire
-};
-
-// Create an agent with the MCP tool using Azure OpenAI Responses.
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIAgent agent = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetResponsesClient(deploymentName)
- .AsAIAgent(
- instructions: "You answer questions by searching the Microsoft Learn content only.",
- name: "MicrosoftLearnAgent",
- tools: [mcpTool]);
-
-await agent.RunAIAgentAsync();
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/README.md
deleted file mode 100644
index dc0718dfa7..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/README.md
+++ /dev/null
@@ -1,45 +0,0 @@
-# What this sample demonstrates
-
-This sample demonstrates how to use a Hosted Model Context Protocol (MCP) server with an AI agent.
-The agent connects to the Microsoft Learn MCP server to search documentation and answer questions using official Microsoft content.
-
-Key features:
-- Configuring MCP tools with automatic approval (no user confirmation required)
-- Filtering available tools from an MCP server
-- Using Azure OpenAI Responses with MCP tools
-
-> For common prerequisites and setup instructions, see the [Hosted Agent Samples README](../README.md).
-
-## Prerequisites
-
-Before running this sample, ensure you have:
-
-1. An Azure OpenAI endpoint configured
-2. A deployment of a chat model (e.g., gpt-5.4-mini)
-3. Azure CLI installed and authenticated
-
-**Note**: This sample uses `DefaultAzureCredential` for authentication, which probes multiple sources automatically. For local development, make sure you're logged in with `az login` and have access to the Azure OpenAI resource.
-
-## Environment Variables
-
-Set the following environment variables:
-
-```powershell
-# Replace with your Azure OpenAI endpoint
-$env:AZURE_OPENAI_ENDPOINT="https://your-openai-resource.openai.azure.com/"
-
-# Optional, defaults to gpt-5.4-mini
-$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-5.4-mini"
-```
-
-## How It Works
-
-The sample connects to the Microsoft Learn MCP server and uses its documentation search capabilities:
-
-1. The agent is configured with a HostedMcpServerTool pointing to `https://learn.microsoft.com/api/mcp`
-2. Only the `microsoft_docs_search` tool is enabled from the available MCP tools
-3. Approval mode is set to `NeverRequire`, allowing automatic tool execution
-4. When you ask questions, Azure OpenAI Responses automatically invokes the MCP tool to search documentation
-5. The agent returns answers based on the Microsoft Learn content
-
-In this configuration, the OpenAI Responses service manages tool invocation directly - the Agent Framework does not handle MCP tool calls.
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/agent.yaml b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/agent.yaml
deleted file mode 100644
index 7c02acb02a..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/agent.yaml
+++ /dev/null
@@ -1,31 +0,0 @@
-name: AgentWithHostedMCP
-displayName: "Microsoft Learn Response Agent with MCP"
-description: >
- An AI agent that uses Azure OpenAI Responses with a Hosted Model Context Protocol (MCP) server.
- The agent answers questions by searching Microsoft Learn documentation using MCP tools.
- This demonstrates how MCP tools can be integrated with Azure OpenAI Responses where the service
- itself handles tool invocation.
-metadata:
- authors:
- - Microsoft Agent Framework Team
- tags:
- - Azure AI AgentServer
- - Microsoft Agent Framework
- - Model Context Protocol
- - MCP
- - Tool Call Approval
-template:
- kind: hosted
- name: AgentWithHostedMCP
- protocols:
- - protocol: responses
- version: v1
- environment_variables:
- - name: AZURE_OPENAI_ENDPOINT
- value: ${AZURE_OPENAI_ENDPOINT}
- - name: AZURE_OPENAI_DEPLOYMENT_NAME
- value: gpt-5.4-mini
-resources:
- - name: "gpt-5.4-mini"
- kind: model
- id: gpt-5.4-mini
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/run-requests.http b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/run-requests.http
deleted file mode 100644
index b7c0b35efd..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithHostedMCP/run-requests.http
+++ /dev/null
@@ -1,32 +0,0 @@
-@host = http://localhost:8088
-@endpoint = {{host}}/responses
-
-### Health Check
-GET {{host}}/readiness
-
-### Simple string input - Ask about MCP Tools
-POST {{endpoint}}
-Content-Type: application/json
-
-{
- "input": "Please summarize the Azure AI Agent documentation related to MCP Tool calling?"
-}
-
-### Explicit input - Ask about Agent Framework
-POST {{endpoint}}
-Content-Type: application/json
-
-{
- "input": [
- {
- "type": "message",
- "role": "user",
- "content": [
- {
- "type": "input_text",
- "text": "What is the Microsoft Agent Framework?"
- }
- ]
- }
- ]
-}
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/.dockerignore b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/.dockerignore
deleted file mode 100644
index 2afa2c2601..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/.dockerignore
+++ /dev/null
@@ -1,24 +0,0 @@
-**/.dockerignore
-**/.env
-**/.git
-**/.gitignore
-**/.project
-**/.settings
-**/.toolstarget
-**/.vs
-**/.vscode
-**/*.*proj.user
-**/*.dbmdl
-**/*.jfm
-**/azds.yaml
-**/bin
-**/charts
-**/docker-compose*
-**/Dockerfile*
-**/node_modules
-**/npm-debug.log
-**/obj
-**/secrets.dev.yaml
-**/values.dev.yaml
-LICENSE
-README.md
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/AgentWithLocalTools.csproj b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/AgentWithLocalTools.csproj
deleted file mode 100644
index b7970f8c5f..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/AgentWithLocalTools.csproj
+++ /dev/null
@@ -1,70 +0,0 @@
-
-
-
- Exe
- net10.0
-
- enable
- enable
- true
-
-
- false
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
-
-
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Dockerfile b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Dockerfile
deleted file mode 100644
index c2461965a4..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Dockerfile
+++ /dev/null
@@ -1,20 +0,0 @@
-# Build the application
-FROM mcr.microsoft.com/dotnet/sdk:10.0-alpine AS build
-WORKDIR /src
-
-# Copy files from the current directory on the host to the working directory in the container
-COPY . .
-
-RUN dotnet restore
-RUN dotnet build -c Release --no-restore
-RUN dotnet publish -c Release --no-build -o /app -f net10.0
-
-# Run the application
-FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
-WORKDIR /app
-
-# Copy everything needed to run the app from the "build" stage.
-COPY --from=build /app .
-
-EXPOSE 8088
-ENTRYPOINT ["dotnet", "AgentWithLocalTools.dll"]
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Program.cs b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Program.cs
deleted file mode 100644
index 0da7c57b12..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/Program.cs
+++ /dev/null
@@ -1,132 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-// Seattle Hotel Agent - A simple agent with a tool to find hotels in Seattle.
-// Uses Microsoft Agent Framework with Microsoft Foundry.
-// Ready for deployment to Foundry Hosted Agent service.
-
-using System.ClientModel.Primitives;
-using System.ComponentModel;
-using System.Globalization;
-using System.Text;
-using Azure.AI.AgentServer.AgentFramework.Extensions;
-using Azure.AI.OpenAI;
-using Azure.AI.Projects;
-using Azure.Identity;
-using Microsoft.Agents.AI;
-using Microsoft.Extensions.AI;
-
-string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT")
- ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
-string deploymentName = Environment.GetEnvironmentVariable("MODEL_DEPLOYMENT_NAME") ?? "gpt-5.4-mini";
-Console.WriteLine($"Project Endpoint: {endpoint}");
-Console.WriteLine($"Model Deployment: {deploymentName}");
-
-Hotel[] seattleHotels =
-[
- new Hotel("Contoso Suites", 189, 4.5, "Downtown"),
- new Hotel("Fabrikam Residences", 159, 4.2, "Pike Place Market"),
- new Hotel("Alpine Ski House", 249, 4.7, "Seattle Center"),
- new Hotel("Margie's Travel Lodge", 219, 4.4, "Waterfront"),
- new Hotel("Northwind Inn", 139, 4.0, "Capitol Hill"),
- new Hotel("Relecloud Hotel", 99, 3.8, "University District"),
-];
-
-[Description("Get available hotels in Seattle for the specified dates. This simulates a call to a hotel availability API.")]
-string GetAvailableHotels(
- [Description("Check-in date in YYYY-MM-DD format")] string checkInDate,
- [Description("Check-out date in YYYY-MM-DD format")] string checkOutDate,
- [Description("Maximum price per night in USD (optional, defaults to 500)")] int maxPrice = 500)
-{
- try
- {
- if (!DateTime.TryParseExact(checkInDate, "yyyy-MM-dd", CultureInfo.InvariantCulture, DateTimeStyles.None, out var checkIn))
- {
- return "Error parsing check-in date. Please use YYYY-MM-DD format.";
- }
-
- if (!DateTime.TryParseExact(checkOutDate, "yyyy-MM-dd", CultureInfo.InvariantCulture, DateTimeStyles.None, out var checkOut))
- {
- return "Error parsing check-out date. Please use YYYY-MM-DD format.";
- }
-
- if (checkOut <= checkIn)
- {
- return "Error: Check-out date must be after check-in date.";
- }
-
- int nights = (checkOut - checkIn).Days;
- List availableHotels = seattleHotels.Where(h => h.PricePerNight <= maxPrice).ToList();
-
- if (availableHotels.Count == 0)
- {
- return $"No hotels found in Seattle within your budget of ${maxPrice}/night.";
- }
-
- StringBuilder result = new();
- result.AppendLine($"Available hotels in Seattle from {checkInDate} to {checkOutDate} ({nights} nights):");
- result.AppendLine();
-
- foreach (Hotel hotel in availableHotels)
- {
- int totalCost = hotel.PricePerNight * nights;
- result.AppendLine($"**{hotel.Name}**");
- result.AppendLine($" Location: {hotel.Location}");
- result.AppendLine($" Rating: {hotel.Rating}/5");
- result.AppendLine($" ${hotel.PricePerNight}/night (Total: ${totalCost})");
- result.AppendLine();
- }
-
- return result.ToString();
- }
- catch (Exception ex)
- {
- return $"Error processing request. Details: {ex.Message}";
- }
-}
-
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-DefaultAzureCredential credential = new();
-AIProjectClient projectClient = new(new Uri(endpoint), credential);
-
-ClientConnection connection = projectClient.GetConnection(typeof(AzureOpenAIClient).FullName!);
-
-if (!connection.TryGetLocatorAsUri(out Uri? openAiEndpoint) || openAiEndpoint is null)
-{
- throw new InvalidOperationException("Failed to get OpenAI endpoint from project connection.");
-}
-openAiEndpoint = new Uri($"https://{openAiEndpoint.Host}");
-Console.WriteLine($"OpenAI Endpoint: {openAiEndpoint}");
-
-IChatClient chatClient = new AzureOpenAIClient(openAiEndpoint, credential)
- .GetChatClient(deploymentName)
- .AsIChatClient()
- .AsBuilder()
- .UseOpenTelemetry(sourceName: "Agents", configure: cfg => cfg.EnableSensitiveData = false)
- .Build();
-
-AIAgent agent = chatClient.AsAIAgent(
- name: "SeattleHotelAgent",
- instructions: """
- You are a helpful travel assistant specializing in finding hotels in Seattle, Washington.
-
- When a user asks about hotels in Seattle:
- 1. Ask for their check-in and check-out dates if not provided
- 2. Ask about their budget preferences if not mentioned
- 3. Use the GetAvailableHotels tool to find available options
- 4. Present the results in a friendly, informative way
- 5. Offer to help with additional questions about the hotels or Seattle
-
- Be conversational and helpful. If users ask about things outside of Seattle hotels,
- politely let them know you specialize in Seattle hotel recommendations.
- """,
- tools: [AIFunctionFactory.Create(GetAvailableHotels)])
- .AsBuilder()
- .UseOpenTelemetry(sourceName: "Agents", configure: cfg => cfg.EnableSensitiveData = false)
- .Build();
-
-Console.WriteLine("Seattle Hotel Agent Server running on http://localhost:8088");
-await agent.RunAIAgentAsync(telemetrySourceName: "Agents");
-
-internal sealed record Hotel(string Name, int PricePerNight, double Rating, string Location);
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/README.md
deleted file mode 100644
index aba51898ec..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/README.md
+++ /dev/null
@@ -1,39 +0,0 @@
-# What this sample demonstrates
-
-This sample demonstrates how to build a hosted agent that uses local C# function tools — a key advantage of code-based hosted agents over prompt agents. The agent acts as a Seattle travel assistant with a `GetAvailableHotels` tool that simulates querying a hotel availability API.
-
-Key features:
-- Defining local C# functions as agent tools using `AIFunctionFactory`
-- Using `AIProjectClient` to discover the OpenAI connection from the Microsoft Foundry project
-- Building a `ChatClientAgent` with custom instructions and tools
-- Deploying to the Foundry Hosted Agent service
-
-> For common prerequisites and setup instructions, see the [Hosted Agent Samples README](../README.md).
-
-## Prerequisites
-
-Before running this sample, ensure you have:
-
-1. .NET 10 SDK installed
-2. A Microsoft Foundry Project with a chat model deployed (e.g., gpt-5.4-mini)
-3. Azure CLI installed and authenticated (`az login`)
-
-## Environment Variables
-
-Set the following environment variables:
-
-```powershell
-# Replace with your Microsoft Foundry project endpoint
-$env:AZURE_AI_PROJECT_ENDPOINT="https://your-project.services.ai.azure.com/api/projects/your-project-name"
-
-# Optional, defaults to gpt-5.4-mini
-$env:MODEL_DEPLOYMENT_NAME="gpt-5.4-mini"
-```
-
-## How It Works
-
-1. The agent uses `AIProjectClient` to discover the Azure OpenAI connection from the project endpoint
-2. A local C# function `GetAvailableHotels` is registered as a tool using `AIFunctionFactory.Create`
-3. When users ask about hotels, the model invokes the local tool to search simulated hotel data
-4. The tool filters hotels by price and calculates total costs based on the requested dates
-5. Results are returned to the model, which presents them in a conversational format
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/agent.yaml b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/agent.yaml
deleted file mode 100644
index 7e75a738ce..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/agent.yaml
+++ /dev/null
@@ -1,29 +0,0 @@
-name: seattle-hotel-agent
-description: >
- A travel assistant agent that helps users find hotels in Seattle.
- Demonstrates local C# tool execution - a key advantage of code-based
- hosted agents over prompt agents.
-metadata:
- authors:
- - Microsoft
- tags:
- - Azure AI AgentServer
- - Microsoft Agent Framework
- - Local Tools
- - Travel Assistant
- - Hotel Search
-template:
- name: seattle-hotel-agent
- kind: hosted
- protocols:
- - protocol: responses
- version: v1
- environment_variables:
- - name: AZURE_AI_PROJECT_ENDPOINT
- value: ${AZURE_AI_PROJECT_ENDPOINT}
- - name: MODEL_DEPLOYMENT_NAME
- value: gpt-5.4-mini
-resources:
- - kind: model
- id: gpt-5.4-mini
- name: chat
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/run-requests.http b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/run-requests.http
deleted file mode 100644
index 4f2e87e097..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithLocalTools/run-requests.http
+++ /dev/null
@@ -1,52 +0,0 @@
-@host = http://localhost:8088
-@endpoint = {{host}}/responses
-
-### Health Check
-GET {{host}}/readiness
-
-### Simple hotel search - budget under $200
-POST {{endpoint}}
-Content-Type: application/json
-
-{
- "input": "I need a hotel in Seattle from 2025-03-15 to 2025-03-18, budget under $200 per night",
- "stream": false
-}
-
-### Hotel search with higher budget
-POST {{endpoint}}
-Content-Type: application/json
-
-{
- "input": "Find me hotels in Seattle for March 20-23, 2025 under $250 per night",
- "stream": false
-}
-
-### Ask for recommendations without dates (agent should ask for clarification)
-POST {{endpoint}}
-Content-Type: application/json
-
-{
- "input": "What hotels do you recommend in Seattle?",
- "stream": false
-}
-
-### Explicit input format
-POST {{endpoint}}
-Content-Type: application/json
-
-{
- "input": [
- {
- "type": "message",
- "role": "user",
- "content": [
- {
- "type": "input_text",
- "text": "I'm looking for a hotel in Seattle from 2025-04-01 to 2025-04-05, my budget is $150 per night maximum"
- }
- ]
- }
- ],
- "stream": false
-}
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/AgentWithTextSearchRag.csproj b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/AgentWithTextSearchRag.csproj
deleted file mode 100644
index 7789abd315..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/AgentWithTextSearchRag.csproj
+++ /dev/null
@@ -1,68 +0,0 @@
-
-
-
- Exe
- net10.0
-
- enable
- enable
-
-
- false
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
-
-
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/Dockerfile b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/Dockerfile
deleted file mode 100644
index 3d944c9883..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/Dockerfile
+++ /dev/null
@@ -1,20 +0,0 @@
-# Build the application
-FROM mcr.microsoft.com/dotnet/sdk:10.0-alpine AS build
-WORKDIR /src
-
-# Copy files from the current directory on the host to the working directory in the container
-COPY . .
-
-RUN dotnet restore
-RUN dotnet build -c Release --no-restore
-RUN dotnet publish -c Release --no-build -o /app -f net10.0
-
-# Run the application
-FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
-WORKDIR /app
-
-# Copy everything needed to run the app from the "build" stage.
-COPY --from=build /app .
-
-EXPOSE 8088
-ENTRYPOINT ["dotnet", "AgentWithTextSearchRag.dll"]
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/Program.cs b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/Program.cs
deleted file mode 100644
index 518ce5679f..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/Program.cs
+++ /dev/null
@@ -1,79 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-// This sample shows how to use TextSearchProvider to add retrieval augmented generation (RAG)
-// capabilities to an AI agent. The provider runs a search against an external knowledge base
-// before each model invocation and injects the results into the model context.
-
-using Azure.AI.AgentServer.AgentFramework.Extensions;
-using Azure.AI.OpenAI;
-using Azure.Identity;
-using Microsoft.Agents.AI;
-using Microsoft.Extensions.AI;
-using OpenAI.Chat;
-
-string endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-string deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-5.4-mini";
-
-TextSearchProviderOptions textSearchOptions = new()
-{
- // Run the search prior to every model invocation and keep a short rolling window of conversation context.
- SearchTime = TextSearchProviderOptions.TextSearchBehavior.BeforeAIInvoke,
- RecentMessageMemoryLimit = 6,
-};
-
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIAgent agent = new AzureOpenAIClient(
- new Uri(endpoint),
- new DefaultAzureCredential())
- .GetChatClient(deploymentName)
- .AsAIAgent(new ChatClientAgentOptions
- {
- ChatOptions = new ChatOptions
- {
- Instructions = "You are a helpful support specialist for Contoso Outdoors. Answer questions using the provided context and cite the source document when available.",
- },
- AIContextProviders = [new TextSearchProvider(MockSearchAsync, textSearchOptions)]
- });
-
-await agent.RunAIAgentAsync();
-
-static Task> MockSearchAsync(string query, CancellationToken cancellationToken)
-{
- // The mock search inspects the user's question and returns pre-defined snippets
- // that resemble documents stored in an external knowledge source.
- List results = [];
-
- if (query.Contains("return", StringComparison.OrdinalIgnoreCase) || query.Contains("refund", StringComparison.OrdinalIgnoreCase))
- {
- results.Add(new()
- {
- SourceName = "Contoso Outdoors Return Policy",
- SourceLink = "https://contoso.com/policies/returns",
- Text = "Customers may return any item within 30 days of delivery. Items should be unused and include original packaging. Refunds are issued to the original payment method within 5 business days of inspection."
- });
- }
-
- if (query.Contains("shipping", StringComparison.OrdinalIgnoreCase))
- {
- results.Add(new()
- {
- SourceName = "Contoso Outdoors Shipping Guide",
- SourceLink = "https://contoso.com/help/shipping",
- Text = "Standard shipping is free on orders over $50 and typically arrives in 3-5 business days within the continental United States. Expedited options are available at checkout."
- });
- }
-
- if (query.Contains("tent", StringComparison.OrdinalIgnoreCase) || query.Contains("fabric", StringComparison.OrdinalIgnoreCase))
- {
- results.Add(new()
- {
- SourceName = "TrailRunner Tent Care Instructions",
- SourceLink = "https://contoso.com/manuals/trailrunner-tent",
- Text = "Clean the tent fabric with lukewarm water and a non-detergent soap. Allow it to air dry completely before storage and avoid prolonged UV exposure to extend the lifespan of the waterproof coating."
- });
- }
-
- return Task.FromResult>(results);
-}
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/README.md
deleted file mode 100644
index b62d9068ce..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/README.md
+++ /dev/null
@@ -1,43 +0,0 @@
-# What this sample demonstrates
-
-This sample demonstrates how to use TextSearchProvider to add retrieval augmented generation (RAG) capabilities to an AI agent. The provider runs a search against an external knowledge base before each model invocation and injects the results into the model context.
-
-Key features:
-- Configuring TextSearchProvider with custom search behavior
-- Running searches before AI invocations to provide relevant context
-- Managing conversation memory with a rolling window approach
-- Citing source documents in AI responses
-
-> For common prerequisites and setup instructions, see the [Hosted Agent Samples README](../README.md).
-
-## Prerequisites
-
-Before running this sample, ensure you have:
-
-1. An Azure OpenAI endpoint configured
-2. A deployment of a chat model (e.g., gpt-5.4-mini)
-3. Azure CLI installed and authenticated
-
-## Environment Variables
-
-Set the following environment variables:
-
-```powershell
-# Replace with your Azure OpenAI endpoint
-$env:AZURE_OPENAI_ENDPOINT="https://your-openai-resource.openai.azure.com/"
-
-# Optional, defaults to gpt-5.4-mini
-$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-5.4-mini"
-```
-
-## How It Works
-
-The sample uses a mock search function that demonstrates the RAG pattern:
-
-1. When the user asks a question, the TextSearchProvider intercepts it
-2. The search function looks for relevant documents based on the query
-3. Retrieved documents are injected into the model's context
-4. The AI responds using both its training and the provided context
-5. The agent can cite specific source documents in its answers
-
-The mock search function returns pre-defined snippets for demonstration purposes. In a production scenario, you would replace this with actual searches against your knowledge base (e.g., Azure AI Search, vector database, etc.).
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/agent.yaml b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/agent.yaml
deleted file mode 100644
index 6cdad09e9c..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/agent.yaml
+++ /dev/null
@@ -1,31 +0,0 @@
-name: AgentWithTextSearchRag
-displayName: "Text Search RAG Agent"
-description: >
- An AI agent that uses TextSearchProvider for retrieval augmented generation (RAG) capabilities.
- The agent runs searches against an external knowledge base before each model invocation and
- injects the results into the model context. It can answer questions about Contoso Outdoors
- policies and products, including return policies, refunds, shipping options, and product care
- instructions such as tent maintenance.
-metadata:
- authors:
- - Microsoft Agent Framework Team
- tags:
- - Azure AI AgentServer
- - Microsoft Agent Framework
- - Retrieval-Augmented Generation
- - RAG
-template:
- kind: hosted
- name: AgentWithTextSearchRag
- protocols:
- - protocol: responses
- version: v1
- environment_variables:
- - name: AZURE_OPENAI_ENDPOINT
- value: ${AZURE_OPENAI_ENDPOINT}
- - name: AZURE_OPENAI_DEPLOYMENT_NAME
- value: gpt-5.4-mini
-resources:
- - name: "gpt-5.4-mini"
- kind: model
- id: gpt-5.4-mini
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/run-requests.http b/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/run-requests.http
deleted file mode 100644
index 4bfb02d8f8..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentWithTextSearchRag/run-requests.http
+++ /dev/null
@@ -1,30 +0,0 @@
-@host = http://localhost:8088
-@endpoint = {{host}}/responses
-
-### Health Check
-GET {{host}}/readiness
-
-### Simple string input
-POST {{endpoint}}
-Content-Type: application/json
-{
- "input": "Hi! I need help understanding the return policy."
-}
-
-### Explicit input
-POST {{endpoint}}
-Content-Type: application/json
-{
- "input": [
- {
- "type": "message",
- "role": "user",
- "content": [
- {
- "type": "input_text",
- "text": "How long does standard shipping usually take?"
- }
- ]
- }
- ]
-}
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/AgentsInWorkflows.csproj b/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/AgentsInWorkflows.csproj
deleted file mode 100644
index 7789abd315..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/AgentsInWorkflows.csproj
+++ /dev/null
@@ -1,68 +0,0 @@
-
-
-
- Exe
- net10.0
-
- enable
- enable
-
-
- false
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
-
-
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/Dockerfile b/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/Dockerfile
deleted file mode 100644
index 86b6c156f3..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/Dockerfile
+++ /dev/null
@@ -1,20 +0,0 @@
-# Build the application
-FROM mcr.microsoft.com/dotnet/sdk:10.0-alpine AS build
-WORKDIR /src
-
-# Copy files from the current directory on the host to the working directory in the container
-COPY . .
-
-RUN dotnet restore
-RUN dotnet build -c Release --no-restore
-RUN dotnet publish -c Release --no-build -o /app -f net10.0
-
-# Run the application
-FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
-WORKDIR /app
-
-# Copy everything needed to run the app from the "build" stage.
-COPY --from=build /app .
-
-EXPOSE 8088
-ENTRYPOINT ["dotnet", "AgentsInWorkflows.dll"]
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/Program.cs b/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/Program.cs
deleted file mode 100644
index 886e205acf..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/Program.cs
+++ /dev/null
@@ -1,40 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-// This sample demonstrates how to integrate AI agents into a workflow pipeline.
-// Three translation agents are connected sequentially to create a translation chain:
-// English → French → Spanish → English, showing how agents can be composed as workflow executors.
-
-using Azure.AI.AgentServer.AgentFramework.Extensions;
-using Azure.AI.OpenAI;
-using Azure.Identity;
-using Microsoft.Agents.AI;
-using Microsoft.Agents.AI.Workflows;
-using Microsoft.Extensions.AI;
-
-// Set up the Azure OpenAI client
-string endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
-string deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-5.4-mini";
-
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-IChatClient chatClient = new AzureOpenAIClient(new Uri(endpoint), new DefaultAzureCredential())
- .GetChatClient(deploymentName)
- .AsIChatClient();
-
-// Create agents
-AIAgent frenchAgent = GetTranslationAgent("French", chatClient);
-AIAgent spanishAgent = GetTranslationAgent("Spanish", chatClient);
-AIAgent englishAgent = GetTranslationAgent("English", chatClient);
-
-// Build the workflow and turn it into an agent
-AIAgent agent = new WorkflowBuilder(frenchAgent)
- .AddEdge(frenchAgent, spanishAgent)
- .AddEdge(spanishAgent, englishAgent)
- .Build()
- .AsAIAgent();
-
-await agent.RunAIAgentAsync();
-
-static AIAgent GetTranslationAgent(string targetLanguage, IChatClient chatClient) =>
- chatClient.AsAIAgent($"You are a translation assistant that translates the provided text to {targetLanguage}.");
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/README.md b/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/README.md
deleted file mode 100644
index b7a2f9ca53..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/README.md
+++ /dev/null
@@ -1,28 +0,0 @@
-# What this sample demonstrates
-
-This sample demonstrates the use of AI agents as executors within a workflow.
-
-This workflow uses three translation agents:
-1. French Agent - translates input text to French
-2. Spanish Agent - translates French text to Spanish
-3. English Agent - translates Spanish text back to English
-
-The agents are connected sequentially, creating a translation chain that demonstrates how AI-powered components can be seamlessly integrated into workflow pipelines.
-
-> For common prerequisites and setup instructions, see the [Hosted Agent Samples README](../README.md).
-
-## Prerequisites
-
-Before you begin, ensure you have the following prerequisites:
-
-- .NET 10 SDK or later
-- Azure OpenAI service endpoint and deployment configured
-- Azure CLI installed and authenticated (for Azure credential authentication)
-
-**Note**: This demo uses `DefaultAzureCredential` for authentication, which probes multiple sources automatically. For local development, make sure you're logged in with `az login` and have access to the Azure OpenAI resource. For more information, see the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli-interactively).
-
-Set the following environment variables:
-
-```powershell
-$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" # Replace with your Azure OpenAI resource endpoint
-$env:AZURE_OPENAI_DEPLOYMENT_NAME="gpt-5.4-mini" # Optional, defaults to gpt-5.4-mini
\ No newline at end of file
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/agent.yaml b/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/agent.yaml
deleted file mode 100644
index 3c97fa2ac1..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/agent.yaml
+++ /dev/null
@@ -1,28 +0,0 @@
-name: AgentsInWorkflows
-displayName: "Translation Chain Workflow Agent"
-description: >
- A workflow agent that performs sequential translation through multiple languages.
- The agent translates text from English to French, then to Spanish, and finally back
- to English, leveraging AI-powered translation capabilities in a pipeline workflow.
-metadata:
- authors:
- - Microsoft Agent Framework Team
- tags:
- - Azure AI AgentServer
- - Microsoft Agent Framework
- - Workflows
-template:
- kind: hosted
- name: AgentsInWorkflows
- protocols:
- - protocol: responses
- version: v1
- environment_variables:
- - name: AZURE_OPENAI_ENDPOINT
- value: ${AZURE_OPENAI_ENDPOINT}
- - name: AZURE_OPENAI_DEPLOYMENT_NAME
- value: gpt-5.4-mini
-resources:
- - name: "gpt-5.4-mini"
- kind: model
- id: gpt-5.4-mini
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/run-requests.http b/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/run-requests.http
deleted file mode 100644
index 5c33700a93..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/AgentsInWorkflows/run-requests.http
+++ /dev/null
@@ -1,30 +0,0 @@
-@host = http://localhost:8088
-@endpoint = {{host}}/responses
-
-### Health Check
-GET {{host}}/readiness
-
-### Simple string input
-POST {{endpoint}}
-Content-Type: application/json
-{
- "input": "Hello, how are you today?"
-}
-
-### Explicit input
-POST {{endpoint}}
-Content-Type: application/json
-{
- "input": [
- {
- "type": "message",
- "role": "user",
- "content": [
- {
- "type": "input_text",
- "text": "Hello, how are you today?"
- }
- ]
- }
- ]
-}
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/Dockerfile b/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/Dockerfile
deleted file mode 100644
index fc3d3a1a5b..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/Dockerfile
+++ /dev/null
@@ -1,20 +0,0 @@
-# Build the application
-FROM mcr.microsoft.com/dotnet/sdk:10.0-alpine AS build
-WORKDIR /src
-
-# Copy files from the current directory on the host to the working directory in the container
-COPY . .
-
-RUN dotnet restore
-RUN dotnet build -c Release --no-restore
-RUN dotnet publish -c Release --no-build -o /app -f net10.0
-
-# Run the application
-FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
-WORKDIR /app
-
-# Copy everything needed to run the app from the "build" stage.
-COPY --from=build /app .
-
-EXPOSE 8088
-ENTRYPOINT ["dotnet", "FoundryMultiAgent.dll"]
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/FoundryMultiAgent.csproj b/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/FoundryMultiAgent.csproj
deleted file mode 100644
index e8c7a434b0..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/FoundryMultiAgent.csproj
+++ /dev/null
@@ -1,76 +0,0 @@
-
-
- Exe
- net10.0
- enable
- enable
-
-
- false
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
-
-
-
- PreserveNewest
-
-
-
-
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/Program.cs b/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/Program.cs
deleted file mode 100644
index cc1e3314f0..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/Program.cs
+++ /dev/null
@@ -1,51 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-// This sample demonstrates a multi-agent workflow with Writer and Reviewer agents
-// using Microsoft Foundry AIProjectClient and the Agent Framework WorkflowBuilder.
-
-#pragma warning disable CA2252 // AIProjectClient and Agents API require opting into preview features
-
-using Azure.AI.AgentServer.AgentFramework.Extensions;
-using Azure.AI.Projects;
-using Azure.Identity;
-using Microsoft.Agents.AI;
-using Microsoft.Agents.AI.Workflows;
-
-var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT")
- ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("MODEL_DEPLOYMENT_NAME") ?? "gpt-5.4-mini";
-
-Console.WriteLine($"Using Azure AI endpoint: {endpoint}");
-Console.WriteLine($"Using model deployment: {deploymentName}");
-
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential());
-
-// Create Foundry agents
-AIAgent writerAgent = await aiProjectClient.CreateAIAgentAsync(
- name: "Writer",
- model: deploymentName,
- instructions: "You are an excellent content writer. You create new content and edit contents based on the feedback.");
-
-AIAgent reviewerAgent = await aiProjectClient.CreateAIAgentAsync(
- name: "Reviewer",
- model: deploymentName,
- instructions: "You are an excellent content reviewer. Provide actionable feedback to the writer about the provided content. Provide the feedback in the most concise manner possible.");
-
-try
-{
- var workflow = new WorkflowBuilder(writerAgent)
- .AddEdge(writerAgent, reviewerAgent)
- .Build();
-
- Console.WriteLine("Starting Writer-Reviewer Workflow Agent Server on http://localhost:8088");
- await workflow.AsAIAgent().RunAIAgentAsync();
-}
-finally
-{
- // Cleanup server-side agents
- await aiProjectClient.Agents.DeleteAgentAsync(writerAgent.Name);
- await aiProjectClient.Agents.DeleteAgentAsync(reviewerAgent.Name);
-}
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/README.md b/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/README.md
deleted file mode 100644
index 390df95e20..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/README.md
+++ /dev/null
@@ -1,168 +0,0 @@
-**IMPORTANT!** All samples and other resources made available in this GitHub repository ("samples") are designed to assist in accelerating development of agents, solutions, and agent workflows for various scenarios. Review all provided resources and carefully test output behavior in the context of your use case. AI responses may be inaccurate and AI actions should be monitored with human oversight. Learn more in the transparency documents for [Agent Service](https://learn.microsoft.com/en-us/azure/ai-foundry/responsible-ai/agents/transparency-note) and [Agent Framework](https://github.com/microsoft/agent-framework/blob/main/TRANSPARENCY_FAQ.md).
-
-Agents, solutions, or other output you create may be subject to legal and regulatory requirements, may require licenses, or may not be suitable for all industries, scenarios, or use cases. By using any sample, you are acknowledging that any output created using those samples are solely your responsibility, and that you will comply with all applicable laws, regulations, and relevant safety standards, terms of service, and codes of conduct.
-
-Third-party samples contained in this folder are subject to their own designated terms, and they have not been tested or verified by Microsoft or its affiliates.
-
-Microsoft has no responsibility to you or others with respect to any of these samples or any resulting output.
-
-# What this sample demonstrates
-
-This sample demonstrates a **key advantage of code-based hosted agents**:
-
-- **Multi-agent workflows** - Orchestrate multiple agents working together
-
-Code-based agents can execute **any C# code** you write. This sample includes a Writer-Reviewer workflow where two agents collaborate: a Writer creates content and a Reviewer provides feedback.
-
-The agent is hosted using the [Azure AI AgentServer SDK](https://www.nuget.org/packages/Azure.AI.AgentServer.AgentFramework/) and can be deployed to Microsoft Foundry.
-
-## How It Works
-
-### Multi-Agent Workflow
-
-In [Program.cs](Program.cs), the sample creates two agents using `AIProjectClient.CreateAIAgentAsync()` from the [Microsoft.Agents.AI.AzureAI](https://www.nuget.org/packages/Microsoft.Agents.AI.AzureAI/) package:
-
-- **Writer** - An agent that creates and edits content based on feedback
-- **Reviewer** - An agent that provides actionable feedback on the content
-
-The `WorkflowBuilder` from the [Microsoft.Agents.AI.Workflows](https://www.nuget.org/packages/Microsoft.Agents.AI.Workflows/) package connects these agents in a sequential flow:
-
-1. The Writer receives the initial request and generates content
-2. The Reviewer evaluates the content and provides feedback
-3. Both agent responses are output to the user
-
-### Agent Hosting
-
-The agent is hosted using the [Azure AI AgentServer SDK](https://www.nuget.org/packages/Azure.AI.AgentServer.AgentFramework/),
-which provisions a REST API endpoint compatible with the OpenAI Responses protocol.
-
-## Running the Agent Locally
-
-### Prerequisites
-
-Before running this sample, ensure you have:
-
-1. **Microsoft Foundry Project**
- - Project created.
- - Chat model deployed (e.g., `gpt-5.4-mini`)
- - Note your project endpoint URL and model deployment name
- > **Note**: You can right-click the project in the Microsoft Foundry VS Code extension and select `Copy Project Endpoint URL` to get the endpoint.
-
-2. **Azure CLI**
- - Installed and authenticated
- - Run `az login` and verify with `az account show`
- - Your identity needs the **Azure AI Developer** role on the Foundry resource (for `agents/write` data action required by `CreateAIAgentAsync`)
-
-3. **.NET 10.0 SDK or later**
- - Verify your version: `dotnet --version`
- - Download from [https://dotnet.microsoft.com/download](https://dotnet.microsoft.com/download)
-
-### Environment Variables
-
-Set the following environment variables:
-
-**PowerShell:**
-
-```powershell
-# Replace with your actual values
-$env:AZURE_AI_PROJECT_ENDPOINT="https://.services.ai.azure.com/api/projects/"
-$env:MODEL_DEPLOYMENT_NAME="gpt-5.4-mini"
-```
-
-**Bash:**
-
-```bash
-export AZURE_AI_PROJECT_ENDPOINT="https://.services.ai.azure.com/api/projects/"
-export MODEL_DEPLOYMENT_NAME="gpt-5.4-mini"
-```
-
-### Running the Sample
-
-To run the agent, execute the following command in your terminal:
-
-```bash
-dotnet restore
-dotnet build
-dotnet run
-```
-
-This will start the hosted agent locally on `http://localhost:8088/`.
-
-### Interacting with the Agent
-
-**VS Code:**
-
-1. Open the Visual Studio Code Command Palette and execute the `Microsoft Foundry: Open Container Agent Playground Locally` command.
-2. Execute the following commands to start the containerized hosted agent.
- ```bash
- dotnet restore
- dotnet build
- dotnet run
- ```
-3. Submit a request to the agent through the playground interface. For example, you may enter a prompt such as: "Create a slogan for a new electric SUV that is affordable and fun to drive."
-4. Review the agent's response in the playground interface.
-
-> **Note**: Open the local playground before starting the container agent to ensure the visualization functions correctly.
-
-**PowerShell (Windows):**
-
-```powershell
-$body = @{
- input = "Create a slogan for a new electric SUV that is affordable and fun to drive"
- stream = $false
-} | ConvertTo-Json
-
-Invoke-RestMethod -Uri http://localhost:8088/responses -Method Post -Body $body -ContentType "application/json"
-```
-
-**Bash/curl (Linux/macOS):**
-
-```bash
-curl -sS -H "Content-Type: application/json" -X POST http://localhost:8088/responses \
- -d '{"input": "Create a slogan for a new electric SUV that is affordable and fun to drive","stream":false}'
-```
-
-You can also use the `run-requests.http` file in this directory with the VS Code REST Client extension.
-
-The Writer agent will generate content based on your prompt, and the Reviewer agent will provide feedback on the output.
-
-## Deploying the Agent to Microsoft Foundry
-
-**Preparation (required)**
-
-Please check the environment_variables section in [agent.yaml](agent.yaml) and ensure the variables there are set in your target Microsoft Foundry Project.
-
-To deploy the hosted agent:
-
-1. Open the VS Code Command Palette and run the `Microsoft Foundry: Deploy Hosted Agent` command.
-
-2. Follow the interactive deployment prompts. The extension will help you select or create the container files it needs.
-
-3. After deployment completes, the hosted agent appears under the `Hosted Agents (Preview)` section of the extension tree. You can select the agent there to view details and test it using the integrated playground.
-
-**What the deploy flow does for you:**
-
-- Creates or obtains an Azure Container Registry for the target project.
-- Builds and pushes a container image from your workspace (the build packages the workspace respecting `.dockerignore`).
-- Creates an agent version in Microsoft Foundry using the built image. If a `.env` file exists at the workspace root, the extension will parse it and include its key/value pairs as the hosted agent's environment variables in the create request (these variables will be available to the agent runtime).
-- Starts the agent container on the project's capability host. If the capability host is not provisioned, the extension will prompt you to enable it and will guide you through creating it.
-
-## MSI Configuration in the Azure Portal
-
-This sample requires the Microsoft Foundry Project to authenticate using a Managed Identity when running remotely in Azure. Grant the project's managed identity the required permissions by assigning the built-in [Azure AI User](https://aka.ms/foundry-ext-project-role) role.
-
-To configure the Managed Identity:
-
-1. In the Azure Portal, open the Foundry Project.
-2. Select "Access control (IAM)" from the left-hand menu.
-3. Click "Add" and choose "Add role assignment".
-4. In the role selection, search for and select "Azure AI User", then click "Next".
-5. For "Assign access to", choose "Managed identity".
-6. Click "Select members", locate the managed identity associated with your Foundry Project (you can search by the project name), then click "Select".
-7. Click "Review + assign" to complete the assignment.
-8. Allow a few minutes for the role assignment to propagate before running the application.
-
-## Additional Resources
-
-- [Microsoft Agents Framework](https://learn.microsoft.com/en-us/agent-framework/overview/agent-framework-overview)
-- [Managed Identities for Azure Resources](https://learn.microsoft.com/en-us/entra/identity/managed-identities-azure-resources/)
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/agent.yaml b/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/agent.yaml
deleted file mode 100644
index 79d848fa5a..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/agent.yaml
+++ /dev/null
@@ -1,31 +0,0 @@
-# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/ContainerAgent.yaml
-
-name: FoundryMultiAgent
-displayName: "Foundry Multi-Agent Workflow"
-description: >
- A multi-agent workflow featuring a Writer and Reviewer that collaborate
- to create and refine content using Microsoft Foundry PersistentAgentsClient.
-metadata:
- authors:
- - Microsoft Agent Framework Team
- tags:
- - Azure AI AgentServer
- - Microsoft Agent Framework
- - Multi-Agent Workflow
- - Writer-Reviewer
- - Content Creation
-template:
- kind: hosted
- name: FoundryMultiAgent
- protocols:
- - protocol: responses
- version: v1
- environment_variables:
- - name: AZURE_AI_PROJECT_ENDPOINT
- value: ${AZURE_AI_PROJECT_ENDPOINT}
- - name: MODEL_DEPLOYMENT_NAME
- value: gpt-5.4-mini
-resources:
- - name: "gpt-5.4-mini"
- kind: model
- id: gpt-5.4-mini
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/appsettings.Development.json b/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/appsettings.Development.json
deleted file mode 100644
index eae0c9ec3f..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/appsettings.Development.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "AZURE_AI_PROJECT_ENDPOINT": "https://.services.ai.azure.com/api/projects/",
- "MODEL_DEPLOYMENT_NAME": "gpt-5.4-mini"
-}
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/run-requests.http b/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/run-requests.http
deleted file mode 100644
index 2fcdb2499e..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent/run-requests.http
+++ /dev/null
@@ -1,34 +0,0 @@
-@host = http://localhost:8088
-@endpoint = {{host}}/responses
-
-### Health Check
-GET {{host}}/readiness
-
-### Simple string input - Content creation request
-POST {{endpoint}}
-Content-Type: application/json
-
-{
- "input": "Create a slogan for a new electric SUV that is affordable and fun to drive",
- "stream": false
-}
-
-### Explicit input format
-POST {{endpoint}}
-Content-Type: application/json
-
-{
- "input": [
- {
- "type": "message",
- "role": "user",
- "content": [
- {
- "type": "input_text",
- "text": "Write a short product description for a smart water bottle that tracks hydration"
- }
- ]
- }
- ],
- "stream": false
-}
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/FoundrySingleAgent/Dockerfile b/dotnet/samples/05-end-to-end/HostedAgents/FoundrySingleAgent/Dockerfile
deleted file mode 100644
index 0d1141cc69..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/FoundrySingleAgent/Dockerfile
+++ /dev/null
@@ -1,20 +0,0 @@
-# Build the application
-FROM mcr.microsoft.com/dotnet/sdk:10.0-alpine AS build
-WORKDIR /src
-
-# Copy files from the current directory on the host to the working directory in the container
-COPY . .
-
-RUN dotnet restore
-RUN dotnet build -c Release --no-restore
-RUN dotnet publish -c Release --no-build -o /app -f net10.0
-
-# Run the application
-FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
-WORKDIR /app
-
-# Copy everything needed to run the app from the "build" stage.
-COPY --from=build /app .
-
-EXPOSE 8088
-ENTRYPOINT ["dotnet", "FoundrySingleAgent.dll"]
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/FoundrySingleAgent/FoundrySingleAgent.csproj b/dotnet/samples/05-end-to-end/HostedAgents/FoundrySingleAgent/FoundrySingleAgent.csproj
deleted file mode 100644
index 70df458d90..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/FoundrySingleAgent/FoundrySingleAgent.csproj
+++ /dev/null
@@ -1,67 +0,0 @@
-
-
- Exe
- net10.0
- enable
- enable
-
-
- false
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
- all
- runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
-
-
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/FoundrySingleAgent/Program.cs b/dotnet/samples/05-end-to-end/HostedAgents/FoundrySingleAgent/Program.cs
deleted file mode 100644
index c09a0a4a82..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/FoundrySingleAgent/Program.cs
+++ /dev/null
@@ -1,132 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-// Seattle Hotel Agent - A simple agent with a tool to find hotels in Seattle.
-// Uses Microsoft Agent Framework with Microsoft Foundry.
-// Ready for deployment to Foundry Hosted Agent service.
-
-#pragma warning disable CA2252 // AIProjectClient and Agents API require opting into preview features
-
-using System.ComponentModel;
-using System.Globalization;
-using System.Text;
-
-using Azure.AI.AgentServer.AgentFramework.Extensions;
-using Azure.AI.Projects;
-using Azure.Identity;
-using Microsoft.Agents.AI;
-using Microsoft.Extensions.AI;
-
-// Get configuration from environment variables
-var endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT")
- ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
-var deploymentName = Environment.GetEnvironmentVariable("MODEL_DEPLOYMENT_NAME") ?? "gpt-5.4-mini";
-Console.WriteLine($"Project Endpoint: {endpoint}");
-Console.WriteLine($"Model Deployment: {deploymentName}");
-// Simulated hotel data for Seattle
-var seattleHotels = new[]
-{
- new Hotel("Contoso Suites", 189, 4.5, "Downtown"),
- new Hotel("Fabrikam Residences", 159, 4.2, "Pike Place Market"),
- new Hotel("Alpine Ski House", 249, 4.7, "Seattle Center"),
- new Hotel("Margie's Travel Lodge", 219, 4.4, "Waterfront"),
- new Hotel("Northwind Inn", 139, 4.0, "Capitol Hill"),
- new Hotel("Relecloud Hotel", 99, 3.8, "University District"),
-};
-
-[Description("Get available hotels in Seattle for the specified dates. This simulates a call to a hotel availability API.")]
-string GetAvailableHotels(
- [Description("Check-in date in YYYY-MM-DD format")] string checkInDate,
- [Description("Check-out date in YYYY-MM-DD format")] string checkOutDate,
- [Description("Maximum price per night in USD (optional, defaults to 500)")] int maxPrice = 500)
-{
- try
- {
- // Parse dates
- if (!DateTime.TryParseExact(checkInDate, "yyyy-MM-dd", CultureInfo.InvariantCulture, DateTimeStyles.None, out var checkIn))
- {
- return "Error parsing check-in date. Please use YYYY-MM-DD format.";
- }
-
- if (!DateTime.TryParseExact(checkOutDate, "yyyy-MM-dd", CultureInfo.InvariantCulture, DateTimeStyles.None, out var checkOut))
- {
- return "Error parsing check-out date. Please use YYYY-MM-DD format.";
- }
-
- // Validate dates
- if (checkOut <= checkIn)
- {
- return "Error: Check-out date must be after check-in date.";
- }
-
- var nights = (checkOut - checkIn).Days;
-
- // Filter hotels by price
- var availableHotels = seattleHotels.Where(h => h.PricePerNight <= maxPrice).ToList();
-
- if (availableHotels.Count == 0)
- {
- return $"No hotels found in Seattle within your budget of ${maxPrice}/night.";
- }
-
- // Build response
- var result = new StringBuilder();
- result
- .AppendLine($"Available hotels in Seattle from {checkInDate} to {checkOutDate} ({nights} nights):")
- .AppendLine();
-
- foreach (var hotel in availableHotels)
- {
- var totalCost = hotel.PricePerNight * nights;
- result
- .AppendLine($"**{hotel.Name}**")
- .AppendLine($" Location: {hotel.Location}")
- .AppendLine($" Rating: {hotel.Rating}/5")
- .AppendLine($" ${hotel.PricePerNight}/night (Total: ${totalCost})")
- .AppendLine();
- }
-
- return result.ToString();
- }
- catch (Exception ex)
- {
- return $"Error processing request. Details: {ex.Message}";
- }
-}
-
-// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
-// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
-// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
-AIProjectClient aiProjectClient = new(new Uri(endpoint), new DefaultAzureCredential());
-
-// Create Foundry agent with hotel search tool
-AIAgent agent = await aiProjectClient.CreateAIAgentAsync(
- name: "SeattleHotelAgent",
- model: deploymentName,
- instructions: """
- You are a helpful travel assistant specializing in finding hotels in Seattle, Washington.
-
- When a user asks about hotels in Seattle:
- 1. Ask for their check-in and check-out dates if not provided
- 2. Ask about their budget preferences if not mentioned
- 3. Use the GetAvailableHotels tool to find available options
- 4. Present the results in a friendly, informative way
- 5. Offer to help with additional questions about the hotels or Seattle
-
- Be conversational and helpful. If users ask about things outside of Seattle hotels,
- politely let them know you specialize in Seattle hotel recommendations.
- """,
- tools: [AIFunctionFactory.Create(GetAvailableHotels)]);
-
-try
-{
- Console.WriteLine("Seattle Hotel Agent Server running on http://localhost:8088");
- await agent.RunAIAgentAsync(telemetrySourceName: "Agents");
-}
-finally
-{
- // Cleanup server-side agent
- await aiProjectClient.Agents.DeleteAgentAsync(agent.Name);
-}
-
-// Hotel record for simulated data
-internal sealed record Hotel(string Name, int PricePerNight, double Rating, string Location);
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/FoundrySingleAgent/README.md b/dotnet/samples/05-end-to-end/HostedAgents/FoundrySingleAgent/README.md
deleted file mode 100644
index 43c5a6cb69..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/FoundrySingleAgent/README.md
+++ /dev/null
@@ -1,167 +0,0 @@
-**IMPORTANT!** All samples and other resources made available in this GitHub repository ("samples") are designed to assist in accelerating development of agents, solutions, and agent workflows for various scenarios. Review all provided resources and carefully test output behavior in the context of your use case. AI responses may be inaccurate and AI actions should be monitored with human oversight. Learn more in the transparency documents for [Agent Service](https://learn.microsoft.com/en-us/azure/ai-foundry/responsible-ai/agents/transparency-note) and [Agent Framework](https://github.com/microsoft/agent-framework/blob/main/TRANSPARENCY_FAQ.md).
-
-Agents, solutions, or other output you create may be subject to legal and regulatory requirements, may require licenses, or may not be suitable for all industries, scenarios, or use cases. By using any sample, you are acknowledging that any output created using those samples are solely your responsibility, and that you will comply with all applicable laws, regulations, and relevant safety standards, terms of service, and codes of conduct.
-
-Third-party samples contained in this folder are subject to their own designated terms, and they have not been tested or verified by Microsoft or its affiliates.
-
-Microsoft has no responsibility to you or others with respect to any of these samples or any resulting output.
-
-# What this sample demonstrates
-
-This sample demonstrates a **key advantage of code-based hosted agents**:
-
-- **Local C# tool execution** - Run custom C# methods as agent tools
-
-Code-based agents can execute **any C# code** you write. This sample includes a Seattle Hotel Agent with a `GetAvailableHotels` tool that searches for available hotels based on check-in/check-out dates and budget preferences.
-
-The agent is hosted using the [Azure AI AgentServer SDK](https://learn.microsoft.com/en-us/dotnet/api/overview/azure/ai.agentserver.agentframework-readme) and can be deployed to Microsoft Foundry.
-
-## How It Works
-
-### Local Tools Integration
-
-In [Program.cs](Program.cs), the agent uses `AIProjectClient.CreateAIAgentAsync()` from the [Microsoft.Agents.AI.AzureAI](https://www.nuget.org/packages/Microsoft.Agents.AI.AzureAI/) package to create a Foundry agent with a local C# method (`GetAvailableHotels`) that simulates a hotel availability API. This demonstrates how code-based agents can execute custom server-side logic that prompt agents cannot access.
-
-The tool accepts:
-
-- **checkInDate** - Check-in date in YYYY-MM-DD format
-- **checkOutDate** - Check-out date in YYYY-MM-DD format
-- **maxPrice** - Maximum price per night in USD (optional, defaults to $500)
-
-### Agent Hosting
-
-The agent is hosted using the [Azure AI AgentServer SDK](https://learn.microsoft.com/en-us/dotnet/api/overview/azure/ai.agentserver.agentframework-readme),
-which provisions a REST API endpoint compatible with the OpenAI Responses protocol.
-
-## Running the Agent Locally
-
-### Prerequisites
-
-Before running this sample, ensure you have:
-
-1. **Microsoft Foundry Project**
- - Project created.
- - Chat model deployed (e.g., `gpt-5.4-mini`)
- - Note your project endpoint URL and model deployment name
-
-2. **Azure CLI**
- - Installed and authenticated
- - Run `az login` and verify with `az account show`
- - Your identity needs the **Azure AI Developer** role on the Foundry resource (for `agents/write` data action required by `CreateAIAgentAsync`)
-
-3. **.NET 10.0 SDK or later**
- - Verify your version: `dotnet --version`
- - Download from [https://dotnet.microsoft.com/download](https://dotnet.microsoft.com/download)
-
-### Environment Variables
-
-Set the following environment variables (matching `agent.yaml`):
-
-- `AZURE_AI_PROJECT_ENDPOINT` - Your Microsoft Foundry project endpoint URL (required)
-- `MODEL_DEPLOYMENT_NAME` - The deployment name for your chat model (defaults to `gpt-5.4-mini`)
-
-**PowerShell:**
-
-```powershell
-# Replace with your actual values
-$env:AZURE_AI_PROJECT_ENDPOINT="https://.services.ai.azure.com/api/projects/"
-$env:MODEL_DEPLOYMENT_NAME="gpt-5.4-mini"
-```
-
-**Bash:**
-
-```bash
-export AZURE_AI_PROJECT_ENDPOINT="https://.services.ai.azure.com/api/projects/"
-export MODEL_DEPLOYMENT_NAME="gpt-5.4-mini"
-```
-
-### Running the Sample
-
-To run the agent, execute the following command in your terminal:
-
-```bash
-dotnet restore
-dotnet build
-dotnet run
-```
-
-This will start the hosted agent locally on `http://localhost:8088/`.
-
-### Interacting with the Agent
-
-**VS Code:**
-
-1. Open the Visual Studio Code Command Palette and execute the `Microsoft Foundry: Open Container Agent Playground Locally` command.
-2. Execute the following commands to start the containerized hosted agent.
-
- ```bash
- dotnet restore
- dotnet build
- dotnet run
- ```
-
-3. Submit a request to the agent through the playground interface. For example, you may enter a prompt such as: "I need a hotel in Seattle from 2025-03-15 to 2025-03-18, budget under $200 per night."
-4. The agent will use the GetAvailableHotels tool to search for available hotels matching your criteria.
-
-> **Note**: Open the local playground before starting the container agent to ensure the visualization functions correctly.
-
-**PowerShell (Windows):**
-
-```powershell
-$body = @{
- input = "I need a hotel in Seattle from 2025-03-15 to 2025-03-18, budget under `$200 per night"
- stream = $false
-} | ConvertTo-Json
-
-Invoke-RestMethod -Uri http://localhost:8088/responses -Method Post -Body $body -ContentType "application/json"
-```
-
-**Bash/curl (Linux/macOS):**
-
-```bash
-curl -sS -H "Content-Type: application/json" -X POST http://localhost:8088/responses \
- -d '{"input": "Find me hotels in Seattle for March 20-23, 2025 under $200 per night","stream":false}'
-```
-
-You can also use the `run-requests.http` file in this directory with the VS Code REST Client extension.
-
-The agent will use the `GetAvailableHotels` tool to search for available hotels matching your criteria.
-
-## Deploying the Agent to Microsoft Foundry
-
-**Preparation (required)**
-
-Please check the environment_variables section in [agent.yaml](agent.yaml) and ensure the variables there are set in your target Microsoft Foundry Project.
-
-To deploy the hosted agent:
-
-1. Open the VS Code Command Palette and run the `Microsoft Foundry: Deploy Hosted Agent` command.
-2. Follow the interactive deployment prompts. The extension will help you select or create the container files it needs.
-3. After deployment completes, the hosted agent appears under the `Hosted Agents (Preview)` section of the extension tree. You can select the agent there to view details and test it using the integrated playground.
-
-**What the deploy flow does for you:**
-
-- Creates or obtains an Azure Container Registry for the target project.
-- Builds and pushes a container image from your workspace (the build packages the workspace respecting `.dockerignore`).
-- Creates an agent version in Microsoft Foundry using the built image. If a `.env` file exists at the workspace root, the extension will parse it and include its key/value pairs as the hosted agent's environment variables in the create request (these variables will be available to the agent runtime).
-- Starts the agent container on the project's capability host. If the capability host is not provisioned, the extension will prompt you to enable it and will guide you through creating it.
-
-## MSI Configuration in the Azure Portal
-
-This sample requires the Microsoft Foundry Project to authenticate using a Managed Identity when running remotely in Azure. Grant the project's managed identity the required permissions by assigning the built-in [Azure AI User](https://aka.ms/foundry-ext-project-role) role.
-
-To configure the Managed Identity:
-
-1. In the Azure Portal, open the Foundry Project.
-2. Select "Access control (IAM)" from the left-hand menu.
-3. Click "Add" and choose "Add role assignment".
-4. In the role selection, search for and select "Azure AI User", then click "Next".
-5. For "Assign access to", choose "Managed identity".
-6. Click "Select members", locate the managed identity associated with your Foundry Project (you can search by the project name), then click "Select".
-7. Click "Review + assign" to complete the assignment.
-8. Allow a few minutes for the role assignment to propagate before running the application.
-
-## Additional Resources
-
-- [Microsoft Agents Framework](https://learn.microsoft.com/en-us/agent-framework/overview/agent-framework-overview)
-- [Managed Identities for Azure Resources](https://learn.microsoft.com/en-us/entra/identity/managed-identities-azure-resources/)
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/FoundrySingleAgent/agent.yaml b/dotnet/samples/05-end-to-end/HostedAgents/FoundrySingleAgent/agent.yaml
deleted file mode 100644
index eacb3ec2c0..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/FoundrySingleAgent/agent.yaml
+++ /dev/null
@@ -1,32 +0,0 @@
-# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/ContainerAgent.yaml
-
-name: FoundrySingleAgent
-displayName: "Foundry Single Agent with Local Tools"
-description: >
- A travel assistant agent that helps users find hotels in Seattle.
- Demonstrates local C# tool execution - a key advantage of code-based
- hosted agents over prompt agents.
-metadata:
- authors:
- - Microsoft Agent Framework Team
- tags:
- - Azure AI AgentServer
- - Microsoft Agent Framework
- - Local Tools
- - Travel Assistant
- - Hotel Search
-template:
- kind: hosted
- name: FoundrySingleAgent
- protocols:
- - protocol: responses
- version: v1
- environment_variables:
- - name: AZURE_AI_PROJECT_ENDPOINT
- value: ${AZURE_AI_PROJECT_ENDPOINT}
- - name: MODEL_DEPLOYMENT_NAME
- value: gpt-5.4-mini
-resources:
- - name: "gpt-5.4-mini"
- kind: model
- id: gpt-5.4-mini
\ No newline at end of file
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/FoundrySingleAgent/run-requests.http b/dotnet/samples/05-end-to-end/HostedAgents/FoundrySingleAgent/run-requests.http
deleted file mode 100644
index 4f2e87e097..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/FoundrySingleAgent/run-requests.http
+++ /dev/null
@@ -1,52 +0,0 @@
-@host = http://localhost:8088
-@endpoint = {{host}}/responses
-
-### Health Check
-GET {{host}}/readiness
-
-### Simple hotel search - budget under $200
-POST {{endpoint}}
-Content-Type: application/json
-
-{
- "input": "I need a hotel in Seattle from 2025-03-15 to 2025-03-18, budget under $200 per night",
- "stream": false
-}
-
-### Hotel search with higher budget
-POST {{endpoint}}
-Content-Type: application/json
-
-{
- "input": "Find me hotels in Seattle for March 20-23, 2025 under $250 per night",
- "stream": false
-}
-
-### Ask for recommendations without dates (agent should ask for clarification)
-POST {{endpoint}}
-Content-Type: application/json
-
-{
- "input": "What hotels do you recommend in Seattle?",
- "stream": false
-}
-
-### Explicit input format
-POST {{endpoint}}
-Content-Type: application/json
-
-{
- "input": [
- {
- "type": "message",
- "role": "user",
- "content": [
- {
- "type": "input_text",
- "text": "I'm looking for a hotel in Seattle from 2025-04-01 to 2025-04-05, my budget is $150 per night maximum"
- }
- ]
- }
- ],
- "stream": false
-}
diff --git a/dotnet/samples/05-end-to-end/HostedAgents/README.md b/dotnet/samples/05-end-to-end/HostedAgents/README.md
deleted file mode 100644
index a2b603cc34..0000000000
--- a/dotnet/samples/05-end-to-end/HostedAgents/README.md
+++ /dev/null
@@ -1,100 +0,0 @@
-# Hosted Agent Samples
-
-These samples demonstrate how to build and host AI agents using the [Azure AI AgentServer SDK](https://learn.microsoft.com/en-us/dotnet/api/overview/azure/ai.agentserver.agentframework-readme). Each sample can be run locally and deployed to Microsoft Foundry as a hosted agent.
-
-## Samples
-
-| Sample | Description |
-|--------|-------------|
-| [`AgentWithLocalTools`](./AgentWithLocalTools/) | Local C# function tool execution (Seattle hotel search) |
-| [`AgentThreadAndHITL`](./AgentThreadAndHITL/) | Human-in-the-loop with `ApprovalRequiredAIFunction` and thread persistence |
-| [`AgentWithHostedMCP`](./AgentWithHostedMCP/) | Hosted MCP server tool (Microsoft Learn search) |
-| [`AgentWithTextSearchRag`](./AgentWithTextSearchRag/) | RAG with `TextSearchProvider` (Contoso Outdoors) |
-| [`AgentsInWorkflows`](./AgentsInWorkflows/) | Sequential workflow pipeline (translation chain) |
-| [`FoundryMultiAgent`](./FoundryMultiAgent/) | Multi-agent Writer-Reviewer workflow using `AIProjectClient.CreateAIAgentAsync()` from [Microsoft.Agents.AI.AzureAI](https://www.nuget.org/packages/Microsoft.Agents.AI.AzureAI/) |
-| [`FoundrySingleAgent`](./FoundrySingleAgent/) | Single agent with local C# tool execution (hotel search) using `AIProjectClient.CreateAIAgentAsync()` from [Microsoft.Agents.AI.AzureAI](https://www.nuget.org/packages/Microsoft.Agents.AI.AzureAI/) |
-
-## Common Prerequisites
-
-Before running any sample, ensure you have:
-
-1. **.NET 10 SDK** or later — [Download](https://dotnet.microsoft.com/download/dotnet/10.0)
-2. **Azure CLI** installed — [Install guide](https://learn.microsoft.com/cli/azure/install-azure-cli)
-3. **Azure OpenAI** or **Microsoft Foundry project** with a chat model deployed (e.g., `gpt-5.4-mini`)
-
-### Authenticate with Azure CLI
-
-All samples use `DefaultAzureCredential` for authentication, which automatically probes multiple credential sources (environment variables, managed identity, Azure CLI, etc.). For local development, the simplest approach is to authenticate via Azure CLI:
-
-```powershell
-az login
-az account show # Verify the correct subscription
-```
-
-### Common Environment Variables
-
-Most samples require one or more of these environment variables:
-
-| Variable | Used By | Description |
-|----------|---------|-------------|
-| `AZURE_OPENAI_ENDPOINT` | Most samples | Your Azure OpenAI resource endpoint URL |
-| `AZURE_OPENAI_DEPLOYMENT_NAME` | Most samples | Chat model deployment name (defaults to `gpt-5.4-mini`) |
-| `AZURE_AI_PROJECT_ENDPOINT` | AgentWithLocalTools, FoundryMultiAgent, FoundrySingleAgent | Microsoft Foundry project endpoint |
-| `MODEL_DEPLOYMENT_NAME` | AgentWithLocalTools, FoundryMultiAgent, FoundrySingleAgent | Chat model deployment name (defaults to `gpt-5.4-mini`) |
-
-See each sample's README for the specific variables required.
-
-## Microsoft Foundry Setup (for samples that use Foundry)
-
-Some samples (`AgentWithLocalTools`, `FoundrySingleAgent`, `FoundryMultiAgent`) connect to a Microsoft Foundry project. If you're using these samples, you'll need additional setup.
-
-### Azure AI Developer Role
-
-Some Foundry operations require the **Azure AI Developer** role on the Cognitive Services resource. Even if you created the project, you may not have this role by default.
-
-```powershell
-az role assignment create `
- --role "Azure AI Developer" `
- --assignee "your-email@microsoft.com" `
- --scope "/subscriptions/{subscription-id}/resourceGroups/{resource-group}/providers/Microsoft.CognitiveServices/accounts/{account-name}"
-```
-
-> **Note**: You need **Owner** or **User Access Administrator** permissions on the resource to assign roles. If you don't have this, you may need to request JIT (Just-In-Time) elevated access via [Azure PIM](https://portal.azure.com/#view/Microsoft_Azure_PIMCommon/ActivationMenuBlade/~/aadmigratedresource).
-
-For more details on permissions, see [Microsoft Foundry Permissions](https://aka.ms/FoundryPermissions).
-
-## Running a Sample
-
-Each sample runs as a standalone hosted agent on `http://localhost:8088/`:
-
-```powershell
-cd
-dotnet run
-```
-
-### Interacting with the Agent
-
-Each sample includes a `run-requests.http` file for testing with the [VS Code REST Client](https://marketplace.visualstudio.com/items?itemName=humao.rest-client) extension, or you can use PowerShell:
-
-```powershell
-$body = @{ input = "Your question here" } | ConvertTo-Json
-Invoke-RestMethod -Uri "http://localhost:8088/responses" -Method Post -Body $body -ContentType "application/json"
-```
-
-## Deploying to Microsoft Foundry
-
-Each sample includes a `Dockerfile` and `agent.yaml` for deployment. To deploy your agent to Microsoft Foundry, follow the [hosted agents deployment guide](https://learn.microsoft.com/en-us/azure/ai-foundry/agents/concepts/hosted-agents).
-
-## Troubleshooting
-
-### `PermissionDenied` — lacks `agents/write` data action
-
-Assign the **Azure AI Developer** role to your user. See [Azure AI Developer Role](#azure-ai-developer-role) above.
-
-### Multi-framework error when running `dotnet run`
-
-If you see "Your project targets multiple frameworks", specify the framework:
-
-```powershell
-dotnet run --framework net10.0
-```
diff --git a/dotnet/src/Microsoft.Agents.AI.Foundry/AzureAIProjectChatClientExtensions.cs b/dotnet/src/Microsoft.Agents.AI.Foundry/AzureAIProjectChatClientExtensions.cs
index 4383cfb6d4..1f0f0a6e5f 100644
--- a/dotnet/src/Microsoft.Agents.AI.Foundry/AzureAIProjectChatClientExtensions.cs
+++ b/dotnet/src/Microsoft.Agents.AI.Foundry/AzureAIProjectChatClientExtensions.cs
@@ -181,7 +181,7 @@ public static ChatClientAgent AsAIAgent(
/// Creates a non-versioned backed by the project's Responses API using the specified options.
///
/// The to use for Responses API calls. Cannot be .
- /// Configuration options that control the agent's behavior. is required.
+ /// Optional configuration options that control the agent's behavior.
/// Provides a way to customize the creation of the underlying used by the agent.
/// Optional logger factory for creating loggers used by the agent.
/// An optional to use for resolving services required by the instances being invoked.
@@ -190,15 +190,14 @@ public static ChatClientAgent AsAIAgent(
/// Thrown when does not specify .
public static ChatClientAgent AsAIAgent(
this AIProjectClient aiProjectClient,
- ChatClientAgentOptions options,
+ ChatClientAgentOptions? options = null,
Func? clientFactory = null,
ILoggerFactory? loggerFactory = null,
IServiceProvider? services = null)
{
Throw.IfNull(aiProjectClient);
- Throw.IfNull(options);
- return CreateResponsesChatClientAgent(aiProjectClient, options, clientFactory, loggerFactory, services);
+ return CreateResponsesChatClientAgent(aiProjectClient, options ?? new(), clientFactory, loggerFactory, services);
}
#region Private
diff --git a/dotnet/src/Microsoft.Agents.AI.Foundry/Hosting/AgentFrameworkResponseHandler.cs b/dotnet/src/Microsoft.Agents.AI.Foundry/Hosting/AgentFrameworkResponseHandler.cs
new file mode 100644
index 0000000000..87bfd1fc75
--- /dev/null
+++ b/dotnet/src/Microsoft.Agents.AI.Foundry/Hosting/AgentFrameworkResponseHandler.cs
@@ -0,0 +1,268 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Runtime.CompilerServices;
+using System.Threading;
+using Azure.AI.AgentServer.Responses;
+using Azure.AI.AgentServer.Responses.Models;
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+
+namespace Microsoft.Agents.AI.Foundry.Hosting;
+
+///
+/// A implementation that bridges the Azure AI Responses Server SDK
+/// with agent-framework instances, enabling agent-framework agents and workflows
+/// to be hosted as Azure Foundry Hosted Agents.
+///
+public class AgentFrameworkResponseHandler : ResponseHandler
+{
+ private readonly IServiceProvider _serviceProvider;
+ private readonly ILogger _logger;
+
+ ///
+ /// Initializes a new instance of the class
+ /// that resolves agents from keyed DI services.
+ ///
+ /// The service provider for resolving agents.
+ /// The logger instance.
+ public AgentFrameworkResponseHandler(
+ IServiceProvider serviceProvider,
+ ILogger logger)
+ {
+ ArgumentNullException.ThrowIfNull(serviceProvider);
+ ArgumentNullException.ThrowIfNull(logger);
+
+ this._serviceProvider = serviceProvider;
+ this._logger = logger;
+ }
+
+ ///
+ public override async IAsyncEnumerable CreateAsync(
+ CreateResponse request,
+ ResponseContext context,
+ [EnumeratorCancellation] CancellationToken cancellationToken)
+ {
+ // 1. Resolve agent
+ var agent = this.ResolveAgent(request);
+ var sessionStore = this.ResolveSessionStore(request);
+
+ // 2. Load or create a new session from the interaction
+ var sessionConversationId = request.GetConversationId() ?? Guid.NewGuid().ToString();
+
+ var chatClientAgent = agent.GetService();
+
+ AgentSession? session = !string.IsNullOrEmpty(sessionConversationId)
+ ? await sessionStore.GetSessionAsync(agent, sessionConversationId, cancellationToken).ConfigureAwait(false)
+ : chatClientAgent is not null
+ ? await chatClientAgent.CreateSessionAsync(sessionConversationId, cancellationToken).ConfigureAwait(false)
+ : await agent.CreateSessionAsync(cancellationToken).ConfigureAwait(false);
+
+ // 3. Create the SDK event stream builder
+ var stream = new ResponseEventStream(context, request);
+
+ // Emit response lifecycle events (created → in_progress) before streaming output
+ yield return stream.EmitCreated();
+ yield return stream.EmitInProgress();
+
+ // 4. Convert input: history + current input → ChatMessage[]
+ var messages = new List();
+
+ // Load conversation history if available
+ var history = await context.GetHistoryAsync(cancellationToken).ConfigureAwait(false);
+ if (history.Count > 0)
+ {
+ messages.AddRange(InputConverter.ConvertOutputItemsToMessages(history));
+ }
+
+ // Load and convert current input items
+ var inputItems = await context.GetInputItemsAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
+ if (inputItems.Count > 0)
+ {
+ messages.AddRange(InputConverter.ConvertItemsToMessages(inputItems));
+ }
+ else
+ {
+ // Fall back to raw request input
+ messages.AddRange(InputConverter.ConvertInputToMessages(request));
+ }
+
+ // 5. Build chat options
+ var chatOptions = InputConverter.ConvertToChatOptions(request);
+ chatOptions.Instructions = request.Instructions;
+ var options = new ChatClientAgentRunOptions(chatOptions);
+
+ // 6. Run the agent and convert output
+ // NOTE: C# forbids 'yield return' inside a try block that has a catch clause,
+ // and inside catch blocks. We use a flag to defer the yield to outside the try/catch.
+ bool emittedTerminal = false;
+ var enumerator = OutputConverter.ConvertUpdatesToEventsAsync(
+ agent.RunStreamingAsync(messages, session, options: options, cancellationToken: cancellationToken),
+ stream,
+ cancellationToken).GetAsyncEnumerator(cancellationToken);
+ try
+ {
+ while (true)
+ {
+ bool shutdownDetected = false;
+ ResponseStreamEvent? failedEvent = null;
+ ResponseStreamEvent? evt = null;
+ try
+ {
+ if (!await enumerator.MoveNextAsync().ConfigureAwait(false))
+ {
+ break;
+ }
+
+ evt = enumerator.Current;
+ }
+ catch (OperationCanceledException) when (context.IsShutdownRequested && !emittedTerminal)
+ {
+ shutdownDetected = true;
+ }
+ catch (Exception ex) when (ex is not OperationCanceledException && !emittedTerminal)
+ {
+ // Catch agent execution errors and emit a proper failed event
+ // with the real error message instead of letting the SDK emit
+ // a generic "An internal server error occurred."
+ if (this._logger.IsEnabled(LogLevel.Error))
+ {
+ this._logger.LogError(ex, "Agent execution failed for response {ResponseId}.", context.ResponseId);
+ }
+
+ failedEvent = stream.EmitFailed(
+ ResponseErrorCode.ServerError,
+ ex.Message);
+ }
+
+ if (failedEvent is not null)
+ {
+ yield return failedEvent;
+ yield break;
+ }
+
+ if (shutdownDetected)
+ {
+ // Server is shutting down — emit incomplete so clients can resume
+ this._logger.LogInformation("Shutdown detected, emitting incomplete response.");
+ yield return stream.EmitIncomplete();
+ yield break;
+ }
+
+ // yield is in the outer try (finally-only) — allowed by C#
+ yield return evt!;
+
+ if (evt is ResponseCompletedEvent or ResponseFailedEvent or ResponseIncompleteEvent)
+ {
+ emittedTerminal = true;
+ }
+ }
+ }
+ finally
+ {
+ await enumerator.DisposeAsync().ConfigureAwait(false);
+
+ // Persist session after streaming completes (successful or not)
+ if (session is not null && !string.IsNullOrEmpty(sessionConversationId))
+ {
+ await sessionStore.SaveSessionAsync(agent, sessionConversationId, session, CancellationToken.None).ConfigureAwait(false);
+ }
+ }
+ }
+
+ ///
+ /// Resolves an from the request.
+/// Tries agent.name first, then the request's model field, then falls back to metadata["entity_id"].
+ /// If neither is present, attempts to resolve a default (non-keyed) .
+ ///
+ private AIAgent ResolveAgent(CreateResponse request)
+ {
+ var agentName = GetAgentName(request);
+
+ if (!string.IsNullOrEmpty(agentName))
+ {
+ var agent = this._serviceProvider.GetKeyedService(agentName);
+ if (agent is not null)
+ {
+ return agent;
+ }
+
+ if (this._logger.IsEnabled(LogLevel.Warning))
+ {
+ this._logger.LogWarning("Agent '{AgentName}' not found in keyed services. Attempting default resolution.", agentName);
+ }
+ }
+
+ // Try non-keyed default
+ var defaultAgent = this._serviceProvider.GetService();
+ if (defaultAgent is not null)
+ {
+ return defaultAgent;
+ }
+
+ var errorMessage = string.IsNullOrEmpty(agentName)
+ ? "No agent name specified in the request (via agent.name or metadata[\"entity_id\"]) and no default AIAgent is registered."
+ : $"Agent '{agentName}' not found. Ensure it is registered via AddAIAgent(\"{agentName}\", ...) or as a default AIAgent.";
+
+ throw new InvalidOperationException(errorMessage);
+ }
+
+ ///
+ /// Resolves an from the request.
+/// Tries agent.name first, then the request's model field, then falls back to metadata["entity_id"].
+ /// If neither is present, attempts to resolve a default (non-keyed) .
+ ///
+ private AgentSessionStore ResolveSessionStore(CreateResponse request)
+ {
+ var agentName = GetAgentName(request);
+
+ if (!string.IsNullOrEmpty(agentName))
+ {
+ var sessionStore = this._serviceProvider.GetKeyedService(agentName);
+ if (sessionStore is not null)
+ {
+ return sessionStore;
+ }
+
+ if (this._logger.IsEnabled(LogLevel.Warning))
+ {
+ this._logger.LogWarning("SessionStore for agent '{AgentName}' not found in keyed services. Attempting default resolution.", agentName);
+ }
+ }
+
+ // Try non-keyed default
+ var defaultSessionStore = this._serviceProvider.GetService();
+ if (defaultSessionStore is not null)
+ {
+ return defaultSessionStore;
+ }
+
+ var errorMessage = string.IsNullOrEmpty(agentName)
+ ? "No agent name specified in the request (via agent.name or metadata[\"entity_id\"]) and no default AgentSessionStore is registered."
+ : $"AgentSessionStore for agent '{agentName}' not found. Ensure it is registered via AddAIAgent(\"{agentName}\", ...) or as a default AgentSessionStore.";
+
+ throw new InvalidOperationException(errorMessage);
+ }
+
+ private static string? GetAgentName(CreateResponse request)
+ {
+ // Try agent.name from AgentReference
+ var agentName = request.AgentReference?.Name;
+
+ // Fall back to "model" field (OpenAI clients send the agent name as the model)
+ if (string.IsNullOrEmpty(agentName))
+ {
+ agentName = request.Model;
+ }
+
+ // Fall back to metadata["entity_id"]
+ if (string.IsNullOrEmpty(agentName) && request.Metadata?.AdditionalProperties is not null)
+ {
+ request.Metadata.AdditionalProperties.TryGetValue("entity_id", out agentName);
+ }
+
+ return agentName;
+ }
+}
diff --git a/dotnet/src/Microsoft.Agents.AI.Foundry/Hosting/AgentSessionStore.cs b/dotnet/src/Microsoft.Agents.AI.Foundry/Hosting/AgentSessionStore.cs
new file mode 100644
index 0000000000..c61584e9e0
--- /dev/null
+++ b/dotnet/src/Microsoft.Agents.AI.Foundry/Hosting/AgentSessionStore.cs
@@ -0,0 +1,46 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace Microsoft.Agents.AI.Foundry.Hosting;
+
+///
+/// Defines the contract for storing and retrieving agent conversation sessions.
+///
+///
+/// Implementations of this interface enable persistent storage of conversation sessions,
+/// allowing conversations to be resumed across HTTP requests, application restarts,
+/// or different service instances in hosted scenarios.
+///
+public abstract class AgentSessionStore
+{
+ ///
+ /// Saves a serialized agent session to persistent storage.
+ ///
+ /// The agent that owns this session.
+ /// The unique identifier for the conversation/session.
+ /// The session to save.
+ /// The to monitor for cancellation requests.
+ /// A task that represents the asynchronous save operation.
+ public abstract ValueTask SaveSessionAsync(
+ AIAgent agent,
+ string conversationId,
+ AgentSession session,
+ CancellationToken cancellationToken = default);
+
+ ///
+ /// Retrieves a serialized agent session from persistent storage.
+ ///
+ /// The agent that owns this session.
+ /// The unique identifier for the conversation/session to retrieve.
+ /// The to monitor for cancellation requests.
+ ///
+ /// A task that represents the asynchronous retrieval operation.
+ /// The task result contains the session, or a new session if not found.
+ ///
+ public abstract ValueTask GetSessionAsync(
+ AIAgent agent,
+ string conversationId,
+ CancellationToken cancellationToken = default);
+}
diff --git a/dotnet/src/Microsoft.Agents.AI.Foundry/Hosting/InMemoryAgentSessionStore.cs b/dotnet/src/Microsoft.Agents.AI.Foundry/Hosting/InMemoryAgentSessionStore.cs
new file mode 100644
index 0000000000..4ae94ed4fe
--- /dev/null
+++ b/dotnet/src/Microsoft.Agents.AI.Foundry/Hosting/InMemoryAgentSessionStore.cs
@@ -0,0 +1,53 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections.Concurrent;
+using System.Text.Json;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace Microsoft.Agents.AI.Foundry.Hosting;
+
+///
+/// Provides an in-memory implementation of for development and testing scenarios.
+///
+///
+///
+/// This implementation stores sessions in memory using a concurrent dictionary and is suitable for:
+///
+/// Single-instance development scenarios
+/// Testing and prototyping
+/// Scenarios where session persistence across restarts is not required
+///
+///
+///
+/// Warning: All stored sessions will be lost when the application restarts.
+/// For production use with multiple instances or persistence across restarts, use a durable storage implementation
+/// such as Redis, SQL Server, or Azure Cosmos DB.
+///
+///
+public sealed class InMemoryAgentSessionStore : AgentSessionStore
+{
+ private readonly ConcurrentDictionary _sessions = new();
+
+ ///
+ public override async ValueTask SaveSessionAsync(AIAgent agent, string conversationId, AgentSession session, CancellationToken cancellationToken = default)
+ {
+ var key = GetKey(conversationId, agent.Id);
+ this._sessions[key] = await agent.SerializeSessionAsync(session, cancellationToken: cancellationToken).ConfigureAwait(false);
+ }
+
+ ///
+ public override async ValueTask GetSessionAsync(AIAgent agent, string conversationId, CancellationToken cancellationToken = default)
+ {
+ var key = GetKey(conversationId, agent.Id);
+ JsonElement? sessionContent = this._sessions.TryGetValue(key, out var existingSession) ? existingSession : null;
+
+ return sessionContent switch
+ {
+ null => await agent.CreateSessionAsync(cancellationToken).ConfigureAwait(false),
+ _ => await agent.DeserializeSessionAsync(sessionContent.Value, cancellationToken: cancellationToken).ConfigureAwait(false),
+ };
+ }
+
+ private static string GetKey(string conversationId, string agentId) => $"{agentId}:{conversationId}";
+}
diff --git a/dotnet/src/Microsoft.Agents.AI.Foundry/Hosting/InputConverter.cs b/dotnet/src/Microsoft.Agents.AI.Foundry/Hosting/InputConverter.cs
new file mode 100644
index 0000000000..1d8be8f590
--- /dev/null
+++ b/dotnet/src/Microsoft.Agents.AI.Foundry/Hosting/InputConverter.cs
@@ -0,0 +1,322 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+using System.Text.Json;
+using Azure.AI.AgentServer.Responses.Models;
+using Microsoft.Extensions.AI;
+using MeaiTextContent = Microsoft.Extensions.AI.TextContent;
+
+namespace Microsoft.Agents.AI.Foundry.Hosting;
+
+///
+/// Converts Responses Server SDK input types to agent-framework types.
+///
+internal static class InputConverter
+{
+ ///
+ /// Converts the SDK request input items into a list of .
+ ///
+ /// The create response request from the SDK.
+ /// A list of chat messages representing the request input.
+ public static List<ChatMessage> ConvertInputToMessages(CreateResponse request)
+ {
+ var messages = new List<ChatMessage>();
+
+ foreach (var item in request.GetInputExpanded())
+ {
+ var message = ConvertInputItemToMessage(item);
+ if (message is not null)
+ {
+ messages.Add(message);
+ }
+ }
+
+ return messages;
+ }
+
+ ///
+ /// Converts resolved SDK input items into instances.
+ ///
+ /// The resolved input items from the SDK context.
+ /// A list of chat messages.
+ public static List<ChatMessage> ConvertItemsToMessages(IReadOnlyList<Item> items)
+ {
+ var messages = new List<ChatMessage>();
+
+ foreach (var item in items)
+ {
+ var message = ConvertInputItemToMessage(item);
+ if (message is not null)
+ {
+ messages.Add(message);
+ }
+ }
+
+ return messages;
+ }
+
+ ///
+ /// Converts resolved SDK history/input items into instances.
+ ///
+ /// The resolved output items from the SDK context.
+ /// A list of chat messages.
+ public static List<ChatMessage> ConvertOutputItemsToMessages(IReadOnlyList<OutputItem> items)
+ {
+ var messages = new List<ChatMessage>();
+
+ foreach (var item in items)
+ {
+ var message = ConvertOutputItemToMessage(item);
+ if (message is not null)
+ {
+ messages.Add(message);
+ }
+ }
+
+ return messages;
+ }
+
+ ///
+ /// Creates from the SDK request properties.
+ ///
+ /// The create response request.
+ /// A configured instance.
+ public static ChatOptions ConvertToChatOptions(CreateResponse request)
+ {
+ return new ChatOptions
+ {
+ Temperature = (float?)request.Temperature,
+ TopP = (float?)request.TopP,
+ MaxOutputTokens = (int?)request.MaxOutputTokens,
+ // Note: We intentionally do NOT set ModelId from request.Model here.
+ // The hosted agent already has its own model configured, and passing
+ // the client-provided model would override it (causing failures when
+ // clients send placeholder values like "hosted-agent").
+ };
+ }
+
+ private static ChatMessage? ConvertInputItemToMessage(Item item)
+ {
+ return item switch
+ {
+ ItemMessage msg => ConvertItemMessage(msg),
+ FunctionCallOutputItemParam funcOutput => ConvertFunctionCallOutput(funcOutput),
+ ItemFunctionToolCall funcCall => ConvertItemFunctionToolCall(funcCall),
+ ItemReferenceParam => null,
+ _ => null
+ };
+ }
+
+ private static ChatMessage ConvertItemMessage(ItemMessage msg)
+ {
+ var role = ConvertMessageRole(msg.Role);
+ var contents = new List<AIContent>();
+
+ foreach (var content in msg.GetContentExpanded())
+ {
+ switch (content)
+ {
+ case MessageContentInputTextContent textContent:
+ contents.Add(new MeaiTextContent(textContent.Text));
+ break;
+ case MessageContentInputImageContent imageContent:
+ if (imageContent.ImageUrl is not null)
+ {
+ var url = imageContent.ImageUrl.ToString();
+ if (url.StartsWith("data:", StringComparison.OrdinalIgnoreCase))
+ {
+ contents.Add(new DataContent(url, "image/*"));
+ }
+ else
+ {
+ contents.Add(new UriContent(imageContent.ImageUrl, "image/*"));
+ }
+ }
+ else if (!string.IsNullOrEmpty(imageContent.FileId))
+ {
+ contents.Add(new HostedFileContent(imageContent.FileId));
+ }
+
+ break;
+ case MessageContentInputFileContent fileContent:
+ if (fileContent.FileUrl is not null)
+ {
+ contents.Add(new UriContent(fileContent.FileUrl, "application/octet-stream"));
+ }
+ else if (!string.IsNullOrEmpty(fileContent.FileData))
+ {
+ contents.Add(new DataContent(fileContent.FileData, "application/octet-stream"));
+ }
+ else if (!string.IsNullOrEmpty(fileContent.FileId))
+ {
+ contents.Add(new HostedFileContent(fileContent.FileId));
+ }
+ else if (!string.IsNullOrEmpty(fileContent.Filename))
+ {
+ contents.Add(new MeaiTextContent($"[File: {fileContent.Filename}]"));
+ }
+
+ break;
+ }
+ }
+
+ if (contents.Count == 0)
+ {
+ contents.Add(new MeaiTextContent(string.Empty));
+ }
+
+ return new ChatMessage(role, contents);
+ }
+
+ private static ChatMessage ConvertFunctionCallOutput(FunctionCallOutputItemParam funcOutput)
+ {
+ var output = funcOutput.Output?.ToString() ?? string.Empty;
+ return new ChatMessage(
+ ChatRole.Tool,
+ [new FunctionResultContent(funcOutput.CallId, output)]);
+ }
+
+ [UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Deserializing function call arguments from SDK input.")]
+ [UnconditionalSuppressMessage("AOT", "IL3050", Justification = "Deserializing function call arguments from SDK input.")]
+ private static ChatMessage ConvertItemFunctionToolCall(ItemFunctionToolCall funcCall)
+ {
+ IDictionary<string, object?>? arguments = null;
+ if (funcCall.Arguments is not null)
+ {
+ try
+ {
+ arguments = JsonSerializer.Deserialize<Dictionary<string, object?>>(funcCall.Arguments);
+ }
+ catch (JsonException)
+ {
+ arguments = new Dictionary<string, object?> { ["_raw"] = funcCall.Arguments };
+ }
+ }
+
+ return new ChatMessage(
+ ChatRole.Assistant,
+ [new FunctionCallContent(funcCall.CallId, funcCall.Name, arguments)]);
+ }
+
+ private static ChatMessage? ConvertOutputItemToMessage(OutputItem item)
+ {
+ return item switch
+ {
+ OutputItemMessage msg => ConvertOutputItemMessageToChat(msg),
+ OutputItemFunctionToolCall funcCall => ConvertOutputItemFunctionCall(funcCall),
+ FunctionToolCallOutputResource funcOutput => ConvertFunctionToolCallOutputResource(funcOutput),
+ OutputItemReasoningItem => null,
+ _ => null
+ };
+ }
+
+ private static ChatMessage ConvertOutputItemMessageToChat(OutputItemMessage msg)
+ {
+ var role = ConvertMessageRole(msg.Role);
+ var contents = new List<AIContent>();
+
+ foreach (var content in msg.Content)
+ {
+ switch (content)
+ {
+ case MessageContentInputTextContent textContent:
+ contents.Add(new MeaiTextContent(textContent.Text));
+ break;
+ case MessageContentOutputTextContent textContent:
+ contents.Add(new MeaiTextContent(textContent.Text));
+ break;
+ case MessageContentRefusalContent refusal:
+ contents.Add(new MeaiTextContent($"[Refusal: {refusal.Refusal}]"));
+ break;
+ case MessageContentInputImageContent imageContent:
+ if (imageContent.ImageUrl is not null)
+ {
+ var url = imageContent.ImageUrl.ToString();
+ if (url.StartsWith("data:", StringComparison.OrdinalIgnoreCase))
+ {
+ contents.Add(new DataContent(url, "image/*"));
+ }
+ else
+ {
+ contents.Add(new UriContent(imageContent.ImageUrl, "image/*"));
+ }
+ }
+ else if (!string.IsNullOrEmpty(imageContent.FileId))
+ {
+ contents.Add(new HostedFileContent(imageContent.FileId));
+ }
+
+ break;
+ case MessageContentInputFileContent fileContent:
+ if (fileContent.FileUrl is not null)
+ {
+ contents.Add(new UriContent(fileContent.FileUrl, "application/octet-stream"));
+ }
+ else if (!string.IsNullOrEmpty(fileContent.FileData))
+ {
+ contents.Add(new DataContent(fileContent.FileData, "application/octet-stream"));
+ }
+ else if (!string.IsNullOrEmpty(fileContent.FileId))
+ {
+ contents.Add(new HostedFileContent(fileContent.FileId));
+ }
+ else if (!string.IsNullOrEmpty(fileContent.Filename))
+ {
+ contents.Add(new MeaiTextContent($"[File: {fileContent.Filename}]"));
+ }
+
+ break;
+ }
+ }
+
+ if (contents.Count == 0)
+ {
+ contents.Add(new MeaiTextContent(string.Empty));
+ }
+
+ return new ChatMessage(role, contents);
+ }
+
+ [UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Deserializing function call arguments from SDK output history.")]
+ [UnconditionalSuppressMessage("AOT", "IL3050", Justification = "Deserializing function call arguments from SDK output history.")]
+ private static ChatMessage ConvertOutputItemFunctionCall(OutputItemFunctionToolCall funcCall)
+ {
+ IDictionary<string, object?>? arguments = null;
+ if (funcCall.Arguments is not null)
+ {
+ try
+ {
+ arguments = JsonSerializer.Deserialize<Dictionary<string, object?>>(funcCall.Arguments);
+ }
+ catch (JsonException)
+ {
+ arguments = new Dictionary<string, object?> { ["_raw"] = funcCall.Arguments };
+ }
+ }
+
+ return new ChatMessage(
+ ChatRole.Assistant,
+ [new FunctionCallContent(funcCall.CallId, funcCall.Name, arguments)]);
+ }
+
+ private static ChatMessage ConvertFunctionToolCallOutputResource(FunctionToolCallOutputResource funcOutput)
+ {
+ return new ChatMessage(
+ ChatRole.Tool,
+ [new FunctionResultContent(funcOutput.CallId, funcOutput.Output)]);
+ }
+
+ private static ChatRole ConvertMessageRole(MessageRole role)
+ {
+ return role switch
+ {
+ MessageRole.User => ChatRole.User,
+ MessageRole.Assistant => ChatRole.Assistant,
+ MessageRole.System => ChatRole.System,
+ MessageRole.Developer => new ChatRole("developer"),
+ _ => ChatRole.User
+ };
+ }
+}
diff --git a/dotnet/src/Microsoft.Agents.AI.Foundry/Hosting/OutputConverter.cs b/dotnet/src/Microsoft.Agents.AI.Foundry/Hosting/OutputConverter.cs
new file mode 100644
index 0000000000..58ba989ebf
--- /dev/null
+++ b/dotnet/src/Microsoft.Agents.AI.Foundry/Hosting/OutputConverter.cs
@@ -0,0 +1,349 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+using System.Runtime.CompilerServices;
+using System.Security.Cryptography;
+using System.Text;
+using System.Text.Json;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure.AI.AgentServer.Responses;
+using Azure.AI.AgentServer.Responses.Models;
+using Microsoft.Agents.AI.Workflows;
+using Microsoft.Extensions.AI;
+using MeaiTextContent = Microsoft.Extensions.AI.TextContent;
+
+namespace Microsoft.Agents.AI.Foundry.Hosting;
+
+///
+/// Converts agent-framework streams into
+/// Responses Server SDK sequences using the
+/// builder pattern.
+///
+internal static class OutputConverter
+{
+ ///
+ /// Converts a stream of into a stream of
+ /// using the SDK builder pattern.
+ ///
+ /// The agent response updates to convert.
+ /// The SDK event stream builder.
+ /// Cancellation token.
+ /// An async enumerable of SDK response stream events (excluding lifecycle events).
+ [UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Serializing function call arguments dictionary.")]
+ [UnconditionalSuppressMessage("AOT", "IL3050", Justification = "Serializing function call arguments dictionary.")]
+ public static async IAsyncEnumerable<ResponseStreamEvent> ConvertUpdatesToEventsAsync(
+ IAsyncEnumerable<AgentRunResponseUpdate> updates,
+ ResponseEventStream stream,
+ [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ {
+ ResponseUsage? accumulatedUsage = null;
+ OutputItemMessageBuilder? currentMessageBuilder = null;
+ TextContentBuilder? currentTextBuilder = null;
+ StringBuilder? accumulatedText = null;
+ string? previousMessageId = null;
+ bool hasTerminalEvent = false;
+ var executorItemIds = new Dictionary<string, string>();
+
+ await foreach (var update in updates.WithCancellation(cancellationToken).ConfigureAwait(false))
+ {
+ cancellationToken.ThrowIfCancellationRequested();
+
+ // Handle workflow events from RawRepresentation
+ if (update.RawRepresentation is WorkflowEvent workflowEvent)
+ {
+ // Close any open message builder before emitting workflow items
+ foreach (var evt in CloseCurrentMessage(currentMessageBuilder, currentTextBuilder, accumulatedText))
+ {
+ yield return evt;
+ }
+
+ currentTextBuilder = null;
+ currentMessageBuilder = null;
+ accumulatedText = null;
+ previousMessageId = null;
+
+ foreach (var evt in EmitWorkflowEvent(stream, workflowEvent, executorItemIds))
+ {
+ yield return evt;
+ }
+
+ continue;
+ }
+
+ foreach (var content in update.Contents)
+ {
+ switch (content)
+ {
+ case MeaiTextContent textContent:
+ {
+ if (!IsSameMessage(update.MessageId, previousMessageId) && currentMessageBuilder is not null)
+ {
+ foreach (var evt in CloseCurrentMessage(currentMessageBuilder, currentTextBuilder, accumulatedText))
+ {
+ yield return evt;
+ }
+
+ currentTextBuilder = null;
+ currentMessageBuilder = null;
+ accumulatedText = null;
+ }
+
+ previousMessageId = update.MessageId;
+
+ if (currentMessageBuilder is null)
+ {
+ currentMessageBuilder = stream.AddOutputItemMessage();
+ yield return currentMessageBuilder.EmitAdded();
+
+ currentTextBuilder = currentMessageBuilder.AddTextContent();
+ yield return currentTextBuilder.EmitAdded();
+
+ accumulatedText = new StringBuilder();
+ }
+
+ if (textContent.Text is { Length: > 0 })
+ {
+ accumulatedText!.Append(textContent.Text);
+ yield return currentTextBuilder!.EmitDelta(textContent.Text);
+ }
+
+ break;
+ }
+
+ case FunctionCallContent funcCall:
+ {
+ foreach (var evt in CloseCurrentMessage(currentMessageBuilder, currentTextBuilder, accumulatedText))
+ {
+ yield return evt;
+ }
+
+ currentTextBuilder = null;
+ currentMessageBuilder = null;
+ accumulatedText = null;
+ previousMessageId = null;
+
+ var callId = funcCall.CallId ?? Guid.NewGuid().ToString("N");
+ var funcBuilder = stream.AddOutputItemFunctionCall(funcCall.Name, callId);
+ yield return funcBuilder.EmitAdded();
+
+ var arguments = funcCall.Arguments is not null
+ ? JsonSerializer.Serialize(funcCall.Arguments)
+ : "{}";
+
+ yield return funcBuilder.EmitArgumentsDelta(arguments);
+ yield return funcBuilder.EmitArgumentsDone(arguments);
+ yield return funcBuilder.EmitDone();
+ break;
+ }
+
+ case TextReasoningContent reasoningContent:
+ {
+ foreach (var evt in CloseCurrentMessage(currentMessageBuilder, currentTextBuilder, accumulatedText))
+ {
+ yield return evt;
+ }
+
+ currentTextBuilder = null;
+ currentMessageBuilder = null;
+ accumulatedText = null;
+ previousMessageId = null;
+
+ var reasoningBuilder = stream.AddOutputItemReasoningItem();
+ yield return reasoningBuilder.EmitAdded();
+
+ var summaryPart = reasoningBuilder.AddSummaryPart();
+ yield return summaryPart.EmitAdded();
+
+ var text = reasoningContent.Text ?? string.Empty;
+ yield return summaryPart.EmitTextDelta(text);
+ yield return summaryPart.EmitTextDone(text);
+ yield return summaryPart.EmitDone();
+
+ yield return reasoningBuilder.EmitDone();
+ break;
+ }
+
+ case UsageContent usageContent when usageContent.Details is not null:
+ {
+ accumulatedUsage = ConvertUsage(usageContent.Details, accumulatedUsage);
+ break;
+ }
+
+ case ErrorContent errorContent:
+ {
+ foreach (var evt in CloseCurrentMessage(currentMessageBuilder, currentTextBuilder, accumulatedText))
+ {
+ yield return evt;
+ }
+
+ currentTextBuilder = null;
+ currentMessageBuilder = null;
+ accumulatedText = null;
+ previousMessageId = null;
+ hasTerminalEvent = true;
+
+ yield return stream.EmitFailed(
+ ResponseErrorCode.ServerError,
+ errorContent.Message ?? "An error occurred during agent execution.",
+ accumulatedUsage);
+ yield break;
+ }
+
+ case DataContent:
+ case UriContent:
+ // Image/audio/file content from agents is not currently supported
+ // as streaming output items in the Responses Server SDK builder pattern.
+ // These would need to be serialized as base64 or URL references.
+ break;
+
+ case FunctionResultContent:
+ // Function results are internal to the agent's tool-calling loop
+ // and are not emitted as output items in the response stream.
+ break;
+
+ default:
+ break;
+ }
+ }
+ }
+
+ // Close any remaining open message
+ foreach (var evt in CloseCurrentMessage(currentMessageBuilder, currentTextBuilder, accumulatedText))
+ {
+ yield return evt;
+ }
+
+ if (!hasTerminalEvent)
+ {
+ yield return stream.EmitCompleted(accumulatedUsage);
+ }
+ }
+
+ private static IEnumerable<ResponseStreamEvent> CloseCurrentMessage(
+ OutputItemMessageBuilder? messageBuilder,
+ TextContentBuilder? textBuilder,
+ StringBuilder? accumulatedText)
+ {
+ if (messageBuilder is null)
+ {
+ yield break;
+ }
+
+ if (textBuilder is not null)
+ {
+ var finalText = accumulatedText?.ToString() ?? string.Empty;
+ yield return textBuilder.EmitTextDone(finalText);
+ yield return textBuilder.EmitDone();
+ }
+
+ yield return messageBuilder.EmitDone();
+ }
+
+ private static bool IsSameMessage(string? currentId, string? previousId) =>
+ currentId is not { Length: > 0 } || previousId is not { Length: > 0 } || currentId == previousId;
+
+ private static ResponseUsage ConvertUsage(UsageDetails details, ResponseUsage? existing)
+ {
+ var inputTokens = details.InputTokenCount ?? 0;
+ var outputTokens = details.OutputTokenCount ?? 0;
+ var totalTokens = details.TotalTokenCount ?? 0;
+
+ if (existing is not null)
+ {
+ inputTokens += existing.InputTokens;
+ outputTokens += existing.OutputTokens;
+ totalTokens += existing.TotalTokens;
+ }
+
+ return AzureAIAgentServerResponsesModelFactory.ResponseUsage(
+ inputTokens: inputTokens,
+ outputTokens: outputTokens,
+ totalTokens: totalTokens);
+ }
+
+ private static IEnumerable<ResponseStreamEvent> EmitWorkflowEvent(
+ ResponseEventStream stream,
+ WorkflowEvent workflowEvent,
+ Dictionary<string, string> executorItemIds)
+ {
+ switch (workflowEvent)
+ {
+ case ExecutorInvokedEvent invokedEvent:
+ {
+ var itemId = GenerateItemId("wfa");
+ executorItemIds[invokedEvent.ExecutorId] = itemId;
+
+ var item = new WorkflowActionOutputItem(
+ kind: "InvokeExecutor",
+ actionId: invokedEvent.ExecutorId,
+ status: WorkflowActionOutputItemStatus.InProgress,
+ id: itemId);
+
+ var builder = stream.AddOutputItem(itemId);
+ yield return builder.EmitAdded(item);
+ yield return builder.EmitDone(item);
+ break;
+ }
+
+ case ExecutorCompletedEvent completedEvent:
+ {
+ var itemId = GenerateItemId("wfa");
+
+ var item = new WorkflowActionOutputItem(
+ kind: "InvokeExecutor",
+ actionId: completedEvent.ExecutorId,
+ status: WorkflowActionOutputItemStatus.Completed,
+ id: itemId);
+
+ var builder = stream.AddOutputItem(itemId);
+ yield return builder.EmitAdded(item);
+ yield return builder.EmitDone(item);
+ executorItemIds.Remove(completedEvent.ExecutorId);
+ break;
+ }
+
+ case ExecutorFailedEvent failedEvent:
+ {
+ var itemId = GenerateItemId("wfa");
+
+ var item = new WorkflowActionOutputItem(
+ kind: "InvokeExecutor",
+ actionId: failedEvent.ExecutorId,
+ status: WorkflowActionOutputItemStatus.Failed,
+ id: itemId);
+
+ var builder = stream.AddOutputItem(itemId);
+ yield return builder.EmitAdded(item);
+ yield return builder.EmitDone(item);
+ executorItemIds.Remove(failedEvent.ExecutorId);
+ break;
+ }
+
+ // Informational/lifecycle events — no SDK output needed.
+ // Note: AgentResponseUpdateEvent and WorkflowErrorEvent are unwrapped by
+ // WorkflowSession.InvokeStageAsync() into regular AgentResponseUpdate objects
+ // with populated Contents (TextContent, ErrorContent, etc.), so they flow
+ // through the normal content processing path above — not through this method.
+ case SuperStepStartedEvent:
+ case SuperStepCompletedEvent:
+ case WorkflowStartedEvent:
+ case WorkflowWarningEvent:
+ case RequestInfoEvent:
+ break;
+ }
+ }
+
+ ///
+ /// Generates a valid item ID matching the SDK's {prefix}_{50chars} format.
+ ///
+ private static string GenerateItemId(string prefix)
+ {
+ // SDK format: {prefix}_{50 char body}
+ var bytes = RandomNumberGenerator.GetBytes(25);
+ var body = Convert.ToHexString(bytes); // 50 hex chars, uppercase
+ return $"{prefix}_{body}";
+ }
+}
diff --git a/dotnet/src/Microsoft.Agents.AI.Foundry/Hosting/ServiceCollectionExtensions.cs b/dotnet/src/Microsoft.Agents.AI.Foundry/Hosting/ServiceCollectionExtensions.cs
new file mode 100644
index 0000000000..fe3b07c023
--- /dev/null
+++ b/dotnet/src/Microsoft.Agents.AI.Foundry/Hosting/ServiceCollectionExtensions.cs
@@ -0,0 +1,162 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Reflection;
+using System.Threading.Tasks;
+using Azure.AI.AgentServer.Responses;
+using Microsoft.AspNetCore.Builder;
+using Microsoft.AspNetCore.Http;
+using Microsoft.AspNetCore.Routing;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.DependencyInjection.Extensions;
+
+namespace Microsoft.Agents.AI.Foundry.Hosting;
+
+///
+/// Extension methods for registering agent-framework agents as Foundry Hosted Agents
+/// using the Azure AI Responses Server SDK.
+///
+public static class FoundryHostingExtensions
+{
+ ///
+ /// Registers the Azure AI Responses Server SDK and
+ /// as the . Agents are resolved from keyed DI services
+ /// using the agent.name or metadata["entity_id"] from incoming requests.
+ ///
+ ///
+ ///
+ /// This method calls AddResponsesServer() internally, so you do not need to
+ /// call it separately. Register your instances before calling this.
+ ///
+ ///
+ /// Example:
+ ///
+ /// builder.AddAIAgent("my-agent", ...);
+ /// builder.Services.AddFoundryResponses();
+ ///
+ /// var app = builder.Build();
+ /// app.MapFoundryResponses();
+ ///
+ ///
+ ///
+ /// The service collection.
+ /// The service collection for chaining.
+ public static IServiceCollection AddFoundryResponses(this IServiceCollection services)
+ {
+ ArgumentNullException.ThrowIfNull(services);
+ services.AddResponsesServer();
+ services.TryAddSingleton();
+ services.TryAddSingleton();
+ return services;
+ }
+
+ ///
+ /// Registers the Azure AI Responses Server SDK and a specific
+ /// as the handler for all incoming requests, regardless of the agent.name in the request.
+ ///
+ ///
+ ///
+ /// Use this overload when hosting a single agent. The provided agent instance is
+ /// registered as both a keyed service and the default .
+ /// This method calls AddResponsesServer() internally.
+ ///
+ ///
+ /// Example:
+ ///
+ /// builder.Services.AddFoundryResponses(myAgent);
+ ///
+ /// var app = builder.Build();
+ /// app.MapFoundryResponses();
+ ///
+ ///
+ ///
+ /// The service collection.
+ /// The agent instance to register.
+ /// The agent session store to use for managing agent sessions server-side. If null, an in-memory session store will be used.
+ /// The service collection for chaining.
+ public static IServiceCollection AddFoundryResponses(this IServiceCollection services, AIAgent agent, AgentSessionStore? agentSessionStore = null)
+ {
+ ArgumentNullException.ThrowIfNull(services);
+ ArgumentNullException.ThrowIfNull(agent);
+
+ services.AddResponsesServer();
+ agentSessionStore ??= new InMemoryAgentSessionStore();
+
+ if (!string.IsNullOrWhiteSpace(agent.Name))
+ {
+ services.TryAddKeyedSingleton(agent.Name, agent);
+ services.TryAddKeyedSingleton(agent.Name, agentSessionStore);
+ }
+
+ // Also register as the default (non-keyed) agent so requests
+ // without an agent name can resolve it (e.g., local dev tooling).
+ services.TryAddSingleton(agent);
+ services.TryAddSingleton(agentSessionStore);
+
+ services.TryAddSingleton();
+ return services;
+ }
+
+ ///
+ /// Maps the Responses API routes for the agent-framework handler to the endpoint routing pipeline.
+ ///
+ /// The endpoint route builder.
+ /// Optional route prefix (e.g., "/openai/v1"). Default: empty (routes at /responses).
+ /// The endpoint route builder for chaining.
+ public static IEndpointRouteBuilder MapFoundryResponses(this IEndpointRouteBuilder endpoints, string prefix = "")
+ {
+ ArgumentNullException.ThrowIfNull(endpoints);
+ endpoints.MapResponsesServer(prefix);
+
+ if (endpoints is IApplicationBuilder app)
+ {
+ // Ensure the middleware is added to the pipeline
+ app.UseMiddleware<AgentFrameworkUserAgentMiddleware>();
+ }
+
+ return endpoints;
+ }
+
+ private sealed class AgentFrameworkUserAgentMiddleware(RequestDelegate next)
+ {
+ private static readonly string s_userAgentValue = CreateUserAgentValue();
+
+ public async Task InvokeAsync(HttpContext context)
+ {
+ var headers = context.Request.Headers;
+ var userAgent = headers.UserAgent.ToString();
+
+ if (string.IsNullOrEmpty(userAgent))
+ {
+ headers.UserAgent = s_userAgentValue;
+ }
+ else if (!userAgent.Contains(s_userAgentValue, StringComparison.OrdinalIgnoreCase))
+ {
+ headers.UserAgent = $"{userAgent} {s_userAgentValue}";
+ }
+
+ await next(context).ConfigureAwait(false);
+ }
+
+ private static string CreateUserAgentValue()
+ {
+ const string Name = "agent-framework-dotnet";
+
+ if (typeof(AgentFrameworkUserAgentMiddleware).Assembly.GetCustomAttribute<AssemblyInformationalVersionAttribute>()?.InformationalVersion is string version)
+ {
+ int pos = version.IndexOf('+');
+ if (pos >= 0)
+ {
+ version = version.Substring(0, pos);
+ }
+
+ if (version.Length > 0)
+ {
+ return $"{Name}/{version}";
+ }
+ }
+
+ return Name;
+ }
+ }
+}
diff --git a/dotnet/src/Microsoft.Agents.AI.Foundry/Microsoft.Agents.AI.Foundry.csproj b/dotnet/src/Microsoft.Agents.AI.Foundry/Microsoft.Agents.AI.Foundry.csproj
index 670d140043..e3c1773941 100644
--- a/dotnet/src/Microsoft.Agents.AI.Foundry/Microsoft.Agents.AI.Foundry.csproj
+++ b/dotnet/src/Microsoft.Agents.AI.Foundry/Microsoft.Agents.AI.Foundry.csproj
@@ -3,7 +3,8 @@
truetrue
- $(NoWarn);OPENAI001
+ $(NoWarn);OPENAI001;MEAI001;NU1903
+ false
@@ -20,18 +21,32 @@
true
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Microsoft Agent Framework for Foundry Agents
@@ -43,4 +58,9 @@
+
+
+
+
+
diff --git a/dotnet/tests/Microsoft.Agents.AI.Foundry.UnitTests/Hosting/AgentFrameworkResponseHandlerTests.cs b/dotnet/tests/Microsoft.Agents.AI.Foundry.UnitTests/Hosting/AgentFrameworkResponseHandlerTests.cs
new file mode 100644
index 0000000000..169dc75e22
--- /dev/null
+++ b/dotnet/tests/Microsoft.Agents.AI.Foundry.UnitTests/Hosting/AgentFrameworkResponseHandlerTests.cs
@@ -0,0 +1,832 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Runtime.CompilerServices;
+using System.Text.Json;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure.AI.AgentServer.Responses;
+using Azure.AI.AgentServer.Responses.Models;
+using Microsoft.Agents.AI.Foundry.Hosting;
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Logging.Abstractions;
+using Moq;
+using MeaiTextContent = Microsoft.Extensions.AI.TextContent;
+
+namespace Microsoft.Agents.AI.Foundry.UnitTests.Hosting;
+
+public class AgentFrameworkResponseHandlerTests
+{
+ [Fact]
+ public async Task CreateAsync_WithDefaultAgent_ProducesStreamEventsAsync()
+ {
+ // Arrange
+ var agent = CreateTestAgent("Hello from the agent!");
+ var services = new ServiceCollection();
+ services.AddSingleton(new InMemoryAgentSessionStore());
+ services.AddSingleton(agent);
+ services.AddSingleton<ILogger<AgentFrameworkResponseHandler>>(NullLogger<AgentFrameworkResponseHandler>.Instance);
+ var sp = services.BuildServiceProvider();
+
+ var handler = new AgentFrameworkResponseHandler(sp, NullLogger.Instance);
+
+ var request = AzureAIAgentServerResponsesModelFactory.CreateResponse(model: "test");
+ request.Input = BinaryData.FromObjectAsJson(new[]
+ {
+ new { type = "message", id = "msg_1", status = "completed", role = "user",
+ content = new[] { new { type = "input_text", text = "Hello" } } }
+ });
+
+ var mockContext = new Mock("resp_" + new string('0', 46)) { CallBase = true };
+ mockContext.Setup(x => x.GetHistoryAsync(It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+ mockContext.Setup(x => x.GetInputItemsAsync(It.IsAny(), It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+
+ // Act
+ var events = new List();
+ await foreach (var evt in handler.CreateAsync(request, mockContext.Object, CancellationToken.None))
+ {
+ events.Add(evt);
+ }
+
+ // Assert
+ Assert.True(events.Count >= 4, $"Expected at least 4 events, got {events.Count}");
+ Assert.IsType(events[0]);
+ Assert.IsType(events[1]);
+ }
+
+ [Fact]
+ public async Task CreateAsync_WithKeyedAgent_ResolvesCorrectAgentAsync()
+ {
+ // Arrange
+ var agent = CreateTestAgent("Keyed agent response");
+ var services = new ServiceCollection();
+ services.AddSingleton(new InMemoryAgentSessionStore());
+ services.AddKeyedSingleton("my-agent", agent);
+ var sp = services.BuildServiceProvider();
+
+ var handler = new AgentFrameworkResponseHandler(sp, NullLogger.Instance);
+
+ var request = AzureAIAgentServerResponsesModelFactory.CreateResponse(
+ model: "test",
+ agentReference: new AgentReference("my-agent"));
+ request.Input = BinaryData.FromObjectAsJson(new[]
+ {
+ new { type = "message", id = "msg_1", status = "completed", role = "user",
+ content = new[] { new { type = "input_text", text = "Hello" } } }
+ });
+
+ var mockContext = new Mock("resp_" + new string('0', 46)) { CallBase = true };
+ mockContext.Setup(x => x.GetHistoryAsync(It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+ mockContext.Setup(x => x.GetInputItemsAsync(It.IsAny(), It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+
+ // Act
+ var events = new List();
+ await foreach (var evt in handler.CreateAsync(request, mockContext.Object, CancellationToken.None))
+ {
+ events.Add(evt);
+ }
+
+ // Assert - should have produced events from the keyed agent
+ Assert.True(events.Count >= 4);
+ Assert.IsType(events[0]);
+ }
+
+ [Fact]
+ public async Task CreateAsync_NoAgentRegistered_ThrowsInvalidOperationExceptionAsync()
+ {
+ // Arrange
+ var services = new ServiceCollection();
+ services.AddSingleton(new InMemoryAgentSessionStore());
+ var sp = services.BuildServiceProvider();
+
+ var handler = new AgentFrameworkResponseHandler(sp, NullLogger.Instance);
+
+ var request = AzureAIAgentServerResponsesModelFactory.CreateResponse(model: "test");
+ request.Input = BinaryData.FromObjectAsJson(new[]
+ {
+ new { type = "message", id = "msg_1", status = "completed", role = "user",
+ content = new[] { new { type = "input_text", text = "Hello" } } }
+ });
+
+ var mockContext = new Mock("resp_" + new string('0', 46)) { CallBase = true };
+ mockContext.Setup(x => x.GetHistoryAsync(It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+ mockContext.Setup(x => x.GetInputItemsAsync(It.IsAny(), It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+
+ // Act & Assert
+ await Assert.ThrowsAsync<InvalidOperationException>(async () =>
+ {
+ await foreach (var _ in handler.CreateAsync(request, mockContext.Object, CancellationToken.None))
+ {
+ }
+ });
+ }
+
+ [Fact]
+ public void Constructor_NullServiceProvider_ThrowsArgumentNullException()
+ {
+ Assert.Throws<ArgumentNullException>(
+ () => new AgentFrameworkResponseHandler(null!, NullLogger.Instance));
+ }
+
+ [Fact]
+ public void Constructor_NullLogger_ThrowsArgumentNullException()
+ {
+ var sp = new ServiceCollection().BuildServiceProvider();
+ Assert.Throws<ArgumentNullException>(
+ () => new AgentFrameworkResponseHandler(sp, null!));
+ }
+
+ [Fact]
+ public async Task CreateAsync_ResolvesAgentByModelFieldAsync()
+ {
+ // Arrange
+ var agent = CreateTestAgent("model agent");
+ var services = new ServiceCollection();
+ services.AddSingleton(new InMemoryAgentSessionStore());
+ services.AddKeyedSingleton("my-agent", agent);
+ var sp = services.BuildServiceProvider();
+
+ var handler = new AgentFrameworkResponseHandler(sp, NullLogger.Instance);
+
+ var request = AzureAIAgentServerResponsesModelFactory.CreateResponse(model: "my-agent");
+ request.Input = BinaryData.FromObjectAsJson(new[]
+ {
+ new { type = "message", id = "msg_1", status = "completed", role = "user",
+ content = new[] { new { type = "input_text", text = "Hello" } } }
+ });
+
+ var mockContext = new Mock("resp_" + new string('0', 46)) { CallBase = true };
+ mockContext.Setup(x => x.GetHistoryAsync(It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+ mockContext.Setup(x => x.GetInputItemsAsync(It.IsAny(), It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+
+ // Act
+ var events = new List();
+ await foreach (var evt in handler.CreateAsync(request, mockContext.Object, CancellationToken.None))
+ {
+ events.Add(evt);
+ }
+
+ // Assert
+ Assert.True(events.Count >= 4);
+ Assert.IsType(events[0]);
+ }
+
+ // Verifies that when the "model" field is empty, CreateAsync falls back to the
+ // "entity_id" metadata entry to resolve the keyed agent registration.
+ // NOTE(review): generic type arguments appear stripped by the diff rendering
+ // (Mock<T>, It.IsAny<T>(), Array.Empty<T>(), List<T>) — verify in the source.
+ [Fact]
+ public async Task CreateAsync_ResolvesAgentByEntityIdMetadataAsync()
+ {
+ // Arrange
+ var agent = CreateTestAgent("entity agent");
+ var services = new ServiceCollection();
+ services.AddSingleton(new InMemoryAgentSessionStore());
+ services.AddKeyedSingleton("entity-agent", agent);
+ var sp = services.BuildServiceProvider();
+
+ var handler = new AgentFrameworkResponseHandler(sp, NullLogger.Instance);
+
+ // Empty model forces the handler to consult the metadata instead.
+ var request = AzureAIAgentServerResponsesModelFactory.CreateResponse(model: "");
+ var metadata = new Metadata();
+ metadata.AdditionalProperties["entity_id"] = "entity-agent";
+ request.Metadata = metadata;
+ request.Input = BinaryData.FromObjectAsJson(new[]
+ {
+ new { type = "message", id = "msg_1", status = "completed", role = "user",
+ content = new[] { new { type = "input_text", text = "Hello" } } }
+ });
+
+ var mockContext = new Mock("resp_" + new string('0', 46)) { CallBase = true };
+ mockContext.Setup(x => x.GetHistoryAsync(It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+ mockContext.Setup(x => x.GetInputItemsAsync(It.IsAny(), It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+
+ // Act
+ var events = new List();
+ await foreach (var evt in handler.CreateAsync(request, mockContext.Object, CancellationToken.None))
+ {
+ events.Add(evt);
+ }
+
+ // Assert
+ // Same event-stream expectations as the model-field resolution test:
+ // the run completed and the first event is the "created" type.
+ Assert.True(events.Count >= 4);
+ Assert.IsType(events[0]);
+ }
+
+ // Verifies graceful degradation: when the request names an agent that has no
+ // keyed registration, the handler falls back to the default (unkeyed) agent
+ // registration instead of failing.
+ // NOTE(review): generic type arguments appear stripped by the diff rendering —
+ // verify Mock<T>/It.IsAny<T>()/Array.Empty<T>()/List<T> in the real source.
+ [Fact]
+ public async Task CreateAsync_NamedAgentNotFound_FallsBackToDefaultAsync()
+ {
+ // Arrange
+ var agent = CreateTestAgent("default agent");
+ var services = new ServiceCollection();
+ services.AddSingleton(new InMemoryAgentSessionStore());
+ // Unkeyed registration — this is the fallback the handler should pick.
+ services.AddSingleton(agent);
+ var sp = services.BuildServiceProvider();
+
+ var handler = new AgentFrameworkResponseHandler(sp, NullLogger.Instance);
+
+ // The agent reference names an id that is NOT registered as a keyed service.
+ var request = AzureAIAgentServerResponsesModelFactory.CreateResponse(
+ model: "test",
+ agentReference: new AgentReference("nonexistent-agent"));
+ request.Input = BinaryData.FromObjectAsJson(new[]
+ {
+ new { type = "message", id = "msg_1", status = "completed", role = "user",
+ content = new[] { new { type = "input_text", text = "Hello" } } }
+ });
+
+ var mockContext = new Mock("resp_" + new string('0', 46)) { CallBase = true };
+ mockContext.Setup(x => x.GetHistoryAsync(It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+ mockContext.Setup(x => x.GetInputItemsAsync(It.IsAny(), It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+
+ // Act
+ var events = new List();
+ await foreach (var evt in handler.CreateAsync(request, mockContext.Object, CancellationToken.None))
+ {
+ events.Add(evt);
+ }
+
+ // Assert
+ // A successful stream (>= 4 events, first one "created") proves the
+ // default agent handled the request despite the unknown agent name.
+ Assert.True(events.Count >= 4);
+ Assert.IsType(events[0]);
+ }
+
+ // Verifies the failure path when NO agent is registered at all and the request
+ // names one: the thrown exception message must include the requested agent
+ // name ("missing-agent") so the operator can diagnose the misconfiguration.
+ // NOTE(review): generic type arguments appear stripped by the diff rendering —
+ // e.g. Assert.ThrowsAsync<T> has lost its exception type; verify in source.
+ [Fact]
+ public async Task CreateAsync_NoAgentFound_ErrorMessageIncludesAgentNameAsync()
+ {
+ // Arrange
+ // Deliberately no agent registration — only the session store.
+ var services = new ServiceCollection();
+ services.AddSingleton(new InMemoryAgentSessionStore());
+ var sp = services.BuildServiceProvider();
+
+ var handler = new AgentFrameworkResponseHandler(sp, NullLogger.Instance);
+
+ var request = AzureAIAgentServerResponsesModelFactory.CreateResponse(
+ model: "test",
+ agentReference: new AgentReference("missing-agent"));
+ request.Input = BinaryData.FromObjectAsJson(new[]
+ {
+ new { type = "message", id = "msg_1", status = "completed", role = "user",
+ content = new[] { new { type = "input_text", text = "Hello" } } }
+ });
+
+ var mockContext = new Mock("resp_" + new string('0', 46)) { CallBase = true };
+ mockContext.Setup(x => x.GetHistoryAsync(It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+ mockContext.Setup(x => x.GetInputItemsAsync(It.IsAny(), It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+
+ // Act & Assert
+ // The stream must be enumerated to trigger resolution (iterator methods
+ // defer execution until the first MoveNextAsync).
+ var ex = await Assert.ThrowsAsync(async () =>
+ {
+ await foreach (var _ in handler.CreateAsync(request, mockContext.Object, CancellationToken.None))
+ {
+ }
+ });
+
+ Assert.Contains("missing-agent", ex.Message);
+ }
+
+ // Companion to the test above: when no agent is registered AND the request
+ // carries no agent name (empty model, no reference, no metadata), the error
+ // message is the generic "No agent name specified" rather than naming one.
+ // NOTE(review): generic type arguments appear stripped by the diff rendering —
+ // verify Assert.ThrowsAsync<T>/Mock<T>/It.IsAny<T>() in the real source.
+ [Fact]
+ public async Task CreateAsync_NoAgentNoName_ErrorMessageIsGenericAsync()
+ {
+ // Arrange
+ // No agent registered and no way to name one from the request.
+ var services = new ServiceCollection();
+ services.AddSingleton(new InMemoryAgentSessionStore());
+ var sp = services.BuildServiceProvider();
+
+ var handler = new AgentFrameworkResponseHandler(sp, NullLogger.Instance);
+
+ var request = AzureAIAgentServerResponsesModelFactory.CreateResponse(model: "");
+ request.Input = BinaryData.FromObjectAsJson(new[]
+ {
+ new { type = "message", id = "msg_1", status = "completed", role = "user",
+ content = new[] { new { type = "input_text", text = "Hello" } } }
+ });
+
+ var mockContext = new Mock("resp_" + new string('0', 46)) { CallBase = true };
+ mockContext.Setup(x => x.GetHistoryAsync(It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+ mockContext.Setup(x => x.GetInputItemsAsync(It.IsAny(), It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+
+ // Act & Assert
+ var ex = await Assert.ThrowsAsync(async () =>
+ {
+ await foreach (var _ in handler.CreateAsync(request, mockContext.Object, CancellationToken.None))
+ {
+ }
+ });
+
+ // Must match the handler's literal error text for the anonymous case.
+ Assert.Contains("No agent name specified", ex.Message);
+ }
+
+ // Verifies ordering inside CreateAsync: agent resolution happens BEFORE the
+ // first "created" event is emitted, so a resolution failure yields zero
+ // events (the caller never sees a half-started response stream).
+ // NOTE(review): generic type arguments appear stripped by the diff rendering —
+ // verify Mock<T>/It.IsAny<T>()/List<T> in the real source.
+ [Fact]
+ public async Task CreateAsync_AgentResolvedBeforeEmitCreated_ExceptionHasNoEventsAsync()
+ {
+ // Arrange
+ // No agent registered — resolution is guaranteed to fail.
+ var services = new ServiceCollection();
+ services.AddSingleton(new InMemoryAgentSessionStore());
+ var sp = services.BuildServiceProvider();
+
+ var handler = new AgentFrameworkResponseHandler(sp, NullLogger.Instance);
+
+ var request = AzureAIAgentServerResponsesModelFactory.CreateResponse(model: "test");
+ request.Input = BinaryData.FromObjectAsJson(new[]
+ {
+ new { type = "message", id = "msg_1", status = "completed", role = "user",
+ content = new[] { new { type = "input_text", text = "Hello" } } }
+ });
+
+ var mockContext = new Mock("resp_" + new string('0', 46)) { CallBase = true };
+ mockContext.Setup(x => x.GetHistoryAsync(It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+ mockContext.Setup(x => x.GetInputItemsAsync(It.IsAny(), It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+
+ // Act
+ // Manual try/catch (rather than Assert.ThrowsAsync) so we can also inspect
+ // how many events were yielded before the exception surfaced.
+ var events = new List();
+ bool threw = false;
+ try
+ {
+ await foreach (var evt in handler.CreateAsync(request, mockContext.Object, CancellationToken.None))
+ {
+ events.Add(evt);
+ }
+ }
+ catch (InvalidOperationException)
+ {
+ threw = true;
+ }
+
+ // Assert
+ // Both conditions matter: the failure is reported, and no events escaped.
+ Assert.True(threw);
+ Assert.Empty(events);
+ }
+
+ // Verifies that conversation history returned by the context is prepended to
+ // the message list handed to the agent: the assistant history item must come
+ // before the new user input. Uses CapturingAgent to record what the agent
+ // actually received.
+ // NOTE(review): generic type arguments appear stripped by the diff rendering —
+ // verify Mock<T>/It.IsAny<T>()/Array.Empty<T>()/List<T> in the real source.
+ [Fact]
+ public async Task CreateAsync_WithHistory_PrependsHistoryToMessagesAsync()
+ {
+ // Arrange
+ // CapturingAgent records the messages passed to it for later inspection.
+ var agent = new CapturingAgent();
+ var services = new ServiceCollection();
+ services.AddSingleton(new InMemoryAgentSessionStore());
+ services.AddSingleton(agent);
+ var sp = services.BuildServiceProvider();
+
+ var handler = new AgentFrameworkResponseHandler(sp, NullLogger.Instance);
+
+ var request = AzureAIAgentServerResponsesModelFactory.CreateResponse(model: "test");
+ request.Input = BinaryData.FromObjectAsJson(new[]
+ {
+ new { type = "message", id = "msg_1", status = "completed", role = "user",
+ content = new[] { new { type = "input_text", text = "Hello" } } }
+ });
+
+ // One prior assistant turn, served as history by the mocked context.
+ var historyItem = new OutputItemMessage(
+ id: "hist_1",
+ role: MessageRole.Assistant,
+ content: [new MessageContentOutputTextContent(
+ "Previous response",
+ Array.Empty(),
+ Array.Empty())],
+ status: MessageStatus.Completed);
+
+ var mockContext = new Mock("resp_" + new string('0', 46)) { CallBase = true };
+ mockContext.Setup(x => x.GetHistoryAsync(It.IsAny()))
+ .ReturnsAsync(new OutputItem[] { historyItem });
+ mockContext.Setup(x => x.GetInputItemsAsync(It.IsAny(), It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+
+ // Act
+ var events = new List();
+ await foreach (var evt in handler.CreateAsync(request, mockContext.Object, CancellationToken.None))
+ {
+ events.Add(evt);
+ }
+
+ // Assert
+ // History item + new input => at least two messages, and the history
+ // (assistant role) must be first, i.e. prepended.
+ Assert.NotNull(agent.CapturedMessages);
+ var messages = agent.CapturedMessages.ToList();
+ Assert.True(messages.Count >= 2);
+ Assert.Equal(ChatRole.Assistant, messages[0].Role);
+ }
+
+ // Verifies that when the context resolves input items, those take precedence
+ // over the raw request Input JSON: the agent receives only the single
+ // resolved item ("Resolved input"), not the raw "Raw input" message.
+ // NOTE(review): generic type arguments appear stripped by the diff rendering —
+ // verify Mock<T>/It.IsAny<T>()/Array.Empty<T>()/List<T> in the real source.
+ [Fact]
+ public async Task CreateAsync_WithInputItems_UsesResolvedInputItemsAsync()
+ {
+ // Arrange
+ var agent = new CapturingAgent();
+ var services = new ServiceCollection();
+ services.AddSingleton(new InMemoryAgentSessionStore());
+ services.AddSingleton(agent);
+ var sp = services.BuildServiceProvider();
+
+ var handler = new AgentFrameworkResponseHandler(sp, NullLogger.Instance);
+
+ var request = AzureAIAgentServerResponsesModelFactory.CreateResponse(model: "test");
+ // Raw input that should be IGNORED in favor of the resolved items below.
+ request.Input = BinaryData.FromObjectAsJson(new[]
+ {
+ new { type = "message", id = "msg_1", status = "completed", role = "user",
+ content = new[] { new { type = "input_text", text = "Raw input" } } }
+ });
+
+ // The item the mocked context hands back as the resolved input. Assistant
+ // role makes it distinguishable from the raw user message in the assert.
+ var inputItem = new ItemMessage(
+ MessageRole.Assistant,
+ [new MessageContentInputTextContent("Resolved input")]);
+
+ var mockContext = new Mock("resp_" + new string('0', 46)) { CallBase = true };
+ mockContext.Setup(x => x.GetHistoryAsync(It.IsAny()))
+ .ReturnsAsync(Array.Empty());
+ mockContext.Setup(x => x.GetInputItemsAsync(It.IsAny(), It.IsAny()))
+ .ReturnsAsync(new Item[] { inputItem });
+
+ // Act
+ var events = new List();
+ await foreach (var evt in handler.CreateAsync(request, mockContext.Object, CancellationToken.None))
+ {
+ events.Add(evt);
+ }
+
+ // Assert
+ // Exactly one message with the resolved item's assistant role proves the
+ // raw request input was not used.
+ Assert.NotNull(agent.CapturedMessages);
+ var messages = agent.CapturedMessages.ToList();
+ Assert.Single(messages);
+ Assert.Equal(ChatRole.Assistant, messages[0].Role);
+ }
+
+ [Fact]
+ public async Task CreateAsync_NoInputItems_FallsBackToRawRequestInputAsync()
+ {
+ // Arrange
+ var agent = new CapturingAgent();
+ var services = new ServiceCollection();
+ services.AddSingleton