From 7da0035c19d23db10b360a8ae141d8d203b59cd7 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 9 Mar 2026 15:01:35 +0000
Subject: [PATCH 1/9] Initial plan
From 10f461eaaf992bd6a83d5501ce7ca93d51819c8b Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 9 Mar 2026 15:31:37 +0000
Subject: [PATCH 2/9] Implement HostedToolSearchTool and
SearchableAIFunctionDeclaration for tool search support
Co-authored-by: stephentoub <2642209+stephentoub@users.noreply.github.com>
---
global.json | 2 +-
.../DelegatingAIFunctionDeclaration.cs | 2 +-
.../SearchableAIFunctionDeclaration.cs | 62 +++++++++++
.../Microsoft.Extensions.AI.Abstractions.json | 88 +++++++++++++++
.../Tools/HostedToolSearchTool.cs | 38 +++++++
.../OpenAIJsonContext.cs | 1 +
.../OpenAIResponsesChatClient.cs | 18 +++-
src/Shared/DiagnosticIds/DiagnosticIds.cs | 1 +
.../SearchableAIFunctionDeclarationTests.cs | 102 ++++++++++++++++++
.../Tools/HostedToolSearchToolTests.cs | 38 +++++++
.../OpenAIConversionTests.cs | 53 +++++++++
11 files changed, 402 insertions(+), 3 deletions(-)
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/SearchableAIFunctionDeclaration.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Tools/HostedToolSearchTool.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Functions/SearchableAIFunctionDeclarationTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Tools/HostedToolSearchToolTests.cs
diff --git a/global.json b/global.json
index 8decbcb016e..06ad2d78bce 100644
--- a/global.json
+++ b/global.json
@@ -23,4 +23,4 @@
"Microsoft.DotNet.Arcade.Sdk": "9.0.0-beta.26123.3",
"Microsoft.DotNet.Helix.Sdk": "9.0.0-beta.26123.3"
}
-}
+}
\ No newline at end of file
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/DelegatingAIFunctionDeclaration.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/DelegatingAIFunctionDeclaration.cs
index 38ebcf0ffd9..3d509aeff68 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/DelegatingAIFunctionDeclaration.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/DelegatingAIFunctionDeclaration.cs
@@ -11,7 +11,7 @@ namespace Microsoft.Extensions.AI;
///
/// Provides an optional base class for an that passes through calls to another instance.
///
-internal class DelegatingAIFunctionDeclaration : AIFunctionDeclaration // could be made public in the future if there's demand
+public class DelegatingAIFunctionDeclaration : AIFunctionDeclaration
{
///
/// Initializes a new instance of the class as a wrapper around .
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/SearchableAIFunctionDeclaration.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/SearchableAIFunctionDeclaration.cs
new file mode 100644
index 00000000000..c49b723e5bf
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/SearchableAIFunctionDeclaration.cs
@@ -0,0 +1,62 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+using Microsoft.Shared.DiagnosticIds;
+using Microsoft.Shared.Diagnostics;
+
+namespace Microsoft.Extensions.AI;
+
+///
+/// Represents an that signals to supporting AI services that deferred
+/// loading should be used when tool search is enabled. Only the function's name and description are sent initially;
+/// the full JSON schema is loaded on demand by the service when the model selects this tool.
+///
+///
+/// This class is a marker/decorator that signals to a supporting provider that the function should be
+/// sent with deferred loading (only name and description upfront). Use to create
+/// a complete tool list including a and wrapped functions.
+///
+[Experimental(DiagnosticIds.Experiments.AIToolSearch, UrlFormat = DiagnosticIds.UrlFormat)]
+public sealed class SearchableAIFunctionDeclaration : DelegatingAIFunctionDeclaration
+{
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// The represented by this instance.
+ /// An optional namespace for grouping related tools in the tool search index.
+ /// is .
+ public SearchableAIFunctionDeclaration(AIFunctionDeclaration innerFunction, string? namespaceName = null)
+ : base(innerFunction)
+ {
+ Namespace = namespaceName;
+ }
+
+ /// Gets the optional namespace this function belongs to, for grouping related tools in the tool search index.
+ public string? Namespace { get; }
+
+ ///
+ /// Creates a complete tool list with a and the given functions wrapped as .
+ ///
+ /// The functions to include as searchable tools.
+ /// An optional namespace for grouping related tools.
+ /// Any additional properties to pass to the .
+ /// A list of instances ready for use in .
+ /// is .
+ public static IList<AITool> CreateToolSet(
+ IEnumerable<AIFunctionDeclaration> functions,
+ string? namespaceName = null,
+ IReadOnlyDictionary<string, object?>? toolSearchProperties = null)
+ {
+ _ = Throw.IfNull(functions);
+
+ var tools = new List<AITool> { new HostedToolSearchTool(toolSearchProperties) };
+ foreach (var fn in functions)
+ {
+ tools.Add(new SearchableAIFunctionDeclaration(fn, namespaceName));
+ }
+
+ return tools;
+ }
+}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Microsoft.Extensions.AI.Abstractions.json b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Microsoft.Extensions.AI.Abstractions.json
index d9f97f58c97..07dbe187606 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Microsoft.Extensions.AI.Abstractions.json
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Microsoft.Extensions.AI.Abstractions.json
@@ -1619,6 +1619,50 @@
}
]
},
+ {
+ "Type": "class Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration : Microsoft.Extensions.AI.AIFunctionDeclaration",
+ "Stage": "Stable",
+ "Methods": [
+ {
+ "Member": "Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration.DelegatingAIFunctionDeclaration(Microsoft.Extensions.AI.AIFunctionDeclaration innerFunction);",
+ "Stage": "Stable"
+ },
+ {
+ "Member": "override object? Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration.GetService(System.Type serviceType, object? serviceKey = null);",
+ "Stage": "Stable"
+ },
+ {
+ "Member": "override string Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration.ToString();",
+ "Stage": "Stable"
+ }
+ ],
+ "Properties": [
+ {
+ "Member": "override System.Collections.Generic.IReadOnlyDictionary Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration.AdditionalProperties { get; }",
+ "Stage": "Stable"
+ },
+ {
+ "Member": "override string Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration.Description { get; }",
+ "Stage": "Stable"
+ },
+ {
+ "Member": "Microsoft.Extensions.AI.AIFunctionDeclaration Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration.InnerFunction { get; }",
+ "Stage": "Stable"
+ },
+ {
+ "Member": "override System.Text.Json.JsonElement Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration.JsonSchema { get; }",
+ "Stage": "Stable"
+ },
+ {
+ "Member": "override string Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration.Name { get; }",
+ "Stage": "Stable"
+ },
+ {
+ "Member": "override System.Text.Json.JsonElement? Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration.ReturnJsonSchema { get; }",
+ "Stage": "Stable"
+ }
+ ]
+ },
{
"Type": "class Microsoft.Extensions.AI.DelegatingChatClient : Microsoft.Extensions.AI.IChatClient, System.IDisposable",
"Stage": "Stable",
@@ -2305,6 +2349,30 @@
}
]
},
+ {
+ "Type": "class Microsoft.Extensions.AI.HostedToolSearchTool : Microsoft.Extensions.AI.AITool",
+ "Stage": "Experimental",
+ "Methods": [
+ {
+ "Member": "Microsoft.Extensions.AI.HostedToolSearchTool.HostedToolSearchTool();",
+ "Stage": "Experimental"
+ },
+ {
+ "Member": "Microsoft.Extensions.AI.HostedToolSearchTool.HostedToolSearchTool(System.Collections.Generic.IReadOnlyDictionary? additionalProperties);",
+ "Stage": "Experimental"
+ }
+ ],
+ "Properties": [
+ {
+ "Member": "override System.Collections.Generic.IReadOnlyDictionary Microsoft.Extensions.AI.HostedToolSearchTool.AdditionalProperties { get; }",
+ "Stage": "Experimental"
+ },
+ {
+ "Member": "override string Microsoft.Extensions.AI.HostedToolSearchTool.Name { get; }",
+ "Stage": "Experimental"
+ }
+ ]
+ },
{
"Type": "sealed class Microsoft.Extensions.AI.HostedVectorStoreContent : Microsoft.Extensions.AI.AIContent",
"Stage": "Stable",
@@ -2882,6 +2950,26 @@
}
]
},
+ {
+ "Type": "sealed class Microsoft.Extensions.AI.SearchableAIFunctionDeclaration : Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration",
+ "Stage": "Experimental",
+ "Methods": [
+ {
+ "Member": "Microsoft.Extensions.AI.SearchableAIFunctionDeclaration.SearchableAIFunctionDeclaration(Microsoft.Extensions.AI.AIFunctionDeclaration innerFunction, string? namespaceName = null);",
+ "Stage": "Experimental"
+ },
+ {
+ "Member": "static System.Collections.Generic.IList Microsoft.Extensions.AI.SearchableAIFunctionDeclaration.CreateToolSet(System.Collections.Generic.IEnumerable functions, string? namespaceName = null, System.Collections.Generic.IReadOnlyDictionary? toolSearchProperties = null);",
+ "Stage": "Experimental"
+ }
+ ],
+ "Properties": [
+ {
+ "Member": "string? Microsoft.Extensions.AI.SearchableAIFunctionDeclaration.Namespace { get; }",
+ "Stage": "Experimental"
+ }
+ ]
+ },
{
"Type": "static class Microsoft.Extensions.AI.SpeechToTextClientExtensions",
"Stage": "Experimental",
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Tools/HostedToolSearchTool.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Tools/HostedToolSearchTool.cs
new file mode 100644
index 00000000000..4fd90e06449
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Tools/HostedToolSearchTool.cs
@@ -0,0 +1,38 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+using Microsoft.Shared.DiagnosticIds;
+
+namespace Microsoft.Extensions.AI;
+
+/// Represents a hosted tool that can be specified to an AI service to enable it to search for and selectively load tool definitions on demand.
+///
+/// This tool does not itself implement tool search. It is a marker that can be used to inform a service
+/// that tool search should be enabled, reducing token usage by deferring full tool schema loading until the model requests it.
+///
+[Experimental(DiagnosticIds.Experiments.AIToolSearch, UrlFormat = DiagnosticIds.UrlFormat)]
+public class HostedToolSearchTool : AITool
+{
+ /// Any additional properties associated with the tool.
+ private IReadOnlyDictionary<string, object?>? _additionalProperties;
+
+ /// Initializes a new instance of the class.
+ public HostedToolSearchTool()
+ {
+ }
+
+ /// Initializes a new instance of the class.
+ /// Any additional properties associated with the tool.
+ public HostedToolSearchTool(IReadOnlyDictionary<string, object?>? additionalProperties)
+ {
+ _additionalProperties = additionalProperties;
+ }
+
+ ///
+ public override string Name => "tool_search";
+
+ ///
+ public override IReadOnlyDictionary<string, object?> AdditionalProperties => _additionalProperties ?? base.AdditionalProperties;
+}
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIJsonContext.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIJsonContext.cs
index 9a040864613..fcdf957762b 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIJsonContext.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIJsonContext.cs
@@ -17,6 +17,7 @@ namespace Microsoft.Extensions.AI;
WriteIndented = true)]
[JsonSerializable(typeof(OpenAIClientExtensions.ToolJson))]
[JsonSerializable(typeof(IDictionary))]
+[JsonSerializable(typeof(string))]
[JsonSerializable(typeof(string[]))]
[JsonSerializable(typeof(IEnumerable))]
[JsonSerializable(typeof(JsonElement))]
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
index de1cdd4b127..2002f269a5d 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
@@ -50,6 +50,9 @@ private static readonly Func>));
+ /// Cached deserialized for the tool_search hosted tool.
+ private static ResponseTool? s_toolSearchResponseTool;
+
/// Metadata about the client.
private readonly ChatClientMetadata _metadata;
@@ -690,7 +693,20 @@ void IDisposable.Dispose()
return rtat.Tool;
case AIFunctionDeclaration aiFunction:
- return ToResponseTool(aiFunction, options);
+ var functionTool = ToResponseTool(aiFunction, options);
+ if (tool.GetService() is { } searchable)
+ {
+ functionTool.Patch.Set("$.defer_loading"u8, JsonSerializer.SerializeToUtf8Bytes(true).AsSpan());
+ if (searchable.Namespace is { } ns)
+ {
+ functionTool.Patch.Set("$.namespace"u8, JsonSerializer.SerializeToUtf8Bytes(ns, OpenAIJsonContext.Default.String).AsSpan());
+ }
+ }
+
+ return functionTool;
+
+ case HostedToolSearchTool:
+ return s_toolSearchResponseTool ??= ModelReaderWriter.Read(BinaryData.FromString("""{"type": "tool_search"}"""))!;
case HostedWebSearchTool webSearchTool:
return new WebSearchTool
diff --git a/src/Shared/DiagnosticIds/DiagnosticIds.cs b/src/Shared/DiagnosticIds/DiagnosticIds.cs
index 92dd69462c9..27d1048e9f8 100644
--- a/src/Shared/DiagnosticIds/DiagnosticIds.cs
+++ b/src/Shared/DiagnosticIds/DiagnosticIds.cs
@@ -57,6 +57,7 @@ internal static class Experiments
internal const string AIResponseContinuations = AIExperiments;
internal const string AICodeInterpreter = AIExperiments;
internal const string AIWebSearch = AIExperiments;
+ internal const string AIToolSearch = AIExperiments;
internal const string AIRealTime = AIExperiments;
internal const string AIFiles = AIExperiments;
diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Functions/SearchableAIFunctionDeclarationTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Functions/SearchableAIFunctionDeclarationTests.cs
new file mode 100644
index 00000000000..98af5640597
--- /dev/null
+++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Functions/SearchableAIFunctionDeclarationTests.cs
@@ -0,0 +1,102 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using Xunit;
+
+namespace Microsoft.Extensions.AI.Functions;
+
+public class SearchableAIFunctionDeclarationTests
+{
+ [Fact]
+ public void Constructor_NullFunction_ThrowsArgumentNullException()
+ {
+ Assert.Throws("innerFunction", () => new SearchableAIFunctionDeclaration(null!));
+ }
+
+ [Fact]
+ public void Constructor_DelegatesToInnerFunction_Properties()
+ {
+ var inner = AIFunctionFactory.Create(() => 42, "MyFunc", "My description");
+ var wrapper = new SearchableAIFunctionDeclaration(inner);
+
+ Assert.Equal(inner.Name, wrapper.Name);
+ Assert.Equal(inner.Description, wrapper.Description);
+ Assert.Equal(inner.JsonSchema, wrapper.JsonSchema);
+ Assert.Equal(inner.ReturnJsonSchema, wrapper.ReturnJsonSchema);
+ Assert.Same(inner.AdditionalProperties, wrapper.AdditionalProperties);
+ Assert.Equal(inner.ToString(), wrapper.ToString());
+ }
+
+ [Fact]
+ public void Namespace_DefaultIsNull()
+ {
+ var inner = AIFunctionFactory.Create(() => 42);
+ var wrapper = new SearchableAIFunctionDeclaration(inner);
+
+ Assert.Null(wrapper.Namespace);
+ }
+
+ [Fact]
+ public void Namespace_Roundtrips()
+ {
+ var inner = AIFunctionFactory.Create(() => 42);
+ var wrapper = new SearchableAIFunctionDeclaration(inner, namespaceName: "myNamespace");
+
+ Assert.Equal("myNamespace", wrapper.Namespace);
+ }
+
+ [Fact]
+ public void GetService_ReturnsSelf()
+ {
+ var inner = AIFunctionFactory.Create(() => 42);
+ var wrapper = new SearchableAIFunctionDeclaration(inner);
+
+ Assert.Same(wrapper, wrapper.GetService());
+ }
+
+ [Fact]
+ public void CreateToolSet_NullFunctions_Throws()
+ {
+ Assert.Throws("functions", () => SearchableAIFunctionDeclaration.CreateToolSet(null!));
+ }
+
+ [Fact]
+ public void CreateToolSet_ReturnsHostedToolSearchToolFirst_ThenWrappedFunctions()
+ {
+ var f1 = AIFunctionFactory.Create(() => 1, "F1");
+ var f2 = AIFunctionFactory.Create(() => 2, "F2");
+
+ var tools = SearchableAIFunctionDeclaration.CreateToolSet([f1, f2]);
+
+ Assert.Equal(3, tools.Count);
+ Assert.IsType(tools[0]);
+ Assert.Empty(tools[0].AdditionalProperties);
+
+ var s1 = Assert.IsType(tools[1]);
+ Assert.Equal("F1", s1.Name);
+ Assert.Null(s1.Namespace);
+
+ var s2 = Assert.IsType(tools[2]);
+ Assert.Equal("F2", s2.Name);
+ Assert.Null(s2.Namespace);
+ }
+
+ [Fact]
+ public void CreateToolSet_WithNamespaceAndProperties_Roundtrips()
+ {
+ var f1 = AIFunctionFactory.Create(() => 1, "F1");
+ var props = new Dictionary<string, object?> { ["key"] = "value" };
+
+ var tools = SearchableAIFunctionDeclaration.CreateToolSet([f1], namespaceName: "ns", toolSearchProperties: props);
+
+ Assert.Equal(2, tools.Count);
+
+ var hostTool = Assert.IsType(tools[0]);
+ Assert.Same(props, hostTool.AdditionalProperties);
+
+ var s1 = Assert.IsType(tools[1]);
+ Assert.Equal("ns", s1.Namespace);
+ }
+}
diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Tools/HostedToolSearchToolTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Tools/HostedToolSearchToolTests.cs
new file mode 100644
index 00000000000..f3a32dc8c84
--- /dev/null
+++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Tools/HostedToolSearchToolTests.cs
@@ -0,0 +1,38 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Collections.Generic;
+using Xunit;
+
+namespace Microsoft.Extensions.AI;
+
+public class HostedToolSearchToolTests
+{
+ [Fact]
+ public void Constructor_Roundtrips()
+ {
+ var tool = new HostedToolSearchTool();
+ Assert.Equal("tool_search", tool.Name);
+ Assert.Empty(tool.Description);
+ Assert.Empty(tool.AdditionalProperties);
+ Assert.Equal(tool.Name, tool.ToString());
+ }
+
+ [Fact]
+ public void Constructor_AdditionalProperties_Roundtrips()
+ {
+ var props = new Dictionary<string, object?> { ["key"] = "value" };
+ var tool = new HostedToolSearchTool(props);
+
+ Assert.Equal("tool_search", tool.Name);
+ Assert.Same(props, tool.AdditionalProperties);
+ }
+
+ [Fact]
+ public void Constructor_NullAdditionalProperties_UsesEmpty()
+ {
+ var tool = new HostedToolSearchTool(null);
+
+ Assert.Empty(tool.AdditionalProperties);
+ }
+}
diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIConversionTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIConversionTests.cs
index 9ffd62e72b2..b55e27b4478 100644
--- a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIConversionTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIConversionTests.cs
@@ -588,6 +588,59 @@ public void AsOpenAIResponseTool_WithUnknownToolType_ReturnsNull()
Assert.Null(result);
}
+ [Fact]
+ public void AsOpenAIResponseTool_WithHostedToolSearchTool_ProducesValidToolSearchTool()
+ {
+ var toolSearchTool = new HostedToolSearchTool();
+
+ var result = toolSearchTool.AsOpenAIResponseTool();
+
+ Assert.NotNull(result);
+ var json = ModelReaderWriter.Write(result, ModelReaderWriterOptions.Json).ToString();
+ Assert.Contains("\"type\"", json);
+ Assert.Contains("tool_search", json);
+ }
+
+ [Fact]
+ public void AsOpenAIResponseTool_WithHostedToolSearchTool_CachesResult()
+ {
+ var result1 = new HostedToolSearchTool().AsOpenAIResponseTool();
+ var result2 = new HostedToolSearchTool().AsOpenAIResponseTool();
+
+ Assert.NotNull(result1);
+ Assert.Same(result1, result2);
+ }
+
+ [Fact]
+ public void AsOpenAIResponseTool_WithSearchableAIFunctionDeclaration_PatchesDeferLoading()
+ {
+ var inner = AIFunctionFactory.Create(() => 42, "MyFunc", "My description");
+ var searchable = new SearchableAIFunctionDeclaration(inner);
+
+ var result = ((AITool)searchable).AsOpenAIResponseTool();
+
+ Assert.NotNull(result);
+ var functionTool = Assert.IsType(result);
+ var json = ModelReaderWriter.Write(functionTool, ModelReaderWriterOptions.Json).ToString();
+ Assert.Contains("defer_loading", json);
+ Assert.Contains("true", json);
+ }
+
+ [Fact]
+ public void AsOpenAIResponseTool_WithSearchableAIFunctionDeclarationWithNamespace_PatchesNamespace()
+ {
+ var inner = AIFunctionFactory.Create(() => 42, "MyFunc", "My description");
+ var searchable = new SearchableAIFunctionDeclaration(inner, namespaceName: "myNamespace");
+
+ var result = ((AITool)searchable).AsOpenAIResponseTool();
+
+ Assert.NotNull(result);
+ var functionTool = Assert.IsType(result);
+ var json = ModelReaderWriter.Write(functionTool, ModelReaderWriterOptions.Json).ToString();
+ Assert.Contains("namespace", json);
+ Assert.Contains("myNamespace", json);
+ }
+
[Fact]
public void AsOpenAIResponseTool_WithNullTool_ThrowsArgumentNullException()
{
From f295d106cdbfb7948a1cb3879da951cf0126b523 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 9 Mar 2026 16:48:37 +0000
Subject: [PATCH 3/9] Redesign: consolidate tool search into
HostedToolSearchTool with DeferredTools/NonDeferredTools
- Redesign HostedToolSearchTool with DeferredTools/NonDeferredTools properties
- Remove SearchableAIFunctionDeclaration (no longer needed)
- Revert DelegatingAIFunctionDeclaration to internal
- Update OpenAI provider: use HostedToolSearchTool enable/disable logic for defer_loading
- Add ChatOptions parameter to AsOpenAIResponseTool extension method
- Use AOT-safe ModelReaderWriter.Read with OpenAIContext.Default
- Update API baselines and tests
Co-authored-by: stephentoub <2642209+stephentoub@users.noreply.github.com>
---
global.json | 2 +-
.../DelegatingAIFunctionDeclaration.cs | 2 +-
.../SearchableAIFunctionDeclaration.cs | 62 -----------
.../Microsoft.Extensions.AI.Abstractions.json | 72 ++-----------
.../Tools/HostedToolSearchTool.cs | 35 ++++++
.../Microsoft.Extensions.AI.OpenAI.json | 2 +-
...icrosoftExtensionsAIResponsesExtensions.cs | 5 +-
.../OpenAIJsonContext.cs | 1 -
.../OpenAIResponsesChatClient.cs | 41 +++++--
.../SearchableAIFunctionDeclarationTests.cs | 102 ------------------
.../Tools/HostedToolSearchToolTests.cs | 32 ++++++
.../OpenAIConversionTests.cs | 74 +++++++++++--
12 files changed, 178 insertions(+), 252 deletions(-)
delete mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/SearchableAIFunctionDeclaration.cs
delete mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Functions/SearchableAIFunctionDeclarationTests.cs
diff --git a/global.json b/global.json
index 06ad2d78bce..8decbcb016e 100644
--- a/global.json
+++ b/global.json
@@ -23,4 +23,4 @@
"Microsoft.DotNet.Arcade.Sdk": "9.0.0-beta.26123.3",
"Microsoft.DotNet.Helix.Sdk": "9.0.0-beta.26123.3"
}
-}
\ No newline at end of file
+}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/DelegatingAIFunctionDeclaration.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/DelegatingAIFunctionDeclaration.cs
index 3d509aeff68..38ebcf0ffd9 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/DelegatingAIFunctionDeclaration.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/DelegatingAIFunctionDeclaration.cs
@@ -11,7 +11,7 @@ namespace Microsoft.Extensions.AI;
///
/// Provides an optional base class for an that passes through calls to another instance.
///
-public class DelegatingAIFunctionDeclaration : AIFunctionDeclaration
+internal class DelegatingAIFunctionDeclaration : AIFunctionDeclaration // could be made public in the future if there's demand
{
///
/// Initializes a new instance of the class as a wrapper around .
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/SearchableAIFunctionDeclaration.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/SearchableAIFunctionDeclaration.cs
deleted file mode 100644
index c49b723e5bf..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/SearchableAIFunctionDeclaration.cs
+++ /dev/null
@@ -1,62 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
-using Microsoft.Shared.DiagnosticIds;
-using Microsoft.Shared.Diagnostics;
-
-namespace Microsoft.Extensions.AI;
-
-///
-/// Represents an that signals to supporting AI services that deferred
-/// loading should be used when tool search is enabled. Only the function's name and description are sent initially;
-/// the full JSON schema is loaded on demand by the service when the model selects this tool.
-///
-///
-/// This class is a marker/decorator that signals to a supporting provider that the function should be
-/// sent with deferred loading (only name and description upfront). Use to create
-/// a complete tool list including a and wrapped functions.
-///
-[Experimental(DiagnosticIds.Experiments.AIToolSearch, UrlFormat = DiagnosticIds.UrlFormat)]
-public sealed class SearchableAIFunctionDeclaration : DelegatingAIFunctionDeclaration
-{
- ///
- /// Initializes a new instance of the class.
- ///
- /// The represented by this instance.
- /// An optional namespace for grouping related tools in the tool search index.
- /// is .
- public SearchableAIFunctionDeclaration(AIFunctionDeclaration innerFunction, string? namespaceName = null)
- : base(innerFunction)
- {
- Namespace = namespaceName;
- }
-
- /// Gets the optional namespace this function belongs to, for grouping related tools in the tool search index.
- public string? Namespace { get; }
-
- ///
- /// Creates a complete tool list with a and the given functions wrapped as .
- ///
- /// The functions to include as searchable tools.
- /// An optional namespace for grouping related tools.
- /// Any additional properties to pass to the .
- /// A list of instances ready for use in .
- /// is .
- public static IList<AITool> CreateToolSet(
- IEnumerable<AIFunctionDeclaration> functions,
- string? namespaceName = null,
- IReadOnlyDictionary<string, object?>? toolSearchProperties = null)
- {
- _ = Throw.IfNull(functions);
-
- var tools = new List<AITool> { new HostedToolSearchTool(toolSearchProperties) };
- foreach (var fn in functions)
- {
- tools.Add(new SearchableAIFunctionDeclaration(fn, namespaceName));
- }
-
- return tools;
- }
-}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Microsoft.Extensions.AI.Abstractions.json b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Microsoft.Extensions.AI.Abstractions.json
index 07dbe187606..8647d2e14a3 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Microsoft.Extensions.AI.Abstractions.json
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Microsoft.Extensions.AI.Abstractions.json
@@ -1619,50 +1619,6 @@
}
]
},
- {
- "Type": "class Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration : Microsoft.Extensions.AI.AIFunctionDeclaration",
- "Stage": "Stable",
- "Methods": [
- {
- "Member": "Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration.DelegatingAIFunctionDeclaration(Microsoft.Extensions.AI.AIFunctionDeclaration innerFunction);",
- "Stage": "Stable"
- },
- {
- "Member": "override object? Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration.GetService(System.Type serviceType, object? serviceKey = null);",
- "Stage": "Stable"
- },
- {
- "Member": "override string Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration.ToString();",
- "Stage": "Stable"
- }
- ],
- "Properties": [
- {
- "Member": "override System.Collections.Generic.IReadOnlyDictionary Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration.AdditionalProperties { get; }",
- "Stage": "Stable"
- },
- {
- "Member": "override string Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration.Description { get; }",
- "Stage": "Stable"
- },
- {
- "Member": "Microsoft.Extensions.AI.AIFunctionDeclaration Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration.InnerFunction { get; }",
- "Stage": "Stable"
- },
- {
- "Member": "override System.Text.Json.JsonElement Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration.JsonSchema { get; }",
- "Stage": "Stable"
- },
- {
- "Member": "override string Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration.Name { get; }",
- "Stage": "Stable"
- },
- {
- "Member": "override System.Text.Json.JsonElement? Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration.ReturnJsonSchema { get; }",
- "Stage": "Stable"
- }
- ]
- },
{
"Type": "class Microsoft.Extensions.AI.DelegatingChatClient : Microsoft.Extensions.AI.IChatClient, System.IDisposable",
"Stage": "Stable",
@@ -2367,9 +2323,17 @@
"Member": "override System.Collections.Generic.IReadOnlyDictionary Microsoft.Extensions.AI.HostedToolSearchTool.AdditionalProperties { get; }",
"Stage": "Experimental"
},
+ {
+ "Member": "System.Collections.Generic.IList? Microsoft.Extensions.AI.HostedToolSearchTool.DeferredTools { get; set; }",
+ "Stage": "Experimental"
+ },
{
"Member": "override string Microsoft.Extensions.AI.HostedToolSearchTool.Name { get; }",
"Stage": "Experimental"
+ },
+ {
+ "Member": "System.Collections.Generic.IList? Microsoft.Extensions.AI.HostedToolSearchTool.NonDeferredTools { get; set; }",
+ "Stage": "Experimental"
}
]
},
@@ -2950,26 +2914,6 @@
}
]
},
- {
- "Type": "sealed class Microsoft.Extensions.AI.SearchableAIFunctionDeclaration : Microsoft.Extensions.AI.DelegatingAIFunctionDeclaration",
- "Stage": "Experimental",
- "Methods": [
- {
- "Member": "Microsoft.Extensions.AI.SearchableAIFunctionDeclaration.SearchableAIFunctionDeclaration(Microsoft.Extensions.AI.AIFunctionDeclaration innerFunction, string? namespaceName = null);",
- "Stage": "Experimental"
- },
- {
- "Member": "static System.Collections.Generic.IList Microsoft.Extensions.AI.SearchableAIFunctionDeclaration.CreateToolSet(System.Collections.Generic.IEnumerable functions, string? namespaceName = null, System.Collections.Generic.IReadOnlyDictionary? toolSearchProperties = null);",
- "Stage": "Experimental"
- }
- ],
- "Properties": [
- {
- "Member": "string? Microsoft.Extensions.AI.SearchableAIFunctionDeclaration.Namespace { get; }",
- "Stage": "Experimental"
- }
- ]
- },
{
"Type": "static class Microsoft.Extensions.AI.SpeechToTextClientExtensions",
"Stage": "Experimental",
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Tools/HostedToolSearchTool.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Tools/HostedToolSearchTool.cs
index 4fd90e06449..50c2465e465 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Tools/HostedToolSearchTool.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Tools/HostedToolSearchTool.cs
@@ -9,8 +9,15 @@ namespace Microsoft.Extensions.AI;
/// Represents a hosted tool that can be specified to an AI service to enable it to search for and selectively load tool definitions on demand.
///
+///
/// This tool does not itself implement tool search. It is a marker that can be used to inform a service
/// that tool search should be enabled, reducing token usage by deferring full tool schema loading until the model requests it.
+///
+///
+/// By default, when a is present in the tools list, all other tools are treated
+/// as having deferred loading enabled. Use and to control
+/// which tools have deferred loading on a per-tool basis.
+///
///
[Experimental(DiagnosticIds.Experiments.AIToolSearch, UrlFormat = DiagnosticIds.UrlFormat)]
public class HostedToolSearchTool : AITool
@@ -35,4 +42,32 @@ public HostedToolSearchTool(IReadOnlyDictionary? additionalProp
///
public override IReadOnlyDictionary<string, object?> AdditionalProperties => _additionalProperties ?? base.AdditionalProperties;
+
+ ///
+ /// Gets or sets the list of tool names for which deferred loading should be enabled.
+ ///
+ ///
+ ///
+ /// The default value is , which enables deferred loading for all tools in the tools list.
+ ///
+ ///
+ /// When non-null, only tools whose names appear in this list will have deferred loading enabled,
+ /// unless they also appear in .
+ ///
+ ///
+ public IList<string>? DeferredTools { get; set; }
+
+ ///
+ /// Gets or sets the list of tool names for which deferred loading should be disabled.
+ ///
+ ///
+ ///
+ /// The default value is , which means no tools are excluded from deferred loading.
+ ///
+ ///
+ /// When non-null, tools whose names appear in this list will not have deferred loading enabled,
+ /// even if they also appear in .
+ ///
+ ///
+ public IList<string>? NonDeferredTools { get; set; }
}
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/Microsoft.Extensions.AI.OpenAI.json b/src/Libraries/Microsoft.Extensions.AI.OpenAI/Microsoft.Extensions.AI.OpenAI.json
index 7d0e39492d9..2decf2dc025 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/Microsoft.Extensions.AI.OpenAI.json
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/Microsoft.Extensions.AI.OpenAI.json
@@ -100,7 +100,7 @@
"Stage": "Experimental"
},
{
- "Member": "static OpenAI.Responses.ResponseTool? OpenAI.Responses.MicrosoftExtensionsAIResponsesExtensions.AsOpenAIResponseTool(this Microsoft.Extensions.AI.AITool tool);",
+ "Member": "static OpenAI.Responses.ResponseTool? OpenAI.Responses.MicrosoftExtensionsAIResponsesExtensions.AsOpenAIResponseTool(this Microsoft.Extensions.AI.AITool tool, Microsoft.Extensions.AI.ChatOptions? options = null);",
"Stage": "Experimental"
}
]
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs
index 419b65aaecc..870cdbbac19 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs
@@ -26,14 +26,15 @@ public static FunctionTool AsOpenAIResponseTool(this AIFunctionDeclaration funct
/// Creates an OpenAI from an .
/// The tool to convert.
+ /// Optional chat options providing context for the conversion. When the tools list includes a , function tools may have deferred loading applied.
/// An OpenAI representing or if there is no mapping.
/// is .
///
/// This method is only able to create s for types
/// it's aware of, namely all of those available from the Microsoft.Extensions.AI.Abstractions library.
///
- public static ResponseTool? AsOpenAIResponseTool(this AITool tool) =>
- OpenAIResponsesChatClient.ToResponseTool(Throw.IfNull(tool));
+ public static ResponseTool? AsOpenAIResponseTool(this AITool tool, ChatOptions? options = null) =>
+ OpenAIResponsesChatClient.ToResponseTool(Throw.IfNull(tool), options);
///
/// Creates an OpenAI from a .
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIJsonContext.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIJsonContext.cs
index fcdf957762b..9a040864613 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIJsonContext.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIJsonContext.cs
@@ -17,7 +17,6 @@ namespace Microsoft.Extensions.AI;
WriteIndented = true)]
[JsonSerializable(typeof(OpenAIClientExtensions.ToolJson))]
[JsonSerializable(typeof(IDictionary))]
-[JsonSerializable(typeof(string))]
[JsonSerializable(typeof(string[]))]
[JsonSerializable(typeof(IEnumerable))]
[JsonSerializable(typeof(JsonElement))]
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
index 2002f269a5d..e422c5bfc45 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
@@ -18,6 +18,7 @@
using System.Threading.Tasks;
using Microsoft.Shared.DiagnosticIds;
using Microsoft.Shared.Diagnostics;
+using OpenAI;
using OpenAI.Responses;
#pragma warning disable S1226 // Method parameters, caught exceptions and foreach variables' initial values should not be ignored
@@ -51,7 +52,7 @@ private static readonly Func>));
/// Cached deserialized for the tool_search hosted tool.
- private static ResponseTool? s_toolSearchResponseTool;
+ private static ResponseTool? _toolSearchResponseTool;
/// Metadata about the client.
private readonly ChatClientMetadata _metadata;
@@ -694,19 +695,15 @@ void IDisposable.Dispose()
case AIFunctionDeclaration aiFunction:
var functionTool = ToResponseTool(aiFunction, options);
- if (tool.GetService<SearchableAIFunctionDeclaration>() is { } searchable)
+ if (FindToolSearchTool(options) is { } toolSearch && IsDeferredLoading(aiFunction.Name, toolSearch))
{
- functionTool.Patch.Set("$.defer_loading"u8, JsonSerializer.SerializeToUtf8Bytes(true).AsSpan());
- if (searchable.Namespace is { } ns)
- {
- functionTool.Patch.Set("$.namespace"u8, JsonSerializer.SerializeToUtf8Bytes(ns, OpenAIJsonContext.Default.String).AsSpan());
- }
+ functionTool.Patch.Set("$.defer_loading"u8, "true"u8);
}
return functionTool;
case HostedToolSearchTool:
- return s_toolSearchResponseTool ??= ModelReaderWriter.Read<ResponseTool>(BinaryData.FromString("""{"type": "tool_search"}"""))!;
+ return _toolSearchResponseTool ??= ModelReaderWriter.Read<ResponseTool>(BinaryData.FromString("""{"type": "tool_search"}"""), ModelReaderWriterOptions.Json, OpenAIContext.Default)!;
case HostedWebSearchTool webSearchTool:
return new WebSearchTool
@@ -1817,6 +1814,34 @@ private static ImageGenerationToolResultContent GetImageGenerationResult(Streami
return null;
}
+ /// Finds the in the options' tools list, if present.
+ private static HostedToolSearchTool? FindToolSearchTool(ChatOptions? options)
+ {
+ if (options?.Tools is { } tools)
+ {
+ foreach (AITool t in tools)
+ {
+ if (t is HostedToolSearchTool toolSearch)
+ {
+ return toolSearch;
+ }
+ }
+ }
+
+ return null;
+ }
+
+ /// Determines whether the tool with the given name should have deferred loading based on the configuration.
+ private static bool IsDeferredLoading(string toolName, HostedToolSearchTool toolSearch)
+ {
+ if (toolSearch.NonDeferredTools is { } nonDeferred && nonDeferred.Contains(toolName))
+ {
+ return false;
+ }
+
+ return toolSearch.DeferredTools is not { } deferred || deferred.Contains(toolName);
+ }
+
/// Provides an wrapper for a .
internal sealed class ResponseToolAITool(ResponseTool tool) : AITool
{
diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Functions/SearchableAIFunctionDeclarationTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Functions/SearchableAIFunctionDeclarationTests.cs
deleted file mode 100644
index 98af5640597..00000000000
--- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Functions/SearchableAIFunctionDeclarationTests.cs
+++ /dev/null
@@ -1,102 +0,0 @@
-// Licensed to the .NET Foundation under one or more agreements.
-// The .NET Foundation licenses this file to you under the MIT license.
-
-using System;
-using System.Collections.Generic;
-using Xunit;
-
-namespace Microsoft.Extensions.AI.Functions;
-
-public class SearchableAIFunctionDeclarationTests
-{
- [Fact]
- public void Constructor_NullFunction_ThrowsArgumentNullException()
- {
- Assert.Throws("innerFunction", () => new SearchableAIFunctionDeclaration(null!));
- }
-
- [Fact]
- public void Constructor_DelegatesToInnerFunction_Properties()
- {
- var inner = AIFunctionFactory.Create(() => 42, "MyFunc", "My description");
- var wrapper = new SearchableAIFunctionDeclaration(inner);
-
- Assert.Equal(inner.Name, wrapper.Name);
- Assert.Equal(inner.Description, wrapper.Description);
- Assert.Equal(inner.JsonSchema, wrapper.JsonSchema);
- Assert.Equal(inner.ReturnJsonSchema, wrapper.ReturnJsonSchema);
- Assert.Same(inner.AdditionalProperties, wrapper.AdditionalProperties);
- Assert.Equal(inner.ToString(), wrapper.ToString());
- }
-
- [Fact]
- public void Namespace_DefaultIsNull()
- {
- var inner = AIFunctionFactory.Create(() => 42);
- var wrapper = new SearchableAIFunctionDeclaration(inner);
-
- Assert.Null(wrapper.Namespace);
- }
-
- [Fact]
- public void Namespace_Roundtrips()
- {
- var inner = AIFunctionFactory.Create(() => 42);
- var wrapper = new SearchableAIFunctionDeclaration(inner, namespaceName: "myNamespace");
-
- Assert.Equal("myNamespace", wrapper.Namespace);
- }
-
- [Fact]
- public void GetService_ReturnsSelf()
- {
- var inner = AIFunctionFactory.Create(() => 42);
- var wrapper = new SearchableAIFunctionDeclaration(inner);
-
- Assert.Same(wrapper, wrapper.GetService());
- }
-
- [Fact]
- public void CreateToolSet_NullFunctions_Throws()
- {
- Assert.Throws("functions", () => SearchableAIFunctionDeclaration.CreateToolSet(null!));
- }
-
- [Fact]
- public void CreateToolSet_ReturnsHostedToolSearchToolFirst_ThenWrappedFunctions()
- {
- var f1 = AIFunctionFactory.Create(() => 1, "F1");
- var f2 = AIFunctionFactory.Create(() => 2, "F2");
-
- var tools = SearchableAIFunctionDeclaration.CreateToolSet([f1, f2]);
-
- Assert.Equal(3, tools.Count);
- Assert.IsType(tools[0]);
- Assert.Empty(tools[0].AdditionalProperties);
-
- var s1 = Assert.IsType(tools[1]);
- Assert.Equal("F1", s1.Name);
- Assert.Null(s1.Namespace);
-
- var s2 = Assert.IsType(tools[2]);
- Assert.Equal("F2", s2.Name);
- Assert.Null(s2.Namespace);
- }
-
- [Fact]
- public void CreateToolSet_WithNamespaceAndProperties_Roundtrips()
- {
- var f1 = AIFunctionFactory.Create(() => 1, "F1");
- var props = new Dictionary { ["key"] = "value" };
-
- var tools = SearchableAIFunctionDeclaration.CreateToolSet([f1], namespaceName: "ns", toolSearchProperties: props);
-
- Assert.Equal(2, tools.Count);
-
- var hostTool = Assert.IsType(tools[0]);
- Assert.Same(props, hostTool.AdditionalProperties);
-
- var s1 = Assert.IsType(tools[1]);
- Assert.Equal("ns", s1.Namespace);
- }
-}
diff --git a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Tools/HostedToolSearchToolTests.cs b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Tools/HostedToolSearchToolTests.cs
index f3a32dc8c84..24cde84d490 100644
--- a/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Tools/HostedToolSearchToolTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Tools/HostedToolSearchToolTests.cs
@@ -35,4 +35,36 @@ public void Constructor_NullAdditionalProperties_UsesEmpty()
Assert.Empty(tool.AdditionalProperties);
}
+
+ [Fact]
+ public void DeferredTools_DefaultIsNull()
+ {
+ var tool = new HostedToolSearchTool();
+ Assert.Null(tool.DeferredTools);
+ }
+
+ [Fact]
+ public void DeferredTools_Roundtrips()
+ {
+ var tool = new HostedToolSearchTool();
+ var list = new List<string> { "func1", "func2" };
+ tool.DeferredTools = list;
+ Assert.Same(list, tool.DeferredTools);
+ }
+
+ [Fact]
+ public void NonDeferredTools_DefaultIsNull()
+ {
+ var tool = new HostedToolSearchTool();
+ Assert.Null(tool.NonDeferredTools);
+ }
+
+ [Fact]
+ public void NonDeferredTools_Roundtrips()
+ {
+ var tool = new HostedToolSearchTool();
+ var list = new List<string> { "func3" };
+ tool.NonDeferredTools = list;
+ Assert.Same(list, tool.NonDeferredTools);
+ }
}
diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIConversionTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIConversionTests.cs
index b55e27b4478..6a320508322 100644
--- a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIConversionTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIConversionTests.cs
@@ -612,12 +612,13 @@ public void AsOpenAIResponseTool_WithHostedToolSearchTool_CachesResult()
}
[Fact]
- public void AsOpenAIResponseTool_WithSearchableAIFunctionDeclaration_PatchesDeferLoading()
+ public void AsOpenAIResponseTool_AllToolsDeferred_WhenBothListsNull()
{
- var inner = AIFunctionFactory.Create(() => 42, "MyFunc", "My description");
- var searchable = new SearchableAIFunctionDeclaration(inner);
+ var func = AIFunctionFactory.Create(() => 42, "MyFunc", "My description");
+ var toolSearch = new HostedToolSearchTool();
+ var options = new ChatOptions { Tools = [toolSearch, func] };
- var result = ((AITool)searchable).AsOpenAIResponseTool();
+ var result = func.AsOpenAIResponseTool(options);
Assert.NotNull(result);
var functionTool = Assert.IsType<FunctionTool>(result);
@@ -627,18 +628,71 @@ public void AsOpenAIResponseTool_WithSearchableAIFunctionDeclaration_PatchesDefe
}
[Fact]
- public void AsOpenAIResponseTool_WithSearchableAIFunctionDeclarationWithNamespace_PatchesNamespace()
+ public void AsOpenAIResponseTool_NoDeferLoading_WhenNoHostedToolSearchTool()
{
- var inner = AIFunctionFactory.Create(() => 42, "MyFunc", "My description");
- var searchable = new SearchableAIFunctionDeclaration(inner, namespaceName: "myNamespace");
+ var func = AIFunctionFactory.Create(() => 42, "MyFunc", "My description");
+ var options = new ChatOptions { Tools = [func] };
- var result = ((AITool)searchable).AsOpenAIResponseTool();
+ var result = func.AsOpenAIResponseTool(options);
Assert.NotNull(result);
var functionTool = Assert.IsType<FunctionTool>(result);
var json = ModelReaderWriter.Write(functionTool, ModelReaderWriterOptions.Json).ToString();
- Assert.Contains("namespace", json);
- Assert.Contains("myNamespace", json);
+ Assert.DoesNotContain("defer_loading", json);
+ }
+
+ [Fact]
+ public void AsOpenAIResponseTool_OnlyDeferredToolsGetDeferLoading()
+ {
+ var func1 = AIFunctionFactory.Create(() => 1, "Func1");
+ var func2 = AIFunctionFactory.Create(() => 2, "Func2");
+ var toolSearch = new HostedToolSearchTool { DeferredTools = ["Func1"] };
+ var options = new ChatOptions { Tools = [toolSearch, func1, func2] };
+
+ var result1 = func1.AsOpenAIResponseTool(options);
+ var result2 = func2.AsOpenAIResponseTool(options);
+
+ var json1 = ModelReaderWriter.Write(result1!, ModelReaderWriterOptions.Json).ToString();
+ Assert.Contains("defer_loading", json1);
+
+ var json2 = ModelReaderWriter.Write(result2!, ModelReaderWriterOptions.Json).ToString();
+ Assert.DoesNotContain("defer_loading", json2);
+ }
+
+ [Fact]
+ public void AsOpenAIResponseTool_NonDeferredToolsExcluded()
+ {
+ var func1 = AIFunctionFactory.Create(() => 1, "Func1");
+ var func2 = AIFunctionFactory.Create(() => 2, "Func2");
+ var toolSearch = new HostedToolSearchTool { NonDeferredTools = ["Func2"] };
+ var options = new ChatOptions { Tools = [toolSearch, func1, func2] };
+
+ var result1 = func1.AsOpenAIResponseTool(options);
+ var result2 = func2.AsOpenAIResponseTool(options);
+
+ var json1 = ModelReaderWriter.Write(result1!, ModelReaderWriterOptions.Json).ToString();
+ Assert.Contains("defer_loading", json1);
+
+ var json2 = ModelReaderWriter.Write(result2!, ModelReaderWriterOptions.Json).ToString();
+ Assert.DoesNotContain("defer_loading", json2);
+ }
+
+ [Fact]
+ public void AsOpenAIResponseTool_BothLists_DisableTakesPrecedence()
+ {
+ var func = AIFunctionFactory.Create(() => 42, "MyFunc");
+ var toolSearch = new HostedToolSearchTool
+ {
+ DeferredTools = ["MyFunc"],
+ NonDeferredTools = ["MyFunc"],
+ };
+ var options = new ChatOptions { Tools = [toolSearch, func] };
+
+ var result = func.AsOpenAIResponseTool(options);
+
+ Assert.NotNull(result);
+ var json = ModelReaderWriter.Write(result!, ModelReaderWriterOptions.Json).ToString();
+ Assert.DoesNotContain("defer_loading", json);
}
[Fact]
From d6bce96b2fac06d8035919710d5180e7ec7d5a76 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 9 Mar 2026 17:25:13 +0000
Subject: [PATCH 4/9] =?UTF-8?q?Address=20review=20feedback:=20fix=20O(N?=
=?UTF-8?q?=C2=B2),=20remove=20json=20baseline=20entry,=20refactor=20ToRes?=
=?UTF-8?q?ponseTool=20signature?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
- Fix O(N²) by finding HostedToolSearchTool once before the tools loop
instead of scanning the list for each tool
- Remove HostedToolSearchTool from json baseline (experimental types
don't need entries)
- Refactor ToResponseTool(AITool, ...) to take HostedToolSearchTool?
directly instead of extracting from ChatOptions each time
- Remove FindToolSearchTool helper method (inlined into callers)
Co-authored-by: stephentoub <2642209+stephentoub@users.noreply.github.com>
---
.../Microsoft.Extensions.AI.Abstractions.json | 32 ------------------
...icrosoftExtensionsAIResponsesExtensions.cs | 21 ++++++++++--
.../OpenAIResponsesChatClient.cs | 33 ++++++++-----------
3 files changed, 32 insertions(+), 54 deletions(-)
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Microsoft.Extensions.AI.Abstractions.json b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Microsoft.Extensions.AI.Abstractions.json
index 8647d2e14a3..d9f97f58c97 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Microsoft.Extensions.AI.Abstractions.json
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Microsoft.Extensions.AI.Abstractions.json
@@ -2305,38 +2305,6 @@
}
]
},
- {
- "Type": "class Microsoft.Extensions.AI.HostedToolSearchTool : Microsoft.Extensions.AI.AITool",
- "Stage": "Experimental",
- "Methods": [
- {
- "Member": "Microsoft.Extensions.AI.HostedToolSearchTool.HostedToolSearchTool();",
- "Stage": "Experimental"
- },
- {
- "Member": "Microsoft.Extensions.AI.HostedToolSearchTool.HostedToolSearchTool(System.Collections.Generic.IReadOnlyDictionary? additionalProperties);",
- "Stage": "Experimental"
- }
- ],
- "Properties": [
- {
- "Member": "override System.Collections.Generic.IReadOnlyDictionary Microsoft.Extensions.AI.HostedToolSearchTool.AdditionalProperties { get; }",
- "Stage": "Experimental"
- },
- {
- "Member": "System.Collections.Generic.IList? Microsoft.Extensions.AI.HostedToolSearchTool.DeferredTools { get; set; }",
- "Stage": "Experimental"
- },
- {
- "Member": "override string Microsoft.Extensions.AI.HostedToolSearchTool.Name { get; }",
- "Stage": "Experimental"
- },
- {
- "Member": "System.Collections.Generic.IList? Microsoft.Extensions.AI.HostedToolSearchTool.NonDeferredTools { get; set; }",
- "Stage": "Experimental"
- }
- ]
- },
{
"Type": "sealed class Microsoft.Extensions.AI.HostedVectorStoreContent : Microsoft.Extensions.AI.AIContent",
"Stage": "Stable",
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs
index 870cdbbac19..81817f93679 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs
@@ -33,8 +33,25 @@ public static FunctionTool AsOpenAIResponseTool(this AIFunctionDeclaration funct
/// This method is only able to create s for types
/// it's aware of, namely all of those available from the Microsoft.Extensions.AI.Abstractions library.
///
- public static ResponseTool? AsOpenAIResponseTool(this AITool tool, ChatOptions? options = null) =>
- OpenAIResponsesChatClient.ToResponseTool(Throw.IfNull(tool), options);
+ public static ResponseTool? AsOpenAIResponseTool(this AITool tool, ChatOptions? options = null)
+ {
+ _ = Throw.IfNull(tool);
+
+ HostedToolSearchTool? toolSearchTool = null;
+ if (options?.Tools is { } tools)
+ {
+ foreach (AITool t in tools)
+ {
+ if (t is HostedToolSearchTool tst)
+ {
+ toolSearchTool = tst;
+ break;
+ }
+ }
+ }
+
+ return OpenAIResponsesChatClient.ToResponseTool(tool, toolSearchTool, options);
+ }
///
/// Creates an OpenAI from a .
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
index e422c5bfc45..636c145add7 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
@@ -686,7 +686,7 @@ void IDisposable.Dispose()
// Nothing to dispose.
}
- internal static ResponseTool? ToResponseTool(AITool tool, ChatOptions? options = null)
+ internal static ResponseTool? ToResponseTool(AITool tool, HostedToolSearchTool? toolSearchTool, ChatOptions? options)
{
switch (tool)
{
@@ -695,7 +695,7 @@ void IDisposable.Dispose()
case AIFunctionDeclaration aiFunction:
var functionTool = ToResponseTool(aiFunction, options);
- if (FindToolSearchTool(options) is { } toolSearch && IsDeferredLoading(aiFunction.Name, toolSearch))
+ if (toolSearchTool is not null && IsDeferredLoading(aiFunction.Name, toolSearchTool))
{
functionTool.Patch.Set("$.defer_loading"u8, "true"u8);
}
@@ -920,9 +920,19 @@ private CreateResponseOptions AsCreateResponseOptions(ChatOptions? options, out
// Populate tools if there are any.
if (options.Tools is { Count: > 0 } tools)
{
+ HostedToolSearchTool? toolSearchTool = null;
foreach (AITool tool in tools)
{
- if (ToResponseTool(tool, options) is { } responseTool)
+ if (tool is HostedToolSearchTool tst)
+ {
+ toolSearchTool = tst;
+ break;
+ }
+ }
+
+ foreach (AITool tool in tools)
+ {
+ if (ToResponseTool(tool, toolSearchTool, options) is { } responseTool)
{
result.Tools.Add(responseTool);
}
@@ -1814,23 +1824,6 @@ private static ImageGenerationToolResultContent GetImageGenerationResult(Streami
return null;
}
- /// Finds the in the options' tools list, if present.
- private static HostedToolSearchTool? FindToolSearchTool(ChatOptions? options)
- {
- if (options?.Tools is { } tools)
- {
- foreach (AITool t in tools)
- {
- if (t is HostedToolSearchTool toolSearch)
- {
- return toolSearch;
- }
- }
- }
-
- return null;
- }
-
/// Determines whether the tool with the given name should have deferred loading based on the configuration.
private static bool IsDeferredLoading(string toolName, HostedToolSearchTool toolSearch)
{
From ed9f47b4deaf952711c6bd9260a46fe155b0e887 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 9 Mar 2026 17:33:05 +0000
Subject: [PATCH 5/9] Extract shared FindToolSearchTool helper to deduplicate
lookup code
Co-authored-by: stephentoub <2642209+stephentoub@users.noreply.github.com>
---
...icrosoftExtensionsAIResponsesExtensions.cs | 18 +++----------
.../OpenAIResponsesChatClient.cs | 27 ++++++++++++-------
2 files changed, 22 insertions(+), 23 deletions(-)
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs
index 81817f93679..f7d8ca88127 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs
@@ -37,20 +37,10 @@ public static FunctionTool AsOpenAIResponseTool(this AIFunctionDeclaration funct
{
_ = Throw.IfNull(tool);
- HostedToolSearchTool? toolSearchTool = null;
- if (options?.Tools is { } tools)
- {
- foreach (AITool t in tools)
- {
- if (t is HostedToolSearchTool tst)
- {
- toolSearchTool = tst;
- break;
- }
- }
- }
-
- return OpenAIResponsesChatClient.ToResponseTool(tool, toolSearchTool, options);
+ return OpenAIResponsesChatClient.ToResponseTool(
+ tool,
+ OpenAIResponsesChatClient.FindToolSearchTool(options?.Tools),
+ options);
}
///
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
index 636c145add7..d429816a6fb 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
@@ -920,15 +920,7 @@ private CreateResponseOptions AsCreateResponseOptions(ChatOptions? options, out
// Populate tools if there are any.
if (options.Tools is { Count: > 0 } tools)
{
- HostedToolSearchTool? toolSearchTool = null;
- foreach (AITool tool in tools)
- {
- if (tool is HostedToolSearchTool tst)
- {
- toolSearchTool = tst;
- break;
- }
- }
+ HostedToolSearchTool? toolSearchTool = FindToolSearchTool(tools);
foreach (AITool tool in tools)
{
@@ -1835,6 +1827,23 @@ private static bool IsDeferredLoading(string toolName, HostedToolSearchTool tool
return toolSearch.DeferredTools is not { } deferred || deferred.Contains(toolName);
}
+ /// Finds the first in the given tools list, if present.
+ internal static HostedToolSearchTool? FindToolSearchTool(IList<AITool>? tools)
+ {
+ if (tools is not null)
+ {
+ foreach (AITool tool in tools)
+ {
+ if (tool is HostedToolSearchTool toolSearch)
+ {
+ return toolSearch;
+ }
+ }
+ }
+
+ return null;
+ }
+
/// Provides an wrapper for a .
internal sealed class ResponseToolAITool(ResponseTool tool) : AITool
{
From b53a6b904168e72f27d11a0b79691982bf72868b Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 9 Mar 2026 18:52:37 +0000
Subject: [PATCH 6/9] Simplify ToResponseTool: add ChatOptions-only overload,
make FindToolSearchTool private
Co-authored-by: stephentoub <2642209+stephentoub@users.noreply.github.com>
---
.../MicrosoftExtensionsAIResponsesExtensions.cs | 5 +----
.../OpenAIResponsesChatClient.cs | 7 +++++--
2 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs
index f7d8ca88127..b333c349595 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/MicrosoftExtensionsAIResponsesExtensions.cs
@@ -37,10 +37,7 @@ public static FunctionTool AsOpenAIResponseTool(this AIFunctionDeclaration funct
{
_ = Throw.IfNull(tool);
- return OpenAIResponsesChatClient.ToResponseTool(
- tool,
- OpenAIResponsesChatClient.FindToolSearchTool(options?.Tools),
- options);
+ return OpenAIResponsesChatClient.ToResponseTool(tool, options);
}
///
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
index d429816a6fb..c3b1c196564 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
@@ -686,7 +686,10 @@ void IDisposable.Dispose()
// Nothing to dispose.
}
- internal static ResponseTool? ToResponseTool(AITool tool, HostedToolSearchTool? toolSearchTool, ChatOptions? options)
+ internal static ResponseTool? ToResponseTool(AITool tool, ChatOptions? options) =>
+ ToResponseTool(tool, FindToolSearchTool(options?.Tools), options);
+
+ private static ResponseTool? ToResponseTool(AITool tool, HostedToolSearchTool? toolSearchTool, ChatOptions? options)
{
switch (tool)
{
@@ -1828,7 +1831,7 @@ private static bool IsDeferredLoading(string toolName, HostedToolSearchTool tool
}
/// Finds the first in the given tools list, if present.
- internal static HostedToolSearchTool? FindToolSearchTool(IList<AITool>? tools)
+ private static HostedToolSearchTool? FindToolSearchTool(IList<AITool>? tools)
{
if (tools is not null)
{
From ead292d051bc1c1864b1eb2e9c9bb8428dbd03cb Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 9 Mar 2026 19:04:30 +0000
Subject: [PATCH 7/9] Add unit tests for HostedToolSearchTool JSON
serialization and integration test
Co-authored-by: stephentoub <2642209+stephentoub@users.noreply.github.com>
---
.../OpenAIResponseClientIntegrationTests.cs | 24 +
.../OpenAIResponseClientTests.cs | 484 ++++++++++++++++++
2 files changed, 508 insertions(+)
diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientIntegrationTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientIntegrationTests.cs
index c98197e0b65..995c9a9a86f 100644
--- a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientIntegrationTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientIntegrationTests.cs
@@ -754,4 +754,28 @@ public async Task ReasoningContent_Streaming_RoundtripsEncryptedContent()
});
Assert.Contains("encrypted", ex.Message, StringComparison.OrdinalIgnoreCase);
}
+
+ [ConditionalFact]
+ public async Task UseToolSearch_WithDeferredFunctions()
+ {
+ SkipIfNotEnabled();
+
+ AIFunction getWeather = AIFunctionFactory.Create(() => "Sunny, 72°F", "GetWeather", "Gets the current weather.");
+ AIFunction getTime = AIFunctionFactory.Create(() => "3:00 PM", "GetTime", "Gets the current time.");
+
+ var response = await ChatClient.GetResponseAsync(
+ "What's the weather like? Just respond with the weather info, nothing else.",
+ new()
+ {
+ Tools =
+ [
+ new HostedToolSearchTool(),
+ getWeather,
+ getTime,
+ ],
+ });
+
+ Assert.NotNull(response);
+ Assert.NotEmpty(response.Text);
+ }
}
diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientTests.cs
index 5e712f22afe..ee98551e575 100644
--- a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientTests.cs
@@ -6964,5 +6964,489 @@ public async Task WebSearchTool_Streaming()
var textContent = message.Contents.OfType<TextContent>().Single();
Assert.Equal(".NET 10 was officially released.", textContent.Text);
}
+
+ [Fact]
+ public async Task ToolSearchTool_OnlyToolSearch_NonStreaming()
+ {
+ const string Input = """
+ {
+ "model": "gpt-4o-mini",
+ "input": [
+ {
+ "type": "message",
+ "role": "user",
+ "content": [
+ {
+ "type": "input_text",
+ "text": "hello"
+ }
+ ]
+ }
+ ],
+ "tools": [
+ {
+ "type": "tool_search"
+ }
+ ]
+ }
+ """;
+
+ const string Output = """
+ {
+ "id": "resp_001",
+ "object": "response",
+ "created_at": 1741892091,
+ "status": "completed",
+ "model": "gpt-4o-mini",
+ "output": [
+ {
+ "type": "message",
+ "id": "msg_001",
+ "status": "completed",
+ "role": "assistant",
+ "content": [{"type": "output_text", "text": "Hello!", "annotations": []}]
+ }
+ ]
+ }
+ """;
+
+ using VerbatimHttpHandler handler = new(Input, Output);
+ using HttpClient httpClient = new(handler);
+ using IChatClient client = CreateResponseClient(httpClient, "gpt-4o-mini");
+
+ var response = await client.GetResponseAsync("hello", new()
+ {
+ Tools = [new HostedToolSearchTool()],
+ });
+
+ Assert.NotNull(response);
+ Assert.Equal("Hello!", response.Text);
+ }
+
+ [Fact]
+ public async Task ToolSearchTool_AllToolsDeferred_NonStreaming()
+ {
+ const string Input = """
+ {
+ "model": "gpt-4o-mini",
+ "input": [
+ {
+ "type": "message",
+ "role": "user",
+ "content": [
+ {
+ "type": "input_text",
+ "text": "hello"
+ }
+ ]
+ }
+ ],
+ "tools": [
+ {
+ "type": "tool_search"
+ },
+ {
+ "type": "function",
+ "name": "GetWeather",
+ "description": "Gets the weather.",
+ "parameters": {
+ "type": "object",
+ "required": [],
+ "properties": {},
+ "additionalProperties": false
+ },
+ "strict": true,
+ "defer_loading": true
+ },
+ {
+ "type": "function",
+ "name": "GetForecast",
+ "description": "Gets the forecast.",
+ "parameters": {
+ "type": "object",
+ "required": [],
+ "properties": {},
+ "additionalProperties": false
+ },
+ "strict": true,
+ "defer_loading": true
+ }
+ ]
+ }
+ """;
+
+ const string Output = """
+ {
+ "id": "resp_001",
+ "object": "response",
+ "created_at": 1741892091,
+ "status": "completed",
+ "model": "gpt-4o-mini",
+ "output": [
+ {
+ "type": "message",
+ "id": "msg_001",
+ "status": "completed",
+ "role": "assistant",
+ "content": [{"type": "output_text", "text": "Hello!", "annotations": []}]
+ }
+ ]
+ }
+ """;
+
+ using VerbatimHttpHandler handler = new(Input, Output);
+ using HttpClient httpClient = new(handler);
+ using IChatClient client = CreateResponseClient(httpClient, "gpt-4o-mini");
+
+ var response = await client.GetResponseAsync("hello", new()
+ {
+ Tools =
+ [
+ new HostedToolSearchTool(),
+ AIFunctionFactory.Create(() => 42, "GetWeather", "Gets the weather."),
+ AIFunctionFactory.Create(() => 42, "GetForecast", "Gets the forecast."),
+ ],
+ AdditionalProperties = new() { ["strict"] = true },
+ });
+
+ Assert.NotNull(response);
+ Assert.Equal("Hello!", response.Text);
+ }
+
+ [Fact]
+ public async Task ToolSearchTool_SpecificDeferredTools_NonStreaming()
+ {
+ const string Input = """
+ {
+ "model": "gpt-4o-mini",
+ "input": [
+ {
+ "type": "message",
+ "role": "user",
+ "content": [
+ {
+ "type": "input_text",
+ "text": "hello"
+ }
+ ]
+ }
+ ],
+ "tools": [
+ {
+ "type": "tool_search"
+ },
+ {
+ "type": "function",
+ "name": "GetWeather",
+ "description": "Gets the weather.",
+ "parameters": {
+ "type": "object",
+ "required": [],
+ "properties": {},
+ "additionalProperties": false
+ },
+ "strict": true,
+ "defer_loading": true
+ },
+ {
+ "type": "function",
+ "name": "GetForecast",
+ "description": "Gets the forecast.",
+ "parameters": {
+ "type": "object",
+ "required": [],
+ "properties": {},
+ "additionalProperties": false
+ },
+ "strict": true
+ }
+ ]
+ }
+ """;
+
+ const string Output = """
+ {
+ "id": "resp_001",
+ "object": "response",
+ "created_at": 1741892091,
+ "status": "completed",
+ "model": "gpt-4o-mini",
+ "output": [
+ {
+ "type": "message",
+ "id": "msg_001",
+ "status": "completed",
+ "role": "assistant",
+ "content": [{"type": "output_text", "text": "Hello!", "annotations": []}]
+ }
+ ]
+ }
+ """;
+
+ using VerbatimHttpHandler handler = new(Input, Output);
+ using HttpClient httpClient = new(handler);
+ using IChatClient client = CreateResponseClient(httpClient, "gpt-4o-mini");
+
+ var response = await client.GetResponseAsync("hello", new()
+ {
+ Tools =
+ [
+ new HostedToolSearchTool { DeferredTools = ["GetWeather"] },
+ AIFunctionFactory.Create(() => 42, "GetWeather", "Gets the weather."),
+ AIFunctionFactory.Create(() => 42, "GetForecast", "Gets the forecast."),
+ ],
+ AdditionalProperties = new() { ["strict"] = true },
+ });
+
+ Assert.NotNull(response);
+ Assert.Equal("Hello!", response.Text);
+ }
+
+ [Fact]
+ public async Task ToolSearchTool_NonDeferredExclusion_NonStreaming()
+ {
+ const string Input = """
+ {
+ "model": "gpt-4o-mini",
+ "input": [
+ {
+ "type": "message",
+ "role": "user",
+ "content": [
+ {
+ "type": "input_text",
+ "text": "hello"
+ }
+ ]
+ }
+ ],
+ "tools": [
+ {
+ "type": "tool_search"
+ },
+ {
+ "type": "function",
+ "name": "GetWeather",
+ "description": "Gets the weather.",
+ "parameters": {
+ "type": "object",
+ "required": [],
+ "properties": {},
+ "additionalProperties": false
+ },
+ "strict": true,
+ "defer_loading": true
+ },
+ {
+ "type": "function",
+ "name": "ImportantTool",
+ "description": "An important tool.",
+ "parameters": {
+ "type": "object",
+ "required": [],
+ "properties": {},
+ "additionalProperties": false
+ },
+ "strict": true
+ }
+ ]
+ }
+ """;
+
+ const string Output = """
+ {
+ "id": "resp_001",
+ "object": "response",
+ "created_at": 1741892091,
+ "status": "completed",
+ "model": "gpt-4o-mini",
+ "output": [
+ {
+ "type": "message",
+ "id": "msg_001",
+ "status": "completed",
+ "role": "assistant",
+ "content": [{"type": "output_text", "text": "Hello!", "annotations": []}]
+ }
+ ]
+ }
+ """;
+
+ using VerbatimHttpHandler handler = new(Input, Output);
+ using HttpClient httpClient = new(handler);
+ using IChatClient client = CreateResponseClient(httpClient, "gpt-4o-mini");
+
+ var response = await client.GetResponseAsync("hello", new()
+ {
+ Tools =
+ [
+ new HostedToolSearchTool { NonDeferredTools = ["ImportantTool"] },
+ AIFunctionFactory.Create(() => 42, "GetWeather", "Gets the weather."),
+ AIFunctionFactory.Create(() => 42, "ImportantTool", "An important tool."),
+ ],
+ AdditionalProperties = new() { ["strict"] = true },
+ });
+
+ Assert.NotNull(response);
+ Assert.Equal("Hello!", response.Text);
+ }
+
+ [Fact]
+ public async Task ToolSearchTool_BothLists_DisableTakesPrecedence_NonStreaming()
+ {
+ const string Input = """
+ {
+ "model": "gpt-4o-mini",
+ "input": [
+ {
+ "type": "message",
+ "role": "user",
+ "content": [
+ {
+ "type": "input_text",
+ "text": "hello"
+ }
+ ]
+ }
+ ],
+ "tools": [
+ {
+ "type": "tool_search"
+ },
+ {
+ "type": "function",
+ "name": "Func1",
+ "description": "First function.",
+ "parameters": {
+ "type": "object",
+ "required": [],
+ "properties": {},
+ "additionalProperties": false
+ },
+ "strict": true
+ },
+ {
+ "type": "function",
+ "name": "Func2",
+ "description": "Second function.",
+ "parameters": {
+ "type": "object",
+ "required": [],
+ "properties": {},
+ "additionalProperties": false
+ },
+ "strict": true,
+ "defer_loading": true
+ }
+ ]
+ }
+ """;
+
+ const string Output = """
+ {
+ "id": "resp_001",
+ "object": "response",
+ "created_at": 1741892091,
+ "status": "completed",
+ "model": "gpt-4o-mini",
+ "output": [
+ {
+ "type": "message",
+ "id": "msg_001",
+ "status": "completed",
+ "role": "assistant",
+ "content": [{"type": "output_text", "text": "Hello!", "annotations": []}]
+ }
+ ]
+ }
+ """;
+
+ using VerbatimHttpHandler handler = new(Input, Output);
+ using HttpClient httpClient = new(handler);
+ using IChatClient client = CreateResponseClient(httpClient, "gpt-4o-mini");
+
+ var response = await client.GetResponseAsync("hello", new()
+ {
+ Tools =
+ [
+ new HostedToolSearchTool
+ {
+ DeferredTools = ["Func1", "Func2"],
+ NonDeferredTools = ["Func1"],
+ },
+ AIFunctionFactory.Create(() => 1, "Func1", "First function."),
+ AIFunctionFactory.Create(() => 2, "Func2", "Second function."),
+ ],
+ AdditionalProperties = new() { ["strict"] = true },
+ });
+
+ Assert.NotNull(response);
+ Assert.Equal("Hello!", response.Text);
+ }
+
+ [Fact]
+ public async Task ToolSearchTool_NoFunctionTools_NonStreaming()
+ {
+ const string Input = """
+ {
+ "model": "gpt-4o-mini",
+ "input": [
+ {
+ "type": "message",
+ "role": "user",
+ "content": [
+ {
+ "type": "input_text",
+ "text": "hello"
+ }
+ ]
+ }
+ ],
+ "tools": [
+ {
+ "type": "tool_search"
+ },
+ {
+ "type": "web_search"
+ }
+ ]
+ }
+ """;
+
+ const string Output = """
+ {
+ "id": "resp_001",
+ "object": "response",
+ "created_at": 1741892091,
+ "status": "completed",
+ "model": "gpt-4o-mini",
+ "output": [
+ {
+ "type": "message",
+ "id": "msg_001",
+ "status": "completed",
+ "role": "assistant",
+ "content": [{"type": "output_text", "text": "Hello!", "annotations": []}]
+ }
+ ]
+ }
+ """;
+
+ using VerbatimHttpHandler handler = new(Input, Output);
+ using HttpClient httpClient = new(handler);
+ using IChatClient client = CreateResponseClient(httpClient, "gpt-4o-mini");
+
+ var response = await client.GetResponseAsync("hello", new()
+ {
+ Tools =
+ [
+ new HostedToolSearchTool(),
+ new HostedWebSearchTool(),
+ ],
+ });
+
+ Assert.NotNull(response);
+ Assert.Equal("Hello!", response.Text);
+ }
}
From 14dbaca8d18362fdc9eb2792c1ccf4e1617c89ea Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Tue, 24 Mar 2026 21:51:13 +0000
Subject: [PATCH 8/9] Merge main to resolve conflicts (conflict in
OpenAIResponsesChatClient.cs resolved by taking main's version)
Co-authored-by: stephentoub <2642209+stephentoub@users.noreply.github.com>
Agent-Logs-Url: https://github.com/dotnet/extensions/sessions/7a29d49e-c422-4fe7-81f4-366bd781b460
---
.github/skills/prepare-release/SKILL.md | 129 ++
.github/skills/release-notes/SKILL.md | 132 ++
.../references/categorize-entries.md | 75 +
.../release-notes/references/collect-prs.md | 159 ++
.../references/editorial-rules.md | 143 ++
.../references/experimental-features.md | 115 ++
.../references/format-template.md | 119 ++
.../release-notes/references/package-areas.md | 118 ++
.../release-notes/references/sql-storage.md | 167 +++
azure-pipelines.yml | 2 +-
eng/MSBuild/Packaging.targets | 2 +-
eng/Version.Details.xml | 212 +--
eng/Versions.props | 230 +--
eng/common/CIBuild.cmd | 2 +-
eng/common/SetupNugetSources.ps1 | 90 +-
eng/common/SetupNugetSources.sh | 192 ++-
eng/common/build.ps1 | 11 +-
eng/common/build.sh | 33 +-
eng/common/cibuild.sh | 2 +-
eng/common/core-templates/job/job.yml | 48 +-
eng/common/core-templates/job/onelocbuild.yml | 35 +-
.../job/publish-build-assets.yml | 79 +-
.../core-templates/job/source-build.yml | 15 +-
.../job/source-index-stage1.yml | 47 +-
.../core-templates/jobs/codeql-build.yml | 1 -
eng/common/core-templates/jobs/jobs.yml | 15 +-
.../core-templates/jobs/source-build.yml | 23 +-
.../core-templates/post-build/post-build.yml | 26 +-
.../steps/cleanup-microbuild.yml | 28 +
.../core-templates/steps/generate-sbom.yml | 2 +-
.../steps/get-delegation-sas.yml | 11 +-
.../steps/install-microbuild.yml | 110 ++
.../core-templates/steps/publish-logs.yml | 8 +-
.../core-templates/steps/source-build.yml | 88 +-
.../steps/source-index-stage1-publish.yml | 35 +
eng/common/cross/arm64/tizen/tizen.patch | 2 +-
eng/common/cross/armel/armel.jessie.patch | 43 -
eng/common/cross/build-android-rootfs.sh | 49 +-
eng/common/cross/build-rootfs.sh | 237 +--
eng/common/cross/install-debs.py | 334 +++++
eng/common/cross/tizen-fetch.sh | 9 +-
eng/common/cross/toolchain.cmake | 82 +-
eng/common/darc-init.sh | 2 +-
eng/common/dotnet.cmd | 7 +
eng/common/dotnet.ps1 | 11 +
eng/common/dotnet.sh | 26 +
eng/common/generate-locproject.ps1 | 49 +-
eng/common/native/install-dependencies.sh | 62 +
eng/common/post-build/publish-using-darc.ps1 | 9 +-
eng/common/post-build/redact-logs.ps1 | 5 +-
eng/common/sdk-task.ps1 | 14 +-
eng/common/sdk-task.sh | 121 ++
eng/common/sdl/packages.config | 2 +-
eng/common/templates-official/job/job.yml | 4 +-
.../steps/publish-build-artifacts.yml | 7 +-
.../steps/source-index-stage1-publish.yml | 7 +
eng/common/templates/job/job.yml | 4 +-
.../steps/publish-build-artifacts.yml | 8 +-
.../steps/source-index-stage1-publish.yml | 7 +
eng/common/templates/steps/vmr-sync.yml | 186 +++
eng/common/templates/vmr-build-pr.yml | 43 +
eng/common/tools.ps1 | 71 +-
eng/common/tools.sh | 81 +-
eng/common/vmr-sync.ps1 | 164 +++
eng/common/vmr-sync.sh | 227 +++
eng/packages/General.props | 2 +-
global.json | 8 +-
.../HttpLoggingServiceCollectionExtensions.cs | 3 +-
.../Logging/IHttpLogEnricher.cs | 3 -
.../RequestHeadersLogEnricherOptions.cs | 3 -
...oft.AspNetCore.Diagnostics.Middleware.json | 14 +-
.../CHANGELOG.md | 245 ----
.../ChatCompletion/ChatResponseExtensions.cs | 34 +-
.../CompatibilitySuppressions.xml | 808 -----------
.../Contents/UriContent.cs | 57 +-
.../Files/HostedFileDownloadStream.cs | 34 +
.../Functions/AIFunctionDeclaration.cs | 18 +-
.../Functions/AIFunctionFactory.cs | 48 +-
.../Functions/AIFunctionFactoryOptions.cs | 13 +-
.../Microsoft.Extensions.AI.Abstractions.json | 4 +-
...teConversationItemRealtimeClientMessage.cs | 38 +
.../CreateResponseRealtimeClientMessage.cs | 125 ++
.../Realtime/DelegatingRealtimeClient.cs | 68 +
.../Realtime/ErrorRealtimeServerMessage.cs | 39 +
.../Realtime/IRealtimeClient.cs | 33 +
.../Realtime/IRealtimeClientSession.cs | 63 +
...tAudioBufferAppendRealtimeClientMessage.cs | 41 +
...tAudioBufferCommitRealtimeClientMessage.cs | 22 +
...AudioTranscriptionRealtimeServerMessage.cs | 58 +
.../OutputTextAudioRealtimeServerMessage.cs | 73 +
.../Realtime/RealtimeAudioFormat.cs | 33 +
.../Realtime/RealtimeClientMessage.cs | 30 +
.../Realtime/RealtimeConversationItem.cs | 61 +
.../Realtime/RealtimeResponseStatus.cs | 42 +
.../Realtime/RealtimeServerMessage.cs | 35 +
.../Realtime/RealtimeServerMessageType.cs | 163 +++
.../Realtime/RealtimeSessionKind.cs | 100 ++
.../Realtime/RealtimeSessionOptions.cs | 108 ++
.../ResponseCreatedRealtimeServerMessage.cs | 119 ++
...ResponseOutputItemRealtimeServerMessage.cs | 54 +
.../SessionUpdateRealtimeClientMessage.cs | 42 +
.../Realtime/VoiceActivityDetectionOptions.cs | 57 +
.../SpeechToText/TranscriptionOptions.cs | 40 +
.../DelegatingTextToSpeechClient.cs | 77 +
.../TextToSpeech/ITextToSpeechClient.cs | 62 +
.../TextToSpeechClientExtensions.cs | 29 +
.../TextToSpeechClientMetadata.cs | 44 +
.../TextToSpeech/TextToSpeechOptions.cs | 103 ++
.../TextToSpeech/TextToSpeechResponse.cs | 80 +
.../TextToSpeechResponseUpdate.cs | 75 +
.../TextToSpeechResponseUpdateExtensions.cs | 109 ++
.../TextToSpeechResponseUpdateKind.cs | 105 ++
.../UsageDetails.cs | 92 +-
.../Utilities/AIJsonUtilities.Defaults.cs | 7 +
....Extensions.AI.Evaluation.Reporting.csproj | 2 +-
.../CSharp/Storage/DiskBasedResponseCache.cs | 14 +-
.../CSharp/Storage/DiskBasedResultStore.cs | 40 +-
.../CSharp/Utilities/PathValidation.cs | 88 ++
.../TypeScript/azure-devops-report/build.ps1 | 9 +-
.../package-lock.json | 513 +++++--
.../PublishAIEvaluationReport/package.json | 2 +-
.../TypeScript/package-lock.json | 97 +-
.../CHANGELOG.md | 176 ---
.../OpenAIClientExtensions.cs | 9 +
.../OpenAIFileDownloadStream.cs | 22 -
.../OpenAIJsonContext.cs | 1 +
.../OpenAIRealtimeClient.cs | 97 ++
.../OpenAIRealtimeClientSession.cs | 1289 +++++++++++++++++
.../OpenAIResponsesChatClient.cs | 80 +-
.../OpenAITextToSpeechClient.cs | 137 ++
.../Microsoft.Extensions.AI/CHANGELOG.md | 212 ---
.../FunctionInvokingChatClient.cs | 381 +----
.../ChatCompletion/OpenTelemetryChatClient.cs | 11 +-
.../OpenTelemetryImageGenerator.cs | 11 +-
.../Common/FunctionInvocationHelpers.cs | 41 +
.../Common/FunctionInvocationLogger.cs | 55 +
.../Common/FunctionInvocationProcessor.cs | 252 ++++
.../Common/OpenTelemetryLog.cs | 17 +
.../CompatibilitySuppressions.xml | 109 --
.../OpenTelemetryEmbeddingGenerator.cs | 11 +-
.../Files/OpenTelemetryHostedFileClient.cs | 13 +-
.../OpenTelemetryConsts.cs | 49 +
.../FunctionInvokingRealtimeClient.cs | 131 ++
...InvokingRealtimeClientBuilderExtensions.cs | 43 +
.../FunctionInvokingRealtimeClientSession.cs | 415 ++++++
.../Realtime/LoggingRealtimeClient.cs | 56 +
.../LoggingRealtimeClientBuilderExtensions.cs | 59 +
.../Realtime/LoggingRealtimeClientSession.cs | 261 ++++
.../Realtime/OpenTelemetryRealtimeClient.cs | 71 +
...elemetryRealtimeClientBuilderExtensions.cs | 79 +
.../OpenTelemetryRealtimeClientSession.cs | 1050 ++++++++++++++
.../Realtime/RealtimeClientBuilder.cs | 89 ++
...meClientBuilderRealtimeClientExtensions.cs | 29 +
.../Realtime/RealtimeClientExtensions.cs | 82 ++
.../RealtimeClientSessionExtensions.cs | 82 ++
.../OpenTelemetrySpeechToTextClient.cs | 13 +-
.../ConfigureOptionsTextToSpeechClient.cs | 65 +
...ionsTextToSpeechClientBuilderExtensions.cs | 37 +
.../TextToSpeech/LoggingTextToSpeechClient.cs | 189 +++
...gingTextToSpeechClientBuilderExtensions.cs | 57 +
.../OpenTelemetryTextToSpeechClient.cs | 355 +++++
...etryTextToSpeechClientBuilderExtensions.cs | 43 +
.../TextToSpeech/TextToSpeechClientBuilder.cs | 82 ++
...lientBuilderServiceCollectionExtensions.cs | 89 ++
...ientBuilderTextToSpeechClientExtensions.cs | 27 +
.../Microsoft.Extensions.AI/Throw.cs | 15 +
.../CHANGELOG.md | 5 -
.../CHANGELOG.md | 5 -
.../CHANGELOG.md | 5 -
.../CHANGELOG.md | 9 -
.../Writers/VectorStoreWriter.cs | 8 +-
.../Resolver/DnsResolver.cs | 114 +-
src/Shared/DiagnosticIds/DiagnosticIds.cs | 1 +
.../ChatResponseUpdateExtensionsTests.cs | 45 +
.../Contents/UriContentTests.cs | 94 +-
.../Files/HostedFileDownloadStreamTests.cs | 31 +-
.../Realtime/RealtimeAudioFormatTests.cs | 33 +
.../Realtime/RealtimeClientMessageTests.cs | 183 +++
.../Realtime/RealtimeConversationItemTests.cs | 66 +
.../Realtime/RealtimeServerMessageTests.cs | 262 ++++
.../Realtime/RealtimeSessionOptionsTests.cs | 139 ++
.../TestJsonSerializerContext.cs | 4 +
.../TestRealtimeClientSession.cs | 62 +
.../TestTextToSpeechClient.cs | 59 +
.../DelegatingTextToSpeechClientTests.cs | 163 +++
.../TextToSpeechClientExtensionsTests.cs | 19 +
.../TextToSpeechClientMetadataTests.cs | 29 +
.../TextToSpeech/TextToSpeechClientTests.cs | 72 +
.../TextToSpeech/TextToSpeechOptionsTests.cs | 216 +++
.../TextToSpeech/TextToSpeechResponseTests.cs | 216 +++
...xtToSpeechResponseUpdateExtensionsTests.cs | 83 ++
.../TextToSpeechResponseUpdateKindTests.cs | 65 +
.../TextToSpeechResponseUpdateTests.cs | 98 ++
.../UsageDetailsTests.cs | 46 +
.../DiskBased/PathValidationTests.cs | 486 +++++++
.../TextToSpeechClientIntegrationTests.cs | 138 ++
.../OpenAIRealtimeClientSessionTests.cs | 96 ++
.../OpenAIRealtimeClientTests.cs | 63 +
.../OpenAIResponseClientTests.cs | 137 ++
.../OpenAISpeechToTextClientTests.cs | 1 -
...penAITextToSpeechClientIntegrationTests.cs | 12 +
.../OpenAITextToSpeechClientTests.cs | 283 ++++
.../FunctionInvokingChatClientTests.cs | 31 +-
.../OpenTelemetryChatClientTests.cs | 62 +
.../OpenTelemetryEmbeddingGeneratorTests.cs | 44 +
.../OpenTelemetryHostedFileClientTests.cs | 41 +-
.../Image/OpenTelemetryImageGeneratorTests.cs | 46 +
.../Microsoft.Extensions.AI.Tests.csproj | 2 +
.../FunctionInvokingRealtimeClientTests.cs | 685 +++++++++
.../Realtime/LoggingRealtimeClientTests.cs | 481 ++++++
.../OpenTelemetryRealtimeClientTests.cs | 1112 ++++++++++++++
.../Realtime/RealtimeClientBuilderTests.cs | 182 +++
.../Realtime/RealtimeClientExtensionsTests.cs | 124 ++
.../RealtimeClientSessionExtensionsTests.cs | 110 ++
.../OpenTelemetrySpeechToTextClientTests.cs | 62 +
...ConfigureOptionsTextToSpeechClientTests.cs | 98 ++
.../LoggingTextToSpeechClientTests.cs | 150 ++
.../OpenTelemetryTextToSpeechClientTests.cs | 192 +++
.../SingletonTextToSpeechClientExtensions.cs | 11 +
...SpeechClientDependencyInjectionPatterns.cs | 178 +++
.../IngestionPipelineTests.cs | 2 +-
.../Writers/VectorStoreWriterTests.cs | 5 +-
.../Resolver/ResolveAddressesTests.cs | 41 +-
223 files changed, 18851 insertions(+), 3211 deletions(-)
create mode 100644 .github/skills/prepare-release/SKILL.md
create mode 100644 .github/skills/release-notes/SKILL.md
create mode 100644 .github/skills/release-notes/references/categorize-entries.md
create mode 100644 .github/skills/release-notes/references/collect-prs.md
create mode 100644 .github/skills/release-notes/references/editorial-rules.md
create mode 100644 .github/skills/release-notes/references/experimental-features.md
create mode 100644 .github/skills/release-notes/references/format-template.md
create mode 100644 .github/skills/release-notes/references/package-areas.md
create mode 100644 .github/skills/release-notes/references/sql-storage.md
create mode 100644 eng/common/core-templates/steps/cleanup-microbuild.yml
create mode 100644 eng/common/core-templates/steps/install-microbuild.yml
create mode 100644 eng/common/core-templates/steps/source-index-stage1-publish.yml
delete mode 100644 eng/common/cross/armel/armel.jessie.patch
create mode 100644 eng/common/cross/install-debs.py
create mode 100644 eng/common/dotnet.cmd
create mode 100644 eng/common/dotnet.ps1
create mode 100644 eng/common/dotnet.sh
create mode 100644 eng/common/native/install-dependencies.sh
create mode 100644 eng/common/sdk-task.sh
create mode 100644 eng/common/templates-official/steps/source-index-stage1-publish.yml
create mode 100644 eng/common/templates/steps/source-index-stage1-publish.yml
create mode 100644 eng/common/templates/steps/vmr-sync.yml
create mode 100644 eng/common/templates/vmr-build-pr.yml
create mode 100644 eng/common/vmr-sync.ps1
create mode 100644 eng/common/vmr-sync.sh
delete mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/CHANGELOG.md
delete mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/CompatibilitySuppressions.xml
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/CreateConversationItemRealtimeClientMessage.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/CreateResponseRealtimeClientMessage.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/DelegatingRealtimeClient.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/ErrorRealtimeServerMessage.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/IRealtimeClient.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/IRealtimeClientSession.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/InputAudioBufferAppendRealtimeClientMessage.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/InputAudioBufferCommitRealtimeClientMessage.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/InputAudioTranscriptionRealtimeServerMessage.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/OutputTextAudioRealtimeServerMessage.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/RealtimeAudioFormat.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/RealtimeClientMessage.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/RealtimeConversationItem.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/RealtimeResponseStatus.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/RealtimeServerMessage.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/RealtimeServerMessageType.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/RealtimeSessionKind.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/RealtimeSessionOptions.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/ResponseCreatedRealtimeServerMessage.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/ResponseOutputItemRealtimeServerMessage.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/SessionUpdateRealtimeClientMessage.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/Realtime/VoiceActivityDetectionOptions.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/SpeechToText/TranscriptionOptions.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/TextToSpeech/DelegatingTextToSpeechClient.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/TextToSpeech/ITextToSpeechClient.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/TextToSpeech/TextToSpeechClientExtensions.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/TextToSpeech/TextToSpeechClientMetadata.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/TextToSpeech/TextToSpeechOptions.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/TextToSpeech/TextToSpeechResponse.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/TextToSpeech/TextToSpeechResponseUpdate.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/TextToSpeech/TextToSpeechResponseUpdateExtensions.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Abstractions/TextToSpeech/TextToSpeechResponseUpdateKind.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.Evaluation.Reporting/CSharp/Utilities/PathValidation.cs
delete mode 100644 src/Libraries/Microsoft.Extensions.AI.OpenAI/CHANGELOG.md
create mode 100644 src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIRealtimeClient.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIRealtimeClientSession.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAITextToSpeechClient.cs
delete mode 100644 src/Libraries/Microsoft.Extensions.AI/CHANGELOG.md
create mode 100644 src/Libraries/Microsoft.Extensions.AI/Common/FunctionInvocationHelpers.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/Common/FunctionInvocationLogger.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/Common/FunctionInvocationProcessor.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/Common/OpenTelemetryLog.cs
delete mode 100644 src/Libraries/Microsoft.Extensions.AI/CompatibilitySuppressions.xml
create mode 100644 src/Libraries/Microsoft.Extensions.AI/Realtime/FunctionInvokingRealtimeClient.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/Realtime/FunctionInvokingRealtimeClientBuilderExtensions.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/Realtime/FunctionInvokingRealtimeClientSession.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/Realtime/LoggingRealtimeClient.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/Realtime/LoggingRealtimeClientBuilderExtensions.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/Realtime/LoggingRealtimeClientSession.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/Realtime/OpenTelemetryRealtimeClient.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/Realtime/OpenTelemetryRealtimeClientBuilderExtensions.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/Realtime/OpenTelemetryRealtimeClientSession.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/Realtime/RealtimeClientBuilder.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/Realtime/RealtimeClientBuilderRealtimeClientExtensions.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/Realtime/RealtimeClientExtensions.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/Realtime/RealtimeClientSessionExtensions.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/TextToSpeech/ConfigureOptionsTextToSpeechClient.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/TextToSpeech/ConfigureOptionsTextToSpeechClientBuilderExtensions.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/TextToSpeech/LoggingTextToSpeechClient.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/TextToSpeech/LoggingTextToSpeechClientBuilderExtensions.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/TextToSpeech/OpenTelemetryTextToSpeechClient.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/TextToSpeech/OpenTelemetryTextToSpeechClientBuilderExtensions.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/TextToSpeech/TextToSpeechClientBuilder.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/TextToSpeech/TextToSpeechClientBuilderServiceCollectionExtensions.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/TextToSpeech/TextToSpeechClientBuilderTextToSpeechClientExtensions.cs
create mode 100644 src/Libraries/Microsoft.Extensions.AI/Throw.cs
delete mode 100644 src/Libraries/Microsoft.Extensions.DataIngestion.Abstractions/CHANGELOG.md
delete mode 100644 src/Libraries/Microsoft.Extensions.DataIngestion.MarkItDown/CHANGELOG.md
delete mode 100644 src/Libraries/Microsoft.Extensions.DataIngestion.Markdig/CHANGELOG.md
delete mode 100644 src/Libraries/Microsoft.Extensions.DataIngestion/CHANGELOG.md
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Realtime/RealtimeAudioFormatTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Realtime/RealtimeClientMessageTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Realtime/RealtimeConversationItemTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Realtime/RealtimeServerMessageTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/Realtime/RealtimeSessionOptionsTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestRealtimeClientSession.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TestTextToSpeechClient.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TextToSpeech/DelegatingTextToSpeechClientTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TextToSpeech/TextToSpeechClientExtensionsTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TextToSpeech/TextToSpeechClientMetadataTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TextToSpeech/TextToSpeechClientTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TextToSpeech/TextToSpeechOptionsTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TextToSpeech/TextToSpeechResponseTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TextToSpeech/TextToSpeechResponseUpdateExtensionsTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TextToSpeech/TextToSpeechResponseUpdateKindTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Abstractions.Tests/TextToSpeech/TextToSpeechResponseUpdateTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Evaluation.Reporting.Tests/DiskBased/PathValidationTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Integration.Tests/TextToSpeechClientIntegrationTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIRealtimeClientSessionTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIRealtimeClientTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAITextToSpeechClientIntegrationTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAITextToSpeechClientTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Tests/Realtime/FunctionInvokingRealtimeClientTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Tests/Realtime/LoggingRealtimeClientTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Tests/Realtime/OpenTelemetryRealtimeClientTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Tests/Realtime/RealtimeClientBuilderTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Tests/Realtime/RealtimeClientExtensionsTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Tests/Realtime/RealtimeClientSessionExtensionsTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Tests/TextToSpeech/ConfigureOptionsTextToSpeechClientTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Tests/TextToSpeech/LoggingTextToSpeechClientTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Tests/TextToSpeech/OpenTelemetryTextToSpeechClientTests.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Tests/TextToSpeech/SingletonTextToSpeechClientExtensions.cs
create mode 100644 test/Libraries/Microsoft.Extensions.AI.Tests/TextToSpeech/TextToSpeechClientDependencyInjectionPatterns.cs
diff --git a/.github/skills/prepare-release/SKILL.md b/.github/skills/prepare-release/SKILL.md
new file mode 100644
index 00000000000..33f30dbff05
--- /dev/null
+++ b/.github/skills/prepare-release/SKILL.md
@@ -0,0 +1,129 @@
+---
+name: prepare-release
+description: Prepares the repository for an internal release branch. Use this when asked to "prepare for a release", "prepare internal release branch", or similar release preparation tasks.
+---
+
+# Prepare Internal Release Branch
+
+When preparing a public branch for internal release, apply the following changes:
+
+## 1. Directory.Build.props
+
+Add NU1507 warning suppression after the `TestNetCoreTargetFrameworks` PropertyGroup. Internal branches don't use package source mapping due to internal feeds:
+
+```xml
+  <PropertyGroup>
+    <NoWarn>$(NoWarn);NU1507</NoWarn>
+  </PropertyGroup>
+```
+
+Insert this new PropertyGroup right after the closing `</PropertyGroup>` that contains `TestNetCoreTargetFrameworks`.
+
+## 2. NuGet.config
+
+Remove the entire `<packageSourceMapping>` section. This section looks like:
+
+```xml
+<packageSourceMapping>
+  <packageSource key="dotnet-public">
+    <package pattern="*" />
+  </packageSource>
+  <!-- ...additional <packageSource> entries, one per configured feed... -->
+</packageSourceMapping>
+```
+
+**Important**: Do NOT add new internal feed sources to NuGet.config - those are managed by Dependency Flow automation and will be added automatically.
+
+## 3. eng/Versions.props
+
+Update these two properties (do NOT change any version numbers):
+
+Change `StabilizePackageVersion` from `false` to `true`:
+```xml
+<StabilizePackageVersion>true</StabilizePackageVersion>
+```
+
+Change `DotNetFinalVersionKind` from empty to `release`:
+```xml
+<DotNetFinalVersionKind>release</DotNetFinalVersionKind>
+```
+
+## 4. eng/pipelines/templates/BuildAndTest.yml
+
+### Add Private Feeds Credentials Setup
+
+After the Node.js setup task (the `NodeTool@0` task), add these two tasks to authenticate with private Azure DevOps feeds:
+
+```yaml
+ - task: PowerShell@2
+ displayName: Setup Private Feeds Credentials
+ condition: eq(variables['Agent.OS'], 'Windows_NT')
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1
+ arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token
+ env:
+ Token: $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - task: Bash@3
+ displayName: Setup Private Feeds Credentials
+ condition: ne(variables['Agent.OS'], 'Windows_NT')
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
+ arguments: $(Build.SourcesDirectory)/NuGet.config $Token
+ env:
+ Token: $(dn-bot-dnceng-artifact-feeds-rw)
+```
+
+### Comment Out Integration Tests
+
+Comment out the integration tests step as they require authentication to private feeds that isn't available during internal release builds:
+
+```yaml
+ - ${{ if ne(parameters.skipTests, 'true') }}:
+ # Skipping integration tests for now as they require authentication to the private feeds
+ # - script: ${{ parameters.buildScript }}
+ # -integrationTest
+ # -configuration ${{ parameters.buildConfig }}
+ # -warnAsError 1
+ # /bl:${{ parameters.repoLogPath }}/integration_tests.binlog
+ # $(_OfficialBuildIdArgs)
+ # displayName: Run integration tests
+```
+
+## 5. azure-pipelines.yml
+
+Remove the `codecoverage` stage entirely. This is the stage that:
+- Has `displayName: CodeCoverage`
+- Downloads code coverage reports from build jobs
+- Merges and validates combined test coverage
+- Contains a `CodeCoverageReport` job
+
+Also remove the `codecoverage` dependency from the post-build validation's `validateDependsOn` list:
+
+```yaml
+# Remove this conditional dependency block:
+- ${{ if eq(parameters.runTests, true) }}:
+ - codecoverage
+```
+
+## Files NOT to modify
+
+- **eng/Version.Details.xml**: Version updates are managed by Dependency Flow automation
+- **eng/Versions.props version numbers**: Package versions are managed by Dependency Flow automation
+- **NuGet.config feed sources**: Internal darc feeds are added automatically by Dependency Flow
+
+## Summary
+
+| File | Action |
+|------|--------|
+| Directory.Build.props | Add `NU1507` to `NoWarn` in new PropertyGroup |
+| NuGet.config | Remove entire `<packageSourceMapping>` section |
+| eng/Versions.props | Set `StabilizePackageVersion=true`, `DotNetFinalVersionKind=release` |
+| eng/pipelines/templates/BuildAndTest.yml | Add private feeds credentials setup tasks, comment out integration tests |
+| azure-pipelines.yml | Remove `codecoverage` stage and its post-build dependency |
diff --git a/.github/skills/release-notes/SKILL.md b/.github/skills/release-notes/SKILL.md
new file mode 100644
index 00000000000..177972c48f9
--- /dev/null
+++ b/.github/skills/release-notes/SKILL.md
@@ -0,0 +1,132 @@
+---
+name: release-notes
+description: 'Draft release notes for a dotnet/extensions release. Gathers merged PRs, assigns them to packages by file path, categorizes by area and impact, tracks experimental API changes, and produces formatted markdown suitable for a GitHub release. Handles both monthly full releases and targeted intra-month patch releases.'
+agent: 'agent'
+tools: ['github/*', 'sql', 'ask_user']
+---
+
+# Release Notes
+
+Draft release notes for a `dotnet/extensions` release. This skill gathers merged PRs between two tags, maps them to affected packages by examining changed file paths, categorizes entries by area and impact, audits experimental API changes, and produces concise markdown suitable for a GitHub release.
+
+> **User confirmation required: This skill NEVER publishes a GitHub release without explicit user confirmation.** The user must review and approve the draft before any release is created.
+
+## Context
+
+The `dotnet/extensions` repository ships NuGet packages across many functional areas (AI, HTTP Resilience, Diagnostics, Compliance, Telemetry, etc.). Releases come in two forms:
+
+- **Monthly full releases** — all packages ship together with a minor version bump (e.g. v10.3.0 → v10.4.0)
+- **Intra-month patch releases** — a targeted subset of packages ships with a patch version bump (e.g. v10.3.1), typically addressing specific bug fixes or urgent changes
+
+The repository does not follow Semantic Versioning. Major versions align with annual .NET releases, minor versions increment monthly, and patch versions are for intra-month fixes.
+
+The repository makes heavy use of `[Experimental]` attributes. Experimental diagnostic IDs are documented in [`docs/list-of-diagnostics.md`](../../docs/list-of-diagnostics.md). Breaking changes to experimental APIs are expected and acceptable. Graduation of experimental APIs to stable is a noteworthy positive event.
+
+The repository uses `release/` branches (e.g. `release/10.4`) where release tags are associated with commits on those branches. When determining the commit range for a release, ensure the previous and target tags are resolved against the appropriate release branch history.
+
+## Execution Guidelines
+
+- **Do not write intermediate files to disk.** Use the **SQL tool** for structured storage and querying (see [references/sql-storage.md](references/sql-storage.md) for schema).
+- **Do not run linters, formatters, or validators** on the output.
+- **Maximize parallel tool calls.** Fetch multiple PR and issue details in a single response.
+- **Package assignment is file-path-driven.** Determine which packages a PR affects by examining which `src/Libraries/{PackageName}/` paths it touches. See [references/package-areas.md](references/package-areas.md) for the mapping. Use `area-*` labels only as a fallback.
+
+## Process
+
+Work through each step sequentially. Present findings at each step and get user confirmation before proceeding.
+
+### Step 1: Determine Release Scope
+
+The user may provide:
+- **Two tags** (previous and target) — use these directly
+- **A target tag only** — determine the previous release from `gh release list --repo dotnet/extensions --exclude-drafts`
+- **No context** — show the last 5 published releases and ask the user to select
+
+Once the range is established:
+
+1. Determine if this is a **full release** (minor version bump) or **patch release** (patch version bump) based on the version numbers.
+2. For patch releases, ask the user which packages are included (or infer from the PRs).
+3. Get the merge date range for PR collection.
+
+### Step 2: Collect and Enrich PRs
+
+Follow [references/collect-prs.md](references/collect-prs.md):
+
+1. Search for merged PRs in the date range between the two tags.
+2. For each PR, fetch the file list and assign packages based on `src/Libraries/{PackageName}/` paths.
+3. Enrich with full PR body, reactions, linked issues, and co-author data.
+4. Apply exclusion filters (backports, automated version bumps, etc.).
+5. Mark remaining PRs as candidates.
+
+Store all data using the SQL tool.
+
+### Step 3: Categorize and Group
+
+Follow [references/categorize-entries.md](references/categorize-entries.md):
+
+1. **Assign categories**: What's Changed, Documentation Updates, Test Improvements, or Repository Infrastructure Updates.
+2. **Group by package area**: For "What's Changed" entries, group under descriptive area headings from [references/package-areas.md](references/package-areas.md). Each area heading must clearly identify the packages it covers.
+3. **Order by impact**: Within each area, order entries by impact — breaking changes first, then new features, then bug fixes.
+4. **Order areas by activity**: Place the area with the most entries first.
+
+### Step 4: Audit Experimental API Changes
+
+Follow [references/experimental-features.md](references/experimental-features.md):
+
+1. For each candidate PR, **fetch the file list and diff** to identify changes to `[Experimental]` APIs. Do not infer experimental changes from PR titles — always verify against the actual files changed.
+2. Classify each change: now stable, new experimental, breaking change to experimental, or removed.
+3. Derive the conceptual feature name from the actual types/members affected in the diff.
+4. Record in the `experimental_changes` SQL table.
+5. Present findings to the user for confirmation.
+
+### Step 5: Determine Package Versions
+
+Build the package version information:
+
+1. For **full releases**: all packages ship at the same version. Note the version number but do not generate a per-package table — it would be repetitive with no value.
+2. For **patch releases**: build a table of only the affected packages and their version numbers.
+3. Present the version information to the user for confirmation. The user may adjust which packages are included in a patch release.
+
+### Step 6: Draft Release Notes
+
+Compose the release notes following [references/format-template.md](references/format-template.md) and [references/editorial-rules.md](references/editorial-rules.md):
+
+1. **Preamble** — Optionally draft 2–3 sentences summarizing the release theme. Present the preamble options to the user using the `ask_user` tool, offering them the choice of: (a) one of the suggested preambles, (b) writing their own, or (c) skipping the preamble entirely.
+2. **Packages in this release** — for patch releases, the table of affected packages and versions from Step 5. For full releases, omit this table (all packages ship at the same version and listing them all adds no value).
+3. **Breaking Changes** — stable API breaks only (should be very rare). Include migration guidance.
+4. **Experimental API Changes** — from Step 4 results. Group by change type. Omit empty subsections.
+5. **What's Changed** — area-grouped entries from Step 3. Omit empty areas.
+6. **Documentation Updates** — chronological flat list.
+7. **Test Improvements** — chronological flat list.
+8. **Repository Infrastructure Updates** — chronological flat list.
+9. **Acknowledgements** — new contributors, issue reporters, PR reviewers.
+10. **Full Changelog** — link to the GitHub compare view.
+
+Omit empty sections entirely.
+
+### Step 7: Review and Finalize
+
+Present the complete draft to the user:
+
+1. The full release notes markdown
+2. Summary statistics (number of PRs, packages affected, areas covered)
+3. Any unresolved items (ambiguous PRs, missing package assignments)
+
+After the user has reviewed and approved the draft, present the finalization options using the `ask_user` tool:
+- **Create draft release** — create a GitHub release in draft state with the notes as the body
+- **Save to private gist** — save the draft notes to a private GitHub gist for later use
+- **Cancel** — discard the draft without creating anything
+
+## Edge Cases
+
+- **PR spans categories**: Categorize by primary intent; read the title and description.
+- **PR spans multiple areas**: Place under the most central area; mention cross-cutting nature in the description.
+- **Copilot-authored PRs**: If the PR author is Copilot or a bot, check the `copilot_work_started` timeline event for the triggering user, then assignees, then the merger. See [references/editorial-rules.md](references/editorial-rules.md) for the full fallback chain. Never fabricate an attribution — always derive it from the PR data.
+- **No breaking changes**: Omit the Breaking Changes section entirely.
+- **No experimental changes**: Omit the Experimental API Changes section entirely.
+- **No user-facing changes**: If all PRs are documentation, tests, or infrastructure, note this in the release notes. The release still proceeds — this repository ships monthly regardless.
+- **Patch release with unclear scope**: Ask the user to confirm which packages are included.
+- **No previous release**: If this is the first release under the current versioning scheme, gather all PRs from the beginning of the tag history.
+- **Version mismatch**: If the tag version doesn't match the version in source files, flag the discrepancy.
+- **Large release (100+ PRs)**: Break the enrichment step into parallel batches. Consider summarizing lower-impact areas more aggressively.
+- **Cross-repo changes**: Some PRs may reference issues or changes in other repos (e.g. `dotnet/runtime`). Use full markdown links for cross-repo references.
diff --git a/.github/skills/release-notes/references/categorize-entries.md b/.github/skills/release-notes/references/categorize-entries.md
new file mode 100644
index 00000000000..6e570167c07
--- /dev/null
+++ b/.github/skills/release-notes/references/categorize-entries.md
@@ -0,0 +1,75 @@
+# Categorize Entries
+
+Sort candidate PRs into sections and group them by package area for the release notes.
+
+## Step 1: Assign categories
+
+For each candidate PR, assign one of these categories based on the primary intent:
+
+| Category | Key | Content |
+|----------|-----|---------|
+| What's Changed | `changed` | Features, bug fixes, API improvements, performance, breaking changes |
+| Documentation Updates | `docs` | PRs whose sole purpose is documentation |
+| Test Improvements | `tests` | Adding, fixing, or improving tests |
+| Repository Infrastructure Updates | `infra` | CI/CD, dependency bumps, version bumps, build system, skills |
+
+**Decision rules:**
+- If a PR modifies files under `src/Libraries/` or `src/Generators/` or `src/Analyzers/`, it is `changed` (even if it also touches docs or tests)
+- If a PR **only** modifies files under `docs/`, XML doc comments, or README files, it is `docs`
+- If a PR **only** modifies files under `test/`, it is `tests`
+- If a PR **only** modifies `eng/`, `scripts/`, `.github/`, CI YAML files, or root config files, it is `infra`
+- When a PR spans multiple categories, assign based on primary intent — read the title and description
+
+Update the SQL record:
+```sql
+UPDATE prs SET category = '<category>' WHERE number = <pr-number>;
+```
+
+## Step 2: Group by package area
+
+For PRs in the `changed` category, group them under their package area headings using the `pr_packages` table. Each area heading uses the descriptive name from [package-areas.md](package-areas.md).
+
+**Area heading selection:**
+- If a PR affects packages in a single area → place under that area
+- If a PR affects packages in multiple areas → place under the area most central to the change, noting the cross-cutting nature in the description if relevant
+- If a `changed` PR has no package assignment (rare — e.g. a cross-cutting change to `Directory.Build.props` that affects all packages) → place under a "Cross-Cutting Changes" heading
+
+**Area ordering in the release notes:**
+Order areas by the number of entries (most active area first), then alphabetically for ties. This naturally highlights the areas with the most changes.
+
+## Step 3: Impact tiering within areas
+
+Within each area, order entries by impact:
+
+1. **Breaking changes** (stable API breaks — should be very rare)
+2. **Experimental API changes** (graduated, removed, breaking — see [experimental-features.md](experimental-features.md))
+3. **New features and significant improvements**
+4. **Bug fixes with community signal** (reported by community members, high reaction count)
+5. **Other bug fixes and improvements**
+
+Use the popularity score from the SQL `prs` + `issues` tables (combined reaction counts) as a tiebreaker within each tier.
+
+## Step 4: Handle documentation, test, and infrastructure categories
+
+These categories are **not** grouped by package area. They appear as flat lists in their own sections at the bottom of the release notes:
+
+- **Documentation Updates** — sorted by merge date
+- **Test Improvements** — sorted by merge date
+- **Repository Infrastructure Updates** — sorted by merge date
+
+## Full vs. patch release considerations
+
+### Full monthly release
+- All areas with changes get their own heading
+- All four category sections appear (omit empty ones)
+- Include the "Experimental API Changes" section if any experimental changes were detected
+
+### Targeted patch release
+- Only the affected areas appear (typically 1–3 areas)
+- The preamble explicitly states which packages are included in the patch
+- The "Experimental API Changes" section still appears if relevant
+- Documentation, test, and infrastructure sections may be shorter or absent
+
+## Multi-faceted PRs
+
+A single PR may deliver a feature, fix bugs, AND improve performance. Use the verbatim PR title as the entry description regardless. Read the full PR description, not just the title, to determine the correct category assignment.
diff --git a/.github/skills/release-notes/references/collect-prs.md b/.github/skills/release-notes/references/collect-prs.md
new file mode 100644
index 00000000000..de21ba88d89
--- /dev/null
+++ b/.github/skills/release-notes/references/collect-prs.md
@@ -0,0 +1,159 @@
+# Collect and Filter PRs
+
+Gather all merged PRs between the previous release tag and the target for this release.
+
+## Determine the commit range
+
+1. **Previous release tag**: Use `gh release list --repo dotnet/extensions --exclude-drafts --limit 10` to find the most recent published release. If the user specifies a particular previous version, use that instead.
+2. **Target**: The user provides a target (commit SHA, branch, or tag). If none is specified, use the `HEAD` of the default branch (`main`).
+3. Verify both refs exist: `git rev-parse <previous-tag>` and `git rev-parse <target>`.
+
+## Search for merged PRs
+
+### Primary — GitHub MCP server
+
+Use `search_pull_requests` to find PRs merged in the date range. Keep result sets small to avoid large responses being saved to temp files.
+
+```
+search_pull_requests(
+ owner: "dotnet",
+ repo: "extensions",
+ query: "is:merged merged:<start-date>..<end-date>",
+ perPage: 30
+)
+```
+
+Page through results (incrementing `page`) until all PRs are collected. The `start-date` is the merge date of the previous release tag's PR (or the tag's commit date); the `end-date` is the target commit date.
+
+If the date range yields many results, split by week or use label-scoped queries to keep individual searches small.
+
+### Fallback — GitHub CLI
+
+If the MCP server is unavailable:
+
+```bash
+gh pr list --repo dotnet/extensions --state merged \
+ --search "merged:<start-date>..<end-date>" \
+ --limit 500 --json number,title,labels,author,mergedAt,url
+```
+
+## Assign packages from file paths
+
+For each PR, fetch the list of changed files:
+
+```
+pull_request_read(
+ method: "get_files",
+ owner: "dotnet",
+ repo: "extensions",
+ pullNumber: <pr-number>
+)
+```
+
+Extract package names from file paths matching `src/Libraries/{PackageName}/`. For each matched package, look up the area group from [package-areas.md](package-areas.md).
+
+**Rules:**
+- A PR may affect multiple packages across different areas — record all of them in the `pr_packages` table
+- If a PR only touches `test/Libraries/{PackageName}/`, it still maps to that package's area (useful for the "Test Improvements" category)
+- If a PR only touches `eng/`, `scripts/`, `.github/`, or root-level files, it has no package assignment — categorize as infrastructure
+- If a PR only touches `docs/`, it has no package assignment — categorize as documentation
+
+**Fallback for ambiguous PRs:**
+If a PR has no `src/Libraries/` or `test/Libraries/` file changes but does have `area-*` labels, use those labels to infer the package area. Map `area-Microsoft.Extensions.AI` to the AI area group, etc.
+
+## Store PR data
+
+Insert each discovered PR into the `prs` SQL table. See [sql-storage.md](sql-storage.md) for the schema.
+
+## Enrich PR details
+
+For each PR, fetch the full body and metadata:
+
+```
+pull_request_read(
+ method: "get",
+ owner: "dotnet",
+ repo: "extensions",
+ pullNumber: <pr-number>
+)
+```
+
+Update the `body`, `reactions`, `author_association`, and `labels` columns. Multiple independent PR reads can be issued in parallel.
+
+Also fetch comments to look for Copilot-generated summaries:
+
+```
+pull_request_read(
+ method: "get_comments",
+ owner: "dotnet",
+ repo: "extensions",
+ pullNumber: <pr-number>
+)
+```
+
+Also fetch reviews for the acknowledgements section:
+
+```
+pull_request_read(
+ method: "get_reviews",
+ owner: "dotnet",
+ repo: "extensions",
+ pullNumber: <pr-number>
+)
+```
+
+Record each reviewer's username and the PR number. See [editorial-rules.md](editorial-rules.md) for exclusion and sorting rules.
+
+## Discover linked issues
+
+Parse the PR body for issue references:
+- Closing keywords: `Fixes #1234`, `Closes #1234`, `Resolves #1234`
+- Full URL links: `https://github.com/dotnet/extensions/issues/1234`
+- Cross-repo references: `dotnet/extensions#1234`
+
+For each discovered issue, fetch details with `issue_read` and insert into the `issues` table.
+
+## Deduplicate against prior release
+
+PRs merged into `main` may include changes that were already included in a prior release via a `release/` branch. The prior release branch may also contain PRs that were merged into it but never covered in that release's notes — those PRs must still be excluded from the current release notes because they shipped in the prior release's packages.
+
+### Fetch release branches
+
+Before deduplication, ensure the relevant release branches are available locally:
+
+1. Identify which local git remote points to `dotnet/extensions` on GitHub (e.g. by checking `git remote -v` for a URL containing `dotnet/extensions`). Use that remote name in subsequent fetch commands.
+2. Identify the prior release branch from the previous release tag (e.g. `v10.3.0` → `release/10.3`).
+3. Fetch it: `git fetch <remote> release/10.3` (using the remote identified in step 1).
+4. If the current release also has a release branch (e.g. `release/10.4`), fetch that too.
+
+### Exclude PRs already shipped
+
+For each candidate PR, check whether it was already included in any prior release — even if the prior release notes didn't mention it:
+
+1. **Check against the prior release tag**: `git merge-base --is-ancestor <pr-merge-commit> <previous-tag>`. If the PR's merge commit is an ancestor of the previous release tag, it shipped in that release — exclude it.
+2. **Check against the prior release branch HEAD**: The release branch may have advanced beyond the release tag (e.g. `release/10.3` may contain commits merged after `v10.3.0` was tagged but before `v10.3.1` or the branch was abandoned). Check: `git merge-base --is-ancestor <pr-merge-commit> <remote>/release/10.3`. If reachable, the PR was part of that release branch's content — exclude it.
+3. **Check the prior release notes body**: Fetch the GitHub release for the previous tag and check if the PR number appears in the release notes body. This catches PRs that were explicitly covered.
+
+> **Why this matters:** A PR can be merged into a `release/` branch, ship in that release's packages, but never appear in that release's notes (e.g. a late-breaking fix). When that PR is later merged into `main`, it appears in the date-range search for the next release. Without branch-aware deduplication, it would be incorrectly included in the new release notes.
+
+This step is critical and must run before marking PRs as candidates.
+
+## Exclusion filters
+
+Before marking PRs as candidates, exclude:
+- PRs labeled `backport`, `servicing`, or `NO-MERGE`
+- PRs whose title starts with `[release/` or contains `backport`
+- PRs that are purely automated version bumps (title matches `Update version to *` and only changes `Directory.Build.props` or version files)
+
+Mark remaining PRs as candidates: `UPDATE prs SET is_candidate = 1 WHERE ...`
+
+## Populate co-author data
+
+For each candidate PR, collect co-authors from **all commits in the PR**, not just the merge commit:
+
+1. **Fetch the PR's commits** via the pull request commits endpoint (for example, using a `pull_request_read` / PR-scoped `list_commits` method), so it works even if the PR's head branch has been deleted. If needed, also use `get_commit` for the merge commit SHA from the PR details.
+2. **Parse `Co-authored-by:` trailers** from every commit message in the PR. These trailers follow the format: `Co-authored-by: Name <email>`. Extract the GitHub username from the email (e.g. `123456+username@users.noreply.github.com` → `username`) or match the name against known GitHub users.
+3. **Also check the merge commit** message itself for `Co-authored-by:` trailers, as squash-merged PRs consolidate trailers there.
+4. **Check the `copilot_work_started` timeline event** to identify Copilot-assisted PRs where a human delegated the work.
+
+A common pattern in this repository is a human-authored PR with `Co-authored-by: Copilot <...>` trailers on individual commits — these must be detected to give Copilot co-author attribution. Store all discovered co-authors in the database for use during rendering.
diff --git a/.github/skills/release-notes/references/editorial-rules.md b/.github/skills/release-notes/references/editorial-rules.md
new file mode 100644
index 00000000000..9efe10704b7
--- /dev/null
+++ b/.github/skills/release-notes/references/editorial-rules.md
@@ -0,0 +1,143 @@
+# Editorial Rules
+
+## Tone
+
+- Remain **objective and factual** — describe what was introduced or changed without editorial judgment
+ - ✅ `Introduces new APIs for text-to-speech`
+ - ✅ `Added streaming metrics for time-to-first-chunk and time-per-output-chunk`
+ - ❌ `Adds significant advancements in AI capabilities`
+ - ❌ `Previously there was no way to measure streaming latency`
+- Avoid superlatives and subjective qualifiers ("significant", "major improvements", "exciting"). Simply state what was added, changed, or fixed.
+- When context about the prior state is needed, keep it brief — one clause, not a paragraph — then pivot to the new capability
+
+## Conciseness
+
+- **No code samples** in release notes. This repository ships many packages and the release notes should be scannable, not tutorial-length.
+- Each entry is a **single bullet point** using the verbatim PR title.
+- Link to the PR for details (via `#PR` auto-link).
+- If a PR touches multiple concerns, the PR title should capture the primary change. Do not rewrite it.
+
+## Entry format
+
+Use this format (GitHub auto-links `#PR` and `@user` in release notes):
+
+```
+* Description #PR by @author
+```
+
+For PRs with co-authors (harvested from `Co-authored-by` commit trailers):
+```
+* Description #PR by @author (co-authored by @user1 @user2)
+```
+
+For Dependabot PRs, omit the author:
+```
+* Bump actions/checkout from 5.0.0 to 6.0.0 #1234
+```
+
+For Copilot-authored PRs, check the `copilot_work_started` timeline event to identify the triggering user. That person becomes the primary author; `@Copilot` becomes a co-author:
+```
+* Add trace-level logging for HTTP requests #1234 by @author (co-authored by @Copilot)
+```
+
+## Entry naming
+
+- Use the **verbatim PR title** as the entry description. Do not rewrite, rephrase, or summarize PR titles.
+- The PR title is the author's chosen description of the change and should be preserved exactly as written.
+
+## Attribution rules
+
+> **Critical: Every attribution must be derived from the stored PR data, never fabricated or assumed.** When writing each release note entry, read the `author` field from the `prs` SQL table and the co-author data collected during enrichment. Do not write an `@username` attribution without having that username in the database for that PR.
+
+- **PR author**: The `user.login` from the PR details — read this from the `prs` table when rendering each entry
+- **Co-authors**: Harvest from `Co-authored-by` trailers in **all commits** in the PR (not just the merge commit). Individual commits often carry `Co-authored-by: Copilot <...>` trailers that are not present in the merge commit message. Fetch the PR's commits and parse trailers from each one. For squash-merged PRs, check the squash commit message which consolidates trailers.
+- **Copilot-authored PRs**: If the PR author is `Copilot`, `copilot-swe-agent[bot]`, or the PR body mentions "Created from Copilot CLI" / "copilot delegate":
+ 1. Check the `copilot_work_started` timeline event to identify the triggering user
+ 2. If found, the triggering user becomes the primary author and `@Copilot` becomes a co-author
+ 3. If the timeline event is missing, check assignees and the merger — the human who delegated and merged the work is the primary author
+ 4. As a last resort, attribute to the merger
+- **Bots to exclude**: `dependabot[bot]`, `dotnet-maestro[bot]`, `github-actions[bot]`, `copilot-swe-agent[bot]`, and any account ending with `[bot]`
+
+## Sorting
+
+Within the **What's Changed** area sections, sort entries by **impact** (see [categorize-entries.md](categorize-entries.md) for the impact tier ordering). Within all other sections (Documentation Updates, Test Improvements, Repository Infrastructure Updates), sort entries by **merge date** (chronological order, oldest first).
+
+## Category definitions
+
+### What's Changed
+Feature work, bug fixes, API improvements, performance enhancements, and any other user-facing changes. This includes:
+- New API surface area
+- Bug fixes that affect runtime behavior
+- Performance improvements
+- Changes that span code + docs (categorize by primary intent)
+
+### Documentation Updates
+PRs whose **sole purpose** is documentation:
+- Fixing typos in docs
+- Updating XML doc comments (when not part of a functional change)
+- README updates
+
+A PR that changes code AND updates docs belongs in "What's Changed."
+
+### Test Improvements
+PRs focused on test quality or coverage:
+- Adding new tests
+- Fixing broken or flaky tests
+- Test infrastructure improvements
+
+### Repository Infrastructure Updates
+PRs that maintain the development environment:
+- Version bumps
+- CI/CD workflow changes
+- Dependency updates (Dependabot)
+- Build system changes
+- Copilot instructions and skill updates
+
+PRs that touch test code should never be categorized as Infrastructure.
+
+## Acknowledgements section
+
+Include an acknowledgements section at the bottom of the release notes:
+
+1. **New contributors** — people making their first contribution in this release
+2. **Issue reporters** — community members whose reported issues were resolved in this release, citing the resolving PR
+3. **PR reviewers** — single bullet listing all reviewers, sorted by review count (no count shown)
+
+### Collecting PR reviewers
+
+For each candidate PR, fetch the reviews:
+
+```
+pull_request_read(
+ method: "get_reviews",
+ owner: "dotnet",
+ repo: "extensions",
+ pullNumber: <pr-number>
+)
+```
+
+Collect all users who submitted a review (any state: APPROVED, CHANGES_REQUESTED, COMMENTED, DISMISSED). Multiple reviews on the same PR by the same user count as one review for that PR.
+
+**Exclusions — do not list as reviewers:**
+- Bot accounts: any account ending with `[bot]`, `Copilot`, `copilot-swe-agent[bot]`
+- Users who are already listed as PR authors or co-authors elsewhere in the release notes (they are already acknowledged)
+- The PR author themselves (self-reviews)
+
+**Sorting:** Sort reviewers by the number of distinct PRs they reviewed (descending). Do not show the count — just the sorted order.
+
+**Format:** A single bullet with all reviewers listed inline:
+```
+* @user1 @user2 @user3 reviewed pull requests
+```
+
+## Inclusion criteria
+
+Include a feature/fix if:
+- It gives users something new or something that works better
+- It's a community-requested change (high reaction count on backing issue)
+- It changes behavior users need to be aware of
+
+Exclude:
+- Internal refactoring with no user-facing change
+- Test-only changes (these go in "Test Improvements")
+- Build/infrastructure changes (these go in "Repository Infrastructure Updates")
diff --git a/.github/skills/release-notes/references/experimental-features.md b/.github/skills/release-notes/references/experimental-features.md
new file mode 100644
index 00000000000..87efeb610c8
--- /dev/null
+++ b/.github/skills/release-notes/references/experimental-features.md
@@ -0,0 +1,115 @@
+# Experimental Feature Tracking
+
+The `dotnet/extensions` repository makes heavy use of the `[Experimental]` attribute to mark APIs that are not yet stable. Experimental APIs have their own diagnostic IDs and may undergo breaking changes, graduation to stable, or removal between releases. These changes are noteworthy and deserve dedicated coverage in release notes.
+
+## Diagnostic ID conventions
+
+Experimental APIs in this repository use diagnostic IDs documented in [`docs/list-of-diagnostics.md`](../../../docs/list-of-diagnostics.md) under the "Experiments" section. Consult that file for the current list of experimental diagnostic IDs and their descriptions. New diagnostic IDs may be added as new experimental features are introduced.
+
+## Types of experimental changes
+
+### Now Stable
+
+An experimental API has its `[Experimental]` attribute removed, making it a stable part of the public API. This is a positive signal — the API has been validated through preview usage and feedback.
+
+**How to detect:**
+- PR removes `[Experimental("...")]` attribute from types or members
+- PR updates the project's experimental diagnostic staging properties (for example, removing the ID from `StageDevDiagnosticId` or related MSBuild properties) in line with [`docs/list-of-diagnostics.md`](../../../docs/list-of-diagnostics.md)
+- PR description or title mentions "promote", "graduate", "stabilize", or "remove experimental"
+- The corresponding `.json` API baseline file changes a type's `"Stage"` from `"Experimental"` to `"Stable"`
+
+**How to report:**
+Reference the feature by a conceptual name, not individual type names. Do not attribute to an author.
+```markdown
+* <Feature Name> APIs are now stable (previously `EXTEXP0003`) #PR
+```
+
+### Removed
+
+An experimental API is removed entirely. This is acceptable under the experimental API contract — consumers who suppressed the diagnostic accepted this possibility.
+
+**How to detect:**
+- PR deletes types or members that were annotated with `[Experimental]`
+- PR description mentions "remove" along with experimental type names
+- The `.json` API baseline file removes entries that were previously `"Stage": "Experimental"`
+
+**How to report:**
+Reference the feature by a conceptual name, not individual type names. Do not attribute to an author.
+```markdown
+* <Feature Name> experimental APIs removed (was experimental under `MEAI001`) #PR
+```
+
+### Breaking changes to experimental APIs
+
+An experimental API changes its signature, behavior, or contracts. These changes are acceptable under the experimental API policy but consumers need to know.
+
+**How to detect:**
+- PR modifies the signature of types/members annotated with `[Experimental]`
+- PR changes behavior described in XML docs for experimental types
+- PR renames experimental types or changes their namespace
+
+**How to report:**
+```markdown
+* <Feature Name>: `TypeOrMemberName` signature changed (experimental under `EXTEXP0002`) #PR
+```
+
+### New experimental APIs
+
+A new API is introduced with the `[Experimental]` attribute. These are interesting for early adopters.
+
+**How to detect:**
+- PR adds new types or members annotated with `[Experimental]`
+- PR introduces a new diagnostic ID
+- The `.json` API baseline file adds entries with `"Stage": "Experimental"`
+
+**How to report:**
+```markdown
+* New experimental API: <Feature Name> (`MEAI002`) #PR
+```
+
+## Detection strategy
+
+For each candidate PR, detect experimental API changes using the **PR diff** and the **`run-apichief` skill**. Do not rely on PR titles, descriptions, or labels to determine *what* changed — they can be misleading or incomplete.
+
+> **Critical: Every experimental change description must be derived from the actual file diff, not inferred from PR titles.** PR titles may use imprecise or overloaded terminology (e.g. "Reduction" could refer to chat reduction or tool reduction — entirely different features). Always fetch and inspect the changed files to determine exactly which types and members were affected.
+
+### Step-by-step
+
+1. **Fetch the PR file list** using `pull_request_read` with method `get_files` for every candidate PR. This is mandatory — do not skip it or rely on title-based inference.
+2. **Inspect the diff for experimental annotations.** Look for:
+ - Files adding or removing `[Experimental("...")]` attributes
+ - Changes to `.json` API baseline files where the `"Stage"` field changes between `"Experimental"` and `"Stable"`
+ - Deletions of types or members that were previously experimental
+3. **Derive the feature name from the actual types affected**, not from the PR title. For example, if the deleted files are `IToolReductionStrategy.cs`, `ToolReducingChatClient.cs`, and `EmbeddingToolReductionStrategy.cs`, the feature name is "Tool Reduction" — even if the PR title says something more generic like "Remove Reduction APIs."
+4. **Cross-reference with `run-apichief`** — use the `run-apichief` skill's `emit delta` or `check breaking` commands to compare API baselines between the previous release and the current target. This reveals:
+ - New experimental types/members added
+ - Experimental types/members removed
+ - Experimental types/members that changed stage to Stable
+ - Signature changes on experimental types/members
+5. **Cross-reference `docs/list-of-diagnostics.md`** — check if the PR modifies the diagnostics list, which signals addition or removal of experimental diagnostic IDs.
+
+Store detected changes in the `experimental_changes` SQL table (see [sql-storage.md](sql-storage.md)). The `description` column must reflect the actual types/members found in the diff, not a summary derived from the PR title.
+
+## Presentation in release notes
+
+Experimental feature changes appear in a dedicated section near the top of the release notes, after any stable breaking changes (which should be rare) and before the area-grouped "What's Changed" sections. **Do not include author attributions in this section** — the PRs will still appear with full attribution in the "What's Changed" list.
+
+Group experimental changes by type:
+
+```markdown
+## Experimental API Changes
+
+### Now Stable
+* HTTP Logging Middleware APIs are now stable (previously `EXTEXP0013`) #7380
+
+### New Experimental APIs
+* Realtime Client Sessions (`MEAI001`) #7285
+
+### Breaking Changes to Experimental APIs
+* AI Function Approvals: `FunctionCallApprovalContext` constructor changed (experimental under `MEAI001`) #7350
+
+### Removed Experimental APIs
+* AI Tool Reduction experimental APIs removed (was experimental under `MEAI001`) #7300
+```
+
+Omit subsections that have no entries.
diff --git a/.github/skills/release-notes/references/format-template.md b/.github/skills/release-notes/references/format-template.md
new file mode 100644
index 00000000000..93f4a18debd
--- /dev/null
+++ b/.github/skills/release-notes/references/format-template.md
@@ -0,0 +1,119 @@
+# Release Notes Format Template
+
+## Full monthly release
+
+Use this template when all packages ship together (e.g. v10.3.0 → v10.4.0).
+
+```markdown
+[Optional preamble — 2–3 sentences summarizing the release theme. May be omitted.]
+
+## Breaking Changes
+
+[If any stable API breaking changes exist — these should be very rare]
+
+1. **Description #PR**
+ * Detail of the break
+ * Migration guidance
+
+## Experimental API Changes
+
+[Grouped by change type — see experimental-features.md]
+
+### Now Stable
+* <Feature Name> APIs are now stable (previously `DIAGID`) #PR
+
+### New Experimental APIs
+* New experimental API: <Feature Name> (`DIAGID`) #PR
+
+### Breaking Changes to Experimental APIs
+* <Feature Name>: `TypeName` signature changed (experimental under `DIAGID`) #PR
+
+### Removed Experimental APIs
+* <Feature Name> experimental APIs removed (was experimental under `DIAGID`) #PR
+
+## What's Changed
+
+### [Area Name — e.g. "AI"]
+
+* Description #PR by @author (co-authored by @user1 @Copilot)
+* Description #PR by @author
+
+### [Area Name — e.g. "HTTP Resilience and Diagnostics"]
+
+* Description #PR by @author
+
+### [Area Name — e.g. "Diagnostics, Health Checks, and Resource Monitoring"]
+
+* Description #PR by @author
+
+## Documentation Updates
+
+* Description #PR by @author
+
+## Test Improvements
+
+* Description #PR by @author
+
+## Repository Infrastructure Updates
+
+* Description #PR by @author
+
+## Acknowledgements
+
+* @user made their first contribution in #PR
+* @user submitted issue #1234 (resolved by #5678)
+* @user1 @user2 @user3 reviewed pull requests
+
+**Full Changelog**: https://github.com/dotnet/extensions/compare/v10.3.0...v10.4.0
+```
+
+## Targeted patch release
+
+Use this template when only a subset of packages ships (e.g. v10.3.1).
+
+```markdown
+[Optional preamble — state which packages are patched and why. Example: "This patch release addresses issues in the AI and HTTP Resilience packages." May be omitted.]
+
+## Packages in this release
+
+[Only the patched packages]
+
+| Package | Version |
+|---------|---------|
+| Microsoft.Extensions.AI | 10.3.1 |
+| Microsoft.Extensions.AI.Abstractions | 10.3.1 |
+
+## What's Changed
+
+### [Area Name]
+
+* Description #PR by @author
+
+## Acknowledgements
+
+* @user submitted issue #1234 (resolved by #5678)
+* @user1 @user2 reviewed pull requests
+
+**Full Changelog**: https://github.com/dotnet/extensions/compare/v10.3.0...v10.3.1
+```
+
+## Section rules
+
+1. **Preamble** — optional. If included, summarize the release theme. For patch releases, if included, name the affected packages. Suggest a couple of options to the user and always offer the option of omitting it.
+2. **Packages in this release** — for patch releases only. Table of affected packages and versions. Omit for full releases (all packages ship at the same version).
+3. **Breaking Changes** — only for stable API breaks (very rare). Omit if none.
+4. **Experimental API Changes** — omit if no experimental changes. Omit empty subsections within.
+5. **What's Changed** — grouped by area. Order areas by activity (most entries first). Omit areas with no entries.
+6. **Documentation Updates** — flat list. Omit if none.
+7. **Test Improvements** — flat list. Omit if none.
+8. **Repository Infrastructure Updates** — flat list. Omit if none.
+9. **Acknowledgements** — always include. Omit empty sub-items.
+10. **Full Changelog** — always last. Link to the GitHub compare view.
+
+## PR and issue references
+
+Use the format `#number` for PRs and issues in the same repository. GitHub will auto-link these in release notes. Use full markdown links only for cross-repo references:
+
+- ✅ `#7380` (same repo — GitHub auto-links)
+- ✅ `[dotnet/runtime#124264](https://github.com/dotnet/runtime/pull/124264)` (cross-repo)
+- ❌ `[#7380](https://github.com/dotnet/extensions/pull/7380)` (unnecessary — same repo)
diff --git a/.github/skills/release-notes/references/package-areas.md b/.github/skills/release-notes/references/package-areas.md
new file mode 100644
index 00000000000..9d8203951da
--- /dev/null
+++ b/.github/skills/release-notes/references/package-areas.md
@@ -0,0 +1,118 @@
+# Package Area Definitions
+
+This file maps the libraries in `src/Libraries/` to logical area groups for organizing release notes. Each group name must clearly and unambiguously identify the packages it covers.
+
+## Area groups
+
+### AI
+
+Packages:
+- `Microsoft.Extensions.AI`
+- `Microsoft.Extensions.AI.Abstractions`
+- `Microsoft.Extensions.AI.OpenAI`
+
+### AI Evaluation
+
+Packages:
+- `Microsoft.Extensions.AI.Evaluation`
+- `Microsoft.Extensions.AI.Evaluation.Console`
+- `Microsoft.Extensions.AI.Evaluation.NLP`
+- `Microsoft.Extensions.AI.Evaluation.Quality`
+- `Microsoft.Extensions.AI.Evaluation.Reporting`
+- `Microsoft.Extensions.AI.Evaluation.Reporting.Azure`
+- `Microsoft.Extensions.AI.Evaluation.Safety`
+
+### Data Ingestion
+
+Packages:
+- `Microsoft.Extensions.DataIngestion`
+- `Microsoft.Extensions.DataIngestion.Abstractions`
+- `Microsoft.Extensions.DataIngestion.Markdig`
+- `Microsoft.Extensions.DataIngestion.MarkItDown`
+
+### Diagnostics, Health Checks, and Resource Monitoring
+
+Packages:
+- `Microsoft.Extensions.Diagnostics.ExceptionSummarization`
+- `Microsoft.Extensions.Diagnostics.HealthChecks.Common`
+- `Microsoft.Extensions.Diagnostics.HealthChecks.ResourceUtilization`
+- `Microsoft.Extensions.Diagnostics.Probes`
+- `Microsoft.Extensions.Diagnostics.ResourceMonitoring`
+- `Microsoft.Extensions.Diagnostics.ResourceMonitoring.Kubernetes`
+- `Microsoft.Extensions.Diagnostics.Testing`
+
+### Compliance, Redaction, and Data Classification
+
+Packages:
+- `Microsoft.Extensions.Compliance.Abstractions`
+- `Microsoft.Extensions.Compliance.Redaction`
+- `Microsoft.Extensions.Compliance.Testing`
+
+### HTTP Resilience and Diagnostics
+
+Packages:
+- `Microsoft.Extensions.Http.Resilience`
+- `Microsoft.Extensions.Resilience`
+- `Microsoft.Extensions.Http.Diagnostics`
+
+### Telemetry and Observability
+
+Packages:
+- `Microsoft.Extensions.Telemetry`
+- `Microsoft.Extensions.Telemetry.Abstractions`
+
+### ASP.NET Core Extensions
+
+Packages:
+- `Microsoft.AspNetCore.Diagnostics.Middleware`
+- `Microsoft.AspNetCore.HeaderParsing`
+- `Microsoft.AspNetCore.Testing`
+- `Microsoft.AspNetCore.AsyncState`
+
+### Service Discovery
+
+Packages:
+- `Microsoft.Extensions.ServiceDiscovery`
+- `Microsoft.Extensions.ServiceDiscovery.Abstractions`
+- `Microsoft.Extensions.ServiceDiscovery.Dns`
+- `Microsoft.Extensions.ServiceDiscovery.Yarp`
+
+### Hosting, Configuration, and Ambient Metadata
+
+Packages:
+- `Microsoft.Extensions.Hosting.Testing`
+- `Microsoft.Extensions.Options.Contextual`
+- `Microsoft.Extensions.AmbientMetadata.Application`
+- `Microsoft.Extensions.AmbientMetadata.Build`
+
+### Caching
+
+Packages:
+- `Microsoft.Extensions.Caching.Hybrid`
+
+### Dependency Injection and Object Pooling
+
+Packages:
+- `Microsoft.Extensions.DependencyInjection.AutoActivation`
+- `Microsoft.Extensions.ObjectPool.DependencyInjection`
+
+### Async State
+
+Packages:
+- `Microsoft.Extensions.AsyncState`
+
+### Time Provider Testing
+
+Packages:
+- `Microsoft.Extensions.TimeProvider.Testing`
+
+## Assigning PRs to areas
+
+1. **Primary method — file paths**: Examine the files changed in each PR. Extract package names from paths matching `src/Libraries/{PackageName}/`. Map each package name to its area using the table above.
+2. **Fallback — `area-*` labels**: If a PR has no `src/Libraries/` file changes (e.g. infrastructure PRs), check for `area-*` labels and map those to the closest area group.
+3. **Multi-area PRs**: A single PR may touch multiple packages in different areas. Assign the PR to all affected areas. When writing the release notes entry, place the PR under the area most central to the change and add a brief cross-reference note for other areas if warranted.
+4. **No area match**: PRs that touch only `eng/`, `scripts/`, `.github/`, `docs/`, or `test/` without corresponding `src/Libraries/` changes are infrastructure, documentation, or test PRs — categorize them by type, not by area.
+
+## Maintaining this file
+
+When new libraries are added to `src/Libraries/`, update this file to include them in the appropriate area group. If a new area is needed, choose a name that clearly identifies the packages it contains.
diff --git a/.github/skills/release-notes/references/sql-storage.md b/.github/skills/release-notes/references/sql-storage.md
new file mode 100644
index 00000000000..cbbb03cc390
--- /dev/null
+++ b/.github/skills/release-notes/references/sql-storage.md
@@ -0,0 +1,167 @@
+# SQL Schema and Patterns
+
+Use the SQL tool for all structured data storage during the release notes pipeline. Do **not** write intermediate files to disk.
+
+## Core tables
+
+```sql
+CREATE TABLE prs (
+ number INTEGER PRIMARY KEY,
+ title TEXT,
+ author TEXT,
+ author_association TEXT,
+ labels TEXT, -- comma-separated label names
+ merged_at TEXT,
+ body TEXT,
+ reactions INTEGER DEFAULT 0,
+ is_candidate INTEGER DEFAULT 0,
+ category TEXT, -- 'changed', 'docs', 'tests', 'infra'
+ packages TEXT -- comma-separated package names from file paths
+);
+
+CREATE TABLE pr_packages (
+ pr_number INTEGER NOT NULL,
+ package_name TEXT NOT NULL,
+ area_group TEXT NOT NULL,
+ PRIMARY KEY (pr_number, package_name)
+);
+
+CREATE TABLE issues (
+ number INTEGER PRIMARY KEY,
+ title TEXT,
+ body TEXT,
+ labels TEXT,
+ reactions INTEGER DEFAULT 0,
+ pr_number INTEGER -- the PR that references this issue
+);
+
+CREATE TABLE experimental_changes (
+ pr_number INTEGER NOT NULL,
+ package_name TEXT NOT NULL,
+ change_type TEXT NOT NULL, -- 'graduated', 'removed', 'breaking', 'added'
+ diagnostic_id TEXT, -- e.g. 'EXTEXP0001', 'MEAI001'
+ description TEXT,
+ PRIMARY KEY (pr_number, package_name, change_type)
+);
+
+CREATE TABLE pr_coauthors (
+ pr_number INTEGER NOT NULL,
+ coauthor TEXT NOT NULL,
+ PRIMARY KEY (pr_number, coauthor)
+);
+
+CREATE TABLE pr_reviewers (
+ pr_number INTEGER NOT NULL,
+ reviewer TEXT NOT NULL,
+ PRIMARY KEY (pr_number, reviewer)
+);
+```
+
+## Common queries
+
+### Find candidate PRs
+
+```sql
+SELECT * FROM prs WHERE is_candidate = 1 ORDER BY merged_at;
+```
+
+### PRs by area group
+
+```sql
+SELECT DISTINCT p.number, p.title, p.author, p.merged_at, pp.area_group
+FROM prs p
+JOIN pr_packages pp ON p.number = pp.pr_number
+WHERE p.is_candidate = 1
+ORDER BY pp.area_group, p.merged_at;
+```
+
+### Affected packages (for patch release scope)
+
+```sql
+SELECT DISTINCT package_name, area_group
+FROM pr_packages pp
+JOIN prs p ON pp.pr_number = p.number
+WHERE p.is_candidate = 1
+ORDER BY area_group, package_name;
+```
+
+### Popularity ranking
+
+```sql
+SELECT p.number, p.title, p.reactions,
+ COALESCE(SUM(i.reactions), 0) AS issue_reactions,
+ p.reactions + COALESCE(SUM(i.reactions), 0) AS total_reactions
+FROM prs p
+LEFT JOIN issues i ON i.pr_number = p.number
+WHERE p.is_candidate = 1
+GROUP BY p.number
+ORDER BY total_reactions DESC;
+```
+
+### Experimental feature changes
+
+```sql
+SELECT ec.change_type, ec.package_name, ec.diagnostic_id, ec.description,
+ p.number, p.title, p.author
+FROM experimental_changes ec
+JOIN prs p ON ec.pr_number = p.number
+ORDER BY ec.change_type, ec.package_name;
+```
+
+### Category breakdown
+
+```sql
+SELECT category, COUNT(*) as count
+FROM prs
+WHERE is_candidate = 1
+GROUP BY category;
+```
+
+### All contributors for acknowledgements
+
+```sql
+SELECT contributor, MIN(pr_number) as first_pr FROM (
+ SELECT author AS contributor, number AS pr_number
+ FROM prs
+ WHERE is_candidate = 1
+
+ UNION
+
+ SELECT c.coauthor AS contributor, c.pr_number
+ FROM pr_coauthors c
+ JOIN prs p ON p.number = c.pr_number
+ WHERE p.is_candidate = 1
+)
+WHERE contributor NOT LIKE '%[bot]%'
+ AND contributor != 'Copilot'
+GROUP BY contributor
+ORDER BY first_pr;
+```
+
+### PR reviewers for acknowledgements
+
+```sql
+SELECT reviewer, COUNT(DISTINCT pr_number) as review_count
+FROM pr_reviewers r
+WHERE reviewer NOT LIKE '%[bot]%'
+ AND reviewer != 'Copilot'
+ AND reviewer NOT IN (SELECT DISTINCT author FROM prs WHERE is_candidate = 1)
+ AND reviewer NOT IN (
+ SELECT DISTINCT c.coauthor
+ FROM pr_coauthors c
+ JOIN prs p ON p.number = c.pr_number
+ WHERE p.is_candidate = 1
+ )
+GROUP BY reviewer
+ORDER BY review_count DESC;
+```
+
+## Usage notes
+
+- Insert PRs as they are discovered during collection. Update `body`, `reactions`, and `packages` during enrichment.
+- Insert into `pr_packages` after file path analysis determines affected packages (see [package-areas.md](package-areas.md)).
+- Insert into `pr_coauthors` during enrichment when harvesting `Co-authored-by:` trailers from PR commits.
+- Insert into `pr_reviewers` during enrichment when fetching PR reviews.
+- Mark candidates with `is_candidate = 1` after filtering.
+- Insert `experimental_changes` during the experimental feature audit step.
+- Additional PRs can be added to the candidate list manually by number.
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 6caf3896493..80d5564d387 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -145,7 +145,7 @@ extends:
template: v1/1ES.Official.PipelineTemplate.yml@1ESPipelineTemplates
parameters:
settings:
- networkIsolationPolicy: Permissive,CFSClean2
+ networkIsolationPolicy: Permissive, CFSClean, CFSClean2
featureFlags:
binskimScanAllExtensions: true
sdl:
diff --git a/eng/MSBuild/Packaging.targets b/eng/MSBuild/Packaging.targets
index 32e13e879a6..72f0a412ef2 100644
--- a/eng/MSBuild/Packaging.targets
+++ b/eng/MSBuild/Packaging.targets
@@ -37,7 +37,7 @@
true
- 10.2.0
+ 10.4.0
diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
index 516f874a838..5df4a3b67a9 100644
--- a/eng/Version.Details.xml
+++ b/eng/Version.Details.xml
@@ -1,222 +1,222 @@
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-runtime
- 2f124007573374800632d39177cde00ca9fe1ef0
+ 19c07820cb72aafc554c3bc8fe3c54010f5123f0
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-aspnetcore
- f736effe82a61eb6f5eba46e4173eae3b7d3dffd
+ baa6b294e728e6171378b4e8c52e42e7c4d4ed63
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-aspnetcore
- f736effe82a61eb6f5eba46e4173eae3b7d3dffd
+ baa6b294e728e6171378b4e8c52e42e7c4d4ed63
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-aspnetcore
- f736effe82a61eb6f5eba46e4173eae3b7d3dffd
+ baa6b294e728e6171378b4e8c52e42e7c4d4ed63
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-aspnetcore
- f736effe82a61eb6f5eba46e4173eae3b7d3dffd
+ baa6b294e728e6171378b4e8c52e42e7c4d4ed63
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-aspnetcore
- f736effe82a61eb6f5eba46e4173eae3b7d3dffd
+ baa6b294e728e6171378b4e8c52e42e7c4d4ed63
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-aspnetcore
- f736effe82a61eb6f5eba46e4173eae3b7d3dffd
+ baa6b294e728e6171378b4e8c52e42e7c4d4ed63
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-aspnetcore
- f736effe82a61eb6f5eba46e4173eae3b7d3dffd
+ baa6b294e728e6171378b4e8c52e42e7c4d4ed63
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-aspnetcore
- f736effe82a61eb6f5eba46e4173eae3b7d3dffd
+ baa6b294e728e6171378b4e8c52e42e7c4d4ed63
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-aspnetcore
- f736effe82a61eb6f5eba46e4173eae3b7d3dffd
+ baa6b294e728e6171378b4e8c52e42e7c4d4ed63
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-aspnetcore
- f736effe82a61eb6f5eba46e4173eae3b7d3dffd
+ baa6b294e728e6171378b4e8c52e42e7c4d4ed63
-
+
https://dev.azure.com/dnceng/internal/_git/dotnet-efcore
- f838f47ba4ccda655b7f55b2e22984bdc9495720
+ 1bea6ab613ce7346af69753850e0dd7eb774bc8a
-
+
https://github.com/dotnet/arcade
- 29a2184303379b9840b70e7cdb2faa0f39833b89
+ 3907f62e877e105b6196b1bd9c309203d6362a0a
-
+
https://github.com/dotnet/arcade
- 29a2184303379b9840b70e7cdb2faa0f39833b89
+ 3907f62e877e105b6196b1bd9c309203d6362a0a
-
+
https://github.com/dotnet/arcade
- 29a2184303379b9840b70e7cdb2faa0f39833b89
+ 3907f62e877e105b6196b1bd9c309203d6362a0a
diff --git a/eng/Versions.props b/eng/Versions.props
index a2223d61f74..9295b3a07e2 100644
--- a/eng/Versions.props
+++ b/eng/Versions.props
@@ -33,117 +33,117 @@
-->
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
- 9.0.12
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
+ 9.0.14
- 9.0.12
+ 9.0.14
- 9.0.0-beta.26123.3
+ 10.0.0-beta.26168.1
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
- 10.0.2
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
+ 10.0.4
- 10.0.2
+ 10.0.4
- 10.0.0-beta.25612.105
+ 10.0.0-beta.26119.110
@@ -168,8 +168,8 @@
8.0.0
8.0.2
8.0.0
- 8.0.23
- 8.0.23
+ 8.0.25
+ 8.0.25
8.0.0
8.0.1
8.0.1
@@ -186,18 +186,18 @@
8.0.6
8.0.0
- 8.0.23
- 8.0.23
- 8.0.23
- 8.0.23
- 8.0.23
- 8.0.23
- 8.0.23
- 8.0.23
- 8.0.23
- 8.0.23
+ 8.0.25
+ 8.0.25
+ 8.0.25
+ 8.0.25
+ 8.0.25
+ 8.0.25
+ 8.0.25
+ 8.0.25
+ 8.0.25
+ 8.0.25
- 8.0.23
+ 8.0.25
"
- sed -i.bak "s|$OldDisableValue|$NewDisableValue|" $ConfigFile
- echo "Neutralized disablePackageSources entry for '$DisabledSourceName'"
- fi
- done
-fi
diff --git a/eng/common/build.ps1 b/eng/common/build.ps1
index 438f9920c43..8cfee107e7a 100644
--- a/eng/common/build.ps1
+++ b/eng/common/build.ps1
@@ -7,6 +7,7 @@ Param(
[string] $msbuildEngine = $null,
[bool] $warnAsError = $true,
[bool] $nodeReuse = $true,
+ [switch] $buildCheck = $false,
[switch][Alias('r')]$restore,
[switch] $deployDeps,
[switch][Alias('b')]$build,
@@ -20,6 +21,7 @@ Param(
[switch] $publish,
[switch] $clean,
[switch][Alias('pb')]$productBuild,
+ [switch]$fromVMR,
[switch][Alias('bl')]$binaryLog,
[switch][Alias('nobl')]$excludeCIBinarylog,
[switch] $ci,
@@ -71,6 +73,9 @@ function Print-Usage() {
Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
Write-Host " -excludePrereleaseVS Set to exclude build engines in prerelease versions of Visual Studio"
Write-Host " -nativeToolsOnMachine Sets the native tools on machine environment variable (indicating that the script should use native tools on machine)"
+ Write-Host " -nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')"
+ Write-Host " -buildCheck Sets /check msbuild parameter"
+ Write-Host " -fromVMR Set when building from within the VMR"
Write-Host ""
Write-Host "Command line arguments not listed above are passed thru to msbuild."
@@ -97,6 +102,7 @@ function Build {
$bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'Build.binlog') } else { '' }
$platformArg = if ($platform) { "/p:Platform=$platform" } else { '' }
+ $check = if ($buildCheck) { '/check' } else { '' }
if ($projects) {
# Re-assign properties to a new variable because PowerShell doesn't let us append properties directly for unclear reasons.
@@ -113,6 +119,7 @@ function Build {
MSBuild $toolsetBuildProj `
$bl `
$platformArg `
+ $check `
/p:Configuration=$configuration `
/p:RepoRoot=$RepoRoot `
/p:Restore=$restore `
@@ -122,11 +129,13 @@ function Build {
/p:Deploy=$deploy `
/p:Test=$test `
/p:Pack=$pack `
- /p:DotNetBuildRepo=$productBuild `
+ /p:DotNetBuild=$productBuild `
+ /p:DotNetBuildFromVMR=$fromVMR `
/p:IntegrationTest=$integrationTest `
/p:PerformanceTest=$performanceTest `
/p:Sign=$sign `
/p:Publish=$publish `
+ /p:RestoreStaticGraphEnableBinaryLogger=$binaryLog `
@properties
}
diff --git a/eng/common/build.sh b/eng/common/build.sh
index ac1ee8620cd..9767bb411a4 100755
--- a/eng/common/build.sh
+++ b/eng/common/build.sh
@@ -42,6 +42,8 @@ usage()
echo " --prepareMachine Prepare machine for CI run, clean up processes after build"
echo " --nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')"
echo " --warnAsError Sets warnaserror msbuild parameter ('true' or 'false')"
+ echo " --buildCheck Sets /check msbuild parameter"
+ echo " --fromVMR Set when building from within the VMR"
echo ""
echo "Command line arguments not listed above are passed thru to msbuild."
echo "Arguments can also be passed in with a single hyphen."
@@ -63,6 +65,7 @@ restore=false
build=false
source_build=false
product_build=false
+from_vmr=false
rebuild=false
test=false
integration_test=false
@@ -76,6 +79,7 @@ clean=false
warn_as_error=true
node_reuse=true
+build_check=false
binary_log=false
exclude_ci_binary_log=false
pipelines_log=false
@@ -87,7 +91,7 @@ verbosity='minimal'
runtime_source_feed=''
runtime_source_feed_key=''
-properties=''
+properties=()
while [[ $# > 0 ]]; do
opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
case "$opt" in
@@ -127,19 +131,22 @@ while [[ $# > 0 ]]; do
-pack)
pack=true
;;
- -sourcebuild|-sb)
+ -sourcebuild|-source-build|-sb)
build=true
source_build=true
product_build=true
restore=true
pack=true
;;
- -productBuild|-pb)
+ -productbuild|-product-build|-pb)
build=true
product_build=true
restore=true
pack=true
;;
+ -fromvmr|-from-vmr)
+ from_vmr=true
+ ;;
-test|-t)
test=true
;;
@@ -173,6 +180,9 @@ while [[ $# > 0 ]]; do
node_reuse=$2
shift
;;
+ -buildcheck)
+ build_check=true
+ ;;
-runtimesourcefeed)
runtime_source_feed=$2
shift
@@ -182,7 +192,7 @@ while [[ $# > 0 ]]; do
shift
;;
*)
- properties="$properties $1"
+ properties+=("$1")
;;
esac
@@ -216,7 +226,7 @@ function Build {
InitializeCustomToolset
if [[ ! -z "$projects" ]]; then
- properties="$properties /p:Projects=$projects"
+ properties+=("/p:Projects=$projects")
fi
local bl=""
@@ -224,15 +234,21 @@ function Build {
bl="/bl:\"$log_dir/Build.binlog\""
fi
+ local check=""
+ if [[ "$build_check" == true ]]; then
+ check="/check"
+ fi
+
MSBuild $_InitializeToolset \
$bl \
+ $check \
/p:Configuration=$configuration \
/p:RepoRoot="$repo_root" \
/p:Restore=$restore \
/p:Build=$build \
- /p:DotNetBuildRepo=$product_build \
- /p:ArcadeBuildFromSource=$source_build \
+ /p:DotNetBuild=$product_build \
/p:DotNetBuildSourceOnly=$source_build \
+ /p:DotNetBuildFromVMR=$from_vmr \
/p:Rebuild=$rebuild \
/p:Test=$test \
/p:Pack=$pack \
@@ -240,7 +256,8 @@ function Build {
/p:PerformanceTest=$performance_test \
/p:Sign=$sign \
/p:Publish=$publish \
- $properties
+ /p:RestoreStaticGraphEnableBinaryLogger=$binary_log \
+ ${properties[@]+"${properties[@]}"}
ExitWithExitCode 0
}
diff --git a/eng/common/cibuild.sh b/eng/common/cibuild.sh
index 1a02c0dec8f..66e3b0ac61c 100755
--- a/eng/common/cibuild.sh
+++ b/eng/common/cibuild.sh
@@ -13,4 +13,4 @@ while [[ -h $source ]]; do
done
scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
-. "$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@
\ No newline at end of file
+. "$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@
diff --git a/eng/common/core-templates/job/job.yml b/eng/common/core-templates/job/job.yml
index 8da43d3b583..5ce51840619 100644
--- a/eng/common/core-templates/job/job.yml
+++ b/eng/common/core-templates/job/job.yml
@@ -19,11 +19,11 @@ parameters:
# publishing defaults
artifacts: ''
enableMicrobuild: false
+ enableMicrobuildForMacAndLinux: false
microbuildUseESRP: true
enablePublishBuildArtifacts: false
enablePublishBuildAssets: false
enablePublishTestResults: false
- enablePublishUsingPipelines: false
enableBuildRetry: false
mergeTestResults: false
testRunTitle: ''
@@ -74,9 +74,6 @@ jobs:
- ${{ if ne(parameters.enableTelemetry, 'false') }}:
- name: DOTNET_CLI_TELEMETRY_PROFILE
value: '$(Build.Repository.Uri)'
- - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
- - name: EnableRichCodeNavigation
- value: 'true'
# Retry signature validation up to three times, waiting 2 seconds between attempts.
# See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
- name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
@@ -128,23 +125,12 @@ jobs:
- ${{ preStep }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - task: MicroBuildSigningPlugin@4
- displayName: Install MicroBuild plugin
- inputs:
- signType: $(_SignType)
- zipSources: false
- feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
- ${{ if eq(parameters.microbuildUseESRP, true) }}:
- ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
- ConnectedPMEServiceName: 6cc74545-d7b9-4050-9dfa-ebefcc8961ea
- ${{ else }}:
- ConnectedPMEServiceName: 248d384a-b39b-46e3-8ad5-c2c210d5e7ca
- env:
- TeamName: $(_TeamName)
- MicroBuildOutputFolderOverride: '$(Agent.TempDirectory)'
+ - template: /eng/common/core-templates/steps/install-microbuild.yml
+ parameters:
+ enableMicrobuild: ${{ parameters.enableMicrobuild }}
+ enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }}
+ microbuildUseESRP: ${{ parameters.microbuildUseESRP }}
continueOnError: ${{ parameters.continueOnError }}
- condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
- ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
- task: NuGetAuthenticate@1
@@ -160,27 +146,15 @@ jobs:
- ${{ each step in parameters.steps }}:
- ${{ step }}
- - ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
- - task: RichCodeNavIndexer@0
- displayName: RichCodeNav Upload
- inputs:
- languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
- environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }}
- richNavLogOutputDirectory: $(System.DefaultWorkingDirectory)/artifacts/bin
- uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
- continueOnError: true
-
- ${{ each step in parameters.componentGovernanceSteps }}:
- ${{ step }}
- - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: MicroBuildCleanup@1
- displayName: Execute Microbuild cleanup tasks
- condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - template: /eng/common/core-templates/steps/cleanup-microbuild.yml
+ parameters:
+ enableMicrobuild: ${{ parameters.enableMicrobuild }}
+ enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }}
continueOnError: ${{ parameters.continueOnError }}
- env:
- TeamName: $(_TeamName)
# Publish test results
- ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
diff --git a/eng/common/core-templates/job/onelocbuild.yml b/eng/common/core-templates/job/onelocbuild.yml
index edefa789d36..c5788829a87 100644
--- a/eng/common/core-templates/job/onelocbuild.yml
+++ b/eng/common/core-templates/job/onelocbuild.yml
@@ -4,7 +4,7 @@ parameters:
# Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
pool: ''
-
+
CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
@@ -27,7 +27,7 @@ parameters:
is1ESPipeline: ''
jobs:
- job: OneLocBuild${{ parameters.JobNameSuffix }}
-
+
dependsOn: ${{ parameters.dependsOn }}
displayName: OneLocBuild${{ parameters.JobNameSuffix }}
@@ -86,8 +86,7 @@ jobs:
isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
${{ if eq(parameters.CreatePr, true) }}:
isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
- ${{ if eq(parameters.RepoType, 'gitHub') }}:
- isShouldReusePrSelected: ${{ parameters.ReusePr }}
+ isShouldReusePrSelected: ${{ parameters.ReusePr }}
packageSourceAuth: patAuth
patVariable: ${{ parameters.CeapexPat }}
${{ if eq(parameters.RepoType, 'gitHub') }}:
@@ -100,22 +99,20 @@ jobs:
mirrorBranch: ${{ parameters.MirrorBranch }}
condition: ${{ parameters.condition }}
- - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
- parameters:
- is1ESPipeline: ${{ parameters.is1ESPipeline }}
- args:
- displayName: Publish Localization Files
- pathToPublish: '$(Build.ArtifactStagingDirectory)/loc'
- publishLocation: Container
- artifactName: Loc
- condition: ${{ parameters.condition }}
+ # Copy the locProject.json to the root of the Loc directory, then publish a pipeline artifact
+ - task: CopyFiles@2
+ displayName: Copy LocProject.json
+ inputs:
+ SourceFolder: '$(System.DefaultWorkingDirectory)/eng/Localize/'
+ Contents: 'LocProject.json'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/loc'
+ condition: ${{ parameters.condition }}
- - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
args:
- displayName: Publish LocProject.json
- pathToPublish: '$(System.DefaultWorkingDirectory)/eng/Localize/'
- publishLocation: Container
- artifactName: Loc
- condition: ${{ parameters.condition }}
\ No newline at end of file
+ targetPath: '$(Build.ArtifactStagingDirectory)/loc'
+ artifactName: 'Loc'
+ displayName: 'Publish Localization Files'
+ condition: ${{ parameters.condition }}
diff --git a/eng/common/core-templates/job/publish-build-assets.yml b/eng/common/core-templates/job/publish-build-assets.yml
index 3cb20fb5041..b955fac6e13 100644
--- a/eng/common/core-templates/job/publish-build-assets.yml
+++ b/eng/common/core-templates/job/publish-build-assets.yml
@@ -20,9 +20,6 @@ parameters:
# if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
runAsPublic: false
- # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
- publishUsingPipelines: false
-
# Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
publishAssetsImmediately: false
@@ -32,6 +29,15 @@ parameters:
is1ESPipeline: ''
+ # Optional: Whether or not the build has assets it wants to publish to BAR
+ isAssetlessBuild: false
+
+ # Optional, publishing version
+ publishingVersion: 3
+
+ # Optional: A minimatch pattern for the asset manifests to publish to BAR
+ assetManifestsPattern: '*/manifests/**/*.xml'
+
repositoryAlias: self
officialBuildId: ''
@@ -84,18 +90,44 @@ jobs:
- checkout: ${{ parameters.repositoryAlias }}
fetchDepth: 3
clean: true
-
- - task: DownloadBuildArtifacts@0
- displayName: Download artifact
- inputs:
- artifactName: AssetManifests
- downloadPath: '$(Build.StagingDirectory)/Download'
- checkDownloadedFiles: true
- condition: ${{ parameters.condition }}
- continueOnError: ${{ parameters.continueOnError }}
+
+ - ${{ if eq(parameters.isAssetlessBuild, 'false') }}:
+ - ${{ if eq(parameters.publishingVersion, 3) }}:
+ - task: DownloadPipelineArtifact@2
+ displayName: Download Asset Manifests
+ inputs:
+ artifactName: AssetManifests
+ targetPath: '$(Build.StagingDirectory)/AssetManifests'
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+ - ${{ if eq(parameters.publishingVersion, 4) }}:
+ - task: DownloadPipelineArtifact@2
+ displayName: Download V4 asset manifests
+ inputs:
+ itemPattern: '*/manifests/**/*.xml'
+ targetPath: '$(Build.StagingDirectory)/AllAssetManifests'
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+ - task: CopyFiles@2
+ displayName: Copy V4 asset manifests to AssetManifests
+ inputs:
+ SourceFolder: '$(Build.StagingDirectory)/AllAssetManifests'
+ Contents: ${{ parameters.assetManifestsPattern }}
+ TargetFolder: '$(Build.StagingDirectory)/AssetManifests'
+ flattenFolders: true
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
- task: NuGetAuthenticate@1
+ # Populate internal runtime variables.
+ - template: /eng/common/templates/steps/enable-internal-sources.yml
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ parameters:
+ legacyCredential: $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - template: /eng/common/templates/steps/enable-internal-runtimes.yml
+
- task: AzureCLI@2
displayName: Publish Build Assets
inputs:
@@ -104,10 +136,13 @@ jobs:
scriptLocation: scriptPath
scriptPath: $(System.DefaultWorkingDirectory)/eng/common/sdk-task.ps1
arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
- /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
+ /p:ManifestsPath='$(Build.StagingDirectory)/AssetManifests'
+ /p:IsAssetlessBuild=${{ parameters.isAssetlessBuild }}
/p:MaestroApiEndpoint=https://maestro.dot.net
- /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
/p:OfficialBuildId=$(OfficialBuildId)
+ -runtimeSourceFeed https://ci.dot.net/internal
+ -runtimeSourceFeedKey '$(dotnetbuilds-internal-container-read-token-base64)'
+
condition: ${{ parameters.condition }}
continueOnError: ${{ parameters.continueOnError }}
@@ -129,6 +164,17 @@ jobs:
Copy-Item -Path $symbolExclusionfile -Destination "$(Build.StagingDirectory)/ReleaseConfigs"
}
+ - ${{ if eq(parameters.publishingVersion, 4) }}:
+ - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+ parameters:
+ is1ESPipeline: ${{ parameters.is1ESPipeline }}
+ args:
+ targetPath: '$(Build.ArtifactStagingDirectory)/MergedManifest.xml'
+ artifactName: AssetManifests
+ displayName: 'Publish Merged Manifest'
+ retryCountOnTaskFailure: 10 # for any logs being locked
+ sbomEnabled: false # we don't need SBOM for logs
+
- template: /eng/common/core-templates/steps/publish-build-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
@@ -138,7 +184,7 @@ jobs:
publishLocation: Container
artifactName: ReleaseConfigs
- - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+ - ${{ if or(eq(parameters.publishAssetsImmediately, 'true'), eq(parameters.isAssetlessBuild, 'true')) }}:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
parameters:
BARBuildId: ${{ parameters.BARBuildId }}
@@ -164,6 +210,9 @@ jobs:
-WaitPublishingFinish true
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
-SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+ -SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}'
+ -runtimeSourceFeed https://ci.dot.net/internal
+ -runtimeSourceFeedKey '$(dotnetbuilds-internal-container-read-token-base64)'
- ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
- template: /eng/common/core-templates/steps/publish-logs.yml
diff --git a/eng/common/core-templates/job/source-build.yml b/eng/common/core-templates/job/source-build.yml
index d943748ac10..1997c2ae00d 100644
--- a/eng/common/core-templates/job/source-build.yml
+++ b/eng/common/core-templates/job/source-build.yml
@@ -12,9 +12,10 @@ parameters:
# The name of the job. This is included in the job ID.
# targetRID: ''
# The name of the target RID to use, instead of the one auto-detected by Arcade.
- # nonPortable: false
+ # portableBuild: false
# Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
- # linux-x64), and compiling against distro-provided packages rather than portable ones.
+ # linux-x64), and compiling against distro-provided packages rather than portable ones. The
+ # default is portable mode.
# skipPublishValidation: false
# Disables publishing validation. By default, a check is performed to ensure no packages are
# published by source-build.
@@ -33,9 +34,6 @@ parameters:
# container and pool.
platform: {}
- # Optional list of directories to ignore for component governance scans.
- componentGovernanceIgnoreDirectories: []
-
is1ESPipeline: ''
# If set to true and running on a non-public project,
@@ -62,7 +60,7 @@ jobs:
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
- demands: ImageOverride -equals build.ubuntu.2004.amd64
+ demands: ImageOverride -equals build.azurelinux.3.amd64.open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
image: build.azurelinux.3.amd64
@@ -71,10 +69,10 @@ jobs:
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
- demands: ImageOverride -equals Build.Ubuntu.2204.Amd64.Open
+ demands: ImageOverride -equals build.azurelinux.3.amd64.open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
- demands: ImageOverride -equals Build.Ubuntu.2204.Amd64
+ demands: ImageOverride -equals build.azurelinux.3.amd64
${{ if ne(parameters.platform.pool, '') }}:
pool: ${{ parameters.platform.pool }}
@@ -96,4 +94,3 @@ jobs:
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
platform: ${{ parameters.platform }}
- componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
diff --git a/eng/common/core-templates/job/source-index-stage1.yml b/eng/common/core-templates/job/source-index-stage1.yml
index ddf8c2e00d8..76baf5c2725 100644
--- a/eng/common/core-templates/job/source-index-stage1.yml
+++ b/eng/common/core-templates/job/source-index-stage1.yml
@@ -1,8 +1,5 @@
parameters:
runAsPublic: false
- sourceIndexUploadPackageVersion: 2.0.0-20250425.2
- sourceIndexProcessBinlogPackageVersion: 1.0.1-20250425.2
- sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
preSteps: []
binlogPath: artifacts/log/Debug/Build.binlog
@@ -16,12 +13,6 @@ jobs:
dependsOn: ${{ parameters.dependsOn }}
condition: ${{ parameters.condition }}
variables:
- - name: SourceIndexUploadPackageVersion
- value: ${{ parameters.sourceIndexUploadPackageVersion }}
- - name: SourceIndexProcessBinlogPackageVersion
- value: ${{ parameters.sourceIndexProcessBinlogPackageVersion }}
- - name: SourceIndexPackageSource
- value: ${{ parameters.sourceIndexPackageSource }}
- name: BinlogPath
value: ${{ parameters.binlogPath }}
- template: /eng/common/core-templates/variables/pool-providers.yml
@@ -34,12 +25,10 @@ jobs:
pool:
${{ if eq(variables['System.TeamProject'], 'public') }}:
name: $(DncEngPublicBuildPool)
- image: 1es-windows-2022-open
- os: windows
+ image: windows.vs2026preview.scout.amd64.open
${{ if eq(variables['System.TeamProject'], 'internal') }}:
name: $(DncEngInternalBuildPool)
- image: 1es-windows-2022
- os: windows
+ image: windows.vs2026preview.scout.amd64
steps:
- ${{ if eq(parameters.is1ESPipeline, '') }}:
@@ -47,35 +36,9 @@ jobs:
- ${{ each preStep in parameters.preSteps }}:
- ${{ preStep }}
-
- - task: UseDotNet@2
- displayName: Use .NET 8 SDK
- inputs:
- packageType: sdk
- version: 8.0.x
- installationPath: $(Agent.TempDirectory)/dotnet
- workingDirectory: $(Agent.TempDirectory)
-
- - script: |
- $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(sourceIndexProcessBinlogPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(sourceIndexUploadPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- displayName: Download Tools
- # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
- workingDirectory: $(Agent.TempDirectory)
-
- script: ${{ parameters.sourceIndexBuildCommand }}
displayName: Build Repository
- - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(System.DefaultWorkingDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
- displayName: Process Binlog into indexable sln
-
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - task: AzureCLI@2
- displayName: Log in to Azure and upload stage1 artifacts to source index
- inputs:
- azureSubscription: 'SourceDotNet Stage1 Publish'
- addSpnToEnvironment: true
- scriptType: 'ps'
- scriptLocation: 'inlineScript'
- inlineScript: |
- $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1
+ - template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
+ parameters:
+ binLogPath: ${{ parameters.binLogPath }}
diff --git a/eng/common/core-templates/jobs/codeql-build.yml b/eng/common/core-templates/jobs/codeql-build.yml
index 4571a7864df..dbc14ac580a 100644
--- a/eng/common/core-templates/jobs/codeql-build.yml
+++ b/eng/common/core-templates/jobs/codeql-build.yml
@@ -15,7 +15,6 @@ jobs:
enablePublishBuildArtifacts: false
enablePublishTestResults: false
enablePublishBuildAssets: false
- enablePublishUsingPipelines: false
enableTelemetry: true
variables:
diff --git a/eng/common/core-templates/jobs/jobs.yml b/eng/common/core-templates/jobs/jobs.yml
index bf33cdc2cc7..01ada747665 100644
--- a/eng/common/core-templates/jobs/jobs.yml
+++ b/eng/common/core-templates/jobs/jobs.yml
@@ -5,9 +5,6 @@ parameters:
# Optional: Include PublishBuildArtifacts task
enablePublishBuildArtifacts: false
- # Optional: Enable publishing using release pipelines
- enablePublishUsingPipelines: false
-
# Optional: Enable running the source-build jobs to build repo from source
enableSourceBuild: false
@@ -30,6 +27,9 @@ parameters:
# Optional: Publish the assets as soon as the publish to BAR stage is complete, rather than doing so in a separate stage.
publishAssetsImmediately: false
+ # Optional: Whether or not the build has assets it wants to publish to BAR
+ isAssetlessBuild: false
+
# Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
artifactsPublishingAdditionalParameters: ''
signingValidationAdditionalParameters: ''
@@ -85,7 +85,6 @@ jobs:
- template: /eng/common/core-templates/jobs/source-build.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
- allCompletedJobId: Source_Build_Complete
${{ each parameter in parameters.sourceBuildParameters }}:
${{ parameter.key }}: ${{ parameter.value }}
@@ -98,7 +97,7 @@ jobs:
${{ parameter.key }}: ${{ parameter.value }}
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
+ - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, ''), eq(parameters.isAssetlessBuild, true)) }}:
- template: ../job/publish-build-assets.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
@@ -110,12 +109,10 @@ jobs:
- ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
- ${{ each job in parameters.jobs }}:
- ${{ job.job }}
- - ${{ if eq(parameters.enableSourceBuild, true) }}:
- - Source_Build_Complete
runAsPublic: ${{ parameters.runAsPublic }}
- publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
- publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
+ publishAssetsImmediately: ${{ or(parameters.publishAssetsImmediately, parameters.isAssetlessBuild) }}
+ isAssetlessBuild: ${{ parameters.isAssetlessBuild }}
enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
diff --git a/eng/common/core-templates/jobs/source-build.yml b/eng/common/core-templates/jobs/source-build.yml
index 0b408a67bd5..d92860cba20 100644
--- a/eng/common/core-templates/jobs/source-build.yml
+++ b/eng/common/core-templates/jobs/source-build.yml
@@ -2,28 +2,19 @@ parameters:
# This template adds arcade-powered source-build to CI. A job is created for each platform, as
# well as an optional server job that completes when all platform jobs complete.
- # The name of the "join" job for all source-build platforms. If set to empty string, the job is
- # not included. Existing repo pipelines can use this job depend on all source-build jobs
- # completing without maintaining a separate list of every single job ID: just depend on this one
- # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
- allCompletedJobId: ''
-
# See /eng/common/core-templates/job/source-build.yml
jobNamePrefix: 'Source_Build'
# This is the default platform provided by Arcade, intended for use by a managed-only repo.
defaultManagedPlatform:
name: 'Managed'
- container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream9'
+ container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream-10-amd64'
# Defines the platforms on which to run build jobs. One job is created for each platform, and the
# object in this array is sent to the job template as 'platform'. If no platforms are specified,
# one job runs on 'defaultManagedPlatform'.
platforms: []
- # Optional list of directories to ignore for component governance scans.
- componentGovernanceIgnoreDirectories: []
-
is1ESPipeline: ''
# If set to true and running on a non-public project,
@@ -34,23 +25,12 @@ parameters:
jobs:
-- ${{ if ne(parameters.allCompletedJobId, '') }}:
- - job: ${{ parameters.allCompletedJobId }}
- displayName: Source-Build Complete
- pool: server
- dependsOn:
- - ${{ each platform in parameters.platforms }}:
- - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
- - ${{ if eq(length(parameters.platforms), 0) }}:
- - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
-
- ${{ each platform in parameters.platforms }}:
- template: /eng/common/core-templates/job/source-build.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
jobNamePrefix: ${{ parameters.jobNamePrefix }}
platform: ${{ platform }}
- componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
enableInternalSources: ${{ parameters.enableInternalSources }}
- ${{ if eq(length(parameters.platforms), 0) }}:
@@ -59,5 +39,4 @@ jobs:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
jobNamePrefix: ${{ parameters.jobNamePrefix }}
platform: ${{ parameters.defaultManagedPlatform }}
- componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
enableInternalSources: ${{ parameters.enableInternalSources }}
diff --git a/eng/common/core-templates/post-build/post-build.yml b/eng/common/core-templates/post-build/post-build.yml
index 864427d9694..b942a79ef02 100644
--- a/eng/common/core-templates/post-build/post-build.yml
+++ b/eng/common/core-templates/post-build/post-build.yml
@@ -60,6 +60,11 @@ parameters:
artifactNames: ''
downloadArtifacts: true
+ - name: isAssetlessBuild
+ type: boolean
+ displayName: Is Assetless Build
+ default: false
+
# These parameters let the user customize the call to sdk-task.ps1 for publishing
# symbols & general artifacts as well as for signing validation
- name: symbolPublishingAdditionalParameters
@@ -122,11 +127,11 @@ stages:
${{ else }}:
${{ if eq(parameters.is1ESPipeline, true) }}:
name: $(DncEngInternalBuildPool)
- image: windows.vs2022.amd64
+ image: windows.vs2026preview.scout.amd64
os: windows
${{ else }}:
name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2022.amd64
+ demands: ImageOverride -equals windows.vs2026preview.scout.amd64
steps:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
@@ -170,7 +175,7 @@ stages:
os: windows
${{ else }}:
name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2022.amd64
+ demands: ImageOverride -equals windows.vs2026preview.scout.amd64
steps:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
parameters:
@@ -188,9 +193,6 @@ stages:
buildId: $(AzDOBuildId)
artifactName: PackageArtifacts
checkDownloadedFiles: true
- itemPattern: |
- **
- !**/Microsoft.SourceBuild.Intermediate.*.nupkg
# This is necessary whenever we want to publish/restore to an AzDO private feed
# Since sdk-task.ps1 tries to restore packages we need to do this authentication here
@@ -234,7 +236,7 @@ stages:
os: windows
${{ else }}:
name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2022.amd64
+ demands: ImageOverride -equals windows.vs2026preview.scout.amd64
steps:
- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml
parameters:
@@ -305,6 +307,13 @@ stages:
- task: NuGetAuthenticate@1
+ # Populate internal runtime variables.
+ - template: /eng/common/templates/steps/enable-internal-sources.yml
+ parameters:
+ legacyCredential: $(dn-bot-dnceng-artifact-feeds-rw)
+
+ - template: /eng/common/templates/steps/enable-internal-runtimes.yml
+
# Darc is targeting 8.0, so make sure it's installed
- task: UseDotNet@2
inputs:
@@ -325,3 +334,6 @@ stages:
-RequireDefaultChannels ${{ parameters.requireDefaultChannels }}
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
-SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+ -SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}'
+ -runtimeSourceFeed https://ci.dot.net/internal
+ -runtimeSourceFeedKey '$(dotnetbuilds-internal-container-read-token-base64)'
diff --git a/eng/common/core-templates/steps/cleanup-microbuild.yml b/eng/common/core-templates/steps/cleanup-microbuild.yml
new file mode 100644
index 00000000000..c0fdcd3379d
--- /dev/null
+++ b/eng/common/core-templates/steps/cleanup-microbuild.yml
@@ -0,0 +1,28 @@
+parameters:
+ # Enable cleanup tasks for MicroBuild
+ enableMicrobuild: false
+ # Enable cleanup tasks for MicroBuild on Mac and Linux
+ # Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT'
+ enableMicrobuildForMacAndLinux: false
+ continueOnError: false
+
+steps:
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - task: MicroBuildCleanup@1
+ displayName: Execute Microbuild cleanup tasks
+ condition: and(
+ always(),
+ or(
+ and(
+ eq(variables['Agent.Os'], 'Windows_NT'),
+ in(variables['_SignType'], 'real', 'test')
+ ),
+ and(
+ ${{ eq(parameters.enableMicrobuildForMacAndLinux, true) }},
+ ne(variables['Agent.Os'], 'Windows_NT'),
+ eq(variables['_SignType'], 'real')
+ )
+ ))
+ continueOnError: ${{ parameters.continueOnError }}
+ env:
+ TeamName: $(_TeamName)
diff --git a/eng/common/core-templates/steps/generate-sbom.yml b/eng/common/core-templates/steps/generate-sbom.yml
index 7f5b84c4cb8..c05f6502797 100644
--- a/eng/common/core-templates/steps/generate-sbom.yml
+++ b/eng/common/core-templates/steps/generate-sbom.yml
@@ -5,7 +5,7 @@
# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
parameters:
- PackageVersion: 9.0.0
+ PackageVersion: 10.0.0
BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts'
PackageName: '.NET'
ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
diff --git a/eng/common/core-templates/steps/get-delegation-sas.yml b/eng/common/core-templates/steps/get-delegation-sas.yml
index 9db5617ea7d..d2901470a7f 100644
--- a/eng/common/core-templates/steps/get-delegation-sas.yml
+++ b/eng/common/core-templates/steps/get-delegation-sas.yml
@@ -31,16 +31,7 @@ steps:
# Calculate the expiration of the SAS token and convert to UTC
$expiry = (Get-Date).AddHours(${{ parameters.expiryInHours }}).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ")
- # Temporarily work around a helix issue where SAS tokens with / in them will cause incorrect downloads
- # of correlation payloads. https://github.com/dotnet/dnceng/issues/3484
- $sas = ""
- do {
- $sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv
- if ($LASTEXITCODE -ne 0) {
- Write-Error "Failed to generate SAS token."
- exit 1
- }
- } while($sas.IndexOf('/') -ne -1)
+ $sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv
if ($LASTEXITCODE -ne 0) {
Write-Error "Failed to generate SAS token."
diff --git a/eng/common/core-templates/steps/install-microbuild.yml b/eng/common/core-templates/steps/install-microbuild.yml
new file mode 100644
index 00000000000..553fce66b94
--- /dev/null
+++ b/eng/common/core-templates/steps/install-microbuild.yml
@@ -0,0 +1,110 @@
+parameters:
+ # Enable install tasks for MicroBuild
+ enableMicrobuild: false
+ # Enable install tasks for MicroBuild on Mac and Linux
+ # Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT'
+ enableMicrobuildForMacAndLinux: false
+ # Determines whether the ESRP service connection information should be passed to the signing plugin.
+ # This overlaps with _SignType to some degree. We only need the service connection for real signing.
+ # It's important that the service connection not be passed to the MicroBuildSigningPlugin task in this place.
+ # Doing so will cause the service connection to be authorized for the pipeline, which isn't allowed and won't work for non-prod.
+ # Unfortunately, _SignType can't be used to exclude the use of the service connection in non-real sign scenarios. The
+ # variable is not available in template expression. _SignType has a very large proliferation across .NET, so replacing it is tough.
+ microbuildUseESRP: true
+ # Microbuild installation directory
+ microBuildOutputFolder: $(Agent.TempDirectory)/MicroBuild
+
+ continueOnError: false
+
+steps:
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, 'true') }}:
+ # Needed to download the MicroBuild plugin nupkgs on Mac and Linux when nuget.exe is unavailable
+ - task: UseDotNet@2
+ displayName: Install .NET 8.0 SDK for MicroBuild Plugin
+ inputs:
+ packageType: sdk
+ version: 8.0.x
+ installationPath: ${{ parameters.microBuildOutputFolder }}/.dotnet-microbuild
+ condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
+
+ - script: |
+ set -euo pipefail
+
+ # UseDotNet@2 prepends the dotnet executable path to the PATH variable, so we can call dotnet directly
+ version=$(dotnet --version)
+          cat << EOF > ${{ parameters.microBuildOutputFolder }}/global.json
+ {
+ "sdk": {
+ "version": "$version",
+ "paths": [
+ "${{ parameters.microBuildOutputFolder }}/.dotnet-microbuild"
+ ],
+ "errorMessage": "The .NET SDK version $version is required to install the MicroBuild signing plugin."
+ }
+ }
+ EOF
+ displayName: 'Add global.json to MicroBuild Installation path'
+ workingDirectory: ${{ parameters.microBuildOutputFolder }}
+ condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
+
+ - script: |
+ REM Check if ESRP is disabled while SignType is real
+ if /I "${{ parameters.microbuildUseESRP }}"=="false" if /I "$(_SignType)"=="real" (
+ echo Error: ESRP must be enabled when SignType is real.
+ exit /b 1
+ )
+ displayName: 'Validate ESRP usage (Windows)'
+ condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'))
+ - script: |
+ # Check if ESRP is disabled while SignType is real
+ if [ "${{ parameters.microbuildUseESRP }}" = "false" ] && [ "$(_SignType)" = "real" ]; then
+ echo "Error: ESRP must be enabled when SignType is real."
+ exit 1
+ fi
+ displayName: 'Validate ESRP usage (Non-Windows)'
+ condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
+
+ # Two different MB install steps. This is due to not being able to use the agent OS during
+ # YAML expansion, and Windows vs. Linux/Mac uses different service connections. However,
+ # we can avoid including the MB install step if not enabled at all. This avoids a bunch of
+ # extra pipeline authorizations, since most pipelines do not sign on non-Windows.
+ - task: MicroBuildSigningPlugin@4
+ displayName: Install MicroBuild plugin (Windows)
+ inputs:
+ signType: $(_SignType)
+ zipSources: false
+ feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
+ ${{ if eq(parameters.microbuildUseESRP, true) }}:
+ ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)'
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ ConnectedPMEServiceName: 6cc74545-d7b9-4050-9dfa-ebefcc8961ea
+ ${{ else }}:
+ ConnectedPMEServiceName: 248d384a-b39b-46e3-8ad5-c2c210d5e7ca
+ env:
+ TeamName: $(_TeamName)
+ MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'), in(variables['_SignType'], 'real', 'test'))
+
+ - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, true) }}:
+ - task: MicroBuildSigningPlugin@4
+ displayName: Install MicroBuild plugin (non-Windows)
+ inputs:
+ signType: $(_SignType)
+ zipSources: false
+ feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
+ workingDirectory: ${{ parameters.microBuildOutputFolder }}
+ ${{ if eq(parameters.microbuildUseESRP, true) }}:
+ ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)'
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ ConnectedPMEServiceName: beb8cb23-b303-4c95-ab26-9e44bc958d39
+ ${{ else }}:
+ ConnectedPMEServiceName: c24de2a5-cc7a-493d-95e4-8e5ff5cad2bc
+ env:
+ TeamName: $(_TeamName)
+ MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'), eq(variables['_SignType'], 'real'))
diff --git a/eng/common/core-templates/steps/publish-logs.yml b/eng/common/core-templates/steps/publish-logs.yml
index 0623ac6e112..a9ea99ba6aa 100644
--- a/eng/common/core-templates/steps/publish-logs.yml
+++ b/eng/common/core-templates/steps/publish-logs.yml
@@ -26,15 +26,18 @@ steps:
# If the file exists - sensitive data for redaction will be sourced from it
# (single entry per line, lines starting with '# ' are considered comments and skipped)
arguments: -InputPath '$(System.DefaultWorkingDirectory)/PostBuildLogs'
- -BinlogToolVersion ${{parameters.BinlogToolVersion}}
+ -BinlogToolVersion '${{parameters.BinlogToolVersion}}'
-TokensFilePath '$(System.DefaultWorkingDirectory)/eng/BinlogSecretsRedactionFile.txt'
+ -runtimeSourceFeed https://ci.dot.net/internal
+ -runtimeSourceFeedKey '$(dotnetbuilds-internal-container-read-token-base64)'
'$(publishing-dnceng-devdiv-code-r-build-re)'
- '$(MaestroAccessToken)'
'$(dn-bot-all-orgs-artifact-feeds-rw)'
'$(akams-client-id)'
'$(microsoft-symbol-server-pat)'
'$(symweb-symbol-server-pat)'
+ '$(dnceng-symbol-server-pat)'
'$(dn-bot-all-orgs-build-rw-code-rw)'
+ '$(System.AccessToken)'
${{parameters.CustomSensitiveDataList}}
continueOnError: true
condition: always()
@@ -45,6 +48,7 @@ steps:
SourceFolder: '$(System.DefaultWorkingDirectory)/PostBuildLogs'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/PostBuildLogs'
+ condition: always()
- template: /eng/common/core-templates/steps/publish-build-artifacts.yml
parameters:
diff --git a/eng/common/core-templates/steps/source-build.yml b/eng/common/core-templates/steps/source-build.yml
index 7846584d2a7..b9c86c18ae4 100644
--- a/eng/common/core-templates/steps/source-build.yml
+++ b/eng/common/core-templates/steps/source-build.yml
@@ -11,10 +11,6 @@ parameters:
# for details. The entire object is described in the 'job' template for simplicity, even though
# the usage of the properties on this object is split between the 'job' and 'steps' templates.
platform: {}
-
- # Optional list of directories to ignore for component governance scans.
- componentGovernanceIgnoreDirectories: []
-
is1ESPipeline: false
steps:
@@ -23,25 +19,12 @@ steps:
set -x
df -h
- # If file changes are detected, set CopyWipIntoInnerSourceBuildRepo to copy the WIP changes into the inner source build repo.
- internalRestoreArgs=
- if ! git diff --quiet; then
- internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
- # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
- # This only works if there is a username/email configured, which won't be the case in most CI runs.
- git config --get user.email
- if [ $? -ne 0 ]; then
- git config user.email dn-bot@microsoft.com
- git config user.name dn-bot
- fi
- fi
-
# If building on the internal project, the internal storage variable may be available (usually only if needed)
# In that case, add variables to allow the download of internal runtimes if the specified versions are not found
# in the default public locations.
internalRuntimeDownloadArgs=
if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
- internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://ci.dot.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://ci.dot.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
+ internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://ci.dot.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://ci.dot.net/internal --runtimesourcefeedkey '$(dotnetbuilds-internal-container-read-token-base64)''
fi
buildConfig=Release
@@ -50,88 +33,33 @@ steps:
buildConfig='$(_BuildConfig)'
fi
- officialBuildArgs=
- if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
- officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
- fi
-
targetRidArgs=
if [ '${{ parameters.platform.targetRID }}' != '' ]; then
targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
fi
- runtimeOsArgs=
- if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then
- runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
- fi
-
- baseOsArgs=
- if [ '${{ parameters.platform.baseOS }}' != '' ]; then
- baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
- fi
-
- publishArgs=
- if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
- publishArgs='--publish'
- fi
-
- assetManifestFileName=SourceBuild_RidSpecific.xml
- if [ '${{ parameters.platform.name }}' != '' ]; then
- assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
+ portableBuildArgs=
+ if [ '${{ parameters.platform.portableBuild }}' != '' ]; then
+ portableBuildArgs='/p:PortableBuild=${{ parameters.platform.portableBuild }}'
fi
${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
--configuration $buildConfig \
- --restore --build --pack $publishArgs -bl \
+ --restore --build --pack -bl \
+ --source-build \
${{ parameters.platform.buildArguments }} \
- $officialBuildArgs \
$internalRuntimeDownloadArgs \
- $internalRestoreArgs \
$targetRidArgs \
- $runtimeOsArgs \
- $baseOsArgs \
- /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
- /p:ArcadeBuildFromSource=true \
- /p:DotNetBuildSourceOnly=true \
- /p:DotNetBuildRepo=true \
- /p:AssetManifestFileName=$assetManifestFileName
+ $portableBuildArgs \
displayName: Build
-# Upload build logs for diagnosis.
-- task: CopyFiles@2
- displayName: Prepare BuildLogs staging directory
- inputs:
- SourceFolder: '$(System.DefaultWorkingDirectory)'
- Contents: |
- **/*.log
- **/*.binlog
- artifacts/sb/prebuilt-report/**
- TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
- CleanTargetFolder: true
- continueOnError: true
- condition: succeededOrFailed()
-
- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
is1ESPipeline: ${{ parameters.is1ESPipeline }}
args:
displayName: Publish BuildLogs
- targetPath: '$(Build.StagingDirectory)/BuildLogs'
+ targetPath: artifacts/log/${{ coalesce(variables._BuildConfig, 'Release') }}
artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
continueOnError: true
condition: succeededOrFailed()
sbomEnabled: false # we don't need SBOM for logs
-
-# Manually inject component detection so that we can ignore the source build upstream cache, which contains
-# a nupkg cache of input packages (a local feed).
-# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
-# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
-- template: /eng/common/core-templates/steps/component-governance.yml
- parameters:
- displayName: Component Detection (Exclude upstream cache)
- is1ESPipeline: ${{ parameters.is1ESPipeline }}
- ${{ if eq(length(parameters.componentGovernanceIgnoreDirectories), 0) }}:
- componentGovernanceIgnoreDirectories: '$(System.DefaultWorkingDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache'
- ${{ else }}:
- componentGovernanceIgnoreDirectories: ${{ join(',', parameters.componentGovernanceIgnoreDirectories) }}
- disableComponentGovernance: ${{ eq(variables['System.TeamProject'], 'public') }}
diff --git a/eng/common/core-templates/steps/source-index-stage1-publish.yml b/eng/common/core-templates/steps/source-index-stage1-publish.yml
new file mode 100644
index 00000000000..e9a694afa58
--- /dev/null
+++ b/eng/common/core-templates/steps/source-index-stage1-publish.yml
@@ -0,0 +1,35 @@
+parameters:
+ sourceIndexUploadPackageVersion: 2.0.0-20250818.1
+ sourceIndexProcessBinlogPackageVersion: 1.0.1-20250818.1
+ sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
+ binlogPath: artifacts/log/Debug/Build.binlog
+
+steps:
+- task: UseDotNet@2
+ displayName: "Source Index: Use .NET 9 SDK"
+ inputs:
+ packageType: sdk
+ version: 9.0.x
+ installationPath: $(Agent.TempDirectory)/dotnet
+ workingDirectory: $(Agent.TempDirectory)
+
+- script: |
+ $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version ${{parameters.sourceIndexProcessBinlogPackageVersion}} --add-source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools
+ $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version ${{parameters.sourceIndexUploadPackageVersion}} --add-source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools
+ displayName: "Source Index: Download netsourceindex Tools"
+ # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
+ workingDirectory: $(Agent.TempDirectory)
+
+- script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i ${{parameters.BinlogPath}} -r $(System.DefaultWorkingDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
+ displayName: "Source Index: Process Binlog into indexable sln"
+
+- ${{ if and(ne(parameters.runAsPublic, 'true'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: AzureCLI@2
+ displayName: "Source Index: Upload Source Index stage1 artifacts to Azure"
+ inputs:
+ azureSubscription: 'SourceDotNet Stage1 Publish'
+ addSpnToEnvironment: true
+ scriptType: 'ps'
+ scriptLocation: 'inlineScript'
+ inlineScript: |
+ $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1
diff --git a/eng/common/cross/arm64/tizen/tizen.patch b/eng/common/cross/arm64/tizen/tizen.patch
index af7c8be0590..2cebc547382 100644
--- a/eng/common/cross/arm64/tizen/tizen.patch
+++ b/eng/common/cross/arm64/tizen/tizen.patch
@@ -5,5 +5,5 @@ diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
Use the shared library, but some functions are only in
the static library, so try that secondarily. */
OUTPUT_FORMAT(elf64-littleaarch64)
--GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-aarch64.so.1 ) )
+-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-aarch64.so.1 ) )
+GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-aarch64.so.1 ) )
diff --git a/eng/common/cross/armel/armel.jessie.patch b/eng/common/cross/armel/armel.jessie.patch
deleted file mode 100644
index 2d261561935..00000000000
--- a/eng/common/cross/armel/armel.jessie.patch
+++ /dev/null
@@ -1,43 +0,0 @@
-diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h
---- a/usr/include/urcu/uatomic/generic.h 2014-10-22 15:00:58.000000000 -0700
-+++ b/usr/include/urcu/uatomic/generic.h 2020-10-30 21:38:28.550000000 -0700
-@@ -69,10 +69,10 @@
- #endif
- #ifdef UATOMIC_HAS_ATOMIC_SHORT
- case 2:
-- return __sync_val_compare_and_swap_2(addr, old, _new);
-+ return __sync_val_compare_and_swap_2((uint16_t*) addr, old, _new);
- #endif
- case 4:
-- return __sync_val_compare_and_swap_4(addr, old, _new);
-+ return __sync_val_compare_and_swap_4((uint32_t*) addr, old, _new);
- #if (CAA_BITS_PER_LONG == 64)
- case 8:
- return __sync_val_compare_and_swap_8(addr, old, _new);
-@@ -109,7 +109,7 @@
- return;
- #endif
- case 4:
-- __sync_and_and_fetch_4(addr, val);
-+ __sync_and_and_fetch_4((uint32_t*) addr, val);
- return;
- #if (CAA_BITS_PER_LONG == 64)
- case 8:
-@@ -148,7 +148,7 @@
- return;
- #endif
- case 4:
-- __sync_or_and_fetch_4(addr, val);
-+ __sync_or_and_fetch_4((uint32_t*) addr, val);
- return;
- #if (CAA_BITS_PER_LONG == 64)
- case 8:
-@@ -187,7 +187,7 @@
- return __sync_add_and_fetch_2(addr, val);
- #endif
- case 4:
-- return __sync_add_and_fetch_4(addr, val);
-+ return __sync_add_and_fetch_4((uint32_t*) addr, val);
- #if (CAA_BITS_PER_LONG == 64)
- case 8:
- return __sync_add_and_fetch_8(addr, val);
diff --git a/eng/common/cross/build-android-rootfs.sh b/eng/common/cross/build-android-rootfs.sh
index 7e9ba2b75ed..fbd8d80848a 100755
--- a/eng/common/cross/build-android-rootfs.sh
+++ b/eng/common/cross/build-android-rootfs.sh
@@ -6,10 +6,11 @@ usage()
{
echo "Creates a toolchain and sysroot used for cross-compiling for Android."
echo
- echo "Usage: $0 [BuildArch] [ApiLevel]"
+ echo "Usage: $0 [BuildArch] [ApiLevel] [--ndk NDKVersion]"
echo
echo "BuildArch is the target architecture of Android. Currently only arm64 is supported."
echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html"
+ echo "NDKVersion is the version of Android NDK. The default is r21. See https://developer.android.com/ndk/downloads/revision_history"
echo
echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior"
echo "by setting the TOOLCHAIN_DIR environment variable"
@@ -25,10 +26,15 @@ __BuildArch=arm64
__AndroidArch=aarch64
__AndroidToolchain=aarch64-linux-android
-for i in "$@"
- do
- lowerI="$(echo $i | tr "[:upper:]" "[:lower:]")"
- case $lowerI in
+while :; do
+ if [[ "$#" -le 0 ]]; then
+ break
+ fi
+
+ i=$1
+
+ lowerI="$(echo $i | tr "[:upper:]" "[:lower:]")"
+ case $lowerI in
-?|-h|--help)
usage
exit 1
@@ -43,6 +49,10 @@ for i in "$@"
__AndroidArch=arm
__AndroidToolchain=arm-linux-androideabi
;;
+ --ndk)
+ shift
+ __NDK_Version=$1
+ ;;
*[0-9])
__ApiLevel=$i
;;
@@ -50,8 +60,17 @@ for i in "$@"
__UnprocessedBuildArgs="$__UnprocessedBuildArgs $i"
;;
esac
+ shift
done
+if [[ "$__NDK_Version" == "r21" ]] || [[ "$__NDK_Version" == "r22" ]]; then
+ __NDK_File_Arch_Spec=-x86_64
+ __SysRoot=sysroot
+else
+ __NDK_File_Arch_Spec=
+ __SysRoot=toolchains/llvm/prebuilt/linux-x86_64/sysroot
+fi
+
# Obtain the location of the bash script to figure out where the root of the repo is.
__ScriptBaseDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
@@ -78,6 +97,7 @@ fi
echo "Target API level: $__ApiLevel"
echo "Target architecture: $__BuildArch"
+echo "NDK version: $__NDK_Version"
echo "NDK location: $__NDK_Dir"
echo "Target Toolchain location: $__ToolchainDir"
@@ -85,8 +105,8 @@ echo "Target Toolchain location: $__ToolchainDir"
if [ ! -d $__NDK_Dir ]; then
echo Downloading the NDK into $__NDK_Dir
mkdir -p $__NDK_Dir
- wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux-x86_64.zip -O $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip
- unzip -q $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip -d $__CrossDir
+ wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux$__NDK_File_Arch_Spec.zip -O $__CrossDir/android-ndk-$__NDK_Version-linux.zip
+ unzip -q $__CrossDir/android-ndk-$__NDK_Version-linux.zip -d $__CrossDir
fi
if [ ! -d $__lldb_Dir ]; then
@@ -116,16 +136,11 @@ for path in $(wget -qO- https://packages.termux.dev/termux-main-21/dists/stable/
fi
done
-cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/sysroot/usr/"
+cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/$__SysRoot/usr/"
# Generate platform file for build.sh script to assign to __DistroRid
echo "Generating platform file..."
-echo "RID=android.${__ApiLevel}-${__BuildArch}" > $__ToolchainDir/sysroot/android_platform
-
-echo "Now to build coreclr, libraries and installers; run:"
-echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
- --subsetCategory coreclr
-echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
- --subsetCategory libraries
-echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
- --subsetCategory installer
+echo "RID=android.${__ApiLevel}-${__BuildArch}" > $__ToolchainDir/$__SysRoot/android_platform
+
+echo "Now to build coreclr, libraries and host; run:"
+echo ROOTFS_DIR=$(realpath $__ToolchainDir/$__SysRoot) ./build.sh clr+libs+host --cross --arch $__BuildArch
diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh
index 4b5e8d7166b..8abfb71f727 100755
--- a/eng/common/cross/build-rootfs.sh
+++ b/eng/common/cross/build-rootfs.sh
@@ -5,7 +5,7 @@ set -e
usage()
{
echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [llvmx[.y]] [--skipunmount] --rootfsdir ]"
- echo "BuildArch can be: arm(default), arm64, armel, armv6, ppc64le, riscv64, s390x, x64, x86"
+ echo "BuildArch can be: arm(default), arm64, armel, armv6, loongarch64, ppc64le, riscv64, s390x, x64, x86"
echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine"
echo " for alpine can be specified with version: alpineX.YY or alpineedge"
echo " for FreeBSD can be: freebsd13, freebsd14"
@@ -15,6 +15,7 @@ usage()
echo "llvmx[.y] - optional, LLVM version for LLVM related packages."
echo "--skipunmount - optional, will skip the unmount of rootfs folder."
echo "--skipsigcheck - optional, will skip package signature checks (allowing untrusted packages)."
+ echo "--skipemulation - optional, will skip qemu and debootstrap requirement when building environment for debian based systems."
echo "--use-mirror - optional, use mirror URL to fetch resources, when available."
echo "--jobs N - optional, restrict to N jobs."
exit 1
@@ -52,28 +53,27 @@ __UbuntuPackages+=" symlinks"
__UbuntuPackages+=" libicu-dev"
__UbuntuPackages+=" liblttng-ust-dev"
__UbuntuPackages+=" libunwind8-dev"
-__UbuntuPackages+=" libnuma-dev"
__AlpinePackages+=" gettext-dev"
__AlpinePackages+=" icu-dev"
__AlpinePackages+=" libunwind-dev"
__AlpinePackages+=" lttng-ust-dev"
__AlpinePackages+=" compiler-rt"
-__AlpinePackages+=" numactl-dev"
# runtime libraries' dependencies
__UbuntuPackages+=" libcurl4-openssl-dev"
__UbuntuPackages+=" libkrb5-dev"
__UbuntuPackages+=" libssl-dev"
__UbuntuPackages+=" zlib1g-dev"
+__UbuntuPackages+=" libbrotli-dev"
__AlpinePackages+=" curl-dev"
__AlpinePackages+=" krb5-dev"
__AlpinePackages+=" openssl-dev"
__AlpinePackages+=" zlib-dev"
-__FreeBSDBase="13.3-RELEASE"
-__FreeBSDPkg="1.17.0"
+__FreeBSDBase="13.4-RELEASE"
+__FreeBSDPkg="1.21.3"
__FreeBSDABI="13"
__FreeBSDPackages="libunwind"
__FreeBSDPackages+=" icu"
@@ -91,18 +91,18 @@ __HaikuPackages="gcc_syslibs"
__HaikuPackages+=" gcc_syslibs_devel"
__HaikuPackages+=" gmp"
__HaikuPackages+=" gmp_devel"
-__HaikuPackages+=" icu66"
-__HaikuPackages+=" icu66_devel"
+__HaikuPackages+=" icu[0-9]+"
+__HaikuPackages+=" icu[0-9]*_devel"
__HaikuPackages+=" krb5"
__HaikuPackages+=" krb5_devel"
__HaikuPackages+=" libiconv"
__HaikuPackages+=" libiconv_devel"
-__HaikuPackages+=" llvm12_libunwind"
-__HaikuPackages+=" llvm12_libunwind_devel"
+__HaikuPackages+=" llvm[0-9]*_libunwind"
+__HaikuPackages+=" llvm[0-9]*_libunwind_devel"
__HaikuPackages+=" mpfr"
__HaikuPackages+=" mpfr_devel"
-__HaikuPackages+=" openssl"
-__HaikuPackages+=" openssl_devel"
+__HaikuPackages+=" openssl3"
+__HaikuPackages+=" openssl3_devel"
__HaikuPackages+=" zlib"
__HaikuPackages+=" zlib_devel"
@@ -128,10 +128,12 @@ __AlpineKeys='
616adfeb:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq0BFD1D4lIxQcsqEpQzU\npNCYM3aP1V/fxxVdT4DWvSI53JHTwHQamKdMWtEXetWVbP5zSROniYKFXd/xrD9X\n0jiGHey3lEtylXRIPxe5s+wXoCmNLcJVnvTcDtwx/ne2NLHxp76lyc25At+6RgE6\nADjLVuoD7M4IFDkAsd8UQ8zM0Dww9SylIk/wgV3ZkifecvgUQRagrNUdUjR56EBZ\nraQrev4hhzOgwelT0kXCu3snbUuNY/lU53CoTzfBJ5UfEJ5pMw1ij6X0r5S9IVsy\nKLWH1hiO0NzU2c8ViUYCly4Fe9xMTFc6u2dy/dxf6FwERfGzETQxqZvSfrRX+GLj\n/QZAXiPg5178hT/m0Y3z5IGenIC/80Z9NCi+byF1WuJlzKjDcF/TU72zk0+PNM/H\nKuppf3JT4DyjiVzNC5YoWJT2QRMS9KLP5iKCSThwVceEEg5HfhQBRT9M6KIcFLSs\nmFjx9kNEEmc1E8hl5IR3+3Ry8G5/bTIIruz14jgeY9u5jhL8Vyyvo41jgt9sLHR1\n/J1TxKfkgksYev7PoX6/ZzJ1ksWKZY5NFoDXTNYUgzFUTOoEaOg3BAQKadb3Qbbq\nXIrxmPBdgrn9QI7NCgfnAY3Tb4EEjs3ON/BNyEhUENcXOH6I1NbcuBQ7g9P73kE4\nVORdoc8MdJ5eoKBpO8Ww8HECAwEAAQ==
616ae350:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAyduVzi1mWm+lYo2Tqt/0\nXkCIWrDNP1QBMVPrE0/ZlU2bCGSoo2Z9FHQKz/mTyMRlhNqTfhJ5qU3U9XlyGOPJ\npiM+b91g26pnpXJ2Q2kOypSgOMOPA4cQ42PkHBEqhuzssfj9t7x47ppS94bboh46\nxLSDRff/NAbtwTpvhStV3URYkxFG++cKGGa5MPXBrxIp+iZf9GnuxVdST5PGiVGP\nODL/b69sPJQNbJHVquqUTOh5Ry8uuD2WZuXfKf7/C0jC/ie9m2+0CttNu9tMciGM\nEyKG1/Xhk5iIWO43m4SrrT2WkFlcZ1z2JSf9Pjm4C2+HovYpihwwdM/OdP8Xmsnr\nDzVB4YvQiW+IHBjStHVuyiZWc+JsgEPJzisNY0Wyc/kNyNtqVKpX6dRhMLanLmy+\nf53cCSI05KPQAcGj6tdL+D60uKDkt+FsDa0BTAobZ31OsFVid0vCXtsbplNhW1IF\nHwsGXBTVcfXg44RLyL8Lk/2dQxDHNHzAUslJXzPxaHBLmt++2COa2EI1iWlvtznk\nOk9WP8SOAIj+xdqoiHcC4j72BOVVgiITIJNHrbppZCq6qPR+fgXmXa+sDcGh30m6\n9Wpbr28kLMSHiENCWTdsFij+NQTd5S47H7XTROHnalYDuF1RpS+DpQidT5tUimaT\nJZDr++FjKrnnijbyNF8b98UCAwEAAQ==
616db30d:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnpUpyWDWjlUk3smlWeA0\nlIMW+oJ38t92CRLHH3IqRhyECBRW0d0aRGtq7TY8PmxjjvBZrxTNDpJT6KUk4LRm\na6A6IuAI7QnNK8SJqM0DLzlpygd7GJf8ZL9SoHSH+gFsYF67Cpooz/YDqWrlN7Vw\ntO00s0B+eXy+PCXYU7VSfuWFGK8TGEv6HfGMALLjhqMManyvfp8hz3ubN1rK3c8C\nUS/ilRh1qckdbtPvoDPhSbTDmfU1g/EfRSIEXBrIMLg9ka/XB9PvWRrekrppnQzP\nhP9YE3x/wbFc5QqQWiRCYyQl/rgIMOXvIxhkfe8H5n1Et4VAorkpEAXdsfN8KSVv\nLSMazVlLp9GYq5SUpqYX3KnxdWBgN7BJoZ4sltsTpHQ/34SXWfu3UmyUveWj7wp0\nx9hwsPirVI00EEea9AbP7NM2rAyu6ukcm4m6ATd2DZJIViq2es6m60AE6SMCmrQF\nwmk4H/kdQgeAELVfGOm2VyJ3z69fQuywz7xu27S6zTKi05Qlnohxol4wVb6OB7qG\nLPRtK9ObgzRo/OPumyXqlzAi/Yvyd1ZQk8labZps3e16bQp8+pVPiumWioMFJDWV\nGZjCmyMSU8V6MB6njbgLHoyg2LCukCAeSjbPGGGYhnKLm1AKSoJh3IpZuqcKCk5C\n8CM1S15HxV78s9dFntEqIokCAwEAAQ==
+66ba20fe:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtfB12w4ZgqsXWZDfUAV/\n6Y4aHUKIu3q4SXrNZ7CXF9nXoAVYrS7NAxJdAodsY3vPCN0g5O8DFXR+390LdOuQ\n+HsGKCc1k5tX5ZXld37EZNTNSbR0k+NKhd9h6X3u6wqPOx7SIKxwAQR8qeeFq4pP\nrt9GAGlxtuYgzIIcKJPwE0dZlcBCg+GnptCUZXp/38BP1eYC+xTXSL6Muq1etYfg\nodXdb7Yl+2h1IHuOwo5rjgY5kpY7GcAs8AjGk3lDD/av60OTYccknH0NCVSmPoXK\nvrxDBOn0LQRNBLcAfnTKgHrzy0Q5h4TNkkyTgxkoQw5ObDk9nnabTxql732yy9BY\ns+hM9+dSFO1HKeVXreYSA2n1ndF18YAvAumzgyqzB7I4pMHXq1kC/8bONMJxwSkS\nYm6CoXKyavp7RqGMyeVpRC7tV+blkrrUml0BwNkxE+XnwDRB3xDV6hqgWe0XrifD\nYTfvd9ScZQP83ip0r4IKlq4GMv/R5shcCRJSkSZ6QSGshH40JYSoiwJf5FHbj9ND\n7do0UAqebWo4yNx63j/wb2ULorW3AClv0BCFSdPsIrCStiGdpgJDBR2P2NZOCob3\nG9uMj+wJD6JJg2nWqNJxkANXX37Qf8plgzssrhrgOvB0fjjS7GYhfkfmZTJ0wPOw\nA8+KzFseBh4UFGgue78KwgkCAwEAAQ==
'
__Keyring=
__KeyringFile="/usr/share/keyrings/ubuntu-archive-keyring.gpg"
__SkipSigCheck=0
+__SkipEmulation=0
__UseMirror=0
__UnprocessedBuildArgs=
@@ -162,9 +164,13 @@ while :; do
armel)
__BuildArch=armel
__UbuntuArch=armel
- __UbuntuRepo="http://ftp.debian.org/debian/"
- __CodeName=jessie
+ __UbuntuRepo="http://archive.debian.org/debian/"
+ __CodeName=buster
__KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
+ __LLDB_Package="liblldb-6.0-dev"
+ __UbuntuPackages="${__UbuntuPackages// libomp-dev/}"
+ __UbuntuPackages="${__UbuntuPackages// libomp5/}"
+ __UbuntuSuites=
;;
armv6)
__BuildArch=armv6
@@ -180,6 +186,18 @@ while :; do
__Keyring="--keyring $__KeyringFile"
fi
;;
+ loongarch64)
+ __BuildArch=loongarch64
+ __AlpineArch=loongarch64
+ __QEMUArch=loongarch64
+ __UbuntuArch=loong64
+ __UbuntuSuites=unreleased
+ __LLDB_Package="liblldb-19-dev"
+
+ if [[ "$__CodeName" == "sid" ]]; then
+ __UbuntuRepo="http://ftp.ports.debian.org/debian-ports/"
+ fi
+ ;;
riscv64)
__BuildArch=riscv64
__AlpineArch=riscv64
@@ -264,44 +282,21 @@ while :; do
;;
xenial) # Ubuntu 16.04
- if [[ "$__CodeName" != "jessie" ]]; then
- __CodeName=xenial
- fi
- ;;
- zesty) # Ubuntu 17.04
- if [[ "$__CodeName" != "jessie" ]]; then
- __CodeName=zesty
- fi
+ __CodeName=xenial
;;
bionic) # Ubuntu 18.04
- if [[ "$__CodeName" != "jessie" ]]; then
- __CodeName=bionic
- fi
+ __CodeName=bionic
;;
focal) # Ubuntu 20.04
- if [[ "$__CodeName" != "jessie" ]]; then
- __CodeName=focal
- fi
+ __CodeName=focal
;;
jammy) # Ubuntu 22.04
- if [[ "$__CodeName" != "jessie" ]]; then
- __CodeName=jammy
- fi
+ __CodeName=jammy
;;
noble) # Ubuntu 24.04
- if [[ "$__CodeName" != "jessie" ]]; then
- __CodeName=noble
- fi
- if [[ -n "$__LLDB_Package" ]]; then
- __LLDB_Package="liblldb-18-dev"
- fi
- ;;
- jessie) # Debian 8
- __CodeName=jessie
- __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
-
- if [[ -z "$__UbuntuRepo" ]]; then
- __UbuntuRepo="http://ftp.debian.org/debian/"
+ __CodeName=noble
+ if [[ -z "$__LLDB_Package" ]]; then
+ __LLDB_Package="liblldb-19-dev"
fi
;;
stretch) # Debian 9
@@ -319,7 +314,7 @@ while :; do
__KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
if [[ -z "$__UbuntuRepo" ]]; then
- __UbuntuRepo="http://ftp.debian.org/debian/"
+ __UbuntuRepo="http://archive.debian.org/debian/"
fi
;;
bullseye) # Debian 11
@@ -340,10 +335,28 @@ while :; do
;;
sid) # Debian sid
__CodeName=sid
- __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
+ __UbuntuSuites=
- if [[ -z "$__UbuntuRepo" ]]; then
- __UbuntuRepo="http://ftp.debian.org/debian/"
+ # Debian-Ports architectures need different values
+ case "$__UbuntuArch" in
+ amd64|arm64|armel|armhf|i386|mips64el|ppc64el|riscv64|s390x)
+ __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
+
+ if [[ -z "$__UbuntuRepo" ]]; then
+ __UbuntuRepo="http://ftp.debian.org/debian/"
+ fi
+ ;;
+ *)
+ __KeyringFile="/usr/share/keyrings/debian-ports-archive-keyring.gpg"
+
+ if [[ -z "$__UbuntuRepo" ]]; then
+ __UbuntuRepo="http://ftp.ports.debian.org/debian-ports/"
+ fi
+ ;;
+ esac
+
+ if [[ -e "$__KeyringFile" ]]; then
+ __Keyring="--keyring $__KeyringFile"
fi
;;
tizen)
@@ -370,7 +383,7 @@ while :; do
;;
freebsd14)
__CodeName=freebsd
- __FreeBSDBase="14.0-RELEASE"
+ __FreeBSDBase="14.2-RELEASE"
__FreeBSDABI="14"
__SkipUnmount=1
;;
@@ -388,6 +401,9 @@ while :; do
--skipsigcheck)
__SkipSigCheck=1
;;
+ --skipemulation)
+ __SkipEmulation=1
+ ;;
--rootfsdir|-rootfsdir)
shift
__RootfsDir="$1"
@@ -420,16 +436,15 @@ case "$__AlpineVersion" in
elif [[ "$__AlpineArch" == "x86" ]]; then
__AlpineVersion=3.17 # minimum version that supports lldb-dev
__AlpinePackages+=" llvm15-libs"
- elif [[ "$__AlpineArch" == "riscv64" ]]; then
+ elif [[ "$__AlpineArch" == "riscv64" || "$__AlpineArch" == "loongarch64" ]]; then
+ __AlpineVersion=3.21 # minimum version that supports lldb-dev
+ __AlpinePackages+=" llvm19-libs"
+ elif [[ -n "$__AlpineMajorVersion" ]]; then
+ # use whichever alpine version is provided and select the latest toolchain libs
__AlpineLlvmLibsLookup=1
- __AlpineVersion=edge # minimum version with APKINDEX.tar.gz (packages archive)
else
__AlpineVersion=3.13 # 3.13 to maximize compatibility
__AlpinePackages+=" llvm10-libs"
-
- if [[ "$__AlpineArch" == "armv7" ]]; then
- __AlpinePackages="${__AlpinePackages//numactl-dev/}"
- fi
fi
esac
@@ -439,15 +454,6 @@ if [[ "$__AlpineVersion" =~ 3\.1[345] ]]; then
__AlpinePackages="${__AlpinePackages/compiler-rt/compiler-rt-static}"
fi
-if [[ "$__BuildArch" == "armel" ]]; then
- __LLDB_Package="lldb-3.5-dev"
-fi
-
-if [[ "$__CodeName" == "xenial" && "$__UbuntuArch" == "armhf" ]]; then
- # libnuma-dev is not available on armhf for xenial
- __UbuntuPackages="${__UbuntuPackages//libnuma-dev/}"
-fi
-
__UbuntuPackages+=" ${__LLDB_Package:-}"
if [[ -z "$__UbuntuRepo" ]]; then
@@ -496,7 +502,7 @@ if [[ "$__CodeName" == "alpine" ]]; then
arch="$(uname -m)"
ensureDownloadTool
-
+
if [[ "$__hasWget" == 1 ]]; then
wget -P "$__ApkToolsDir" "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static"
else
@@ -512,11 +518,6 @@ if [[ "$__CodeName" == "alpine" ]]; then
echo "$__ApkToolsSHA512SUM $__ApkToolsDir/apk.static" | sha512sum -c
chmod +x "$__ApkToolsDir/apk.static"
- if [[ -f "/usr/bin/qemu-$__QEMUArch-static" ]]; then
- mkdir -p "$__RootfsDir"/usr/bin
- cp -v "/usr/bin/qemu-$__QEMUArch-static" "$__RootfsDir/usr/bin"
- fi
-
if [[ "$__AlpineVersion" == "edge" ]]; then
version=edge
else
@@ -536,6 +537,10 @@ if [[ "$__CodeName" == "alpine" ]]; then
__ApkSignatureArg="--keys-dir $__ApkKeysDir"
fi
+ if [[ "$__SkipEmulation" == "1" ]]; then
+ __NoEmulationArg="--no-scripts"
+ fi
+
# initialize DB
# shellcheck disable=SC2086
"$__ApkToolsDir/apk.static" \
@@ -557,7 +562,7 @@ if [[ "$__CodeName" == "alpine" ]]; then
"$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
- -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \
+ -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" $__NoEmulationArg \
add $__AlpinePackages
rm -r "$__ApkToolsDir"
@@ -573,7 +578,7 @@ elif [[ "$__CodeName" == "freebsd" ]]; then
curl -SL "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
fi
echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf
- echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf
+ echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf
mkdir -p "$__RootfsDir"/tmp
# get and build package manager
if [[ "$__hasWget" == 1 ]]; then
@@ -681,7 +686,7 @@ elif [[ "$__CodeName" == "haiku" ]]; then
ensureDownloadTool
- echo "Downloading Haiku package tool"
+ echo "Downloading Haiku package tools"
git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 "$__RootfsDir/tmp/script"
if [[ "$__hasWget" == 1 ]]; then
wget -O "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)"
@@ -691,34 +696,42 @@ elif [[ "$__CodeName" == "haiku" ]]; then
unzip -o "$__RootfsDir/tmp/download/hosttools.zip" -d "$__RootfsDir/tmp/bin"
- DepotBaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg"
- HpkgBaseUrl="https://eu.hpkg.haiku-os.org/haiku/master/$__HaikuArch/current"
+ HaikuBaseUrl="https://eu.hpkg.haiku-os.org/haiku/master/$__HaikuArch/current"
+ HaikuPortsBaseUrl="https://eu.hpkg.haiku-os.org/haikuports/master/$__HaikuArch/current"
+
+ echo "Downloading HaikuPorts package repository index..."
+ if [[ "$__hasWget" == 1 ]]; then
+ wget -P "$__RootfsDir/tmp/download" "$HaikuPortsBaseUrl/repo"
+ else
+ curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HaikuPortsBaseUrl/repo"
+ fi
- # Download Haiku packages
echo "Downloading Haiku packages"
read -ra array <<<"$__HaikuPackages"
for package in "${array[@]}"; do
echo "Downloading $package..."
- # API documented here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L60
- # The schema here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L598
+ hpkgFilename="$(LD_LIBRARY_PATH="$__RootfsDir/tmp/bin" "$__RootfsDir/tmp/bin/package_repo" list -f "$__RootfsDir/tmp/download/repo" |
+ grep -E "${package}-" | sort -V | tail -n 1 | xargs)"
+ if [ -z "$hpkgFilename" ]; then
+ >&2 echo "ERROR: package $package missing."
+ exit 1
+ fi
+ echo "Resolved filename: $hpkgFilename..."
+ hpkgDownloadUrl="$HaikuPortsBaseUrl/packages/$hpkgFilename"
if [[ "$__hasWget" == 1 ]]; then
- hpkgDownloadUrl="$(wget -qO- --post-data '{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \
- --header 'Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl"
else
- hpkgDownloadUrl="$(curl -sSL -XPOST --data '{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \
- --header 'Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$hpkgDownloadUrl"
fi
done
for package in haiku haiku_devel; do
echo "Downloading $package..."
if [[ "$__hasWget" == 1 ]]; then
- hpkgVersion="$(wget -qO- "$HpkgBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
- wget -P "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
+ hpkgVersion="$(wget -qO- "$HaikuBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
+ wget -P "$__RootfsDir/tmp/download" "$HaikuBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
else
- hpkgVersion="$(curl -sSL "$HpkgBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
- curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
+ hpkgVersion="$(curl -sSL "$HaikuBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')"
+ curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HaikuBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg"
fi
done
@@ -744,25 +757,67 @@ elif [[ "$__CodeName" == "haiku" ]]; then
popd
rm -rf "$__RootfsDir/tmp"
elif [[ -n "$__CodeName" ]]; then
+ __Suites="$__CodeName $(for suite in $__UbuntuSuites; do echo -n "$__CodeName-$suite "; done)"
+
+ if [[ "$__SkipEmulation" == "1" ]]; then
+ if [[ -z "$AR" ]]; then
+ if command -v ar &>/dev/null; then
+ AR="$(command -v ar)"
+ elif command -v llvm-ar &>/dev/null; then
+ AR="$(command -v llvm-ar)"
+ else
+ echo "Unable to find ar or llvm-ar on PATH, add them to PATH or set AR environment variable pointing to the available AR tool"
+ exit 1
+ fi
+ fi
+
+ PYTHON=${PYTHON_EXECUTABLE:-python3}
+
+ # shellcheck disable=SC2086,SC2046
+ echo running "$PYTHON" "$__CrossDir/install-debs.py" --arch "$__UbuntuArch" --mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \
+ $(for suite in $__Suites; do echo -n "--suite $suite "; done) \
+ $__UbuntuPackages
+
+ # shellcheck disable=SC2086,SC2046
+ "$PYTHON" "$__CrossDir/install-debs.py" --arch "$__UbuntuArch" --mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \
+ $(for suite in $__Suites; do echo -n "--suite $suite "; done) \
+ $__UbuntuPackages
+ exit 0
+ fi
+
+ __UpdateOptions=
if [[ "$__SkipSigCheck" == "0" ]]; then
__Keyring="$__Keyring --force-check-gpg"
+ else
+ __Keyring=
+ __UpdateOptions="--allow-unauthenticated --allow-insecure-repositories"
fi
# shellcheck disable=SC2086
echo running debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
- debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
+ # shellcheck disable=SC2086
+ if ! debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"; then
+ echo "debootstrap failed! dumping debootstrap.log"
+ cat "$__RootfsDir/debootstrap/debootstrap.log"
+ exit 1
+ fi
+
+ rm -rf "$__RootfsDir"/etc/apt/*.{sources,list} "$__RootfsDir"/etc/apt/sources.list.d
mkdir -p "$__RootfsDir/etc/apt/sources.list.d/"
+
+ # shellcheck disable=SC2086
cat > "$__RootfsDir/etc/apt/sources.list.d/$__CodeName.sources" < token2) - (token1 < token2)
+ else:
+ return -1 if isinstance(token1, str) else 1
+
+ return len(tokens1) - len(tokens2)
+
+def compare_debian_versions(version1, version2):
+ """Compare two Debian package versions."""
+ epoch1, upstream1, revision1 = parse_debian_version(version1)
+ epoch2, upstream2, revision2 = parse_debian_version(version2)
+
+ if epoch1 != epoch2:
+ return epoch1 - epoch2
+
+ result = compare_upstream_version(upstream1, upstream2)
+ if result != 0:
+ return result
+
+ return compare_upstream_version(revision1, revision2)
+
+def resolve_dependencies(packages, aliases, desired_packages):
+ """Recursively resolves dependencies for the desired packages."""
+ resolved = []
+ to_process = deque(desired_packages)
+
+ while to_process:
+ current = to_process.popleft()
+ resolved_package = current if current in packages else aliases.get(current, [None])[0]
+
+ if not resolved_package:
+ print(f"Error: Package '{current}' was not found in the available packages.")
+ sys.exit(1)
+
+ if resolved_package not in resolved:
+ resolved.append(resolved_package)
+
+ deps = packages.get(resolved_package, {}).get("Depends", "")
+ if deps:
+ deps = [dep.split(' ')[0] for dep in deps.split(', ') if dep]
+ for dep in deps:
+ if dep not in resolved and dep not in to_process and dep in packages:
+ to_process.append(dep)
+
+ return resolved
+
+def parse_package_index(content):
+ """Parses the Packages.gz file and returns package information."""
+ packages = {}
+ aliases = {}
+ entries = re.split(r'\n\n+', content)
+
+ for entry in entries:
+ fields = dict(re.findall(r'^(\S+): (.+)$', entry, re.MULTILINE))
+ if "Package" in fields:
+ package_name = fields["Package"]
+ version = fields.get("Version")
+ filename = fields.get("Filename")
+ depends = fields.get("Depends")
+ provides = fields.get("Provides", None)
+
+ # Only update if package_name is not in packages or if the new version is higher
+ if package_name not in packages or compare_debian_versions(version, packages[package_name]["Version"]) > 0:
+ packages[package_name] = {
+ "Version": version,
+ "Filename": filename,
+ "Depends": depends
+ }
+
+ # Update aliases if package provides any alternatives
+ if provides:
+ provides_list = [x.strip() for x in provides.split(",")]
+ for alias in provides_list:
+ # Strip version specifiers
+ alias_name = re.sub(r'\s*\(=.*\)', '', alias)
+ if alias_name not in aliases:
+ aliases[alias_name] = []
+ if package_name not in aliases[alias_name]:
+ aliases[alias_name].append(package_name)
+
+ return packages, aliases
+
+def install_packages(mirror, packages_info, aliases, tmp_dir, extract_dir, ar_tool, desired_packages):
+ """Downloads .deb files and extracts them."""
+ resolved_packages = resolve_dependencies(packages_info, aliases, desired_packages)
+ print(f"Resolved packages (including dependencies): {resolved_packages}")
+
+ packages_to_download = {}
+
+ for pkg in resolved_packages:
+ if pkg in packages_info:
+ packages_to_download[pkg] = packages_info[pkg]
+
+ if pkg in aliases:
+ for alias in aliases[pkg]:
+ if alias in packages_info:
+ packages_to_download[alias] = packages_info[alias]
+
+ asyncio.run(download_deb_files_parallel(mirror, packages_to_download, tmp_dir))
+
+ package_to_deb_file_map = {}
+ for pkg in resolved_packages:
+ pkg_info = packages_info.get(pkg)
+ if pkg_info:
+ deb_filename = pkg_info.get("Filename")
+ if deb_filename:
+ deb_file_path = os.path.join(tmp_dir, os.path.basename(deb_filename))
+ package_to_deb_file_map[pkg] = deb_file_path
+
+ for pkg in reversed(resolved_packages):
+ deb_file = package_to_deb_file_map.get(pkg)
+ if deb_file and os.path.exists(deb_file):
+ extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool)
+
+ print("All done!")
+
+def extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool):
+ """Extract .deb file contents"""
+
+ os.makedirs(extract_dir, exist_ok=True)
+
+ with tempfile.TemporaryDirectory(dir=tmp_dir) as tmp_subdir:
+ result = subprocess.run(f"{ar_tool} t {os.path.abspath(deb_file)}", cwd=tmp_subdir, check=True, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+
+ tar_filename = None
+ for line in result.stdout.decode().splitlines():
+ if line.startswith("data.tar"):
+ tar_filename = line.strip()
+ break
+
+ if not tar_filename:
+ raise FileNotFoundError(f"Could not find 'data.tar.*' in {deb_file}.")
+
+ tar_file_path = os.path.join(tmp_subdir, tar_filename)
+ print(f"Extracting {tar_filename} from {deb_file}..")
+
+ subprocess.run(f"{ar_tool} p {os.path.abspath(deb_file)} {tar_filename} > {tar_file_path}", check=True, shell=True)
+
+ file_extension = os.path.splitext(tar_file_path)[1].lower()
+
+ if file_extension == ".xz":
+ mode = "r:xz"
+ elif file_extension == ".gz":
+ mode = "r:gz"
+ elif file_extension == ".zst":
+ # zstd is not supported by standard library yet
+ decompressed_tar_path = tar_file_path.replace(".zst", "")
+ with open(tar_file_path, "rb") as zst_file, open(decompressed_tar_path, "wb") as decompressed_file:
+ dctx = zstandard.ZstdDecompressor()
+ dctx.copy_stream(zst_file, decompressed_file)
+
+ tar_file_path = decompressed_tar_path
+ mode = "r"
+ else:
+ raise ValueError(f"Unsupported compression format: {file_extension}")
+
+ with tarfile.open(tar_file_path, mode) as tar:
+ tar.extractall(path=extract_dir, filter='fully_trusted')
+
+def finalize_setup(rootfsdir):
+ lib_dir = os.path.join(rootfsdir, 'lib')
+ usr_lib_dir = os.path.join(rootfsdir, 'usr', 'lib')
+
+ if os.path.exists(lib_dir):
+ if os.path.islink(lib_dir):
+ os.remove(lib_dir)
+ else:
+ os.makedirs(usr_lib_dir, exist_ok=True)
+
+ for item in os.listdir(lib_dir):
+ src = os.path.join(lib_dir, item)
+ dest = os.path.join(usr_lib_dir, item)
+
+ if os.path.isdir(src):
+ shutil.copytree(src, dest, dirs_exist_ok=True)
+ else:
+ shutil.copy2(src, dest)
+
+ shutil.rmtree(lib_dir)
+
+ os.symlink(usr_lib_dir, lib_dir)
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description="Generate rootfs for .NET runtime on Debian-like OS")
+ parser.add_argument("--distro", required=False, help="Distro name (e.g., debian, ubuntu, etc.)")
+ parser.add_argument("--arch", required=True, help="Architecture (e.g., amd64, loong64, etc.)")
+ parser.add_argument("--rootfsdir", required=True, help="Destination directory.")
+ parser.add_argument('--suite', required=True, action='append', help='Specify one or more repository suites to collect index data.')
+ parser.add_argument("--mirror", required=False, help="Mirror (e.g., http://ftp.debian.org/debian-ports etc.)")
+ parser.add_argument("--artool", required=False, default="ar", help="ar tool to extract debs (e.g., ar, llvm-ar etc.)")
+ parser.add_argument("packages", nargs="+", help="List of package names to be installed.")
+
+ args = parser.parse_args()
+
+ if args.mirror is None:
+ if args.distro == "ubuntu":
+ args.mirror = "http://archive.ubuntu.com/ubuntu" if args.arch in ["amd64", "i386"] else "http://ports.ubuntu.com/ubuntu-ports"
+ elif args.distro == "debian":
+ args.mirror = "http://ftp.debian.org/debian-ports"
+ else:
+ raise Exception("Unsupported distro")
+
+ DESIRED_PACKAGES = args.packages + [ # base packages
+ "dpkg",
+ "busybox",
+ "libc-bin",
+ "base-files",
+ "base-passwd",
+ "debianutils"
+ ]
+
+ print(f"Creating rootfs. rootfsdir: {args.rootfsdir}, distro: {args.distro}, arch: {args.arch}, suites: {args.suite}, mirror: {args.mirror}")
+
+ package_index_content = asyncio.run(download_package_index_parallel(args.mirror, args.arch, args.suite))
+
+ packages_info, aliases = parse_package_index(package_index_content)
+
+ with tempfile.TemporaryDirectory() as tmp_dir:
+ install_packages(args.mirror, packages_info, aliases, tmp_dir, args.rootfsdir, args.artool, DESIRED_PACKAGES)
+
+ finalize_setup(args.rootfsdir)
diff --git a/eng/common/cross/tizen-fetch.sh b/eng/common/cross/tizen-fetch.sh
index 28936ceef3a..37c3a61f1de 100755
--- a/eng/common/cross/tizen-fetch.sh
+++ b/eng/common/cross/tizen-fetch.sh
@@ -156,13 +156,8 @@ fetch_tizen_pkgs()
done
}
-if [ "$TIZEN_ARCH" == "riscv64" ]; then
- BASE="Tizen-Base-RISCV"
- UNIFIED="Tizen-Unified-RISCV"
-else
- BASE="Tizen-Base"
- UNIFIED="Tizen-Unified"
-fi
+BASE="Tizen-Base"
+UNIFIED="Tizen-Unified"
Inform "Initialize ${TIZEN_ARCH} base"
fetch_tizen_pkgs_init standard $BASE
diff --git a/eng/common/cross/toolchain.cmake b/eng/common/cross/toolchain.cmake
index 9a7ecfbd42c..0ff85cf0367 100644
--- a/eng/common/cross/toolchain.cmake
+++ b/eng/common/cross/toolchain.cmake
@@ -67,6 +67,13 @@ elseif(TARGET_ARCH_NAME STREQUAL "armv6")
else()
set(TOOLCHAIN "arm-linux-gnueabihf")
endif()
+elseif(TARGET_ARCH_NAME STREQUAL "loongarch64")
+ set(CMAKE_SYSTEM_PROCESSOR "loongarch64")
+ if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/loongarch64-alpine-linux-musl)
+ set(TOOLCHAIN "loongarch64-alpine-linux-musl")
+ else()
+ set(TOOLCHAIN "loongarch64-linux-gnu")
+ endif()
elseif(TARGET_ARCH_NAME STREQUAL "ppc64le")
set(CMAKE_SYSTEM_PROCESSOR ppc64le)
if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/powerpc64le-alpine-linux-musl)
@@ -118,7 +125,7 @@ elseif(TARGET_ARCH_NAME STREQUAL "x86")
set(TIZEN_TOOLCHAIN "i586-tizen-linux-gnu")
endif()
else()
- message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only arm, arm64, armel, armv6, ppc64le, riscv64, s390x, x64 and x86 are supported!")
+ message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only arm, arm64, armel, armv6, loongarch64, ppc64le, riscv64, s390x, x64 and x86 are supported!")
endif()
if(DEFINED ENV{TOOLCHAIN})
@@ -148,6 +155,25 @@ if(TIZEN)
include_directories(SYSTEM ${TIZEN_TOOLCHAIN_PATH}/include/c++/${TIZEN_TOOLCHAIN})
endif()
+function(locate_toolchain_exec exec var)
+ set(TOOLSET_PREFIX ${TOOLCHAIN}-)
+ string(TOUPPER ${exec} EXEC_UPPERCASE)
+ if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "")
+ set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE)
+ return()
+ endif()
+
+ find_program(EXEC_LOCATION_${exec}
+ NAMES
+ "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}"
+ "${TOOLSET_PREFIX}${exec}")
+
+ if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND")
+ message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.")
+ endif()
+ set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE)
+endfunction()
+
if(ANDROID)
if(TARGET_ARCH_NAME STREQUAL "arm")
set(ANDROID_ABI armeabi-v7a)
@@ -178,66 +204,24 @@ elseif(FREEBSD)
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -fuse-ld=lld")
elseif(ILLUMOS)
set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
+ set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}")
+ set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp")
+ set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
include_directories(SYSTEM ${CROSS_ROOTFS}/include)
- set(TOOLSET_PREFIX ${TOOLCHAIN}-)
- function(locate_toolchain_exec exec var)
- string(TOUPPER ${exec} EXEC_UPPERCASE)
- if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "")
- set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE)
- return()
- endif()
-
- find_program(EXEC_LOCATION_${exec}
- NAMES
- "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}"
- "${TOOLSET_PREFIX}${exec}")
-
- if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND")
- message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.")
- endif()
- set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE)
- endfunction()
-
- set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}")
-
locate_toolchain_exec(gcc CMAKE_C_COMPILER)
locate_toolchain_exec(g++ CMAKE_CXX_COMPILER)
-
- set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp")
- set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
elseif(HAIKU)
set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
set(CMAKE_PROGRAM_PATH "${CMAKE_PROGRAM_PATH};${CROSS_ROOTFS}/cross-tools-x86_64/bin")
-
- set(TOOLSET_PREFIX ${TOOLCHAIN}-)
- function(locate_toolchain_exec exec var)
- string(TOUPPER ${exec} EXEC_UPPERCASE)
- if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "")
- set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE)
- return()
- endif()
-
- find_program(EXEC_LOCATION_${exec}
- NAMES
- "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}"
- "${TOOLSET_PREFIX}${exec}")
-
- if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND")
- message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.")
- endif()
- set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE)
- endfunction()
-
set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}")
+ set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp")
+ set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
locate_toolchain_exec(gcc CMAKE_C_COMPILER)
locate_toolchain_exec(g++ CMAKE_CXX_COMPILER)
- set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp")
- set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
-
# let CMake set up the correct search paths
include(Platform/Haiku)
else()
@@ -307,7 +291,7 @@ endif()
# Specify compile options
-if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|ppc64le|riscv64|s390x|x64|x86)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU)
+if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|loongarch64|ppc64le|riscv64|s390x|x64|x86)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU)
set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN})
set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN})
set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN})
diff --git a/eng/common/darc-init.sh b/eng/common/darc-init.sh
index 36dbd45e1ce..e889f439b8d 100755
--- a/eng/common/darc-init.sh
+++ b/eng/common/darc-init.sh
@@ -68,7 +68,7 @@ function InstallDarcCli {
fi
fi
- local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json"
+ local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json"
echo "Installing Darc CLI version $darcVersion..."
echo "You may need to restart your command shell if this is the first dotnet tool you have installed."
diff --git a/eng/common/dotnet.cmd b/eng/common/dotnet.cmd
new file mode 100644
index 00000000000..527fa4bb38f
--- /dev/null
+++ b/eng/common/dotnet.cmd
@@ -0,0 +1,7 @@
+@echo off
+
+:: This script is used to install the .NET SDK.
+:: It will also invoke the SDK with any provided arguments.
+
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet.ps1""" %*"
+exit /b %ErrorLevel%
diff --git a/eng/common/dotnet.ps1 b/eng/common/dotnet.ps1
new file mode 100644
index 00000000000..45e5676c9eb
--- /dev/null
+++ b/eng/common/dotnet.ps1
@@ -0,0 +1,11 @@
+# This script is used to install the .NET SDK.
+# It will also invoke the SDK with any provided arguments.
+
+. $PSScriptRoot\tools.ps1
+$dotnetRoot = InitializeDotNetCli -install:$true
+
+# Invoke acquired SDK with args if they are provided
+if ($args.count -gt 0) {
+ $env:DOTNET_NOLOGO=1
+ & "$dotnetRoot\dotnet.exe" $args
+}
diff --git a/eng/common/dotnet.sh b/eng/common/dotnet.sh
new file mode 100644
index 00000000000..2ef68235675
--- /dev/null
+++ b/eng/common/dotnet.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+# This script is used to install the .NET SDK.
+# It will also invoke the SDK with any provided arguments.
+
+source="${BASH_SOURCE[0]}"
+# resolve $SOURCE until the file is no longer a symlink
+while [[ -h $source ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+source $scriptroot/tools.sh
+InitializeDotNetCli true # install
+
+# Invoke acquired SDK with args if they are provided
+if [[ $# > 0 ]]; then
+ __dotnetDir=${_InitializeDotNetCli}
+ dotnetPath=${__dotnetDir}/dotnet
+ ${dotnetPath} "$@"
+fi
diff --git a/eng/common/generate-locproject.ps1 b/eng/common/generate-locproject.ps1
index 524aaa57f2b..fa1cdc2b300 100644
--- a/eng/common/generate-locproject.ps1
+++ b/eng/common/generate-locproject.ps1
@@ -33,15 +33,27 @@ $jsonTemplateFiles | ForEach-Object {
$jsonWinformsTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\\strings\.json" } # current winforms pattern
+$wxlFilesV3 = @()
+$wxlFilesV5 = @()
$wxlFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\.+\.wxl" -And -Not( $_.Directory.Name -Match "\d{4}" ) } # localized files live in four digit lang ID directories; this excludes them
if (-not $wxlFiles) {
$wxlEnFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\1033\\.+\.wxl" } # pick up en files (1033 = en) specifically so we can copy them to use as the neutral xlf files
if ($wxlEnFiles) {
- $wxlFiles = @()
- $wxlEnFiles | ForEach-Object {
- $destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)"
- $wxlFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru
- }
+ $wxlFiles = @()
+ $wxlEnFiles | ForEach-Object {
+ $destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)"
+ $content = Get-Content $_.FullName -Raw
+
+ # Split files on schema to select different parser settings in the generated project.
+ if ($content -like "*http://wixtoolset.org/schemas/v4/wxl*")
+ {
+ $wxlFilesV5 += Copy-Item $_.FullName -Destination $destinationFile -PassThru
+ }
+ elseif ($content -like "*http://schemas.microsoft.com/wix/2006/localization*")
+ {
+ $wxlFilesV3 += Copy-Item $_.FullName -Destination $destinationFile -PassThru
+ }
+ }
}
}
@@ -114,7 +126,32 @@ $locJson = @{
CloneLanguageSet = "WiX_CloneLanguages"
LssFiles = @( "wxl_loc.lss" )
LocItems = @(
- $wxlFiles | ForEach-Object {
+ $wxlFilesV3 | ForEach-Object {
+ $outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\"
+ $continue = $true
+ foreach ($exclusion in $exclusions.Exclusions) {
+ if ($_.FullName.Contains($exclusion)) {
+ $continue = $false
+ }
+ }
+ $sourceFile = ($_.FullName | Resolve-Path -Relative)
+ if ($continue)
+ {
+ return @{
+ SourceFile = $sourceFile
+ CopyOption = "LangIDOnPath"
+ OutputPath = $outputPath
+ }
+ }
+ }
+ )
+ },
+ @{
+ LanguageSet = $LanguageSet
+ CloneLanguageSet = "WiX_CloneLanguages"
+ LssFiles = @( "P210WxlSchemaV4.lss" )
+ LocItems = @(
+ $wxlFilesV5 | ForEach-Object {
$outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\"
$continue = $true
foreach ($exclusion in $exclusions.Exclusions) {
diff --git a/eng/common/native/install-dependencies.sh b/eng/common/native/install-dependencies.sh
new file mode 100644
index 00000000000..477a44f335b
--- /dev/null
+++ b/eng/common/native/install-dependencies.sh
@@ -0,0 +1,62 @@
+#!/bin/sh
+
+set -e
+
+# This is a simple script primarily used for CI to install necessary dependencies
+#
+# Usage:
+#
+# ./install-dependencies.sh
+
+os="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
+
+if [ -z "$os" ]; then
+ . "$(dirname "$0")"/init-os-and-arch.sh
+fi
+
+case "$os" in
+ linux)
+ if [ -e /etc/os-release ]; then
+ . /etc/os-release
+ fi
+
+ if [ "$ID" = "debian" ] || [ "$ID_LIKE" = "debian" ]; then
+ apt update
+
+ apt install -y build-essential gettext locales cmake llvm clang lld lldb liblldb-dev libunwind8-dev libicu-dev liblttng-ust-dev \
+ libssl-dev libkrb5-dev pigz cpio
+
+ localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8
+ elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ] || [ "$ID" = "azurelinux" ]; then
+ pkg_mgr="$(command -v tdnf 2>/dev/null || command -v dnf)"
+ $pkg_mgr install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio
+ elif [ "$ID" = "alpine" ]; then
+ apk add build-base cmake bash curl clang llvm-dev lld lldb krb5-dev lttng-ust-dev icu-dev openssl-dev pigz cpio
+ else
+ echo "Unsupported distro. distro: $ID"
+ exit 1
+ fi
+ ;;
+
+ osx|maccatalyst|ios|iossimulator|tvos|tvossimulator)
+ echo "Installed xcode version: $(xcode-select -p)"
+
+ export HOMEBREW_NO_INSTALL_CLEANUP=1
+ export HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1
+ # Skip brew update for now, see https://github.com/actions/setup-python/issues/577
+ # brew update --preinstall
+ brew bundle --no-upgrade --file=- < Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
+ Write-Host " -excludeCIBinaryLog When running on CI, allow no binary log (short: -nobl)"
Write-Host ""
Write-Host "Command line arguments not listed above are passed thru to msbuild."
}
@@ -34,10 +39,11 @@ function Print-Usage() {
function Build([string]$target) {
$logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" }
$log = Join-Path $LogDir "$task$logSuffix.binlog"
+ $binaryLogArg = if ($binaryLog) { "/bl:$log" } else { "" }
$outputPath = Join-Path $ToolsetDir "$task\"
MSBuild $taskProject `
- /bl:$log `
+ $binaryLogArg `
/t:$target `
/p:Configuration=$configuration `
/p:RepoRoot=$RepoRoot `
@@ -64,7 +70,7 @@ try {
$GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
}
if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
- $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.12.0" -MemberType NoteProperty
+ $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "18.0.0" -MemberType NoteProperty
}
if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
$xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true
diff --git a/eng/common/sdk-task.sh b/eng/common/sdk-task.sh
new file mode 100644
index 00000000000..3270f83fa9a
--- /dev/null
+++ b/eng/common/sdk-task.sh
@@ -0,0 +1,121 @@
+#!/usr/bin/env bash
+
+show_usage() {
+ echo "Common settings:"
+ echo " --task Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)"
+ echo " --restore Restore dependencies"
+ echo " --verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]"
+ echo " --help Print help and exit"
+ echo ""
+
+ echo "Advanced settings:"
+ echo " --excludeCIBinarylog Don't output binary log (short: -nobl)"
+ echo " --noWarnAsError Do not warn as error"
+ echo ""
+ echo "Command line arguments not listed above are passed thru to msbuild."
+}
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+Build() {
+ local target=$1
+ local log_suffix=""
+ [[ "$target" != "Execute" ]] && log_suffix=".$target"
+ local log="$log_dir/$task$log_suffix.binlog"
+ local binaryLogArg=""
+ [[ $binary_log == true ]] && binaryLogArg="/bl:$log"
+ local output_path="$toolset_dir/$task/"
+
+ MSBuild "$taskProject" \
+ $binaryLogArg \
+ /t:"$target" \
+ /p:Configuration="$configuration" \
+ /p:RepoRoot="$repo_root" \
+ /p:BaseIntermediateOutputPath="$output_path" \
+ /v:"$verbosity" \
+ $properties
+}
+
+binary_log=true
+configuration="Debug"
+verbosity="minimal"
+exclude_ci_binary_log=false
+restore=false
+help=false
+properties=''
+warnAsError=true
+
+while (($# > 0)); do
+ lowerI="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
+ case $lowerI in
+ --task)
+ task=$2
+ shift 2
+ ;;
+ --restore)
+ restore=true
+ shift 1
+ ;;
+ --verbosity)
+ verbosity=$2
+ shift 2
+ ;;
+ --excludecibinarylog|--nobl)
+ binary_log=false
+ exclude_ci_binary_log=true
+ shift 1
+ ;;
+ --nowarnaserror)
+ warnAsError=false
+ shift 1
+ ;;
+ --help)
+ help=true
+ shift 1
+ ;;
+ *)
+ properties="$properties $1"
+ shift 1
+ ;;
+ esac
+done
+
+ci=true
+
+if $help; then
+ show_usage
+ exit 0
+fi
+
+. "$scriptroot/tools.sh"
+InitializeToolset
+
+if [[ -z "$task" ]]; then
+ Write-PipelineTelemetryError -Category 'Task' -Name 'MissingTask' -Message "Missing required parameter '-task '"
+ ExitWithExitCode 1
+fi
+
+taskProject=$(GetSdkTaskProject "$task")
+if [[ ! -e "$taskProject" ]]; then
+ Write-PipelineTelemetryError -Category 'Task' -Name 'UnknownTask' -Message "Unknown task: $task"
+ ExitWithExitCode 1
+fi
+
+if $restore; then
+ Build "Restore"
+fi
+
+Build "Execute"
+
+
+ExitWithExitCode 0
diff --git a/eng/common/sdl/packages.config b/eng/common/sdl/packages.config
index 4585cfd6bba..e5f543ea68c 100644
--- a/eng/common/sdl/packages.config
+++ b/eng/common/sdl/packages.config
@@ -1,4 +1,4 @@
-
+
diff --git a/eng/common/templates-official/job/job.yml b/eng/common/templates-official/job/job.yml
index 81ea7a261f2..92a0664f564 100644
--- a/eng/common/templates-official/job/job.yml
+++ b/eng/common/templates-official/job/job.yml
@@ -31,6 +31,7 @@ jobs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
condition: always()
+ retryCountOnTaskFailure: 10 # for any logs being locked
continueOnError: true
- ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- output: pipelineArtifact
@@ -39,6 +40,7 @@ jobs:
displayName: 'Publish logs'
continueOnError: true
condition: always()
+ retryCountOnTaskFailure: 10 # for any logs being locked
sbomEnabled: false # we don't need SBOM for logs
- ${{ if eq(parameters.enablePublishBuildArtifacts, true) }}:
@@ -46,7 +48,7 @@ jobs:
displayName: Publish Logs
PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
publishLocation: Container
- ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
+ ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)' ) }}
continueOnError: true
condition: always()
sbomEnabled: false # we don't need SBOM for logs
diff --git a/eng/common/templates-official/steps/publish-build-artifacts.yml b/eng/common/templates-official/steps/publish-build-artifacts.yml
index 100a3fc9849..fcf6637b2eb 100644
--- a/eng/common/templates-official/steps/publish-build-artifacts.yml
+++ b/eng/common/templates-official/steps/publish-build-artifacts.yml
@@ -24,6 +24,10 @@ parameters:
- name: is1ESPipeline
type: boolean
default: true
+
+- name: retryCountOnTaskFailure
+ type: string
+ default: 10
steps:
- ${{ if ne(parameters.is1ESPipeline, true) }}:
@@ -38,4 +42,5 @@ steps:
PathtoPublish: ${{ parameters.pathToPublish }}
${{ if parameters.artifactName }}:
ArtifactName: ${{ parameters.artifactName }}
-
+ ${{ if parameters.retryCountOnTaskFailure }}:
+ retryCountOnTaskFailure: ${{ parameters.retryCountOnTaskFailure }}
diff --git a/eng/common/templates-official/steps/source-index-stage1-publish.yml b/eng/common/templates-official/steps/source-index-stage1-publish.yml
new file mode 100644
index 00000000000..9b8b80942b5
--- /dev/null
+++ b/eng/common/templates-official/steps/source-index-stage1-publish.yml
@@ -0,0 +1,7 @@
+steps:
+- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
+ parameters:
+ is1ESPipeline: true
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml
index 5bdd3dd85fd..238fa0818f7 100644
--- a/eng/common/templates/job/job.yml
+++ b/eng/common/templates/job/job.yml
@@ -46,6 +46,7 @@ jobs:
artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
continueOnError: true
condition: always()
+ retryCountOnTaskFailure: 10 # for any logs being locked
- ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
parameters:
@@ -56,6 +57,7 @@ jobs:
displayName: 'Publish logs'
continueOnError: true
condition: always()
+ retryCountOnTaskFailure: 10 # for any logs being locked
sbomEnabled: false # we don't need SBOM for logs
- ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
@@ -66,7 +68,7 @@ jobs:
displayName: Publish Logs
pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)'
publishLocation: Container
- artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
+ artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)' ) }}
continueOnError: true
condition: always()
diff --git a/eng/common/templates/steps/publish-build-artifacts.yml b/eng/common/templates/steps/publish-build-artifacts.yml
index 6428a98dfef..605e602e94d 100644
--- a/eng/common/templates/steps/publish-build-artifacts.yml
+++ b/eng/common/templates/steps/publish-build-artifacts.yml
@@ -25,6 +25,10 @@ parameters:
type: string
default: 'Container'
+- name: retryCountOnTaskFailure
+ type: string
+ default: 10
+
steps:
- ${{ if eq(parameters.is1ESPipeline, true) }}:
- 'eng/common/templates cannot be referenced from a 1ES managed template': error
@@ -37,4 +41,6 @@ steps:
PublishLocation: ${{ parameters.publishLocation }}
PathtoPublish: ${{ parameters.pathToPublish }}
${{ if parameters.artifactName }}:
- ArtifactName: ${{ parameters.artifactName }}
\ No newline at end of file
+ ArtifactName: ${{ parameters.artifactName }}
+ ${{ if parameters.retryCountOnTaskFailure }}:
+ retryCountOnTaskFailure: ${{ parameters.retryCountOnTaskFailure }}
diff --git a/eng/common/templates/steps/source-index-stage1-publish.yml b/eng/common/templates/steps/source-index-stage1-publish.yml
new file mode 100644
index 00000000000..182cec33a7b
--- /dev/null
+++ b/eng/common/templates/steps/source-index-stage1-publish.yml
@@ -0,0 +1,7 @@
+steps:
+- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml
+ parameters:
+ is1ESPipeline: false
+
+ ${{ each parameter in parameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
diff --git a/eng/common/templates/steps/vmr-sync.yml b/eng/common/templates/steps/vmr-sync.yml
new file mode 100644
index 00000000000..eb619c50268
--- /dev/null
+++ b/eng/common/templates/steps/vmr-sync.yml
@@ -0,0 +1,186 @@
+### These steps synchronize new code from product repositories into the VMR (https://github.com/dotnet/dotnet).
+### They initialize the darc CLI and pull the new updates.
+### Changes are applied locally onto the already cloned VMR (located in $vmrPath).
+
+parameters:
+- name: targetRef
+ displayName: Target revision in dotnet/ to synchronize
+ type: string
+ default: $(Build.SourceVersion)
+
+- name: vmrPath
+ displayName: Path where the dotnet/dotnet is checked out to
+ type: string
+ default: $(Agent.BuildDirectory)/vmr
+
+- name: additionalSyncs
+ displayName: Optional list of package names whose repo's source will also be synchronized in the local VMR, e.g. NuGet.Protocol
+ type: object
+ default: []
+
+steps:
+- checkout: vmr
+ displayName: Clone dotnet/dotnet
+ path: vmr
+ clean: true
+
+- checkout: self
+ displayName: Clone $(Build.Repository.Name)
+ path: repo
+ fetchDepth: 0
+
+# This step is needed so that when we get a detached HEAD / shallow clone,
+# we still pull the commit into the temporary repo clone to use it during the sync.
+# Also unshallow the clone so that forwardflow command would work.
+- script: |
+ git branch repo-head
+ git rev-parse HEAD
+ displayName: Label PR commit
+ workingDirectory: $(Agent.BuildDirectory)/repo
+
+- script: |
+ git config --global user.name "dotnet-maestro[bot]"
+ git config --global user.email "dotnet-maestro[bot]@users.noreply.github.com"
+ displayName: Set git author to dotnet-maestro[bot]
+ workingDirectory: ${{ parameters.vmrPath }}
+
+- script: |
+ ./eng/common/vmr-sync.sh \
+ --vmr ${{ parameters.vmrPath }} \
+ --tmp $(Agent.TempDirectory) \
+ --azdev-pat '$(dn-bot-all-orgs-code-r)' \
+ --ci \
+ --debug
+
+ if [ "$?" -ne 0 ]; then
+ echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
+ exit 1
+ fi
+ displayName: Sync repo into VMR (Unix)
+ condition: ne(variables['Agent.OS'], 'Windows_NT')
+ workingDirectory: $(Agent.BuildDirectory)/repo
+
+- script: |
+ git config --global diff.astextplain.textconv echo
+ git config --system core.longpaths true
+ displayName: Configure Windows git (longpaths, astextplain)
+ condition: eq(variables['Agent.OS'], 'Windows_NT')
+
+- powershell: |
+ ./eng/common/vmr-sync.ps1 `
+ -vmr ${{ parameters.vmrPath }} `
+ -tmp $(Agent.TempDirectory) `
+ -azdevPat '$(dn-bot-all-orgs-code-r)' `
+ -ci `
+ -debugOutput
+
+ if ($LASTEXITCODE -ne 0) {
+ echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
+ exit 1
+ }
+ displayName: Sync repo into VMR (Windows)
+ condition: eq(variables['Agent.OS'], 'Windows_NT')
+ workingDirectory: $(Agent.BuildDirectory)/repo
+
+- ${{ if eq(variables['Build.Reason'], 'PullRequest') }}:
+ - task: CopyFiles@2
+ displayName: Collect failed patches
+ condition: failed()
+ inputs:
+ SourceFolder: '$(Agent.TempDirectory)'
+ Contents: '*.patch'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/FailedPatches'
+
+ - publish: '$(Build.ArtifactStagingDirectory)/FailedPatches'
+ artifact: $(System.JobDisplayName)_FailedPatches
+ displayName: Upload failed patches
+ condition: failed()
+
+- ${{ each assetName in parameters.additionalSyncs }}:
+ # The vmr-sync script ends up staging files in the local VMR so we have to commit those
+ - script:
+ git commit --allow-empty -am "Forward-flow $(Build.Repository.Name)"
+ displayName: Commit local VMR changes
+ workingDirectory: ${{ parameters.vmrPath }}
+
+ - script: |
+ set -ex
+
+ echo "Searching for details of asset ${{ assetName }}..."
+
+ # Use darc to get dependencies information
+ dependencies=$(./.dotnet/dotnet darc get-dependencies --name '${{ assetName }}' --ci)
+
+ # Extract repository URL and commit hash
+ repository=$(echo "$dependencies" | grep 'Repo:' | sed 's/Repo:[[:space:]]*//' | head -1)
+
+ if [ -z "$repository" ]; then
+ echo "##vso[task.logissue type=error]Asset ${{ assetName }} not found in the dependency list"
+ exit 1
+ fi
+
+ commit=$(echo "$dependencies" | grep 'Commit:' | sed 's/Commit:[[:space:]]*//' | head -1)
+
+ echo "Updating the VMR from $repository / $commit..."
+ cd ..
+ git clone $repository ${{ assetName }}
+ cd ${{ assetName }}
+ git checkout $commit
+ git branch "sync/$commit"
+
+ ./eng/common/vmr-sync.sh \
+ --vmr ${{ parameters.vmrPath }} \
+ --tmp $(Agent.TempDirectory) \
+ --azdev-pat '$(dn-bot-all-orgs-code-r)' \
+ --ci \
+ --debug
+
+ if [ "$?" -ne 0 ]; then
+ echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
+ exit 1
+ fi
+ displayName: Sync ${{ assetName }} into (Unix)
+ condition: ne(variables['Agent.OS'], 'Windows_NT')
+ workingDirectory: $(Agent.BuildDirectory)/repo
+
+ - powershell: |
+ $ErrorActionPreference = 'Stop'
+
+ Write-Host "Searching for details of asset ${{ assetName }}..."
+
+ $dependencies = .\.dotnet\dotnet darc get-dependencies --name '${{ assetName }}' --ci
+
+ $repository = $dependencies | Select-String -Pattern 'Repo:\s+([^\s]+)' | Select-Object -First 1
+ $repository -match 'Repo:\s+([^\s]+)' | Out-Null
+ $repository = $matches[1]
+
+ if ($repository -eq $null) {
+ Write-Error "Asset ${{ assetName }} not found in the dependency list"
+ exit 1
+ }
+
+ $commit = $dependencies | Select-String -Pattern 'Commit:\s+([^\s]+)' | Select-Object -First 1
+ $commit -match 'Commit:\s+([^\s]+)' | Out-Null
+ $commit = $matches[1]
+
+ Write-Host "Updating the VMR from $repository / $commit..."
+ cd ..
+ git clone $repository ${{ assetName }}
+ cd ${{ assetName }}
+ git checkout $commit
+ git branch "sync/$commit"
+
+ .\eng\common\vmr-sync.ps1 `
+ -vmr ${{ parameters.vmrPath }} `
+ -tmp $(Agent.TempDirectory) `
+ -azdevPat '$(dn-bot-all-orgs-code-r)' `
+ -ci `
+ -debugOutput
+
+ if ($LASTEXITCODE -ne 0) {
+ echo "##vso[task.logissue type=error]Failed to synchronize the VMR"
+ exit 1
+ }
+ displayName: Sync ${{ assetName }} into (Windows)
+ condition: eq(variables['Agent.OS'], 'Windows_NT')
+ workingDirectory: $(Agent.BuildDirectory)/repo
diff --git a/eng/common/templates/vmr-build-pr.yml b/eng/common/templates/vmr-build-pr.yml
new file mode 100644
index 00000000000..2f3694fa132
--- /dev/null
+++ b/eng/common/templates/vmr-build-pr.yml
@@ -0,0 +1,43 @@
+# This pipeline is used for running the VMR verification of the PR changes in repo-level PRs.
+#
+# It will run a full set of verification jobs defined in:
+# https://github.com/dotnet/dotnet/blob/10060d128e3f470e77265f8490f5e4f72dae738e/eng/pipelines/templates/stages/vmr-build.yml#L27-L38
+#
+# For repos that do not need to run the full set, you would do the following:
+#
+# 1. Copy this YML file to a repo-specific location, i.e. outside of eng/common.
+#
+# 2. Add `verifications` parameter to VMR template reference
+#
+# Examples:
+# - For source-build stage 1 verification, add the following:
+# verifications: [ "source-build-stage1" ]
+#
+# - For Windows only verifications, add the following:
+# verifications: [ "unified-build-windows-x64", "unified-build-windows-x86" ]
+
+trigger: none
+pr: none
+
+variables:
+- template: /eng/common/templates/variables/pool-providers.yml@self
+
+- name: skipComponentGovernanceDetection # we run CG on internal builds only
+ value: true
+
+- name: Codeql.Enabled # we run CodeQL on internal builds only
+ value: false
+
+resources:
+ repositories:
+ - repository: vmr
+ type: github
+ name: dotnet/dotnet
+ endpoint: dotnet
+ ref: refs/heads/main # Set to whatever VMR branch the PR build should insert into
+
+stages:
+- template: /eng/pipelines/templates/stages/vmr-build.yml@vmr
+ parameters:
+ isBuiltFromVmr: false
+ scope: lite
diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1
index a06513a5940..977a2d4b103 100644
--- a/eng/common/tools.ps1
+++ b/eng/common/tools.ps1
@@ -65,10 +65,8 @@ $ErrorActionPreference = 'Stop'
# Base-64 encoded SAS token that has permission to storage container described by $runtimeSourceFeed
[string]$runtimeSourceFeedKey = if (Test-Path variable:runtimeSourceFeedKey) { $runtimeSourceFeedKey } else { $null }
-# True if the build is a product build
-[bool]$productBuild = if (Test-Path variable:productBuild) { $productBuild } else { $false }
-
-[String[]]$properties = if (Test-Path variable:properties) { $properties } else { @() }
+# True when the build is running within the VMR.
+[bool]$fromVMR = if (Test-Path variable:fromVMR) { $fromVMR } else { $false }
function Create-Directory ([string[]] $path) {
New-Item -Path $path -Force -ItemType 'Directory' | Out-Null
@@ -259,7 +257,20 @@ function Retry($downloadBlock, $maxRetries = 5) {
function GetDotNetInstallScript([string] $dotnetRoot) {
$installScript = Join-Path $dotnetRoot 'dotnet-install.ps1'
+ $shouldDownload = $false
+
if (!(Test-Path $installScript)) {
+ $shouldDownload = $true
+ } else {
+ # Check if the script is older than 30 days
+ $fileAge = (Get-Date) - (Get-Item $installScript).LastWriteTime
+ if ($fileAge.Days -gt 30) {
+ Write-Host "Existing install script is too old, re-downloading..."
+ $shouldDownload = $true
+ }
+ }
+
+ if ($shouldDownload) {
Create-Directory $dotnetRoot
$ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
$uri = "https://builds.dotnet.microsoft.com/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1"
@@ -383,8 +394,8 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements =
# If the version of msbuild is going to be xcopied,
# use this version. Version matches a package here:
- # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/Microsoft.DotNet.Arcade.MSBuild.Xcopy/versions/17.12.0
- $defaultXCopyMSBuildVersion = '17.12.0'
+ # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/Microsoft.DotNet.Arcade.MSBuild.Xcopy/versions/18.0.0
+ $defaultXCopyMSBuildVersion = '18.0.0'
if (!$vsRequirements) {
if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') {
@@ -533,7 +544,8 @@ function LocateVisualStudio([object]$vsRequirements = $null){
if (Get-Member -InputObject $GlobalJson.tools -Name 'vswhere') {
$vswhereVersion = $GlobalJson.tools.vswhere
} else {
- $vswhereVersion = '2.5.2'
+ # keep this in sync with the VSWhereVersion in DefaultVersions.props
+ $vswhereVersion = '3.1.7'
}
$vsWhereDir = Join-Path $ToolsDir "vswhere\$vswhereVersion"
@@ -541,7 +553,8 @@ function LocateVisualStudio([object]$vsRequirements = $null){
if (!(Test-Path $vsWhereExe)) {
Create-Directory $vsWhereDir
- Write-Host 'Downloading vswhere'
+ Write-Host "Downloading vswhere $vswhereVersion"
+ $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
Retry({
Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -UseBasicParsing -OutFile $vswhereExe
})
@@ -611,14 +624,7 @@ function InitializeBuildTool() {
}
$dotnetPath = Join-Path $dotnetRoot (GetExecutableFileName 'dotnet')
- # Use override if it exists - commonly set by source-build
- if ($null -eq $env:_OverrideArcadeInitializeBuildToolFramework) {
- $initializeBuildToolFramework="net9.0"
- } else {
- $initializeBuildToolFramework=$env:_OverrideArcadeInitializeBuildToolFramework
- }
-
- $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = $initializeBuildToolFramework }
+ $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'net' }
} elseif ($msbuildEngine -eq "vs") {
try {
$msbuildPath = InitializeVisualStudioMSBuild -install:$restore
@@ -627,7 +633,7 @@ function InitializeBuildTool() {
ExitWithExitCode 1
}
- $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472"; ExcludePrereleaseVS = $excludePrereleaseVS }
+ $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "netframework"; ExcludePrereleaseVS = $excludePrereleaseVS }
} else {
Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'."
ExitWithExitCode 1
@@ -660,7 +666,6 @@ function GetNuGetPackageCachePath() {
$env:NUGET_PACKAGES = Join-Path $env:UserProfile '.nuget\packages\'
} else {
$env:NUGET_PACKAGES = Join-Path $RepoRoot '.packages\'
- $env:RESTORENOHTTPCACHE = $true
}
}
@@ -782,26 +787,13 @@ function MSBuild() {
$toolsetBuildProject = InitializeToolset
$basePath = Split-Path -parent $toolsetBuildProject
- $possiblePaths = @(
- # new scripts need to work with old packages, so we need to look for the old names/versions
- (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll')),
- (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')),
- (Join-Path $basePath (Join-Path net7.0 'Microsoft.DotNet.ArcadeLogging.dll')),
- (Join-Path $basePath (Join-Path net7.0 'Microsoft.DotNet.Arcade.Sdk.dll')),
- (Join-Path $basePath (Join-Path net8.0 'Microsoft.DotNet.ArcadeLogging.dll')),
- (Join-Path $basePath (Join-Path net8.0 'Microsoft.DotNet.Arcade.Sdk.dll'))
- )
- $selectedPath = $null
- foreach ($path in $possiblePaths) {
- if (Test-Path $path -PathType Leaf) {
- $selectedPath = $path
- break
- }
- }
+ $selectedPath = Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll')
+
if (-not $selectedPath) {
- Write-PipelineTelemetryError -Category 'Build' -Message 'Unable to find arcade sdk logger assembly.'
+ Write-PipelineTelemetryError -Category 'Build' -Message "Unable to find arcade sdk logger assembly: $selectedPath"
ExitWithExitCode 1
}
+
$args += "/logger:$selectedPath"
}
@@ -832,6 +824,11 @@ function MSBuild-Core() {
$cmdArgs = "$($buildTool.Command) /m /nologo /clp:Summary /v:$verbosity /nr:$nodeReuse /p:ContinuousIntegrationBuild=$ci"
+ # Add -mt flag for MSBuild multithreaded mode if enabled via environment variable
+ if ($env:MSBUILD_MT_ENABLED -eq "1") {
+ $cmdArgs += ' -mt'
+ }
+
if ($warnAsError) {
$cmdArgs += ' /warnaserror /p:TreatWarningsAsErrors=true'
}
@@ -864,8 +861,8 @@ function MSBuild-Core() {
}
# When running on Azure Pipelines, override the returned exit code to avoid double logging.
- # Skip this when the build is a child of the VMR orchestrator build.
- if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null -and !$productBuild -and -not($properties -like "*DotNetBuildRepo=true*")) {
+ # Skip this when the build is a child of the VMR build.
+ if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null -and !$fromVMR) {
Write-PipelineSetResult -Result "Failed" -Message "msbuild execution failed."
# Exiting with an exit code causes the azure pipelines task to log yet another "noise" error
# The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error
diff --git a/eng/common/tools.sh b/eng/common/tools.sh
index 01b09b65796..1b296f646c2 100755
--- a/eng/common/tools.sh
+++ b/eng/common/tools.sh
@@ -5,6 +5,9 @@
# CI mode - set to true on CI server for PR validation build or official build.
ci=${ci:-false}
+# Build mode
+source_build=${source_build:-false}
+
# Set to true to use the pipelines logger which will enable Azure logging output.
# https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md
# This flag is meant as a temporary opt-opt for the feature while validate it across
@@ -58,7 +61,8 @@ use_installed_dotnet_cli=${use_installed_dotnet_cli:-true}
dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'}
# True to use global NuGet cache instead of restoring packages to repository-local directory.
-if [[ "$ci" == true ]]; then
+# Keep in sync with NuGetPackageroot in Arcade SDK's RepositoryLayout.props.
+if [[ "$ci" == true || "$source_build" == true ]]; then
use_global_nuget_cache=${use_global_nuget_cache:-false}
else
use_global_nuget_cache=${use_global_nuget_cache:-true}
@@ -68,8 +72,8 @@ fi
runtime_source_feed=${runtime_source_feed:-''}
runtime_source_feed_key=${runtime_source_feed_key:-''}
-# True if the build is a product build
-product_build=${product_build:-false}
+# True when the build is running within the VMR.
+from_vmr=${from_vmr:-false}
# Resolve any symlinks in the given path.
function ResolvePath {
@@ -296,8 +300,29 @@ function GetDotNetInstallScript {
local root=$1
local install_script="$root/dotnet-install.sh"
local install_script_url="https://builds.dotnet.microsoft.com/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh"
+ local timestamp_file="$root/.dotnet-install.timestamp"
+ local should_download=false
if [[ ! -a "$install_script" ]]; then
+ should_download=true
+ elif [[ -f "$timestamp_file" ]]; then
+ # Check if the script is older than 30 days using timestamp file
+ local download_time=$(cat "$timestamp_file" 2>/dev/null || echo "0")
+ local current_time=$(date +%s)
+ local age_seconds=$((current_time - download_time))
+
+ # 30 days = 30 * 24 * 60 * 60 = 2592000 seconds
+ if [[ $age_seconds -gt 2592000 ]]; then
+ echo "Existing install script is too old, re-downloading..."
+ should_download=true
+ fi
+ else
+ # No timestamp file exists, assume script is old and re-download
+ echo "No timestamp found for existing install script, re-downloading..."
+ should_download=true
+ fi
+
+ if [[ "$should_download" == true ]]; then
mkdir -p "$root"
echo "Downloading '$install_script_url'"
@@ -324,6 +349,9 @@ function GetDotNetInstallScript {
ExitWithExitCode $exit_code
}
fi
+
+ # Create timestamp file to track download time in seconds from epoch
+ date +%s > "$timestamp_file"
fi
# return value
_GetDotNetInstallScript="$install_script"
@@ -339,22 +367,14 @@ function InitializeBuildTool {
# return values
_InitializeBuildTool="$_InitializeDotNetCli/dotnet"
_InitializeBuildToolCommand="msbuild"
- # use override if it exists - commonly set by source-build
- if [[ "${_OverrideArcadeInitializeBuildToolFramework:-x}" == "x" ]]; then
- _InitializeBuildToolFramework="net9.0"
- else
- _InitializeBuildToolFramework="${_OverrideArcadeInitializeBuildToolFramework}"
- fi
}
-# Set RestoreNoHttpCache as a workaround for https://github.com/NuGet/Home/issues/3116
function GetNuGetPackageCachePath {
if [[ -z ${NUGET_PACKAGES:-} ]]; then
if [[ "$use_global_nuget_cache" == true ]]; then
export NUGET_PACKAGES="$HOME/.nuget/packages/"
else
export NUGET_PACKAGES="$repo_root/.packages/"
- export RESTORENOHTTPCACHE=true
fi
fi
@@ -451,25 +471,13 @@ function MSBuild {
fi
local toolset_dir="${_InitializeToolset%/*}"
- # new scripts need to work with old packages, so we need to look for the old names/versions
- local selectedPath=
- local possiblePaths=()
- possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.ArcadeLogging.dll" )
- possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll" )
- possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.ArcadeLogging.dll" )
- possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.Arcade.Sdk.dll" )
- possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.ArcadeLogging.dll" )
- possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.Arcade.Sdk.dll" )
- for path in "${possiblePaths[@]}"; do
- if [[ -f $path ]]; then
- selectedPath=$path
- break
- fi
- done
+ local selectedPath="$toolset_dir/net/Microsoft.DotNet.ArcadeLogging.dll"
+
if [[ -z "$selectedPath" ]]; then
- Write-PipelineTelemetryError -category 'Build' "Unable to find arcade sdk logger assembly."
+ Write-PipelineTelemetryError -category 'Build' "Unable to find arcade sdk logger assembly: $selectedPath"
ExitWithExitCode 1
fi
+
args+=( "-logger:$selectedPath" )
fi
@@ -506,8 +514,8 @@ function MSBuild-Core {
echo "Build failed with exit code $exit_code. Check errors above."
# When running on Azure Pipelines, override the returned exit code to avoid double logging.
- # Skip this when the build is a child of the VMR orchestrator build.
- if [[ "$ci" == true && -n ${SYSTEM_TEAMPROJECT:-} && "$product_build" != true && "$properties" != *"DotNetBuildRepo=true"* ]]; then
+ # Skip this when the build is a child of the VMR build.
+ if [[ "$ci" == true && -n ${SYSTEM_TEAMPROJECT:-} && "$from_vmr" != true ]]; then
Write-PipelineSetResult -result "Failed" -message "msbuild execution failed."
# Exiting with an exit code causes the azure pipelines task to log yet another "noise" error
# The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error
@@ -518,7 +526,13 @@ function MSBuild-Core {
}
}
- RunBuildTool "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@"
+ # Add -mt flag for MSBuild multithreaded mode if enabled via environment variable
+ local mt_switch=""
+ if [[ "${MSBUILD_MT_ENABLED:-}" == "1" ]]; then
+ mt_switch="-mt"
+ fi
+
+ RunBuildTool "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch $mt_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@"
}
function GetDarc {
@@ -530,6 +544,13 @@ function GetDarc {
fi
"$eng_root/common/darc-init.sh" --toolpath "$darc_path" $version
+ darc_tool="$darc_path/darc"
+}
+
+# Returns a full path to an Arcade SDK task project file.
+function GetSdkTaskProject {
+ taskName=$1
+ echo "$(dirname $_InitializeToolset)/SdkTasks/$taskName.proj"
}
ResolvePath "${BASH_SOURCE[0]}"
diff --git a/eng/common/vmr-sync.ps1 b/eng/common/vmr-sync.ps1
new file mode 100644
index 00000000000..b37992d91cf
--- /dev/null
+++ b/eng/common/vmr-sync.ps1
@@ -0,0 +1,164 @@
+<#
+.SYNOPSIS
+
+This script is used for synchronizing the current repository into a local VMR.
+It pulls the current repository's code into the specified VMR directory for local testing or
+Source-Build validation.
+
+.DESCRIPTION
+
+The tooling used for synchronization will clone the VMR repository into a temporary folder if
+it does not already exist. These clones can be reused in future synchronizations, so it is
+recommended to dedicate a folder for this to speed up re-runs.
+
+.EXAMPLE
+ Synchronize current repository into a local VMR:
+ ./vmr-sync.ps1 -vmrDir "$HOME/repos/dotnet" -tmpDir "$HOME/repos/tmp"
+
+.PARAMETER tmpDir
+Required. Path to the temporary folder where repositories will be cloned
+
+.PARAMETER vmrBranch
+Optional. Branch of the 'dotnet/dotnet' repo to synchronize. The VMR will be checked out to this branch
+
+.PARAMETER azdevPat
+Optional. Azure DevOps PAT to use for cloning private repositories.
+
+.PARAMETER vmrDir
+Optional. Path to the dotnet/dotnet repository. When null, gets cloned to the temporary folder
+
+.PARAMETER debugOutput
+Optional. Enables debug logging in the darc vmr command.
+
+.PARAMETER ci
+Optional. Denotes that the script is running in a CI environment.
+#>
+param (
+ [Parameter(Mandatory=$true, HelpMessage="Path to the temporary folder where repositories will be cloned")]
+ [string][Alias('t', 'tmp')]$tmpDir,
+ [string][Alias('b', 'branch')]$vmrBranch,
+ [string]$remote,
+ [string]$azdevPat,
+ [string][Alias('v', 'vmr')]$vmrDir,
+ [switch]$ci,
+ [switch]$debugOutput
+)
+
+function Fail {
+ Write-Host "> $($args[0])" -ForegroundColor 'Red'
+}
+
+function Highlight {
+ Write-Host "> $($args[0])" -ForegroundColor 'Cyan'
+}
+
+$verbosity = 'verbose'
+if ($debugOutput) {
+ $verbosity = 'debug'
+}
+# Validation
+
+if (-not $tmpDir) {
+ Fail "Missing -tmpDir argument. Please specify the path to the temporary folder where the repositories will be cloned"
+ exit 1
+}
+
+# Sanitize the input
+
+if (-not $vmrDir) {
+ $vmrDir = Join-Path $tmpDir 'dotnet'
+}
+
+if (-not (Test-Path -Path $tmpDir -PathType Container)) {
+ New-Item -ItemType Directory -Path $tmpDir | Out-Null
+}
+
+# Prepare the VMR
+
+if (-not (Test-Path -Path $vmrDir -PathType Container)) {
+ Highlight "Cloning 'dotnet/dotnet' into $vmrDir.."
+ git clone https://github.com/dotnet/dotnet $vmrDir
+
+ if ($vmrBranch) {
+ git -C $vmrDir switch -c $vmrBranch
+ }
+}
+else {
+    git -C $vmrDir diff --quiet; if ($LASTEXITCODE -ne 0) { # --quiet prints nothing; dirty tree is signaled via exit code 1
+        Fail "There are changes in the working tree of $vmrDir. Please commit or stash your changes"
+        exit 1
+    }
+
+ if ($vmrBranch) {
+ Highlight "Preparing $vmrDir"
+ git -C $vmrDir checkout $vmrBranch
+ git -C $vmrDir pull
+ }
+}
+
+Set-StrictMode -Version Latest
+
+# Prepare darc
+
+Highlight 'Installing .NET, preparing the tooling..'
+. .\eng\common\tools.ps1
+$dotnetRoot = InitializeDotNetCli -install:$true
+$env:DOTNET_ROOT = $dotnetRoot
+$darc = Get-Darc
+
+Highlight "Starting the synchronization of VMR.."
+
+# Synchronize the VMR
+$versionDetailsPath = Resolve-Path (Join-Path $PSScriptRoot '..\Version.Details.xml') | Select-Object -ExpandProperty Path
+[xml]$versionDetails = Get-Content -Path $versionDetailsPath
+$repoName = $versionDetails.SelectSingleNode('//Source').Mapping
+if (-not $repoName) {
+ Fail "Failed to resolve repo mapping from $versionDetailsPath"
+ exit 1
+}
+
+$darcArgs = (
+ "vmr", "forwardflow",
+ "--tmp", $tmpDir,
+ "--$verbosity",
+ $vmrDir
+)
+
+if ($ci) {
+ $darcArgs += ("--ci")
+}
+
+if ($azdevPat) {
+ $darcArgs += ("--azdev-pat", $azdevPat)
+}
+
+& "$darc" $darcArgs
+
+if ($LASTEXITCODE -eq 0) {
+ Highlight "Synchronization succeeded"
+}
+else {
+ Highlight "Failed to flow code into the local VMR. Falling back to resetting the VMR to match repo contents..."
+ git -C $vmrDir reset --hard
+
+ $resetArgs = (
+ "vmr", "reset",
+ "${repoName}:HEAD",
+ "--vmr", $vmrDir,
+ "--tmp", $tmpDir,
+ "--additional-remotes", "${repoName}:${repoRoot}"
+ )
+
+ & "$darc" $resetArgs
+
+ if ($LASTEXITCODE -eq 0) {
+ Highlight "Successfully reset the VMR using 'darc vmr reset'"
+ }
+ else {
+ Fail "Synchronization of repo to VMR failed!"
+ Fail "'$vmrDir' is left in its last state (re-run of this script will reset it)."
+ Fail "Please inspect the logs which contain path to the failing patch file (use -debugOutput to get all the details)."
+ Fail "Once you make changes to the conflicting VMR patch, commit it locally and re-run this script."
+ exit 1
+ }
+}
diff --git a/eng/common/vmr-sync.sh b/eng/common/vmr-sync.sh
new file mode 100644
index 00000000000..198caec59bd
--- /dev/null
+++ b/eng/common/vmr-sync.sh
@@ -0,0 +1,227 @@
+#!/bin/bash
+
+### This script is used for synchronizing the current repository into a local VMR.
+### It pulls the current repository's code into the specified VMR directory for local testing or
+### Source-Build validation.
+###
+### The tooling used for synchronization will clone the VMR repository into a temporary folder if
+### it does not already exist. These clones can be reused in future synchronizations, so it is
+### recommended to dedicate a folder for this to speed up re-runs.
+###
+### USAGE:
+### Synchronize current repository into a local VMR:
+### ./vmr-sync.sh --tmp "$HOME/repos/tmp" "$HOME/repos/dotnet"
+###
+### Options:
+### -t, --tmp, --tmp-dir PATH
+### Required. Path to the temporary folder where repositories will be cloned
+###
+### -b, --branch, --vmr-branch BRANCH_NAME
+### Optional. Branch of the 'dotnet/dotnet' repo to synchronize. The VMR will be checked out to this branch
+###
+### --debug
+### Optional. Turns on the most verbose logging for the VMR tooling
+###
+### --remote name:URI
+### Optional. Additional remote to use during the synchronization
+### This can be used to synchronize to a commit from a fork of the repository
+### Example: 'runtime:https://github.com/yourfork/runtime'
+###
+### --azdev-pat
+### Optional. Azure DevOps PAT to use for cloning private repositories.
+###
+### -v, --vmr, --vmr-dir PATH
+### Optional. Path to the dotnet/dotnet repository. When null, gets cloned to the temporary folder
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+function print_help () {
+ sed -n '/^### /,/^$/p' "$source" | cut -b 5-
+}
+
+COLOR_RED=$(tput setaf 1 2>/dev/null || true)
+COLOR_CYAN=$(tput setaf 6 2>/dev/null || true)
+COLOR_CLEAR=$(tput sgr0 2>/dev/null || true)
+COLOR_RESET=uniquesearchablestring
+FAILURE_PREFIX='> '
+
+function fail () {
+ echo "${COLOR_RED}$FAILURE_PREFIX${1//${COLOR_RESET}/${COLOR_RED}}${COLOR_CLEAR}" >&2
+}
+
+function highlight () {
+ echo "${COLOR_CYAN}$FAILURE_PREFIX${1//${COLOR_RESET}/${COLOR_CYAN}}${COLOR_CLEAR}"
+}
+
+tmp_dir=''
+vmr_dir=''
+vmr_branch=''
+additional_remotes=''
+verbosity=verbose
+azdev_pat=''
+ci=false
+
+while [[ $# -gt 0 ]]; do
+ opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
+ case "$opt" in
+ -t|--tmp|--tmp-dir)
+ tmp_dir=$2
+ shift
+ ;;
+ -v|--vmr|--vmr-dir)
+ vmr_dir=$2
+ shift
+ ;;
+ -b|--branch|--vmr-branch)
+ vmr_branch=$2
+ shift
+ ;;
+ --remote)
+ additional_remotes="$additional_remotes $2"
+ shift
+ ;;
+ --azdev-pat)
+ azdev_pat=$2
+ shift
+ ;;
+ --ci)
+ ci=true
+ ;;
+ -d|--debug)
+ verbosity=debug
+ ;;
+ -h|--help)
+ print_help
+ exit 0
+ ;;
+ *)
+ fail "Invalid argument: $1"
+ print_help
+ exit 1
+ ;;
+ esac
+
+ shift
+done
+
+# Validation
+
+if [[ -z "$tmp_dir" ]]; then
+ fail "Missing --tmp-dir argument. Please specify the path to the temporary folder where the repositories will be cloned"
+ exit 1
+fi
+
+# Sanitize the input
+
+if [[ -z "$vmr_dir" ]]; then
+ vmr_dir="$tmp_dir/dotnet"
+fi
+
+if [[ ! -d "$tmp_dir" ]]; then
+ mkdir -p "$tmp_dir"
+fi
+
+if [[ "$verbosity" == "debug" ]]; then
+ set -x
+fi
+
+# Prepare the VMR
+
+if [[ ! -d "$vmr_dir" ]]; then
+ highlight "Cloning 'dotnet/dotnet' into $vmr_dir.."
+ git clone https://github.com/dotnet/dotnet "$vmr_dir"
+
+ if [[ -n "$vmr_branch" ]]; then
+ git -C "$vmr_dir" switch -c "$vmr_branch"
+ fi
+else
+ if ! git -C "$vmr_dir" diff --quiet; then
+ fail "There are changes in the working tree of $vmr_dir. Please commit or stash your changes"
+ exit 1
+ fi
+
+ if [[ -n "$vmr_branch" ]]; then
+ highlight "Preparing $vmr_dir"
+ git -C "$vmr_dir" checkout "$vmr_branch"
+ git -C "$vmr_dir" pull
+ fi
+fi
+
+set -e
+
+# Prepare darc
+
+highlight 'Installing .NET, preparing the tooling..'
+source "./eng/common/tools.sh"
+InitializeDotNetCli true
+GetDarc
+dotnetDir=$( cd ./.dotnet/; pwd -P )
+dotnet=$dotnetDir/dotnet
+
+highlight "Starting the synchronization of VMR.."
+set +e
+
+if [[ -n "$additional_remotes" ]]; then
+ additional_remotes="--additional-remotes $additional_remotes"
+fi
+
+if [[ -n "$azdev_pat" ]]; then
+ azdev_pat="--azdev-pat $azdev_pat"
+fi
+
+ci_arg=''
+if [[ "$ci" == "true" ]]; then
+ ci_arg="--ci"
+fi
+
+# Synchronize the VMR
+
+version_details_path=$(cd "$scriptroot/.."; pwd -P)/Version.Details.xml
+repo_name=$(grep -m 1 '
-
+
diff --git a/global.json b/global.json
index 8decbcb016e..11caa1491ad 100644
--- a/global.json
+++ b/global.json
@@ -1,9 +1,9 @@
{
"sdk": {
- "version": "10.0.103"
+ "version": "10.0.105"
},
"tools": {
- "dotnet": "10.0.103",
+ "dotnet": "10.0.105",
"runtimes": {
"dotnet": [
"8.0.0",
@@ -20,7 +20,7 @@
"msbuild-sdks": {
"Microsoft.Build.NoTargets": "3.7.0",
"Microsoft.Build.Traversal": "3.2.0",
- "Microsoft.DotNet.Arcade.Sdk": "9.0.0-beta.26123.3",
- "Microsoft.DotNet.Helix.Sdk": "9.0.0-beta.26123.3"
+ "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.26168.1",
+ "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.26168.1"
}
}
diff --git a/src/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware/Logging/HttpLoggingServiceCollectionExtensions.cs b/src/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware/Logging/HttpLoggingServiceCollectionExtensions.cs
index c98055ff8b1..e84702294ce 100644
--- a/src/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware/Logging/HttpLoggingServiceCollectionExtensions.cs
+++ b/src/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware/Logging/HttpLoggingServiceCollectionExtensions.cs
@@ -18,7 +18,6 @@ namespace Microsoft.Extensions.DependencyInjection;
///
/// Extension methods to register the HTTP logging feature within the service.
///
-[Experimental(diagnosticId: DiagnosticIds.Experiments.HttpLogging, UrlFormat = DiagnosticIds.UrlFormat)]
public static class HttpLoggingServiceCollectionExtensions
{
///
@@ -31,6 +30,7 @@ public static class HttpLoggingServiceCollectionExtensions
/// Configures the redaction options.
/// The value of .
/// is .
+ [Experimental(diagnosticId: DiagnosticIds.Experiments.HttpLogging, UrlFormat = DiagnosticIds.UrlFormat)]
public static IServiceCollection AddHttpLoggingRedaction(this IServiceCollection services, Action? configure = null)
{
_ = Throw.IfNull(services);
@@ -56,6 +56,7 @@ public static IServiceCollection AddHttpLoggingRedaction(this IServiceCollection
/// The service collection.
/// The configuration section with the redaction settings.
/// The value of .
+ [Experimental(diagnosticId: DiagnosticIds.Experiments.HttpLogging, UrlFormat = DiagnosticIds.UrlFormat)]
public static IServiceCollection AddHttpLoggingRedaction(this IServiceCollection services, IConfigurationSection section)
{
_ = Throw.IfNull(section);
diff --git a/src/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware/Logging/IHttpLogEnricher.cs b/src/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware/Logging/IHttpLogEnricher.cs
index 5c221368cd5..ca87297c498 100644
--- a/src/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware/Logging/IHttpLogEnricher.cs
+++ b/src/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware/Logging/IHttpLogEnricher.cs
@@ -3,17 +3,14 @@
#if NET8_0_OR_GREATER
-using System.Diagnostics.CodeAnalysis;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Diagnostics.Enrichment;
-using Microsoft.Shared.DiagnosticIds;
namespace Microsoft.AspNetCore.Diagnostics.Logging;
///
/// Interface for implementing log enrichers for incoming HTTP requests.
///
-[Experimental(diagnosticId: DiagnosticIds.Experiments.HttpLogging, UrlFormat = DiagnosticIds.UrlFormat)]
public interface IHttpLogEnricher
{
///
diff --git a/src/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware/Logging/RequestHeadersLogEnricherOptions.cs b/src/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware/Logging/RequestHeadersLogEnricherOptions.cs
index e18822e7ad5..aa8c0975e94 100644
--- a/src/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware/Logging/RequestHeadersLogEnricherOptions.cs
+++ b/src/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware/Logging/RequestHeadersLogEnricherOptions.cs
@@ -4,9 +4,7 @@
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
-using System.Diagnostics.CodeAnalysis;
using Microsoft.Extensions.Compliance.Classification;
-using Microsoft.Shared.DiagnosticIds;
namespace Microsoft.AspNetCore.Diagnostics.Logging;
@@ -22,6 +20,5 @@ public class RequestHeadersLogEnricherOptions
/// Default value is an empty dictionary.
///
[Required]
- [Experimental(diagnosticId: DiagnosticIds.Experiments.Telemetry, UrlFormat = DiagnosticIds.UrlFormat)]
public IDictionary HeadersDataClasses { get; set; } = new Dictionary(StringComparer.OrdinalIgnoreCase);
}
diff --git a/src/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware/Microsoft.AspNetCore.Diagnostics.Middleware.json b/src/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware/Microsoft.AspNetCore.Diagnostics.Middleware.json
index f445bb581f5..2e7745a3610 100644
--- a/src/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware/Microsoft.AspNetCore.Diagnostics.Middleware.json
+++ b/src/Libraries/Microsoft.AspNetCore.Diagnostics.Middleware/Microsoft.AspNetCore.Diagnostics.Middleware.json
@@ -1,13 +1,13 @@
{
- "Name": "Microsoft.AspNetCore.Diagnostics.Middleware, Version=9.7.0.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35",
+ "Name": "Microsoft.AspNetCore.Diagnostics.Middleware, Version=10.5.0.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35",
"Types": [
{
"Type": "static class Microsoft.Extensions.DependencyInjection.HttpLoggingServiceCollectionExtensions",
- "Stage": "Experimental",
+ "Stage": "Stable",
"Methods": [
{
"Member": "static Microsoft.Extensions.DependencyInjection.IServiceCollection Microsoft.Extensions.DependencyInjection.HttpLoggingServiceCollectionExtensions.AddHttpLogEnricher(this Microsoft.Extensions.DependencyInjection.IServiceCollection services);",
- "Stage": "Experimental"
+ "Stage": "Stable"
},
{
"Member": "static Microsoft.Extensions.DependencyInjection.IServiceCollection Microsoft.Extensions.DependencyInjection.HttpLoggingServiceCollectionExtensions.AddHttpLoggingRedaction(this Microsoft.Extensions.DependencyInjection.IServiceCollection services, System.Action? configure = null);",
@@ -78,11 +78,11 @@
},
{
"Type": "interface Microsoft.AspNetCore.Diagnostics.Logging.IHttpLogEnricher",
- "Stage": "Experimental",
+ "Stage": "Stable",
"Methods": [
{
"Member": "void Microsoft.AspNetCore.Diagnostics.Logging.IHttpLogEnricher.Enrich(Microsoft.Extensions.Diagnostics.Enrichment.IEnrichmentTagCollector collector, Microsoft.AspNetCore.Http.HttpContext httpContext);",
- "Stage": "Experimental"
+ "Stage": "Stable"
}
]
},
@@ -189,7 +189,7 @@
"Stage": "Stable"
},
{
- "Member": "System.Collections.Generic.IList Microsoft.AspNetCore.Diagnostics.Buffering.PerRequestLogBufferingOptions.Rules { get; set; }",
+ "Member": "System.Collections.Generic.IList Microsoft.AspNetCore.Diagnostics.Buffering.PerRequestLogBufferingOptions.Rules { get; set; }",
"Stage": "Stable"
}
]
@@ -251,7 +251,7 @@
"Properties": [
{
"Member": "System.Collections.Generic.IDictionary Microsoft.AspNetCore.Diagnostics.Logging.RequestHeadersLogEnricherOptions.HeadersDataClasses { get; set; }",
- "Stage": "Experimental"
+ "Stage": "Stable"
}
]
},
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/CHANGELOG.md b/src/Libraries/Microsoft.Extensions.AI.Abstractions/CHANGELOG.md
deleted file mode 100644
index 4bf54ef2c1b..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/CHANGELOG.md
+++ /dev/null
@@ -1,245 +0,0 @@
-# Microsoft.Extensions.AI.Abstractions Release History
-
-## NOT YET RELEASED
-
-- `AddAIContentType` now automatically registers the content type against every base in the inheritance chain up to `AIContent`.
-- Added `IHostedFileClient` interface and related types for interacting with files hosted by the service.
-- Added `WebSearchToolCallContent` and `WebSearchToolResultContent` for representing web search tool calls and results.
-- Added `ToolCallContent` and `ToolResultContent` base classes.
-- Updated the design of the MCP and approvals-related types and marked them as stable.
-- Updated AI function parameter JSON schema generation to honor `[Required]` attributes.
-- Updated `AIFunctionFactory` to work better with `DynamicMethod`-based functions.
-- Removed the experimental `IToolReductionStrategy` type.
-
-## 10.3.0
-
-- Added `ReasoningOptions` to `ChatOptions` for configuring reasoning effort and output.
-- Unsealed `FunctionCallContent` and `FunctionResultContent`.
-- Added `InformationalOnly` property to `FunctionCallContent` to indicate whether the content is informing the consumer about a call that's being made elsewhere or that is a request for the call to be performed.
-- Added `LoadFromAsync` and `SaveToAsync` helper methods to `DataContent` for file I/O operations.
-- Removed `[Experimental]` attribute from `IChatReducer`.
-- Fixed JSON schema generation for nullable reference type annotations on parameters in AIFunctions.
-- Fixed `DataUriParser` to default to `text/plain;charset=US-ASCII` per RFC 2397.
-- Fixed serialization handling of `ImageGenerationToolCallContent` and `ImageGenerationToolResultContent`.
-
-## 10.2.0
-
-- Updated `ToChatResponse{Async}`'s handling of `AdditionalProperties`, such that `ChatResponseUpdate.AdditionalProperties` is merged into `ChatMessage.AdditionalProperties` for updates that have a non-`null` `MessageId`.
-- Updated `ToChatResponse{Async}` to use the first appropriate `ChatResponseUpdate`'s `CreatedAt` timestamp rather than the last.
-- Added a `Reason` property to `FunctionApprovalResponseContent` in support of custom rejection messages.
-- Added support for custom headers to `HostedMcpServerTool`.
-
-## 10.1.1
-
-- Added `InputCachedTokenCount` and `ReasoningTokenCount` to `UsageDetails`.
-- Added constructors to `HostedCodeInterpreterTool`, `HostedFileSearchTool`, `HostedImageGeneratorTool`, `HostedMcpServerTool`,
- and `HostedWebSearchTool` that accept a dictionary for `AdditionalProperties`.
-
-## 10.1.0
-
-- Fixed package references for net10.0 asset.
-- Added `AIJsonSchemaCreateOptions.ParameterDescriptions`.
-
-## 10.0.1
-
-- Updated return type of [Experimental] `ContinuationToken` properties.
-- Fixed ValidateSchemaDocument's handling of valid Boolean schemas.
-
-## 10.0.0
-
-- Added experimental `HostedImageGenerationTool`.
-- Updated .NET dependencies to 10.0.0 versions.
-
-## 9.10.2
-
-- Updated `AIFunctionFactory` to respect `[DisplayName(...)]` on functions as a way to override the function name.
-- Updated `AIFunctionFactory` to respect `[DefaultValue(...)]` on function parameters as a way to specify default values.
-- Added `CodeInterpreterToolCallContent`/`CodeInterpreterToolResultContent` for representing code interpreter tool calls and results.
-- Added `Name`, `MediaType`, and `HasTopLevelMediaType` to `HostedFileContent`.
-- Fixed the serialization/deserialization of variables typed as `UserInputRequestContent`/`UserInputResponseContent`.
-
-## 9.10.1
-
-- Updated `HostedMcpServerTool` to allow for non-`Uri` server addresses, in order to enable built-in names.
-- Updated `HostedMcpServerTool` to replace the header collection with an `AuthorizationToken` property.
-- Fixed `ToChatResponse{Async}` to not discard `TextReasoningContent.ProtectedData` when coalescing messages.
-- Fixed `AIFunctionFactory.Create` to special-case return types of `AIContent` and `IEnumerable` to not automatically JSON serialize them.
-
-## 9.10.0
-
-- Added protected copy constructors to options types (e.g. `ChatOptions`).
-- Added `[Experimental]` support for background responses, such that non-streaming responses are allowed to be pollable and responses / response updates can be tagged with continuation tokens to support later resumption.
-- Updated `AIFunctionFactory.Create` to produce better default names for lambdas and local functions.
-- Fixed `AIJsonUtilities.DefaultOptions` to handle the built-in `[Experimental]` `AIContent` types, like `FunctionApprovalRequestContent`.
-- Fixed `ToChatResponse{Async}` to factor `ChatResponseUpdate.AuthorName` into message boundary detection.
-- Fixed `ToChatResponse{Async}` to not overwrite `ChatMessage/ChatResponse.CreatedAt` with older timestamps during coalescing.
-- Fixed `EmbeddingGeneratorOptions`/`SpeechToTextOptions` `Clone` methods to correctly copy all properties.
-
-## 9.9.1
-
-- Added new `ChatResponseFormat.ForJsonSchema` overloads that export a JSON schema from a .NET type.
-- Added new `AITool.GetService` virtual method.
-- Updated `TextReasoningContent` to include `ProtectedData` for representing encrypted/redacted content.
-- Fixed `MinLength`/`MaxLength`/`Length` attribute mapping in nullable string properties during schema export.
-
-## 9.9.0
-
-- Added non-invocable `AIFunctionDeclaration` (base class for `AIFunction`), `AIFunctionFactory.CreateDeclaration`, and `AIFunction.AsDeclarationOnly`.
-- Added `[Experimental]` support for user approval of function invocations via `ApprovalRequiredAIFunction`, `FunctionApprovalRequestContent`, and friends.
-- Added `[Experimental]` support for MCP server-hosted tools via `HostedMcpServerTool`, `HostedMcpServerToolApprovalMode`, and friends.
-- Updated `AIContent` coalescing logic used by `ToChatResponse`/`ToChatResponseUpdate` to factor in `ChatMessage.Role`.
-- Moved `IChatReducer` into `Microsoft.Extensions.AI.Abstractions` from `Microsoft.Extensions.AI`.
-
-## 9.8.0
-
-- Added `AIAnnotation` and related types to represent citations and other annotations in chat messages.
-- Added `ChatMessage.CreatedAt` so that chat messages can carry their timestamp.
-- Added a `[Description(...)]` attribute to `DataContent.Uri` to clarify its purpose when used in schemas.
-- Added `DataContent.Name` property to associate a name with the binary data, like a filename.
-- Added `HostedFileContent` for representing files hosted by the service.
-- Added `HostedVectorStoreContent` for representing vector stores hosted by the service.
-- Added `HostedFileSearchTool` to represent server-side file search tools.
-- Added `HostedCodeInterpreterTool.Inputs` to supply context about what state is available to the code interpreter tool.
-- Added [Experimental] `IImageGenerator` and supporting types.
-- Improved handling of function parameter data annotation attributes in `AIJsonUtilities.CreateJsonSchema`.
-- Fixed schema generation to include an items keyword for arrays of objects in `AIJsonUtilities.CreateJsonSchema`.
-
-## 9.7.1
-
-- Fixed schema generation for nullable function parameters in `AIJsonUtilities.CreateJsonSchema`.
-- Added a flag for `AIFunctionFactory` to control whether return schemas are generated.
-- Added `DelegatingAIFunction` to simplify creating `AIFunction`s that call other `AIFunction`s.
-- Updated `AIFunctionFactory` to tolerate JSON string function parameters.
-- Fixed schema generation for nullable value type parameters.
-
-## 9.7.0
-
-- Added `ChatOptions.Instructions` property for configuring system instructions separate from chat messages.
-- Added `Usage` property to `SpeechToTextResponse` to provide details about the token usage.
-- Augmented `AIJsonUtilities.CreateJsonSchema` with support for data annotations.
-
-## 9.6.0
-
-- Added `AIFunction.ReturnJsonSchema` to represent the JSON schema of the return value of a function.
-- Removed title and description keywords from root-level schemas in `AIFunctionFactory`.
-
-## 9.5.0
-
-- Moved `AIFunctionFactory` down from `Microsoft.Extensions.AI` to `Microsoft.Extensions.AI.Abstractions`.
-- Added `BinaryEmbedding` type for representing bit embeddings.
-- Added `TextReasoningContent` to represent reasoning content in chat messages.
-- Added `ChatOptions.AllowMultipleToolCalls` for configuring parallel tool calling.
-- Added a public constructor to the base `AIContent`.
-- Added a missing `[DebuggerDisplay]` attribute on `AIFunctionArguments`.
-- Added `ChatOptions.RawRepresentationFactory` to facilitate passing raw options to the underlying service.
-- Added an `AIJsonSchemaTransformOptions` property inside `AIJsonSchemaCreateOptions`.
-- Added `DataContent.Base64Data` property for easier and more efficient handling of base64-encoded data.
-- Added JSON schema transformation functionality to `AIJsonUtilities`.
-- Fixed `AIJsonUtilities.CreateJsonSchema` to handle `JsonSerializerOptions` that do not have a `TypeInfoResolver` configured.
-- Fixed `AIFunctionFactory` handling of default struct arguments.
-- Fixed schema generation to ensure the type keyword is included when generating schemas for nullable enums.
-- Renamed the `GenerateXx` extension methods on `IEmbeddingGenerator<>`.
-- Renamed `ChatThreadId` to `ConversationId` across the libraries.
-- Replaced `Type targetType` parameter in `AIFunctionFactory.Create` with a delegate.
-- Remove `[Obsolete]` members from previews.
-
-## 9.4.4-preview.1.25259.16
-
-- Added `AIJsonUtilities.TransformSchema` and supporting types.
-- Added `BinaryEmbedding` for bit embeddings.
-- Added `ChatOptions.RawRepresentationFactory` to make it easier to pass options to the underlying service.
-- Added `Base64Data` property to `DataContent`.
-- Moved `AIFunctionFactory` to `Microsoft.Extensions.AI.Abstractions`.
-- Fixed `AIFunctionFactory` handling of default struct arguments.
-
-## 9.4.3-preview.1.25230.7
-
-- Renamed `ChatThreadId` to `ConversationId` on `ChatResponse`, `ChatResponseUpdate`, and `ChatOptions`.
-- Renamed `EmbeddingGeneratorExtensions` method `GenerateEmbeddingAsync` to `GenerateAsync` and `GenerateEmbeddingVectorAsync` to `GenerateVectorAsync`.
-- Made `AIContent`'s constructor `public` instead of `protected`.
-- Fixed `AIJsonUtilities.CreateJsonSchema` to tolerate `JsonSerializerOptions` instances that don't have a `TypeInfoResolver` already configured.
-
-## 9.4.0-preview.1.25207.5
-
-- Added `ErrorContent` and `TextReasoningContent`.
-- Added `MessageId` to `ChatMessage` and `ChatResponseUpdate`.
-- Added `AIFunctionArguments`, changing `AIFunction.InvokeAsync` to accept one and to return a `ValueTask`.
-- Updated `AIJsonUtilities`'s schema generation to not use `default` when `RequireAllProperties` is set to `true`.
-- Added [Experimental] `ISpeechToTextClient` and supporting types.
-- Fixed several issues related to Native AOT support.
-
-## 9.3.0-preview.1.25161.3
-
-- Changed `IChatClient.GetResponseAsync` and `IChatClient.GetStreamingResponseAsync` to accept an `IEnumerable` rather than an `IList`. It is no longer mutated by implementations.
-- Removed `ChatResponse.Choice` and `ChatResponseUpdate.ChoiceIndex`.
-- Replaced `ChatResponse.Message` with `ChatResponse.Messages`. Responses now carry with them all messages generated as part of the operation, rather than all but the last being added to the history and the last returned.
-- Added `GetRequiredService` extension method for `IChatClient`/`IEmbeddingGenerator`.
-- Added non-generic `IEmbeddingGenerator` interface, which is inherited by `IEmbeddingGenerator`. The `GetService` method moves down to the non-generic interface, and the `GetService`/`GetRequiredService` extension methods are now in terms of the non-generic.
-- `AIJsonUtilities.CreateFunctionJsonSchema` now special-cases `CancellationToken` to not include it in the schema.
-- Improved the debugger displays for `ChatMessage` and the `AIContent` types.
-- Added a static `AIJsonUtilities.HashDataToString` method.
-- Split `DataContent`, which handled both in-memory data and URIs to remote data, into `DataContent` (for the former) and `UriContent` (for the latter).
-- Renamed `DataContent.MediaTypeStartsWith` to `DataContent.HasTopLevelMediaType`, and changed semantics accordingly.
-
-## 9.3.0-preview.1.25114.11
-
-- Renamed `IChatClient.Complete{Streaming}Async` to `IChatClient.Get{Streaming}ResponseAsync`. This is to avoid confusion with "Complete" being about stopping an operation, as well as to avoid tying the methods to a particular implementation detail of how responses are generated. Along with this, renamed `ChatCompletion` to `ChatResponse`, `StreamingChatCompletionUpdate` to `ChatResponseUpdate`, `CompletionId` to `ResponseId`, `ToStreamingChatCompletionUpdates` to `ToChatResponseUpdates`, and `ToChatCompletion{Async}` to `ToChatResponse{Async}`.
-- Removed `IChatClient.Metadata` and `IEmbeddingGenerator.Metadata`. The `GetService` method may be used to retrieve `ChatClientMetadata` and `EmbeddingGeneratorMetadata`, respectively.
-- Added overloads of `Get{Streaming}ResponseAsync` that accept a single `ChatMessage` (in addition to the other overloads that accept a `List` or a `string`).
-- Added `ChatThreadId` properties to `ChatOptions`, `ChatResponse`, and `ChatResponseUpdate`. `IChatClient` can now be used in both stateful and stateless modes of operation, such as with agents that maintain server-side chat history.
-- Made `ChatOptions.ToolMode` nullable and added a `None` option.
-- Changed `UsageDetails`'s properties from `int?` to `long?`.
-- Removed `DataContent.ContainsData`; `DataContent.Data.HasValue` may be used instead.
-- Removed `ImageContent` and `AudioContent`; the base `DataContent` should now be used instead, with a new `DataContent.MediaTypeStartsWith` helper for routing based on media type.
-- Removed setters on `FunctionCallContent` and `FunctionResultContent` properties where the value is supplied to the constructor.
-- Removed `FunctionResultContent.Name`.
-- Augmented the base `AITool` with `Name`, `Description`, and `AdditionalProperties` virtual properties.
-- Added a `CodeInterpreterTool` for use with services that support server-side code execution.
-- Changed `AIFunction`'s schema representation to be for the whole function rather than per parameter, and exposed corresponding methods on `AIJsonUtilities`, e.g. `CreateFunctionJsonSchema`.
-- Removed `AIFunctionParameterMetadata` and `AIFunctionReturnParameterMetadata` classes and corresponding properties on `AIFunction` and `AIFunctionFactoryCreateOptions`, replacing them with a `MethodInfo?`. All relevant metadata, such as the JSON schema for the function, are moved to properties directly on `AIFunction`.
-- Renamed `AIFunctionFactoryCreateOptions` to `AIFunctionFactoryOptions` and made all its properties nullable.
-- Changed `AIJsonUtilities.DefaultOptions` to use relaxed JSON escaping.
-- Made `IEmbeddingGenerator` contravariant on `TInput`.
-
-## 9.1.0-preview.1.25064.3
-
-- Added `AdditionalPropertiesDictionary` and changed `UsageDetails.AdditionalProperties` to be named `AdditionalCounts` and to be of type `AdditionalPropertiesDictionary`.
-- Updated `FunctionCallingChatClient` to sum all `UsageDetails` token counts from all intermediate messages.
-- Fixed JSON schema generation for floating-point types.
-- Added `AddAIContentType` for enabling custom `AIContent`-derived types to participate in polymorphic serialization.
-
-## 9.0.1-preview.1.24570.5
-
-- Changed `IChatClient`/`IEmbeddingGenerator`.`GetService` to be non-generic.
-- Added `ToChatCompletion` / `ToChatCompletionUpdate` extension methods for `IEnumerable` / `IAsyncEnumerable`, respectively.
-- Added `ToStreamingChatCompletionUpdates` instance method to `ChatCompletion`.
-- Added `IncludeTypeInEnumSchemas`, `DisallowAdditionalProperties`, `RequireAllProperties`, and `TransformSchemaNode` options to `AIJsonSchemaCreateOptions`.
-- Fixed a Native AOT warning in `AIFunctionFactory.Create`.
-- Fixed a bug in `AIJsonUtilities` in the handling of Boolean schemas.
-- Improved the `ToString` override of `ChatMessage` and `StreamingChatCompletionUpdate` to include all `TextContent`, and of `ChatCompletion` to include all choices.
-- Added `DebuggerDisplay` attributes to `DataContent` and `GeneratedEmbeddings`.
-- Improved the documentation.
-
-## 9.0.0-preview.9.24556.5
-
-- Added a strongly-typed `ChatOptions.Seed` property.
-- Improved `AdditionalPropertiesDictionary` with a `TryAdd` method, a strongly-typed `Enumerator`, and debugger-related attributes for improved debuggability.
-- Fixed `AIJsonUtilities` schema generation for Boolean schemas.
-
-## 9.0.0-preview.9.24525.1
-
-- Lowered the required version of System.Text.Json to 8.0.5 when targeting net8.0 or older.
-- Annotated `FunctionCallContent.Exception` and `FunctionResultContent.Exception` as `[JsonIgnore]`, such that they're ignored when serializing instances with `JsonSerializer`. The corresponding constructors accepting an `Exception` were removed.
-- Annotated `ChatCompletion.Message` as `[JsonIgnore]`, such that it's ignored when serializing instances with `JsonSerializer`.
-- Added the `FunctionCallContent.CreateFromParsedArguments` method.
-- Added the `AdditionalPropertiesDictionary.TryGetValue` method.
-- Added the `StreamingChatCompletionUpdate.ModelId` property and removed the `AIContent.ModelId` property.
-- Renamed the `GenerateAsync` extension method on `IEmbeddingGenerator<,>` to `GenerateEmbeddingsAsync` and updated it to return `Embedding` rather than `GeneratedEmbeddings`.
-- Added `GenerateAndZipAsync` and `GenerateEmbeddingVectorAsync` extension methods for `IEmbeddingGenerator<,>`.
-- Added the `EmbeddingGeneratorOptions.Dimensions` property.
-- Added the `ChatOptions.TopK` property.
-- Normalized `null` inputs in `TextContent` to be empty strings.
-
-## 9.0.0-preview.9.24507.7
-
-- Initial Preview
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseExtensions.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseExtensions.cs
index 4cbebdb3776..0ba4a0e1ccc 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseExtensions.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/ChatCompletion/ChatResponseExtensions.cs
@@ -237,39 +237,37 @@ private static void CoalesceWebSearchToolCallContent(IList contents)
for (int i = 0; i < contents.Count; i++)
{
- if (contents[i] is WebSearchToolCallContent webSearchCall && !string.IsNullOrEmpty(webSearchCall.CallId))
+ if (contents[i] is WebSearchToolCallContent webSearchCall)
{
webSearchCallIndexById ??= new(StringComparer.Ordinal);
- if (webSearchCallIndexById.TryGetValue(webSearchCall.CallId!, out int existingIndex))
+ if (webSearchCallIndexById.TryGetValue(webSearchCall.CallId, out int existingIndex))
{
- // Merge data from the new item into the existing one.
+ // Create a new merged content rather than mutating the original content objects.
+ // The same content objects may be shared across multiple ToChatResponse calls
+ // (e.g. FunctionInvokingChatClient and the caller both call ToChatResponse on
+ // the same streaming updates), and in-place mutation would corrupt subsequent calls.
var existing = (WebSearchToolCallContent)contents[existingIndex];
- if (webSearchCall.Queries is { Count: > 0 })
+ if (!ReferenceEquals(existing, webSearchCall))
{
- if (existing.Queries is null)
+ contents[existingIndex] = new WebSearchToolCallContent(existing.CallId)
{
- existing.Queries = webSearchCall.Queries;
- }
- else
- {
- foreach (var query in webSearchCall.Queries)
- {
- existing.Queries.Add(query);
- }
- }
+ Queries = webSearchCall.Queries is not { Count: > 0 } ? existing.Queries :
+ existing.Queries is not { Count: > 0 } ? webSearchCall.Queries :
+ [.. existing.Queries, .. webSearchCall.Queries],
+ RawRepresentation = existing.RawRepresentation ?? webSearchCall.RawRepresentation,
+ AdditionalProperties = existing.AdditionalProperties ?? webSearchCall.AdditionalProperties,
+ Annotations = existing.Annotations ?? webSearchCall.Annotations,
+ };
}
- existing.RawRepresentation ??= webSearchCall.RawRepresentation;
- existing.AdditionalProperties ??= webSearchCall.AdditionalProperties;
-
contents[i] = null!;
hasRemovals = true;
}
else
{
- webSearchCallIndexById[webSearchCall.CallId!] = i;
+ webSearchCallIndexById[webSearchCall.CallId] = i;
}
}
}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/CompatibilitySuppressions.xml b/src/Libraries/Microsoft.Extensions.AI.Abstractions/CompatibilitySuppressions.xml
deleted file mode 100644
index 106feff432e..00000000000
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/CompatibilitySuppressions.xml
+++ /dev/null
@@ -1,808 +0,0 @@
-
-
-
- CP0001
- T:Microsoft.Extensions.AI.FunctionApprovalRequestContent
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.FunctionApprovalResponseContent
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.McpServerToolApprovalRequestContent
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.McpServerToolApprovalResponseContent
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.UserInputRequestContent
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.UserInputResponseContent
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.FunctionApprovalRequestContent
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.FunctionApprovalResponseContent
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.McpServerToolApprovalRequestContent
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.McpServerToolApprovalResponseContent
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.UserInputRequestContent
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.UserInputResponseContent
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.FunctionApprovalRequestContent
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.FunctionApprovalResponseContent
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.McpServerToolApprovalRequestContent
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.McpServerToolApprovalResponseContent
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.UserInputRequestContent
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.UserInputResponseContent
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.FunctionApprovalRequestContent
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.FunctionApprovalResponseContent
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.McpServerToolApprovalRequestContent
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.McpServerToolApprovalResponseContent
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.UserInputRequestContent
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.UserInputResponseContent
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.FunctionApprovalRequestContent
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.FunctionApprovalResponseContent
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.McpServerToolApprovalRequestContent
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.McpServerToolApprovalResponseContent
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.UserInputRequestContent
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.UserInputResponseContent
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolCallContent.#ctor
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolCallContent.set_CallId(System.String)
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolResultContent.#ctor
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolResultContent.set_CallId(System.String)
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.HostedMcpServerTool.get_AuthorizationToken
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.HostedMcpServerTool.set_AuthorizationToken(System.String)
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolCallContent.#ctor
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolCallContent.get_ImageId
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolCallContent.set_ImageId(System.String)
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolResultContent.#ctor
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolResultContent.get_ImageId
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolResultContent.set_ImageId(System.String)
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolCallContent.get_Arguments
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolCallContent.get_ToolName
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolResultContent.get_Output
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolResultContent.set_Output(System.Collections.Generic.IList{Microsoft.Extensions.AI.AIContent})
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolCallContent.#ctor
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolCallContent.set_CallId(System.String)
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolResultContent.#ctor
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolResultContent.set_CallId(System.String)
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.HostedMcpServerTool.get_AuthorizationToken
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.HostedMcpServerTool.set_AuthorizationToken(System.String)
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolCallContent.#ctor
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolCallContent.get_ImageId
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolCallContent.set_ImageId(System.String)
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolResultContent.#ctor
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolResultContent.get_ImageId
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolResultContent.set_ImageId(System.String)
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolCallContent.get_Arguments
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolCallContent.get_ToolName
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolResultContent.get_Output
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolResultContent.set_Output(System.Collections.Generic.IList{Microsoft.Extensions.AI.AIContent})
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolCallContent.#ctor
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolCallContent.set_CallId(System.String)
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolResultContent.#ctor
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolResultContent.set_CallId(System.String)
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.HostedMcpServerTool.get_AuthorizationToken
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.HostedMcpServerTool.set_AuthorizationToken(System.String)
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolCallContent.#ctor
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolCallContent.get_ImageId
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolCallContent.set_ImageId(System.String)
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolResultContent.#ctor
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolResultContent.get_ImageId
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolResultContent.set_ImageId(System.String)
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolCallContent.get_Arguments
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolCallContent.get_ToolName
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolResultContent.get_Output
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolResultContent.set_Output(System.Collections.Generic.IList{Microsoft.Extensions.AI.AIContent})
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolCallContent.#ctor
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolCallContent.set_CallId(System.String)
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolResultContent.#ctor
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolResultContent.set_CallId(System.String)
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.HostedMcpServerTool.get_AuthorizationToken
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.HostedMcpServerTool.set_AuthorizationToken(System.String)
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolCallContent.#ctor
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolCallContent.get_ImageId
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolCallContent.set_ImageId(System.String)
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolResultContent.#ctor
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolResultContent.get_ImageId
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolResultContent.set_ImageId(System.String)
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolCallContent.get_Arguments
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolCallContent.get_ToolName
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolResultContent.get_Output
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolResultContent.set_Output(System.Collections.Generic.IList{Microsoft.Extensions.AI.AIContent})
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolCallContent.#ctor
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolCallContent.set_CallId(System.String)
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolResultContent.#ctor
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.CodeInterpreterToolResultContent.set_CallId(System.String)
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.HostedMcpServerTool.get_AuthorizationToken
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.HostedMcpServerTool.set_AuthorizationToken(System.String)
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolCallContent.#ctor
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolCallContent.get_ImageId
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolCallContent.set_ImageId(System.String)
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolResultContent.#ctor
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolResultContent.get_ImageId
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.ImageGenerationToolResultContent.set_ImageId(System.String)
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolCallContent.get_Arguments
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolCallContent.get_ToolName
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolResultContent.get_Output
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0002
- M:Microsoft.Extensions.AI.McpServerToolResultContent.set_Output(System.Collections.Generic.IList{Microsoft.Extensions.AI.AIContent})
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.IToolReductionStrategy
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net10.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.IToolReductionStrategy
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- lib/net462/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.IToolReductionStrategy
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net8.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.IToolReductionStrategy
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/net9.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
- CP0001
- T:Microsoft.Extensions.AI.IToolReductionStrategy
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- lib/netstandard2.0/Microsoft.Extensions.AI.Abstractions.dll
- true
-
-
\ No newline at end of file
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/UriContent.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/UriContent.cs
index 37acd121960..d4ab07b1eec 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/UriContent.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Contents/UriContent.cs
@@ -3,6 +3,7 @@
using System;
using System.Diagnostics;
+using System.Net.Mime;
using System.Text.Json.Serialization;
using Microsoft.Shared.Diagnostics;
@@ -18,6 +19,9 @@ namespace Microsoft.Extensions.AI;
[DebuggerDisplay("{DebuggerDisplay,nq}")]
public class UriContent : AIContent
{
+ /// The default media type for unknown file extensions.
+ private const string DefaultMediaType = "application/octet-stream";
+
/// The URI represented.
private Uri _uri;
@@ -26,37 +30,35 @@ public class UriContent : AIContent
/// Initializes a new instance of the class.
/// The URI to the represented content.
- /// The media type (also known as MIME type) represented by the content.
+ ///
+ /// The media type (also known as MIME type) represented by the content. If not provided,
+ /// it will be inferred from the file extension of the URI. If it cannot be inferred,
+ /// "application/octet-stream" is used.
+ ///
/// is .
- /// is .
/// is an invalid media type.
/// is an invalid URL.
- ///
- /// A media type must be specified, so that consumers know what to do with the content.
- /// If an exact media type is not known, but the category (e.g. image) is known, a wildcard
- /// may be used (e.g. "image/*").
- ///
- public UriContent(string uri, string mediaType)
+ public UriContent(string uri, string? mediaType = null)
: this(new Uri(Throw.IfNull(uri)), mediaType)
{
}
/// Initializes a new instance of the class.
/// The URI to the represented content.
- /// The media type (also known as MIME type) represented by the content.
+ ///
+ /// The media type (also known as MIME type) represented by the content. If not provided,
+ /// it will be inferred from the file extension of the URI. If it cannot be inferred,
+ /// "application/octet-stream" is used.
+ ///
/// is .
- /// is .
/// is an invalid media type.
- ///
- /// A media type must be specified, so that consumers know what to do with the content.
- /// If an exact media type is not known, but the category (e.g. image) is known, a wildcard
- /// may be used (e.g. "image/*").
- ///
[JsonConstructor]
- public UriContent(Uri uri, string mediaType)
+ public UriContent(Uri uri, string? mediaType = null)
{
_uri = Throw.IfNull(uri);
- _mediaType = DataUriParser.ThrowIfInvalidMediaType(mediaType);
+ _mediaType = mediaType is not null
+ ? DataUriParser.ThrowIfInvalidMediaType(mediaType)
+ : InferMediaType(uri);
}
/// Gets or sets the for this content.
@@ -90,4 +92,25 @@ public string MediaType
/// Gets a string representing this instance to display in the debugger.
[DebuggerBrowsable(DebuggerBrowsableState.Never)]
private string DebuggerDisplay => $"Uri = {_uri}";
+
+ /// Infers the media type from the URI's file extension.
+ private static string InferMediaType(Uri uri)
+ {
+ string path;
+ if (uri.IsAbsoluteUri)
+ {
+ path = uri.AbsolutePath;
+ }
+ else
+ {
+ path = uri.OriginalString;
+ int i = path.AsSpan().IndexOfAny('?', '#');
+ if (i >= 0)
+ {
+ path = path.Substring(0, i);
+ }
+ }
+
+ return MediaTypeMap.GetMediaType(path) ?? DefaultMediaType;
+ }
}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Files/HostedFileDownloadStream.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Files/HostedFileDownloadStream.cs
index 91d22450c4f..f72f7eb3355 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Files/HostedFileDownloadStream.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Files/HostedFileDownloadStream.cs
@@ -51,6 +51,40 @@ protected HostedFileDownloadStream()
///
public virtual string? FileName => null;
+ ///
+ public override bool CanWrite => false;
+
+ ///
+ public override void SetLength(long value) => throw new NotSupportedException();
+
+ ///
+ public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback? callback, object? state) =>
+ throw new NotSupportedException();
+
+ ///
+ public override void EndWrite(IAsyncResult asyncResult) => throw new NotSupportedException();
+
+ ///
+ public override void WriteByte(byte value) => throw new NotSupportedException();
+
+ ///
+ public override void Write(byte[] buffer, int offset, int count) => throw new NotSupportedException();
+
+#if NET
+ ///
+ public override void Write(ReadOnlySpan buffer) => throw new NotSupportedException();
+#endif
+
+ ///
+ public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) =>
+ throw new NotSupportedException();
+
+#if NET
+ ///
+ public override ValueTask WriteAsync(ReadOnlyMemory buffer, CancellationToken cancellationToken = default) =>
+ throw new NotSupportedException();
+#endif
+
///
/// Reads the entire stream content from its current position and returns it as a .
///
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/AIFunctionDeclaration.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/AIFunctionDeclaration.cs
index 203045f92b2..ef8351b9eae 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/AIFunctionDeclaration.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/AIFunctionDeclaration.cs
@@ -40,6 +40,10 @@ protected AIFunctionDeclaration()
/// The metadata present in the schema document plays an important role in guiding AI function invocation.
///
///
+ /// When an is created via , this schema is automatically derived from the
+ /// method's parameters using the configured and .
+ ///
+ ///
/// When no schema is specified, consuming chat clients should assume the "{}" or "true" schema, indicating that any JSON input is admissible.
///
///
@@ -47,8 +51,18 @@ protected AIFunctionDeclaration()
/// Gets a JSON Schema describing the function's return value.
///
- /// A typically reflects a function that doesn't specify a return schema
- /// or a function that returns , , or .
+ ///
+ /// When an is created via , this schema is automatically derived from the
+ /// method's return type using the configured and .
+ /// For methods returning or , the schema is based on the
+ /// unwrapped result type. Return schema generation can be excluded by setting
+ /// to .
+ ///
+ ///
+ /// A value typically reflects a function that doesn't specify a return schema,
+ /// a function that returns , , or ,
+ /// or a function for which was set to .
+ ///
///
public virtual JsonElement? ReturnJsonSchema => null;
}
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/AIFunctionFactory.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/AIFunctionFactory.cs
index 9883531b438..9398805da9a 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/AIFunctionFactory.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/AIFunctionFactory.cs
@@ -27,6 +27,19 @@
namespace Microsoft.Extensions.AI;
/// Provides factory methods for creating commonly-used implementations of .
+///
+///
+/// The class creates instances that wrap .NET methods
+/// (specified as or ). As part of this process, JSON schemas are
+/// automatically derived for both the function's input parameters (exposed via )
+/// and, by default, the function's return type (exposed via ).
+/// These schemas are produced using the and
+/// , and enable AI services to understand and
+/// interact with the function. Return value serialization and schema derivation behavior can be customized
+/// via and ,
+/// respectively.
+///
+///
/// Invoke .NET functions using an AI model.
public static partial class AIFunctionFactory
{
@@ -98,7 +111,14 @@ public static partial class AIFunctionFactory
/// special-cased and are not serialized: the created function returns the original instance(s) directly to enable
/// callers (such as an IChatClient) to perform type tests and implement specialized handling. If
/// is supplied, that delegate governs the behavior instead.
- /// Handling of return values can be overridden via .
+ ///
+ ///
+ /// In addition to the parameter schema, a JSON schema is also derived from the method's return type and exposed via the
+ /// returned 's . For methods returning
+ /// , , or , no return schema is produced (the property is ).
+ /// For methods returning or , the schema is derived from the
+ /// unwrapped result type. Return schema generation can be excluded via ,
+ /// and its generation is governed by 's .
///
///
/// is .
@@ -169,6 +189,11 @@ public static AIFunction Create(Delegate method, AIFunctionFactoryOptions? optio
/// derived type of , or any type assignable from are not serialized;
/// they are returned as-is to facilitate specialized handling.
///
+ ///
+ /// A JSON schema is also derived from the method's return type and exposed via .
+ /// For methods returning , , or , no return schema is produced.
+ /// For methods returning or , the schema is derived from the unwrapped result type.
+ ///
///
/// is .
/// A parameter to is not serializable.
@@ -255,6 +280,14 @@ public static AIFunction Create(Delegate method, string? name = null, string? de
/// any type assignable from are not serialized and are instead returned directly.
/// Handling of return values can be overridden via .
///
+ ///
+ /// In addition to the parameter schema, a JSON schema is also derived from the method's return type and exposed via the
+ /// returned 's . For methods returning
+ /// , , or , no return schema is produced (the property is ).
+ /// For methods returning or , the schema is derived from the
+ /// unwrapped result type. Return schema generation can be excluded via ,
+ /// and its generation is governed by 's .
+ ///
///
/// is .
/// represents an instance method but is null.
@@ -334,6 +367,11 @@ public static AIFunction Create(MethodInfo method, object? target, AIFunctionFac
/// derived type of , or any type assignable from are returned
/// without serialization to enable specialized handling.
///
+ ///
+ /// A JSON schema is also derived from the method's return type and exposed via .
+ /// For methods returning , , or , no return schema is produced.
+ /// For methods returning or , the schema is derived from the unwrapped result type.
+ ///
///
/// is .
/// represents an instance method but is null.
@@ -433,6 +471,14 @@ public static AIFunction Create(MethodInfo method, object? target, string? name
/// assignable from are returned directly without serialization.
/// Handling of return values can be overridden via .
///
+ ///
+ /// In addition to the parameter schema, a JSON schema is also derived from the method's return type and exposed via the
+ /// returned 's . For methods returning
+ /// , , or , no return schema is produced (the property is ).
+ /// For methods returning or , the schema is derived from the
+ /// unwrapped result type. Return schema generation can be excluded via ,
+ /// and its generation is governed by 's .
+ ///
///
/// is .
/// is .
diff --git a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/AIFunctionFactoryOptions.cs b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/AIFunctionFactoryOptions.cs
index 5caef21900c..bcb552bf242 100644
--- a/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/AIFunctionFactoryOptions.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.Abstractions/Functions/AIFunctionFactoryOptions.cs
@@ -30,10 +30,13 @@ public AIFunctionFactoryOptions()
public JsonSerializerOptions? SerializerOptions { get; set; }
///
- /// Gets or sets the governing the generation of JSON schemas for the function.
+ /// Gets or sets the governing the generation of JSON schemas for
+ /// the function's input parameters and return type.
///
///
/// If no value has been specified, the <see cref="AIJsonSchemaCreateOptions.Default"/> instance will be used.
+ /// This setting affects both the (input parameters) and
+ /// the (return type).
///
public AIJsonSchemaCreateOptions? JsonSchemaCreateOptions { get; set; }
@@ -107,14 +110,16 @@ public AIFunctionFactoryOptions()
public Func