From 08f741c41ddae4d6ca0ad8e849cb5b727554d1f2 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Fri, 16 Aug 2024 11:41:11 +0200 Subject: [PATCH 01/79] pp --- Dockerfile | 6 ++-- src/Auth/Azure.DataApiBuilder.Auth.csproj | 2 +- src/Cli.Tests/Cli.Tests.csproj | 2 +- src/Cli/Cli.csproj | 2 +- src/Config/Azure.DataApiBuilder.Config.csproj | 2 +- src/Core/Azure.DataApiBuilder.Core.csproj | 2 +- src/Directory.Packages.props | 4 +-- .../Azure.DataApiBuilder.Product.csproj | 2 +- ...taApiBuilder.Service.GraphQLBuilder.csproj | 2 +- .../Azure.DataApiBuilder.Service.Tests.csproj | 2 +- src/Service/.config/dotnet-tools.json | 5 ++++ .../Azure.DataApiBuilder.Service.csproj | 7 +++-- .../PublishProfiles/crweudev01.pubxml | 30 +++++++++++++++++++ 13 files changed, 53 insertions(+), 15 deletions(-) create mode 100644 src/Service/.config/dotnet-tools.json create mode 100644 src/Service/Properties/PublishProfiles/crweudev01.pubxml diff --git a/Dockerfile b/Dockerfile index 5dbaa35a17..537ea4c78c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,13 +1,13 @@ # Version values referenced from https://hub.docker.com/_/microsoft-dotnet-aspnet -FROM mcr.microsoft.com/dotnet/sdk:6.0-cbl-mariner2.0. 
AS build +FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build WORKDIR /src COPY [".", "./"] -RUN dotnet build "./src/Service/Azure.DataApiBuilder.Service.csproj" -c Docker -o /out -r linux-x64 +RUN dotnet build "./src/Service/Azure.DataApiBuilder.Service.csproj" -f net8.0 -o /out -r linux-x64 --self-contained -FROM mcr.microsoft.com/dotnet/aspnet:6.0-cbl-mariner2.0 AS runtime +FROM mcr.microsoft.com/dotnet/aspnet:8.0 AS runtime COPY --from=build /out /App WORKDIR /App diff --git a/src/Auth/Azure.DataApiBuilder.Auth.csproj b/src/Auth/Azure.DataApiBuilder.Auth.csproj index 9f63cd3ed6..1ee2df57b8 100644 --- a/src/Auth/Azure.DataApiBuilder.Auth.csproj +++ b/src/Auth/Azure.DataApiBuilder.Auth.csproj @@ -1,7 +1,7 @@ - net8.0;net6.0 + net8.0 enable enable $(BaseOutputPath)\engine diff --git a/src/Cli.Tests/Cli.Tests.csproj b/src/Cli.Tests/Cli.Tests.csproj index 46192e3802..a8081f5250 100644 --- a/src/Cli.Tests/Cli.Tests.csproj +++ b/src/Cli.Tests/Cli.Tests.csproj @@ -1,7 +1,7 @@ - net8.0;net6.0 + net8.0 enable enable false diff --git a/src/Cli/Cli.csproj b/src/Cli/Cli.csproj index fec0cc9786..5321f52356 100644 --- a/src/Cli/Cli.csproj +++ b/src/Cli/Cli.csproj @@ -2,7 +2,7 @@ Exe - net8.0;net6.0 + net8.0 Cli enable enable diff --git a/src/Config/Azure.DataApiBuilder.Config.csproj b/src/Config/Azure.DataApiBuilder.Config.csproj index 501f10bc22..cf739f24ec 100644 --- a/src/Config/Azure.DataApiBuilder.Config.csproj +++ b/src/Config/Azure.DataApiBuilder.Config.csproj @@ -1,7 +1,7 @@ - net8.0;net6.0 + net8.0 enable enable $(BaseOutputPath)\engine diff --git a/src/Core/Azure.DataApiBuilder.Core.csproj b/src/Core/Azure.DataApiBuilder.Core.csproj index d8e314d78d..e2cc111979 100644 --- a/src/Core/Azure.DataApiBuilder.Core.csproj +++ b/src/Core/Azure.DataApiBuilder.Core.csproj @@ -1,7 +1,7 @@ - net8.0;net6.0 + net8.0 enable enable true diff --git a/src/Directory.Packages.props b/src/Directory.Packages.props index b33dd9cc9e..ef2add3302 100644 --- a/src/Directory.Packages.props +++ 
b/src/Directory.Packages.props @@ -20,7 +20,7 @@ - + @@ -66,4 +66,4 @@ - + \ No newline at end of file diff --git a/src/Product/Azure.DataApiBuilder.Product.csproj b/src/Product/Azure.DataApiBuilder.Product.csproj index f0d0e70927..6045200487 100644 --- a/src/Product/Azure.DataApiBuilder.Product.csproj +++ b/src/Product/Azure.DataApiBuilder.Product.csproj @@ -1,7 +1,7 @@ - net8.0;net6.0 + net8.0 enable enable $(BaseOutputPath)\engine diff --git a/src/Service.GraphQLBuilder/Azure.DataApiBuilder.Service.GraphQLBuilder.csproj b/src/Service.GraphQLBuilder/Azure.DataApiBuilder.Service.GraphQLBuilder.csproj index 649e46c550..87eab3c75b 100644 --- a/src/Service.GraphQLBuilder/Azure.DataApiBuilder.Service.GraphQLBuilder.csproj +++ b/src/Service.GraphQLBuilder/Azure.DataApiBuilder.Service.GraphQLBuilder.csproj @@ -1,7 +1,7 @@ - net8.0;net6.0 + net8.0 enable enable $(BaseOutputPath)\engine diff --git a/src/Service.Tests/Azure.DataApiBuilder.Service.Tests.csproj b/src/Service.Tests/Azure.DataApiBuilder.Service.Tests.csproj index 112cb64714..26a2204725 100644 --- a/src/Service.Tests/Azure.DataApiBuilder.Service.Tests.csproj +++ b/src/Service.Tests/Azure.DataApiBuilder.Service.Tests.csproj @@ -1,7 +1,7 @@ - net8.0;net6.0 + net8.0 false disable $(BaseOutputPath)\tests diff --git a/src/Service/.config/dotnet-tools.json b/src/Service/.config/dotnet-tools.json new file mode 100644 index 0000000000..b0e38abdac --- /dev/null +++ b/src/Service/.config/dotnet-tools.json @@ -0,0 +1,5 @@ +{ + "version": 1, + "isRoot": true, + "tools": {} +} \ No newline at end of file diff --git a/src/Service/Azure.DataApiBuilder.Service.csproj b/src/Service/Azure.DataApiBuilder.Service.csproj index 710048b9fa..4d32c53912 100644 --- a/src/Service/Azure.DataApiBuilder.Service.csproj +++ b/src/Service/Azure.DataApiBuilder.Service.csproj @@ -1,7 +1,7 @@ - net8.0;net6.0 + net8.0 Debug;Release;Docker $(BaseOutputPath)\engine win-x64;linux-x64;osx-x64 @@ -36,6 +36,10 @@ True + + True + + 
$(CopyToOutputDirectoryAction) @@ -45,7 +49,6 @@ - diff --git a/src/Service/Properties/PublishProfiles/crweudev01.pubxml b/src/Service/Properties/PublishProfiles/crweudev01.pubxml new file mode 100644 index 0000000000..a785ab2cac --- /dev/null +++ b/src/Service/Properties/PublishProfiles/crweudev01.pubxml @@ -0,0 +1,30 @@ + + + + + Container + NetSdk + /subscriptions/d81425a2-a8ea-4ea4-9bdd-ca9db5cdfbc6/resourceGroups/rg-weu-api/providers/Microsoft.ContainerRegistry/registries/crweudev01 + crweudev01 + rg-weu-api + sub-mdm-weu-dev + crweudev01.azurecr.io + + latest + ContainerRegistry + Release + Any CPU + linux-x64 + 208fc26c-a21c-4c96-98ee-f10fdaeac508 + <_TargetId>NetSdkAzureContainerRegistry + + true + false + net8.0 + true + true + true + + \ No newline at end of file From b4239d0d5b09ac747a02138dc0e36a7dd228ced7 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Tue, 20 Aug 2024 22:18:09 +0200 Subject: [PATCH 02/79] Preserve original roles claim and allow session context updates Added a new constant `ORIGINAL_ROLE_CLAIM_TYPE` in `AuthenticationOptions.cs` to store the original roles claim type. Modified `AuthorizationResolver` to preserve the original 'roles' claim by adding it to the `resolvedClaims` dictionary under the new key. Changed `MsSqlQueryExecutor` to set session context parameters with `@read_only = 0` to allow modifications. 
--- src/Config/ObjectModel/AuthenticationOptions.cs | 1 + src/Core/Authorization/AuthorizationResolver.cs | 7 ++++++- src/Core/Resolvers/MsSqlQueryExecutor.cs | 4 ++-- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/src/Config/ObjectModel/AuthenticationOptions.cs b/src/Config/ObjectModel/AuthenticationOptions.cs index 189540fbe6..6750d6e807 100644 --- a/src/Config/ObjectModel/AuthenticationOptions.cs +++ b/src/Config/ObjectModel/AuthenticationOptions.cs @@ -17,6 +17,7 @@ public record AuthenticationOptions(string Provider = nameof(EasyAuthType.Static public const string CLIENT_PRINCIPAL_HEADER = "X-MS-CLIENT-PRINCIPAL"; public const string NAME_CLAIM_TYPE = "name"; public const string ROLE_CLAIM_TYPE = "roles"; + public const string ORIGINAL_ROLE_CLAIM_TYPE = "original_roles"; /// /// Returns whether the configured Provider matches an diff --git a/src/Core/Authorization/AuthorizationResolver.cs b/src/Core/Authorization/AuthorizationResolver.cs index 64785de703..f368eed5f5 100644 --- a/src/Core/Authorization/AuthorizationResolver.cs +++ b/src/Core/Authorization/AuthorizationResolver.cs @@ -604,9 +604,14 @@ public static Dictionary> GetAllAuthenticatedUserClaims(Http // into a list and storing that in resolvedClaims using the claimType as the key. foreach (Claim claim in identity.Claims) { - // 'roles' claim has already been processed. + // 'roles' claim has already been processed. 
But we preserve the original 'roles' claim if (claim.Type.Equals(AuthenticationOptions.ROLE_CLAIM_TYPE)) { + if(!resolvedClaims.TryAdd(AuthenticationOptions.ORIGINAL_ROLE_CLAIM_TYPE, new List() { claim })) + { + resolvedClaims[AuthenticationOptions.ORIGINAL_ROLE_CLAIM_TYPE].Add(claim); + } + continue; } diff --git a/src/Core/Resolvers/MsSqlQueryExecutor.cs b/src/Core/Resolvers/MsSqlQueryExecutor.cs index 96f82cfa25..389256049f 100644 --- a/src/Core/Resolvers/MsSqlQueryExecutor.cs +++ b/src/Core/Resolvers/MsSqlQueryExecutor.cs @@ -217,9 +217,9 @@ public override string GetSessionParamsQuery(HttpContext? httpContext, IDictiona foreach ((string claimType, string claimValue) in sessionParams) { string paramName = $"{SESSION_PARAM_NAME}{counter.Next()}"; - parameters.Add(paramName, new(claimValue)); + parameters.Add(paramName, new(claimValue)); // Append statement to set read only param value - can be set only once for a connection. - string statementToSetReadOnlyParam = "EXEC sp_set_session_context " + $"'{claimType}', " + paramName + ", @read_only = 1;"; + string statementToSetReadOnlyParam = "EXEC sp_set_session_context " + $"'{claimType}', " + paramName + ", @read_only = 0;"; sessionMapQuery = sessionMapQuery.Append(statementToSetReadOnlyParam); } From 132a46eb42cc98e2affcc005bd80a9116f913c33 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Tue, 20 Aug 2024 22:38:54 +0200 Subject: [PATCH 03/79] Revert "pp" This reverts commit 08f741c41ddae4d6ca0ad8e849cb5b727554d1f2. 
--- Dockerfile | 6 ++-- src/Auth/Azure.DataApiBuilder.Auth.csproj | 2 +- src/Cli.Tests/Cli.Tests.csproj | 2 +- src/Cli/Cli.csproj | 2 +- src/Config/Azure.DataApiBuilder.Config.csproj | 2 +- src/Core/Azure.DataApiBuilder.Core.csproj | 2 +- src/Directory.Packages.props | 4 +-- .../Azure.DataApiBuilder.Product.csproj | 2 +- ...taApiBuilder.Service.GraphQLBuilder.csproj | 2 +- .../Azure.DataApiBuilder.Service.Tests.csproj | 2 +- src/Service/.config/dotnet-tools.json | 5 ---- .../Azure.DataApiBuilder.Service.csproj | 7 ++--- .../PublishProfiles/crweudev01.pubxml | 30 ------------------- 13 files changed, 15 insertions(+), 53 deletions(-) delete mode 100644 src/Service/.config/dotnet-tools.json delete mode 100644 src/Service/Properties/PublishProfiles/crweudev01.pubxml diff --git a/Dockerfile b/Dockerfile index 537ea4c78c..5dbaa35a17 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,13 +1,13 @@ # Version values referenced from https://hub.docker.com/_/microsoft-dotnet-aspnet -FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build +FROM mcr.microsoft.com/dotnet/sdk:6.0-cbl-mariner2.0. 
AS build WORKDIR /src COPY [".", "./"] -RUN dotnet build "./src/Service/Azure.DataApiBuilder.Service.csproj" -f net8.0 -o /out -r linux-x64 --self-contained +RUN dotnet build "./src/Service/Azure.DataApiBuilder.Service.csproj" -c Docker -o /out -r linux-x64 -FROM mcr.microsoft.com/dotnet/aspnet:8.0 AS runtime +FROM mcr.microsoft.com/dotnet/aspnet:6.0-cbl-mariner2.0 AS runtime COPY --from=build /out /App WORKDIR /App diff --git a/src/Auth/Azure.DataApiBuilder.Auth.csproj b/src/Auth/Azure.DataApiBuilder.Auth.csproj index 1ee2df57b8..9f63cd3ed6 100644 --- a/src/Auth/Azure.DataApiBuilder.Auth.csproj +++ b/src/Auth/Azure.DataApiBuilder.Auth.csproj @@ -1,7 +1,7 @@ - net8.0 + net8.0;net6.0 enable enable $(BaseOutputPath)\engine diff --git a/src/Cli.Tests/Cli.Tests.csproj b/src/Cli.Tests/Cli.Tests.csproj index a8081f5250..46192e3802 100644 --- a/src/Cli.Tests/Cli.Tests.csproj +++ b/src/Cli.Tests/Cli.Tests.csproj @@ -1,7 +1,7 @@ - net8.0 + net8.0;net6.0 enable enable false diff --git a/src/Cli/Cli.csproj b/src/Cli/Cli.csproj index 5321f52356..fec0cc9786 100644 --- a/src/Cli/Cli.csproj +++ b/src/Cli/Cli.csproj @@ -2,7 +2,7 @@ Exe - net8.0 + net8.0;net6.0 Cli enable enable diff --git a/src/Config/Azure.DataApiBuilder.Config.csproj b/src/Config/Azure.DataApiBuilder.Config.csproj index cf739f24ec..501f10bc22 100644 --- a/src/Config/Azure.DataApiBuilder.Config.csproj +++ b/src/Config/Azure.DataApiBuilder.Config.csproj @@ -1,7 +1,7 @@ - net8.0 + net8.0;net6.0 enable enable $(BaseOutputPath)\engine diff --git a/src/Core/Azure.DataApiBuilder.Core.csproj b/src/Core/Azure.DataApiBuilder.Core.csproj index e2cc111979..d8e314d78d 100644 --- a/src/Core/Azure.DataApiBuilder.Core.csproj +++ b/src/Core/Azure.DataApiBuilder.Core.csproj @@ -1,7 +1,7 @@ - net8.0 + net8.0;net6.0 enable enable true diff --git a/src/Directory.Packages.props b/src/Directory.Packages.props index ef2add3302..b33dd9cc9e 100644 --- a/src/Directory.Packages.props +++ b/src/Directory.Packages.props @@ -20,7 +20,7 @@ - + 
@@ -66,4 +66,4 @@ - \ No newline at end of file + diff --git a/src/Product/Azure.DataApiBuilder.Product.csproj b/src/Product/Azure.DataApiBuilder.Product.csproj index 6045200487..f0d0e70927 100644 --- a/src/Product/Azure.DataApiBuilder.Product.csproj +++ b/src/Product/Azure.DataApiBuilder.Product.csproj @@ -1,7 +1,7 @@ - net8.0 + net8.0;net6.0 enable enable $(BaseOutputPath)\engine diff --git a/src/Service.GraphQLBuilder/Azure.DataApiBuilder.Service.GraphQLBuilder.csproj b/src/Service.GraphQLBuilder/Azure.DataApiBuilder.Service.GraphQLBuilder.csproj index 87eab3c75b..649e46c550 100644 --- a/src/Service.GraphQLBuilder/Azure.DataApiBuilder.Service.GraphQLBuilder.csproj +++ b/src/Service.GraphQLBuilder/Azure.DataApiBuilder.Service.GraphQLBuilder.csproj @@ -1,7 +1,7 @@ - net8.0 + net8.0;net6.0 enable enable $(BaseOutputPath)\engine diff --git a/src/Service.Tests/Azure.DataApiBuilder.Service.Tests.csproj b/src/Service.Tests/Azure.DataApiBuilder.Service.Tests.csproj index 26a2204725..112cb64714 100644 --- a/src/Service.Tests/Azure.DataApiBuilder.Service.Tests.csproj +++ b/src/Service.Tests/Azure.DataApiBuilder.Service.Tests.csproj @@ -1,7 +1,7 @@ - net8.0 + net8.0;net6.0 false disable $(BaseOutputPath)\tests diff --git a/src/Service/.config/dotnet-tools.json b/src/Service/.config/dotnet-tools.json deleted file mode 100644 index b0e38abdac..0000000000 --- a/src/Service/.config/dotnet-tools.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "version": 1, - "isRoot": true, - "tools": {} -} \ No newline at end of file diff --git a/src/Service/Azure.DataApiBuilder.Service.csproj b/src/Service/Azure.DataApiBuilder.Service.csproj index 4d32c53912..710048b9fa 100644 --- a/src/Service/Azure.DataApiBuilder.Service.csproj +++ b/src/Service/Azure.DataApiBuilder.Service.csproj @@ -1,7 +1,7 @@ - net8.0 + net8.0;net6.0 Debug;Release;Docker $(BaseOutputPath)\engine win-x64;linux-x64;osx-x64 @@ -36,10 +36,6 @@ True - - True - - $(CopyToOutputDirectoryAction) @@ -49,6 +45,7 @@ + diff --git 
a/src/Service/Properties/PublishProfiles/crweudev01.pubxml b/src/Service/Properties/PublishProfiles/crweudev01.pubxml deleted file mode 100644 index a785ab2cac..0000000000 --- a/src/Service/Properties/PublishProfiles/crweudev01.pubxml +++ /dev/null @@ -1,30 +0,0 @@ - - - - - Container - NetSdk - /subscriptions/d81425a2-a8ea-4ea4-9bdd-ca9db5cdfbc6/resourceGroups/rg-weu-api/providers/Microsoft.ContainerRegistry/registries/crweudev01 - crweudev01 - rg-weu-api - sub-mdm-weu-dev - crweudev01.azurecr.io - - latest - ContainerRegistry - Release - Any CPU - linux-x64 - 208fc26c-a21c-4c96-98ee-f10fdaeac508 - <_TargetId>NetSdkAzureContainerRegistry - - true - false - net8.0 - true - true - true - - \ No newline at end of file From 19e9dbccde9f994665f638d80477e45478ff706d Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Wed, 21 Aug 2024 09:11:40 +0200 Subject: [PATCH 04/79] Adjust the relevant tests --- .../Authorization/AuthorizationResolverUnitTests.cs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs b/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs index 3c7c31a8ca..733ec15b24 100644 --- a/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs +++ b/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs @@ -1293,7 +1293,8 @@ public void UniqueClaimsResolvedForDbPolicy_SessionCtx_Usage() new("sub", "Aa_0RISCzzZ-abC1De2fGHIjKLMNo123pQ4rStUVWXY"), new("oid", "55296aad-ea7f-4c44-9a4c-bb1e8d43a005"), new(AuthenticationOptions.ROLE_CLAIM_TYPE, TEST_ROLE), - new(AuthenticationOptions.ROLE_CLAIM_TYPE, "ROLE2") + new(AuthenticationOptions.ROLE_CLAIM_TYPE, "ROLE2"), + new(AuthenticationOptions.ROLE_CLAIM_TYPE, "ROLE3") }; //Add identity object to the Mock context object. 
@@ -1315,6 +1316,7 @@ public void UniqueClaimsResolvedForDbPolicy_SessionCtx_Usage() Assert.AreEqual(expected: "Aa_0RISCzzZ-abC1De2fGHIjKLMNo123pQ4rStUVWXY", actual: claimsInRequestContext["sub"], message: "Expected the sub claim to be present."); Assert.AreEqual(expected: "55296aad-ea7f-4c44-9a4c-bb1e8d43a005", actual: claimsInRequestContext["oid"], message: "Expected the oid claim to be present."); Assert.AreEqual(claimsInRequestContext[AuthenticationOptions.ROLE_CLAIM_TYPE], actual: TEST_ROLE, message: "The roles claim should have the value:" + TEST_ROLE); + Assert.AreEqual(expected: "[\"" + TEST_ROLE + "\",\"ROLE2\",\"ROLE3\"]", actual: claimsInRequestContext[AuthenticationOptions.ORIGINAL_ROLE_CLAIM_TYPE], message: "Original roles should be preserved in a new context"); } /// @@ -1365,7 +1367,7 @@ public void ValidateUnauthenticatedUserClaimsAreNotResolvedWhenProcessingUserCla Dictionary resolvedClaims = AuthorizationResolver.GetProcessedUserClaims(context.Object); // Assert - Assert.AreEqual(expected: authenticatedUserclaims.Count, actual: resolvedClaims.Count, message: "Only two claims should be present."); + Assert.AreEqual(expected: authenticatedUserclaims.Count + 1, actual: resolvedClaims.Count, message: "Only " + (authenticatedUserclaims.Count + 1) + " claims should be present."); Assert.AreEqual(expected: "openid", actual: resolvedClaims["scp"], message: "Unexpected scp claim returned."); bool didResolveUnauthenticatedRoleClaim = resolvedClaims[AuthenticationOptions.ROLE_CLAIM_TYPE] == "Don't_Parse_This_Role"; From d10be7a25eff097104f19fa88815f93b23b6f2cb Mon Sep 17 00:00:00 2001 From: M4Al Date: Fri, 30 Aug 2024 09:50:44 +0200 Subject: [PATCH 05/79] Update src/Core/Resolvers/MsSqlQueryExecutor.cs remove trailing space Co-authored-by: Aniruddh Munde --- src/Core/Resolvers/MsSqlQueryExecutor.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Core/Resolvers/MsSqlQueryExecutor.cs b/src/Core/Resolvers/MsSqlQueryExecutor.cs index 
389256049f..1ac61f7dfb 100644 --- a/src/Core/Resolvers/MsSqlQueryExecutor.cs +++ b/src/Core/Resolvers/MsSqlQueryExecutor.cs @@ -217,7 +217,7 @@ public override string GetSessionParamsQuery(HttpContext? httpContext, IDictiona foreach ((string claimType, string claimValue) in sessionParams) { string paramName = $"{SESSION_PARAM_NAME}{counter.Next()}"; - parameters.Add(paramName, new(claimValue)); + parameters.Add(paramName, new(claimValue)); // Append statement to set read only param value - can be set only once for a connection. string statementToSetReadOnlyParam = "EXEC sp_set_session_context " + $"'{claimType}', " + paramName + ", @read_only = 0;"; sessionMapQuery = sessionMapQuery.Append(statementToSetReadOnlyParam); From 4c5a5b869ce8c72fedea06c370ec8885b317b3a2 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Tue, 3 Sep 2024 23:35:39 +0200 Subject: [PATCH 06/79] Add recordcount --- src/Core/Resolvers/DWSqlQueryBuilder.cs | 44 ++++++++++++++++++++++-- src/Core/Resolvers/SqlResponseHelpers.cs | 27 +++++++++++++-- 2 files changed, 66 insertions(+), 5 deletions(-) diff --git a/src/Core/Resolvers/DWSqlQueryBuilder.cs b/src/Core/Resolvers/DWSqlQueryBuilder.cs index e1768a97df..780c477364 100644 --- a/src/Core/Resolvers/DWSqlQueryBuilder.cs +++ b/src/Core/Resolvers/DWSqlQueryBuilder.cs @@ -46,8 +46,21 @@ public string Build(SqlQueryStructure structure) /// private string BuildAsJson(SqlQueryStructure structure, bool subQueryStructure = false) { + string subQueryAlias = "CountQuery"; + + string countSql = $" CROSS JOIN ( {BuildSqlCountQuery(structure)} ) {subQueryAlias}"; + + //Add a new column to the structure + structure.Columns.Add(new LabelledColumn("", subQueryAlias, "RecordCount", "RecordCount", subQueryAlias)); + + //Add a subquery 'a' ti the structure + structure.JoinQueries.Add(subQueryAlias, structure); + string columns = GenerateColumnsAsJson(structure, subQueryStructure); - string fromSql = $"{BuildSqlQuery(structure)}"; + + 
structure.JoinQueries.Remove(subQueryAlias); + + string fromSql = $"{BuildSqlQuery(structure, countSql)}"; string query = $"SELECT {columns}" + $" FROM ({fromSql}) AS {QuoteIdentifier(structure.SourceAlias)}"; return query; @@ -64,7 +77,7 @@ private string BuildAsJson(SqlQueryStructure structure, bool subQueryStructure = /// FROM dbo_books AS[table0] /// OUTER APPLY(SubQuery generated by recursive call to build function, will create the _subq tables) /// - private string BuildSqlQuery(SqlQueryStructure structure) + private string BuildSqlQuery(SqlQueryStructure structure, string? subQuery) { string dataIdent = QuoteIdentifier(SqlQueryStructure.DATA_IDENT); StringBuilder fromSql = new(); @@ -87,11 +100,38 @@ private string BuildSqlQuery(SqlQueryStructure structure) string query = $"SELECT TOP {structure.Limit()} {columns}" + $" FROM {fromSql}" + + $" {subQuery}" + $" WHERE {predicates}" + orderBy; return query; } + private string BuildSqlCountQuery(SqlQueryStructure structure) + { + string dataIdent = QuoteIdentifier(SqlQueryStructure.DATA_IDENT); + StringBuilder fromSql = new(); + + fromSql.Append($"{QuoteIdentifier(structure.DatabaseObject.SchemaName)}.{QuoteIdentifier(structure.DatabaseObject.Name)} " + + $"AS {QuoteIdentifier($"{structure.SourceAlias}")}{Build(structure.Joins)}"); + + fromSql.Append(string.Join( + "", + structure.JoinQueries.Select( + x => $" OUTER APPLY ({BuildAsJson(x.Value, true)}) AS {QuoteIdentifier(x.Key)}({dataIdent})"))); + + string predicates = JoinPredicateStrings( + structure.GetDbPolicyForOperation(EntityActionOperation.Read), + structure.FilterPredicates, + Build(structure.Predicates), + Build(structure.PaginationMetadata.PaginationPredicate)); + + string query = $"SELECT cast(count(1) as varchar(50)) as RecordCount " + + $" FROM {fromSql}" + + $" WHERE {predicates}"; + + return query; + } + private static string GenerateColumnsAsJson(SqlQueryStructure structure, bool subQueryStructure = false) { string columns; diff --git 
a/src/Core/Resolvers/SqlResponseHelpers.cs b/src/Core/Resolvers/SqlResponseHelpers.cs index 7701d662d3..9171a3a322 100644 --- a/src/Core/Resolvers/SqlResponseHelpers.cs +++ b/src/Core/Resolvers/SqlResponseHelpers.cs @@ -51,6 +51,13 @@ public static OkObjectResult FormatFindResult( ? DetermineExtraFieldsInResponse(findOperationResponse, context.FieldsToBeReturned) : DetermineExtraFieldsInResponse(findOperationResponse.EnumerateArray().First(), context.FieldsToBeReturned); + //Remove RecordCOunt from extraFieldsInResponse if present + /* + if (extraFieldsInResponse.Contains("RecordCount")) + { + extraFieldsInResponse.Remove("RecordCount"); + } + */ uint defaultPageSize = runtimeConfig.DefaultPageSize(); uint maxPageSize = runtimeConfig.MaxPageSize(); @@ -113,6 +120,16 @@ public static OkObjectResult FormatFindResult( queryStringParameters: context!.ParsedQueryString, after); + //Get the element RecordCount from the first element of the array + JsonElement recordCountElement = rootEnumerated[0].GetProperty("RecordCount"); + string jsonRecordCount = JsonSerializer.Serialize(new[] + { + new + { + recordCount = @$"{rootEnumerated[0].GetProperty("RecordCount")}" + } + }); + // When there are extra fields present, they are removed before returning the response. if (extraFieldsInResponse.Count > 0) { @@ -120,6 +137,7 @@ public static OkObjectResult FormatFindResult( } rootEnumerated.Add(nextLink); + rootEnumerated.Add(JsonSerializer.Deserialize(jsonRecordCount)); return OkResponse(JsonSerializer.SerializeToElement(rootEnumerated)); } @@ -218,13 +236,16 @@ public static OkObjectResult OkResponse(JsonElement jsonResult) // we strip the "[" and "]" and then save the nextLink element // into a dictionary with a key of "nextLink" and a value that // represents the nextLink data we require. 
- string nextLinkJsonString = JsonSerializer.Serialize(resultEnumerated[resultEnumerated.Count - 1]); + string nextLinkJsonString = JsonSerializer.Serialize(resultEnumerated[resultEnumerated.Count - 2]); + string recordCountJsonString = JsonSerializer.Serialize(resultEnumerated[resultEnumerated.Count - 1]); Dictionary nextLink = JsonSerializer.Deserialize>(nextLinkJsonString[1..^1])!; - IEnumerable value = resultEnumerated.Take(resultEnumerated.Count - 1); + Dictionary recordCount = JsonSerializer.Deserialize>(recordCountJsonString[1..^1])!; + IEnumerable value = resultEnumerated.Take(resultEnumerated.Count - 2); return new OkObjectResult(new { value = value, - @nextLink = nextLink["nextLink"] + @nextLink = nextLink["nextLink"], + @recordCount = recordCount["recordCount"] }); } From b560b20fa89538a3c4bff5187e68f71fe362ac41 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Wed, 4 Sep 2024 10:25:05 +0200 Subject: [PATCH 07/79] Update dockerfile to .net8 --- Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 5dbaa35a17..537ea4c78c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,13 +1,13 @@ # Version values referenced from https://hub.docker.com/_/microsoft-dotnet-aspnet -FROM mcr.microsoft.com/dotnet/sdk:6.0-cbl-mariner2.0. 
AS build +FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build WORKDIR /src COPY [".", "./"] -RUN dotnet build "./src/Service/Azure.DataApiBuilder.Service.csproj" -c Docker -o /out -r linux-x64 +RUN dotnet build "./src/Service/Azure.DataApiBuilder.Service.csproj" -f net8.0 -o /out -r linux-x64 --self-contained -FROM mcr.microsoft.com/dotnet/aspnet:6.0-cbl-mariner2.0 AS runtime +FROM mcr.microsoft.com/dotnet/aspnet:8.0 AS runtime COPY --from=build /out /App WORKDIR /App From 356778516c9599d227463d2f198e9c36286d86f2 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Wed, 4 Sep 2024 11:30:54 +0200 Subject: [PATCH 08/79] Downgrade SqlClient --- src/Directory.Packages.props | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Directory.Packages.props b/src/Directory.Packages.props index b33dd9cc9e..1c758176ae 100644 --- a/src/Directory.Packages.props +++ b/src/Directory.Packages.props @@ -20,7 +20,7 @@ - + From 1479f34bc3fc434fd7529dded4927638124dc5b1 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Thu, 5 Sep 2024 16:40:29 +0200 Subject: [PATCH 09/79] Rename FIRST_URL constant value from "$first" to "$top" Updated the constant `FIRST_URL` in `RequestParser.cs` within the `Azure.DataApiBuilder.Core.Parsers` namespace to use the value `"$top"` instead of `"$first"`. This change aligns with naming conventions or standards used elsewhere in the codebase or API, ensuring consistency and improving clarity for developers. --- src/Core/Parsers/RequestParser.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Core/Parsers/RequestParser.cs b/src/Core/Parsers/RequestParser.cs index 9a3a602329..86aed37d06 100644 --- a/src/Core/Parsers/RequestParser.cs +++ b/src/Core/Parsers/RequestParser.cs @@ -30,7 +30,7 @@ public class RequestParser /// /// Prefix used for specifying limit in the query string of the URL. 
/// - public const string FIRST_URL = "$first"; + public const string FIRST_URL = "$top"; /// /// Prefix used for specifying paging in the query string of the URL. /// From cbfd7898dd7ad4266c0817f488385950cd387ae7 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Wed, 25 Sep 2024 18:37:30 +0200 Subject: [PATCH 10/79] Add 'offset' feature & totalCount --- src/Core/Models/PaginationMetadata.cs | 2 ++ src/Core/Resolvers/DWSqlQueryBuilder.cs | 5 +++-- .../Sql Query Structures/SqlQueryStructure.cs | 5 +++++ src/Core/Resolvers/SqlPaginationUtil.cs | 10 ++++++++++ src/Core/Services/ExecutionHelper.cs | 13 ++++++++++++- src/Service.GraphQLBuilder/Queries/QueryBuilder.cs | 11 +++++++++++ 6 files changed, 43 insertions(+), 3 deletions(-) diff --git a/src/Core/Models/PaginationMetadata.cs b/src/Core/Models/PaginationMetadata.cs index bd57041d7d..a857a8d3d5 100644 --- a/src/Core/Models/PaginationMetadata.cs +++ b/src/Core/Models/PaginationMetadata.cs @@ -17,6 +17,8 @@ public class PaginationMetadata : IMetadata /// public bool IsPaginated { get; set; } = DEFAULT_PAGINATION_FLAGS_VALUE; + public int TotalCount { get; set; } + /// /// Shows if items is requested from the pagination result /// diff --git a/src/Core/Resolvers/DWSqlQueryBuilder.cs b/src/Core/Resolvers/DWSqlQueryBuilder.cs index 780c477364..20451ce9ca 100644 --- a/src/Core/Resolvers/DWSqlQueryBuilder.cs +++ b/src/Core/Resolvers/DWSqlQueryBuilder.cs @@ -98,11 +98,12 @@ private string BuildSqlQuery(SqlQueryStructure structure, string? 
subQuery) string columns = WrappedColumns(structure); string orderBy = $" ORDER BY {Build(structure.OrderByColumns)}"; - string query = $"SELECT TOP {structure.Limit()} {columns}" + string query = $"SELECT {columns}" + $" FROM {fromSql}" + $" {subQuery}" + $" WHERE {predicates}" - + orderBy; + + orderBy + + $" OFFSET {structure.Offset()} ROWS FETCH NEXT {structure.Limit()} ROWS ONLY"; return query; } diff --git a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs index 6f85fa6be4..91fba6da9c 100644 --- a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs +++ b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs @@ -825,6 +825,11 @@ private void AddGraphQLFields(IReadOnlyList selections, RuntimeC } } + public uint? Offset() + { + return this._ctx?.ArgumentValue("offset"); + } + /// /// Create a list of orderBy columns from the orderBy argument /// passed to the gql query. The orderBy argument could contain mapped field names diff --git a/src/Core/Resolvers/SqlPaginationUtil.cs b/src/Core/Resolvers/SqlPaginationUtil.cs index 5d44dfd05b..f39e76f4ae 100644 --- a/src/Core/Resolvers/SqlPaginationUtil.cs +++ b/src/Core/Resolvers/SqlPaginationUtil.cs @@ -97,6 +97,15 @@ private static JsonObject CreatePaginationConnection(JsonElement root, Paginatio // use rootEnumerated to make the *Connection.items since the last element of rootEnumerated // is removed if the result has an extra element connection.Add(QueryBuilder.PAGINATION_FIELD_NAME, JsonSerializer.Serialize(rootEnumerated.ToArray())); + if (int.TryParse(root[0].GetProperty("RecordCount").ToString(), out int recordCount)) + { + connection.Add(QueryBuilder.TOTAL_COUNT_FIELD_NAME, recordCount); + } + else + { + // Handle the case where parsing fails, if necessary + connection.Add(QueryBuilder.TOTAL_COUNT_FIELD_NAME, 0); // or some default value + } } else { @@ -126,6 +135,7 @@ private static JsonObject 
CreatePaginationConnection(JsonElement root, Paginatio } } + return connection; } diff --git a/src/Core/Services/ExecutionHelper.cs b/src/Core/Services/ExecutionHelper.cs index d96573f7ea..585abf4f54 100644 --- a/src/Core/Services/ExecutionHelper.cs +++ b/src/Core/Services/ExecutionHelper.cs @@ -15,6 +15,7 @@ using HotChocolate.Execution; using HotChocolate.Language; using HotChocolate.Resolvers; +using HotChocolate.Types.Descriptors.Definitions; using HotChocolate.Types.NodaTime; using NodaTime.Text; @@ -350,6 +351,16 @@ private static bool TryGetPropertyFromParent( // Fill the parameters dictionary with the default argument values IFieldCollection schemaArguments = schema.Arguments; + // CNEXT: This is a nasty exernal dependancy + // Add one extra allowable parameter to schemaArgument: offset + + InputFieldDefinition offsetDef = new("offset", "Offset for the query", null, null, null); + IInputField offS = new InputField(offsetDef, 5); + + IEnumerable ss = schemaArguments.Append(offS); + + //IInputField i = new InputField("offset", new IntType()) + // Example 'argumentSchemas' IInputField objects of type 'HotChocolate.Types.Argument': // These are all default arguments defined in the schema for queries. // {first:int} @@ -358,7 +369,7 @@ private static bool TryGetPropertyFromParent( // {orderBy:entityOrderByInput} // The values in schemaArguments will have default values when the backing // entity is a stored procedure with runtime config defined default parameter values. 
- foreach (IInputField argument in schemaArguments) + foreach (IInputField argument in ss) { if (argument.DefaultValue != null) { diff --git a/src/Service.GraphQLBuilder/Queries/QueryBuilder.cs b/src/Service.GraphQLBuilder/Queries/QueryBuilder.cs index b68eb642af..431bbf1abc 100644 --- a/src/Service.GraphQLBuilder/Queries/QueryBuilder.cs +++ b/src/Service.GraphQLBuilder/Queries/QueryBuilder.cs @@ -24,6 +24,8 @@ public static class QueryBuilder public const string ORDER_BY_FIELD_NAME = "orderBy"; public const string PARTITION_KEY_FIELD_NAME = "_partitionKeyValue"; public const string ID_FIELD_NAME = "id"; + public const string TOTAL_COUNT_FIELD_NAME = "totalCount"; + public const string OFFSET_FIELD_NAME = "offset"; /// /// Creates a DocumentNode containing FieldDefinitionNodes representing the FindByPK and FindAll queries @@ -189,6 +191,8 @@ public static List QueryArgumentsForField(string filte new(location: null, new NameNode(PAGINATION_TOKEN_ARGUMENT_NAME), new StringValueNode("A pagination token from a previous query to continue through a paginated list"), new StringType().ToTypeNode(), defaultValue: null, new List()), new(location: null, new NameNode(FILTER_FIELD_NAME), new StringValueNode("Filter options for query"), new NamedTypeNode(filterInputName), defaultValue: null, new List()), new(location: null, new NameNode(ORDER_BY_FIELD_NAME), new StringValueNode("Ordering options for query"), new NamedTypeNode(orderByInputName), defaultValue: null, new List()), + new(location: null, new NameNode(OFFSET_FIELD_NAME), new StringValueNode("Partition key value for the query"), new IntType().ToTypeNode(), defaultValue: null, new List()), + }; } @@ -268,6 +272,13 @@ public static ObjectTypeDefinitionNode GenerateReturnType(NameNode name) new StringValueNode("Indicates if there are more pages of items to return"), new List(), new NonNullType(new BooleanType()).ToTypeNode(), + new List()), + new( + location: null, + new NameNode(TOTAL_COUNT_FIELD_NAME), + new 
StringValueNode("The total number of items that matched the filter"), + new List(), + new NonNullType(new IntType()).ToTypeNode(), new List()) } ); From f5bd201da41961ca4bfe83bb7fe299662d491d09 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Thu, 26 Sep 2024 11:13:32 +0200 Subject: [PATCH 11/79] Provide a default value, and fix subquery --- src/Core/Resolvers/DWSqlQueryBuilder.cs | 7 +++++-- .../Resolvers/Sql Query Structures/SqlQueryStructure.cs | 3 ++- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/src/Core/Resolvers/DWSqlQueryBuilder.cs b/src/Core/Resolvers/DWSqlQueryBuilder.cs index 20451ce9ca..6416a8a75f 100644 --- a/src/Core/Resolvers/DWSqlQueryBuilder.cs +++ b/src/Core/Resolvers/DWSqlQueryBuilder.cs @@ -50,8 +50,11 @@ private string BuildAsJson(SqlQueryStructure structure, bool subQueryStructure = string countSql = $" CROSS JOIN ( {BuildSqlCountQuery(structure)} ) {subQueryAlias}"; - //Add a new column to the structure - structure.Columns.Add(new LabelledColumn("", subQueryAlias, "RecordCount", "RecordCount", subQueryAlias)); + //Add a new column to the structure if not already there + if (!structure.Columns.Exists(c => c.ColumnName == "RecordCount")) + { + structure.Columns.Add(new LabelledColumn("", subQueryAlias, "RecordCount", "RecordCount", subQueryAlias)); + } //Add a subquery 'a' ti the structure structure.JoinQueries.Add(subQueryAlias, structure); diff --git a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs index 91fba6da9c..fd85c5bb8f 100644 --- a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs +++ b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs @@ -827,7 +827,8 @@ private void AddGraphQLFields(IReadOnlyList selections, RuntimeC public uint? Offset() { - return this._ctx?.ArgumentValue("offset"); + // Check if the offset argument is present in the query, if not, return 0 + return this._ctx?.ArgumentValue("offset") ?? 
0; } /// From 8569e26579f849fcea614af10a6fc15159fca1cf Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Thu, 26 Sep 2024 14:15:25 +0200 Subject: [PATCH 12/79] Handle the totalCount correctly, just like the other --- src/Core/Models/PaginationMetadata.cs | 2 ++ .../Sql Query Structures/SqlQueryStructure.cs | 4 +++ src/Core/Resolvers/SqlPaginationUtil.cs | 25 +++++++++++-------- 3 files changed, 21 insertions(+), 10 deletions(-) diff --git a/src/Core/Models/PaginationMetadata.cs b/src/Core/Models/PaginationMetadata.cs index a857a8d3d5..7c46240644 100644 --- a/src/Core/Models/PaginationMetadata.cs +++ b/src/Core/Models/PaginationMetadata.cs @@ -34,6 +34,8 @@ public class PaginationMetadata : IMetadata /// public bool RequestedHasNextPage { get; set; } = DEFAULT_PAGINATION_FLAGS_VALUE; + public bool RequestedTotalCount { get; set; } = DEFAULT_PAGINATION_FLAGS_VALUE; + /// /// Keeps a reference to the SqlQueryStructure the pagination metadata is associated with /// diff --git a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs index fd85c5bb8f..302dbf1398 100644 --- a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs +++ b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs @@ -670,6 +670,10 @@ void ProcessPaginationFields(IReadOnlyList paginationSelections) case QueryBuilder.HAS_NEXT_PAGE_FIELD_NAME: PaginationMetadata.RequestedHasNextPage = true; break; + case QueryBuilder.TOTAL_COUNT_FIELD_NAME: + PaginationMetadata.RequestedTotalCount = true; + break; + } } } diff --git a/src/Core/Resolvers/SqlPaginationUtil.cs b/src/Core/Resolvers/SqlPaginationUtil.cs index f39e76f4ae..3516cce272 100644 --- a/src/Core/Resolvers/SqlPaginationUtil.cs +++ b/src/Core/Resolvers/SqlPaginationUtil.cs @@ -66,6 +66,21 @@ private static JsonObject CreatePaginationConnection(JsonElement root, Paginatio // values we need to determine the correct pagination logic bool isPaginationRequested = 
paginationMetadata.RequestedHasNextPage || paginationMetadata.RequestedEndCursor; + // Id the request Includes a totalCount, make sure we always add it to the connection + + if (paginationMetadata.RequestedTotalCount) + { + if (root.GetArrayLength() > 0 && int.TryParse(root[0].GetProperty("RecordCount").ToString(), out int recordCount)) + { + connection.Add(QueryBuilder.TOTAL_COUNT_FIELD_NAME, recordCount); + } + else + { + // Handle the case where parsing fails, if necessary + connection.Add(QueryBuilder.TOTAL_COUNT_FIELD_NAME, 0); // or some default value + } + } + IEnumerable rootEnumerated = root.EnumerateArray(); int returnedElementCount = rootEnumerated.Count(); bool hasExtraElement = false; @@ -97,15 +112,6 @@ private static JsonObject CreatePaginationConnection(JsonElement root, Paginatio // use rootEnumerated to make the *Connection.items since the last element of rootEnumerated // is removed if the result has an extra element connection.Add(QueryBuilder.PAGINATION_FIELD_NAME, JsonSerializer.Serialize(rootEnumerated.ToArray())); - if (int.TryParse(root[0].GetProperty("RecordCount").ToString(), out int recordCount)) - { - connection.Add(QueryBuilder.TOTAL_COUNT_FIELD_NAME, recordCount); - } - else - { - // Handle the case where parsing fails, if necessary - connection.Add(QueryBuilder.TOTAL_COUNT_FIELD_NAME, 0); // or some default value - } } else { @@ -134,7 +140,6 @@ private static JsonObject CreatePaginationConnection(JsonElement root, Paginatio paginationMetadata.Structure!.MetadataProvider)); } } - return connection; } From 9e48eb4e261bab05b82fa2a4dfce93a7947bdd90 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Tue, 15 Oct 2024 11:44:08 +0200 Subject: [PATCH 13/79] Change default pagesize to 1000 --- src/Config/ObjectModel/PaginationOptions.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Config/ObjectModel/PaginationOptions.cs b/src/Config/ObjectModel/PaginationOptions.cs index ab4bff29ff..e44877e780 100644 --- 
a/src/Config/ObjectModel/PaginationOptions.cs +++ b/src/Config/ObjectModel/PaginationOptions.cs @@ -18,7 +18,7 @@ public record PaginationOptions /// /// Default page size. /// - public const uint DEFAULT_PAGE_SIZE = 100; + public const uint DEFAULT_PAGE_SIZE = 1000; /// /// Max page size. From af24f6f1a6a5b56a68a78becdcc256706b068a1f Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Mon, 18 Nov 2024 14:30:46 +0100 Subject: [PATCH 14/79] Backport changes to MSSQL branch --- src/Core/Resolvers/MsSqlQueryBuilder.cs | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/src/Core/Resolvers/MsSqlQueryBuilder.cs b/src/Core/Resolvers/MsSqlQueryBuilder.cs index 43e7fd775f..4dd8399899 100644 --- a/src/Core/Resolvers/MsSqlQueryBuilder.cs +++ b/src/Core/Resolvers/MsSqlQueryBuilder.cs @@ -61,10 +61,20 @@ public string Build(SqlQueryStructure structure) Build(structure.PaginationMetadata.PaginationPredicate)); } - string query = $"SELECT TOP {structure.Limit()} {WrappedColumns(structure)}" + //Add recordcount + string recordCountSql = $"SELECT cast(count(1) as int) as RecordCount " + + $" FROM {fromSql}" + + $" WHERE {predicates}"; + + string orderBy = $" ORDER BY {Build(structure.OrderByColumns)}"; + + fromSql += $" OUTER APPLY ({recordCountSql}) RecordCountQuery"; + + string query = $"SELECT {WrappedColumns(structure)}, RecordCountQuery.RecordCount" + $" FROM {fromSql}" + $" WHERE {predicates}" - + $" ORDER BY {Build(structure.OrderByColumns)}"; + + orderBy + + $" OFFSET {structure.Offset()} ROWS FETCH NEXT {structure.Limit()} ROWS ONLY"; query += FOR_JSON_SUFFIX; if (!structure.IsListQuery) From b8fca935d33ac5f5a0b0e2b0ce7ce58887192dbc Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Tue, 19 Nov 2024 17:06:33 +0100 Subject: [PATCH 15/79] REvert hard coded pagination --- src/Config/ObjectModel/PaginationOptions.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Config/ObjectModel/PaginationOptions.cs 
b/src/Config/ObjectModel/PaginationOptions.cs index e44877e780..ab4bff29ff 100644 --- a/src/Config/ObjectModel/PaginationOptions.cs +++ b/src/Config/ObjectModel/PaginationOptions.cs @@ -18,7 +18,7 @@ public record PaginationOptions /// /// Default page size. /// - public const uint DEFAULT_PAGE_SIZE = 1000; + public const uint DEFAULT_PAGE_SIZE = 100; /// /// Max page size. From 73be00b712f1178776d8ad332986ae6084bca5e2 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Thu, 21 Nov 2024 11:54:28 +0100 Subject: [PATCH 16/79] Fix the handling of empty cache responses --- src/Core/Resolvers/SqlQueryEngine.cs | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/src/Core/Resolvers/SqlQueryEngine.cs b/src/Core/Resolvers/SqlQueryEngine.cs index 00f313cd1d..dc156e7c2b 100644 --- a/src/Core/Resolvers/SqlQueryEngine.cs +++ b/src/Core/Resolvers/SqlQueryEngine.cs @@ -325,9 +325,19 @@ public object ResolveList(JsonElement array, IObjectField fieldSchema, ref IMeta { DatabaseQueryMetadata queryMetadata = new(queryText: queryString, dataSource: dataSourceName, queryParameters: structure.Parameters); JsonElement result = await _cache.GetOrSetAsync(queryExecutor, queryMetadata, cacheEntryTtl: runtimeConfig.GetEntityCacheEntryTtl(entityName: structure.EntityName)); - byte[] jsonBytes = JsonSerializer.SerializeToUtf8Bytes(result); - JsonDocument cacheServiceResponse = JsonDocument.Parse(jsonBytes); - return cacheServiceResponse; + // If there is an empty result cached, then the returned value from the cache service is some bizarre + // undefined value that is unparsable + try + { + byte[] jsonBytes = JsonSerializer.SerializeToUtf8Bytes(result); + JsonDocument cacheServiceResponse = JsonDocument.Parse(jsonBytes); + return cacheServiceResponse; + } + catch(InvalidOperationException) + { + return null; + } + } } From 46f66d24c9016d3402d35ee58e6e1a306955d590 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Wed, 4 Dec 2024 16:31:47 +0100 Subject: [PATCH 17/79] 
Allow sorting on second level fields. --- src/Core/Resolvers/BaseSqlQueryBuilder.cs | 7 +++ .../Sql Query Structures/SqlQueryStructure.cs | 51 +++++++++++++++++++ .../CosmosSqlMetadataProvider.cs | 5 ++ .../MetadataProviders/ISqlMetadataProvider.cs | 1 + .../MetadataProviders/SqlMetadataProvider.cs | 10 ++++ 5 files changed, 74 insertions(+) diff --git a/src/Core/Resolvers/BaseSqlQueryBuilder.cs b/src/Core/Resolvers/BaseSqlQueryBuilder.cs index 150c103183..2a7dff3253 100644 --- a/src/Core/Resolvers/BaseSqlQueryBuilder.cs +++ b/src/Core/Resolvers/BaseSqlQueryBuilder.cs @@ -143,6 +143,13 @@ private static string GetComparisonFromDirection(OrderBy direction) /// protected virtual string Build(Column column) { + // If the table is a subQUery, we return some fancy JSON_VALUE + + if(column.TableAlias != null && column.TableAlias.Contains("_subq") && column is OrderByColumn) + { + return $"JSON_VALUE({QuoteIdentifier(column.TableAlias)}.[data], '$.{column.ColumnName}')"; + } + // If the table alias is not empty, we return [{SourceAlias}].[{Column}] if (!string.IsNullOrEmpty(column.TableAlias)) { diff --git a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs index 302dbf1398..2263fea8ec 100644 --- a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs +++ b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs @@ -3,6 +3,7 @@ using System.Data; using System.Net; +using System.Text.RegularExpressions; using Azure.DataApiBuilder.Auth; using Azure.DataApiBuilder.Config.ObjectModel; using Azure.DataApiBuilder.Core.Configurations; @@ -835,6 +836,21 @@ private void AddGraphQLFields(IReadOnlyList selections, RuntimeC return this._ctx?.ArgumentValue("offset") ?? 
0; } + private static string ExtractColumnName(string fieldValue) + { + string pattern = @"\{\s*([^:]+)\s*:"; + Match match = Regex.Match(fieldValue, pattern); + if (match.Success) + { + string columnName = match.Groups[1].Value.Trim(); + return columnName; + } + else + { + return ""; + } + } + /// /// Create a list of orderBy columns from the orderBy argument /// passed to the gql query. The orderBy argument could contain mapped field names @@ -872,6 +888,36 @@ private List ProcessGqlOrderByArg(List orderByFi string fieldName = field.Name.ToString(); + // Let's check if we're trying to sort on a child object. If tgis is a 'one' relationship this will just work + if (field.Value.ToString().Contains(':')) + { + // Check if the fieldName is a relationship element + if (MetadataProvider.TryGetEntityDefenition(EntityName, out Entity? baseEntity)) + { + if (baseEntity!.Relationships!.ContainsKey(fieldName)) + { + // Look up out alias in the JoinQueries + //myJoin = this.JoinQueries; + //stuff + Column? linkColumn = FindColumnByLabel(fieldName); + if (linkColumn == null) + { + throw new DataApiBuilderException(message: "Unable to resolve relation " + fieldName, + statusCode: HttpStatusCode.InternalServerError, + subStatusCode: DataApiBuilderException.SubStatusCodes.UnexpectedError); + } + + orderByColumnsList.Add(new OrderByColumn(tableSchema: linkColumn.TableSchema, + tableName: linkColumn.TableName, + columnName: ExtractColumnName(field.Value.ToString()), + tableAlias: linkColumn.TableAlias)); + + } + } + + continue; + } + if (!MetadataProvider.TryGetBackingColumn(EntityName, fieldName, out string? backingColumnName)) { throw new DataApiBuilderException(message: "Mapped fieldname could not be found.", @@ -973,6 +1019,11 @@ public bool IsSubqueryColumn(Column column) return column.TableAlias == null ? false : JoinQueries.ContainsKey(column.TableAlias); } + public LabelledColumn? 
FindColumnByLabel(string fieldName) + { + return Columns.FirstOrDefault(column => column.Label.Equals(fieldName, StringComparison.OrdinalIgnoreCase)); + } + /// /// Add column label string literals as parameters to the query structure /// diff --git a/src/Core/Services/MetadataProviders/CosmosSqlMetadataProvider.cs b/src/Core/Services/MetadataProviders/CosmosSqlMetadataProvider.cs index 226882ab86..6b40ce902d 100644 --- a/src/Core/Services/MetadataProviders/CosmosSqlMetadataProvider.cs +++ b/src/Core/Services/MetadataProviders/CosmosSqlMetadataProvider.cs @@ -621,5 +621,10 @@ public void InitializeAsync( { throw new NotImplementedException(); } + + public bool TryGetEntityDefenition(string entityName, out Entity? entityDefenition) + { + throw new NotImplementedException(); + } } } diff --git a/src/Core/Services/MetadataProviders/ISqlMetadataProvider.cs b/src/Core/Services/MetadataProviders/ISqlMetadataProvider.cs index 581f8a80bc..6c61c2efe2 100644 --- a/src/Core/Services/MetadataProviders/ISqlMetadataProvider.cs +++ b/src/Core/Services/MetadataProviders/ISqlMetadataProvider.cs @@ -241,5 +241,6 @@ public bool TryGetFKDefinition( string referencedEntityName, [NotNullWhen(true)] out ForeignKeyDefinition? foreignKeyDefinition, bool isMToNRelationship) => throw new NotImplementedException(); + public bool TryGetEntityDefenition(string entityName, out Entity? entityDefenition); } } diff --git a/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs b/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs index 1e6e61edf0..a6e546238a 100644 --- a/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs +++ b/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs @@ -101,6 +101,16 @@ private void HandleOrRecordException(Exception e) } } + public bool TryGetEntityDefenition(string entityName, out Entity? 
entityDefenition) + { + if (!_entities.TryGetValue(entityName, out entityDefenition)) + { + throw new KeyNotFoundException($"Initialization of metadata incomplete for entity: {entityName}"); + } + + return true; + } + public SqlMetadataProvider( RuntimeConfigProvider runtimeConfigProvider, IAbstractQueryManagerFactory engineFactory, From 6bdc35d439b39d078b819c46b6679cae7b4c3570 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Tue, 31 Dec 2024 14:33:24 +0100 Subject: [PATCH 18/79] Bugfix for by_pk methods --- src/Core/Resolvers/MsSqlQueryBuilder.cs | 31 +++++++++++++++---------- 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/src/Core/Resolvers/MsSqlQueryBuilder.cs b/src/Core/Resolvers/MsSqlQueryBuilder.cs index 4dd8399899..728c780540 100644 --- a/src/Core/Resolvers/MsSqlQueryBuilder.cs +++ b/src/Core/Resolvers/MsSqlQueryBuilder.cs @@ -33,6 +33,7 @@ public override string QuoteIdentifier(string ident) /// public string Build(SqlQueryStructure structure) { + string query; string dataIdent = QuoteIdentifier(SqlQueryStructure.DATA_IDENT); string fromSql = $"{QuoteIdentifier(structure.DatabaseObject.SchemaName)}.{QuoteIdentifier(structure.DatabaseObject.Name)} " + $"AS {QuoteIdentifier($"{structure.SourceAlias}")}{Build(structure.Joins)}"; @@ -61,20 +62,26 @@ public string Build(SqlQueryStructure structure) Build(structure.PaginationMetadata.PaginationPredicate)); } - //Add recordcount - string recordCountSql = $"SELECT cast(count(1) as int) as RecordCount " - + $" FROM {fromSql}" - + $" WHERE {predicates}"; - string orderBy = $" ORDER BY {Build(structure.OrderByColumns)}"; - fromSql += $" OUTER APPLY ({recordCountSql}) RecordCountQuery"; - - string query = $"SELECT {WrappedColumns(structure)}, RecordCountQuery.RecordCount" - + $" FROM {fromSql}" - + $" WHERE {predicates}" - + orderBy - + $" OFFSET {structure.Offset()} ROWS FETCH NEXT {structure.Limit()} ROWS ONLY"; + //Add recordcount if needed + if (structure.IsListQuery) + { + string recordCountSql = 
$"SELECT cast(count(1) as int) as RecordCount " + + $" FROM {fromSql}" + + $" WHERE {predicates}"; + fromSql += $" OUTER APPLY ({recordCountSql}) RecordCountQuery"; + query = $"SELECT {WrappedColumns(structure)}, RecordCountQuery.RecordCount" + + $" FROM {fromSql}" + + $" WHERE {predicates}" + + orderBy + + $" OFFSET {structure.Offset()} ROWS FETCH NEXT {structure.Limit()} ROWS ONLY"; + } else + { + query = $"SELECT {WrappedColumns(structure)}" + + $" FROM {fromSql}" + + $" WHERE {predicates}"; + } query += FOR_JSON_SUFFIX; if (!structure.IsListQuery) From 82dacc7adda51a99f130cba37f210a7bb90354e8 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Fri, 3 Jan 2025 15:44:59 +0100 Subject: [PATCH 19/79] No change --- Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index 537ea4c78c..3dd9105892 100644 --- a/Dockerfile +++ b/Dockerfile @@ -10,6 +10,7 @@ RUN dotnet build "./src/Service/Azure.DataApiBuilder.Service.csproj" -f net8.0 - FROM mcr.microsoft.com/dotnet/aspnet:8.0 AS runtime COPY --from=build /out /App + WORKDIR /App ENV ASPNETCORE_URLS=http://+:5000 ENTRYPOINT ["dotnet", "Azure.DataApiBuilder.Service.dll"] From 6ade6065a8cb97ee41333c9bd975f6f722186870 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Fri, 3 Jan 2025 15:58:56 +0100 Subject: [PATCH 20/79] Update --- src/Core/Resolvers/SqlResponseHelpers.cs | 2 +- src/Directory.Packages.props | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/Core/Resolvers/SqlResponseHelpers.cs b/src/Core/Resolvers/SqlResponseHelpers.cs index 9171a3a322..87fc649ace 100644 --- a/src/Core/Resolvers/SqlResponseHelpers.cs +++ b/src/Core/Resolvers/SqlResponseHelpers.cs @@ -121,7 +121,7 @@ public static OkObjectResult FormatFindResult( after); //Get the element RecordCount from the first element of the array - JsonElement recordCountElement = rootEnumerated[0].GetProperty("RecordCount"); + //JsonElement recordCountElement = rootEnumerated[0].GetProperty("RecordCount"); string 
jsonRecordCount = JsonSerializer.Serialize(new[] { new diff --git a/src/Directory.Packages.props b/src/Directory.Packages.props index 42f3aa7221..e3ba05a891 100644 --- a/src/Directory.Packages.props +++ b/src/Directory.Packages.props @@ -20,7 +20,7 @@ - + @@ -66,4 +66,4 @@ - + \ No newline at end of file From ec2b24791db9503a35f1d1db2b41b51637a8b2e5 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Tue, 14 Jan 2025 15:51:19 +0100 Subject: [PATCH 21/79] Add exception handling to Offset method in SqlQueryStructure The Offset method now includes a try-catch block to handle potential HotChocolate.GraphQLException exceptions, ensuring that the method returns 0 in such cases. This adds robustness to the method by preventing it from failing unexpectedly when the "offset" argument cannot be retrieved. --- .../Resolvers/Sql Query Structures/SqlQueryStructure.cs | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs index 2263fea8ec..36a81bcc34 100644 --- a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs +++ b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs @@ -833,7 +833,14 @@ private void AddGraphQLFields(IReadOnlyList selections, RuntimeC public uint? Offset() { // Check if the offset argument is present in the query, if not, return 0 - return this._ctx?.ArgumentValue("offset") ?? 0; + try + { + return this._ctx?.ArgumentValue("offset") ?? 
0; + } + catch (HotChocolate.GraphQLException) + { + return 0; // This is a stop-gat and indicated a very fishy situation + } } private static string ExtractColumnName(string fieldValue) From 6ff8fb42409fd1a99087678e007102859e20081a Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Mon, 10 Mar 2025 14:28:45 +0100 Subject: [PATCH 22/79] Orderby order was ignored --- src/Core/Resolvers/QueryExecutor.cs | 8 +++++++- .../Sql Query Structures/SqlQueryStructure.cs | 19 ++++++++++++++++++- 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/src/Core/Resolvers/QueryExecutor.cs b/src/Core/Resolvers/QueryExecutor.cs index 743f0e8141..de07597a8d 100644 --- a/src/Core/Resolvers/QueryExecutor.cs +++ b/src/Core/Resolvers/QueryExecutor.cs @@ -142,6 +142,7 @@ public void OnConfigChanged(object? sender, HotReloadEventArgs args) { string correlationId = HttpContextExtensions.GetLoggerCorrelationId(httpContext); QueryExecutorLogger.LogDebug("{correlationId} Executing query: {queryText}", correlationId, sqltext); + QueryExecutorLogger.LogDebug($"Paramaters: {string.Join(", ", parameters.Select(param => $"{param.Key}: {param.Value?.Value} (DbType: {param.Value?.DbType}, SqlDbType: {param.Value?.SqlDbType})"))}"); } TResult? result = ExecuteQueryAgainstDb(conn, sqltext, parameters, dataReaderHandler, httpContext, dataSourceName, args); @@ -228,9 +229,14 @@ public void OnConfigChanged(object? sender, HotReloadEventArgs args) { string correlationId = HttpContextExtensions.GetLoggerCorrelationId(httpContext); QueryExecutorLogger.LogDebug("{correlationId} Executing query: {queryText}", correlationId, sqltext); + + if (parameters != null) + { + QueryExecutorLogger.LogDebug($"Parameters: {string.Join(", ", parameters.Select(param => $"{param.Key}: {param.Value?.Value} (DbType: {param.Value?.DbType}, SqlDbType: {param.Value?.SqlDbType})"))}"); + } } - TResult? 
result = await ExecuteQueryAgainstDbAsync(conn, sqltext, parameters, dataReaderHandler, httpContext, dataSourceName, args); + TResult? result = await ExecuteQueryAgainstDbAsync(conn, sqltext, parameters!, dataReaderHandler, httpContext, dataSourceName, args); if (retryAttempt > 1) { diff --git a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs index 36a81bcc34..4bdcf1d87b 100644 --- a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs +++ b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs @@ -858,6 +858,21 @@ private static string ExtractColumnName(string fieldValue) } } + private static string ExtractValue(string fieldValue) + { + string pattern = @"\{\s*([^:]+)\s*:\s*(.*?)\s*\}"; + Match match = Regex.Match(fieldValue, pattern); + if (match.Success) + { + string value = match.Groups[2].Value.Trim(); + return value; + } + else + { + return ""; + } + } + /// /// Create a list of orderBy columns from the orderBy argument /// passed to the gql query. 
The orderBy argument could contain mapped field names @@ -917,7 +932,9 @@ private List ProcessGqlOrderByArg(List orderByFi orderByColumnsList.Add(new OrderByColumn(tableSchema: linkColumn.TableSchema, tableName: linkColumn.TableName, columnName: ExtractColumnName(field.Value.ToString()), - tableAlias: linkColumn.TableAlias)); + tableAlias: linkColumn.TableAlias, + direction: Enum.Parse(ExtractValue(field.Value.ToString())) + )); } } From 9f3ab328cd1bffc51c9135a1def55261534ce245 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Wed, 12 Mar 2025 17:37:40 +0100 Subject: [PATCH 23/79] Handle the Offset in a correct way for child queries --- .../Sql Query Structures/SqlQueryStructure.cs | 21 +++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs index 4bdcf1d87b..f37a73fc00 100644 --- a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs +++ b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs @@ -64,6 +64,8 @@ public class SqlQueryStructure : BaseSqlQueryStructure /// private uint? _limit = PaginationOptions.DEFAULT_PAGE_SIZE; + private int? _offset; + /// /// If this query is built because of a GraphQL query (as opposed to /// REST), then this is set to the resolver context of that query. @@ -458,12 +460,26 @@ private SqlQueryStructure( // parse first parameter for all list queries object? firstObject = queryParams[QueryBuilder.PAGE_START_ARGUMENT_NAME]; _limit = runtimeConfig?.GetPaginationLimit((int?)firstObject); + } else { // if first is not passed, we should use the default page size. _limit = runtimeConfig?.DefaultPageSize(); } + + if (queryParams.ContainsKey(QueryBuilder.OFFSET_FIELD_NAME)) + { + // parse the offset parameter for all list queries + object? 
offsetObject = queryParams[QueryBuilder.OFFSET_FIELD_NAME]; + _offset = (int?)offsetObject; + + } + else + { + // if first is not passed, we should use the default page size. + _offset = 0; + } } if (IsListQuery && queryParams.ContainsKey(QueryBuilder.FILTER_FIELD_NAME)) @@ -830,12 +846,13 @@ private void AddGraphQLFields(IReadOnlyList selections, RuntimeC } } - public uint? Offset() + public int? Offset() { // Check if the offset argument is present in the query, if not, return 0 try { - return this._ctx?.ArgumentValue("offset") ?? 0; + //return this._ctx?.ArgumentValue("offset") ?? 0; + return _offset; } catch (HotChocolate.GraphQLException) { From 04294db4e2536ba62adb34881abaed7cb3c17304 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Mon, 31 Mar 2025 10:53:31 +0200 Subject: [PATCH 24/79] Add Exit to crach the app on horribme errors --- src/Core/Services/GraphQLSchemaCreator.cs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/Core/Services/GraphQLSchemaCreator.cs b/src/Core/Services/GraphQLSchemaCreator.cs index 76ba3218c8..c1ab4ca8e3 100644 --- a/src/Core/Services/GraphQLSchemaCreator.cs +++ b/src/Core/Services/GraphQLSchemaCreator.cs @@ -229,9 +229,11 @@ private DocumentNode GenerateSqlGraphQLObjects(RuntimeEntities entities, Diction } else { + Environment.Exit(-777); throw new DataApiBuilderException(message: $"Database Object definition for {entityName} has not been inferred.", statusCode: HttpStatusCode.InternalServerError, subStatusCode: DataApiBuilderException.SubStatusCodes.ErrorInInitialization); + } } From 39b7d4a27b66add8179bb13d48c53eb0f79a7524 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Tue, 22 Apr 2025 11:10:15 +0200 Subject: [PATCH 25/79] Die on fatal error --- src/Core/Resolvers/QueryExecutor.cs | 12 ++++++++++-- .../MetadataProviders/SqlMetadataProvider.cs | 3 +++ 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/src/Core/Resolvers/QueryExecutor.cs b/src/Core/Resolvers/QueryExecutor.cs index de07597a8d..164ced8cbc 100644 
--- a/src/Core/Resolvers/QueryExecutor.cs +++ b/src/Core/Resolvers/QueryExecutor.cs @@ -141,7 +141,7 @@ public void OnConfigChanged(object? sender, HotReloadEventArgs args) if (!ConfigProvider.IsLateConfigured) { string correlationId = HttpContextExtensions.GetLoggerCorrelationId(httpContext); - QueryExecutorLogger.LogDebug("{correlationId} Executing query: {queryText}", correlationId, sqltext); + QueryExecutorLogger.LogDebug("{correlationId} Executing query : {queryText}", correlationId, sqltext); QueryExecutorLogger.LogDebug($"Paramaters: {string.Join(", ", parameters.Select(param => $"{param.Key}: {param.Value?.Value} (DbType: {param.Value?.DbType}, SqlDbType: {param.Value?.SqlDbType})"))}"); } @@ -228,11 +228,19 @@ public void OnConfigChanged(object? sender, HotReloadEventArgs args) if (!ConfigProvider.IsLateConfigured) { string correlationId = HttpContextExtensions.GetLoggerCorrelationId(httpContext); - QueryExecutorLogger.LogDebug("{correlationId} Executing query: {queryText}", correlationId, sqltext); + QueryExecutorLogger.LogDebug("{correlationId} Executing query2: {queryText}", correlationId, sqltext); if (parameters != null) { QueryExecutorLogger.LogDebug($"Parameters: {string.Join(", ", parameters.Select(param => $"{param.Key}: {param.Value?.Value} (DbType: {param.Value?.DbType}, SqlDbType: {param.Value?.SqlDbType})"))}"); + IEnumerable paramDeclarations = parameters.Select(param => + { + string paramType = param.Value.DbType.HasValue ? param.Value.DbType.ToString()! : "varchar(255)"; + string paramValue = param.Value.Value != null ? param.Value.Value.ToString()! 
: "NULL"; + return $"declare {param.Key} {paramType} = '{paramValue}'"; + }); + + QueryExecutorLogger.LogDebug($"Parameters2: {string.Join("; ", paramDeclarations)}"); } } diff --git a/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs b/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs index a6e546238a..4a651f859f 100644 --- a/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs +++ b/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs @@ -171,6 +171,7 @@ public virtual string GetSchemaName(string entityName) { if (!EntityToDatabaseObject.TryGetValue(entityName, out DatabaseObject? databaseObject)) { + Environment.Exit(-777); throw new DataApiBuilderException(message: $"Table Definition for {entityName} has not been inferred.", statusCode: HttpStatusCode.InternalServerError, subStatusCode: DataApiBuilderException.SubStatusCodes.EntityNotFound); @@ -189,6 +190,7 @@ public string GetDatabaseObjectName(string entityName) { if (!EntityToDatabaseObject.TryGetValue(entityName, out DatabaseObject? databaseObject)) { + Environment.Exit(-777); throw new DataApiBuilderException(message: $"Table Definition for {entityName} has not been inferred.", statusCode: HttpStatusCode.InternalServerError, subStatusCode: DataApiBuilderException.SubStatusCodes.EntityNotFound); @@ -202,6 +204,7 @@ public SourceDefinition GetSourceDefinition(string entityName) { if (!EntityToDatabaseObject.TryGetValue(entityName, out DatabaseObject? 
databaseObject)) { + Environment.Exit(-777); throw new DataApiBuilderException(message: $"Table Definition for {entityName} has not been inferred.", statusCode: HttpStatusCode.InternalServerError, subStatusCode: DataApiBuilderException.SubStatusCodes.EntityNotFound); From 7deed3995f96a595943e060e06bfb9d3799299e1 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Thu, 8 May 2025 10:11:52 +0200 Subject: [PATCH 26/79] Add request logging --- src/Core/Resolvers/QueryExecutor.cs | 11 ++++++----- src/Service/Startup.cs | 7 +++++++ src/Service/appsettings.json | 3 ++- 3 files changed, 15 insertions(+), 6 deletions(-) diff --git a/src/Core/Resolvers/QueryExecutor.cs b/src/Core/Resolvers/QueryExecutor.cs index 164ced8cbc..49619c3a6a 100644 --- a/src/Core/Resolvers/QueryExecutor.cs +++ b/src/Core/Resolvers/QueryExecutor.cs @@ -219,6 +219,8 @@ public void OnConfigChanged(object? sender, HotReloadEventArgs args) try { + string correlationId = HttpContextExtensions.GetLoggerCorrelationId(httpContext); + DateTime startTime = DateTime.Now; result = await _retryPolicyAsync.ExecuteAsync(async () => { retryAttempt++; @@ -227,12 +229,11 @@ public void OnConfigChanged(object? sender, HotReloadEventArgs args) // When IsLateConfigured is true we are in a hosted scenario and do not reveal query information. 
if (!ConfigProvider.IsLateConfigured) { - string correlationId = HttpContextExtensions.GetLoggerCorrelationId(httpContext); - QueryExecutorLogger.LogDebug("{correlationId} Executing query2: {queryText}", correlationId, sqltext); + QueryExecutorLogger.LogDebug("{correlationId} {ts} Executing query: {queryText}", correlationId, DateTime.Now.ToString() , sqltext); if (parameters != null) { - QueryExecutorLogger.LogDebug($"Parameters: {string.Join(", ", parameters.Select(param => $"{param.Key}: {param.Value?.Value} (DbType: {param.Value?.DbType}, SqlDbType: {param.Value?.SqlDbType})"))}"); + // QueryExecutorLogger.LogDebug($"Parameters: {string.Join(", ", parameters.Select(param => $"{param.Key}: {param.Value?.Value} (DbType: {param.Value?.DbType}, SqlDbType: {param.Value?.SqlDbType})"))}"); IEnumerable paramDeclarations = parameters.Select(param => { string paramType = param.Value.DbType.HasValue ? param.Value.DbType.ToString()! : "varchar(255)"; @@ -240,7 +241,7 @@ public void OnConfigChanged(object? sender, HotReloadEventArgs args) return $"declare {param.Key} {paramType} = '{paramValue}'"; }); - QueryExecutorLogger.LogDebug($"Parameters2: {string.Join("; ", paramDeclarations)}"); + //QueryExecutorLogger.LogDebug($"Parameters2: {string.Join("; ", paramDeclarations)}"); } } @@ -253,7 +254,7 @@ public void OnConfigChanged(object? sender, HotReloadEventArgs args) // This implies that the request got successfully executed during one of retry attempts. 
QueryExecutorLogger.LogInformation("{correlationId} Request executed successfully in {retryAttempt} attempt of {maxRetries} available attempts.", correlationId, retryAttempt, maxRetries); } - + QueryExecutorLogger.LogDebug("{correlationId} {ts} Query executed in {time}", correlationId, DateTime.Now.ToString(), DateTime.Now - startTime); return result; } catch (DbException e) diff --git a/src/Service/Startup.cs b/src/Service/Startup.cs index e0653d90a5..019f3b8495 100644 --- a/src/Service/Startup.cs +++ b/src/Service/Startup.cs @@ -192,6 +192,12 @@ public void ConfigureServices(IServiceCollection services) services.AddSingleton(); services.AddSingleton(); + services.AddHttpLogging(logging => + { + logging.LoggingFields = Microsoft.AspNetCore.HttpLogging.HttpLoggingFields.RequestBody; + logging.ResponseBodyLogLimit = 9999999; + }); + AddGraphQLService(services, runtimeConfig?.Runtime?.GraphQL); services.AddFusionCache() .WithOptions(options => @@ -328,6 +334,7 @@ public void Configure(IApplicationBuilder app, IWebHostEnvironment env, RuntimeC // https://andrewlock.net/understanding-pathbase-in-aspnetcore/#placing-usepathbase-in-the-correct-location app.UseCorrelationIdMiddleware(); app.UsePathRewriteMiddleware(); + app.UseHttpLogging(); // SwaggerUI visualization of the OpenAPI description document is only available // in developer mode in alignment with the restriction placed on ChilliCream's BananaCakePop IDE. 
diff --git a/src/Service/appsettings.json b/src/Service/appsettings.json index c4dc8f6326..1ec900abf9 100644 --- a/src/Service/appsettings.json +++ b/src/Service/appsettings.json @@ -3,7 +3,8 @@ "LogLevel": { "Default": "Error", "Microsoft": "Warning", - "Microsoft.Hosting.Lifetime": "Information" + "Microsoft.Hosting.Lifetime": "Information", + "Microsoft.AspNetCore.HttpLogging.HttpLoggingMiddleware": "Information" } }, "AllowedHosts": "*" From f37ffb6adfc0207532ca2a85272992a7fea2b345 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Thu, 8 May 2025 10:14:38 +0200 Subject: [PATCH 27/79] aa --- src/Service/dab-config-dev.json | 2921 +++++++++++++++++++++++++++++++ src/Service/dab-config.json | 2921 +++++++++++++++++++++++++++++++ 2 files changed, 5842 insertions(+) create mode 100644 src/Service/dab-config-dev.json create mode 100644 src/Service/dab-config.json diff --git a/src/Service/dab-config-dev.json b/src/Service/dab-config-dev.json new file mode 100644 index 0000000000..416626edd5 --- /dev/null +++ b/src/Service/dab-config-dev.json @@ -0,0 +1,2921 @@ +{ + "$schema": "https://github.com/Azure/data-api-builder/releases/latest/download/dab.draft.schema.json", + "data-source": { + "database-type": "mssql", + "connection-string": "Data Source=nqf4kgvoqm4ufazdzriupb2pay-doa2ptopus4ufglh5rxt3is4yi.database.fabric.microsoft.com,1433;User ID=8bfaf0d6-fa20-4ed5-a450-0005ceb77729;Password=z5y8Q~hLcfdAflVrfnYoVxdavIJXZb5tlH~tAbRn;Pooling=True;Min Pool Size=0;Max Pool Size=100;Multiple Active Result Sets=False;Connect Timeout=30;Encrypt=False;Trust Server Certificate=True;Authentication=ActiveDirectoryServicePrincipal;Initial Catalog=apiLayer-fad0b3db-cb87-4a73-9c54-e1fc417bc08c", + "options": { + "set-session-context": false + } + }, + "runtime": { + "telemetry": { + "application-insights": { + "enabled": true, + "connection-string": 
"InstrumentationKey=d303d229-1055-4f48-a811-4dc0a3d4aa1e;IngestionEndpoint=https://westeurope-5.in.applicationinsights.azure.com/;LiveEndpoint=https://westeurope.livediagnostics.monitor.azure.com/;ApplicationId=c98a3731-8125-4aa0-867a-3361c0e536db" + } + }, + "pagination": { + "max-page-size": 100000 + }, + "cache": { + "enabled": true, + "ttl-seconds": 30 + }, + "rest": { + "enabled": true, + "path": "/api", + "request-body-strict": true + }, + "graphql": { + "enabled": true, + "path": "/graphql", + "allow-introspection": true + }, + "host": { + "cors": { + "origins": [], + "allow-credentials": false + }, + "authentication": { + "provider": "StaticWebApps" + }, + "mode": "development" + } + }, + "entities": { + "Shipment": { + "source": { + "object": "silver_ops.v_Shipment", + "type": "table", + "key-fields": [ + "shipmentID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Shipment", + "plural": "Shipments" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "BillOfLading": { + "cardinality": "many", + "target.entity": "BillOfLading", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "EnvOrderNo" + ] + }, + "ShipmentEquipments": { + "cardinality": "many", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + }, + "CargoItems": { + "cardinality": "many", + "target.entity": "CargoItem", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentId" + ] + }, + "carrier": { + "cardinality": "one", + "target.entity": "CarrierPartner", + "source.fields": [ + "carrierID" + ], + "target.fields": [ + "partnerID" + ] + }, + "event": { + "cardinality": "many", + "target.entity": "ShipmentEvent", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "ShipmentId" + ] + }, + "customer": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "customerID" + ], + "target.fields": [ + "partnerID" + ] + }, + 
"shipmentPol": { + "cardinality": "one", + "target.entity": "polPort", + "source.fields": [ + "pol" + ], + "target.fields": [ + "Code" + ] + }, + "shipmentPod": { + "cardinality": "one", + "target.entity": "podPort", + "source.fields": [ + "pod" + ], + "target.fields": [ + "Code" + ] + }, + "shipmentPrc": { + "cardinality": "one", + "target.entity": "prcPort", + "source.fields": [ + "prc" + ], + "target.fields": [ + "Code" + ] + }, + "shipmentPdy": { + "cardinality": "one", + "target.entity": "pdyPort", + "source.fields": [ + "pdy" + ], + "target.fields": [ + "Code" + ] + }, + "goodsSummary": { + "cardinality": "one", + "target.entity": "GoodsSummary", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "podPort": { + "source": { + "object": "silver_ops.Port", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "podPort", + "plural": "podPorts" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "prcPort": { + "source": { + "object": "silver_ops.Port", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "prcPort", + "plural": "prcPorts" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "pdyPort": { + "source": { + "object": "silver_ops.Port", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "pdyPort", + "plural": "pdyPorts" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "polPort": { + "source": { + "object": "silver_ops.Port", + "type": 
"table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "polPort", + "plural": "polPorts" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "TariffContainerSize": { + "source": { + "object": "silver_ops.v_TariffContainerSize", + "type": "table", + "key-fields": [ + "containerSize" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TariffContainerSize", + "plural": "TariffContainerSizes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "GoodsSummary": { + "source": { + "object": "silver_ops.v_GoodsSummmary", + "type": "table", + "key-fields": [ + "shipmentID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "GoodsSummary", + "plural": "GoodsSummaries" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "CarrierPartner": { + "source": { + "object": "silver_ops.Partner", + "type": "table", + "key-fields": [ + "partnerID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "CarrierPartner", + "plural": "CarrierPartners" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Shipments": { + "cardinality": "many", + "target.entity": "Shipment", + "source.fields": [ + "partnerID" + ], + "target.fields": [ + "carrierID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "CustomerPartner": { + 
"source": { + "object": "silver_ops.v_Partner", + "type": "table", + "key-fields": [ + "partnerID" + ] + }, + "cache": { + "enabled": true, + "ttl-seconds": 15 + }, + "graphql": { + "enabled": true, + "type": { + "singular": "CustomerPartner", + "plural": "CustomerPartners" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Organization": { + "cardinality": "one", + "target.entity": "Organization", + "source.fields": [ + "mdmOrganizationEntityID" + ], + "target.fields": [ + "mdmEntityID" + ] + }, + "Shipment": { + "cardinality": "many", + "target.entity": "Shipment", + "source.fields": [ + "partnerID" + ], + "target.fields": [ + "customerID" + ] + }, + "InboundStockOrder": { + "cardinality": "many", + "target.entity": "InboundOrder", + "source.fields": [ + "partnerID" + ], + "target.fields": [ + "customerCode" + ] + }, + "OutboundStockOrder": { + "cardinality": "many", + "target.entity": "OutboundOrder", + "source.fields": [ + "partnerID" + ], + "target.fields": [ + "customerCode" + ] + }, + "Company": { + "cardinality": "many", + "target.entity": "PartnerPerCompany", + "source.fields": [ + "partnerID" + ], + "target.fields": [ + "sourceEntityID" + ] + }, + "TariffContactGroup": { + "cardinality": "many", + "target.entity": "TariffContactGroupMember", + "source.fields": [ + "partnerID" + ], + "target.fields": [ + "PartnerId" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "OrganisationLocation": { + "source": { + "object": "silver_mdm.v_Location", + "type": "table", + "key-fields": [ + "mdmEntityID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "OrganisationLocation", + "plural": "OrganisationLocations" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "OrganizationLocationRelation": { + "source": { 
+ "object": "silver_mdm.OrganizationAddressRelation", + "type": "table", + "key-fields": [ + "mdmRelationshipID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "OrganizationLocationRelation", + "plural": "OrganizationLocationRelations" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Organization": { + "cardinality": "one", + "target.entity": "Organization", + "source.fields": [ + "mdmEntityOrganizationID" + ], + "target.fields": [ + "mdmEntityID" + ] + }, + "Location": { + "cardinality": "one", + "target.entity": "OrganisationLocation", + "source.fields": [ + "mdmEntityLocationID" + ], + "target.fields": [ + "mdmEntityID" + ] + } + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "PartnerPerCompany": { + "source": { + "object": "silver_ops.v_PartnerPerCompany", + "type": "table", + "key-fields": [ + "PartnerPerCompanyId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "PartnerPerCompany", + "plural": "PartnerPerCompanies" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Customer": { + "cardinality": "many", + "target.entity": "CustomerPartner", + "source.fields": [ + "sourceEntityID" + ], + "target.fields": [ + "partnerID" + ] + }, + "organization": { + "cardinality": "one", + "target.entity": "Organization", + "source.fields": [ + "mdmOrganizationEntityID" + ], + "target.fields": [ + "mdmEntityID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "Equipment": { + "source": { + "object": "silver_ops.Equipment", + "type": "table", + "key-fields": [ + "equipmentID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Equipment", + "plural": "Equipment" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "relationships": { + 
"ShipmentEquipments": { + "cardinality": "many", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "equipmentID" + ], + "target.fields": [ + "equipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "ShipmentEquipment": { + "source": { + "object": "silver_ops.v_ShipmentEquipment", + "type": "table", + "key-fields": [ + "id" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentEquipment", + "plural": "ShipmentEquipment" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Equipments": { + "cardinality": "one", + "target.entity": "Equipment", + "source.fields": [ + "equipmentID" + ], + "target.fields": [ + "equipmentID" + ] + }, + "TrackingEvents": { + "cardinality": "many", + "target.entity": "TrackingEvent", + "source.fields": [ + "id" + ], + "target.fields": [ + "shipmentEquipmentID" + ] + }, + "ShipmentEquipmentTransport": { + "cardinality": "many", + "target.entity": "ShipmentEquipmentTransport", + "source.fields": [ + "id" + ], + "target.fields": [ + "shipmentEquipmentID" + ] + }, + "CargoItem": { + "cardinality": "many", + "target.entity": "CargoItem", + "source.fields": [ + "id" + ], + "target.fields": [ + "shipmentEquipmentId" + ] + }, + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "TrackingPort": { + "source": { + "object": "silver_trk.ww_ports", + "type": "table", + "key-fields": [ + "portId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TrackingPort", + "plural": "TrackingPorts" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + 
"ShipmentEquipmentTransport": { + "source": { + "object": "silver_ops.ShipmentEquipmentTransport", + "type": "table", + "key-fields": [ + "id" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentEquipmentTransport", + "plural": "ShipmentEquipmentTransports" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "pol": { + "cardinality": "one", + "target.entity": "polPort", + "source.fields": [ + "polID" + ], + "target.fields": [ + "Code" + ] + }, + "pod": { + "cardinality": "one", + "target.entity": "podPort", + "source.fields": [ + "podID" + ], + "target.fields": [ + "Code" + ] + }, + "ShipmentEquipment": { + "cardinality": "one", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentEquipmentID" + ], + "target.fields": [ + "id" + ] + }, + "Transport": { + "cardinality": "one", + "target.entity": "Transport", + "source.fields": [ + "transportID" + ], + "target.fields": [ + "id" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "Transport": { + "source": { + "object": "silver_ops.Transport", + "type": "table", + "key-fields": [ + "id" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Transport", + "plural": "Transports" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "ShipmentEquipmentTransport": { + "cardinality": "many", + "target.entity": "ShipmentEquipmentTransport", + "source.fields": [ + "id" + ], + "target.fields": [ + "transportID" + ] + }, + "Vessel": { + "cardinality": "one", + "target.entity": "Vessel", + "source.fields": [ + "vesselID" + ], + "target.fields": [ + "vesselID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "SalesDocumentLine": { + "source": { + "object": "silver_ops.SalesDocumentLine", + "type": "table", + "key-fields": [ + "salesDocumentID", + "lineNo" + ] + }, + "graphql": { + "enabled": true, + "type": 
{ + "singular": "SalesDocumentLine", + "plural": "SalesDocumentLines" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "SalesDocument": { + "cardinality": "one", + "target.entity": "SalesDocument", + "source.fields": [ + "salesDocumentID" + ], + "target.fields": [ + "salesDocumentID" + ] + }, + "shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [ + "D365FSynchErrorText", + "D365FSynchStatus", + "Env", + "OrderNo", + "SelltoCustomerNo", + "sourceOrderNo" + ] + } + } + ] + } + ] + }, + "BillOfLadingParty": { + "source": { + "object": "silver_ops.BillOfLadingParty", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "BillOfLadingParty", + "plural": "BillOfLadingParties" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "BillOfLading": { + "cardinality": "one", + "target.entity": "BillOfLading", + "source.fields": [ + "DocumentNo" + ], + "target.fields": [ + "DocumentNo" + ] + }, + "Partner": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "ContactNo" + ], + "target.fields": [ + "partnerID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "BillOfLadingPerShipmentEquipment": { + "source": { + "object": "silver_ops.BillOfLadingPerShipmentEquipment", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "BillOfLadingPerShipmentEquipment", + "plural": "BillOfLadingsPerShipmentEquipment" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "ShipmentEquipment": { + "cardinality": "one", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentEquipmentId" + 
], + "target.fields": [ + "id" + ] + }, + "BillOfLading": { + "cardinality": "one", + "target.entity": "BillOfLading", + "source.fields": [ + "BOLId" + ], + "target.fields": [ + "systemId" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "BillOfLading": { + "source": { + "object": "silver_ops.BillOfLading", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "BillOfLading", + "plural": "BillsOfLading" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "BillOfLadingPerShipmentEquipment": { + "cardinality": "many", + "target.entity": "BillOfLadingPerShipmentEquipment", + "source.fields": [ + "systemId" + ], + "target.fields": [ + "BOLId" + ] + }, + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "EnvOrderNo" + ], + "target.fields": [ + "shipmentID" + ] + }, + "Party": { + "cardinality": "many", + "target.entity": "BillOfLadingParty", + "source.fields": [ + "DocumentNo" + ], + "target.fields": [ + "DocumentNo" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "SalesDocument": { + "source": { + "object": "silver_ops.v_SalesDocument", + "type": "table", + "key-fields": [ + "salesDocumentID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "SalesDocument", + "plural": "SalesDocuments" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "invoicee": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "invoiceeID" + ], + "target.fields": [ + "partnerID" + ] + }, + "SalesDocumentLine": { + "cardinality": "many", + "target.entity": "SalesDocumentLine", + "source.fields": [ + "salesDocumentID" + ], + "target.fields": [ + "salesDocumentID" + ] + }, + "Shipments": { + "cardinality": "many", + "target.entity": "ShipmentsPerSalesDocument", + 
"source.fields": [ + "salesDocumentID" + ], + "target.fields": [ + "salesDocumentID" + ] + }, + "Document": { + "cardinality": "one", + "target.entity": "ShipmentDocument", + "source.fields": [ + "documentNumber" + ], + "target.fields": [ + "invoiceCrMemoNo" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [ + "D365FSynchErrorText", + "D365FSynchStatus", + "Env", + "OrderNo", + "SelltoCustomerNo", + "SourceOrderNos" + ] + } + } + ] + } + ] + }, + "CargoMovementType": { + "source": { + "object": "silver_ops.CargoMovementType", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "CargoMovementType", + "plural": "CargoMovementTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "EquipmentEventType": { + "source": { + "object": "silver_ops.EquipmentEventType", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "EquipmentEventType", + "plural": "EquipmentEventTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "ModeOfTransportType": { + "source": { + "object": "silver_ops.ModeOfTransportType", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ModeOfTransportType", + "plural": "ModeOfTransportTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "Organization": { + "source": { + "object": "silver_mdm.Organization", + "type": "table", + "key-fields": [ + "mdmEntityID" + ] + }, + "graphql": { + "enabled": true, + "type": { 
+ "singular": "Organization", + "plural": "Organizations" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "PartnerPerCompany": { + "cardinality": "many", + "target.entity": "PartnerPerCompany", + "source.fields": [ + "mdmEntityID" + ], + "target.fields": [ + "mdmOrganizationEntityID" + ] + }, + "Partner": { + "cardinality": "many", + "target.entity": "CustomerPartner", + "source.fields": [ + "mdmEntityID" + ], + "target.fields": [ + "mdmOrganizationEntityID" + ] + }, + "OrganizationLocation": { + "cardinality": "many", + "target.entity": "OrganizationLocationRelation", + "source.fields": [ + "mdmEntityID" + ], + "target.fields": [ + "mdmEntityOrganizationID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "ShipmentLocationType": { + "source": { + "object": "silver_ops.ShipmentLocationType", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentLocationType", + "plural": "ShipmentLocationTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "ShipmentStatusType": { + "source": { + "object": "silver_ops.ShipmentStatusType", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentStatusType", + "plural": "ShipmentStatusTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "TransportPlanStageType": { + "source": { + "object": "silver_ops.TransportPlanStageType", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TransportPlanStageType", + "plural": "TransportPlanStageTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + 
"ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "InboundOrder": { + "source": { + "object": "silver_ops.InboundOrder", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "InboundOrder", + "plural": "InboundOrders" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "customer": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "customerCode" + ], + "target.fields": [ + "partnerID" + ] + }, + "inboundOrderLines": { + "cardinality": "many", + "target.entity": "InboundOrderLine", + "source.fields": [ + "no" + ], + "target.fields": [ + "documentNo" + ] + }, + "shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "opsFactOrderId" + ], + "target.fields": [ + "shipmentID" + ] + }, + "characteristics": { + "cardinality": "one", + "target.entity": "CharacteristicGroup", + "source.fields": [ + "characteristicGroupCode" + ], + "target.fields": [ + "code" + ] + }, + "terminalLocation": { + "cardinality": "one", + "target.entity": "TerminalLocation", + "source.fields": [ + "unloadingTerminalCode" + ], + "target.fields": [ + "terminalCode" + ] + }, + "pol": { + "cardinality": "one", + "target.entity": "polPort", + "source.fields": [ + "polCode" + ], + "target.fields": [ + "Code" + ] + }, + "pod": { + "cardinality": "one", + "target.entity": "podPort", + "source.fields": [ + "podCode" + ], + "target.fields": [ + "Code" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [] + } + } + ] + } + ] + }, + "InboundOrderLine": { + "source": { + "object": "silver_ops.v_InboundOrderLine", + "type": "table", + "key-fields": [ + "documentNo", + "lineNo" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "InboundOrderLine", + "plural": "InboundOrderLines" + } + }, + 
"rest": { + "enabled": true + }, + "relationships": { + "inboundOrder": { + "cardinality": "one", + "target.entity": "InboundOrder", + "source.fields": [ + "documentNo" + ], + "target.fields": [ + "no" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [] + } + } + ] + } + ] + }, + "OutboundOrder": { + "source": { + "object": "silver_ops.v_OutboundOrder", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "OutboundOrder", + "plural": "OutboundOrders" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "customer": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "customerCode" + ], + "target.fields": [ + "partnerID" + ] + }, + "outboundOrderLines": { + "cardinality": "many", + "target.entity": "OutboundOrderLine", + "source.fields": [ + "no" + ], + "target.fields": [ + "documentNo" + ] + }, + "shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "opsFactOrderId" + ], + "target.fields": [ + "shipmentID" + ] + }, + "pol": { + "cardinality": "one", + "target.entity": "polPort", + "source.fields": [ + "polCode" + ], + "target.fields": [ + "Code" + ] + }, + "pod": { + "cardinality": "one", + "target.entity": "podPort", + "source.fields": [ + "podCode" + ], + "target.fields": [ + "Code" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [] + } + } + ] + } + ] + }, + "OutboundOrderLine": { + "source": { + "object": "silver_ops.v_OutboundOrderLine", + "type": "table", + "key-fields": [ + "documentNo", + "lineNo" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "OutboundOrderLine", + "plural": "OutboundOrderLines" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "outboundOrder": { + "cardinality": "one", + "target.entity": 
"OutboundOrder", + "source.fields": [ + "documentNo" + ], + "target.fields": [ + "no" + ] + }, + "characteristics": { + "cardinality": "one", + "target.entity": "CharacteristicGroup", + "source.fields": [ + "characteristicGroupCode" + ], + "target.fields": [ + "code" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [] + } + } + ] + } + ] + }, + "PurchaseDocument": { + "source": { + "object": "silver_ops.PurchaseDocument", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "PurchaseDocument", + "plural": "PurchaseDocuments" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "OperationalOrderNo" + ], + "target.fields": [ + "mplBookingReference" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [] + } + } + ] + } + ] + }, + "CharacteristicGroup": { + "source": { + "object": "silver_ops.v_Characteristics", + "type": "table", + "key-fields": [ + "code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "CharacteristicGroup", + "plural": "CharacteristicGroups" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "CargoItem": { + "source": { + "object": "silver_ops.CargoItem", + "type": "table", + "key-fields": [ + "cargoItemID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "CargoItem", + "plural": "CargoItems" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentId" + ], + "target.fields": [ + "shipmentID" + ] + }, + "ShipmentEquipment": { + 
"cardinality": "one", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentEquipmentId" + ], + "target.fields": [ + "id" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "ShipmentsPerSalesDocument": { + "source": { + "object": "silver_ops.v_ShipmentsPerSalesDocument", + "type": "table", + "key-fields": [ + "salesDocumentID", + "shipmentID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentsPerSalesDocument", + "plural": "ShipmentsPerSalesDocuments" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "SalesDocument": { + "cardinality": "one", + "target.entity": "SalesDocument", + "source.fields": [ + "salesDocumentID" + ], + "target.fields": [ + "salesDocumentID" + ] + }, + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "TerminalLocation": { + "source": { + "object": "silver_ops.v_TerminalLocation", + "type": "table", + "key-fields": [ + "terminalCode" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TerminalLocation", + "plural": "TerminalLocations" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "LoadReport": { + "source": { + "object": "silver_ops.v_loadReport", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Loadreport", + "plural": "Loadreports" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "ops_fact_order_id" + ], + "target.fields": [ + "shipmentID" + ] + 
}, + "POL": { + "cardinality": "one", + "target.entity": "polPort", + "source.fields": [ + "polCode" + ], + "target.fields": [ + "Code" + ] + }, + "POD": { + "cardinality": "one", + "target.entity": "podPort", + "source.fields": [ + "podCode" + ], + "target.fields": [ + "Code" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "ShipmentDocument": { + "source": { + "object": "silver_ops.ShipmentDocuments", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentDocument", + "plural": "ShipmentDocuments" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "ops_fact_order_id" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "TrackingEvent": { + "source": { + "object": "silver_trk.TrackingEvent", + "type": "table", + "key-fields": [ + "trackingEventID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TrackingEvent", + "plural": "TrackingEvents" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "ShipmentEquipment": { + "cardinality": "one", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentEquipmentID" + ], + "target.fields": [ + "id" + ] + }, + "TrackingPort": { + "cardinality": "one", + "target.entity": "TrackingPort", + "source.fields": [ + "portID" + ], + "target.fields": [ + "portId" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "LocationPOD": { + "source": { + "object": "silver_ops.Location", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LocationPOD", + "plural": "LocationsPOD" + } + }, + "rest": { + "enabled": 
true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "LocationPOL": { + "source": { + "object": "silver_ops.Location", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LocationPOL", + "plural": "LocationsPOL" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "LocationPDY": { + "source": { + "object": "silver_ops.Location", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LocationPDY", + "plural": "LocationsPDY" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "LocationPRC": { + "source": { + "object": "silver_ops.Location", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LocationPRC", + "plural": "LocationsPRC" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "LocationTS1": { + "source": { + "object": "silver_ops.Location", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LocationTS1", + "plural": "LocationsTS1" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "LocationTS2": { + "source": { + "object": "silver_ops.Location", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LocationTS2", + "plural": "LocationsTS2" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + 
"TariffContainerGroup": { + "source": { + "object": "silver_ops.TariffContainerGroup", + "type": "table", + "key-fields": [ + "ContainerCode" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TariffContainerGroup", + "plural": "TariffContainerGroups" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Tariff": { + "cardinality": "many", + "target.entity": "Tariff", + "source.fields": [ + "ContainerCode" + ], + "target.fields": [ + "ContainerType" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "TariffContactGroupMember": { + "source": { + "object": "silver_ops.TariffContactGroupMembers", + "type": "table", + "key-fields": [ + "ContactGroupCode" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TariffContactGroupMember", + "plural": "TariffContactGroupMembers" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Tariff": { + "cardinality": "many", + "target.entity": "Tariff", + "source.fields": [ + "ContactGroupCode" + ], + "target.fields": [ + "SourceNo" + ] + }, + "Customer": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "PartnerId" + ], + "target.fields": [ + "partnerID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "TariffSurcharge": { + "source": { + "object": "silver_ops.TariffSurcharge", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TariffSurcharge", + "plural": "TariffSurcharges" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Tariff": { + "cardinality": "one", + "target.entity": "Tariff", + "source.fields": [ + "TariffId" + ], + "target.fields": [ + "TariffSurchargeKey" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "ShipmentEvent": { + 
"source": { + "object": "silver_ops.v_ShipmentEvent", + "type": "table", + "key-fields": [ + "ShipmentId", + "systemCreatedAt" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentEvent", + "plural": "ShipmentEvents" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "ShipmentId" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "LoadtypeType": { + "source": { + "object": "silver_ops.v_LoadTypeType", + "type": "table", + "key-fields": [ + "loadType" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LoadtypeType", + "plural": "LoadtypeTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "FlowtypeType": { + "source": { + "object": "silver_ops.v_FlowTypeType", + "type": "table", + "key-fields": [ + "flowType" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "FlowtypeType", + "plural": "FlowtypeTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "Tariff": { + "source": { + "object": "silver_ops.v_Tariff", + "type": "table", + "key-fields": [ + "TariffId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Tariff", + "plural": "Tariffs" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "TariffSurcharge": { + "cardinality": "many", + "target.entity": "TariffSurcharge", + "source.fields": [ + "TariffSurchargeKey" + ], + "target.fields": [ + "TariffId" + ] + }, + "TariffContactGroupMember": { + "cardinality": "many", + "target.entity": 
"TariffContactGroupMember", + "source.fields": [ + "SourceNo" + ], + "target.fields": [ + "ContactGroupCode" + ] + }, + "Pol": { + "cardinality": "one", + "target.entity": "LocationPOL", + "source.fields": [ + "FromZoneCode" + ], + "target.fields": [ + "Code" + ] + }, + "Pod": { + "cardinality": "one", + "target.entity": "LocationPOD", + "source.fields": [ + "ToZoneCode" + ], + "target.fields": [ + "Code" + ] + }, + "Prc": { + "cardinality": "one", + "target.entity": "LocationPRC", + "source.fields": [ + "PRCCode" + ], + "target.fields": [ + "Code" + ] + }, + "Pdy": { + "cardinality": "one", + "target.entity": "LocationPDY", + "source.fields": [ + "PDYCode" + ], + "target.fields": [ + "Code" + ] + }, + "Transshipment1": { + "cardinality": "one", + "target.entity": "LocationTS1", + "source.fields": [ + "TransshipmentPort" + ], + "target.fields": [ + "Code" + ] + }, + "Transshipment2": { + "cardinality": "one", + "target.entity": "LocationTS2", + "source.fields": [ + "TransshipmentPort2" + ], + "target.fields": [ + "Code" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "TariffAll": { + "source": { + "object": "silver_ops.v_TariffAll", + "type": "table", + "key-fields": [ + "TariffId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TariffAll", + "plural": "TariffsAll" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "TariffSurcharge": { + "cardinality": "many", + "target.entity": "TariffSurcharge", + "source.fields": [ + "TariffSurchargeKey" + ], + "target.fields": [ + "TariffId" + ] + }, + "TariffContactGroupMember": { + "cardinality": "many", + "target.entity": "TariffContactGroupMember", + "source.fields": [ + "SourceNo" + ], + "target.fields": [ + "ContactGroupCode" + ] + }, + "Pol": { + "cardinality": "one", + "target.entity": "LocationPOL", + "source.fields": [ + "FromZoneCode" + ], + "target.fields": [ + "Code" + ] + }, + "Pod": { + "cardinality": 
"one", + "target.entity": "LocationPOD", + "source.fields": [ + "ToZoneCode" + ], + "target.fields": [ + "Code" + ] + }, + "Prc": { + "cardinality": "one", + "target.entity": "LocationPRC", + "source.fields": [ + "PRCCode" + ], + "target.fields": [ + "Code" + ] + }, + "Pdy": { + "cardinality": "one", + "target.entity": "LocationPDY", + "source.fields": [ + "PDYCode" + ], + "target.fields": [ + "Code" + ] + }, + "Transshipment1": { + "cardinality": "one", + "target.entity": "LocationTS1", + "source.fields": [ + "TransshipmentPort" + ], + "target.fields": [ + "Code" + ] + }, + "Transshipment2": { + "cardinality": "one", + "target.entity": "LocationTS2", + "source.fields": [ + "TransshipmentPort2" + ], + "target.fields": [ + "Code" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "Vessel": { + "source": { + "object": "silver_ops.Vessel", + "type": "table", + "key-fields": [ + "vesselID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Vessel", + "plural": "Vessels" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Transport": { + "cardinality": "many", + "target.entity": "Transport", + "source.fields": [ + "vesselID" + ], + "target.fields": [ + "vesselID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "WindwardRegistrationFeedback": { + "source": { + "object": "silver_trk.ww_TrackingFeedback", + "type": "table", + "key-fields": [ + "shipmentId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "WindwardRegistrationFeedback", + "plural": "WindwardRegistrationFeedbacks" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentId" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": 
"read" + } + ] + } + ] + }, + "EquipmentMap": { + "source": { + "object": "silver_ops.usp_MapSource", + "type": "stored-procedure", + "parameters": { + "customerScope": "string" + } + }, + "cache": { + "enabled": true, + "ttl-seconds": 60 + }, + "graphql": { + "enabled": true, + "type": { + "singular": "EquipmentMap", + "plural": "EquipmentMaps" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "execute" + } + ] + } + ] + }, + "GlobalSearch": { + "source": { + "object": "silver_ops.usp_globalSearch", + "type": "stored-procedure", + "parameters": { + "searchString": "string", + "customerScope": "string" + } + }, + "graphql": { + "enabled": true, + "type": { + "singular": "GlobalSearch", + "plural": "GlobalSearches" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "execute" + } + ] + } + ] + } + } +} diff --git a/src/Service/dab-config.json b/src/Service/dab-config.json new file mode 100644 index 0000000000..82ea6f2736 --- /dev/null +++ b/src/Service/dab-config.json @@ -0,0 +1,2921 @@ +{ + "$schema": "https://github.com/Azure/data-api-builder/releases/latest/download/dab.draft.schema.json", + "data-source": { + "database-type": "mssql", + "connection-string": "@env('DATABASE_CONNECTION_STRING')", + "options": { + "set-session-context": false + } + }, + "runtime": { + "telemetry": { + "application-insights": { + "enabled": true, + "connection-string": 
"InstrumentationKey=d303d229-1055-4f48-a811-4dc0a3d4aa1e;IngestionEndpoint=https://westeurope-5.in.applicationinsights.azure.com/;LiveEndpoint=https://westeurope.livediagnostics.monitor.azure.com/;ApplicationId=c98a3731-8125-4aa0-867a-3361c0e536db" + } + }, + "pagination": { + "max-page-size": 100000 + }, + "cache": { + "enabled": true, + "ttl-seconds": 30 + }, + "rest": { + "enabled": true, + "path": "/api", + "request-body-strict": true + }, + "graphql": { + "enabled": true, + "path": "/graphql", + "allow-introspection": true + }, + "host": { + "cors": { + "origins": [], + "allow-credentials": false + }, + "authentication": { + "provider": "StaticWebApps" + }, + "mode": "development" + } + }, + "entities": { + "Shipment": { + "source": { + "object": "silver_ops.v_Shipment", + "type": "table", + "key-fields": [ + "shipmentID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Shipment", + "plural": "Shipments" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "BillOfLading": { + "cardinality": "many", + "target.entity": "BillOfLading", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "EnvOrderNo" + ] + }, + "ShipmentEquipments": { + "cardinality": "many", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + }, + "CargoItems": { + "cardinality": "many", + "target.entity": "CargoItem", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentId" + ] + }, + "carrier": { + "cardinality": "one", + "target.entity": "CarrierPartner", + "source.fields": [ + "carrierID" + ], + "target.fields": [ + "partnerID" + ] + }, + "event": { + "cardinality": "many", + "target.entity": "ShipmentEvent", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "ShipmentId" + ] + }, + "customer": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "customerID" + ], + "target.fields": [ + "partnerID" + ] + }, + 
"shipmentPol": { + "cardinality": "one", + "target.entity": "polPort", + "source.fields": [ + "pol" + ], + "target.fields": [ + "Code" + ] + }, + "shipmentPod": { + "cardinality": "one", + "target.entity": "podPort", + "source.fields": [ + "pod" + ], + "target.fields": [ + "Code" + ] + }, + "shipmentPrc": { + "cardinality": "one", + "target.entity": "prcPort", + "source.fields": [ + "prc" + ], + "target.fields": [ + "Code" + ] + }, + "shipmentPdy": { + "cardinality": "one", + "target.entity": "pdyPort", + "source.fields": [ + "pdy" + ], + "target.fields": [ + "Code" + ] + }, + "goodsSummary": { + "cardinality": "one", + "target.entity": "GoodsSummary", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "podPort": { + "source": { + "object": "silver_ops.Port", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "podPort", + "plural": "podPorts" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "prcPort": { + "source": { + "object": "silver_ops.Port", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "prcPort", + "plural": "prcPorts" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "pdyPort": { + "source": { + "object": "silver_ops.Port", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "pdyPort", + "plural": "pdyPorts" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "polPort": { + "source": { + "object": "silver_ops.Port", + "type": 
"table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "polPort", + "plural": "polPorts" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "TariffContainerSize": { + "source": { + "object": "silver_ops.v_TariffContainerSize", + "type": "table", + "key-fields": [ + "containerSize" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TariffContainerSize", + "plural": "TariffContainerSizes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "GoodsSummary": { + "source": { + "object": "silver_ops.v_GoodsSummmary", + "type": "table", + "key-fields": [ + "shipmentID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "GoodsSummary", + "plural": "GoodsSummaries" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "CarrierPartner": { + "source": { + "object": "silver_ops.Partner", + "type": "table", + "key-fields": [ + "partnerID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "CarrierPartner", + "plural": "CarrierPartners" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Shipments": { + "cardinality": "many", + "target.entity": "Shipment", + "source.fields": [ + "partnerID" + ], + "target.fields": [ + "carrierID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "CustomerPartner": { + 
"source": { + "object": "silver_ops.v_Partner", + "type": "table", + "key-fields": [ + "partnerID" + ] + }, + "cache": { + "enabled": true, + "ttl-seconds": 15 + }, + "graphql": { + "enabled": true, + "type": { + "singular": "CustomerPartner", + "plural": "CustomerPartners" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Organization": { + "cardinality": "one", + "target.entity": "Organization", + "source.fields": [ + "mdmOrganizationEntityID" + ], + "target.fields": [ + "mdmEntityID" + ] + }, + "Shipment": { + "cardinality": "many", + "target.entity": "Shipment", + "source.fields": [ + "partnerID" + ], + "target.fields": [ + "customerID" + ] + }, + "InboundStockOrder": { + "cardinality": "many", + "target.entity": "InboundOrder", + "source.fields": [ + "partnerID" + ], + "target.fields": [ + "customerCode" + ] + }, + "OutboundStockOrder": { + "cardinality": "many", + "target.entity": "OutboundOrder", + "source.fields": [ + "partnerID" + ], + "target.fields": [ + "customerCode" + ] + }, + "Company": { + "cardinality": "many", + "target.entity": "PartnerPerCompany", + "source.fields": [ + "partnerID" + ], + "target.fields": [ + "sourceEntityID" + ] + }, + "TariffContactGroup": { + "cardinality": "many", + "target.entity": "TariffContactGroupMember", + "source.fields": [ + "partnerID" + ], + "target.fields": [ + "PartnerId" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "OrganisationLocation": { + "source": { + "object": "silver_mdm.v_Location", + "type": "table", + "key-fields": [ + "mdmEntityID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "OrganisationLocation", + "plural": "OrganisationLocations" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "OrganizationLocationRelation": { + "source": { 
+ "object": "silver_mdm.OrganizationAddressRelation", + "type": "table", + "key-fields": [ + "mdmRelationshipID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "OrganizationLocationRelation", + "plural": "OrganizationLocationRelations" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Organization": { + "cardinality": "one", + "target.entity": "Organization", + "source.fields": [ + "mdmEntityOrganizationID" + ], + "target.fields": [ + "mdmEntityID" + ] + }, + "Location": { + "cardinality": "one", + "target.entity": "OrganisationLocation", + "source.fields": [ + "mdmEntityLocationID" + ], + "target.fields": [ + "mdmEntityID" + ] + } + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "PartnerPerCompany": { + "source": { + "object": "silver_ops.v_PartnerPerCompany", + "type": "table", + "key-fields": [ + "PartnerPerCompanyId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "PartnerPerCompany", + "plural": "PartnerPerCompanies" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Customer": { + "cardinality": "many", + "target.entity": "CustomerPartner", + "source.fields": [ + "sourceEntityID" + ], + "target.fields": [ + "partnerID" + ] + }, + "organization": { + "cardinality": "one", + "target.entity": "Organization", + "source.fields": [ + "mdmOrganizationEntityID" + ], + "target.fields": [ + "mdmEntityID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "Equipment": { + "source": { + "object": "silver_ops.Equipment", + "type": "table", + "key-fields": [ + "equipmentID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Equipment", + "plural": "Equipment" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "relationships": { + 
"ShipmentEquipments": { + "cardinality": "many", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "equipmentID" + ], + "target.fields": [ + "equipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "ShipmentEquipment": { + "source": { + "object": "silver_ops.v_ShipmentEquipment", + "type": "table", + "key-fields": [ + "id" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentEquipment", + "plural": "ShipmentEquipment" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Equipments": { + "cardinality": "one", + "target.entity": "Equipment", + "source.fields": [ + "equipmentID" + ], + "target.fields": [ + "equipmentID" + ] + }, + "TrackingEvents": { + "cardinality": "many", + "target.entity": "TrackingEvent", + "source.fields": [ + "id" + ], + "target.fields": [ + "shipmentEquipmentID" + ] + }, + "ShipmentEquipmentTransport": { + "cardinality": "many", + "target.entity": "ShipmentEquipmentTransport", + "source.fields": [ + "id" + ], + "target.fields": [ + "shipmentEquipmentID" + ] + }, + "CargoItem": { + "cardinality": "many", + "target.entity": "CargoItem", + "source.fields": [ + "id" + ], + "target.fields": [ + "shipmentEquipmentId" + ] + }, + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "TrackingPort": { + "source": { + "object": "silver_trk.ww_ports", + "type": "table", + "key-fields": [ + "portId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TrackingPort", + "plural": "TrackingPorts" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + 
"ShipmentEquipmentTransport": { + "source": { + "object": "silver_ops.ShipmentEquipmentTransport", + "type": "table", + "key-fields": [ + "id" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentEquipmentTransport", + "plural": "ShipmentEquipmentTransports" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "pol": { + "cardinality": "one", + "target.entity": "polPort", + "source.fields": [ + "polID" + ], + "target.fields": [ + "Code" + ] + }, + "pod": { + "cardinality": "one", + "target.entity": "podPort", + "source.fields": [ + "podID" + ], + "target.fields": [ + "Code" + ] + }, + "ShipmentEquipment": { + "cardinality": "one", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentEquipmentID" + ], + "target.fields": [ + "id" + ] + }, + "Transport": { + "cardinality": "one", + "target.entity": "Transport", + "source.fields": [ + "transportID" + ], + "target.fields": [ + "id" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "Transport": { + "source": { + "object": "silver_ops.Transport", + "type": "table", + "key-fields": [ + "id" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Transport", + "plural": "Transports" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "ShipmentEquipmentTransport": { + "cardinality": "many", + "target.entity": "ShipmentEquipmentTransport", + "source.fields": [ + "id" + ], + "target.fields": [ + "transportID" + ] + }, + "Vessel": { + "cardinality": "one", + "target.entity": "Vessel", + "source.fields": [ + "vesselID" + ], + "target.fields": [ + "vesselID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "SalesDocumentLine": { + "source": { + "object": "silver_ops.SalesDocumentLine", + "type": "table", + "key-fields": [ + "salesDocumentID", + "lineNo" + ] + }, + "graphql": { + "enabled": true, + "type": 
{ + "singular": "SalesDocumentLine", + "plural": "SalesDocumentLines" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "SalesDocument": { + "cardinality": "one", + "target.entity": "SalesDocument", + "source.fields": [ + "salesDocumentID" + ], + "target.fields": [ + "salesDocumentID" + ] + }, + "shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [ + "D365FSynchErrorText", + "D365FSynchStatus", + "Env", + "OrderNo", + "SelltoCustomerNo", + "sourceOrderNo" + ] + } + } + ] + } + ] + }, + "BillOfLadingParty": { + "source": { + "object": "silver_ops.BillOfLadingParty", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "BillOfLadingParty", + "plural": "BillOfLadingParties" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "BillOfLading": { + "cardinality": "one", + "target.entity": "BillOfLading", + "source.fields": [ + "DocumentNo" + ], + "target.fields": [ + "DocumentNo" + ] + }, + "Partner": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "ContactNo" + ], + "target.fields": [ + "partnerID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "BillOfLadingPerShipmentEquipment": { + "source": { + "object": "silver_ops.BillOfLadingPerShipmentEquipment", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "BillOfLadingPerShipmentEquipment", + "plural": "BillOfLadingsPerShipmentEquipment" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "ShipmentEquipment": { + "cardinality": "one", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentEquipmentId" + 
], + "target.fields": [ + "id" + ] + }, + "BillOfLading": { + "cardinality": "one", + "target.entity": "BillOfLading", + "source.fields": [ + "BOLId" + ], + "target.fields": [ + "systemId" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "BillOfLading": { + "source": { + "object": "silver_ops.BillOfLading", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "BillOfLading", + "plural": "BillsOfLading" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "BillOfLadingPerShipmentEquipment": { + "cardinality": "many", + "target.entity": "BillOfLadingPerShipmentEquipment", + "source.fields": [ + "systemId" + ], + "target.fields": [ + "BOLId" + ] + }, + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "EnvOrderNo" + ], + "target.fields": [ + "shipmentID" + ] + }, + "Party": { + "cardinality": "many", + "target.entity": "BillOfLadingParty", + "source.fields": [ + "DocumentNo" + ], + "target.fields": [ + "DocumentNo" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "SalesDocument": { + "source": { + "object": "silver_ops.v_SalesDocument", + "type": "table", + "key-fields": [ + "salesDocumentID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "SalesDocument", + "plural": "SalesDocuments" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "invoicee": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "invoiceeID" + ], + "target.fields": [ + "partnerID" + ] + }, + "SalesDocumentLine": { + "cardinality": "many", + "target.entity": "SalesDocumentLine", + "source.fields": [ + "salesDocumentID" + ], + "target.fields": [ + "salesDocumentID" + ] + }, + "Shipments": { + "cardinality": "many", + "target.entity": "ShipmentsPerSalesDocument", + 
"source.fields": [ + "salesDocumentID" + ], + "target.fields": [ + "salesDocumentID" + ] + }, + "Document": { + "cardinality": "one", + "target.entity": "ShipmentDocument", + "source.fields": [ + "documentNumber" + ], + "target.fields": [ + "invoiceCrMemoNo" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [ + "D365FSynchErrorText", + "D365FSynchStatus", + "Env", + "OrderNo", + "SelltoCustomerNo", + "SourceOrderNos" + ] + } + } + ] + } + ] + }, + "CargoMovementType": { + "source": { + "object": "silver_ops.CargoMovementType", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "CargoMovementType", + "plural": "CargoMovementTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "EquipmentEventType": { + "source": { + "object": "silver_ops.EquipmentEventType", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "EquipmentEventType", + "plural": "EquipmentEventTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "ModeOfTransportType": { + "source": { + "object": "silver_ops.ModeOfTransportType", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ModeOfTransportType", + "plural": "ModeOfTransportTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "Organization": { + "source": { + "object": "silver_mdm.Organization", + "type": "table", + "key-fields": [ + "mdmEntityID" + ] + }, + "graphql": { + "enabled": true, + "type": { 
+ "singular": "Organization", + "plural": "Organizations" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "PartnerPerCompany": { + "cardinality": "many", + "target.entity": "PartnerPerCompany", + "source.fields": [ + "mdmEntityID" + ], + "target.fields": [ + "mdmOrganizationEntityID" + ] + }, + "Partner": { + "cardinality": "many", + "target.entity": "CustomerPartner", + "source.fields": [ + "mdmEntityID" + ], + "target.fields": [ + "mdmOrganizationEntityID" + ] + }, + "OrganizationLocation": { + "cardinality": "many", + "target.entity": "OrganizationLocationRelation", + "source.fields": [ + "mdmEntityID" + ], + "target.fields": [ + "mdmEntityOrganizationID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "ShipmentLocationType": { + "source": { + "object": "silver_ops.ShipmentLocationType", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentLocationType", + "plural": "ShipmentLocationTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "ShipmentStatusType": { + "source": { + "object": "silver_ops.ShipmentStatusType", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentStatusType", + "plural": "ShipmentStatusTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "TransportPlanStageType": { + "source": { + "object": "silver_ops.TransportPlanStageType", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TransportPlanStageType", + "plural": "TransportPlanStageTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + 
"ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "InboundOrder": { + "source": { + "object": "silver_ops.InboundOrder", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "InboundOrder", + "plural": "InboundOrders" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "customer": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "customerCode" + ], + "target.fields": [ + "partnerID" + ] + }, + "inboundOrderLines": { + "cardinality": "many", + "target.entity": "InboundOrderLine", + "source.fields": [ + "no" + ], + "target.fields": [ + "documentNo" + ] + }, + "shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "opsFactOrderId" + ], + "target.fields": [ + "shipmentID" + ] + }, + "characteristics": { + "cardinality": "one", + "target.entity": "CharacteristicGroup", + "source.fields": [ + "characteristicGroupCode" + ], + "target.fields": [ + "code" + ] + }, + "terminalLocation": { + "cardinality": "one", + "target.entity": "TerminalLocation", + "source.fields": [ + "unloadingTerminalCode" + ], + "target.fields": [ + "terminalCode" + ] + }, + "pol": { + "cardinality": "one", + "target.entity": "polPort", + "source.fields": [ + "polCode" + ], + "target.fields": [ + "Code" + ] + }, + "pod": { + "cardinality": "one", + "target.entity": "podPort", + "source.fields": [ + "podCode" + ], + "target.fields": [ + "Code" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [] + } + } + ] + } + ] + }, + "InboundOrderLine": { + "source": { + "object": "silver_ops.v_InboundOrderLine", + "type": "table", + "key-fields": [ + "documentNo", + "lineNo" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "InboundOrderLine", + "plural": "InboundOrderLines" + } + }, + 
"rest": { + "enabled": true + }, + "relationships": { + "inboundOrder": { + "cardinality": "one", + "target.entity": "InboundOrder", + "source.fields": [ + "documentNo" + ], + "target.fields": [ + "no" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [] + } + } + ] + } + ] + }, + "OutboundOrder": { + "source": { + "object": "silver_ops.v_OutboundOrder", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "OutboundOrder", + "plural": "OutboundOrders" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "customer": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "customerCode" + ], + "target.fields": [ + "partnerID" + ] + }, + "outboundOrderLines": { + "cardinality": "many", + "target.entity": "OutboundOrderLine", + "source.fields": [ + "no" + ], + "target.fields": [ + "documentNo" + ] + }, + "shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "opsFactOrderId" + ], + "target.fields": [ + "shipmentID" + ] + }, + "pol": { + "cardinality": "one", + "target.entity": "polPort", + "source.fields": [ + "polCode" + ], + "target.fields": [ + "Code" + ] + }, + "pod": { + "cardinality": "one", + "target.entity": "podPort", + "source.fields": [ + "podCode" + ], + "target.fields": [ + "Code" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [] + } + } + ] + } + ] + }, + "OutboundOrderLine": { + "source": { + "object": "silver_ops.v_OutboundOrderLine", + "type": "table", + "key-fields": [ + "documentNo", + "lineNo" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "OutboundOrderLine", + "plural": "OutboundOrderLines" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "outboundOrder": { + "cardinality": "one", + "target.entity": 
"OutboundOrder", + "source.fields": [ + "documentNo" + ], + "target.fields": [ + "no" + ] + }, + "charecteristics": { + "cardinality": "one", + "target.entity": "CharacteristicGroup", + "source.fields": [ + "characteristicGroupCode" + ], + "target.fields": [ + "code" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [] + } + } + ] + } + ] + }, + "PurchaseDocument": { + "source": { + "object": "silver_ops.PurchaseDocument", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "PurchaseDocument", + "plural": "PurchaseDocuments" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "OperationalOrderNo" + ], + "target.fields": [ + "mplBookingReference" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [] + } + } + ] + } + ] + }, + "CharacteristicGroup": { + "source": { + "object": "silver_ops.v_Characteristics", + "type": "table", + "key-fields": [ + "code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "CharacteristicGroup", + "plural": "CharacteristicGroups" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "CargoItem": { + "source": { + "object": "silver_ops.CargoItem", + "type": "table", + "key-fields": [ + "cargoItemID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "CargoItem", + "plural": "CargoItems" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentId" + ], + "target.fields": [ + "shipmentID" + ] + }, + "ShipmentEquipment": { + 
"cardinality": "one", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentEquipmentId" + ], + "target.fields": [ + "id" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "ShipmentsPerSalesDocument": { + "source": { + "object": "silver_ops.v_ShipmentsPerSalesDocument", + "type": "table", + "key-fields": [ + "salesDocumentID", + "shipmentID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentsPerSalesDocument", + "plural": "ShipmentsPerSalesDocuments" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "SalesDocument": { + "cardinality": "one", + "target.entity": "SalesDocument", + "source.fields": [ + "salesDocumentID" + ], + "target.fields": [ + "salesDocumentID" + ] + }, + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "TerminalLocation": { + "source": { + "object": "silver_ops.v_TerminalLocation", + "type": "table", + "key-fields": [ + "terminalCode" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TerminalLocation", + "plural": "TerminalLocations" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "LoadReport": { + "source": { + "object": "silver_ops.v_loadReport", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Loadreport", + "plural": "Loadreports" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "ops_fact_order_id" + ], + "target.fields": [ + "shipmentID" + ] + 
}, + "POL": { + "cardinality": "one", + "target.entity": "polPort", + "source.fields": [ + "polCode" + ], + "target.fields": [ + "Code" + ] + }, + "POD": { + "cardinality": "one", + "target.entity": "podPort", + "source.fields": [ + "podCode" + ], + "target.fields": [ + "Code" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "ShipmentDocument": { + "source": { + "object": "silver_ops.ShipmentDocuments", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentDocument", + "plural": "ShipmentDocuments" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "ops_fact_order_id" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "TrackingEvent": { + "source": { + "object": "silver_trk.TrackingEvent", + "type": "table", + "key-fields": [ + "trackingEventID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TrackingEvent", + "plural": "TrackingEvents" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "ShipmentEquipment": { + "cardinality": "one", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentEquipmentID" + ], + "target.fields": [ + "id" + ] + }, + "TrackingPort": { + "cardinality": "one", + "target.entity": "TrackingPort", + "source.fields": [ + "portID" + ], + "target.fields": [ + "portId" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "LocationPOD": { + "source": { + "object": "silver_ops.Location", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LocationPOD", + "plural": "LocationsPOD" + } + }, + "rest": { + "enabled": 
true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "LocationPOL": { + "source": { + "object": "silver_ops.Location", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LocationPOL", + "plural": "LocationsPOL" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "LocationPDY": { + "source": { + "object": "silver_ops.Location", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LocationPDY", + "plural": "LocationsPDY" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "LocationPRC": { + "source": { + "object": "silver_ops.Location", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LocationPRC", + "plural": "LocationsPRC" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "LocationTS1": { + "source": { + "object": "silver_ops.Location", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LocationTS1", + "plural": "LocationsTS1" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "LocationTS2": { + "source": { + "object": "silver_ops.Location", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LocationPS2", + "plural": "LocationsTS2" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + 
"TariffContainerGroup": { + "source": { + "object": "silver_ops.TariffContainerGroup", + "type": "table", + "key-fields": [ + "ContainerCode" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TariffContainerGroup", + "plural": "TariffContainerGroups" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Tariff": { + "cardinality": "many", + "target.entity": "Tariff", + "source.fields": [ + "ContainerCode" + ], + "target.fields": [ + "ContainerType" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "TariffContactGroupMember": { + "source": { + "object": "silver_ops.TariffContactGroupMembers", + "type": "table", + "key-fields": [ + "ContactGroupCode" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TariffContactGroupMember", + "plural": "TariffContactGroupMembers" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Tariff": { + "cardinality": "many", + "target.entity": "Tariff", + "source.fields": [ + "ContactGroupCode" + ], + "target.fields": [ + "SourceNo" + ] + }, + "Customer": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "PartnerId" + ], + "target.fields": [ + "partnerID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "TariffSurcharge": { + "source": { + "object": "silver_ops.TariffSurcharge", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TariffSurcharge", + "plural": "TariffSurcharges" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Tariff": { + "cardinality": "one", + "target.entity": "Tariff", + "source.fields": [ + "TariffId" + ], + "target.fields": [ + "TariffSurchargeKey" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "ShipmentEvent": { + 
"source": { + "object": "silver_ops.v_ShipmentEvent", + "type": "table", + "key-fields": [ + "ShipmentId", + "systemCreatedAt" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentEvent", + "plural": "ShipmentEvents" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "ShipmentId" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "LoadtypeType": { + "source": { + "object": "silver_ops.v_LoadTypeType", + "type": "table", + "key-fields": [ + "loadType" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LoadtypeType", + "plural": "LoadtypeTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "FlowtypeType": { + "source": { + "object": "silver_ops.v_FlowTypeType", + "type": "table", + "key-fields": [ + "flowType" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "FlowtypeType", + "plural": "FlowtypeTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "Tariff": { + "source": { + "object": "silver_ops.v_Tariff", + "type": "table", + "key-fields": [ + "TariffId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Tariff", + "plural": "Tariffs" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "TariffSurcharge": { + "cardinality": "many", + "target.entity": "TariffSurcharge", + "source.fields": [ + "TariffSurchargeKey" + ], + "target.fields": [ + "TariffId" + ] + }, + "TariffContactGroupMember": { + "cardinality": "many", + "target.entity": 
"TariffContactGroupMember", + "source.fields": [ + "SourceNo" + ], + "target.fields": [ + "ContactGroupCode" + ] + }, + "Pol": { + "cardinality": "one", + "target.entity": "LocationPOL", + "source.fields": [ + "FromZoneCode" + ], + "target.fields": [ + "Code" + ] + }, + "Pod": { + "cardinality": "one", + "target.entity": "LocationPOD", + "source.fields": [ + "ToZoneCode" + ], + "target.fields": [ + "Code" + ] + }, + "Prc": { + "cardinality": "one", + "target.entity": "LocationPRC", + "source.fields": [ + "PRCCode" + ], + "target.fields": [ + "Code" + ] + }, + "Pdy": { + "cardinality": "one", + "target.entity": "LocationPDY", + "source.fields": [ + "PDYCode" + ], + "target.fields": [ + "Code" + ] + }, + "Transshipment1": { + "cardinality": "one", + "target.entity": "LocationTS1", + "source.fields": [ + "TransshipmentPort" + ], + "target.fields": [ + "Code" + ] + }, + "Transshipment2": { + "cardinality": "one", + "target.entity": "LocationTS2", + "source.fields": [ + "TransshipmentPort2" + ], + "target.fields": [ + "Code" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "TariffAll": { + "source": { + "object": "silver_ops.v_TariffAll", + "type": "table", + "key-fields": [ + "TariffId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TariffAll", + "plural": "TariffsAll" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "TariffSurcharge": { + "cardinality": "many", + "target.entity": "TariffSurcharge", + "source.fields": [ + "TariffSurchargeKey" + ], + "target.fields": [ + "TariffId" + ] + }, + "TariffContactGroupMember": { + "cardinality": "many", + "target.entity": "TariffContactGroupMember", + "source.fields": [ + "SourceNo" + ], + "target.fields": [ + "ContactGroupCode" + ] + }, + "Pol": { + "cardinality": "one", + "target.entity": "LocationPOL", + "source.fields": [ + "FromZoneCode" + ], + "target.fields": [ + "Code" + ] + }, + "Pod": { + "cardinality": 
"one", + "target.entity": "LocationPOD", + "source.fields": [ + "ToZoneCode" + ], + "target.fields": [ + "Code" + ] + }, + "Prc": { + "cardinality": "one", + "target.entity": "LocationPRC", + "source.fields": [ + "PRCCode" + ], + "target.fields": [ + "Code" + ] + }, + "Pdy": { + "cardinality": "one", + "target.entity": "LocationPDY", + "source.fields": [ + "PDYCode" + ], + "target.fields": [ + "Code" + ] + }, + "Transshipment1": { + "cardinality": "one", + "target.entity": "LocationTS1", + "source.fields": [ + "TransshipmentPort" + ], + "target.fields": [ + "Code" + ] + }, + "Transshipment2": { + "cardinality": "one", + "target.entity": "LocationTS2", + "source.fields": [ + "TransshipmentPort2" + ], + "target.fields": [ + "Code" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "Vessel": { + "source": { + "object": "silver_ops.Vessel", + "type": "table", + "key-fields": [ + "vesselID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Vessel", + "plural": "Vessels" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Transport": { + "cardinality": "many", + "target.entity": "Transport", + "source.fields": [ + "vesselID" + ], + "target.fields": [ + "vesselID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "WindwardRegistrationFeedback": { + "source": { + "object": "silver_trk.ww_TrackingFeedback", + "type": "table", + "key-fields": [ + "shipmentId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "WindwardRegistrationFeedback", + "plural": "WindwardRegistrationFeedbacks" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentId" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": 
"read" + } + ] + } + ] + }, + "EquipmentMap": { + "source": { + "object": "silver_ops.usp_MapSource", + "type": "stored-procedure", + "parameters": { + "customerScope": "string" + } + }, + "cache": { + "enabled": true, + "ttl-seconds": 60 + }, + "graphql": { + "enabled": true, + "type": { + "singular": "EquipmentMap", + "plural": "EquipmentMaps" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "execute" + } + ] + } + ] + }, + "GlobalSearch": { + "source": { + "object": "silver_ops.usp_globalSearch", + "type": "stored-procedure", + "parameters": { + "searchString": "string", + "customerScope": "string" + } + }, + "graphql": { + "enabled": true, + "type": { + "singular": "GlobalSearch", + "plural": "GlobalSearches" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "execute" + } + ] + } + ] + } + } +} From 19ed9eecab815653ed82212bd36fa35ad5d73062 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Thu, 19 Jun 2025 15:46:16 +0200 Subject: [PATCH 28/79] Add support for multuple datasets (to cheat the dm_exec_describe_first_result_set_for_object for stored proc if you use temp tables --- src/Core/Resolvers/QueryExecutor.cs | 8 +- .../Azure.DataApiBuilder.Service.csproj | 3 +- src/Service/dab-config-dev.json | 2921 ----------------- src/Service/dab-config.json | 2571 ++++++++------- 4 files changed, 1396 insertions(+), 4107 deletions(-) delete mode 100644 src/Service/dab-config-dev.json diff --git a/src/Core/Resolvers/QueryExecutor.cs b/src/Core/Resolvers/QueryExecutor.cs index 49619c3a6a..0daeb1f30b 100644 --- a/src/Core/Resolvers/QueryExecutor.cs +++ b/src/Core/Resolvers/QueryExecutor.cs @@ -241,10 +241,11 @@ public void OnConfigChanged(object? 
sender, HotReloadEventArgs args) return $"declare {param.Key} {paramType} = '{paramValue}'"; }); - //QueryExecutorLogger.LogDebug($"Parameters2: {string.Join("; ", paramDeclarations)}"); + QueryExecutorLogger.LogDebug($"Parameters2: {string.Join("; ", paramDeclarations).Replace("String", "varchar(255)")}"); } } + TResult? result = await ExecuteQueryAgainstDbAsync(conn, sqltext, parameters!, dataReaderHandler, httpContext, dataSourceName, args); if (retryAttempt > 1) @@ -485,6 +486,11 @@ public bool Read(DbDataReader reader) public async Task ExtractResultSetFromDbDataReaderAsync(DbDataReader dbDataReader, List? args = null) { + // If the first dataset has no records, try if there is a second one ... + if (!dbDataReader.HasRows) + { + dbDataReader.NextResult(); + } DbResultSet dbResultSet = new(resultProperties: GetResultPropertiesAsync(dbDataReader).Result ?? new()); long availableBytes = _maxResponseSizeBytes; while (await ReadAsync(dbDataReader)) diff --git a/src/Service/Azure.DataApiBuilder.Service.csproj b/src/Service/Azure.DataApiBuilder.Service.csproj index 710048b9fa..ea5cdc3a07 100644 --- a/src/Service/Azure.DataApiBuilder.Service.csproj +++ b/src/Service/Azure.DataApiBuilder.Service.csproj @@ -45,7 +45,6 @@ - @@ -97,4 +96,6 @@ + + diff --git a/src/Service/dab-config-dev.json b/src/Service/dab-config-dev.json deleted file mode 100644 index 416626edd5..0000000000 --- a/src/Service/dab-config-dev.json +++ /dev/null @@ -1,2921 +0,0 @@ -{ - "$schema": "https://github.com/Azure/data-api-builder/releases/latest/download/dab.draft.schema.json", - "data-source": { - "database-type": "mssql", - "connection-string": "Data Source=nqf4kgvoqm4ufazdzriupb2pay-doa2ptopus4ufglh5rxt3is4yi.database.fabric.microsoft.com,1433;User ID=8bfaf0d6-fa20-4ed5-a450-0005ceb77729;Password=z5y8Q~hLcfdAflVrfnYoVxdavIJXZb5tlH~tAbRn;Pooling=True;Min Pool Size=0;Max Pool Size=100;Multiple Active Result Sets=False;Connect Timeout=30;Encrypt=False;Trust Server 
Certificate=True;Authentication=ActiveDirectoryServicePrincipal;Initial Catalog=apiLayer-fad0b3db-cb87-4a73-9c54-e1fc417bc08c", - "options": { - "set-session-context": false - } - }, - "runtime": { - "telemetry": { - "application-insights": { - "enabled": true, - "connection-string": "InstrumentationKey=d303d229-1055-4f48-a811-4dc0a3d4aa1e;IngestionEndpoint=https://westeurope-5.in.applicationinsights.azure.com/;LiveEndpoint=https://westeurope.livediagnostics.monitor.azure.com/;ApplicationId=c98a3731-8125-4aa0-867a-3361c0e536db" - } - }, - "pagination": { - "max-page-size": 100000 - }, - "cache": { - "enabled": true, - "ttl-seconds": 30 - }, - "rest": { - "enabled": true, - "path": "/api", - "request-body-strict": true - }, - "graphql": { - "enabled": true, - "path": "/graphql", - "allow-introspection": true - }, - "host": { - "cors": { - "origins": [], - "allow-credentials": false - }, - "authentication": { - "provider": "StaticWebApps" - }, - "mode": "development" - } - }, - "entities": { - "Shipment": { - "source": { - "object": "silver_ops.v_Shipment", - "type": "table", - "key-fields": [ - "shipmentID" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "Shipment", - "plural": "Shipments" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "BillOfLading": { - "cardinality": "many", - "target.entity": "BillOfLading", - "source.fields": [ - "shipmentID" - ], - "target.fields": [ - "EnvOrderNo" - ] - }, - "ShipmentEquipments": { - "cardinality": "many", - "target.entity": "ShipmentEquipment", - "source.fields": [ - "shipmentID" - ], - "target.fields": [ - "shipmentID" - ] - }, - "CargoItems": { - "cardinality": "many", - "target.entity": "CargoItem", - "source.fields": [ - "shipmentID" - ], - "target.fields": [ - "shipmentId" - ] - }, - "carrier": { - "cardinality": "one", - "target.entity": "CarrierPartner", - "source.fields": [ - "carrierID" - ], - "target.fields": [ - "partnerID" - ] - }, - "event": { - "cardinality": "many", - 
"target.entity": "ShipmentEvent", - "source.fields": [ - "shipmentID" - ], - "target.fields": [ - "ShipmentId" - ] - }, - "customer": { - "cardinality": "one", - "target.entity": "CustomerPartner", - "source.fields": [ - "customerID" - ], - "target.fields": [ - "partnerID" - ] - }, - "shipmentPol": { - "cardinality": "one", - "target.entity": "polPort", - "source.fields": [ - "pol" - ], - "target.fields": [ - "Code" - ] - }, - "shipmentPod": { - "cardinality": "one", - "target.entity": "podPort", - "source.fields": [ - "pod" - ], - "target.fields": [ - "Code" - ] - }, - "shipmentPrc": { - "cardinality": "one", - "target.entity": "prcPort", - "source.fields": [ - "prc" - ], - "target.fields": [ - "Code" - ] - }, - "shipmentPdy": { - "cardinality": "one", - "target.entity": "pdyPort", - "source.fields": [ - "pdy" - ], - "target.fields": [ - "Code" - ] - }, - "goodsSummary": { - "cardinality": "one", - "target.entity": "GoodsSummary", - "source.fields": [ - "shipmentID" - ], - "target.fields": [ - "shipmentID" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "podPort": { - "source": { - "object": "silver_ops.Port", - "type": "table", - "key-fields": [ - "Code" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "podPort", - "plural": "podPorts" - } - }, - "rest": { - "enabled": true - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "prcPort": { - "source": { - "object": "silver_ops.Port", - "type": "table", - "key-fields": [ - "Code" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "prcPort", - "plural": "prcPorts" - } - }, - "rest": { - "enabled": true - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "pdyPort": { - "source": { - "object": "silver_ops.Port", - "type": "table", - "key-fields": [ - "Code" - ] - }, - "graphql": { - "enabled": 
true, - "type": { - "singular": "pdyPort", - "plural": "pdyPorts" - } - }, - "rest": { - "enabled": true - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "polPort": { - "source": { - "object": "silver_ops.Port", - "type": "table", - "key-fields": [ - "Code" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "polPort", - "plural": "polPorts" - } - }, - "rest": { - "enabled": true - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "TariffContainerSize": { - "source": { - "object": "silver_ops.v_TariffContainerSize", - "type": "table", - "key-fields": [ - "containerSize" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "TariffContainerSize", - "plural": "TariffContainerSizes" - } - }, - "rest": { - "enabled": true - }, - "cache": { - "enabled": true, - "ttl-seconds": 120 - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "GoodsSummary": { - "source": { - "object": "silver_ops.v_GoodsSummmary", - "type": "table", - "key-fields": [ - "shipmentID" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "GoodsSummary", - "plural": "GoodsSummaries" - } - }, - "rest": { - "enabled": true - }, - "cache": { - "enabled": true, - "ttl-seconds": 120 - }, - "relationships": { - "Shipment": { - "cardinality": "one", - "target.entity": "Shipment", - "source.fields": [ - "shipmentID" - ], - "target.fields": [ - "shipmentID" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "CarrierPartner": { - "source": { - "object": "silver_ops.Partner", - "type": "table", - "key-fields": [ - "partnerID" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "CarrierPartner", - "plural": "CarrierPartners" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "Shipments": 
{ - "cardinality": "many", - "target.entity": "Shipment", - "source.fields": [ - "partnerID" - ], - "target.fields": [ - "carrierID" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "CustomerPartner": { - "source": { - "object": "silver_ops.v_Partner", - "type": "table", - "key-fields": [ - "partnerID" - ] - }, - "cache": { - "enabled": true, - "ttl-seconds": 15 - }, - "graphql": { - "enabled": true, - "type": { - "singular": "CustomerPartner", - "plural": "CustomerPartners" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "Organization": { - "cardinality": "one", - "target.entity": "Organization", - "source.fields": [ - "mdmOrganizationEntityID" - ], - "target.fields": [ - "mdmEntityID" - ] - }, - "Shipment": { - "cardinality": "many", - "target.entity": "Shipment", - "source.fields": [ - "partnerID" - ], - "target.fields": [ - "customerID" - ] - }, - "InboundStockOrder": { - "cardinality": "many", - "target.entity": "InboundOrder", - "source.fields": [ - "partnerID" - ], - "target.fields": [ - "customerCode" - ] - }, - "OutboundStockOrder": { - "cardinality": "many", - "target.entity": "OutboundOrder", - "source.fields": [ - "partnerID" - ], - "target.fields": [ - "customerCode" - ] - }, - "Company": { - "cardinality": "many", - "target.entity": "PartnerPerCompany", - "source.fields": [ - "partnerID" - ], - "target.fields": [ - "sourceEntityID" - ] - }, - "TariffContactGroup": { - "cardinality": "many", - "target.entity": "TariffContactGroupMember", - "source.fields": [ - "partnerID" - ], - "target.fields": [ - "PartnerId" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "OrganisationLocation": { - "source": { - "object": "silver_mdm.v_Location", - "type": "table", - "key-fields": [ - "mdmEntityID" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "OrganisationLocation", - "plural": 
"OrganisationLocations" - } - }, - "rest": { - "enabled": true - }, - "cache": { - "enabled": true, - "ttl-seconds": 120 - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "OrganizationLocationRelation": { - "source": { - "object": "silver_mdm.OrganizationAddressRelation", - "type": "table", - "key-fields": [ - "mdmRelationshipID" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "OrganizationLocationRelation", - "plural": "OrganizationLocationRelations" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "Organization": { - "cardinality": "one", - "target.entity": "Organization", - "source.fields": [ - "mdmEntityOrganizationID" - ], - "target.fields": [ - "mdmEntityID" - ] - }, - "Location": { - "cardinality": "one", - "target.entity": "OrganisationLocation", - "source.fields": [ - "mdmEntityLocationID" - ], - "target.fields": [ - "mdmEntityID" - ] - } - }, - "cache": { - "enabled": true, - "ttl-seconds": 120 - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "PartnerPerCompany": { - "source": { - "object": "silver_ops.v_PartnerPerCompany", - "type": "table", - "key-fields": [ - "PartnerPerCompanyId" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "PartnerPerCompany", - "plural": "PartnerPerCompanies" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "Customer": { - "cardinality": "many", - "target.entity": "CustomerPartner", - "source.fields": [ - "sourceEntityID" - ], - "target.fields": [ - "partnerID" - ] - }, - "organization": { - "cardinality": "one", - "target.entity": "Organization", - "source.fields": [ - "mdmOrganizationEntityID" - ], - "target.fields": [ - "mdmEntityID" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "Equipment": { - "source": { - "object": "silver_ops.Equipment", - "type": 
"table", - "key-fields": [ - "equipmentID" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "Equipment", - "plural": "Equipment" - } - }, - "rest": { - "enabled": true - }, - "cache": { - "enabled": true, - "ttl-seconds": 120 - }, - "relationships": { - "ShipmentEquipments": { - "cardinality": "many", - "target.entity": "ShipmentEquipment", - "source.fields": [ - "equipmentID" - ], - "target.fields": [ - "equipmentID" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "ShipmentEquipment": { - "source": { - "object": "silver_ops.v_ShipmentEquipment", - "type": "table", - "key-fields": [ - "id" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "ShipmentEquipment", - "plural": "ShipmentEquipment" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "Equipments": { - "cardinality": "one", - "target.entity": "Equipment", - "source.fields": [ - "equipmentID" - ], - "target.fields": [ - "equipmentID" - ] - }, - "TrackingEvents": { - "cardinality": "many", - "target.entity": "TrackingEvent", - "source.fields": [ - "id" - ], - "target.fields": [ - "shipmentEquipmentID" - ] - }, - "ShipmentEquipmentTransport": { - "cardinality": "many", - "target.entity": "ShipmentEquipmentTransport", - "source.fields": [ - "id" - ], - "target.fields": [ - "shipmentEquipmentID" - ] - }, - "CargoItem": { - "cardinality": "many", - "target.entity": "CargoItem", - "source.fields": [ - "id" - ], - "target.fields": [ - "shipmentEquipmentId" - ] - }, - "Shipment": { - "cardinality": "one", - "target.entity": "Shipment", - "source.fields": [ - "shipmentID" - ], - "target.fields": [ - "shipmentID" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "TrackingPort": { - "source": { - "object": "silver_trk.ww_ports", - "type": "table", - "key-fields": [ - "portId" - ] - }, - "graphql": { - "enabled": true, - "type": 
{ - "singular": "TrackingPort", - "plural": "TrackingPorts" - } - }, - "rest": { - "enabled": true - }, - "cache": { - "enabled": true, - "ttl-seconds": 120 - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "ShipmentEquipmentTransport": { - "source": { - "object": "silver_ops.ShipmentEquipmentTransport", - "type": "table", - "key-fields": [ - "id" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "ShipmentEquipmentTransport", - "plural": "ShipmentEquipmentTransports" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "pol": { - "cardinality": "one", - "target.entity": "polPort", - "source.fields": [ - "polID" - ], - "target.fields": [ - "Code" - ] - }, - "pod": { - "cardinality": "one", - "target.entity": "podPort", - "source.fields": [ - "podID" - ], - "target.fields": [ - "Code" - ] - }, - "ShipmentEquipment": { - "cardinality": "one", - "target.entity": "ShipmentEquipment", - "source.fields": [ - "shipmentEquipmentID" - ], - "target.fields": [ - "id" - ] - }, - "Transport": { - "cardinality": "one", - "target.entity": "Transport", - "source.fields": [ - "transportID" - ], - "target.fields": [ - "id" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "Transport": { - "source": { - "object": "silver_ops.Transport", - "type": "table", - "key-fields": [ - "id" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "Transport", - "plural": "Transports" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "ShipmentEquipmentTransport": { - "cardinality": "many", - "target.entity": "ShipmentEquipmentTransport", - "source.fields": [ - "id" - ], - "target.fields": [ - "transportID" - ] - }, - "Vessel": { - "cardinality": "one", - "target.entity": "Vessel", - "source.fields": [ - "vesselID" - ], - "target.fields": [ - "vesselID" - ] - } - }, - "permissions": [ - { - "role": 
"anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "SalesDocumentLine": { - "source": { - "object": "silver_ops.SalesDocumentLine", - "type": "table", - "key-fields": [ - "salesDocumentID", - "lineNo" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "SalesDocumentLine", - "plural": "SalesDocumentLines" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "SalesDocument": { - "cardinality": "one", - "target.entity": "SalesDocument", - "source.fields": [ - "salesDocumentID" - ], - "target.fields": [ - "salesDocumentID" - ] - }, - "shipment": { - "cardinality": "one", - "target.entity": "Shipment", - "source.fields": [ - "shipmentID" - ], - "target.fields": [ - "shipmentID" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read", - "fields": { - "exclude": [ - "D365FSynchErrorText", - "D365FSynchStatus", - "Env", - "OrderNo", - "SelltoCustomerNo", - "sourceOrderNo" - ] - } - } - ] - } - ] - }, - "BillOfLadingParty": { - "source": { - "object": "silver_ops.BillOfLadingParty", - "type": "table", - "key-fields": [ - "systemId" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "BillOfLadingParty", - "plural": "BillOfLadingParties" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "BillOfLading": { - "cardinality": "one", - "target.entity": "BillOfLading", - "source.fields": [ - "DocumentNo" - ], - "target.fields": [ - "DocumentNo" - ] - }, - "Partner": { - "cardinality": "one", - "target.entity": "CustomerPartner", - "source.fields": [ - "ContactNo" - ], - "target.fields": [ - "partnerID" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "BillOfLadingPerShipmentEquipment": { - "source": { - "object": "silver_ops.BillOfLadingPerShipmentEquipment", - "type": "table", - "key-fields": [ - "systemId" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": 
"BillOfLadingPerShipmentEquipment", - "plural": "BillOfLadingsPerShipmentEquipment" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "ShipmentEquipment": { - "cardinality": "one", - "target.entity": "ShipmentEquipment", - "source.fields": [ - "shipmentEquipmentId" - ], - "target.fields": [ - "id" - ] - }, - "BillOfLading": { - "cardinality": "one", - "target.entity": "BillOfLading", - "source.fields": [ - "BOLId" - ], - "target.fields": [ - "systemId" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "BillOfLading": { - "source": { - "object": "silver_ops.BillOfLading", - "type": "table", - "key-fields": [ - "systemId" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "BillOfLading", - "plural": "BillsOfLading" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "BillOfLadingPerShipmentEquipment": { - "cardinality": "many", - "target.entity": "BillOfLadingPerShipmentEquipment", - "source.fields": [ - "systemId" - ], - "target.fields": [ - "BOLId" - ] - }, - "Shipment": { - "cardinality": "one", - "target.entity": "Shipment", - "source.fields": [ - "EnvOrderNo" - ], - "target.fields": [ - "shipmentID" - ] - }, - "Party": { - "cardinality": "many", - "target.entity": "BillOfLadingParty", - "source.fields": [ - "DocumentNo" - ], - "target.fields": [ - "DocumentNo" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "SalesDocument": { - "source": { - "object": "silver_ops.v_SalesDocument", - "type": "table", - "key-fields": [ - "salesDocumentID" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "SalesDocument", - "plural": "SalesDocuments" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "invoicee": { - "cardinality": "one", - "target.entity": "CustomerPartner", - "source.fields": [ - "invoiceeID" - ], - "target.fields": [ - "partnerID" - ] - }, 
- "SalesDocumentLine": { - "cardinality": "many", - "target.entity": "SalesDocumentLine", - "source.fields": [ - "salesDocumentID" - ], - "target.fields": [ - "salesDocumentID" - ] - }, - "Shipments": { - "cardinality": "many", - "target.entity": "ShipmentsPerSalesDocument", - "source.fields": [ - "salesDocumentID" - ], - "target.fields": [ - "salesDocumentID" - ] - }, - "Document": { - "cardinality": "one", - "target.entity": "ShipmentDocument", - "source.fields": [ - "documentNumber" - ], - "target.fields": [ - "invoiceCrMemoNo" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read", - "fields": { - "exclude": [ - "D365FSynchErrorText", - "D365FSynchStatus", - "Env", - "OrderNo", - "SelltoCustomerNo", - "SourceOrderNos" - ] - } - } - ] - } - ] - }, - "CargoMovementType": { - "source": { - "object": "silver_ops.CargoMovementType", - "type": "table" - }, - "graphql": { - "enabled": true, - "type": { - "singular": "CargoMovementType", - "plural": "CargoMovementTypes" - } - }, - "rest": { - "enabled": true - }, - "cache": { - "enabled": true, - "ttl-seconds": 120 - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "EquipmentEventType": { - "source": { - "object": "silver_ops.EquipmentEventType", - "type": "table" - }, - "graphql": { - "enabled": true, - "type": { - "singular": "EquipmentEventType", - "plural": "EquipmentEventTypes" - } - }, - "rest": { - "enabled": true - }, - "cache": { - "enabled": true, - "ttl-seconds": 120 - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "ModeOfTransportType": { - "source": { - "object": "silver_ops.ModeOfTransportType", - "type": "table" - }, - "graphql": { - "enabled": true, - "type": { - "singular": "ModeOfTransportType", - "plural": "ModeOfTransportTypes" - } - }, - "rest": { - "enabled": true - }, - "cache": { - "enabled": true, - "ttl-seconds": 120 - }, - 
"permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "Organization": { - "source": { - "object": "silver_mdm.Organization", - "type": "table", - "key-fields": [ - "mdmEntityID" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "Organization", - "plural": "Organizations" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "PartnerPerCompany": { - "cardinality": "many", - "target.entity": "PartnerPerCompany", - "source.fields": [ - "mdmEntityID" - ], - "target.fields": [ - "mdmOrganizationEntityID" - ] - }, - "Partner": { - "cardinality": "many", - "target.entity": "CustomerPartner", - "source.fields": [ - "mdmEntityID" - ], - "target.fields": [ - "mdmOrganizationEntityID" - ] - }, - "OrganizationLocation": { - "cardinality": "many", - "target.entity": "OrganizationLocationRelation", - "source.fields": [ - "mdmEntityID" - ], - "target.fields": [ - "mdmEntityOrganizationID" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "ShipmentLocationType": { - "source": { - "object": "silver_ops.ShipmentLocationType", - "type": "table" - }, - "graphql": { - "enabled": true, - "type": { - "singular": "ShipmentLocationType", - "plural": "ShipmentLocationTypes" - } - }, - "rest": { - "enabled": true - }, - "cache": { - "enabled": true, - "ttl-seconds": 120 - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "ShipmentStatusType": { - "source": { - "object": "silver_ops.ShipmentStatusType", - "type": "table" - }, - "graphql": { - "enabled": true, - "type": { - "singular": "ShipmentStatusType", - "plural": "ShipmentStatusTypes" - } - }, - "rest": { - "enabled": true - }, - "cache": { - "enabled": true, - "ttl-seconds": 120 - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "TransportPlanStageType": { - "source": 
{ - "object": "silver_ops.TransportPlanStageType", - "type": "table" - }, - "graphql": { - "enabled": true, - "type": { - "singular": "TransportPlanStageType", - "plural": "TransportPlanStageTypes" - } - }, - "rest": { - "enabled": true - }, - "cache": { - "enabled": true, - "ttl-seconds": 120 - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "InboundOrder": { - "source": { - "object": "silver_ops.InboundOrder", - "type": "table", - "key-fields": [ - "systemId" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "InboundOrder", - "plural": "InboundOrders" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "customer": { - "cardinality": "one", - "target.entity": "CustomerPartner", - "source.fields": [ - "customerCode" - ], - "target.fields": [ - "partnerID" - ] - }, - "inboundOrderLines": { - "cardinality": "many", - "target.entity": "InboundOrderLine", - "source.fields": [ - "no" - ], - "target.fields": [ - "documentNo" - ] - }, - "shipment": { - "cardinality": "one", - "target.entity": "Shipment", - "source.fields": [ - "opsFactOrderId" - ], - "target.fields": [ - "shipmentID" - ] - }, - "characteristics": { - "cardinality": "one", - "target.entity": "CharacteristicGroup", - "source.fields": [ - "characteristicGroupCode" - ], - "target.fields": [ - "code" - ] - }, - "terminalLocation": { - "cardinality": "one", - "target.entity": "TerminalLocation", - "source.fields": [ - "unloadingTerminalCode" - ], - "target.fields": [ - "terminalCode" - ] - }, - "pol": { - "cardinality": "one", - "target.entity": "polPort", - "source.fields": [ - "polCode" - ], - "target.fields": [ - "Code" - ] - }, - "pod": { - "cardinality": "one", - "target.entity": "podPort", - "source.fields": [ - "podCode" - ], - "target.fields": [ - "Code" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read", - "fields": { - "exclude": [] - } - } - ] - } - ] - }, - 
"InboundOrderLine": { - "source": { - "object": "silver_ops.v_InboundOrderLine", - "type": "table", - "key-fields": [ - "documentNo", - "lineNo" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "InboundOrderLine", - "plural": "InboundOrderLines" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "inboundOrder": { - "cardinality": "one", - "target.entity": "InboundOrder", - "source.fields": [ - "documentNo" - ], - "target.fields": [ - "no" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read", - "fields": { - "exclude": [] - } - } - ] - } - ] - }, - "OutboundOrder": { - "source": { - "object": "silver_ops.v_OutboundOrder", - "type": "table", - "key-fields": [ - "systemId" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "OutboundOrder", - "plural": "OutboundOrders" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "customer": { - "cardinality": "one", - "target.entity": "CustomerPartner", - "source.fields": [ - "customerCode" - ], - "target.fields": [ - "partnerID" - ] - }, - "outboundOrderLines": { - "cardinality": "many", - "target.entity": "OutboundOrderLine", - "source.fields": [ - "no" - ], - "target.fields": [ - "documentNo" - ] - }, - "shipment": { - "cardinality": "one", - "target.entity": "Shipment", - "source.fields": [ - "opsFactOrderId" - ], - "target.fields": [ - "shipmentID" - ] - }, - "pol": { - "cardinality": "one", - "target.entity": "polPort", - "source.fields": [ - "polCode" - ], - "target.fields": [ - "Code" - ] - }, - "pod": { - "cardinality": "one", - "target.entity": "podPort", - "source.fields": [ - "podCode" - ], - "target.fields": [ - "Code" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read", - "fields": { - "exclude": [] - } - } - ] - } - ] - }, - "OutboundOrderLine": { - "source": { - "object": "silver_ops.v_OutboundOrderLine", - "type": "table", - "key-fields": [ - "documentNo", 
- "lineNo" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "OutboundOrderLine", - "plural": "OutboundOrderLines" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "outboundOrder": { - "cardinality": "one", - "target.entity": "OutboundOrder", - "source.fields": [ - "documentNo" - ], - "target.fields": [ - "no" - ] - }, - "charecteristics": { - "cardinality": "one", - "target.entity": "CharacteristicGroup", - "source.fields": [ - "characteristicGroupCode" - ], - "target.fields": [ - "code" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read", - "fields": { - "exclude": [] - } - } - ] - } - ] - }, - "PurchaseDocument": { - "source": { - "object": "silver_ops.PurchaseDocument", - "type": "table", - "key-fields": [ - "systemId" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "PurchaseDocument", - "plural": "PurchaseDocuments" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "shipment": { - "cardinality": "one", - "target.entity": "Shipment", - "source.fields": [ - "OperationalOrderNo" - ], - "target.fields": [ - "mplBookingReference" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read", - "fields": { - "exclude": [] - } - } - ] - } - ] - }, - "CharacteristicGroup": { - "source": { - "object": "silver_ops.v_Characteristics", - "type": "table", - "key-fields": [ - "code" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "CharacteristicGroup", - "plural": "CharacteristicGroups" - } - }, - "rest": { - "enabled": true - }, - "cache": { - "enabled": true, - "ttl-seconds": 120 - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "CargoItem": { - "source": { - "object": "silver_ops.CargoItem", - "type": "table", - "key-fields": [ - "cargoItemID" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "CargoItem", - "plural": 
"CargoItems" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "Shipment": { - "cardinality": "one", - "target.entity": "Shipment", - "source.fields": [ - "shipmentId" - ], - "target.fields": [ - "shipmentID" - ] - }, - "ShipmentEquipment": { - "cardinality": "one", - "target.entity": "ShipmentEquipment", - "source.fields": [ - "shipmentEquipmentId" - ], - "target.fields": [ - "id" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "ShipmentsPerSalesDocument": { - "source": { - "object": "silver_ops.v_ShipmentsPerSalesDocument", - "type": "table", - "key-fields": [ - "salesDocumentID", - "shipmentID" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "ShipmentsPerSalesDocument", - "plural": "ShipmentsPerSalesDocuments" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "SalesDocument": { - "cardinality": "one", - "target.entity": "SalesDocument", - "source.fields": [ - "salesDocumentID" - ], - "target.fields": [ - "salesDocumentID" - ] - }, - "Shipment": { - "cardinality": "one", - "target.entity": "Shipment", - "source.fields": [ - "shipmentID" - ], - "target.fields": [ - "shipmentID" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "TerminalLocation": { - "source": { - "object": "silver_ops.v_TerminalLocation", - "type": "table", - "key-fields": [ - "terminalCode" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "TerminalLocation", - "plural": "TerminalLocations" - } - }, - "rest": { - "enabled": true - }, - "cache": { - "enabled": true, - "ttl-seconds": 120 - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "LoadReport": { - "source": { - "object": "silver_ops.v_loadReport", - "type": "table", - "key-fields": [ - "systemId" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": 
"Loadreport", - "plural": "Loadreports" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "Shipment": { - "cardinality": "one", - "target.entity": "Shipment", - "source.fields": [ - "ops_fact_order_id" - ], - "target.fields": [ - "shipmentID" - ] - }, - "POL": { - "cardinality": "one", - "target.entity": "polPort", - "source.fields": [ - "polCode" - ], - "target.fields": [ - "Code" - ] - }, - "POD": { - "cardinality": "one", - "target.entity": "podPort", - "source.fields": [ - "podCode" - ], - "target.fields": [ - "Code" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "ShipmentDocument": { - "source": { - "object": "silver_ops.ShipmentDocuments", - "type": "table", - "key-fields": [ - "systemId" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "ShipmentDocument", - "plural": "ShipmentDocuments" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "Shipment": { - "cardinality": "one", - "target.entity": "Shipment", - "source.fields": [ - "ops_fact_order_id" - ], - "target.fields": [ - "shipmentID" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "TrackingEvent": { - "source": { - "object": "silver_trk.TrackingEvent", - "type": "table", - "key-fields": [ - "trackingEventID" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "TrackingEvent", - "plural": "TrackingEvents" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "ShipmentEquipment": { - "cardinality": "one", - "target.entity": "ShipmentEquipment", - "source.fields": [ - "shipmentEquipmentID" - ], - "target.fields": [ - "id" - ] - }, - "TrackingPort": { - "cardinality": "one", - "target.entity": "TrackingPort", - "source.fields": [ - "portID" - ], - "target.fields": [ - "portId" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - 
] - }, - "LocationPOD": { - "source": { - "object": "silver_ops.Location", - "type": "table", - "key-fields": [ - "Code" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "LocationPOD", - "plural": "LocationsPOD" - } - }, - "rest": { - "enabled": true - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "LocationPOL": { - "source": { - "object": "silver_ops.Location", - "type": "table", - "key-fields": [ - "Code" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "LocationPOL", - "plural": "LocationsPOL" - } - }, - "rest": { - "enabled": true - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "LocationPDY": { - "source": { - "object": "silver_ops.Location", - "type": "table", - "key-fields": [ - "Code" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "LocationPDY", - "plural": "LocationsPDY" - } - }, - "rest": { - "enabled": true - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "LocationPRC": { - "source": { - "object": "silver_ops.Location", - "type": "table", - "key-fields": [ - "Code" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "LocationPRC", - "plural": "LocationsPRC" - } - }, - "rest": { - "enabled": true - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "LocationTS1": { - "source": { - "object": "silver_ops.Location", - "type": "table", - "key-fields": [ - "Code" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "LocationTS1", - "plural": "LocationsTS1" - } - }, - "rest": { - "enabled": true - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "LocationTS2": { - "source": { - "object": "silver_ops.Location", - "type": "table", - "key-fields": [ - "Code" - ] - }, 
- "graphql": { - "enabled": true, - "type": { - "singular": "LocationPS2", - "plural": "LocationsTS2" - } - }, - "rest": { - "enabled": true - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "TariffContainerGroup": { - "source": { - "object": "silver_ops.TariffContainerGroup", - "type": "table", - "key-fields": [ - "ContainerCode" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "TariffContainerGroup", - "plural": "TariffContainerGroups" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "Tariff": { - "cardinality": "many", - "target.entity": "Tariff", - "source.fields": [ - "ContainerCode" - ], - "target.fields": [ - "ContainerType" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "TariffContactGroupMember": { - "source": { - "object": "silver_ops.TariffContactGroupMembers", - "type": "table", - "key-fields": [ - "ContactGroupCode" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "TariffContactGroupMember", - "plural": "TariffContactGroupMembers" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "Tariff": { - "cardinality": "many", - "target.entity": "Tariff", - "source.fields": [ - "ContactGroupCode" - ], - "target.fields": [ - "SourceNo" - ] - }, - "Customer": { - "cardinality": "one", - "target.entity": "CustomerPartner", - "source.fields": [ - "PartnerId" - ], - "target.fields": [ - "partnerID" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "TariffSurcharge": { - "source": { - "object": "silver_ops.TariffSurcharge", - "type": "table", - "key-fields": [ - "systemId" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "TariffSurcharge", - "plural": "TariffSurcharges" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "Tariff": { - "cardinality": "one", - 
"target.entity": "Tariff", - "source.fields": [ - "TariffId" - ], - "target.fields": [ - "TariffSurchargeKey" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "ShipmentEvent": { - "source": { - "object": "silver_ops.v_ShipmentEvent", - "type": "table", - "key-fields": [ - "ShipmentId", - "systemCreatedAt" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "ShipmentEvent", - "plural": "ShipmentEvents" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "Shipment": { - "cardinality": "one", - "target.entity": "Shipment", - "source.fields": [ - "ShipmentId" - ], - "target.fields": [ - "shipmentID" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "LoadtypeType": { - "source": { - "object": "silver_ops.v_LoadTypeType", - "type": "table", - "key-fields": [ - "loadType" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "LoadtypeType", - "plural": "LoadtypeTypes" - } - }, - "rest": { - "enabled": true - }, - "cache": { - "enabled": true, - "ttl-seconds": 120 - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "FlowtypeType": { - "source": { - "object": "silver_ops.v_FlowTypeType", - "type": "table", - "key-fields": [ - "flowType" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "FlowtypeType", - "plural": "FlowtypeTypes" - } - }, - "rest": { - "enabled": true - }, - "cache": { - "enabled": true, - "ttl-seconds": 120 - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "Tariff": { - "source": { - "object": "silver_ops.v_Tariff", - "type": "table", - "key-fields": [ - "TariffId" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "Tariff", - "plural": "Tariffs" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - 
"TariffSurcharge": { - "cardinality": "many", - "target.entity": "TariffSurcharge", - "source.fields": [ - "TariffSurchargeKey" - ], - "target.fields": [ - "TariffId" - ] - }, - "TariffContactGroupMember": { - "cardinality": "many", - "target.entity": "TariffContactGroupMember", - "source.fields": [ - "SourceNo" - ], - "target.fields": [ - "ContactGroupCode" - ] - }, - "Pol": { - "cardinality": "one", - "target.entity": "LocationPOL", - "source.fields": [ - "FromZoneCode" - ], - "target.fields": [ - "Code" - ] - }, - "Pod": { - "cardinality": "one", - "target.entity": "LocationPOD", - "source.fields": [ - "ToZoneCode" - ], - "target.fields": [ - "Code" - ] - }, - "Prc": { - "cardinality": "one", - "target.entity": "LocationPRC", - "source.fields": [ - "PRCCode" - ], - "target.fields": [ - "Code" - ] - }, - "Pdy": { - "cardinality": "one", - "target.entity": "LocationPDY", - "source.fields": [ - "PDYCode" - ], - "target.fields": [ - "Code" - ] - }, - "Transshipment1": { - "cardinality": "one", - "target.entity": "LocationTS1", - "source.fields": [ - "TransshipmentPort" - ], - "target.fields": [ - "Code" - ] - }, - "Transshipment2": { - "cardinality": "one", - "target.entity": "LocationTS2", - "source.fields": [ - "TransshipmentPort2" - ], - "target.fields": [ - "Code" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "TariffAll": { - "source": { - "object": "silver_ops.v_TariffAll", - "type": "table", - "key-fields": [ - "TariffId" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "TariffAll", - "plural": "TariffsAll" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "TariffSurcharge": { - "cardinality": "many", - "target.entity": "TariffSurcharge", - "source.fields": [ - "TariffSurchargeKey" - ], - "target.fields": [ - "TariffId" - ] - }, - "TariffContactGroupMember": { - "cardinality": "many", - "target.entity": "TariffContactGroupMember", - "source.fields": [ 
- "SourceNo" - ], - "target.fields": [ - "ContactGroupCode" - ] - }, - "Pol": { - "cardinality": "one", - "target.entity": "LocationPOL", - "source.fields": [ - "FromZoneCode" - ], - "target.fields": [ - "Code" - ] - }, - "Pod": { - "cardinality": "one", - "target.entity": "LocationPOD", - "source.fields": [ - "ToZoneCode" - ], - "target.fields": [ - "Code" - ] - }, - "Prc": { - "cardinality": "one", - "target.entity": "LocationPRC", - "source.fields": [ - "PRCCode" - ], - "target.fields": [ - "Code" - ] - }, - "Pdy": { - "cardinality": "one", - "target.entity": "LocationPDY", - "source.fields": [ - "PDYCode" - ], - "target.fields": [ - "Code" - ] - }, - "Transshipment1": { - "cardinality": "one", - "target.entity": "LocationTS1", - "source.fields": [ - "TransshipmentPort" - ], - "target.fields": [ - "Code" - ] - }, - "Transshipment2": { - "cardinality": "one", - "target.entity": "LocationTS2", - "source.fields": [ - "TransshipmentPort2" - ], - "target.fields": [ - "Code" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "Vessel": { - "source": { - "object": "silver_ops.Vessel", - "type": "table", - "key-fields": [ - "vesselID" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "Vessel", - "plural": "Vessels" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "Transport": { - "cardinality": "many", - "target.entity": "Transport", - "source.fields": [ - "vesselID" - ], - "target.fields": [ - "vesselID" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "WindwardRegistrationFeedback": { - "source": { - "object": "silver_trk.ww_TrackingFeedback", - "type": "table", - "key-fields": [ - "shipmentId" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "WindwardRegistrationFeedback", - "plural": "WindwardRegistrationFeedbacks" - } - }, - "rest": { - "enabled": true - }, - 
"relationships": { - "Shipment": { - "cardinality": "one", - "target.entity": "Shipment", - "source.fields": [ - "shipmentId" - ], - "target.fields": [ - "shipmentID" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] - }, - "EquipmentMap": { - "source": { - "object": "silver_ops.usp_MapSource", - "type": "stored-procedure", - "parameters": { - "customerScope": "string" - } - }, - "cache": { - "enabled": true, - "ttl-seconds": 60 - }, - "graphql": { - "enabled": true, - "type": { - "singular": "EquipmentMap", - "plural": "EquipmentMaps" - } - }, - "rest": { - "enabled": true - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "execute" - } - ] - } - ] - }, - "GlobalSearch": { - "source": { - "object": "silver_ops.usp_globalSearch", - "type": "stored-procedure", - "parameters": { - "searchString": "string", - "customerScope": "string" - } - }, - "graphql": { - "enabled": true, - "type": { - "singular": "GlobalSearch", - "plural": "GlobalSearches" - } - }, - "rest": { - "enabled": true - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "execute" - } - ] - } - ] - } - } -} diff --git a/src/Service/dab-config.json b/src/Service/dab-config.json index 82ea6f2736..57d314ef8e 100644 --- a/src/Service/dab-config.json +++ b/src/Service/dab-config.json @@ -19,7 +19,7 @@ }, "cache": { "enabled": true, - "ttl-seconds": 30 + "ttl-seconds": 120 }, "rest": { "enabled": true, @@ -43,133 +43,53 @@ } }, "entities": { - "Shipment": { + "BillOfLading": { "source": { - "object": "silver_ops.v_Shipment", + "object": "silver_ops.BillOfLading", "type": "table", "key-fields": [ - "shipmentID" + "systemId" ] }, "graphql": { "enabled": true, "type": { - "singular": "Shipment", - "plural": "Shipments" + "singular": "BillOfLading", + "plural": "BillsOfLading" } }, "rest": { "enabled": true }, "relationships": { - "BillOfLading": { - "cardinality": "many", - "target.entity": 
"BillOfLading", - "source.fields": [ - "shipmentID" - ], - "target.fields": [ - "EnvOrderNo" - ] - }, - "ShipmentEquipments": { - "cardinality": "many", - "target.entity": "ShipmentEquipment", - "source.fields": [ - "shipmentID" - ], - "target.fields": [ - "shipmentID" - ] - }, - "CargoItems": { + "BillOfLadingPerShipmentEquipment": { "cardinality": "many", - "target.entity": "CargoItem", + "target.entity": "BillOfLadingPerShipmentEquipment", "source.fields": [ - "shipmentID" + "systemId" ], "target.fields": [ - "shipmentId" + "BOLId" ] }, - "carrier": { + "Shipment": { "cardinality": "one", - "target.entity": "CarrierPartner", + "target.entity": "Shipment", "source.fields": [ - "carrierID" + "EnvOrderNo" ], "target.fields": [ - "partnerID" - ] - }, - "event": { - "cardinality": "many", - "target.entity": "ShipmentEvent", - "source.fields": [ "shipmentID" - ], - "target.fields": [ - "ShipmentId" - ] - }, - "customer": { - "cardinality": "one", - "target.entity": "CustomerPartner", - "source.fields": [ - "customerID" - ], - "target.fields": [ - "partnerID" - ] - }, - "shipmentPol": { - "cardinality": "one", - "target.entity": "polPort", - "source.fields": [ - "pol" - ], - "target.fields": [ - "Code" ] }, - "shipmentPod": { - "cardinality": "one", - "target.entity": "podPort", - "source.fields": [ - "pod" - ], - "target.fields": [ - "Code" - ] - }, - "shipmentPrc": { - "cardinality": "one", - "target.entity": "prcPort", - "source.fields": [ - "prc" - ], - "target.fields": [ - "Code" - ] - }, - "shipmentPdy": { - "cardinality": "one", - "target.entity": "pdyPort", - "source.fields": [ - "pdy" - ], - "target.fields": [ - "Code" - ] - }, - "goodsSummary": { - "cardinality": "one", - "target.entity": "GoodsSummary", + "Party": { + "cardinality": "many", + "target.entity": "BillOfLadingParty", "source.fields": [ - "shipmentID" + "DocumentNo" ], "target.fields": [ - "shipmentID" + "DocumentNo" ] } }, @@ -182,54 +102,51 @@ } ] } - ] + ], + "cache": { + "enabled": true, + 
"ttl-seconds": 120 + } }, - "podPort": { + "BillOfLadingParty": { "source": { - "object": "silver_ops.Port", + "object": "silver_ops.BillOfLadingParty", "type": "table", "key-fields": [ - "Code" + "systemId" ] }, "graphql": { "enabled": true, "type": { - "singular": "podPort", - "plural": "podPorts" + "singular": "BillOfLadingParty", + "plural": "BillOfLadingParties" } }, "rest": { "enabled": true }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } + "relationships": { + "BillOfLading": { + "cardinality": "one", + "target.entity": "BillOfLading", + "source.fields": [ + "DocumentNo" + ], + "target.fields": [ + "DocumentNo" + ] + }, + "Partner": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "ContactNo" + ], + "target.fields": [ + "partnerID" ] } - ] - }, - "prcPort": { - "source": { - "object": "silver_ops.Port", - "type": "table", - "key-fields": [ - "Code" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "prcPort", - "plural": "prcPorts" - } - }, - "rest": { - "enabled": true }, "permissions": [ { @@ -240,26 +157,52 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "pdyPort": { + "BillOfLadingPerShipmentEquipment": { "source": { - "object": "silver_ops.Port", + "object": "silver_ops.BillOfLadingPerShipmentEquipment", "type": "table", "key-fields": [ - "Code" + "systemId" ] }, "graphql": { "enabled": true, "type": { - "singular": "pdyPort", - "plural": "pdyPorts" + "singular": "BillOfLadingPerShipmentEquipment", + "plural": "BillOfLadingsPerShipmentEquipment" } }, "rest": { "enabled": true }, + "relationships": { + "ShipmentEquipment": { + "cardinality": "one", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentEquipmentId" + ], + "target.fields": [ + "id" + ] + }, + "BillOfLading": { + "cardinality": "one", + "target.entity": "BillOfLading", + "source.fields": [ + "BOLId" + ], + "target.fields": [ + "systemId" + ] + 
} + }, "permissions": [ { "role": "anonymous", @@ -269,26 +212,52 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "polPort": { + "CargoItem": { "source": { - "object": "silver_ops.Port", + "object": "silver_ops.CargoItem", "type": "table", "key-fields": [ - "Code" + "cargoItemID" ] }, "graphql": { "enabled": true, "type": { - "singular": "polPort", - "plural": "polPorts" + "singular": "CargoItem", + "plural": "CargoItems" } }, "rest": { "enabled": true }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentId" + ], + "target.fields": [ + "shipmentID" + ] + }, + "ShipmentEquipment": { + "cardinality": "one", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentEquipmentId" + ], + "target.fields": [ + "id" + ] + } + }, "permissions": [ { "role": "anonymous", @@ -298,21 +267,22 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "TariffContainerSize": { + "CargoMovementType": { "source": { - "object": "silver_ops.v_TariffContainerSize", - "type": "table", - "key-fields": [ - "containerSize" - ] + "object": "silver_ops.CargoMovementType", + "type": "table" }, "graphql": { "enabled": true, "type": { - "singular": "TariffContainerSize", - "plural": "TariffContainerSizes" + "singular": "CargoMovementType", + "plural": "CargoMovementTypes" } }, "rest": { @@ -333,37 +303,33 @@ } ] }, - "GoodsSummary": { + "CarrierPartner": { "source": { - "object": "silver_ops.v_GoodsSummmary", + "object": "silver_ops.Partner", "type": "table", "key-fields": [ - "shipmentID" + "partnerID" ] }, "graphql": { "enabled": true, "type": { - "singular": "GoodsSummary", - "plural": "GoodsSummaries" + "singular": "CarrierPartner", + "plural": "CarrierPartners" } }, "rest": { "enabled": true }, - "cache": { - "enabled": true, - "ttl-seconds": 120 - }, "relationships": { - "Shipment": { - "cardinality": "one", + "Shipments": { + "cardinality": "many", 
"target.entity": "Shipment", "source.fields": [ - "shipmentID" + "partnerID" ], "target.fields": [ - "shipmentID" + "carrierID" ] } }, @@ -376,37 +342,33 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "CarrierPartner": { + "CharacteristicGroup": { "source": { - "object": "silver_ops.Partner", + "object": "silver_ops.v_Characteristics", "type": "table", "key-fields": [ - "partnerID" + "code" ] }, "graphql": { "enabled": true, "type": { - "singular": "CarrierPartner", - "plural": "CarrierPartners" + "singular": "CharacteristicGroup", + "plural": "CharacteristicGroups" } }, "rest": { "enabled": true }, - "relationships": { - "Shipments": { - "cardinality": "many", - "target.entity": "Shipment", - "source.fields": [ - "partnerID" - ], - "target.fields": [ - "carrierID" - ] - } + "cache": { + "enabled": true, + "ttl-seconds": 120 }, "permissions": [ { @@ -514,19 +476,19 @@ } ] }, - "OrganisationLocation": { + "Equipment": { "source": { - "object": "silver_mdm.v_Location", + "object": "silver_ops.Equipment", "type": "table", "key-fields": [ - "mdmEntityID" + "equipmentID" ] }, "graphql": { "enabled": true, "type": { - "singular": "OrganisationLocation", - "plural": "OrganisationLocations" + "singular": "Equipment", + "plural": "Equipment" } }, "rest": { @@ -536,6 +498,18 @@ "enabled": true, "ttl-seconds": 120 }, + "relationships": { + "ShipmentEquipments": { + "cardinality": "many", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "equipmentID" + ], + "target.fields": [ + "equipmentID" + ] + } + }, "permissions": [ { "role": "anonymous", @@ -547,125 +521,145 @@ } ] }, - "OrganizationLocationRelation": { + "EquipmentEventType": { "source": { - "object": "silver_mdm.OrganizationAddressRelation", - "type": "table", - "key-fields": [ - "mdmRelationshipID" - ] + "object": "silver_ops.EquipmentEventType", + "type": "table" }, "graphql": { "enabled": true, "type": { - "singular": "OrganizationLocationRelation", - "plural": 
"OrganizationLocationRelations" + "singular": "EquipmentEventType", + "plural": "EquipmentEventTypes" } }, "rest": { "enabled": true }, - "relationships": { - "Organization": { - "cardinality": "one", - "target.entity": "Organization", - "source.fields": [ - "mdmEntityOrganizationID" - ], - "target.fields": [ - "mdmEntityID" - ] - }, - "Location": { - "cardinality": "one", - "target.entity": "OrganisationLocation", - "source.fields": [ - "mdmEntityLocationID" - ], - "target.fields": [ - "mdmEntityID" + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } ] } + ] + }, + "EquipmentMap": { + "source": { + "object": "silver_ops.usp_MapSource", + "type": "stored-procedure", + "parameters": { + "customerScope": "string" + } }, "cache": { "enabled": true, - "ttl-seconds": 120 + "ttl-seconds": 60 + }, + "graphql": { + "enabled": true, + "type": { + "singular": "EquipmentMap", + "plural": "EquipmentMaps" + } + }, + "rest": { + "enabled": true }, "permissions": [ { "role": "anonymous", "actions": [ { - "action": "read" + "action": "execute" } ] } ] }, - "PartnerPerCompany": { + "FlowtypeType": { "source": { - "object": "silver_ops.v_PartnerPerCompany", + "object": "silver_ops.v_FlowTypeType", "type": "table", "key-fields": [ - "PartnerPerCompanyId" + "flowType" ] }, "graphql": { "enabled": true, "type": { - "singular": "PartnerPerCompany", - "plural": "PartnerPerCompanies" + "singular": "FlowtypeType", + "plural": "FlowtypeTypes" } }, "rest": { "enabled": true }, - "relationships": { - "Customer": { - "cardinality": "many", - "target.entity": "CustomerPartner", - "source.fields": [ - "sourceEntityID" - ], - "target.fields": [ - "partnerID" - ] - }, - "organization": { - "cardinality": "one", - "target.entity": "Organization", - "source.fields": [ - "mdmOrganizationEntityID" - ], - "target.fields": [ - "mdmEntityID" + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + 
"permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } ] } + ] + }, + "GlobalSearch": { + "source": { + "object": "silver_ops.usp_globalSearch", + "type": "stored-procedure", + "parameters": { + "searchString": "string", + "customerScope": "string" + } + }, + "graphql": { + "enabled": true, + "type": { + "singular": "GlobalSearch", + "plural": "GlobalSearches" + } + }, + "rest": { + "enabled": true }, "permissions": [ { "role": "anonymous", "actions": [ { - "action": "read" + "action": "execute" } ] } ] }, - "Equipment": { + "GoodsSummary": { "source": { - "object": "silver_ops.Equipment", + "object": "silver_ops.v_GoodsSummmary", "type": "table", "key-fields": [ - "equipmentID" + "shipmentID" ] }, "graphql": { "enabled": true, "type": { - "singular": "Equipment", - "plural": "Equipment" + "singular": "GoodsSummary", + "plural": "GoodsSummaries" } }, "rest": { @@ -676,14 +670,14 @@ "ttl-seconds": 120 }, "relationships": { - "ShipmentEquipments": { - "cardinality": "many", - "target.entity": "ShipmentEquipment", + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", "source.fields": [ - "equipmentID" + "shipmentID" ], "target.fields": [ - "equipmentID" + "shipmentID" ] } }, @@ -698,73 +692,93 @@ } ] }, - "ShipmentEquipment": { + "InboundOrder": { "source": { - "object": "silver_ops.v_ShipmentEquipment", + "object": "silver_ops.InboundOrder", "type": "table", "key-fields": [ - "id" + "systemId" ] }, "graphql": { "enabled": true, "type": { - "singular": "ShipmentEquipment", - "plural": "ShipmentEquipment" + "singular": "InboundOrder", + "plural": "InboundOrders" } }, "rest": { "enabled": true }, "relationships": { - "Equipments": { + "customer": { "cardinality": "one", - "target.entity": "Equipment", + "target.entity": "CustomerPartner", "source.fields": [ - "equipmentID" + "customerCode" ], "target.fields": [ - "equipmentID" + "partnerID" ] }, - "TrackingEvents": { + "inboundOrderLines": { "cardinality": "many", - 
"target.entity": "TrackingEvent", + "target.entity": "InboundOrderLine", "source.fields": [ - "id" + "no" ], "target.fields": [ - "shipmentEquipmentID" + "documentNo" ] }, - "ShipmentEquipmentTransport": { - "cardinality": "many", - "target.entity": "ShipmentEquipmentTransport", + "shipment": { + "cardinality": "one", + "target.entity": "Shipment", "source.fields": [ - "id" + "opsFactOrderId" ], "target.fields": [ - "shipmentEquipmentID" + "shipmentID" ] }, - "CargoItem": { - "cardinality": "many", - "target.entity": "CargoItem", + "characteristics": { + "cardinality": "one", + "target.entity": "CharacteristicGroup", "source.fields": [ - "id" + "characteristicGroupCode" ], "target.fields": [ - "shipmentEquipmentId" + "code" ] }, - "Shipment": { + "terminalLocation": { "cardinality": "one", - "target.entity": "Shipment", + "target.entity": "TerminalLocation", "source.fields": [ - "shipmentID" + "unloadingTerminalCode" ], "target.fields": [ - "shipmentID" + "terminalCode" + ] + }, + "pol": { + "cardinality": "one", + "target.entity": "polPort", + "source.fields": [ + "polCode" + ], + "target.fields": [ + "Code" + ] + }, + "pod": { + "cardinality": "one", + "target.entity": "podPort", + "source.fields": [ + "podCode" + ], + "target.fields": [ + "Code" ] } }, @@ -773,102 +787,115 @@ "role": "anonymous", "actions": [ { - "action": "read" + "action": "read", + "fields": { + "exclude": [] + } } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "TrackingPort": { + "InboundOrderLine": { "source": { - "object": "silver_trk.ww_ports", + "object": "silver_ops.v_InboundOrderLine", "type": "table", "key-fields": [ - "portId" + "documentNo", + "lineNo" ] }, "graphql": { "enabled": true, "type": { - "singular": "TrackingPort", - "plural": "TrackingPorts" + "singular": "InboundOrderLine", + "plural": "InboundOrderLines" } }, "rest": { "enabled": true }, - "cache": { - "enabled": true, - "ttl-seconds": 120 + "relationships": { + "inboundOrder": { + 
"cardinality": "one", + "target.entity": "InboundOrder", + "source.fields": [ + "documentNo" + ], + "target.fields": [ + "no" + ] + } }, "permissions": [ { "role": "anonymous", "actions": [ { - "action": "read" + "action": "read", + "fields": { + "exclude": [] + } } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "ShipmentEquipmentTransport": { + "LoadReport": { "source": { - "object": "silver_ops.ShipmentEquipmentTransport", + "object": "silver_ops.v_loadReport", "type": "table", "key-fields": [ - "id" + "systemId" ] }, "graphql": { "enabled": true, "type": { - "singular": "ShipmentEquipmentTransport", - "plural": "ShipmentEquipmentTransports" + "singular": "Loadreport", + "plural": "Loadreports" } }, "rest": { "enabled": true }, "relationships": { - "pol": { + "Shipment": { "cardinality": "one", - "target.entity": "polPort", + "target.entity": "Shipment", "source.fields": [ - "polID" + "ops_fact_order_id" ], "target.fields": [ - "Code" + "shipmentID" ] }, - "pod": { + "POL": { "cardinality": "one", - "target.entity": "podPort", + "target.entity": "polPort", "source.fields": [ - "podID" + "polCode" ], "target.fields": [ "Code" ] }, - "ShipmentEquipment": { - "cardinality": "one", - "target.entity": "ShipmentEquipment", - "source.fields": [ - "shipmentEquipmentID" - ], - "target.fields": [ - "id" - ] - }, - "Transport": { + "POD": { "cardinality": "one", - "target.entity": "Transport", + "target.entity": "podPort", "source.fields": [ - "transportID" + "podCode" ], "target.fields": [ - "id" + "Code" ] } }, @@ -881,47 +908,33 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "Transport": { + "LoadtypeType": { "source": { - "object": "silver_ops.Transport", + "object": "silver_ops.v_LoadTypeType", "type": "table", "key-fields": [ - "id" + "loadType" ] }, "graphql": { "enabled": true, "type": { - "singular": "Transport", - "plural": "Transports" + "singular": "LoadtypeType", + "plural": "LoadtypeTypes" } }, 
"rest": { "enabled": true }, - "relationships": { - "ShipmentEquipmentTransport": { - "cardinality": "many", - "target.entity": "ShipmentEquipmentTransport", - "source.fields": [ - "id" - ], - "target.fields": [ - "transportID" - ] - }, - "Vessel": { - "cardinality": "one", - "target.entity": "Vessel", - "source.fields": [ - "vesselID" - ], - "target.fields": [ - "vesselID" - ] - } + "cache": { + "enabled": true, + "ttl-seconds": 120 }, "permissions": [ { @@ -934,108 +947,57 @@ } ] }, - "SalesDocumentLine": { + "LocationPDY": { "source": { - "object": "silver_ops.SalesDocumentLine", + "object": "silver_ops.Location", "type": "table", "key-fields": [ - "salesDocumentID", - "lineNo" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "SalesDocumentLine", - "plural": "SalesDocumentLines" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "SalesDocument": { - "cardinality": "one", - "target.entity": "SalesDocument", - "source.fields": [ - "salesDocumentID" - ], - "target.fields": [ - "salesDocumentID" - ] - }, - "shipment": { - "cardinality": "one", - "target.entity": "Shipment", - "source.fields": [ - "shipmentID" - ], - "target.fields": [ - "shipmentID" - ] + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LocationPDY", + "plural": "LocationsPDY" } }, + "rest": { + "enabled": true + }, "permissions": [ { "role": "anonymous", "actions": [ { - "action": "read", - "fields": { - "exclude": [ - "D365FSynchErrorText", - "D365FSynchStatus", - "Env", - "OrderNo", - "SelltoCustomerNo", - "sourceOrderNo" - ] - } + "action": "read" } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "BillOfLadingParty": { + "LocationPOD": { "source": { - "object": "silver_ops.BillOfLadingParty", + "object": "silver_ops.Location", "type": "table", "key-fields": [ - "systemId" + "Code" ] }, "graphql": { "enabled": true, "type": { - "singular": "BillOfLadingParty", - "plural": "BillOfLadingParties" + "singular": 
"LocationPOD", + "plural": "LocationsPOD" } }, "rest": { "enabled": true }, - "relationships": { - "BillOfLading": { - "cardinality": "one", - "target.entity": "BillOfLading", - "source.fields": [ - "DocumentNo" - ], - "target.fields": [ - "DocumentNo" - ] - }, - "Partner": { - "cardinality": "one", - "target.entity": "CustomerPartner", - "source.fields": [ - "ContactNo" - ], - "target.fields": [ - "partnerID" - ] - } - }, "permissions": [ { "role": "anonymous", @@ -1045,48 +1007,30 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "BillOfLadingPerShipmentEquipment": { + "LocationPOL": { "source": { - "object": "silver_ops.BillOfLadingPerShipmentEquipment", + "object": "silver_ops.Location", "type": "table", "key-fields": [ - "systemId" + "Code" ] }, "graphql": { "enabled": true, "type": { - "singular": "BillOfLadingPerShipmentEquipment", - "plural": "BillOfLadingsPerShipmentEquipment" + "singular": "LocationPOL", + "plural": "LocationsPOL" } }, "rest": { "enabled": true }, - "relationships": { - "ShipmentEquipment": { - "cardinality": "one", - "target.entity": "ShipmentEquipment", - "source.fields": [ - "shipmentEquipmentId" - ], - "target.fields": [ - "id" - ] - }, - "BillOfLading": { - "cardinality": "one", - "target.entity": "BillOfLading", - "source.fields": [ - "BOLId" - ], - "target.fields": [ - "systemId" - ] - } - }, "permissions": [ { "role": "anonymous", @@ -1096,58 +1040,30 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "BillOfLading": { + "LocationPRC": { "source": { - "object": "silver_ops.BillOfLading", + "object": "silver_ops.Location", "type": "table", "key-fields": [ - "systemId" + "Code" ] }, "graphql": { "enabled": true, "type": { - "singular": "BillOfLading", - "plural": "BillsOfLading" + "singular": "LocationPRC", + "plural": "LocationsPRC" } }, "rest": { "enabled": true }, - "relationships": { - "BillOfLadingPerShipmentEquipment": { - "cardinality": "many", - "target.entity": 
"BillOfLadingPerShipmentEquipment", - "source.fields": [ - "systemId" - ], - "target.fields": [ - "BOLId" - ] - }, - "Shipment": { - "cardinality": "one", - "target.entity": "Shipment", - "source.fields": [ - "EnvOrderNo" - ], - "target.fields": [ - "shipmentID" - ] - }, - "Party": { - "cardinality": "many", - "target.entity": "BillOfLadingParty", - "source.fields": [ - "DocumentNo" - ], - "target.fields": [ - "DocumentNo" - ] - } - }, "permissions": [ { "role": "anonymous", @@ -1157,108 +1073,63 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "SalesDocument": { + "LocationTS1": { "source": { - "object": "silver_ops.v_SalesDocument", + "object": "silver_ops.Location", "type": "table", "key-fields": [ - "salesDocumentID" + "Code" ] }, "graphql": { "enabled": true, "type": { - "singular": "SalesDocument", - "plural": "SalesDocuments" + "singular": "LocationTS1", + "plural": "LocationsTS1" } }, "rest": { "enabled": true }, - "relationships": { - "invoicee": { - "cardinality": "one", - "target.entity": "CustomerPartner", - "source.fields": [ - "invoiceeID" - ], - "target.fields": [ - "partnerID" - ] - }, - "SalesDocumentLine": { - "cardinality": "many", - "target.entity": "SalesDocumentLine", - "source.fields": [ - "salesDocumentID" - ], - "target.fields": [ - "salesDocumentID" - ] - }, - "Shipments": { - "cardinality": "many", - "target.entity": "ShipmentsPerSalesDocument", - "source.fields": [ - "salesDocumentID" - ], - "target.fields": [ - "salesDocumentID" - ] - }, - "Document": { - "cardinality": "one", - "target.entity": "ShipmentDocument", - "source.fields": [ - "documentNumber" - ], - "target.fields": [ - "invoiceCrMemoNo" - ] - } - }, "permissions": [ { "role": "anonymous", "actions": [ { - "action": "read", - "fields": { - "exclude": [ - "D365FSynchErrorText", - "D365FSynchStatus", - "Env", - "OrderNo", - "SelltoCustomerNo", - "SourceOrderNos" - ] - } + "action": "read" } ] } - ] + ], + "cache": { + "enabled": true, + 
"ttl-seconds": 120 + } }, - "CargoMovementType": { + "LocationTS2": { "source": { - "object": "silver_ops.CargoMovementType", - "type": "table" + "object": "silver_ops.Location", + "type": "table", + "key-fields": [ + "Code" + ] }, "graphql": { "enabled": true, "type": { - "singular": "CargoMovementType", - "plural": "CargoMovementTypes" + "singular": "LocationPS2", + "plural": "LocationsTS2" } }, "rest": { "enabled": true }, - "cache": { - "enabled": true, - "ttl-seconds": 120 - }, "permissions": [ { "role": "anonymous", @@ -1268,18 +1139,22 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "EquipmentEventType": { + "ModeOfTransportType": { "source": { - "object": "silver_ops.EquipmentEventType", + "object": "silver_ops.ModeOfTransportType", "type": "table" }, "graphql": { "enabled": true, "type": { - "singular": "EquipmentEventType", - "plural": "EquipmentEventTypes" + "singular": "ModeOfTransportType", + "plural": "ModeOfTransportTypes" } }, "rest": { @@ -1300,16 +1175,19 @@ } ] }, - "ModeOfTransportType": { + "OrganisationLocation": { "source": { - "object": "silver_ops.ModeOfTransportType", - "type": "table" + "object": "silver_mdm.v_Location", + "type": "table", + "key-fields": [ + "mdmEntityID" + ] }, "graphql": { "enabled": true, "type": { - "singular": "ModeOfTransportType", - "plural": "ModeOfTransportTypes" + "singular": "OrganisationLocation", + "plural": "OrganisationLocations" } }, "rest": { @@ -1389,82 +1267,51 @@ } ] } - ] - }, - "ShipmentLocationType": { - "source": { - "object": "silver_ops.ShipmentLocationType", - "type": "table" - }, - "graphql": { - "enabled": true, - "type": { - "singular": "ShipmentLocationType", - "plural": "ShipmentLocationTypes" - } - }, - "rest": { - "enabled": true - }, + ], "cache": { "enabled": true, "ttl-seconds": 120 - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] + } }, - "ShipmentStatusType": { + "OrganizationLocationRelation": { 
"source": { - "object": "silver_ops.ShipmentStatusType", - "type": "table" + "object": "silver_mdm.OrganizationAddressRelation", + "type": "table", + "key-fields": [ + "mdmRelationshipID" + ] }, "graphql": { "enabled": true, "type": { - "singular": "ShipmentStatusType", - "plural": "ShipmentStatusTypes" + "singular": "OrganizationLocationRelation", + "plural": "OrganizationLocationRelations" } }, "rest": { "enabled": true }, - "cache": { - "enabled": true, - "ttl-seconds": 120 - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } + "relationships": { + "Organization": { + "cardinality": "one", + "target.entity": "Organization", + "source.fields": [ + "mdmEntityOrganizationID" + ], + "target.fields": [ + "mdmEntityID" + ] + }, + "Location": { + "cardinality": "one", + "target.entity": "OrganisationLocation", + "source.fields": [ + "mdmEntityLocationID" + ], + "target.fields": [ + "mdmEntityID" ] } - ] - }, - "TransportPlanStageType": { - "source": { - "object": "silver_ops.TransportPlanStageType", - "type": "table" - }, - "graphql": { - "enabled": true, - "type": { - "singular": "TransportPlanStageType", - "plural": "TransportPlanStageTypes" - } - }, - "rest": { - "enabled": true }, "cache": { "enabled": true, @@ -1481,9 +1328,9 @@ } ] }, - "InboundOrder": { + "OutboundOrder": { "source": { - "object": "silver_ops.InboundOrder", + "object": "silver_ops.v_OutboundOrder", "type": "table", "key-fields": [ "systemId" @@ -1492,8 +1339,8 @@ "graphql": { "enabled": true, "type": { - "singular": "InboundOrder", - "plural": "InboundOrders" + "singular": "OutboundOrder", + "plural": "OutboundOrders" } }, "rest": { @@ -1510,9 +1357,9 @@ "partnerID" ] }, - "inboundOrderLines": { + "outboundOrderLines": { "cardinality": "many", - "target.entity": "InboundOrderLine", + "target.entity": "OutboundOrderLine", "source.fields": [ "no" ], @@ -1530,244 +1377,290 @@ "shipmentID" ] }, - "characteristics": { + "pol": { "cardinality": "one", - 
"target.entity": "CharacteristicGroup", + "target.entity": "polPort", "source.fields": [ - "characteristicGroupCode" + "polCode" ], "target.fields": [ - "code" + "Code" ] }, - "terminalLocation": { + "pod": { "cardinality": "one", - "target.entity": "TerminalLocation", + "target.entity": "podPort", "source.fields": [ - "unloadingTerminalCode" + "podCode" ], "target.fields": [ - "terminalCode" + "Code" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [] + } + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "OutboundOrderLine": { + "source": { + "object": "silver_ops.v_OutboundOrderLine", + "type": "table", + "key-fields": [ + "documentNo", + "lineNo" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "OutboundOrderLine", + "plural": "OutboundOrderLines" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "outboundOrder": { + "cardinality": "one", + "target.entity": "OutboundOrder", + "source.fields": [ + "documentNo" + ], + "target.fields": [ + "no" ] }, - "pol": { + "charecteristics": { "cardinality": "one", - "target.entity": "polPort", + "target.entity": "CharacteristicGroup", "source.fields": [ - "polCode" + "characteristicGroupCode" ], "target.fields": [ - "Code" + "code" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [] + } + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "PartnerPerCompany": { + "source": { + "object": "silver_ops.v_PartnerPerCompany", + "type": "table", + "key-fields": [ + "PartnerPerCompanyId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "PartnerPerCompany", + "plural": "PartnerPerCompanies" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Customer": { + "cardinality": "many", + "target.entity": "CustomerPartner", + "source.fields": [ + "sourceEntityID" 
+ ], + "target.fields": [ + "partnerID" ] }, - "pod": { + "organization": { "cardinality": "one", - "target.entity": "podPort", + "target.entity": "Organization", "source.fields": [ - "podCode" + "mdmOrganizationEntityID" ], "target.fields": [ - "Code" + "mdmEntityID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } ] } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "pdyPort": { + "source": { + "object": "silver_ops.Port", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "pdyPort", + "plural": "pdyPorts" + } + }, + "rest": { + "enabled": true }, "permissions": [ { "role": "anonymous", "actions": [ { - "action": "read", - "fields": { - "exclude": [] - } + "action": "read" } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "InboundOrderLine": { + "podPort": { "source": { - "object": "silver_ops.v_InboundOrderLine", + "object": "silver_ops.Port", "type": "table", "key-fields": [ - "documentNo", - "lineNo" + "Code" ] }, "graphql": { "enabled": true, "type": { - "singular": "InboundOrderLine", - "plural": "InboundOrderLines" + "singular": "podPort", + "plural": "podPorts" } }, "rest": { "enabled": true }, - "relationships": { - "inboundOrder": { - "cardinality": "one", - "target.entity": "InboundOrder", - "source.fields": [ - "documentNo" - ], - "target.fields": [ - "no" - ] - } - }, "permissions": [ { "role": "anonymous", "actions": [ { - "action": "read", - "fields": { - "exclude": [] - } + "action": "read" } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "OutboundOrder": { + "polPort": { "source": { - "object": "silver_ops.v_OutboundOrder", + "object": "silver_ops.Port", "type": "table", "key-fields": [ - "systemId" + "Code" ] }, "graphql": { "enabled": true, "type": { - "singular": "OutboundOrder", - "plural": "OutboundOrders" + "singular": "polPort", + "plural": "polPorts" } 
}, "rest": { "enabled": true }, - "relationships": { - "customer": { - "cardinality": "one", - "target.entity": "CustomerPartner", - "source.fields": [ - "customerCode" - ], - "target.fields": [ - "partnerID" - ] - }, - "outboundOrderLines": { - "cardinality": "many", - "target.entity": "OutboundOrderLine", - "source.fields": [ - "no" - ], - "target.fields": [ - "documentNo" - ] - }, - "shipment": { - "cardinality": "one", - "target.entity": "Shipment", - "source.fields": [ - "opsFactOrderId" - ], - "target.fields": [ - "shipmentID" - ] - }, - "pol": { - "cardinality": "one", - "target.entity": "polPort", - "source.fields": [ - "polCode" - ], - "target.fields": [ - "Code" - ] - }, - "pod": { - "cardinality": "one", - "target.entity": "podPort", - "source.fields": [ - "podCode" - ], - "target.fields": [ - "Code" - ] - } - }, "permissions": [ { "role": "anonymous", "actions": [ { - "action": "read", - "fields": { - "exclude": [] - } + "action": "read" } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "OutboundOrderLine": { + "prcPort": { "source": { - "object": "silver_ops.v_OutboundOrderLine", + "object": "silver_ops.Port", "type": "table", "key-fields": [ - "documentNo", - "lineNo" + "Code" ] }, "graphql": { "enabled": true, "type": { - "singular": "OutboundOrderLine", - "plural": "OutboundOrderLines" + "singular": "prcPort", + "plural": "prcPorts" } }, "rest": { "enabled": true }, - "relationships": { - "outboundOrder": { - "cardinality": "one", - "target.entity": "OutboundOrder", - "source.fields": [ - "documentNo" - ], - "target.fields": [ - "no" - ] - }, - "charecteristics": { - "cardinality": "one", - "target.entity": "CharacteristicGroup", - "source.fields": [ - "characteristicGroupCode" - ], - "target.fields": [ - "code" - ] - } - }, "permissions": [ { "role": "anonymous", "actions": [ { - "action": "read", - "fields": { - "exclude": [] - } + "action": "read" } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } 
}, "PurchaseDocument": { "source": { @@ -1811,78 +1704,69 @@ } ] } - ] - }, - "CharacteristicGroup": { - "source": { - "object": "silver_ops.v_Characteristics", - "type": "table", - "key-fields": [ - "code" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "CharacteristicGroup", - "plural": "CharacteristicGroups" - } - }, - "rest": { - "enabled": true - }, + ], "cache": { "enabled": true, "ttl-seconds": 120 - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ] + } }, - "CargoItem": { + "SalesDocument": { "source": { - "object": "silver_ops.CargoItem", + "object": "silver_ops.v_SalesDocument", "type": "table", "key-fields": [ - "cargoItemID" + "salesDocumentID" ] }, "graphql": { "enabled": true, "type": { - "singular": "CargoItem", - "plural": "CargoItems" + "singular": "SalesDocument", + "plural": "SalesDocuments" } }, "rest": { "enabled": true }, "relationships": { - "Shipment": { + "invoicee": { "cardinality": "one", - "target.entity": "Shipment", + "target.entity": "CustomerPartner", "source.fields": [ - "shipmentId" + "invoiceeID" ], "target.fields": [ - "shipmentID" + "partnerID" ] }, - "ShipmentEquipment": { + "SalesDocumentLine": { + "cardinality": "many", + "target.entity": "SalesDocumentLine", + "source.fields": [ + "salesDocumentID" + ], + "target.fields": [ + "salesDocumentID" + ] + }, + "Shipments": { + "cardinality": "many", + "target.entity": "ShipmentsPerSalesDocument", + "source.fields": [ + "salesDocumentID" + ], + "target.fields": [ + "salesDocumentID" + ] + }, + "Document": { "cardinality": "one", - "target.entity": "ShipmentEquipment", + "target.entity": "ShipmentDocument", "source.fields": [ - "shipmentEquipmentId" + "documentNumber" ], "target.fields": [ - "id" + "invoiceCrMemoNo" ] } }, @@ -1891,26 +1775,40 @@ "role": "anonymous", "actions": [ { - "action": "read" + "action": "read", + "fields": { + "exclude": [ + "D365FSynchErrorText", + "D365FSynchStatus", + "Env", + 
"OrderNo", + "SelltoCustomerNo", + "SourceOrderNos" + ] + } } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "ShipmentsPerSalesDocument": { + "SalesDocumentLine": { "source": { - "object": "silver_ops.v_ShipmentsPerSalesDocument", + "object": "silver_ops.SalesDocumentLine", "type": "table", "key-fields": [ "salesDocumentID", - "shipmentID" + "lineNo" ] }, "graphql": { "enabled": true, "type": { - "singular": "ShipmentsPerSalesDocument", - "plural": "ShipmentsPerSalesDocuments" + "singular": "SalesDocumentLine", + "plural": "SalesDocumentLines" } }, "rest": { @@ -1927,7 +1825,7 @@ "salesDocumentID" ] }, - "Shipment": { + "shipment": { "cardinality": "one", "target.entity": "Shipment", "source.fields": [ @@ -1943,33 +1841,155 @@ "role": "anonymous", "actions": [ { - "action": "read" + "action": "read", + "fields": { + "exclude": [ + "D365FSynchErrorText", + "D365FSynchStatus", + "Env", + "OrderNo", + "SelltoCustomerNo", + "sourceOrderNo" + ] + } } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "TerminalLocation": { + "Shipment": { "source": { - "object": "silver_ops.v_TerminalLocation", + "object": "silver_ops.v_Shipment", "type": "table", "key-fields": [ - "terminalCode" + "shipmentID" ] }, "graphql": { "enabled": true, "type": { - "singular": "TerminalLocation", - "plural": "TerminalLocations" + "singular": "Shipment", + "plural": "Shipments" } }, "rest": { "enabled": true }, - "cache": { - "enabled": true, - "ttl-seconds": 120 + "relationships": { + "BillOfLading": { + "cardinality": "many", + "target.entity": "BillOfLading", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "EnvOrderNo" + ] + }, + "ShipmentEquipments": { + "cardinality": "many", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + }, + "CargoItems": { + "cardinality": "many", + "target.entity": "CargoItem", + "source.fields": [ + "shipmentID" + ], + 
"target.fields": [ + "shipmentId" + ] + }, + "carrier": { + "cardinality": "one", + "target.entity": "CarrierPartner", + "source.fields": [ + "carrierID" + ], + "target.fields": [ + "partnerID" + ] + }, + "event": { + "cardinality": "many", + "target.entity": "ShipmentEvent", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "ShipmentId" + ] + }, + "customer": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "customerID" + ], + "target.fields": [ + "partnerID" + ] + }, + "shipmentPol": { + "cardinality": "one", + "target.entity": "polPort", + "source.fields": [ + "pol" + ], + "target.fields": [ + "Code" + ] + }, + "shipmentPod": { + "cardinality": "one", + "target.entity": "podPort", + "source.fields": [ + "pod" + ], + "target.fields": [ + "Code" + ] + }, + "shipmentPrc": { + "cardinality": "one", + "target.entity": "prcPort", + "source.fields": [ + "prc" + ], + "target.fields": [ + "Code" + ] + }, + "shipmentPdy": { + "cardinality": "one", + "target.entity": "pdyPort", + "source.fields": [ + "pdy" + ], + "target.fields": [ + "Code" + ] + }, + "goodsSummary": { + "cardinality": "one", + "target.entity": "GoodsSummary", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + } }, "permissions": [ { @@ -1980,11 +2000,15 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "LoadReport": { + "ShipmentDocument": { "source": { - "object": "silver_ops.v_loadReport", + "object": "silver_ops.v_ShipmentDocument", "type": "table", "key-fields": [ "systemId" @@ -1993,8 +2017,8 @@ "graphql": { "enabled": true, "type": { - "singular": "Loadreport", - "plural": "Loadreports" + "singular": "ShipmentDocument", + "plural": "ShipmentDocuments" } }, "rest": { @@ -2010,26 +2034,6 @@ "target.fields": [ "shipmentID" ] - }, - "POL": { - "cardinality": "one", - "target.entity": "polPort", - "source.fields": [ - "polCode" - ], - "target.fields": [ - "Code" - ] - }, - "POD": { - 
"cardinality": "one", - "target.entity": "podPort", - "source.fields": [ - "podCode" - ], - "target.fields": [ - "Code" - ] } }, "permissions": [ @@ -2041,32 +2045,76 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "ShipmentDocument": { + "ShipmentEquipment": { "source": { - "object": "silver_ops.ShipmentDocuments", + "object": "silver_ops.v_ShipmentEquipment", "type": "table", "key-fields": [ - "systemId" + "id" ] }, "graphql": { "enabled": true, "type": { - "singular": "ShipmentDocument", - "plural": "ShipmentDocuments" + "singular": "ShipmentEquipment", + "plural": "ShipmentEquipment" } }, "rest": { "enabled": true }, "relationships": { + "Equipments": { + "cardinality": "one", + "target.entity": "Equipment", + "source.fields": [ + "equipmentID" + ], + "target.fields": [ + "equipmentID" + ] + }, + "TrackingEvents": { + "cardinality": "many", + "target.entity": "TrackingEvent", + "source.fields": [ + "id" + ], + "target.fields": [ + "shipmentEquipmentID" + ] + }, + "ShipmentEquipmentTransport": { + "cardinality": "many", + "target.entity": "ShipmentEquipmentTransport", + "source.fields": [ + "id" + ], + "target.fields": [ + "shipmentEquipmentID" + ] + }, + "CargoItem": { + "cardinality": "many", + "target.entity": "CargoItem", + "source.fields": [ + "id" + ], + "target.fields": [ + "shipmentEquipmentId" + ] + }, "Shipment": { "cardinality": "one", "target.entity": "Shipment", "source.fields": [ - "ops_fact_order_id" + "shipmentID" ], "target.fields": [ "shipmentID" @@ -2082,27 +2130,51 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "TrackingEvent": { + "ShipmentEquipmentTransport": { "source": { - "object": "silver_trk.TrackingEvent", + "object": "silver_ops.ShipmentEquipmentTransport", "type": "table", "key-fields": [ - "trackingEventID" + "id" ] }, "graphql": { "enabled": true, "type": { - "singular": "TrackingEvent", - "plural": "TrackingEvents" + "singular": "ShipmentEquipmentTransport", + 
"plural": "ShipmentEquipmentTransports" } }, "rest": { "enabled": true }, "relationships": { + "pol": { + "cardinality": "one", + "target.entity": "polPort", + "source.fields": [ + "polID" + ], + "target.fields": [ + "Code" + ] + }, + "pod": { + "cardinality": "one", + "target.entity": "podPort", + "source.fields": [ + "podID" + ], + "target.fields": [ + "Code" + ] + }, "ShipmentEquipment": { "cardinality": "one", "target.entity": "ShipmentEquipment", @@ -2113,14 +2185,14 @@ "id" ] }, - "TrackingPort": { + "Transport": { "cardinality": "one", - "target.entity": "TrackingPort", + "target.entity": "Transport", "source.fields": [ - "portID" + "transportID" ], "target.fields": [ - "portId" + "id" ] } }, @@ -2133,54 +2205,42 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "LocationPOD": { + "ShipmentEvent": { "source": { - "object": "silver_ops.Location", + "object": "silver_ops.v_ShipmentEvent", "type": "table", "key-fields": [ - "Code" + "ShipmentId", + "systemCreatedAt" ] }, "graphql": { "enabled": true, "type": { - "singular": "LocationPOD", - "plural": "LocationsPOD" + "singular": "ShipmentEvent", + "plural": "ShipmentEvents" } }, "rest": { "enabled": true }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "ShipmentId" + ], + "target.fields": [ + "shipmentID" ] } - ] - }, - "LocationPOL": { - "source": { - "object": "silver_ops.Location", - "type": "table", - "key-fields": [ - "Code" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "LocationPOL", - "plural": "LocationsPOL" - } - }, - "rest": { - "enabled": true }, "permissions": [ { @@ -2191,26 +2251,31 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "LocationPDY": { + "ShipmentLocationType": { "source": { - "object": "silver_ops.Location", - "type": "table", - "key-fields": [ - 
"Code" - ] + "object": "silver_ops.ShipmentLocationType", + "type": "table" }, "graphql": { "enabled": true, "type": { - "singular": "LocationPDY", - "plural": "LocationsPDY" + "singular": "ShipmentLocationType", + "plural": "ShipmentLocationTypes" } }, "rest": { "enabled": true }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, "permissions": [ { "role": "anonymous", @@ -2222,24 +2287,47 @@ } ] }, - "LocationPRC": { + "ShipmentsPerSalesDocument": { "source": { - "object": "silver_ops.Location", + "object": "silver_ops.v_ShipmentsPerSalesDocument", "type": "table", "key-fields": [ - "Code" + "salesDocumentID", + "shipmentID" ] }, "graphql": { "enabled": true, "type": { - "singular": "LocationPRC", - "plural": "LocationsPRC" + "singular": "ShipmentsPerSalesDocument", + "plural": "ShipmentsPerSalesDocuments" } }, "rest": { "enabled": true }, + "relationships": { + "SalesDocument": { + "cardinality": "one", + "target.entity": "SalesDocument", + "source.fields": [ + "salesDocumentID" + ], + "target.fields": [ + "salesDocumentID" + ] + }, + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + } + }, "permissions": [ { "role": "anonymous", @@ -2249,26 +2337,31 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "LocationTS1": { + "ShipmentStatusType": { "source": { - "object": "silver_ops.Location", - "type": "table", - "key-fields": [ - "Code" - ] + "object": "silver_ops.ShipmentStatusType", + "type": "table" }, "graphql": { "enabled": true, "type": { - "singular": "LocationTS1", - "plural": "LocationsTS1" + "singular": "ShipmentStatusType", + "plural": "ShipmentStatusTypes" } }, "rest": { "enabled": true }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, "permissions": [ { "role": "anonymous", @@ -2280,24 +2373,106 @@ } ] }, - "LocationTS2": { + "Tariff": { "source": { - "object": "silver_ops.Location", + "object": 
"silver_ops.v_Tariff", "type": "table", "key-fields": [ - "Code" + "TariffId" ] }, "graphql": { "enabled": true, "type": { - "singular": "LocationPS2", - "plural": "LocationsTS2" + "singular": "Tariff", + "plural": "Tariffs" } }, "rest": { "enabled": true }, + "relationships": { + "TariffSurcharge": { + "cardinality": "many", + "target.entity": "TariffSurcharge", + "source.fields": [ + "TariffSurchargeKey" + ], + "target.fields": [ + "TariffId" + ] + }, + "TariffContactGroupMember": { + "cardinality": "many", + "target.entity": "TariffContactGroupMember", + "source.fields": [ + "SourceNo" + ], + "target.fields": [ + "ContactGroupCode" + ] + }, + "Pol": { + "cardinality": "one", + "target.entity": "LocationPOL", + "source.fields": [ + "FromZoneCode" + ], + "target.fields": [ + "Code" + ] + }, + "Pod": { + "cardinality": "one", + "target.entity": "LocationPOD", + "source.fields": [ + "ToZoneCode" + ], + "target.fields": [ + "Code" + ] + }, + "Prc": { + "cardinality": "one", + "target.entity": "LocationPRC", + "source.fields": [ + "PRCCode" + ], + "target.fields": [ + "Code" + ] + }, + "Pdy": { + "cardinality": "one", + "target.entity": "LocationPDY", + "source.fields": [ + "PDYCode" + ], + "target.fields": [ + "Code" + ] + }, + "Transshipment1": { + "cardinality": "one", + "target.entity": "LocationTS1", + "source.fields": [ + "TransshipmentPort" + ], + "target.fields": [ + "Code" + ] + }, + "Transshipment2": { + "cardinality": "one", + "target.entity": "LocationTS2", + "source.fields": [ + "TransshipmentPort2" + ], + "target.fields": [ + "Code" + ] + } + }, "permissions": [ { "role": "anonymous", @@ -2307,35 +2482,109 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "TariffContainerGroup": { + "TariffAll": { "source": { - "object": "silver_ops.TariffContainerGroup", + "object": "silver_ops.v_TariffAll", "type": "table", "key-fields": [ - "ContainerCode" + "TariffId" ] }, "graphql": { "enabled": true, "type": { - "singular": 
"TariffContainerGroup", - "plural": "TariffContainerGroups" + "singular": "TariffAll", + "plural": "TariffsAll" } }, "rest": { "enabled": true }, "relationships": { - "Tariff": { + "TariffSurcharge": { "cardinality": "many", - "target.entity": "Tariff", + "target.entity": "TariffSurcharge", "source.fields": [ - "ContainerCode" + "TariffSurchargeKey" ], "target.fields": [ - "ContainerType" + "TariffId" + ] + }, + "TariffContactGroupMember": { + "cardinality": "many", + "target.entity": "TariffContactGroupMember", + "source.fields": [ + "SourceNo" + ], + "target.fields": [ + "ContactGroupCode" + ] + }, + "Pol": { + "cardinality": "one", + "target.entity": "LocationPOL", + "source.fields": [ + "FromZoneCode" + ], + "target.fields": [ + "Code" + ] + }, + "Pod": { + "cardinality": "one", + "target.entity": "LocationPOD", + "source.fields": [ + "ToZoneCode" + ], + "target.fields": [ + "Code" + ] + }, + "Prc": { + "cardinality": "one", + "target.entity": "LocationPRC", + "source.fields": [ + "PRCCode" + ], + "target.fields": [ + "Code" + ] + }, + "Pdy": { + "cardinality": "one", + "target.entity": "LocationPDY", + "source.fields": [ + "PDYCode" + ], + "target.fields": [ + "Code" + ] + }, + "Transshipment1": { + "cardinality": "one", + "target.entity": "LocationTS1", + "source.fields": [ + "TransshipmentPort" + ], + "target.fields": [ + "Code" + ] + }, + "Transshipment2": { + "cardinality": "one", + "target.entity": "LocationTS2", + "source.fields": [ + "TransshipmentPort2" + ], + "target.fields": [ + "Code" ] } }, @@ -2348,7 +2597,11 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, "TariffContactGroupMember": { "source": { @@ -2399,21 +2652,25 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "TariffSurcharge": { + "TariffContainerGroup": { "source": { - "object": "silver_ops.TariffSurcharge", + "object": "silver_ops.TariffContainerGroup", "type": "table", "key-fields": [ - "systemId" + "ContainerCode" ] }, 
"graphql": { "enabled": true, "type": { - "singular": "TariffSurcharge", - "plural": "TariffSurcharges" + "singular": "TariffContainerGroup", + "plural": "TariffContainerGroups" } }, "rest": { @@ -2421,13 +2678,13 @@ }, "relationships": { "Tariff": { - "cardinality": "one", + "cardinality": "many", "target.entity": "Tariff", "source.fields": [ - "TariffId" + "ContainerCode" ], "target.fields": [ - "TariffSurchargeKey" + "ContainerType" ] } }, @@ -2440,38 +2697,33 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "ShipmentEvent": { + "TariffContainerSize": { "source": { - "object": "silver_ops.v_ShipmentEvent", + "object": "silver_ops.v_TariffContainerSize", "type": "table", "key-fields": [ - "ShipmentId", - "systemCreatedAt" + "containerSize" ] }, "graphql": { "enabled": true, "type": { - "singular": "ShipmentEvent", - "plural": "ShipmentEvents" + "singular": "TariffContainerSize", + "plural": "TariffContainerSizes" } }, "rest": { "enabled": true }, - "relationships": { - "Shipment": { - "cardinality": "one", - "target.entity": "Shipment", - "source.fields": [ - "ShipmentId" - ], - "target.fields": [ - "shipmentID" - ] - } + "cache": { + "enabled": true, + "ttl-seconds": 120 }, "permissions": [ { @@ -2484,27 +2736,35 @@ } ] }, - "LoadtypeType": { + "TariffSurcharge": { "source": { - "object": "silver_ops.v_LoadTypeType", + "object": "silver_ops.TariffSurcharge", "type": "table", "key-fields": [ - "loadType" + "systemId" ] }, "graphql": { "enabled": true, "type": { - "singular": "LoadtypeType", - "plural": "LoadtypeTypes" + "singular": "TariffSurcharge", + "plural": "TariffSurcharges" } }, "rest": { "enabled": true }, - "cache": { - "enabled": true, - "ttl-seconds": 120 + "relationships": { + "Tariff": { + "cardinality": "one", + "target.entity": "Tariff", + "source.fields": [ + "TariffId" + ], + "target.fields": [ + "TariffSurchargeKey" + ] + } }, "permissions": [ { @@ -2515,21 +2775,25 @@ } ] } - ] + ], + "cache": { + "enabled": true, 
+ "ttl-seconds": 120 + } }, - "FlowtypeType": { + "TerminalLocation": { "source": { - "object": "silver_ops.v_FlowTypeType", + "object": "silver_ops.v_TerminalLocation", "type": "table", "key-fields": [ - "flowType" + "terminalCode" ] }, "graphql": { "enabled": true, "type": { - "singular": "FlowtypeType", - "plural": "FlowtypeTypes" + "singular": "TerminalLocation", + "plural": "TerminalLocations" } }, "rest": { @@ -2550,103 +2814,53 @@ } ] }, - "Tariff": { + "TrackingEvent": { "source": { - "object": "silver_ops.v_Tariff", + "object": "silver_trk.TrackingEvent", "type": "table", "key-fields": [ - "TariffId" + "trackingEventID" ] }, "graphql": { "enabled": true, "type": { - "singular": "Tariff", - "plural": "Tariffs" + "singular": "TrackingEvent", + "plural": "TrackingEvents" } }, "rest": { "enabled": true }, "relationships": { - "TariffSurcharge": { - "cardinality": "many", - "target.entity": "TariffSurcharge", - "source.fields": [ - "TariffSurchargeKey" - ], - "target.fields": [ - "TariffId" - ] - }, - "TariffContactGroupMember": { - "cardinality": "many", - "target.entity": "TariffContactGroupMember", - "source.fields": [ - "SourceNo" - ], - "target.fields": [ - "ContactGroupCode" - ] - }, - "Pol": { - "cardinality": "one", - "target.entity": "LocationPOL", - "source.fields": [ - "FromZoneCode" - ], - "target.fields": [ - "Code" - ] - }, - "Pod": { - "cardinality": "one", - "target.entity": "LocationPOD", - "source.fields": [ - "ToZoneCode" - ], - "target.fields": [ - "Code" - ] - }, - "Prc": { - "cardinality": "one", - "target.entity": "LocationPRC", - "source.fields": [ - "PRCCode" - ], - "target.fields": [ - "Code" - ] - }, - "Pdy": { + "ShipmentEquipment": { "cardinality": "one", - "target.entity": "LocationPDY", + "target.entity": "ShipmentEquipment", "source.fields": [ - "PDYCode" + "shipmentEquipmentID" ], "target.fields": [ - "Code" + "id" ] }, - "Transshipment1": { + "TrackingPort": { "cardinality": "one", - "target.entity": "LocationTS1", + 
"target.entity": "TrackingPort", "source.fields": [ - "TransshipmentPort" + "portID" ], "target.fields": [ - "Code" + "portId" ] }, - "Transshipment2": { + "TrackingVessel": { "cardinality": "one", - "target.entity": "LocationTS2", + "target.entity": "TrackingVessel", "source.fields": [ - "TransshipmentPort2" + "vesselId" ], "target.fields": [ - "Code" + "vesselId" ] } }, @@ -2659,107 +2873,33 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "TariffAll": { + "TrackingPort": { "source": { - "object": "silver_ops.v_TariffAll", + "object": "silver_trk.ww_ports", "type": "table", "key-fields": [ - "TariffId" + "portId" ] }, "graphql": { "enabled": true, "type": { - "singular": "TariffAll", - "plural": "TariffsAll" + "singular": "TrackingPort", + "plural": "TrackingPorts" } }, "rest": { "enabled": true }, - "relationships": { - "TariffSurcharge": { - "cardinality": "many", - "target.entity": "TariffSurcharge", - "source.fields": [ - "TariffSurchargeKey" - ], - "target.fields": [ - "TariffId" - ] - }, - "TariffContactGroupMember": { - "cardinality": "many", - "target.entity": "TariffContactGroupMember", - "source.fields": [ - "SourceNo" - ], - "target.fields": [ - "ContactGroupCode" - ] - }, - "Pol": { - "cardinality": "one", - "target.entity": "LocationPOL", - "source.fields": [ - "FromZoneCode" - ], - "target.fields": [ - "Code" - ] - }, - "Pod": { - "cardinality": "one", - "target.entity": "LocationPOD", - "source.fields": [ - "ToZoneCode" - ], - "target.fields": [ - "Code" - ] - }, - "Prc": { - "cardinality": "one", - "target.entity": "LocationPRC", - "source.fields": [ - "PRCCode" - ], - "target.fields": [ - "Code" - ] - }, - "Pdy": { - "cardinality": "one", - "target.entity": "LocationPDY", - "source.fields": [ - "PDYCode" - ], - "target.fields": [ - "Code" - ] - }, - "Transshipment1": { - "cardinality": "one", - "target.entity": "LocationTS1", - "source.fields": [ - "TransshipmentPort" - ], - "target.fields": [ - "Code" - ] - }, - 
"Transshipment2": { - "cardinality": "one", - "target.entity": "LocationTS2", - "source.fields": [ - "TransshipmentPort2" - ], - "target.fields": [ - "Code" - ] - } + "cache": { + "enabled": true, + "ttl-seconds": 120 }, "permissions": [ { @@ -2772,35 +2912,27 @@ } ] }, - "Vessel": { + "TrackingVessel": { "source": { - "object": "silver_ops.Vessel", + "object": "silver_trk.ww_vessels", "type": "table", "key-fields": [ - "vesselID" + "vesselId" ] }, "graphql": { "enabled": true, "type": { - "singular": "Vessel", - "plural": "Vessels" + "singular": "TrackingVessel", + "plural": "TrackingVessels" } }, "rest": { "enabled": true }, - "relationships": { - "Transport": { - "cardinality": "many", - "target.entity": "Transport", - "source.fields": [ - "vesselID" - ], - "target.fields": [ - "vesselID" - ] - } + "cache": { + "enabled": true, + "ttl-seconds": 120 }, "permissions": [ { @@ -2813,33 +2945,43 @@ } ] }, - "WindwardRegistrationFeedback": { + "Transport": { "source": { - "object": "silver_trk.ww_TrackingFeedback", + "object": "silver_ops.Transport", "type": "table", "key-fields": [ - "shipmentId" + "id" ] }, "graphql": { "enabled": true, "type": { - "singular": "WindwardRegistrationFeedback", - "plural": "WindwardRegistrationFeedbacks" + "singular": "Transport", + "plural": "Transports" } }, "rest": { "enabled": true }, "relationships": { - "Shipment": { + "ShipmentEquipmentTransport": { + "cardinality": "many", + "target.entity": "ShipmentEquipmentTransport", + "source.fields": [ + "id" + ], + "target.fields": [ + "transportID" + ] + }, + "Vessel": { "cardinality": "one", - "target.entity": "Shipment", + "target.entity": "Vessel", "source.fields": [ - "shipmentId" + "vesselID" ], "target.fields": [ - "shipmentID" + "vesselID" ] } }, @@ -2852,70 +2994,131 @@ } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "EquipmentMap": { + "TransportPlanStageType": { "source": { - "object": "silver_ops.usp_MapSource", - "type": "stored-procedure", - 
"parameters": { - "customerScope": "string" + "object": "silver_ops.TransportPlanStageType", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TransportPlanStageType", + "plural": "TransportPlanStageTypes" } }, + "rest": { + "enabled": true + }, "cache": { "enabled": true, - "ttl-seconds": 60 + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "Vessel": { + "source": { + "object": "silver_ops.Vessel", + "type": "table", + "key-fields": [ + "vesselID" + ] }, "graphql": { "enabled": true, "type": { - "singular": "EquipmentMap", - "plural": "EquipmentMaps" + "singular": "Vessel", + "plural": "Vessels" } }, "rest": { "enabled": true }, + "relationships": { + "Transport": { + "cardinality": "many", + "target.entity": "Transport", + "source.fields": [ + "vesselID" + ], + "target.fields": [ + "vesselID" + ] + } + }, "permissions": [ { "role": "anonymous", "actions": [ { - "action": "execute" + "action": "read" } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } }, - "GlobalSearch": { + "WindwardRegistrationFeedback": { "source": { - "object": "silver_ops.usp_globalSearch", - "type": "stored-procedure", - "parameters": { - "searchString": "string", - "customerScope": "string" - } + "object": "silver_trk.ww_TrackingFeedback", + "type": "table", + "key-fields": [ + "shipmentId" + ] }, "graphql": { "enabled": true, "type": { - "singular": "GlobalSearch", - "plural": "GlobalSearches" + "singular": "WindwardRegistrationFeedback", + "plural": "WindwardRegistrationFeedbacks" } }, "rest": { "enabled": true }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentId" + ], + "target.fields": [ + "shipmentID" + ] + } + }, "permissions": [ { "role": "anonymous", "actions": [ { - "action": "execute" + "action": "read" } ] } - ] + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 
+ } } } } From 77d05e44df333f1353f46ecc90fdc48848cb1626 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Thu, 26 Jun 2025 15:29:37 +0200 Subject: [PATCH 29/79] Updates --- src/Service/dab-config.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/Service/dab-config.json b/src/Service/dab-config.json index 57d314ef8e..b1099fe7d8 100644 --- a/src/Service/dab-config.json +++ b/src/Service/dab-config.json @@ -4,13 +4,13 @@ "database-type": "mssql", "connection-string": "Data Source=nqf4kgvoqm4ufazdzriupb2pay-hmnbvxar2mgu7e3ng27fsqy3we.database.fabric.microsoft.com,1433;User ID=8bfaf0d6-fa20-4ed5-a450-0005ceb77729;Password=z5y8Q~hLcfdAflVrfnYoVxdavIJXZb5tlH~tAbRn;Pooling=True;Min Pool Size=0;Max Pool Size=100;Multiple Active Result Sets=False;Connect Timeout=30;Encrypt=False;Trust Server Certificate=True;Authentication=ActiveDirectoryServicePrincipal;Initial Catalog=apiLayer-345587c4-1232-457e-9761-b6bca3d72e2e", "options": { - "set-session-context": false + "set-session-context": true } }, "runtime": { "telemetry": { "application-insights": { - "enabled": true, + "enabled": false, "connection-string": "InstrumentationKey=d303d229-1055-4f48-a811-4dc0a3d4aa1e;IngestionEndpoint=https://westeurope-5.in.applicationinsights.azure.com/;LiveEndpoint=https://westeurope.livediagnostics.monitor.azure.com/;ApplicationId=c98a3731-8125-4aa0-867a-3361c0e536db" } }, @@ -2816,7 +2816,7 @@ }, "TrackingEvent": { "source": { - "object": "silver_trk.TrackingEvent", + "object": "silver_trk.v_TrackingEvent", "type": "table", "key-fields": [ "trackingEventID" From 07def1198d12de2675d759a81923a21b3962fe59 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 26 Jun 2025 14:09:16 -0700 Subject: [PATCH 30/79] Bump dotnet-sdk from 8.0.100 to 8.0.411 (#2744) Bumps [dotnet-sdk](https://github.com/dotnet/sdk) from 8.0.100 to 8.0.411.
Release notes

Sourced from dotnet-sdk's releases.

v8.0.411

Dependency Updates

  • Updated dotnet/templating dependencies
    • Regular updates to the .NET templating engine across multiple release branches to ensure users have the latest templates, bug fixes, and improvements.
  • Updated dotnet/msbuild dependencies
    • Keeps the MSBuild engine up to date for improved build reliability and new features.
  • Updated dotnet/arcade dependencies
  • Updated dotnet/source-build-reference-packages dependencies
    • Keeps source-build reference packages up to date, improving compatibility and reliability for source-build scenarios.
  • Updated dotnet/razor dependencies
    • Ensures the Razor tooling is current, providing the latest features and fixes for Razor-based projects.

Miscellaneous

  • Branding Updates
    • Updated SDK branding to reflect new versions, ensuring users and tools can identify the correct SDK release.

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=dotnet-sdk&package-manager=dotnet_sdk&previous-version=8.0.100&new-version=8.0.411)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- global.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/global.json b/global.json index 391ba3c2a3..08b8436e55 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "sdk": { - "version": "8.0.100", + "version": "8.0.411", "rollForward": "latestFeature" } } From a294b8ab3d8f9f1c87de7b0cd4ef001a2faf605c Mon Sep 17 00:00:00 2001 From: souvikghosh04 Date: Tue, 1 Jul 2025 22:00:27 +0530 Subject: [PATCH 31/79] CodeQL suppression for DefaultAzureCredential default and SwaggerUI usage (#2747) ## Why make this change? - Closes #2746 - This PR addresses a CodeQL false positive regarding - the usage of `DefaultAzureCredential()` - the usage of SwaggerUI - Reference documentation: - https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/codeql/codeql-semmle - [CodeQL suppression syntax](https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#suppressing-or-resolving-alerts) - [DefaultAzureCredential documentation](https://learn.microsoft.com/en-us/dotnet/api/azure.identity.defaultazurecredential) - https://liquid.microsoft.com/Web/Object/Read/Campaign.Requirements/Requirements/SSIRP.floss ## What is this change? - Suppresses the CodeQL warnings: - Adds a suppression rule to the relevant file - No logic or runtime code is changed; this is a configuration only update to streamline code analysis. ## How was this tested? - [x] Integration Tests - [x] Unit Tests The change will be validated by running the full test suite and triggering a CodeQL analysis to confirm the warning is suppressed and no new issues are introduced as part of a next build. 
## Sample Request(s) - NA --------- Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- src/Core/Resolvers/CosmosClientProvider.cs | 2 +- src/Core/Resolvers/MsSqlQueryExecutor.cs | 2 +- src/Core/Resolvers/MySqlQueryExecutor.cs | 2 +- src/Core/Resolvers/PostgreSqlExecutor.cs | 2 +- src/Service/Startup.cs | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/Core/Resolvers/CosmosClientProvider.cs b/src/Core/Resolvers/CosmosClientProvider.cs index 374492325c..82e2b5e9c7 100644 --- a/src/Core/Resolvers/CosmosClientProvider.cs +++ b/src/Core/Resolvers/CosmosClientProvider.cs @@ -79,7 +79,7 @@ private void InitializeClient(RuntimeConfig? configuration) } else if (!_accessToken.ContainsKey(dataSourceName)) { - client = new CosmosClient(accountEndPoint, new DefaultAzureCredential(), options); + client = new CosmosClient(accountEndPoint, new DefaultAzureCredential(), options); // CodeQL [SM05137] DefaultAzureCredential will use Managed Identity if available or fallback to default. } else { diff --git a/src/Core/Resolvers/MsSqlQueryExecutor.cs b/src/Core/Resolvers/MsSqlQueryExecutor.cs index 45d641bb32..7a0260cd20 100644 --- a/src/Core/Resolvers/MsSqlQueryExecutor.cs +++ b/src/Core/Resolvers/MsSqlQueryExecutor.cs @@ -44,7 +44,7 @@ public class MsSqlQueryExecutor : QueryExecutor public override IDictionary ConnectionStringBuilders => base.ConnectionStringBuilders; - public DefaultAzureCredential AzureCredential { get; set; } = new(); + public DefaultAzureCredential AzureCredential { get; set; } = new(); // CodeQL [SM05137] DefaultAzureCredential will use Managed Identity if available or fallback to default. 
/// /// The saved cached access token obtained from DefaultAzureCredentials diff --git a/src/Core/Resolvers/MySqlQueryExecutor.cs b/src/Core/Resolvers/MySqlQueryExecutor.cs index b28620b09a..670232b826 100644 --- a/src/Core/Resolvers/MySqlQueryExecutor.cs +++ b/src/Core/Resolvers/MySqlQueryExecutor.cs @@ -32,7 +32,7 @@ public class MySqlQueryExecutor : QueryExecutor /// private Dictionary _accessTokensFromConfiguration; - public DefaultAzureCredential AzureCredential { get; set; } = new(); + public DefaultAzureCredential AzureCredential { get; set; } = new(); // CodeQL [SM05137] DefaultAzureCredential will use Managed Identity if available or fallback to default. /// /// The MySql specific connection string builders. diff --git a/src/Core/Resolvers/PostgreSqlExecutor.cs b/src/Core/Resolvers/PostgreSqlExecutor.cs index 9bab1f8a39..70fa0f1079 100644 --- a/src/Core/Resolvers/PostgreSqlExecutor.cs +++ b/src/Core/Resolvers/PostgreSqlExecutor.cs @@ -33,7 +33,7 @@ public class PostgreSqlQueryExecutor : QueryExecutor /// private Dictionary _accessTokensFromConfiguration; - public DefaultAzureCredential AzureCredential { get; set; } = new(); + public DefaultAzureCredential AzureCredential { get; set; } = new(); // CodeQL [SM05137]: DefaultAzureCredential will use Managed Identity if available or fallback to default. /// /// The PostgreSql specific connection string builders. diff --git a/src/Service/Startup.cs b/src/Service/Startup.cs index a696148332..d8cb218b75 100644 --- a/src/Service/Startup.cs +++ b/src/Service/Startup.cs @@ -576,7 +576,7 @@ public void Configure(IApplicationBuilder app, IWebHostEnvironment env, RuntimeC // Consequently, SwaggerUI is not presented in a StaticWebApps (late-bound config) environment. if (IsUIEnabled(runtimeConfig, env)) { - app.UseSwaggerUI(c => + app.UseSwaggerUI(c => // CodeQL [SM04686] SwaggerUI is only enabled for Development environment. 
{ c.ConfigObject.Urls = new SwaggerEndpointMapper(app.ApplicationServices.GetService()); }); From bfb7dd461448082b7498df7df2488c35155a289d Mon Sep 17 00:00:00 2001 From: souvikghosh04 Date: Thu, 3 Jul 2025 11:41:49 +0530 Subject: [PATCH 32/79] Optimize health check response using parallel queries (#2714) ## Why make this change? - #2533 - Improve health check endpoint performance by running queries in parallel. - This allows the health check endpoint to load faster ## What is this change? - Provide option to specify max degree of query parallelism as a config option - The default value is 4 and max allowed value is 8. System automatically adjusts itself within this range in case a value specified is lower or higher - .Net framework's Parallel tasks are used to run queries in parallel ### Usage - `max-query-parallelism` can be set in the config, e.g.: `"runtime": { "health": { "max-query-parallelism": 8 } }` - User can set any value between 1 and 8, inclusive or the system automatically adjust in case of Null value or out of range value ## How was this tested? 
- [x] Integration Tests - [x] Unit Tests ## Sample Request(s) - GET http://localhost:5000/health --------- Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> Co-authored-by: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> Co-authored-by: Aniruddh Munde --- schemas/dab.draft.schema.json | 5 ++ .../RuntimeHealthOptionsConvertorFactory.cs | 19 ++++++- .../HealthCheck/RuntimeHealthCheckConfig.cs | 44 ++++++++++++++-- .../Configuration/HealthEndpointTests.cs | 40 +++++++++++++++ src/Service/HealthCheck/HealthCheckHelper.cs | 51 ++++++++++++++++--- 5 files changed, 148 insertions(+), 11 deletions(-) diff --git a/schemas/dab.draft.schema.json b/schemas/dab.draft.schema.json index 20903284b0..692b82b642 100644 --- a/schemas/dab.draft.schema.json +++ b/schemas/dab.draft.schema.json @@ -457,6 +457,11 @@ "type": "integer", "description": "Time to live in seconds for the Comprehensive Health Check Report cache entry.", "default": 5 + }, + "max-query-parallelism": { + "type": "integer", + "description": "The max degree of parallelism for running parallel health check queries.", + "default": 4 } } } diff --git a/src/Config/Converters/RuntimeHealthOptionsConvertorFactory.cs b/src/Config/Converters/RuntimeHealthOptionsConvertorFactory.cs index cca3366cc4..d49cc264e7 100644 --- a/src/Config/Converters/RuntimeHealthOptionsConvertorFactory.cs +++ b/src/Config/Converters/RuntimeHealthOptionsConvertorFactory.cs @@ -55,12 +55,13 @@ internal HealthCheckOptionsConverter(bool replaceEnvVar) bool? enabled = null; int? cacheTtlSeconds = null; HashSet? roles = null; + int? maxQueryParallelism = null; while (reader.Read()) { if (reader.TokenType is JsonTokenType.EndObject) { - return new RuntimeHealthCheckConfig(enabled, roles, cacheTtlSeconds); + return new RuntimeHealthCheckConfig(enabled, roles, cacheTtlSeconds, maxQueryParallelism); } string? 
property = reader.GetString(); @@ -120,7 +121,17 @@ internal HealthCheckOptionsConverter(bool replaceEnvVar) } break; + case "max-query-parallelism": + if (reader.TokenType is not JsonTokenType.Null) + { + // Allow user to set values between 1 and 8 (inclusive). If not set, the value will be set to 4 during health check. + int userValue = reader.GetInt32(); + int parseMaxQueryParallelism = Math.Clamp(userValue, RuntimeHealthCheckConfig.LOWEST_MAX_QUERY_PARALLELISM, + RuntimeHealthCheckConfig.UPPER_LIMIT_MAX_QUERY_PARALLELISM); + maxQueryParallelism = parseMaxQueryParallelism; + } + break; default: throw new JsonException($"Unexpected property {property}"); } @@ -149,6 +160,12 @@ public override void Write(Utf8JsonWriter writer, RuntimeHealthCheckConfig value JsonSerializer.Serialize(writer, value.Roles, options); } + if (value?.UserProvidedMaxQueryParallelism is true) + { + writer.WritePropertyName("max-query-parallelism"); + JsonSerializer.Serialize(writer, value.MaxQueryParallelism, options); + } + writer.WriteEndObject(); } else diff --git a/src/Config/HealthCheck/RuntimeHealthCheckConfig.cs b/src/Config/HealthCheck/RuntimeHealthCheckConfig.cs index 02c6cf42f2..fef45cd7f9 100644 --- a/src/Config/HealthCheck/RuntimeHealthCheckConfig.cs +++ b/src/Config/HealthCheck/RuntimeHealthCheckConfig.cs @@ -7,22 +7,46 @@ namespace Azure.DataApiBuilder.Config.ObjectModel; public record RuntimeHealthCheckConfig : HealthCheckConfig { + /// + /// Represents the lowest maximum query parallelism for health check. + /// + public const int LOWEST_MAX_QUERY_PARALLELISM = 1; + + /// + /// Default maximum query parallelism for health check. + /// + public const int DEFAULT_MAX_QUERY_PARALLELISM = 4; + + /// + /// Upper limit of maximum query parallelism for health check. + /// + public const int UPPER_LIMIT_MAX_QUERY_PARALLELISM = 8; + [JsonPropertyName("cache-ttl-seconds")] public int CacheTtlSeconds { get; set; } public HashSet? 
Roles { get; set; } - // TODO: Add support for parallel stream to run the health check query in upcoming PRs - // public int MaxDop { get; set; } = 1; // Parallelized streams to run Health Check (Default: 1) - [JsonIgnore(Condition = JsonIgnoreCondition.Always)] public bool UserProvidedTtlOptions { get; init; } = false; + /// + /// Flag to indicate if the user has provided a value for MaxQueryParallelism. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + public bool UserProvidedMaxQueryParallelism { get; init; } = false; + + /// + /// Gets or sets the maximum number of queries that can be executed in parallel. + /// + [JsonPropertyName("max-query-parallelism")] + public int? MaxQueryParallelism { get; set; } + public RuntimeHealthCheckConfig() : base() { } - public RuntimeHealthCheckConfig(bool? enabled, HashSet? roles = null, int? cacheTtlSeconds = null) : base(enabled) + public RuntimeHealthCheckConfig(bool? enabled, HashSet? roles = null, int? cacheTtlSeconds = null, int? maxQueryParallelism = null) : base(enabled) { this.Roles = roles; @@ -35,5 +59,17 @@ public RuntimeHealthCheckConfig(bool? enabled, HashSet? roles = null, in { this.CacheTtlSeconds = EntityCacheOptions.DEFAULT_TTL_SECONDS; } + + // Allow user to set values between 1 and 8 (inclusive). If not set, the value will be set to 4 during health check. 
+ if (maxQueryParallelism is not null) + { + this.MaxQueryParallelism = maxQueryParallelism; + UserProvidedMaxQueryParallelism = true; + } + else + { + this.MaxQueryParallelism = DEFAULT_MAX_QUERY_PARALLELISM; + } + } } diff --git a/src/Service.Tests/Configuration/HealthEndpointTests.cs b/src/Service.Tests/Configuration/HealthEndpointTests.cs index c765a837b7..4fd2e52bf4 100644 --- a/src/Service.Tests/Configuration/HealthEndpointTests.cs +++ b/src/Service.Tests/Configuration/HealthEndpointTests.cs @@ -206,6 +206,46 @@ public async Task TestFailureHealthCheckGraphQLResponseAsync() Assert.IsNotNull(errorMessageFromGraphQL); } + /// + /// Tests the serialization behavior of for the property." + /// + /// This test ensures that the JSON serialization behavior of adheres to the expected behavior where default values are omitted from + /// the output. + [TestMethod] + public void MaxQueryParallelismSerializationDependsOnUserInput() + { + // Case 1: default value NOT explicitly provided => should NOT serialize + RuntimeHealthCheckConfig configWithDefault = new( + enabled: true, + roles: null, + cacheTtlSeconds: null, + maxQueryParallelism: null // implicit default + ); + + Assert.IsFalse(configWithDefault.UserProvidedMaxQueryParallelism, "UserProvidedMaxQueryParallelism should be false for default value."); + + // Case 2: default value EXPLICITLY provided => should serialize + RuntimeHealthCheckConfig configWithExplicitDefault = new( + enabled: true, + roles: null, + cacheTtlSeconds: null, + maxQueryParallelism: RuntimeHealthCheckConfig.DEFAULT_MAX_QUERY_PARALLELISM + ); + + Assert.IsTrue(configWithExplicitDefault.UserProvidedMaxQueryParallelism, "UserProvidedMaxQueryParallelism should be true for explicit default value."); + + // Case 3: non-default value => should serialize + RuntimeHealthCheckConfig configWithCustomValue = new( + enabled: true, + roles: null, + cacheTtlSeconds: null, + maxQueryParallelism: RuntimeHealthCheckConfig.DEFAULT_MAX_QUERY_PARALLELISM + 1 + ); 
+ + Assert.IsTrue(configWithCustomValue.UserProvidedMaxQueryParallelism, "UserProvidedMaxQueryParallelism should be true for custom value."); + } + #region Helper Methods private static HttpUtilities SetupRestTest(RuntimeConfig runtimeConfig, HttpStatusCode httpStatusCode = HttpStatusCode.OK) { diff --git a/src/Service/HealthCheck/HealthCheckHelper.cs b/src/Service/HealthCheck/HealthCheckHelper.cs index 5361a55da5..addb6b582a 100644 --- a/src/Service/HealthCheck/HealthCheckHelper.cs +++ b/src/Service/HealthCheck/HealthCheckHelper.cs @@ -2,6 +2,7 @@ // Licensed under the MIT License. using System; +using System.Collections.Concurrent; using System.Collections.Generic; using System.Diagnostics; using System.Linq; @@ -196,18 +197,56 @@ private async Task UpdateDataSourceHealthCheckResultsAsync(ComprehensiveHealthCh // Updates the Entity Health Check Results in the response. // Goes through the entities one by one and executes the rest and graphql checks (if enabled). - private async Task UpdateEntityHealthCheckResultsAsync(ComprehensiveHealthCheckReport ComprehensiveHealthCheckReport, RuntimeConfig runtimeConfig) + private async Task UpdateEntityHealthCheckResultsAsync(ComprehensiveHealthCheckReport report, RuntimeConfig runtimeConfig) { - if (runtimeConfig?.Entities != null && runtimeConfig.Entities.Entities.Any()) + List> enabledEntities = runtimeConfig.Entities.Entities + .Where(e => e.Value.IsEntityHealthEnabled) + .ToList(); + + if (enabledEntities.Count == 0) + { + _logger.LogInformation("No enabled entities found for health checks. Skipping entity health checks."); + return; + } + + ConcurrentBag concurrentChecks = new(); + + // Use MaxQueryParallelism from RuntimeConfig or default to RuntimeHealthCheckConfig.DEFAULT_MAX_QUERY_PARALLELISM + int maxParallelism = runtimeConfig.Runtime?.Health?.MaxQueryParallelism ?? 
RuntimeHealthCheckConfig.DEFAULT_MAX_QUERY_PARALLELISM; + + _logger.LogInformation("Executing health checks for {Count} enabled entities with parallelism of {MaxParallelism}.", enabledEntities.Count, maxParallelism); + + // Executes health checks for all enabled entities in parallel, with a maximum degree of parallelism + // determined by configuration (or a default). Each entity's health check runs as an independent task. + // Results are collected in a thread-safe ConcurrentBag. This approach significantly improves performance + // for large numbers of entities by utilizing available CPU and I/O resources efficiently. + await Parallel.ForEachAsync(enabledEntities, new ParallelOptions { MaxDegreeOfParallelism = maxParallelism }, async (entity, _) => { - foreach (KeyValuePair Entity in runtimeConfig.Entities.Entities) + try { - if (Entity.Value.IsEntityHealthEnabled) + ComprehensiveHealthCheckReport localReport = new() { - await PopulateEntityHealthAsync(ComprehensiveHealthCheckReport, Entity, runtimeConfig); + Checks = new List() + }; + + await PopulateEntityHealthAsync(localReport, entity, runtimeConfig); + + if (localReport.Checks != null) + { + foreach (HealthCheckResultEntry check in localReport.Checks) + { + concurrentChecks.Add(check); + } } } - } + catch (Exception ex) + { + _logger.LogError(ex, "Error processing entity '{EntityKey}'", entity.Key); + } + }); + + report.Checks ??= new List(); + report.Checks.AddRange(concurrentChecks); } // Populates the Entity Health Check Results in the response for a particular entity. From 8c974d6817ef0d45025606271cc109806e21a1a4 Mon Sep 17 00:00:00 2001 From: Alekhya-Polavarapu Date: Tue, 8 Jul 2025 04:44:45 -0700 Subject: [PATCH 33/79] Prevent empty input node generation in mutation builder. (#2729) ## Why make this change? Currently today when the tables has only autogenerated fileds, then the schema generation is failing with an error saying that "Empty input for create and update mutation input". 
Error trace: `For more details look at the `Errors` property.\r\n\r\n1. InputObject `CreateNewTableInput` has no fields declared. (HotChocolate.Types.InputObjectType)\r\n2. InputObject `UpdateNewTableInput` has no fields declared.` https://github.com/Azure/data-api-builder/issues/2739 ## What is this change? This PR addresses the issue by conditionally generating create and update mutation input types only when the table contains at least one non-auto-generated field. This ensures that the schema remains valid and avoids generating empty input objects. ## How was this tested? - Unit tests - Manual testing ## screenshots ### For create: ![image](https://github.com/user-attachments/assets/fb6e841e-f27b-4b89-b827-bb3dbab1bade) ### for update: ![image](https://github.com/user-attachments/assets/4891b165-4a01-4497-8f21-d91c852c20d5) --------- Co-authored-by: Aniruddh Munde --- .../Mutations/CreateMutationBuilder.cs | 86 +++++----- .../Mutations/MutationBuilder.cs | 55 ++++--- .../UpdateAndPatchMutationBuilder.cs | 150 ++++++++++-------- .../GraphQLBuilder/MutationBuilderTests.cs | 35 ++++ 4 files changed, 199 insertions(+), 127 deletions(-) diff --git a/src/Service.GraphQLBuilder/Mutations/CreateMutationBuilder.cs b/src/Service.GraphQLBuilder/Mutations/CreateMutationBuilder.cs index c36ec96511..c2a9b0a9ac 100644 --- a/src/Service.GraphQLBuilder/Mutations/CreateMutationBuilder.cs +++ b/src/Service.GraphQLBuilder/Mutations/CreateMutationBuilder.cs @@ -31,8 +31,8 @@ public static class CreateMutationBuilder /// Database type of the relational database to generate input type for. /// Runtime config information. /// Indicates whether multiple create operation is enabled - /// A GraphQL input type with all expected fields mapped as GraphQL inputs. - private static InputObjectTypeDefinitionNode GenerateCreateInputTypeForRelationalDb( + /// An optional GraphQL input type with all expected fields mapped as GraphQL inputs. + private static InputObjectTypeDefinitionNode? 
GenerateCreateInputTypeForRelationalDb( Dictionary inputs, ObjectTypeDefinitionNode objectTypeDefinitionNode, string entityName, @@ -44,6 +44,7 @@ private static InputObjectTypeDefinitionNode GenerateCreateInputTypeForRelationa bool IsMultipleCreateOperationEnabled) { NameNode inputName = GenerateInputTypeName(name.Value); + InputObjectTypeDefinitionNode? input = null; if (inputs.TryGetValue(inputName, out InputObjectTypeDefinitionNode? db)) { @@ -54,7 +55,7 @@ private static InputObjectTypeDefinitionNode GenerateCreateInputTypeForRelationa // 1. Scalar input fields corresponding to columns which belong to the table. // 2. Complex input fields corresponding to related (target) entities (table backed entities, for now) // which are defined in the runtime config. - List inputFields = new(); + List inputFields = new(); // 1. Scalar input fields. IEnumerable scalarInputFields = objectTypeDefinitionNode.Fields @@ -62,24 +63,26 @@ private static InputObjectTypeDefinitionNode GenerateCreateInputTypeForRelationa .Select(field => GenerateScalarInputType(name, field, IsMultipleCreateOperationEnabled)); // Add scalar input fields to list of input fields for current input type. - inputFields.AddRange(scalarInputFields); - - // Create input object for this entity. - InputObjectTypeDefinitionNode input = - new( - location: null, - inputName, - new StringValueNode($"Input type for creating {name}"), - new List(), - inputFields - ); - - // Add input object to the dictionary of entities for which input object has already been created. - // This input object currently holds only scalar fields. - // The complex fields (for related entities) would be added later when we return from recursion. - // Adding the input object to the dictionary ensures that we don't go into infinite recursion and return whenever - // we find that the input object has already been created for the entity. 
- inputs.Add(input.Name, input); + // Generate the create input type only if there are any scalar fields that are not auto-generated fields. + if (scalarInputFields.Any()) + { + inputFields.AddRange(scalarInputFields); + + // Create input object for this entity. + input = + new( + location: null, + inputName, + new StringValueNode($"Input type for creating {name}"), + new List(), + inputFields!); + // Add input object to the dictionary of entities for which input object has already been created. + // This input object currently holds only scalar fields. + // The complex fields (for related entities) would be added later when we return from recursion. + // Adding the input object to the dictionary ensures that we don't go into infinite recursion and return whenever + // we find that the input object has already been created for the entity. + inputs.Add(input.Name, input); + } // Generate fields for related entities when // 1. Multiple mutation operations are supported for the database type. @@ -88,7 +91,7 @@ private static InputObjectTypeDefinitionNode GenerateCreateInputTypeForRelationa { // 2. Complex input fields. // Evaluate input objects for related entities. - IEnumerable complexInputFields = + IEnumerable complexInputFields = objectTypeDefinitionNode.Fields .Where(field => !IsBuiltInType(field.Type) && IsComplexFieldAllowedForCreateInputInRelationalDb(field, definitions)) .Select(field => @@ -148,7 +151,7 @@ private static InputObjectTypeDefinitionNode GenerateCreateInputTypeForRelationa databaseType: databaseType, entities: entities, IsMultipleCreateOperationEnabled: IsMultipleCreateOperationEnabled); - }); + }).Where(complexInputType => complexInputType != null); // Append relationship fields to the input fields. inputFields.AddRange(complexInputFields); } @@ -307,8 +310,8 @@ private static InputValueDefinitionNode GenerateScalarInputType(NameNode name, F /// The GraphQL object type to create the input type for. 
/// Database type to generate the input type for. /// Runtime configuration information for entities. - /// A GraphQL input type value. - private static InputValueDefinitionNode GenerateComplexInputTypeForRelationalDb( + /// An Optional GraphQL input type value. + private static InputValueDefinitionNode? GenerateComplexInputTypeForRelationalDb( string entityName, Dictionary inputs, IEnumerable definitions, @@ -320,7 +323,7 @@ private static InputValueDefinitionNode GenerateComplexInputTypeForRelationalDb( RuntimeEntities entities, bool IsMultipleCreateOperationEnabled) { - InputObjectTypeDefinitionNode node; + InputObjectTypeDefinitionNode? node; NameNode inputTypeName = GenerateInputTypeName(typeName); if (!inputs.ContainsKey(inputTypeName)) { @@ -340,7 +343,7 @@ private static InputValueDefinitionNode GenerateComplexInputTypeForRelationalDb( node = inputs[inputTypeName]; } - return GetComplexInputType(field, node, inputTypeName, IsMultipleCreateOperationEnabled); + return node == null ? null : GetComplexInputType(field, node, inputTypeName, IsMultipleCreateOperationEnabled); } /// @@ -487,7 +490,7 @@ public static IEnumerable Build( { List createMutationNodes = new(); Entity entity = entities[dbEntityName]; - InputObjectTypeDefinitionNode input; + InputObjectTypeDefinitionNode? input; if (!IsRelationalDb(databaseType)) { input = GenerateCreateInputTypeForNonRelationalDb( @@ -528,12 +531,14 @@ public static IEnumerable Build( string singularName = GetDefinedSingularName(name.Value, entity); - // Create one node. - FieldDefinitionNode createOneNode = new( - location: null, - name: new NameNode(GetPointCreateMutationNodeName(name.Value, entity)), - description: new StringValueNode($"Creates a new {singularName}"), - arguments: new List { + if (input != null) + { + // Create one node. 
+ FieldDefinitionNode createOneNode = new( + location: null, + name: new NameNode(GetPointCreateMutationNodeName(name.Value, entity)), + description: new StringValueNode($"Creates a new {singularName}"), + arguments: new List { new( location : null, new NameNode(MutationBuilder.ITEM_INPUT_ARGUMENT_NAME), @@ -541,15 +546,16 @@ public static IEnumerable Build( new NonNullTypeNode(new NamedTypeNode(input.Name)), defaultValue: null, new List()) - }, - type: new NamedTypeNode(returnEntityName), - directives: fieldDefinitionNodeDirectives - ); + }, + type: new NamedTypeNode(returnEntityName), + directives: fieldDefinitionNodeDirectives + ); - createMutationNodes.Add(createOneNode); + createMutationNodes.Add(createOneNode); + } // Multiple create node is created in the schema only when multiple create operation is enabled. - if (IsMultipleCreateOperationEnabled) + if (IsMultipleCreateOperationEnabled && input != null) { // Create multiple node. FieldDefinitionNode createMultipleNode = new( diff --git a/src/Service.GraphQLBuilder/Mutations/MutationBuilder.cs b/src/Service.GraphQLBuilder/Mutations/MutationBuilder.cs index 35c6e5e3a8..6ceb4445d3 100644 --- a/src/Service.GraphQLBuilder/Mutations/MutationBuilder.cs +++ b/src/Service.GraphQLBuilder/Mutations/MutationBuilder.cs @@ -152,31 +152,42 @@ private static void AddMutations( break; case EntityActionOperation.Update: // Generate Mutation operation for Patch and Update both for CosmosDB - mutationFields.Add(UpdateAndPatchMutationBuilder.Build( - name, - inputs, - objectTypeDefinitionNode, - root, - entities, - dbEntityName, - databaseType, - returnEntityName, - rolesAllowedForMutation)); + FieldDefinitionNode? 
mutationField = UpdateAndPatchMutationBuilder.Build( + name, + inputs, + objectTypeDefinitionNode, + root, + entities, + dbEntityName, + databaseType, + returnEntityName, + rolesAllowedForMutation); + + if (mutationField != null) + { + mutationFields.Add(mutationField); + } if (databaseType is DatabaseType.CosmosDB_NoSQL) { - mutationFields.Add(UpdateAndPatchMutationBuilder.Build( - name, - inputs, - objectTypeDefinitionNode, - root, - entities, - dbEntityName, - databaseType, - returnEntityName, - rolesAllowedForMutation, - EntityActionOperation.Patch, - operationNamePrefix: "patch")); + FieldDefinitionNode? cosmosMutationField = UpdateAndPatchMutationBuilder.Build( + name, + inputs, + objectTypeDefinitionNode, + root, + entities, + dbEntityName, + databaseType, + returnEntityName, + rolesAllowedForMutation, + EntityActionOperation.Patch, + operationNamePrefix: "patch"); + + if (cosmosMutationField != null) + { + mutationFields.Add(cosmosMutationField); + } + } break; diff --git a/src/Service.GraphQLBuilder/Mutations/UpdateAndPatchMutationBuilder.cs b/src/Service.GraphQLBuilder/Mutations/UpdateAndPatchMutationBuilder.cs index 7755e015d8..8916864a37 100644 --- a/src/Service.GraphQLBuilder/Mutations/UpdateAndPatchMutationBuilder.cs +++ b/src/Service.GraphQLBuilder/Mutations/UpdateAndPatchMutationBuilder.cs @@ -55,7 +55,7 @@ private static bool FieldAllowedOnUpdateInput(FieldDefinitionNode field, return true; } - private static InputObjectTypeDefinitionNode GenerateUpdateInputType( + private static InputObjectTypeDefinitionNode? GenerateUpdateInputType( Dictionary inputs, ObjectTypeDefinitionNode objectTypeDefinitionNode, NameNode name, @@ -65,13 +65,14 @@ private static InputObjectTypeDefinitionNode GenerateUpdateInputType( EntityActionOperation operation) { NameNode inputName = GenerateInputTypeName(operation, name.Value); + InputObjectTypeDefinitionNode? 
input; if (inputs.ContainsKey(inputName)) { return inputs[inputName]; } - IEnumerable inputFields = + IEnumerable inputFields = objectTypeDefinitionNode.Fields .Where(f => FieldAllowedOnUpdateInput(f, databaseType, definitions, operation, objectTypeDefinitionNode)) .Select(f => @@ -89,17 +90,26 @@ private static InputObjectTypeDefinitionNode GenerateUpdateInputType( return GenerateSimpleInputType(name, f, databaseType, operation); }); - InputObjectTypeDefinitionNode input = + if (inputFields.Any()) + { + List inputFieldsList = inputFields + .Where(i => i != null) + .Select(i => i!) + .ToList(); + input = new( location: null, inputName, new StringValueNode($"Input type for updating {name}"), new List(), - inputFields.ToList() + inputFieldsList ); - inputs.Add(input.Name, input); - return input; + inputs.Add(input.Name, input); + return input; + } + + return null; } private static InputValueDefinitionNode GenerateSimpleInputType(NameNode name, FieldDefinitionNode f, DatabaseType databaseType, EntityActionOperation operation) @@ -117,7 +127,7 @@ private static InputValueDefinitionNode GenerateSimpleInputType(NameNode name, F ); } - private static InputValueDefinitionNode GetComplexInputType( + private static InputValueDefinitionNode? GetComplexInputType( Dictionary inputs, IEnumerable definitions, FieldDefinitionNode f, @@ -127,7 +137,7 @@ private static InputValueDefinitionNode GetComplexInputType( DatabaseType databaseType, EntityActionOperation operation) { - InputObjectTypeDefinitionNode node; + InputObjectTypeDefinitionNode? node; NameNode inputTypeName = GenerateInputTypeName(operation, typeName); if (!inputs.ContainsKey(inputTypeName)) @@ -139,35 +149,40 @@ private static InputValueDefinitionNode GetComplexInputType( node = inputs[inputTypeName]; } - ITypeNode type = new NamedTypeNode(node.Name); - - // For a type like [Bar!]! 
we have to first unpack the outer non-null - if (f.Type.IsNonNullType()) + if ((node != null)) { - // The innerType is the raw List, scalar or object type without null settings - ITypeNode innerType = f.Type.InnerType(); + ITypeNode type = new NamedTypeNode(node.Name); + + // For a type like [Bar!]! we have to first unpack the outer non-null + if (f.Type.IsNonNullType()) + { + // The innerType is the raw List, scalar or object type without null settings + ITypeNode innerType = f.Type.InnerType(); - if (innerType.IsListType()) + if (innerType.IsListType()) + { + type = GenerateListType(type, innerType); + } + + // Wrap the input with non-null to match the field definition + type = new NonNullTypeNode((INullableTypeNode)type); + } + else if (f.Type.IsListType()) { - type = GenerateListType(type, innerType); + type = GenerateListType(type, f.Type); } - // Wrap the input with non-null to match the field definition - type = new NonNullTypeNode((INullableTypeNode)type); - } - else if (f.Type.IsListType()) - { - type = GenerateListType(type, f.Type); + return new( + location: null, + f.Name, + new StringValueNode($"Input for field {f.Name} on type {inputTypeName}"), + type, + defaultValue: null, + f.Directives + ); } - return new( - location: null, - f.Name, - new StringValueNode($"Input for field {f.Name} on type {inputTypeName}"), - type, - defaultValue: null, - f.Directives - ); + return null; } private static ITypeNode GenerateListType(ITypeNode type, ITypeNode fieldType) @@ -201,7 +216,7 @@ private static NameNode GenerateInputTypeName(EntityActionOperation operation, s /// Runtime config information for the object type. /// Collection of role names allowed for action, to be added to authorize directive. /// A update*ObjectName* field to be added to the Mutation type. - public static FieldDefinitionNode Build( + public static FieldDefinitionNode? 
Build( NameNode name, Dictionary inputs, ObjectTypeDefinitionNode objectTypeDefinitionNode, @@ -214,7 +229,7 @@ public static FieldDefinitionNode Build( EntityActionOperation operation = EntityActionOperation.Update, string operationNamePrefix = UPDATE_MUTATION_PREFIX) { - InputObjectTypeDefinitionNode input = GenerateUpdateInputType( + InputObjectTypeDefinitionNode? input = GenerateUpdateInputType( inputs, objectTypeDefinitionNode, name, @@ -234,19 +249,21 @@ public static FieldDefinitionNode Build( description = "The ID of the item being updated."; } - List inputValues = new(); - foreach (FieldDefinitionNode idField in idFields) + if (input != null) { - inputValues.Add(new InputValueDefinitionNode( - location: null, - idField.Name, - new StringValueNode(description), - new NonNullTypeNode(idField.Type.NamedType()), - defaultValue: null, - new List())); - } + List inputValues = new(); + foreach (FieldDefinitionNode idField in idFields) + { + inputValues.Add(new InputValueDefinitionNode( + location: null, + idField.Name, + new StringValueNode(description), + new NonNullTypeNode(idField.Type.NamedType()), + defaultValue: null, + new List())); + } - inputValues.Add(new InputValueDefinitionNode( + inputValues.Add(new InputValueDefinitionNode( location: null, new NameNode(INPUT_ARGUMENT_NAME), new StringValueNode($"Input representing all the fields for updating {name}"), @@ -254,30 +271,33 @@ public static FieldDefinitionNode Build( defaultValue: null, new List())); - // Create authorize directive denoting allowed roles - List fieldDefinitionNodeDirectives = new() - { - new DirectiveNode( - ModelDirective.Names.MODEL, - new ArgumentNode(ModelDirective.Names.NAME_ARGUMENT, dbEntityName)) - }; - - if (CreateAuthorizationDirectiveIfNecessary( - rolesAllowedForMutation, - out DirectiveNode? 
authorizeDirective)) - { - fieldDefinitionNodeDirectives.Add(authorizeDirective!); + // Create authorize directive denoting allowed roles + List fieldDefinitionNodeDirectives = new() + { + new DirectiveNode( + ModelDirective.Names.MODEL, + new ArgumentNode(ModelDirective.Names.NAME_ARGUMENT, dbEntityName)) + }; + + if (CreateAuthorizationDirectiveIfNecessary( + rolesAllowedForMutation, + out DirectiveNode? authorizeDirective)) + { + fieldDefinitionNodeDirectives.Add(authorizeDirective!); + } + + string singularName = GetDefinedSingularName(name.Value, entities[dbEntityName]); + return new( + location: null, + name: new NameNode($"{operationNamePrefix}{singularName}"), + description: new StringValueNode($"Updates a {singularName}"), + arguments: inputValues, + type: new NamedTypeNode(returnEntityName), + directives: fieldDefinitionNodeDirectives + ); } - string singularName = GetDefinedSingularName(name.Value, entities[dbEntityName]); - return new( - location: null, - name: new NameNode($"{operationNamePrefix}{singularName}"), - description: new StringValueNode($"Updates a {singularName}"), - arguments: inputValues, - type: new NamedTypeNode(returnEntityName), - directives: fieldDefinitionNodeDirectives - ); + return null; } } } diff --git a/src/Service.Tests/GraphQLBuilder/MutationBuilderTests.cs b/src/Service.Tests/GraphQLBuilder/MutationBuilderTests.cs index 8a75724b62..4ebe842c36 100644 --- a/src/Service.Tests/GraphQLBuilder/MutationBuilderTests.cs +++ b/src/Service.Tests/GraphQLBuilder/MutationBuilderTests.cs @@ -194,6 +194,41 @@ type Foo @model(name:""Foo"") { Assert.AreEqual("bar", argType.Fields[0].Name.Value); } + [TestMethod] + [TestCategory("Mutation Builder - Create")] + [TestCategory("Mutation Builder - Update")] + [TestCategory("Mutation Builder - Delete")] + public void MutationExcludedForAllAutogeneratedFields() + { + string gql = + @" +type Foo @model(name:""Foo"") { + id: ID! 
@autoGenerated +} + "; + + DocumentNode root = Utf8GraphQLParser.Parse(gql); + + Dictionary entityNameToDatabasetype = new() + { + { "Foo", DatabaseType.MSSQL } + }; + + DocumentNode mutationRoot = MutationBuilder.Build( + root, + entityNameToDatabasetype, + new(new Dictionary { { "Foo", GenerateEmptyEntity() } }), + entityPermissionsMap: _entityPermissions); + + ObjectTypeDefinitionNode query = GetMutationNode(mutationRoot); + List fieldNames = query.Fields.Select(f => f.Name.Value).ToList(); + + // Assert that "createFoo" and "updateFoo" are not present + Assert.IsFalse(fieldNames.Contains("createFoo"), "createFoo should not be present"); + Assert.IsFalse(fieldNames.Contains("updateFoo"), "updateFoo should not be present"); + Assert.IsTrue(fieldNames.Contains("deleteFoo"), "deleteFoo should be present"); + } + [TestMethod] [TestCategory("Mutation Builder - Create")] [TestCategory("Schema Builder - Simple Type")] From e2240bec2a5e288d118ede65169c19cf04ff7ea6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Jul 2025 09:11:43 +0530 Subject: [PATCH 34/79] Bump dotnet-sdk from 8.0.411 to 8.0.412 (#2755) Bumps [dotnet-sdk](https://github.com/dotnet/sdk) from 8.0.411 to 8.0.412.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=dotnet-sdk&package-manager=dotnet_sdk&previous-version=8.0.411&new-version=8.0.412)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- global.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/global.json b/global.json index 08b8436e55..5db7415f9c 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "sdk": { - "version": "8.0.411", + "version": "8.0.412", "rollForward": "latestFeature" } } From 48d1f8f5f2837a7efb09b71725a30aa229af56be Mon Sep 17 00:00:00 2001 From: aaronburtle <93220300+aaronburtle@users.noreply.github.com> Date: Fri, 11 Jul 2025 16:56:57 -0700 Subject: [PATCH 35/79] Include config options and CLI configure command for Azure Key Vault support. (#2745) ## Why make this change? Part of the work for https://github.com/Azure/data-api-builder/issues/2708 We are going to support storing secrets in Azure Key Vault (see: https://learn.microsoft.com/en-us/azure/key-vault/general/basic-concepts), and to do so we need to include properties in the config so that the user can configure exactly how they'd like to make use of this feature. This PR adds in the config properties, and CLI commands to configure them. ## What is this change? ### 1. Object Model Classes - **`AzureKeyVaultOptions`**: Top-level configuration class with endpoint and retry policy - **`RetryPolicyOptions`**: Detailed retry configuration with mode, counts, delays, and timeouts - **`RetryPolicyMode`**: Enum supporting `Fixed` and `Exponential` retry modes - Updated `RuntimeConfig` to include the new `AzureKeyVault` property ### 2. JSON Schema Updates Added `azure-key-vault` section to `dab.draft.schema.json` with: - `endpoint` property (string), required when `azure-key-vault` property is used - Optional `retry-policy` object with configurable properties - Proper defaults: mode=`exponential`, max-count=`3`, delay-seconds=`1`, max-delay-seconds=`60`, network-timeout-seconds=`60` ### 3. 
CLI Integration Added 6 new CLI options following DAB naming conventions: - `--azure-key-vault.endpoint`: Configure Key Vault endpoint URL - `--azure-key-vault.retry-policy.mode`: Set retry mode (fixed/exponential) - `--azure-key-vault.retry-policy.max-count`: Maximum retry attempts - `--azure-key-vault.retry-policy.delay-seconds`: Initial delay between retries - `--azure-key-vault.retry-policy.max-delay-seconds`: Maximum delay for exponential backoff - `--azure-key-vault.retry-policy.network-timeout-seconds`: Network timeout duration ### 4. Configuration Processing - Extended `ConfigGenerator` with `TryUpdateConfiguredAzureKeyVaultOptions` method - Comprehensive input validation with meaningful error messages - Proper handling of partial configurations (e.g., retry-policy without endpoint) - Full serialization/deserialization support ### 5. Json Serialization/Deserialization Custom JsonConverterFactory allows for `read` and `write` using custom logic and ensure that default properties not provided by the user are not written back out to configuration files. ## How was this tested? - Added new scenario to `ConfigureOptionsTests` to validate that we can correctly add the new options. 
## Sample Request(s) ```bash # Configure Key Vault endpoint dab configure --azure-key-vault.endpoint "https://my-vault.vault.azure.net/" # Configure retry policy dab configure --azure-key-vault.retry-policy.mode exponential dab configure --azure-key-vault.retry-policy.max-count 5 dab configure --azure-key-vault.retry-policy.delay-seconds 2 dab configure --azure-key-vault.retry-policy.max-delay-seconds 120 ``` ```json { "azure-key-vault": { "endpoint": "https://my-vault.vault.azure.net/", "retry-policy": { "mode": "exponential", "max-count": 5, "delay-seconds": 2, "max-delay-seconds": 120, "network-timeout-seconds": 90 } } } ``` --------- Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: Aniruddh Munde Co-authored-by: souvikghosh04 Co-authored-by: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> --- schemas/dab.draft.schema.json | 58 +++++ src/Cli.Tests/ConfigureOptionsTests.cs | 37 ++++ src/Cli.Tests/ValidateConfigTests.cs | 42 +++- src/Cli/Commands/ConfigureOptions.cs | 31 +++ src/Cli/ConfigGenerator.cs | 122 ++++++++++ .../AKVRetryPolicyOptionsConverterFactory.cs | 208 ++++++++++++++++++ src/Config/ObjectModel/AKVRetryPolicyMode.cs | 15 ++ .../ObjectModel/AKVRetryPolicyOptions.cs | 113 ++++++++++ .../ObjectModel/AzureKeyVaultOptions.cs | 15 ++ src/Config/ObjectModel/RuntimeConfig.cs | 10 +- src/Config/RuntimeConfigLoader.cs | 1 + .../DabCacheServiceIntegrationTests.cs | 1 + 12 files changed, 650 insertions(+), 3 deletions(-) create mode 100644 src/Config/Converters/AKVRetryPolicyOptionsConverterFactory.cs create mode 100644 src/Config/ObjectModel/AKVRetryPolicyMode.cs create mode 100644 src/Config/ObjectModel/AKVRetryPolicyOptions.cs create mode 100644 src/Config/ObjectModel/AzureKeyVaultOptions.cs diff --git a/schemas/dab.draft.schema.json b/schemas/dab.draft.schema.json index 692b82b642..6893934ef3 100644 --- a/schemas/dab.draft.schema.json +++ b/schemas/dab.draft.schema.json @@ -467,6 +467,54 
@@ } } }, + "azure-key-vault": { + "type": "object", + "description": "Azure Key Vault configuration for storing secrets", + "additionalProperties": false, + "properties": { + "endpoint": { + "type": "string", + "description": "Azure Key Vault endpoint URL" + }, + "retry-policy": { + "type": "object", + "description": "Retry policy configuration for Azure Key Vault operations", + "additionalProperties": false, + "properties": { + "mode": { + "type": "string", + "enum": ["fixed", "exponential"], + "default": "exponential", + "description": "Retry mode: fixed or exponential backoff" + }, + "max-count": { + "type": "integer", + "default": 3, + "minimum": 0, + "description": "Maximum number of retry attempts" + }, + "delay-seconds": { + "type": "integer", + "default": 1, + "minimum": 1, + "description": "Initial delay between retries in seconds" + }, + "max-delay-seconds": { + "type": "integer", + "default": 60, + "minimum": 1, + "description": "Maximum delay between retries in seconds (for exponential mode)" + }, + "network-timeout-seconds": { + "type": "integer", + "default": 60, + "minimum": 1, + "description": "Network timeout for requests in seconds" + } + } + } + } + }, "entities": { "type": "object", "description": "Entities that will be exposed via REST and/or GraphQL", @@ -879,6 +927,16 @@ } } }, + "if": { + "required": ["azure-key-vault"] + }, + "then": { + "properties": { + "azure-key-vault": { + "required": ["endpoint"] + } + } + }, "required": ["data-source", "entities"], "$defs": { "singular-plural": { diff --git a/src/Cli.Tests/ConfigureOptionsTests.cs b/src/Cli.Tests/ConfigureOptionsTests.cs index 8ee064e262..b278661f55 100644 --- a/src/Cli.Tests/ConfigureOptionsTests.cs +++ b/src/Cli.Tests/ConfigureOptionsTests.cs @@ -111,6 +111,43 @@ public void TestAddDepthLimitForGraphQL() Assert.AreEqual(maxDepthLimit, config.Runtime.GraphQL.DepthLimit); } + /// + /// Tests that running the "configure --azure-key-vault" commands on a config without AKV properties 
results + /// in a valid config being generated. + [TestMethod] + public void TestAddAKVOptions() + { + // Arrange + _fileSystem!.AddFile(TEST_RUNTIME_CONFIG_FILE, new MockFileData(INITIAL_CONFIG)); + + Assert.IsTrue(_fileSystem!.File.Exists(TEST_RUNTIME_CONFIG_FILE)); + + // Act: Attempts to add AKV options + ConfigureOptions options = new( + azureKeyVaultEndpoint: "foo", + azureKeyVaultRetryPolicyMaxCount: 1, + azureKeyVaultRetryPolicyDelaySeconds: 1, + azureKeyVaultRetryPolicyMaxDelaySeconds: 1, + azureKeyVaultRetryPolicyMode: AKVRetryPolicyMode.Exponential, + azureKeyVaultRetryPolicyNetworkTimeoutSeconds: 1, + config: TEST_RUNTIME_CONFIG_FILE + ); + bool isSuccess = TryConfigureSettings(options, _runtimeConfigLoader!, _fileSystem!); + + // Assert: Validate the AKV options are added. + Assert.IsTrue(isSuccess); + string updatedConfig = _fileSystem!.File.ReadAllText(TEST_RUNTIME_CONFIG_FILE); + Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(updatedConfig, out RuntimeConfig? config)); + Assert.IsNotNull(config.AzureKeyVault); + Assert.IsNotNull(config.AzureKeyVault?.RetryPolicy); + Assert.AreEqual("foo", config.AzureKeyVault?.Endpoint); + Assert.AreEqual(AKVRetryPolicyMode.Exponential, config.AzureKeyVault?.RetryPolicy.Mode); + Assert.AreEqual(1, config.AzureKeyVault?.RetryPolicy.MaxCount); + Assert.AreEqual(1, config.AzureKeyVault?.RetryPolicy.DelaySeconds); + Assert.AreEqual(1, config.AzureKeyVault?.RetryPolicy.MaxDelaySeconds); + Assert.AreEqual(1, config.AzureKeyVault?.RetryPolicy.NetworkTimeoutSeconds); + } + /// /// Tests that running "dab configure --runtime.graphql.enabled" on a config with various values results /// in runtime. Takes in updated value for graphql.enabled and diff --git a/src/Cli.Tests/ValidateConfigTests.cs b/src/Cli.Tests/ValidateConfigTests.cs index 29826635b1..6cbc4b54f1 100644 --- a/src/Cli.Tests/ValidateConfigTests.cs +++ b/src/Cli.Tests/ValidateConfigTests.cs @@ -1,6 +1,9 @@ // Copyright (c) Microsoft Corporation. 
// Licensed under the MIT License. +using Azure.DataApiBuilder.Core.Configurations; +using Azure.DataApiBuilder.Core.Models; + namespace Cli.Tests; /// /// Test for config file initialization. @@ -9,7 +12,7 @@ namespace Cli.Tests; public class ValidateConfigTests : VerifyBase { - private IFileSystem? _fileSystem; + private MockFileSystem? _fileSystem; private FileSystemRuntimeConfigLoader? _runtimeConfigLoader; [TestInitialize] @@ -271,4 +274,41 @@ public void ValidateConfigSchemaWhereConfigReferencesEnvironmentVariables() condition: loggerOutput.Contains("The config satisfies the schema requirements."), message: "RuntimeConfigValidator::ValidateConfigSchema(...) didn't communicate successful config schema validation."); } + + /// + /// Tests that validation fails when AKV options are configured without an endpoint. + /// + [TestMethod] + public async Task TestValidateAKVOptionsWithoutEndpointFails() + { + // Arrange + _fileSystem!.AddFile(TEST_RUNTIME_CONFIG_FILE, new MockFileData(INITIAL_CONFIG)); + Assert.IsTrue(_fileSystem!.File.Exists(TEST_RUNTIME_CONFIG_FILE)); + Mock mockRuntimeConfigProvider = new(_runtimeConfigLoader); + RuntimeConfigValidator validator = new(mockRuntimeConfigProvider.Object, _fileSystem, new Mock>().Object); + Mock mockLoggerFactory = new(); + Mock> mockLogger = new(); + mockLoggerFactory + .Setup(factory => factory.CreateLogger(typeof(JsonConfigSchemaValidator).FullName!)) + .Returns(mockLogger.Object); + + // Act: Attempts to add AKV options + ConfigureOptions options = new( + azureKeyVaultRetryPolicyMaxCount: 1, + azureKeyVaultRetryPolicyDelaySeconds: 1, + azureKeyVaultRetryPolicyMaxDelaySeconds: 1, + azureKeyVaultRetryPolicyMode: AKVRetryPolicyMode.Exponential, + azureKeyVaultRetryPolicyNetworkTimeoutSeconds: 1, + config: TEST_RUNTIME_CONFIG_FILE + ); + + bool isSuccess = TryConfigureSettings(options, _runtimeConfigLoader!, _fileSystem!); + + // Assert: Settings are configured, config parses, validation fails. 
+ Assert.IsTrue(isSuccess); + string updatedConfig = _fileSystem!.File.ReadAllText(TEST_RUNTIME_CONFIG_FILE); + Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(updatedConfig, out RuntimeConfig? config)); + JsonSchemaValidationResult result = await validator.ValidateConfigSchema(config, TEST_RUNTIME_CONFIG_FILE, mockLoggerFactory.Object); + Assert.IsFalse(result.IsValid); + } } diff --git a/src/Cli/Commands/ConfigureOptions.cs b/src/Cli/Commands/ConfigureOptions.cs index a11d6b65f9..24c9e54d8f 100644 --- a/src/Cli/Commands/ConfigureOptions.cs +++ b/src/Cli/Commands/ConfigureOptions.cs @@ -42,6 +42,12 @@ public ConfigureOptions( string? runtimeHostAuthenticationProvider = null, string? runtimeHostAuthenticationJwtAudience = null, string? runtimeHostAuthenticationJwtIssuer = null, + string? azureKeyVaultEndpoint = null, + AKVRetryPolicyMode? azureKeyVaultRetryPolicyMode = null, + int? azureKeyVaultRetryPolicyMaxCount = null, + int? azureKeyVaultRetryPolicyDelaySeconds = null, + int? azureKeyVaultRetryPolicyMaxDelaySeconds = null, + int? azureKeyVaultRetryPolicyNetworkTimeoutSeconds = null, string? config = null) : base(config) { @@ -72,6 +78,13 @@ public ConfigureOptions( RuntimeHostAuthenticationProvider = runtimeHostAuthenticationProvider; RuntimeHostAuthenticationJwtAudience = runtimeHostAuthenticationJwtAudience; RuntimeHostAuthenticationJwtIssuer = runtimeHostAuthenticationJwtIssuer; + // Azure Key Vault + AzureKeyVaultEndpoint = azureKeyVaultEndpoint; + AzureKeyVaultRetryPolicyMode = azureKeyVaultRetryPolicyMode; + AzureKeyVaultRetryPolicyMaxCount = azureKeyVaultRetryPolicyMaxCount; + AzureKeyVaultRetryPolicyDelaySeconds = azureKeyVaultRetryPolicyDelaySeconds; + AzureKeyVaultRetryPolicyMaxDelaySeconds = azureKeyVaultRetryPolicyMaxDelaySeconds; + AzureKeyVaultRetryPolicyNetworkTimeoutSeconds = azureKeyVaultRetryPolicyNetworkTimeoutSeconds; } [Option("data-source.database-type", Required = false, HelpText = "Database type. 
Allowed values: MSSQL, PostgreSQL, CosmosDB_NoSQL, MySQL.")] @@ -140,6 +153,24 @@ public ConfigureOptions( [Option("runtime.host.authentication.jwt.issuer", Required = false, HelpText = "Configure the entity that issued the Jwt Token.")] public string? RuntimeHostAuthenticationJwtIssuer { get; } + [Option("azure-key-vault.endpoint", Required = false, HelpText = "Configure the Azure Key Vault endpoint URL.")] + public string? AzureKeyVaultEndpoint { get; } + + [Option("azure-key-vault.retry-policy.mode", Required = false, HelpText = "Configure the retry policy mode. Allowed values: fixed, exponential. Default: exponential.")] + public AKVRetryPolicyMode? AzureKeyVaultRetryPolicyMode { get; } + + [Option("azure-key-vault.retry-policy.max-count", Required = false, HelpText = "Configure the maximum number of retry attempts. Default: 3.")] + public int? AzureKeyVaultRetryPolicyMaxCount { get; } + + [Option("azure-key-vault.retry-policy.delay-seconds", Required = false, HelpText = "Configure the initial delay between retries in seconds. Default: 1.")] + public int? AzureKeyVaultRetryPolicyDelaySeconds { get; } + + [Option("azure-key-vault.retry-policy.max-delay-seconds", Required = false, HelpText = "Configure the maximum delay between retries in seconds (for exponential mode). Default: 60.")] + public int? AzureKeyVaultRetryPolicyMaxDelaySeconds { get; } + + [Option("azure-key-vault.retry-policy.network-timeout-seconds", Required = false, HelpText = "Configure the network timeout for requests in seconds. Default: 60.")] + public int? 
AzureKeyVaultRetryPolicyNetworkTimeoutSeconds { get; } + public int Handler(ILogger logger, FileSystemRuntimeConfigLoader loader, IFileSystem fileSystem) { logger.LogInformation("{productName} {version}", PRODUCT_NAME, ProductInfo.GetProductVersion()); diff --git a/src/Cli/ConfigGenerator.cs b/src/Cli/ConfigGenerator.cs index 7655b84cee..ced4649590 100644 --- a/src/Cli/ConfigGenerator.cs +++ b/src/Cli/ConfigGenerator.cs @@ -551,6 +551,11 @@ public static bool TryConfigureSettings(ConfigureOptions options, FileSystemRunt return false; } + if (!TryUpdateConfiguredAzureKeyVaultOptions(options, ref runtimeConfig)) + { + return false; + } + return WriteRuntimeConfigToFile(runtimeConfigFile, runtimeConfig, fileSystem); } @@ -1990,5 +1995,122 @@ public static bool TryAddTelemetry(AddTelemetryOptions options, FileSystemRuntim return WriteRuntimeConfigToFile(runtimeConfigFile, runtimeConfig, fileSystem); } + + /// + /// Attempts to update the Azure Key Vault configuration options based on the provided values. + /// Validates that any user-provided parameter value is valid and updates the runtime configuration accordingly. + /// + /// The configuration options provided by the user. + /// The runtime configuration to be updated. + /// True if the Azure Key Vault options were successfully configured; otherwise, false. + private static bool TryUpdateConfiguredAzureKeyVaultOptions( + ConfigureOptions options, + [NotNullWhen(true)] ref RuntimeConfig runtimeConfig) + { + try + { + AzureKeyVaultOptions? updatedAzureKeyVaultOptions = runtimeConfig.AzureKeyVault; + AKVRetryPolicyOptions? updatedRetryPolicyOptions = updatedAzureKeyVaultOptions?.RetryPolicy; + + // Azure Key Vault Endpoint + if (options.AzureKeyVaultEndpoint is not null) + { + updatedAzureKeyVaultOptions = updatedAzureKeyVaultOptions is not null + ? 
updatedAzureKeyVaultOptions with { Endpoint = options.AzureKeyVaultEndpoint } + : new AzureKeyVaultOptions { Endpoint = options.AzureKeyVaultEndpoint }; + _logger.LogInformation("Updated RuntimeConfig with azure-key-vault.endpoint as '{endpoint}'", options.AzureKeyVaultEndpoint); + } + + // Retry Policy Mode + if (options.AzureKeyVaultRetryPolicyMode is not null) + { + updatedRetryPolicyOptions = updatedRetryPolicyOptions is not null + ? updatedRetryPolicyOptions with { Mode = options.AzureKeyVaultRetryPolicyMode.Value, UserProvidedMode = true } + : new AKVRetryPolicyOptions { Mode = options.AzureKeyVaultRetryPolicyMode.Value, UserProvidedMode = true }; + _logger.LogInformation("Updated RuntimeConfig with azure-key-vault.retry-policy.mode as '{mode}'", options.AzureKeyVaultRetryPolicyMode.Value); + } + + // Retry Policy Max Count + if (options.AzureKeyVaultRetryPolicyMaxCount is not null) + { + if (options.AzureKeyVaultRetryPolicyMaxCount.Value < 1) + { + _logger.LogError("Failed to update azure-key-vault.retry-policy.max-count. Value must be at least 1."); + return false; + } + + updatedRetryPolicyOptions = updatedRetryPolicyOptions is not null + ? updatedRetryPolicyOptions with { MaxCount = options.AzureKeyVaultRetryPolicyMaxCount.Value, UserProvidedMaxCount = true } + : new AKVRetryPolicyOptions { MaxCount = options.AzureKeyVaultRetryPolicyMaxCount.Value, UserProvidedMaxCount = true }; + _logger.LogInformation("Updated RuntimeConfig with azure-key-vault.retry-policy.max-count as '{maxCount}'", options.AzureKeyVaultRetryPolicyMaxCount.Value); + } + + // Retry Policy Delay Seconds + if (options.AzureKeyVaultRetryPolicyDelaySeconds is not null) + { + if (options.AzureKeyVaultRetryPolicyDelaySeconds.Value < 1) + { + _logger.LogError("Failed to update azure-key-vault.retry-policy.delay-seconds. Value must be at least 1."); + return false; + } + + updatedRetryPolicyOptions = updatedRetryPolicyOptions is not null + ? 
updatedRetryPolicyOptions with { DelaySeconds = options.AzureKeyVaultRetryPolicyDelaySeconds.Value, UserProvidedDelaySeconds = true } + : new AKVRetryPolicyOptions { DelaySeconds = options.AzureKeyVaultRetryPolicyDelaySeconds.Value, UserProvidedDelaySeconds = true }; + _logger.LogInformation("Updated RuntimeConfig with azure-key-vault.retry-policy.delay-seconds as '{delaySeconds}'", options.AzureKeyVaultRetryPolicyDelaySeconds.Value); + } + + // Retry Policy Max Delay Seconds + if (options.AzureKeyVaultRetryPolicyMaxDelaySeconds is not null) + { + if (options.AzureKeyVaultRetryPolicyMaxDelaySeconds.Value < 1) + { + _logger.LogError("Failed to update azure-key-vault.retry-policy.max-delay-seconds. Value must be at least 1."); + return false; + } + + updatedRetryPolicyOptions = updatedRetryPolicyOptions is not null + ? updatedRetryPolicyOptions with { MaxDelaySeconds = options.AzureKeyVaultRetryPolicyMaxDelaySeconds.Value, UserProvidedMaxDelaySeconds = true } + : new AKVRetryPolicyOptions { MaxDelaySeconds = options.AzureKeyVaultRetryPolicyMaxDelaySeconds.Value, UserProvidedMaxDelaySeconds = true }; + _logger.LogInformation("Updated RuntimeConfig with azure-key-vault.retry-policy.max-delay-seconds as '{maxDelaySeconds}'", options.AzureKeyVaultRetryPolicyMaxDelaySeconds.Value); + } + + // Retry Policy Network Timeout Seconds + if (options.AzureKeyVaultRetryPolicyNetworkTimeoutSeconds is not null) + { + if (options.AzureKeyVaultRetryPolicyNetworkTimeoutSeconds.Value < 1) + { + _logger.LogError("Failed to update azure-key-vault.retry-policy.network-timeout-seconds. Value must be at least 1."); + return false; + } + + updatedRetryPolicyOptions = updatedRetryPolicyOptions is not null + ? 
updatedRetryPolicyOptions with { NetworkTimeoutSeconds = options.AzureKeyVaultRetryPolicyNetworkTimeoutSeconds.Value, UserProvidedNetworkTimeoutSeconds = true } + : new AKVRetryPolicyOptions { NetworkTimeoutSeconds = options.AzureKeyVaultRetryPolicyNetworkTimeoutSeconds.Value, UserProvidedNetworkTimeoutSeconds = true }; + _logger.LogInformation("Updated RuntimeConfig with azure-key-vault.retry-policy.network-timeout-seconds as '{networkTimeoutSeconds}'", options.AzureKeyVaultRetryPolicyNetworkTimeoutSeconds.Value); + } + + // Update Azure Key Vault options with retry policy if retry policy was modified + if (updatedRetryPolicyOptions is not null) + { + updatedAzureKeyVaultOptions = updatedAzureKeyVaultOptions is not null + ? updatedAzureKeyVaultOptions with { RetryPolicy = updatedRetryPolicyOptions } + : new AzureKeyVaultOptions { RetryPolicy = updatedRetryPolicyOptions }; + } + + // Update runtime config if Azure Key Vault options were modified + if (updatedAzureKeyVaultOptions is not null) + { + runtimeConfig = runtimeConfig with { AzureKeyVault = updatedAzureKeyVaultOptions }; + } + + return true; + } + catch (Exception ex) + { + _logger.LogError("Failed to update RuntimeConfig.AzureKeyVault with exception message: {exceptionMessage}.", ex.Message); + return false; + } + } } } diff --git a/src/Config/Converters/AKVRetryPolicyOptionsConverterFactory.cs b/src/Config/Converters/AKVRetryPolicyOptionsConverterFactory.cs new file mode 100644 index 0000000000..06d00b64d3 --- /dev/null +++ b/src/Config/Converters/AKVRetryPolicyOptionsConverterFactory.cs @@ -0,0 +1,208 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json; +using System.Text.Json.Serialization; +using Azure.DataApiBuilder.Config.ObjectModel; + +namespace Azure.DataApiBuilder.Config.Converters; + +/// +/// Defines how DAB reads and writes Azure Key Vault Retry Policies (JSON). 
+/// +internal class AKVRetryPolicyOptionsConverterFactory : JsonConverterFactory +{ + // Determines whether to replace environment variable with its + // value or not while deserializing. + private bool _replaceEnvVar; + + /// + public override bool CanConvert(Type typeToConvert) + { + return typeToConvert.IsAssignableTo(typeof(AKVRetryPolicyOptions)); + } + + /// + public override JsonConverter? CreateConverter(Type typeToConvert, JsonSerializerOptions options) + { + return new AKVRetryPolicyOptionsConverter(_replaceEnvVar); + } + + /// Whether to replace environment variable with its + /// value or not while deserializing. + internal AKVRetryPolicyOptionsConverterFactory(bool replaceEnvVar) + { + _replaceEnvVar = replaceEnvVar; + } + + private class AKVRetryPolicyOptionsConverter : JsonConverter + { + // Determines whether to replace environment variable with its + // value or not while deserializing. + private bool _replaceEnvVar; + + /// Whether to replace environment variable with its + /// value or not while deserializing. + public AKVRetryPolicyOptionsConverter(bool replaceEnvVar) + { + _replaceEnvVar = replaceEnvVar; + } + + /// + /// Defines how DAB reads AKV Retry Policy options and defines which values are + /// used to instantiate those options. + /// + /// Thrown when improperly formatted cache options are provided. + public override AKVRetryPolicyOptions? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + if (reader.TokenType is JsonTokenType.StartObject) + { + AKVRetryPolicyMode? mode = null; + int? maxCount = null; + int? delaySeconds = null; + int? maxDelaySeconds = null; + int? networkTimeoutSeconds = null; + + while (reader.Read()) + { + if (reader.TokenType is JsonTokenType.EndObject) + { + return new AKVRetryPolicyOptions(mode, maxCount, delaySeconds, maxDelaySeconds, networkTimeoutSeconds); + } + + string? 
property = reader.GetString(); + reader.Read(); + + switch (property) + { + case "mode": + if (reader.TokenType is JsonTokenType.Null) + { + mode = null; + } + else + { + mode = EnumExtensions.Deserialize(reader.DeserializeString(_replaceEnvVar)!); + } + + break; + case "max-count": + if (reader.TokenType is JsonTokenType.Null) + { + maxCount = null; + } + else + { + int parseMaxCount = reader.GetInt32(); + if (parseMaxCount < 0) + { + throw new JsonException($"Invalid value for max-count: {parseMaxCount}. Value must not be negative."); + } + + maxCount = parseMaxCount; + } + + break; + case "delay-seconds": + if (reader.TokenType is JsonTokenType.Null) + { + delaySeconds = null; + } + else + { + int parseDelaySeconds = reader.GetInt32(); + if (parseDelaySeconds <= 0) + { + throw new JsonException($"Invalid value for delay-seconds: {parseDelaySeconds}. Value must be greater than 0."); + } + + delaySeconds = parseDelaySeconds; + } + + break; + case "max-delay-seconds": + if (reader.TokenType is JsonTokenType.Null) + { + maxDelaySeconds = null; + } + else + { + int parseMaxDelaySeconds = reader.GetInt32(); + if (parseMaxDelaySeconds <= 0) + { + throw new JsonException($"Invalid value for max-delay-seconds: {parseMaxDelaySeconds}. Value must be greater than 0."); + } + + maxDelaySeconds = parseMaxDelaySeconds; + } + + break; + case "network-timeout-seconds": + if (reader.TokenType is JsonTokenType.Null) + { + networkTimeoutSeconds = null; + } + else + { + int parseNetworkTimeoutSeconds = reader.GetInt32(); + if (parseNetworkTimeoutSeconds <= 0) + { + throw new JsonException($"Invalid value for network-timeout-seconds: {parseNetworkTimeoutSeconds}. 
Value must be greater than 0."); + } + + networkTimeoutSeconds = parseNetworkTimeoutSeconds; + } + + break; + } + } + } + + throw new JsonException("Failed to read the Azure Key Vault Retry Policy Options"); + } + + /// + /// When writing the AKVRetryPolicyOptions back to a JSON file, only write the properties and values + /// when those AKVRetryPolicyOptions are user provided. + /// This avoids polluting the written JSON file with a property the user most likely + /// omitted when writing the original DAB runtime config file. + /// This Write operation is only used when a RuntimeConfig object is serialized to JSON. + /// + public override void Write(Utf8JsonWriter writer, AKVRetryPolicyOptions value, JsonSerializerOptions options) + { + writer.WriteStartObject(); + + if (value?.UserProvidedMode is true) + { + writer.WritePropertyName("mode"); + JsonSerializer.Serialize(writer, value.Mode, options); + } + + if (value?.UserProvidedMaxCount is true) + { + writer.WritePropertyName("max-count"); + JsonSerializer.Serialize(writer, value.MaxCount, options); + } + + if (value?.UserProvidedDelaySeconds is true) + { + writer.WritePropertyName("delay-seconds"); + JsonSerializer.Serialize(writer, value.DelaySeconds, options); + } + + if (value?.UserProvidedMaxDelaySeconds is true) + { + writer.WritePropertyName("max-delay-seconds"); + JsonSerializer.Serialize(writer, value.MaxDelaySeconds, options); + } + + if (value?.UserProvidedNetworkTimeoutSeconds is true) + { + writer.WritePropertyName("network-timeout-seconds"); + JsonSerializer.Serialize(writer, value.NetworkTimeoutSeconds, options); + } + + writer.WriteEndObject(); + } + } +} diff --git a/src/Config/ObjectModel/AKVRetryPolicyMode.cs b/src/Config/ObjectModel/AKVRetryPolicyMode.cs new file mode 100644 index 0000000000..ed9fafc792 --- /dev/null +++ b/src/Config/ObjectModel/AKVRetryPolicyMode.cs @@ -0,0 +1,15 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +using System.Text.Json.Serialization; + +namespace Azure.DataApiBuilder.Config.ObjectModel; + +[JsonConverter(typeof(JsonStringEnumConverter))] +public enum AKVRetryPolicyMode +{ + // Fixed retry policy mode will use a fixed value when waiting on retries + Fixed, + // Exponential retry policy mode will use exponential back-off when waiting on retries + Exponential +} diff --git a/src/Config/ObjectModel/AKVRetryPolicyOptions.cs b/src/Config/ObjectModel/AKVRetryPolicyOptions.cs new file mode 100644 index 0000000000..4b62a5bd1e --- /dev/null +++ b/src/Config/ObjectModel/AKVRetryPolicyOptions.cs @@ -0,0 +1,113 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Serialization; + +namespace Azure.DataApiBuilder.Config.ObjectModel; + +public record AKVRetryPolicyOptions +{ + public const AKVRetryPolicyMode DEFAULT_MODE = AKVRetryPolicyMode.Exponential; + + public const int DEFAULT_MAX_COUNT = 3; + + public const int DEFAULT_DELAY_SECONDS = 1; + + public const int DEFAULT_MAX_DELAY_SECONDS = 60; + + public const int DEFAULT_NETWORK_TIMEOUT_SECONDS = 60; + + [JsonPropertyName("mode")] + public AKVRetryPolicyMode? Mode { get; init; } = null; + + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(Mode))] + public bool UserProvidedMode { get; init; } = false; + + [JsonPropertyName("max-count")] + public int? MaxCount { get; init; } = null; + + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(MaxCount))] + public bool UserProvidedMaxCount { get; init; } = false; + + [JsonPropertyName("delay-seconds")] + public int? DelaySeconds { get; init; } = null; + + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(DelaySeconds))] + public bool UserProvidedDelaySeconds { get; init; } = false; + + [JsonPropertyName("max-delay-seconds")] + public int? 
MaxDelaySeconds { get; init; } = null; + + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(MaxDelaySeconds))] + public bool UserProvidedMaxDelaySeconds { get; init; } = false; + + [JsonPropertyName("network-timeout-seconds")] + public int? NetworkTimeoutSeconds { get; init; } = null; + + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(NetworkTimeoutSeconds))] + public bool UserProvidedNetworkTimeoutSeconds { get; init; } = false; + + public AKVRetryPolicyOptions( + AKVRetryPolicyMode? mode = null, + int? maxCount = null, + int? delaySeconds = null, + int? maxDelaySeconds = null, + int? networkTimeoutSeconds = null) + { + if (mode is not null) + { + this.Mode = mode; + UserProvidedMode = true; + } + else + { + this.Mode = DEFAULT_MODE; + } + + if (maxCount is not null) + { + this.MaxCount = maxCount; + UserProvidedMaxCount = true; + } + else + { + this.MaxCount = DEFAULT_MAX_COUNT; + } + + if (delaySeconds is not null) + { + this.DelaySeconds = delaySeconds; + UserProvidedDelaySeconds = true; + } + else + { + this.DelaySeconds = DEFAULT_DELAY_SECONDS; + } + + if (maxDelaySeconds is not null) + { + this.MaxDelaySeconds = maxDelaySeconds; + UserProvidedMaxDelaySeconds = true; + } + else + { + this.MaxDelaySeconds = DEFAULT_MAX_DELAY_SECONDS; + } + + if (networkTimeoutSeconds is not null) + { + this.NetworkTimeoutSeconds = networkTimeoutSeconds; + UserProvidedNetworkTimeoutSeconds = true; + } + else + { + this.NetworkTimeoutSeconds = DEFAULT_NETWORK_TIMEOUT_SECONDS; + } + } +} diff --git a/src/Config/ObjectModel/AzureKeyVaultOptions.cs b/src/Config/ObjectModel/AzureKeyVaultOptions.cs new file mode 100644 index 0000000000..27094cd16f --- /dev/null +++ b/src/Config/ObjectModel/AzureKeyVaultOptions.cs @@ -0,0 +1,15 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +using System.Text.Json.Serialization; + +namespace Azure.DataApiBuilder.Config.ObjectModel; + +public record AzureKeyVaultOptions +{ + [JsonPropertyName("endpoint")] + public string? Endpoint { get; init; } + + [JsonPropertyName("retry-policy")] + public AKVRetryPolicyOptions? RetryPolicy { get; init; } +} diff --git a/src/Config/ObjectModel/RuntimeConfig.cs b/src/Config/ObjectModel/RuntimeConfig.cs index 1172b60a8f..de46a8e7d0 100644 --- a/src/Config/ObjectModel/RuntimeConfig.cs +++ b/src/Config/ObjectModel/RuntimeConfig.cs @@ -22,6 +22,9 @@ public record RuntimeConfig public RuntimeOptions? Runtime { get; init; } + [JsonPropertyName("azure-key-vault")] + public AzureKeyVaultOptions? AzureKeyVault { get; init; } + public virtual RuntimeEntities Entities { get; init; } public DataSourceFiles? DataSourceFiles { get; init; } @@ -216,11 +219,13 @@ public RuntimeConfig( DataSource DataSource, RuntimeEntities Entities, RuntimeOptions? Runtime = null, - DataSourceFiles? DataSourceFiles = null) + DataSourceFiles? DataSourceFiles = null, + AzureKeyVaultOptions? AzureKeyVault = null) { this.Schema = Schema ?? DEFAULT_CONFIG_SCHEMA_LINK; this.DataSource = DataSource; this.Runtime = Runtime; + this.AzureKeyVault = AzureKeyVault; this.Entities = Entities; this.DefaultDataSourceName = Guid.NewGuid().ToString(); @@ -305,7 +310,7 @@ public RuntimeConfig( /// Dictionary mapping datasourceName to datasource object. /// Dictionary mapping entityName to datasourceName. /// Datasource files which represent list of child runtimeconfigs for multi-db scenario. - public RuntimeConfig(string Schema, DataSource DataSource, RuntimeOptions Runtime, RuntimeEntities Entities, string DefaultDataSourceName, Dictionary DataSourceNameToDataSource, Dictionary EntityNameToDataSourceName, DataSourceFiles? 
DataSourceFiles = null) + public RuntimeConfig(string Schema, DataSource DataSource, RuntimeOptions Runtime, RuntimeEntities Entities, string DefaultDataSourceName, Dictionary DataSourceNameToDataSource, Dictionary EntityNameToDataSourceName, DataSourceFiles? DataSourceFiles = null, AzureKeyVaultOptions? AzureKeyVault = null) { this.Schema = Schema; this.DataSource = DataSource; @@ -315,6 +320,7 @@ public RuntimeConfig(string Schema, DataSource DataSource, RuntimeOptions Runtim _dataSourceNameToDataSource = DataSourceNameToDataSource; _entityNameToDataSourceName = EntityNameToDataSourceName; this.DataSourceFiles = DataSourceFiles; + this.AzureKeyVault = AzureKeyVault; SetupDataSourcesUsed(); } diff --git a/src/Config/RuntimeConfigLoader.cs b/src/Config/RuntimeConfigLoader.cs index b4f72335c3..462c08f3a8 100644 --- a/src/Config/RuntimeConfigLoader.cs +++ b/src/Config/RuntimeConfigLoader.cs @@ -258,6 +258,7 @@ public static JsonSerializerOptions GetSerializationOptions( options.Converters.Add(new MultipleMutationOptionsConverter(options)); options.Converters.Add(new DataSourceConverterFactory(replaceEnvVar)); options.Converters.Add(new HostOptionsConvertorFactory()); + options.Converters.Add(new AKVRetryPolicyOptionsConverterFactory(replaceEnvVar)); if (replaceEnvVar) { diff --git a/src/Service.Tests/Caching/DabCacheServiceIntegrationTests.cs b/src/Service.Tests/Caching/DabCacheServiceIntegrationTests.cs index 74f455c822..706a0c42ad 100644 --- a/src/Service.Tests/Caching/DabCacheServiceIntegrationTests.cs +++ b/src/Service.Tests/Caching/DabCacheServiceIntegrationTests.cs @@ -763,6 +763,7 @@ private static Mock CreateMockRuntimeConfigProvider(strin dataSource, entities, null, + null, null ); mockRuntimeConfig From 8904ef463c128219f238458ed66d3fb2a61fb3c7 Mon Sep 17 00:00:00 2001 From: vadeveka <52937801+vadeveka@users.noreply.github.com> Date: Wed, 16 Jul 2025 09:33:06 -0700 Subject: [PATCH 36/79] Ensure Create policy supported for DWSQL (#2760) ## Why make this 
change? Address #2759 i.e. RLS policy for create operation was not supported originally for DWSQL. ## What is this change? Update validation code to ensure DWSQL is not blocked when permissions contains create operation with a policy ## How was this tested? - [x] Integration Tests (uses sql server currently) - [x] Unit Tests - [x] manual testing against fabric warehouse --- config-generators/dwsql-commands.txt | 1 + .../Configurations/RuntimeConfigValidator.cs | 10 +- .../DwSqlGraphQLMutationTests.cs | 66 +++ .../GraphQLMutationTestBase.cs | 29 +- .../MsSqlGraphQLMutationTests.cs | 47 ++- src/Service.Tests/SqlTests/SqlTestHelper.cs | 6 +- .../UnitTests/ConfigValidationUnitTests.cs | 1 + src/Service.Tests/dab-config.DwSql.json | 377 +++++++++++------- src/Service.Tests/dab-config.MsSql.json | 20 +- 9 files changed, 388 insertions(+), 169 deletions(-) diff --git a/config-generators/dwsql-commands.txt b/config-generators/dwsql-commands.txt index 1f613d7f48..df4940ae59 100644 --- a/config-generators/dwsql-commands.txt +++ b/config-generators/dwsql-commands.txt @@ -57,6 +57,7 @@ update Publisher --config "dab-config.DwSql.json" --permissions "policy_tester_0 update Publisher --config "dab-config.DwSql.json" --permissions "policy_tester_06:read" --fields.include "*" --policy-database "@item.id eq 1940" update Publisher --config "dab-config.DwSql.json" --permissions "database_policy_tester:read" --policy-database "@item.id ne 1234 or @item.id gt 1940" update Publisher --config "dab-config.DwSql.json" --permissions "database_policy_tester:update" --policy-database "@item.id ne 1234" +update Publisher --config "dab-config.DwSql.json" --permissions "database_policy_tester:create" --policy-database "@item.name ne 'New publisher'" update Stock --config "dab-config.DwSql.json" --permissions "authenticated:create,read,update,delete" update Stock --config "dab-config.DwSql.json" --rest commodities --graphql true update Stock --config "dab-config.DwSql.json" --permissions 
"TestNestedFilterFieldIsNull_ColumnForbidden:read" diff --git a/src/Core/Configurations/RuntimeConfigValidator.cs b/src/Core/Configurations/RuntimeConfigValidator.cs index 4d293d0cd2..6ae7051563 100644 --- a/src/Core/Configurations/RuntimeConfigValidator.cs +++ b/src/Core/Configurations/RuntimeConfigValidator.cs @@ -42,6 +42,13 @@ public class RuntimeConfigValidator : IConfigValidator // of the form @claims.*** delimited by space character,end of the line or end of the string. private static readonly string _claimChars = @"@claims\.[^\s\)]*"; + // List of databases that support row level policy with create action + private static readonly HashSet _databaseTypesSupportingCreatePolicy = + [ + DatabaseType.MSSQL, + DatabaseType.DWSQL + ]; + // Error messages. public const string INVALID_CLAIMS_IN_POLICY_ERR_MSG = "One or more claim types supplied in the database policy are not supported."; @@ -808,7 +815,8 @@ public void ValidatePermissionsInConfig(RuntimeConfig runtimeConfig) DataSource entityDataSource = runtimeConfig.GetDataSourceFromEntityName(entityName); - if (entityDataSource.DatabaseType is not DatabaseType.MSSQL && !IsValidDatabasePolicyForAction(action)) + // Create operation does not support defining a database policy for certain database types. + if (!_databaseTypesSupportingCreatePolicy.Contains(entityDataSource.DatabaseType) && !IsValidDatabasePolicyForAction(action)) { throw new DataApiBuilderException( message: $"The Create action does not support defining a database policy." 
+ diff --git a/src/Service.Tests/SqlTests/GraphQLMutationTests/DwSqlGraphQLMutationTests.cs b/src/Service.Tests/SqlTests/GraphQLMutationTests/DwSqlGraphQLMutationTests.cs index 5d063c31fd..18339b2448 100644 --- a/src/Service.Tests/SqlTests/GraphQLMutationTests/DwSqlGraphQLMutationTests.cs +++ b/src/Service.Tests/SqlTests/GraphQLMutationTests/DwSqlGraphQLMutationTests.cs @@ -103,6 +103,72 @@ public override async Task InsertMutationWithOnlyTypenameInSelectionSet() SqlTestHelper.PerformTestEqualJsonStrings(expected, actual.ToString()); } + /// + /// Do: Inserts new Publisher with name = 'New publisher' + /// Check: Mutation fails because the database policy (@item.name ne 'New publisher') prohibits insertion of records with name = 'New publisher'. + /// + [TestMethod] + public async Task InsertMutationFailingDatabasePolicy() + { + string errorMessage = "Could not insert row with given values."; + string msSqlQuery = @" + SELECT count(*) as count + FROM [publishers] + WHERE [name] = 'New publisher' + FOR JSON PATH, + INCLUDE_NULL_VALUES, + WITHOUT_ARRAY_WRAPPER + "; + + string graphQLMutationName = "createPublisher"; + string graphQLMutationPayload = @" + mutation { + createPublisher(item: { id: 1 name: ""New publisher"" }) { + result + } + } + "; + + await InsertMutationFailingDatabasePolicy( + dbQuery: msSqlQuery, + errorMessage: errorMessage, + roleName: "database_policy_tester", + graphQLMutationName: graphQLMutationName, + graphQLMutationPayload: graphQLMutationPayload); + } + + /// + /// Do: Inserts new Publisher with name = 'Not New publisher' + /// Check: Mutation succeeds because the database policy (@item.name ne 'New publisher') is passed + /// + [TestMethod] + public async Task InsertMutationWithDatabasePolicy() + { + string msSqlQuery = @" + SELECT COUNT(*) AS [count] + FROM [publishers] + WHERE [name] = 'Not New publisher' + FOR JSON PATH, + INCLUDE_NULL_VALUES, + WITHOUT_ARRAY_WRAPPER + "; + + string graphqlMutationName = "createPublisher"; + string 
graphQLMutationPayload = @" + mutation { + createPublisher(item: { id: 1 name: ""Not New publisher"" }) { + result + } + } + "; + + await InsertMutationWithDatabasePolicy( + dbQuery: msSqlQuery, + roleName: "database_policy_tester", + graphQLMutationName: graphqlMutationName, + graphQLMutationPayload: graphQLMutationPayload); + } + /// /// Do: Update book in database and return its updated fields /// Check: Result value of success is verified in the response. diff --git a/src/Service.Tests/SqlTests/GraphQLMutationTests/GraphQLMutationTestBase.cs b/src/Service.Tests/SqlTests/GraphQLMutationTests/GraphQLMutationTestBase.cs index d8cd8d7a7e..c9207a6672 100644 --- a/src/Service.Tests/SqlTests/GraphQLMutationTests/GraphQLMutationTestBase.cs +++ b/src/Service.Tests/SqlTests/GraphQLMutationTests/GraphQLMutationTestBase.cs @@ -105,19 +105,10 @@ public virtual async Task InsertMutationWithDefaultBuiltInFunctions(string dbQue /// SELECT query to validate expected result. /// Expected error message. 
/// Custom client role in whose context this authenticated request will be executed - public async Task InsertMutationFailingDatabasePolicy(string dbQuery, string errorMessage, string roleName) + /// graphql request payload + public async Task InsertMutationFailingDatabasePolicy(string dbQuery, string errorMessage, string roleName, string graphQLMutationName, string graphQLMutationPayload) { - string graphQLMutationName = "createPublisher"; - string graphQLMutation = @" - mutation { - createPublisher(item: { name: ""New publisher"" }) { - id - name - } - } - "; - - JsonElement result = await ExecuteGraphQLRequestAsync(graphQLMutation, graphQLMutationName, clientRoleHeader: roleName, isAuthenticated: true); + JsonElement result = await ExecuteGraphQLRequestAsync(graphQLMutationPayload, graphQLMutationName, clientRoleHeader: roleName, isAuthenticated: true); SqlTestHelper.TestForErrorInGraphQLResponse( result.ToString(), @@ -132,6 +123,20 @@ public async Task InsertMutationFailingDatabasePolicy(string dbQuery, string err Assert.AreEqual(dbResponseJson.RootElement.GetProperty("count").GetInt64(), 0); } + /// + /// Do: Attempt to insert a new publisher with name allowed by database policy (@item.name ne 'New publisher') + /// Check: Mutation succeeds. 
+ /// + public async Task InsertMutationWithDatabasePolicy(string dbQuery, string roleName, string graphQLMutationName, string graphQLMutationPayload) + { + await ExecuteGraphQLRequestAsync(graphQLMutationPayload, graphQLMutationName, clientRoleHeader: roleName, isAuthenticated: true); + + string currentDbResponse = await GetDatabaseResultAsync(dbQuery); + + JsonDocument currentResult = JsonDocument.Parse(currentDbResponse); + Assert.AreEqual(1, currentResult.RootElement.GetProperty("count").GetInt64()); + } + /// /// Do: Inserts new book using variables to set its title and publisher_id /// Check: If book with the expected values of the new book is present in the database and diff --git a/src/Service.Tests/SqlTests/GraphQLMutationTests/MsSqlGraphQLMutationTests.cs b/src/Service.Tests/SqlTests/GraphQLMutationTests/MsSqlGraphQLMutationTests.cs index e9a2f07c66..78337d601f 100644 --- a/src/Service.Tests/SqlTests/GraphQLMutationTests/MsSqlGraphQLMutationTests.cs +++ b/src/Service.Tests/SqlTests/GraphQLMutationTests/MsSqlGraphQLMutationTests.cs @@ -96,10 +96,55 @@ FROM [publishers] WITHOUT_ARRAY_WRAPPER "; + string graphqlMutationName = "createPublisher"; + string graphQLMutationPayload = @" + mutation { + createPublisher(item: { name: ""New publisher"" }) { + id + name + } + } + "; + await InsertMutationFailingDatabasePolicy( dbQuery: msSqlQuery, errorMessage: errorMessage, - roleName: "database_policy_tester"); + roleName: "database_policy_tester", + graphQLMutationName: graphqlMutationName, + graphQLMutationPayload: graphQLMutationPayload); + } + + /// + /// Do: Inserts new Publisher with name = 'Not New publisher' + /// Check: Mutation succeeds because the database policy (@item.name ne 'New publisher') is passed + /// + [TestMethod] + public async Task InsertMutationWithDatabasePolicy() + { + string msSqlQuery = @" + SELECT COUNT(*) AS [count] + FROM [publishers] + WHERE [name] = 'Not New publisher' + FOR JSON PATH, + INCLUDE_NULL_VALUES, + WITHOUT_ARRAY_WRAPPER + 
"; + + string graphqlMutationName = "createPublisher"; + string graphQLMutationPayload = @" + mutation { + createPublisher(item: { name: ""Not New publisher"" }) { + id + name + } + } + "; + + await InsertMutationWithDatabasePolicy( + dbQuery: msSqlQuery, + roleName: "database_policy_tester", + graphQLMutationName: graphqlMutationName, + graphQLMutationPayload: graphQLMutationPayload); } /// diff --git a/src/Service.Tests/SqlTests/SqlTestHelper.cs b/src/Service.Tests/SqlTests/SqlTestHelper.cs index e6dbfaa8d1..6193d843a0 100644 --- a/src/Service.Tests/SqlTests/SqlTestHelper.cs +++ b/src/Service.Tests/SqlTests/SqlTestHelper.cs @@ -191,18 +191,18 @@ public static void TestForErrorInGraphQLResponse(string response, string message if (message is not null) { Console.WriteLine(response); - Assert.IsTrue(response.Contains(message), $"Message \"{message}\" not found in error"); + Assert.IsTrue(response.Contains(message), $"Message \"{message}\" not found in error {response}"); } if (statusCode != null) { - Assert.IsTrue(response.Contains($"\"code\":\"{statusCode}\""), $"Status code \"{statusCode}\" not found in error"); + Assert.IsTrue(response.Contains($"\"code\":\"{statusCode}\""), $"Status code \"{statusCode}\" not found in error {response}"); } if (path is not null) { Console.WriteLine(response); - Assert.IsTrue(response.Contains(path), $"Path \"{path}\" not found in error"); + Assert.IsTrue(response.Contains(path), $"Path \"{path}\" not found in error {response}"); } } diff --git a/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs b/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs index 3c49f8344e..16c850b0cd 100644 --- a/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs +++ b/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs @@ -192,6 +192,7 @@ public void InvalidActionSpecifiedForARole(string dbPolicy, EntityActionOperatio [DataRow(DatabaseType.MySQL, "", false, DisplayName = "Database Policy left empty for Create passes for MySQL")] 
[DataRow(DatabaseType.MySQL, " ", false, DisplayName = "Database Policy only whitespace for Create passes for MySQL")] [DataRow(DatabaseType.MSSQL, "2 eq @item.col3", false, DisplayName = "Database Policy defined for Create passes for MSSQL")] + [DataRow(DatabaseType.DWSQL, "2 eq @item.col3", false, DisplayName = "Database Policy defined for Create passes for DWSQL")] public void AddDatabasePolicyToCreateOperation(DatabaseType dbType, string dbPolicy, bool errorExpected) { EntityActionOperation action = EntityActionOperation.Create; diff --git a/src/Service.Tests/dab-config.DwSql.json b/src/Service.Tests/dab-config.DwSql.json index c4a9e6b09d..78f9e91480 100644 --- a/src/Service.Tests/dab-config.DwSql.json +++ b/src/Service.Tests/dab-config.DwSql.json @@ -2,7 +2,7 @@ "$schema": "https://github.com/Azure/data-api-builder/releases/download/vmajor.minor.patch/dab.draft.schema.json", "data-source": { "database-type": "dwsql", - "connection-string": "Server=tcp:{your_server}.database.windows.net,1433;Database={your_database};User ID={your_user_name};Password={your_password_here};Encrypt=True;TrustServerCertificate=False;Connection Timeout=30;", + "connection-string": "Server=tcp:127.0.0.1,1433;Persist Security Info=False;User ID=sa;Password=REPLACEME;MultipleActiveResultSets=False;Connection Timeout=5;", "options": { "set-session-context": true } @@ -76,23 +76,6 @@ } ] }, - { - "role": "database_policy_tester", - "actions": [ - { - "action": "update", - "policy": { - "database": "@item.id ne 1234" - } - }, - { - "action": "read", - "policy": { - "database": "@item.id ne 1234 or @item.id gt 1940" - } - } - ] - }, { "role": "policy_tester_01", "actions": [ @@ -252,14 +235,41 @@ "action": "delete" } ] + }, + { + "role": "database_policy_tester", + "actions": [ + { + "action": "create", + "policy": { + "database": "@item.name ne 'New publisher'" + } + }, + { + "action": "update", + "policy": { + "database": "@item.id ne 1234" + } + }, + { + "action": "read", + "policy": { + 
"database": "@item.id ne 1234 or @item.id gt 1940" + } + } + ] } ], "relationships": { "books": { "cardinality": "many", "target.entity": "Book", - "source.fields": [ "id" ], - "target.fields": [ "publisher_id" ], + "source.fields": [ + "id" + ], + "target.fields": [ + "publisher_id" + ], "linking.source.fields": [], "linking.target.fields": [] } @@ -336,6 +346,20 @@ } ] }, + { + "role": "database_policy_tester", + "actions": [ + { + "action": "read" + }, + { + "action": "update", + "policy": { + "database": "@item.pieceid ne 1" + } + } + ] + }, { "role": "test_role_with_noread", "actions": [ @@ -420,14 +444,6 @@ "enabled": true }, "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - }, { "role": "authenticated", "actions": [ @@ -445,6 +461,14 @@ } ] }, + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + }, { "role": "TestNestedFilterFieldIsNull_ColumnForbidden", "actions": [ @@ -857,27 +881,22 @@ ] } ], + "mappings": { + "id": "id", + "title": "title" + }, "relationships": { "websiteplacement": { "cardinality": "one", "target.entity": "BookWebsitePlacement", - "source.fields": [ "id" ], - "target.fields": [ "book_id" ], - "linking.source.fields": [], - "linking.target.fields": [] - }, - "authors": { - "cardinality": "many", - "target.entity": "Author", - "source.fields": [ "id" ], - "target.fields": [ "id" ], - "linking.object": "book_author_link", - "linking.source.fields": [ + "source.fields": [ + "id" + ], + "target.fields": [ "book_id" ], - "linking.target.fields": [ - "author_id" - ] + "linking.source.fields": [], + "linking.target.fields": [] }, "publishers": { "cardinality": "one", @@ -887,20 +906,39 @@ ], "target.fields": [ "id" - ] + ], + "linking.source.fields": [], + "linking.target.fields": [] }, "reviews": { "cardinality": "many", "target.entity": "Review", - "source.fields": [ "id" ], - "target.fields": [ "book_id" ], + "source.fields": [ + "id" + ], + "target.fields": [ + "book_id" + ], 
"linking.source.fields": [], "linking.target.fields": [] + }, + "authors": { + "cardinality": "many", + "target.entity": "Author", + "source.fields": [ + "id" + ], + "target.fields": [ + "id" + ], + "linking.object": "book_author_link", + "linking.source.fields": [ + "book_id" + ], + "linking.target.fields": [ + "author_id" + ] } - }, - "mappings": { - "id": "id", - "title": "title" } }, "BookWebsitePlacement": { @@ -958,8 +996,12 @@ "books": { "cardinality": "one", "target.entity": "Book", - "source.fields": [ "book_id" ], - "target.fields": [ "id" ], + "source.fields": [ + "book_id" + ], + "target.fields": [ + "id" + ], "linking.source.fields": [], "linking.target.fields": [] } @@ -1014,11 +1056,19 @@ "books": { "cardinality": "many", "target.entity": "Book", - "source.fields": [ "id" ], - "target.fields": [ "id" ], + "source.fields": [ + "id" + ], + "target.fields": [ + "id" + ], "linking.object": "book_author_link", - "linking.source.fields": [ "author_id" ], - "linking.target.fields": [ "book_id" ] + "linking.source.fields": [ + "author_id" + ], + "linking.target.fields": [ + "book_id" + ] } } }, @@ -1078,8 +1128,12 @@ "books": { "cardinality": "one", "target.entity": "Book", - "source.fields": [ "book_id" ], - "target.fields": [ "id" ], + "source.fields": [ + "book_id" + ], + "target.fields": [ + "id" + ], "linking.source.fields": [], "linking.target.fields": [] } @@ -1183,8 +1237,12 @@ "myseries": { "cardinality": "one", "target.entity": "series", - "source.fields": [ "series_id" ], - "target.fields": [ "id" ], + "source.fields": [ + "series_id" + ], + "target.fields": [ + "id" + ], "linking.source.fields": [], "linking.target.fields": [] } @@ -1470,8 +1528,12 @@ "fungus": { "cardinality": "one", "target.entity": "Fungus", - "source.fields": [ "species" ], - "target.fields": [ "habitat" ], + "source.fields": [ + "species" + ], + "target.fields": [ + "habitat" + ], "linking.source.fields": [], "linking.target.fields": [] } @@ -1552,11 +1614,15 @@ "spores": 
"hazards" }, "relationships": { - "shrub": { + "Shrub": { "cardinality": "one", "target.entity": "Shrub", - "source.fields": [ "habitat" ], - "target.fields": [ "species" ], + "source.fields": [ + "habitat" + ], + "target.fields": [ + "species" + ], "linking.source.fields": [], "linking.target.fields": [] } @@ -1667,19 +1733,6 @@ } ] }, - { - "role": "TestNestedFilterManyOne_ColumnForbidden", - "actions": [ - { - "action": "read", - "fields": { - "exclude": [ - "name" - ] - } - } - ] - }, { "role": "TestNestedFilterManyOne_EntityReadForbidden", "actions": [ @@ -1694,6 +1747,19 @@ } ] }, + { + "role": "TestNestedFilterManyOne_ColumnForbidden", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [ + "name" + ] + } + } + ] + }, { "role": "TestNestedFilterOneMany_ColumnForbidden", "actions": [ @@ -1715,8 +1781,12 @@ "comics": { "cardinality": "many", "target.entity": "Comic", - "source.fields": [ "id" ], - "target.fields": [ "series_id" ], + "source.fields": [ + "id" + ], + "target.fields": [ + "series_id" + ], "linking.source.fields": [], "linking.target.fields": [] } @@ -2209,16 +2279,16 @@ } ] }, - "GetBooks": { + "GetBook": { "source": { - "object": "get_books", + "object": "get_book_by_id", "type": "stored-procedure" }, "graphql": { - "enabled": true, - "operation": "query", + "enabled": false, + "operation": "mutation", "type": { - "singular": "GetBooks", + "singular": "GetBook", "plural": "GetBooks" } }, @@ -2247,16 +2317,16 @@ } ] }, - "GetBook": { + "GetBooks": { "source": { - "object": "get_book_by_id", + "object": "get_books", "type": "stored-procedure" }, "graphql": { - "enabled": false, - "operation": "mutation", + "enabled": true, + "operation": "query", "type": { - "singular": "GetBook", + "singular": "GetBooks", "plural": "GetBooks" } }, @@ -2301,7 +2371,7 @@ "rest": { "enabled": true, "methods": [ - "get" + "post" ] }, "permissions": [ @@ -2342,7 +2412,7 @@ "rest": { "enabled": true, "methods": [ - "get" + "post" ] }, "permissions": [ @@ 
-2380,7 +2450,7 @@ "rest": { "enabled": true, "methods": [ - "get" + "post" ] }, "permissions": [ @@ -2444,21 +2514,17 @@ } ] }, - "InsertAndDisplayAllBooksUnderGivenPublisher": { + "DeleteLastInsertedBook": { "source": { - "object": "insert_and_display_all_books_for_given_publisher", - "type": "stored-procedure", - "parameters": { - "title": "MyTitle", - "publisher_name": "MyPublisher" - } + "object": "delete_last_inserted_book", + "type": "stored-procedure" }, "graphql": { "enabled": true, "operation": "mutation", "type": { - "singular": "InsertAndDisplayAllBooksUnderGivenPublisher", - "plural": "InsertAndDisplayAllBooksUnderGivenPublishers" + "singular": "DeleteLastInsertedBook", + "plural": "DeleteLastInsertedBooks" } }, "rest": { @@ -2528,17 +2594,21 @@ } ] }, - "DeleteLastInsertedBook": { + "InsertAndDisplayAllBooksUnderGivenPublisher": { "source": { - "object": "delete_last_inserted_book", - "type": "stored-procedure" + "object": "insert_and_display_all_books_for_given_publisher", + "type": "stored-procedure", + "parameters": { + "title": "MyTitle", + "publisher_name": "MyPublisher" + } }, "graphql": { "enabled": true, "operation": "mutation", "type": { - "singular": "DeleteLastInsertedBook", - "plural": "DeleteLastInsertedBooks" + "singular": "InsertAndDisplayAllBooksUnderGivenPublisher", + "plural": "InsertAndDisplayAllBooksUnderGivenPublishers" } }, "rest": { @@ -2566,43 +2636,6 @@ } ] }, - "dbo_DimAccount": { - "source": "dbo.DimAccount", - "permissions": [ - { - "role": "anonymous", - "actions": [ - "read", - "create", - "update", - "delete" - ] - } - ], - "relationships": { - - "parent_account": { - "cardinality": "one", - "target.entity": "dbo_DimAccount", - "source.fields": [ - "ParentAccountKey" - ], - "target.fields": [ - "AccountKey" - ] - }, - "child_accounts": { - "cardinality": "many", - "target.entity": "dbo_DimAccount", - "source.fields": [ - "AccountKey" - ], - "target.fields": [ - "ParentAccountKey" - ] - } - } - }, "DateOnlyTable": { 
"source": { "object": "date_only_table", @@ -2627,10 +2660,62 @@ "actions": [ { "action": "*" + } + ] + } + ] + }, + "dbo_DimAccount": { + "source": { + "object": "DimAccount", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "dbo_DimAccount", + "plural": "dbo_DimAccounts" } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "*" + } ] + } + ], + "relationships": { + "parent_account": { + "cardinality": "one", + "target.entity": "dbo_DimAccount", + "source.fields": [ + "ParentAccountKey" + ], + "target.fields": [ + "AccountKey" + ], + "linking.source.fields": [], + "linking.target.fields": [] + }, + "child_accounts": { + "cardinality": "many", + "target.entity": "dbo_DimAccount", + "source.fields": [ + "AccountKey" + ], + "target.fields": [ + "ParentAccountKey" + ], + "linking.source.fields": [], + "linking.target.fields": [] + } } - ] } } -} +} \ No newline at end of file diff --git a/src/Service.Tests/dab-config.MsSql.json b/src/Service.Tests/dab-config.MsSql.json index c1eb906572..0f3b1e5f83 100644 --- a/src/Service.Tests/dab-config.MsSql.json +++ b/src/Service.Tests/dab-config.MsSql.json @@ -1969,8 +1969,12 @@ "fungus": { "cardinality": "one", "target.entity": "Fungus", - "source.fields": [ "species" ], - "target.fields": [ "habitat" ], + "source.fields": [ + "species" + ], + "target.fields": [ + "habitat" + ], "linking.source.fields": [], "linking.target.fields": [] } @@ -2048,11 +2052,15 @@ "spores": "hazards" }, "relationships": { - "shrub": { + "Shrub": { "cardinality": "one", "target.entity": "Shrub", - "source.fields": [ "habitat" ], - "target.fields": [ "species" ], + "source.fields": [ + "habitat" + ], + "target.fields": [ + "species" + ], "linking.source.fields": [], "linking.target.fields": [] } @@ -3792,4 +3800,4 @@ ] } } -} +} \ No newline at end of file From 7f5e0544488e6786c6652444554f64c142fb5a88 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Wed, 
16 Jul 2025 21:19:34 +0200 Subject: [PATCH 37/79] Merge from source --- src/Core/Resolvers/DWSqlQueryBuilder.cs | 93 +++++++------------------ src/Core/Resolvers/MsSqlQueryBuilder.cs | 56 +++++++++------ src/Core/Resolvers/QueryExecutor.cs | 5 +- src/Core/Services/ExecutionHelper.cs | 1 + src/Service/dab-config.json | 81 ++++++++++++++++++++- 5 files changed, 144 insertions(+), 92 deletions(-) diff --git a/src/Core/Resolvers/DWSqlQueryBuilder.cs b/src/Core/Resolvers/DWSqlQueryBuilder.cs index 46f5dbbdd7..510155fbfb 100644 --- a/src/Core/Resolvers/DWSqlQueryBuilder.cs +++ b/src/Core/Resolvers/DWSqlQueryBuilder.cs @@ -170,24 +170,8 @@ private string BuildWithJsonFunc(SqlQueryStructure structure) /// private string BuildWithStringAgg(SqlQueryStructure structure, bool subQueryStructure = false) { - string subQueryAlias = "CountQuery"; - - string countSql = $" CROSS JOIN ( {BuildSqlCountQuery(structure)} ) {subQueryAlias}"; - - //Add a new column to the structure if not already there - if (!structure.Columns.Exists(c => c.ColumnName == "RecordCount")) - { - structure.Columns.Add(new LabelledColumn("", subQueryAlias, "RecordCount", "RecordCount", subQueryAlias)); - } - - //Add a subquery 'a' ti the structure - structure.JoinQueries.Add(subQueryAlias, structure); - string columns = GenerateColumnsAsJson(structure, subQueryStructure); - - structure.JoinQueries.Remove(subQueryAlias); - - string fromSql = $"{BuildSqlQuery(structure, countSql)}"; + string fromSql = $"{BuildSqlQuery(structure)}"; string query = $"SELECT {columns}" + $" FROM ({fromSql}) AS {QuoteIdentifier(structure.SourceAlias)}"; return query; @@ -204,11 +188,25 @@ private string BuildWithStringAgg(SqlQueryStructure structure, bool subQueryStru /// FROM dbo_books AS[table0] /// OUTER APPLY(SubQuery generated by recursive call to build function, will create the _subq tables) /// - private string BuildSqlQuery(SqlQueryStructure structure, string? 
subQuery) + private string BuildSqlQuery(SqlQueryStructure structure) { string dataIdent = QuoteIdentifier(SqlQueryStructure.DATA_IDENT); StringBuilder fromSql = new(); - + + fromSql.Append($"{QuoteIdentifier(structure.DatabaseObject.SchemaName)}.{QuoteIdentifier(structure.DatabaseObject.Name)} " + + $"AS {QuoteIdentifier($"{structure.SourceAlias}")}{Build(structure.Joins)}"); + + fromSql.Append(string.Join( + "", + structure.JoinQueries.Select( + x => $" OUTER APPLY ({BuildWithStringAgg(x.Value, true)}) AS {QuoteIdentifier(x.Key)}({dataIdent})"))); + + string predicates = JoinPredicateStrings( + structure.GetDbPolicyForOperation(EntityActionOperation.Read), + structure.FilterPredicates, + Build(structure.Predicates), + Build(structure.PaginationMetadata.PaginationPredicate)); + string aggregations = string.Empty; if (structure.GroupByMetadata.Aggregations.Count > 0) { @@ -222,27 +220,15 @@ private string BuildSqlQuery(SqlQueryStructure structure, string? subQuery) } } - fromSql.Append($"{QuoteIdentifier(structure.DatabaseObject.SchemaName)}.{QuoteIdentifier(structure.DatabaseObject.Name)} " + - $"AS {QuoteIdentifier($"{structure.SourceAlias}")}{Build(structure.Joins)}"); - - fromSql.Append(string.Join( - "", - structure.JoinQueries.Select( - x => $" OUTER APPLY ({BuildWithStringAgg(x.Value, true)}) AS {QuoteIdentifier(x.Key)}({dataIdent})"))); - - - - string query = $"SELECT {columns}" - + $" FROM {fromSql}" - + $" {subQuery}" + StringBuilder queryBuilder = new(); + queryBuilder.Append($"SELECT TOP {structure.Limit()} {WrappedColumns(structure)} {aggregations}"); + queryBuilder.Append($" FROM {fromSql}"); + queryBuilder.Append($" WHERE {predicates}"); - - - - // Add GROUP BY clause if there are any group by columns + // Add GROUP BY clause if there are any group by columns if (structure.GroupByMetadata.Fields.Any()) { - query.Append($" GROUP BY {string.Join(", ", structure.GroupByMetadata.Fields.Values.Select(c => Build(c)))}"); + queryBuilder.Append($" GROUP BY 
{string.Join(", ", structure.GroupByMetadata.Fields.Values.Select(c => Build(c)))}"); } if (structure.GroupByMetadata.Aggregations.Count > 0) @@ -253,47 +239,20 @@ private string BuildSqlQuery(SqlQueryStructure structure, string? subQuery) if (havingPredicates.Any()) { - query.Append($" HAVING {Build(havingPredicates)}"); + queryBuilder.Append($" HAVING {Build(havingPredicates)}"); } } if (structure.OrderByColumns.Any()) { - query.Append($" ORDER BY {Build(structure.OrderByColumns)}"); + queryBuilder.Append($" ORDER BY {Build(structure.OrderByColumns)}"); } - query.Append($" OFFSET {structure.Offset()} ROWS FETCH NEXT {structure.Limit()} ROWS ONLY"); + string query = queryBuilder.ToString(); - return query; } - private string BuildSqlCountQuery(SqlQueryStructure structure) - { - string dataIdent = QuoteIdentifier(SqlQueryStructure.DATA_IDENT); - StringBuilder fromSql = new(); - - fromSql.Append($"{QuoteIdentifier(structure.DatabaseObject.SchemaName)}.{QuoteIdentifier(structure.DatabaseObject.Name)} " + - $"AS {QuoteIdentifier($"{structure.SourceAlias}")}{Build(structure.Joins)}"); - - fromSql.Append(string.Join( - "", - structure.JoinQueries.Select( - x => $" OUTER APPLY ({BuildAsJson(x.Value, true)}) AS {QuoteIdentifier(x.Key)}({dataIdent})"))); - - string predicates = JoinPredicateStrings( - structure.GetDbPolicyForOperation(EntityActionOperation.Read), - structure.FilterPredicates, - Build(structure.Predicates), - Build(structure.PaginationMetadata.PaginationPredicate)); - - string query = $"SELECT cast(count(1) as varchar(50)) as RecordCount " - + $" FROM {fromSql}" - + $" WHERE {predicates}"; - return query; - } - - /// /// Generate the columns selected and wrap them with JSON_OBJECT /// Example: diff --git a/src/Core/Resolvers/MsSqlQueryBuilder.cs b/src/Core/Resolvers/MsSqlQueryBuilder.cs index 669fb4e2ea..60f7e26b88 100644 --- a/src/Core/Resolvers/MsSqlQueryBuilder.cs +++ b/src/Core/Resolvers/MsSqlQueryBuilder.cs @@ -32,7 +32,8 @@ public override string 
QuoteIdentifier(string ident) /// public string Build(SqlQueryStructure structure) { - string query; + StringBuilder query = new(); + string dataIdent = QuoteIdentifier(SqlQueryStructure.DATA_IDENT); string fromSql = $"{QuoteIdentifier(structure.DatabaseObject.SchemaName)}.{QuoteIdentifier(structure.DatabaseObject.Name)} " + $"AS {QuoteIdentifier($"{structure.SourceAlias}")}{Build(structure.Joins)}"; @@ -46,32 +47,47 @@ public string Build(SqlQueryStructure structure) string aggregations = BuildAggregationColumns(structure); - - string orderBy = $" ORDER BY {Build(structure.OrderByColumns)}"; - //Add recordcount if needed if (structure.IsListQuery) { - string recordCountSql = $"SELECT cast(count(1) as int) as RecordCount " - + $" FROM {fromSql}" - + $" WHERE {predicates}"; - fromSql += $" OUTER APPLY ({recordCountSql}) RecordCountQuery"; - query = $"SELECT {WrappedColumns(structure)}, RecordCountQuery.RecordCount" - + $" FROM {fromSql}" - + $" WHERE {predicates}" - + $" OFFSET {structure.Offset()} ROWS FETCH NEXT {structure.Limit()} ROWS ONLY"; + StringBuilder recordCountSql = new(); + + recordCountSql.Append($"SELECT cast(count(1) as int) as RecordCount ") + .Append($" FROM {fromSql}") + .Append($" WHERE {predicates}") + .Append(BuildGroupBy(structure)) + .Append(BuildHaving(structure)); + + fromSql += $" OUTER APPLY ({recordCountSql.ToString()}) RecordCountQuery"; + + query.Append ($"SELECT {WrappedColumns(structure)} {aggregations}, RecordCountQuery.RecordCount") + .Append($" FROM {fromSql}") + .Append($" WHERE {predicates}") + .Append(BuildGroupBy(structure)) + .Append(BuildHaving(structure)) + .Append(BuildOrderBy(structure)) + .Append($" OFFSET {structure.Offset()} ROWS FETCH NEXT {structure.Limit()} ROWS ONLY") + .Append(BuildJsonPath(structure)); } else { - query = $"SELECT {WrappedColumns(structure)}" - + $" FROM {fromSql}" - + $" WHERE {predicates}"; + query.Append ($"SELECT {WrappedColumns(structure)} {aggregations}") + .Append($" FROM {fromSql}") + 
.Append($" WHERE {predicates}") + .Append(BuildGroupBy(structure)) + .Append(BuildHaving(structure)) + .Append(BuildOrderBy(structure)) + .Append(BuildJsonPath(structure)); } - - query.Append(BuildGroupBy(structure)) - .Append(BuildHaving(structure)) - .Append(BuildOrderBy(structure)) - .Append(BuildJsonPath(structure)); + /* + query.Append($" FROM {fromSql}") + .Append($" WHERE {predicates}") + .Append(BuildGroupBy(structure)) + .Append(BuildHaving(structure)) + .Append(BuildOrderBy(structure)) + .Append(" OFFSET {structure.Offset()} ROWS FETCH NEXT {structure.Limit()} ROWS ONLY") + .Append(BuildJsonPath(structure)); + */ return query.ToString(); } diff --git a/src/Core/Resolvers/QueryExecutor.cs b/src/Core/Resolvers/QueryExecutor.cs index 543e0af166..32a1b3bf94 100644 --- a/src/Core/Resolvers/QueryExecutor.cs +++ b/src/Core/Resolvers/QueryExecutor.cs @@ -128,7 +128,6 @@ public QueryExecutor(DbExceptionParser dbExceptionParser, QueryExecutorLogger.LogDebug($"Paramaters: {string.Join(", ", parameters.Select(param => $"{param.Key}: {param.Value?.Value} (DbType: {param.Value?.DbType}, SqlDbType: {param.Value?.SqlDbType})"))}"); } - TResult? result = ExecuteQueryAgainstDb(conn, sqltext, parameters, dataReaderHandler, httpContext, dataSourceName, args); if (retryAttempt > 1) @@ -203,9 +202,8 @@ public QueryExecutor(DbExceptionParser dbExceptionParser, // When IsLateConfigured is true we are in a hosted scenario and do not reveal query information. 
if (!ConfigProvider.IsLateConfigured) { - QueryExecutorLogger.LogDebug("{correlationId} {ts} Executing query: {queryText}", correlationId, DateTime.Now.ToString() , sqltext); - string correlationId = HttpContextExtensions.GetLoggerCorrelationId(httpContext); + QueryExecutorLogger.LogDebug("{correlationId} {ts} Executing query: {queryText}", correlationId, DateTime.Now.ToString() , sqltext); QueryExecutorLogger.LogDebug("{correlationId} Executing query: {queryText}", correlationId, sqltext); } @@ -483,6 +481,7 @@ public async Task { dbDataReader.NextResult(); } + DbResultSet dbResultSet = new(resultProperties: GetResultPropertiesAsync(dbDataReader).Result ?? new()); long availableBytes = _maxResponseSizeBytes; while (await ReadAsync(dbDataReader)) diff --git a/src/Core/Services/ExecutionHelper.cs b/src/Core/Services/ExecutionHelper.cs index 737c2102f9..b5034ee02a 100644 --- a/src/Core/Services/ExecutionHelper.cs +++ b/src/Core/Services/ExecutionHelper.cs @@ -20,6 +20,7 @@ using HotChocolate.Execution.Processing; using HotChocolate.Language; using HotChocolate.Resolvers; +using HotChocolate.Types.Descriptors.Definitions; using NodaTime.Text; using Kestral = Microsoft.AspNetCore.Server.Kestrel.Core.Internal.Http.HttpMethod; diff --git a/src/Service/dab-config.json b/src/Service/dab-config.json index b1099fe7d8..4a97aa10e8 100644 --- a/src/Service/dab-config.json +++ b/src/Service/dab-config.json @@ -45,7 +45,7 @@ "entities": { "BillOfLading": { "source": { - "object": "silver_ops.BillOfLading", + "object": "silver_ops.v_BillOfLading", "type": "table", "key-fields": [ "systemId" @@ -110,7 +110,7 @@ }, "BillOfLadingParty": { "source": { - "object": "silver_ops.BillOfLadingParty", + "object": "silver_ops.v_BillOfLadingParty", "type": "table", "key-fields": [ "systemId" @@ -617,6 +617,38 @@ } ] }, + + "InvoiceSearch": { + "source": { + "object": "silver_ops.usp_invoiceSearch", + "type": "stored-procedure", + "parameters": { + "searchString": "string", + "customerScope": 
"string" + } + }, + "graphql": { + "enabled": true, + "type": { + "singular": "InvoiceSearch", + "plural": "InvoiceSearches" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "execute" + } + ] + } + ] + }, + "GlobalSearch": { "source": { "object": "silver_ops.usp_globalSearch", @@ -2814,6 +2846,51 @@ } ] }, + "DelayEvent": { + "source": { + "object": "silver_trk.ww_DelayEvent", + "type": "table", + "key-fields": [ + "delayEventId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "DelayEvent", + "plural": "DelayEvents" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "ShipmentEquipment": { + "cardinality": "one", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentEquipmentId" + ], + "target.fields": [ + "id" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, "TrackingEvent": { "source": { "object": "silver_trk.v_TrackingEvent", From dc8d6ecbc36a58bed5bce7b4851ee2793e15e8c8 Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Wed, 16 Jul 2025 15:19:14 -0700 Subject: [PATCH 38/79] Addition & Deserialization of Azure Log Analytics Properties (#2727) ## Why make this change? Fixes issue #2726 ## What is this change? Adds the properties needed to support Azure Log Analytics to the DAB schema and configuration objects as well as the deserialization of those properties. ## How was this tested? 
- [ ] Integration Tests - [x] Unit Tests - [x] Manual Testing --------- Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> Co-authored-by: Ruben Cerna Co-authored-by: aaronburtle <93220300+aaronburtle@users.noreply.github.com> --- schemas/dab.draft.schema.json | 68 +++++++ .../AzureLogAnalyticsAuthOptionsConverter.cs | 112 ++++++++++++ ...zureLogAnalyticsOptionsConverterFactory.cs | 168 ++++++++++++++++++ .../AzureLogAnalyticsAuthOptions.cs | 80 +++++++++ .../ObjectModel/AzureLogAnalyticsOptions.cs | 116 ++++++++++++ src/Config/ObjectModel/TelemetryOptions.cs | 6 +- src/Config/RuntimeConfigLoader.cs | 2 + .../Configurations/RuntimeConfigValidator.cs | 21 +++ .../Configuration/ConfigurationTests.cs | 126 +++++++++++++ ...untimeConfigLoaderJsonDeserializerTests.cs | 6 +- src/Service/Startup.cs | 29 ++- 11 files changed, 730 insertions(+), 4 deletions(-) create mode 100644 src/Config/Converters/AzureLogAnalyticsAuthOptionsConverter.cs create mode 100644 src/Config/Converters/AzureLogAnalyticsOptionsConverterFactory.cs create mode 100644 src/Config/ObjectModel/AzureLogAnalyticsAuthOptions.cs create mode 100644 src/Config/ObjectModel/AzureLogAnalyticsOptions.cs diff --git a/schemas/dab.draft.schema.json b/schemas/dab.draft.schema.json index 6893934ef3..082e2a8de5 100644 --- a/schemas/dab.draft.schema.json +++ b/schemas/dab.draft.schema.json @@ -410,6 +410,74 @@ }, "required": ["endpoint"] }, + "azure-log-analytics": { + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "description": "Allow enabling/disabling Azure Log Analytics.", + "default": false + }, + "auth": { + "type": "object", + "additionalProperties": false, + "properties": { + "workspace-id": { + "type": [ "string", "null" ], + "description": "Azure Log Analytics Workspace ID" + }, + "dcr-immutable-id": { + "type": [ "string", "null" ], + 
"description": "DCR ID for entra-id mode" + }, + "dce-endpoint": { + "type": [ "string", "null" ], + "description": "DCE endpoint for entra-id mode" + } + } + }, + "log-type": { + "type": "string", + "description": "Custom log table name in Log Analytics", + "default": "DabLogs" + }, + "flush-interval-seconds": { + "type": "integer", + "description": "Interval between log batch pushes (in seconds)", + "default": 5 + } + }, + "if": { + "properties": { + "enabled": { + "const": true + } + } + }, + "then": { + "properties": { + "auth": { + "properties": { + "workspace-id": { + "type": "string", + "description": "Azure Log Analytics Workspace ID" + }, + "dcr-immutable-id": { + "type": "string", + "description": "DCR ID for entra-id mode" + }, + "dce-endpoint": { + "type": "string", + "description": "DCE endpoint for entra-id mode" + } + }, + "required": [ "workspace-id", "dcr-immutable-id", "dce-endpoint" ] + } + }, + "required": [ "auth" ] + } + }, "log-level": { "type": "object", "description": "Global configuration of log level, defines logging severity levels for specific classes, when 'null' it will set logging level based on 'host: mode' property", diff --git a/src/Config/Converters/AzureLogAnalyticsAuthOptionsConverter.cs b/src/Config/Converters/AzureLogAnalyticsAuthOptionsConverter.cs new file mode 100644 index 0000000000..29f30c8b95 --- /dev/null +++ b/src/Config/Converters/AzureLogAnalyticsAuthOptionsConverter.cs @@ -0,0 +1,112 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json; +using System.Text.Json.Serialization; +using Azure.DataApiBuilder.Config.ObjectModel; + +namespace Azure.DataApiBuilder.Config.Converters; + +internal class AzureLogAnalyticsAuthOptionsConverter : JsonConverter +{ + // Determines whether to replace environment variable with its + // value or not while deserializing. 
+ private bool _replaceEnvVar; + + /// Whether to replace environment variable with its + /// value or not while deserializing. + public AzureLogAnalyticsAuthOptionsConverter(bool replaceEnvVar) + { + _replaceEnvVar = replaceEnvVar; + } + + /// + /// Defines how DAB reads Azure Log Analytics Auth options and defines which values are + /// used to instantiate AzureLogAnalyticsAuthOptions. + /// + /// Thrown when improperly formatted Azure Log Analytics Auth options are provided. + public override AzureLogAnalyticsAuthOptions? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + if (reader.TokenType is JsonTokenType.StartObject) + { + string? workspaceId = null; + string? dcrImmutableId = null; + string? dceEndpoint = null; + + while (reader.Read()) + { + if (reader.TokenType == JsonTokenType.EndObject) + { + return new AzureLogAnalyticsAuthOptions(workspaceId, dcrImmutableId, dceEndpoint); + } + + string? propertyName = reader.GetString(); + + reader.Read(); + switch (propertyName) + { + case "workspace-id": + if (reader.TokenType is not JsonTokenType.Null) + { + workspaceId = reader.DeserializeString(_replaceEnvVar); + } + + break; + + case "dcr-immutable-id": + if (reader.TokenType is not JsonTokenType.Null) + { + dcrImmutableId = reader.DeserializeString(_replaceEnvVar); + } + + break; + + case "dce-endpoint": + if (reader.TokenType is not JsonTokenType.Null) + { + dceEndpoint = reader.DeserializeString(_replaceEnvVar); + } + + break; + + default: + throw new JsonException($"Unexpected property {propertyName}"); + } + } + + } + + throw new JsonException("Failed to read the Azure Log Analytics Auth Options"); + } + + /// + /// When writing the AzureLogAnalyticsAuthOptions back to a JSON file, only write the properties + /// if they are user provided. This avoids polluting the written JSON file with properties + /// the user most likely omitted when writing the original DAB runtime config file. 
+ /// This Write operation is only used when a RuntimeConfig object is serialized to JSON. + /// + public override void Write(Utf8JsonWriter writer, AzureLogAnalyticsAuthOptions value, JsonSerializerOptions options) + { + writer.WriteStartObject(); + + if (value?.UserProvidedWorkspaceId is true) + { + writer.WritePropertyName("workspace-id"); + JsonSerializer.Serialize(writer, value.WorkspaceId, options); + } + + if (value?.UserProvidedDcrImmutableId is true) + { + writer.WritePropertyName("dcr-immutable-id"); + JsonSerializer.Serialize(writer, value.DcrImmutableId, options); + } + + if (value?.UserProvidedDceEndpoint is true) + { + writer.WritePropertyName("dce-endpoint"); + JsonSerializer.Serialize(writer, value.DceEndpoint, options); + } + + writer.WriteEndObject(); + } +} diff --git a/src/Config/Converters/AzureLogAnalyticsOptionsConverterFactory.cs b/src/Config/Converters/AzureLogAnalyticsOptionsConverterFactory.cs new file mode 100644 index 0000000000..c327796ad6 --- /dev/null +++ b/src/Config/Converters/AzureLogAnalyticsOptionsConverterFactory.cs @@ -0,0 +1,168 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json; +using System.Text.Json.Serialization; +using Azure.DataApiBuilder.Config.ObjectModel; + +namespace Azure.DataApiBuilder.Config.Converters; + +/// +/// Defines how DAB reads and writes Azure Log Analytics options. +/// +internal class AzureLogAnalyticsOptionsConverterFactory : JsonConverterFactory +{ + // Determines whether to replace environment variable with its + // value or not while deserializing. + private bool _replaceEnvVar; + + /// + public override bool CanConvert(Type typeToConvert) + { + return typeToConvert.IsAssignableTo(typeof(AzureLogAnalyticsOptions)); + } + + /// + public override JsonConverter? 
CreateConverter(Type typeToConvert, JsonSerializerOptions options) + { + return new AzureLogAnalyticsOptionsConverter(_replaceEnvVar); + } + + /// Whether to replace environment variable with its + /// value or not while deserializing. + internal AzureLogAnalyticsOptionsConverterFactory(bool replaceEnvVar) + { + _replaceEnvVar = replaceEnvVar; + } + + private class AzureLogAnalyticsOptionsConverter : JsonConverter + { + // Determines whether to replace environment variable with its + // value or not while deserializing. + private bool _replaceEnvVar; + + /// Whether to replace environment variable with its + /// value or not while deserializing. + internal AzureLogAnalyticsOptionsConverter(bool replaceEnvVar) + { + _replaceEnvVar = replaceEnvVar; + } + + /// + /// Defines how DAB reads Azure Log Analytics options and defines which values are + /// used to instantiate AzureLogAnalyticsOptions. + /// + /// Thrown when improperly formatted Azure Log Analytics options are provided. + public override AzureLogAnalyticsOptions? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + if (reader.TokenType is JsonTokenType.StartObject) + { + AzureLogAnalyticsAuthOptionsConverter authOptionsConverter = new(_replaceEnvVar); + + bool? enabled = null; + AzureLogAnalyticsAuthOptions? auth = null; + string? logType = null; + int? flushIntervalSeconds = null; + + while (reader.Read()) + { + if (reader.TokenType == JsonTokenType.EndObject) + { + return new AzureLogAnalyticsOptions(enabled, auth, logType, flushIntervalSeconds); + } + + string? 
propertyName = reader.GetString(); + + reader.Read(); + switch (propertyName) + { + case "enabled": + if (reader.TokenType is not JsonTokenType.Null) + { + enabled = reader.GetBoolean(); + } + + break; + + case "auth": + auth = authOptionsConverter.Read(ref reader, typeToConvert, options); + break; + + case "log-type": + if (reader.TokenType is not JsonTokenType.Null) + { + logType = reader.DeserializeString(_replaceEnvVar); + } + + break; + + case "flush-interval-seconds": + if (reader.TokenType is not JsonTokenType.Null) + { + try + { + flushIntervalSeconds = reader.GetInt32(); + } + catch (FormatException) + { + throw new JsonException($"The JSON token value is of the incorrect numeric format."); + } + + if (flushIntervalSeconds <= 0) + { + throw new JsonException($"Invalid flush-interval-seconds: {flushIntervalSeconds}. Specify a number > 0."); + } + } + + break; + + default: + throw new JsonException($"Unexpected property {propertyName}"); + } + } + } + + throw new JsonException("Failed to read the Azure Log Analytics Options"); + } + + /// + /// When writing the AzureLogAnalyticsOptions back to a JSON file, only write the properties + /// if they are user provided. This avoids polluting the written JSON file with properties + /// the user most likely omitted when writing the original DAB runtime config file. + /// This Write operation is only used when a RuntimeConfig object is serialized to JSON. + /// + public override void Write(Utf8JsonWriter writer, AzureLogAnalyticsOptions value, JsonSerializerOptions options) + { + writer.WriteStartObject(); + + if (value?.UserProvidedEnabled is true) + { + writer.WritePropertyName("enabled"); + JsonSerializer.Serialize(writer, value.Enabled, options); + } + + if (value?.Auth is not null) + { + AzureLogAnalyticsAuthOptionsConverter authOptionsConverter = options.GetConverter(typeof(AzureLogAnalyticsAuthOptions)) as AzureLogAnalyticsAuthOptionsConverter ?? 
+ throw new JsonException("Failed to get azure-log-analytics.auth options converter"); + + writer.WritePropertyName("auth"); + authOptionsConverter.Write(writer, value.Auth, options); + } + + if (value?.UserProvidedLogType is true) + { + writer.WritePropertyName("log-type"); + JsonSerializer.Serialize(writer, value.LogType, options); + } + + if (value?.UserProvidedFlushIntervalSeconds is true) + { + writer.WritePropertyName("flush-interval-seconds"); + JsonSerializer.Serialize(writer, value.FlushIntervalSeconds, options); + } + + writer.WriteEndObject(); + } + } +} diff --git a/src/Config/ObjectModel/AzureLogAnalyticsAuthOptions.cs b/src/Config/ObjectModel/AzureLogAnalyticsAuthOptions.cs new file mode 100644 index 0000000000..cc8ed9dffa --- /dev/null +++ b/src/Config/ObjectModel/AzureLogAnalyticsAuthOptions.cs @@ -0,0 +1,80 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Serialization; + +namespace Azure.DataApiBuilder.Config.ObjectModel; + +/// +/// Represents the authentication options for Azure Log Analytics. +/// +public record AzureLogAnalyticsAuthOptions +{ + /// + /// Whether Azure Log Analytics is enabled. + /// + public string? WorkspaceId { get; init; } + + /// + /// Authentication options for Azure Log Analytics. + /// + public string? DcrImmutableId { get; init; } + + /// + /// Custom log table name in Log Analytics. + /// + public string? DceEndpoint { get; init; } + + [JsonConstructor] + public AzureLogAnalyticsAuthOptions(string? workspaceId = null, string? dcrImmutableId = null, string? 
dceEndpoint = null) + { + if (workspaceId is not null) + { + WorkspaceId = workspaceId; + UserProvidedWorkspaceId = true; + } + + if (dcrImmutableId is not null) + { + DcrImmutableId = dcrImmutableId; + UserProvidedDcrImmutableId = true; + } + + if (dceEndpoint is not null) + { + DceEndpoint = dceEndpoint; + UserProvidedDceEndpoint = true; + } + } + + /// + /// Flag which informs CLI and JSON serializer whether to write workspace-id + /// property and value to the runtime config file. + /// When user doesn't provide the workspace-id property/value, which signals DAB to not write anything, + /// the DAB CLI should not write the current value to a serialized config. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(WorkspaceId))] + public bool UserProvidedWorkspaceId { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write dcr-immutable-id + /// property and value to the runtime config file. + /// When user doesn't provide the dcr-immutable-id property/value, which signals DAB to not write anything, + /// the DAB CLI should not write the current value to a serialized config. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(DcrImmutableId))] + public bool UserProvidedDcrImmutableId { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write dce-endpoint + /// property and value to the runtime config file. + /// When user doesn't provide the dce-endpoint property/value, which signals DAB to not write anything, + /// the DAB CLI should not write the current value to a serialized config. 
+ /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(DceEndpoint))] + public bool UserProvidedDceEndpoint { get; init; } = false; +} diff --git a/src/Config/ObjectModel/AzureLogAnalyticsOptions.cs b/src/Config/ObjectModel/AzureLogAnalyticsOptions.cs new file mode 100644 index 0000000000..9ba7a09bbd --- /dev/null +++ b/src/Config/ObjectModel/AzureLogAnalyticsOptions.cs @@ -0,0 +1,116 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Serialization; + +namespace Azure.DataApiBuilder.Config.ObjectModel; + +/// +/// Represents the options for configuring Azure Log Analytics. +/// Properties are nullable to support DAB CLI merge config +/// expected behavior. +/// +public record AzureLogAnalyticsOptions +{ + /// + /// Default enabled for Azure Log Analytics. + /// + public const bool DEFAULT_ENABLED = false; + + /// + /// Default log type for Azure Log Analytics. + /// + public const string DEFAULT_LOG_TYPE = "DabLogs"; + + /// + /// Default flush interval in seconds. + /// + public const int DEFAULT_FLUSH_INTERVAL_SECONDS = 5; + + /// + /// Whether Azure Log Analytics is enabled. + /// + public bool Enabled { get; init; } + + /// + /// Authentication options for Azure Log Analytics. + /// + public AzureLogAnalyticsAuthOptions? Auth { get; init; } + + /// + /// Custom log table name in Log Analytics. + /// + public string? LogType { get; init; } + + /// + /// Interval between log batch pushes (in seconds). + /// + public int? FlushIntervalSeconds { get; init; } + + [JsonConstructor] + public AzureLogAnalyticsOptions(bool? enabled = null, AzureLogAnalyticsAuthOptions? auth = null, string? logType = null, int? 
flushIntervalSeconds = null) + { + Auth = auth; + + if (enabled is not null) + { + Enabled = (bool)enabled; + UserProvidedEnabled = true; + } + else + { + Enabled = DEFAULT_ENABLED; + } + + if (logType is not null) + { + LogType = logType; + UserProvidedLogType = true; + } + else + { + LogType = DEFAULT_LOG_TYPE; + } + + if (flushIntervalSeconds is not null) + { + FlushIntervalSeconds = flushIntervalSeconds; + UserProvidedFlushIntervalSeconds = true; + } + else + { + FlushIntervalSeconds = DEFAULT_FLUSH_INTERVAL_SECONDS; + } + } + + /// + /// Flag which informs CLI and JSON serializer whether to write enabled + /// property and value to the runtime config file. + /// When user doesn't provide the enabled property/value, which signals DAB to use the default, + /// the DAB CLI should not write the default value to a serialized config. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(Enabled))] + public bool UserProvidedEnabled { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write log-type + /// property and value to the runtime config file. + /// When user doesn't provide the log-type property/value, which signals DAB to use the default, + /// the DAB CLI should not write the default value to a serialized config. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(LogType))] + public bool UserProvidedLogType { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write flush-interval-seconds + /// property and value to the runtime config file. + /// When user doesn't provide the flush-interval-seconds property/value, which signals DAB to use the default, + /// the DAB CLI should not write the default value to a serialized config. 
+ /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(FlushIntervalSeconds))] + public bool UserProvidedFlushIntervalSeconds { get; init; } = false; +} diff --git a/src/Config/ObjectModel/TelemetryOptions.cs b/src/Config/ObjectModel/TelemetryOptions.cs index ed2099f2a4..157b0d03b2 100644 --- a/src/Config/ObjectModel/TelemetryOptions.cs +++ b/src/Config/ObjectModel/TelemetryOptions.cs @@ -9,7 +9,11 @@ namespace Azure.DataApiBuilder.Config.ObjectModel; /// /// Represents the options for telemetry. /// -public record TelemetryOptions(ApplicationInsightsOptions? ApplicationInsights = null, OpenTelemetryOptions? OpenTelemetry = null, Dictionary? LoggerLevel = null) +public record TelemetryOptions( + ApplicationInsightsOptions? ApplicationInsights = null, + OpenTelemetryOptions? OpenTelemetry = null, + AzureLogAnalyticsOptions? AzureLogAnalytics = null, + Dictionary? LoggerLevel = null) { [JsonPropertyName("log-level")] public Dictionary? LoggerLevel { get; init; } = LoggerLevel; diff --git a/src/Config/RuntimeConfigLoader.cs b/src/Config/RuntimeConfigLoader.cs index 462c08f3a8..84f8a8b723 100644 --- a/src/Config/RuntimeConfigLoader.cs +++ b/src/Config/RuntimeConfigLoader.cs @@ -259,6 +259,8 @@ public static JsonSerializerOptions GetSerializationOptions( options.Converters.Add(new DataSourceConverterFactory(replaceEnvVar)); options.Converters.Add(new HostOptionsConvertorFactory()); options.Converters.Add(new AKVRetryPolicyOptionsConverterFactory(replaceEnvVar)); + options.Converters.Add(new AzureLogAnalyticsOptionsConverterFactory(replaceEnvVar)); + options.Converters.Add(new AzureLogAnalyticsAuthOptionsConverter(replaceEnvVar)); if (replaceEnvVar) { diff --git a/src/Core/Configurations/RuntimeConfigValidator.cs b/src/Core/Configurations/RuntimeConfigValidator.cs index 6ae7051563..5edce8af90 100644 --- a/src/Core/Configurations/RuntimeConfigValidator.cs +++ b/src/Core/Configurations/RuntimeConfigValidator.cs @@ -81,6 +81,7 @@ 
public void ValidateConfigProperties() ValidateGlobalEndpointRouteConfig(runtimeConfig); ValidateAppInsightsTelemetryConnectionString(runtimeConfig); ValidateLoggerFilters(runtimeConfig); + ValidateAzureLogAnalyticsAuth(runtimeConfig); // Running these graphQL validations only in development mode to ensure // fast startup of engine in production mode. @@ -156,6 +157,26 @@ public static void ValidateLoggerFilters(RuntimeConfig runtimeConfig) } } + /// + /// The auth options in Azure Log Analytics are required if it is enabled. + /// + public void ValidateAzureLogAnalyticsAuth(RuntimeConfig runtimeConfig) + { + if (runtimeConfig.Runtime!.Telemetry is not null && runtimeConfig.Runtime.Telemetry.AzureLogAnalytics is not null) + { + AzureLogAnalyticsOptions azureLogAnalyticsOptions = runtimeConfig.Runtime.Telemetry.AzureLogAnalytics; + AzureLogAnalyticsAuthOptions? azureLogAnalyticsAuthOptions = azureLogAnalyticsOptions.Auth; + if (azureLogAnalyticsOptions.Enabled && (azureLogAnalyticsAuthOptions is null || string.IsNullOrWhiteSpace(azureLogAnalyticsAuthOptions.WorkspaceId) || + string.IsNullOrWhiteSpace(azureLogAnalyticsAuthOptions.DcrImmutableId) || string.IsNullOrWhiteSpace(azureLogAnalyticsAuthOptions.DceEndpoint))) + { + HandleOrRecordException(new DataApiBuilderException( + message: "Azure Log Analytics Auth options 'workspace-id', 'dcr-immutable-id', and 'dce-endpoint' cannot be null or empty if enabled.", + statusCode: HttpStatusCode.ServiceUnavailable, + subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError)); + } + } + } + /// /// This method runs several validations against the config file such as schema validation, /// validation of entities metadata, validation of permissions, validation of entity configuration. 
diff --git a/src/Service.Tests/Configuration/ConfigurationTests.cs b/src/Service.Tests/Configuration/ConfigurationTests.cs index 73be078259..516ad7b917 100644 --- a/src/Service.Tests/Configuration/ConfigurationTests.cs +++ b/src/Service.Tests/Configuration/ConfigurationTests.cs @@ -4065,6 +4065,132 @@ private static RuntimeConfig InitializeRuntimeWithLogLevel(Dictionary + /// Tests different Azure Log Analytics values to see if they are serialized and deserialized correctly to the Json config + /// + [DataTestMethod] + [TestCategory(TestCategory.MSSQL)] + [DataRow(true, "WorkspaceId", "DcrImmutableId", "DceEndpoint", "TestDabLog", 1, true, "TestDabLog", 1)] + [DataRow(false, "", null, "", "", 10, false, "", 10)] + [DataRow(null, null, null, null, null, null, false, "DabLogs", 5)] + public void AzureLogAnalyticsSerialization( + bool? enabled, + string? workspaceId, + string? dcrImmutableId, + string? dceEndpoint, + string? logType, + int? flushIntSec, + bool expectedEnabled, + string expectedLogType, + int expectedFlushIntSec) + { + //Check if auth property and its values are expected to exist + bool expectedExistEnabled = enabled is not null; + bool expectedExistLogType = logType is not null; + bool expectedExistFlushIntSec = flushIntSec is not null; + bool expectedExistWorkspaceId = workspaceId is not null; + bool expectedExistDcrImmutableId = dcrImmutableId is not null; + bool expectedExistDceEndpoint = dceEndpoint is not null; + + AzureLogAnalyticsAuthOptions authOptions = new(workspaceId, dcrImmutableId, dceEndpoint); + AzureLogAnalyticsOptions azureLogAnalyticsOptions = new(enabled, authOptions, logType, flushIntSec); + RuntimeConfig configWithCustomLogLevel = InitializeRuntimeWithAzureLogAnalytics(azureLogAnalyticsOptions); + string configWithCustomLogLevelJson = configWithCustomLogLevel.ToJson(); + Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(configWithCustomLogLevelJson, out RuntimeConfig? 
deserializedRuntimeConfig)); + + string serializedConfig = deserializedRuntimeConfig.ToJson(); + + using (JsonDocument parsedDocument = JsonDocument.Parse(serializedConfig)) + { + JsonElement root = parsedDocument.RootElement; + JsonElement runtimeElement = root.GetProperty("runtime"); + + //Validate azure-log-analytics property exists in runtime + JsonElement telemetryElement = runtimeElement.GetProperty("telemetry"); + bool azureLogAnalyticsPropertyExists = telemetryElement.TryGetProperty("azure-log-analytics", out JsonElement azureLogAnalyticsElement); + Assert.AreEqual(expected: true, actual: azureLogAnalyticsPropertyExists); + + //Validate the values inside the azure-log-analytics properties are of expected value + bool enabledExists = azureLogAnalyticsElement.TryGetProperty("enabled", out JsonElement enabledElement); + Assert.AreEqual(expected: expectedExistEnabled, actual: enabledExists); + if (enabledExists) + { + Assert.AreEqual(expectedEnabled, enabledElement.GetBoolean()); + } + + bool logTypeExists = azureLogAnalyticsElement.TryGetProperty("log-type", out JsonElement logTypeElement); + Assert.AreEqual(expected: expectedExistLogType, actual: logTypeExists); + if (logTypeExists) + { + Assert.AreEqual(expectedLogType, logTypeElement.GetString()); + } + + bool flushIntSecExists = azureLogAnalyticsElement.TryGetProperty("flush-interval-seconds", out JsonElement flushIntSecElement); + Assert.AreEqual(expected: expectedExistFlushIntSec, actual: flushIntSecExists); + if (flushIntSecExists) + { + Assert.AreEqual(expectedFlushIntSec, flushIntSecElement.GetInt32()); + } + + //Validate auth property exists inside of azure-log-analytics + bool authExists = azureLogAnalyticsElement.TryGetProperty("auth", out JsonElement authElement); + + //Validate the values inside the auth properties are of expected value + if (authExists) + { + bool workspaceIdExists = authElement.TryGetProperty("workspace-id", out JsonElement workspaceIdElement); + 
Assert.AreEqual(expectedExistWorkspaceId, workspaceIdExists); + if (workspaceIdExists) + { + Assert.AreEqual(expected: workspaceId, workspaceIdElement.GetString()); + } + + bool dcrImmutableIdExists = authElement.TryGetProperty("dcr-immutable-id", out JsonElement dcrImmutableIdElement); + Assert.AreEqual(expectedExistDcrImmutableId, dcrImmutableIdExists); + if (dcrImmutableIdExists) + { + Assert.AreEqual(expected: dcrImmutableId, dcrImmutableIdElement.GetString()); + } + + bool dceEndpointExists = authElement.TryGetProperty("dce-endpoint", out JsonElement dceEndpointElement); + Assert.AreEqual(expectedExistDceEndpoint, dceEndpointExists); + if (dceEndpointExists) + { + Assert.AreEqual(expected: dceEndpoint, dceEndpointElement.GetString()); + } + } + } + } + +#nullable disable + + /// + /// Helper method to create RuntimeConfig with specificed LogLevel value + /// + private static RuntimeConfig InitializeRuntimeWithAzureLogAnalytics(AzureLogAnalyticsOptions azureLogAnalyticsOptions) + { + TestHelper.SetupDatabaseEnvironment(MSSQL_ENVIRONMENT); + + FileSystemRuntimeConfigLoader baseLoader = TestHelper.GetRuntimeConfigLoader(); + baseLoader.TryLoadKnownConfig(out RuntimeConfig baseConfig); + + RuntimeConfig config = new( + Schema: baseConfig.Schema, + DataSource: baseConfig.DataSource, + Runtime: new( + Rest: new(), + GraphQL: new(), + Host: new(null, null), + Telemetry: new(AzureLogAnalytics: azureLogAnalyticsOptions) + ), + Entities: baseConfig.Entities + ); + + return config; + } + /// /// Validates the OpenAPI documentor behavior when enabling and disabling the global REST endpoint /// for the DAB engine. 
diff --git a/src/Service.Tests/UnitTests/RuntimeConfigLoaderJsonDeserializerTests.cs b/src/Service.Tests/UnitTests/RuntimeConfigLoaderJsonDeserializerTests.cs index d933fa827d..a7aaf21508 100644 --- a/src/Service.Tests/UnitTests/RuntimeConfigLoaderJsonDeserializerTests.cs +++ b/src/Service.Tests/UnitTests/RuntimeConfigLoaderJsonDeserializerTests.cs @@ -273,7 +273,7 @@ public void TestNullableOptionalProps() TryParseAndAssertOnDefaults("{" + emptyHostSubProps, out _); // Test with empty telemetry sub-properties - minJsonWithTelemetrySubProps.Append(@"{ ""application-insights"": { }, ""log-level"": { } } }"); + minJsonWithTelemetrySubProps.Append(@"{ ""application-insights"": { }, ""log-level"": { }, ""open-telemetry"": { }, ""azure-log-analytics"": { } } }"); string emptyTelemetrySubProps = minJsonWithTelemetrySubProps + "}"; TryParseAndAssertOnDefaults("{" + emptyTelemetrySubProps, out _); @@ -648,6 +648,10 @@ private static bool TryParseAndAssertOnDefaults(string json, out RuntimeConfig p Assert.IsTrue(parsedConfig.IsLogLevelNull()); Assert.IsTrue(parsedConfig.Runtime?.Telemetry?.ApplicationInsights is null || !parsedConfig.Runtime.Telemetry.ApplicationInsights.Enabled); + Assert.IsTrue(parsedConfig.Runtime?.Telemetry?.OpenTelemetry is null + || !parsedConfig.Runtime.Telemetry.OpenTelemetry.Enabled); + Assert.IsTrue(parsedConfig.Runtime?.Telemetry?.AzureLogAnalytics is null + || !parsedConfig.Runtime.Telemetry.AzureLogAnalytics.Enabled); return true; } diff --git a/src/Service/Startup.cs b/src/Service/Startup.cs index d8cb218b75..b417317aae 100644 --- a/src/Service/Startup.cs +++ b/src/Service/Startup.cs @@ -72,6 +72,7 @@ public class Startup(IConfiguration configuration, ILogger logger) public static ApplicationInsightsOptions AppInsightsOptions = new(); public static OpenTelemetryOptions OpenTelemetryOptions = new(); + public static AzureLogAnalyticsOptions AzureLogAnalyticsOptions = new(); public const string NO_HTTPS_REDIRECT_FLAG = "--no-https-redirect"; 
private readonly HotReloadEventHandler _hotReloadEventHandler = new(); private RuntimeConfigProvider? _configProvider; @@ -533,6 +534,7 @@ public void Configure(IApplicationBuilder app, IWebHostEnvironment env, RuntimeC // Configure Application Insights Telemetry ConfigureApplicationInsightsTelemetry(app, runtimeConfig); ConfigureOpenTelemetry(runtimeConfig); + ConfigureAzureLogAnalytics(runtimeConfig); // Config provided before starting the engine. isRuntimeReady = PerformOnConfigChangeAsync(app).Result; @@ -858,7 +860,6 @@ private void ConfigureApplicationInsightsTelemetry(IApplicationBuilder app, Runt /// is enabled, we can track different events and metrics. /// /// The provider used to load runtime configuration. - /// private void ConfigureOpenTelemetry(RuntimeConfig runtimeConfig) { if (runtimeConfig?.Runtime?.Telemetry is not null @@ -868,7 +869,7 @@ private void ConfigureOpenTelemetry(RuntimeConfig runtimeConfig) if (!OpenTelemetryOptions.Enabled) { - _logger.LogInformation("Open Telemetry are disabled."); + _logger.LogInformation("Open Telemetry is disabled."); return; } @@ -884,6 +885,30 @@ private void ConfigureOpenTelemetry(RuntimeConfig runtimeConfig) } } + /// + /// Configure Azure Log Analytics based on the loaded runtime configuration. If Azure Log Analytics + /// is enabled, we can track different events and metrics. + /// + /// The provider used to load runtime configuration. + private void ConfigureAzureLogAnalytics(RuntimeConfig runtimeConfig) + { + if (runtimeConfig?.Runtime?.Telemetry is not null + && runtimeConfig.Runtime.Telemetry.AzureLogAnalytics is not null) + { + AzureLogAnalyticsOptions = runtimeConfig.Runtime.Telemetry.AzureLogAnalytics; + + if (!AzureLogAnalyticsOptions.Enabled) + { + _logger.LogInformation("Azure Log Analytics is disabled."); + return; + } + + // Updating Startup Logger to Log from Startup Class. + ILoggerFactory? 
loggerFactory = Program.GetLoggerFactoryForLogLevel(MinimumLogLevel); + _logger = loggerFactory.CreateLogger(); + } + } + /// /// Sets Static Web Apps EasyAuth as the authentication scheme for the engine. /// From 58fefcb1c94290f899b838df16b5132b5be2dab5 Mon Sep 17 00:00:00 2001 From: souvikghosh04 Date: Fri, 18 Jul 2025 12:10:59 +0530 Subject: [PATCH 39/79] Platform agnostic port resolution for health endpoint (#2757) ## Why make this change? - Closes on #2765 To robustly determine the internal port used by the application for self-referential HTTP calls (such as health checks), especially in containerized and reverse-proxy scenarios (e.g., Azure Container Apps), without relying on the ASP.NET Core Forwarded Headers middleware. - Environment-agnostic: Works in Azure Container Apps, AKS, ACI, on-prem, and other containerized environments without requiring special middleware configuration. - Resilient: Handles a variety of deployment scenarios and ingress/proxy setups. - No security risk: Does not trust forwarded headers globally, reducing the risk of header spoofing. ## What is this change? - Refactored ResolveInternalPort() to robustly determine the correct port for internal HTTP calls by: - Parsing the ASPNETCORE_URLS environment variable for the first HTTP port, including support for wildcard bindings like http://+:1234 or http://*:8080. - Falling back to port 5000 if no valid port is found. - No longer relying on X-Forwarded-Port or Host headers, which are not trustworthy for internal routing. - Updated the health check HttpClient to always use http://localhost: for internal calls, ensuring reliability across all environments. - Updated HTTPS redirection logic to exclude /health (and /graphql if needed) from redirection, allowing internal HTTP health checks to succeed without being redirected. - No changes are required to middleware configuration for this logic to work. ## How was this tested? 
- [x] Integration Tests - [x] Unit Tests ## Sample Request(s) `GET /health` --------- Co-authored-by: Aniruddh Munde --- .../UnitTests/PortResolutionHelperTests.cs | 133 ++++++++++++++++++ src/Service/Startup.cs | 109 +++----------- src/Service/Utilities/PortResolutionHelper.cs | 117 +++++++++++++++ 3 files changed, 272 insertions(+), 87 deletions(-) create mode 100644 src/Service.Tests/UnitTests/PortResolutionHelperTests.cs create mode 100644 src/Service/Utilities/PortResolutionHelper.cs diff --git a/src/Service.Tests/UnitTests/PortResolutionHelperTests.cs b/src/Service.Tests/UnitTests/PortResolutionHelperTests.cs new file mode 100644 index 0000000000..8a2d38d3be --- /dev/null +++ b/src/Service.Tests/UnitTests/PortResolutionHelperTests.cs @@ -0,0 +1,133 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using Azure.DataApiBuilder.Service.Utilities; +using Microsoft.VisualStudio.TestTools.UnitTesting; + +namespace Azure.DataApiBuilder.Service.Tests.UnitTests +{ + /// + /// Tests for the class, which resolves the internal port used by the application + /// + [TestClass] + public class PortResolutionHelperTests + { + /// + /// Tests the method to ensure it resolves the correct + /// port. + /// + /// This test method sets the "ASPNETCORE_URLS" environment variable to various test + /// cases and verifies that the method returns the + /// expected port. It handles different URL formats and edge cases, including null or invalid inputs. + /// A string representing the ASP.NET Core URLs to be tested. + /// The expected port number that should be resolved. 
+ [DataTestMethod] + [DataRow("http://localhost:5000", 5000)] + [DataRow("https://localhost:443", 443)] + [DataRow("http://+:1234", 1234)] + [DataRow("https://*:8443", 8443)] + [DataRow("http://localhost:5000;https://localhost:443", 5000)] + [DataRow("https://localhost:443;http://localhost:5000", 5000)] + [DataRow("http://localhost:5000,https://localhost:443", 5000)] + [DataRow(null, 5000)] + [DataRow("", 5000)] + [DataRow("http://localhost", 80)] + [DataRow("https://localhost", 443)] + [DataRow("http://[::1]:5000", 5000)] + [DataRow("http://localhost;https://localhost:8443", 80)] + [DataRow("https://localhost:8443;https://localhost:9443", 8443)] + [DataRow("invalid;http://localhost:5000", 5000)] + [DataRow("http://localhost:5000;invalid", 5000)] + [DataRow("http://+:", 5000)] + [DataRow("https://localhost:5001;http://localhost:5000", 5000)] + [DataRow("https://localhost:5001;https://localhost:5002", 5001)] + public void ResolveInternalPortResolvesCorrectPortPositiveTest(string aspnetcoreUrls, int expectedPort) + { + TestPortResolution(aspnetcoreUrls, null, expectedPort); + } + + /// + /// Tests that the method uses the "DEFAULT_PORT" + /// environment variable when the "ASPNETCORE_URLS" environment variable is not set. + /// + /// This test sets the "DEFAULT_PORT" environment variable to "4321" and verifies that + /// returns this value. It ensures that the method + /// correctly defaults to using "DEFAULT_PORT" when "ASPNETCORE_URLS" is null. + [TestMethod] + public void ResolveInternalPortUsesDefaultPortEnvVarTest() + { + TestPortResolution(null, "4321", 4321); + } + + /// + /// Tests that the method uses the default port when the + /// environment variable ASPNETCORE_URLS is set to invalid values. + /// + /// This test sets the ASPNETCORE_URLS environment variable to invalid URLs and + /// the DEFAULT_PORT environment variable to a valid port number. It verifies that correctly falls back to using the default port specified + /// by DEFAULT_PORT. 
+ [TestMethod] + public void ResolveInternalPortUsesDefaultPortWhenUrlsAreInvalidTest() + { + TestPortResolution("invalid-url;another-invalid", "4321", 4321); + } + + /// + /// Tests that the method falls back to the default port + /// when the DEFAULT_PORT environment variable is set to a non-numeric value. + /// + /// This test sets the DEFAULT_PORT environment variable to an invalid value and + /// verifies that correctly falls back to using + /// the default port of 5000 when the DEFAULT_PORT cannot be parsed as a valid integer. + [TestMethod] + public void ResolveInternalPortFallsBackToDefaultWhenDefaultPortIsInvalidTest() + { + TestPortResolution(null, "abc", 5000); + } + + /// + /// Negative tests for the method. + /// + /// A string representing the ASP.NET Core URLs to be tested. + /// The expected port number that should be resolved. + [DataTestMethod] + [DataRow("http://localhost:5000 https://localhost:443", 5000)] // space invalid, falls back to default + [DataRow("http://localhost:5000|https://localhost:443", 5000)] // invalid delimiter, falls back to default + [DataRow("localhost:5000", 5000)] // missing scheme: fallback to default + [DataRow("http://:", 5000)] // incomplete URL: fallback to default + [DataRow("ftp://localhost:21", 5000)] // unsupported scheme: fallback to default + [DataRow("http://unix:/var/run/app.sock", 80)] // unix socket: defaults to 80 (no port specified) + [DataRow("http://unix:var/run/app.sock", 5000)] // malformed unix socket: fallback to default + [DataRow("http://unix:", 80)] // incomplete unix socket: defaults to 80 + public void ResolveInternalPortResolvesCorrectPortNegativeTest(string aspnetcoreUrls, int expectedPort) + { + TestPortResolution(aspnetcoreUrls, null, expectedPort); + } + + /// + /// Helper method to test port resolution with environment variables. + /// + /// The ASPNETCORE_URLS environment variable value to set. + /// The DEFAULT_PORT environment variable value to set. 
+ /// The expected port number that should be resolved. + private static void TestPortResolution(string aspnetcoreUrls, string defaultPort, int expectedPort) + { + string originalUrls = Environment.GetEnvironmentVariable("ASPNETCORE_URLS"); + string originalDefaultPort = Environment.GetEnvironmentVariable("DEFAULT_PORT"); + Environment.SetEnvironmentVariable("ASPNETCORE_URLS", aspnetcoreUrls); + Environment.SetEnvironmentVariable("DEFAULT_PORT", defaultPort); + try + { + int port = PortResolutionHelper.ResolveInternalPort(); + Assert.AreEqual(expectedPort, port); + } + finally + { + Environment.SetEnvironmentVariable("ASPNETCORE_URLS", originalUrls); + Environment.SetEnvironmentVariable("DEFAULT_PORT", originalDefaultPort); + } + } + } +} diff --git a/src/Service/Startup.cs b/src/Service/Startup.cs index b417317aae..e21627fa05 100644 --- a/src/Service/Startup.cs +++ b/src/Service/Startup.cs @@ -28,6 +28,7 @@ using Azure.DataApiBuilder.Service.Exceptions; using Azure.DataApiBuilder.Service.HealthCheck; using Azure.DataApiBuilder.Service.Telemetry; +using Azure.DataApiBuilder.Service.Utilities; using HotChocolate; using HotChocolate.AspNetCore; using HotChocolate.Execution; @@ -49,7 +50,6 @@ using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; -using Microsoft.Extensions.Primitives; using NodaTime; using OpenTelemetry.Exporter; using OpenTelemetry.Logs; @@ -284,25 +284,9 @@ public void ConfigureServices(IServiceCollection services) services.AddHttpClient("ContextConfiguredHealthCheckClient") .ConfigureHttpClient((serviceProvider, client) => { - IHttpContextAccessor httpCtxAccessor = serviceProvider.GetRequiredService(); - HttpContext? httpContext = httpCtxAccessor.HttpContext; - string baseUri = string.Empty; - - if (httpContext is not null) - { - string scheme = httpContext.Request.Scheme; // "http" or "https" - string host = httpContext.Request.Host.Host ?? "localhost"; // e.g. 
"localhost" - int port = ResolveInternalPort(httpContext); - baseUri = $"{scheme}://{host}:{port}"; - client.BaseAddress = new Uri(baseUri); - } - else - { - // Optional fallback if ever needed in non-request scenarios - baseUri = $"http://localhost:{ResolveInternalPort()}"; - client.BaseAddress = new Uri(baseUri); - } - + int port = PortResolutionHelper.ResolveInternalPort(); + string baseUri = $"http://localhost:{port}"; + client.BaseAddress = new Uri(baseUri); _logger.LogInformation($"Configured HealthCheck HttpClient BaseAddress as: {baseUri}"); client.DefaultRequestHeaders.Accept.Clear(); client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json")); @@ -472,21 +456,21 @@ private void AddGraphQLService(IServiceCollection services, GraphQLRuntimeOption } server.AddErrorFilter(error => + { + if (error.Exception is not null) { - if (error.Exception is not null) - { - _logger.LogError(exception: error.Exception, message: "A GraphQL request execution error occurred."); - return error.WithMessage(error.Exception.Message); - } + _logger.LogError(exception: error.Exception, message: "A GraphQL request execution error occurred."); + return error.WithMessage(error.Exception.Message); + } - if (error.Code is not null) - { - _logger.LogError(message: "Error code: {errorCode}\nError message: {errorMessage}", error.Code, error.Message); - return error.WithMessage(error.Message); - } + if (error.Code is not null) + { + _logger.LogError(message: "Error code: {errorCode}\nError message: {errorMessage}", error.Code, error.Message); + return error.WithMessage(error.Message); + } - return error; - }) + return error; + }) .AddErrorFilter(error => { if (error.Exception is DataApiBuilderException thrownException) @@ -565,7 +549,12 @@ public void Configure(IApplicationBuilder app, IWebHostEnvironment env, RuntimeC if (!Program.IsHttpsRedirectionDisabled) { - app.UseHttpsRedirection(); + // Use HTTPS redirection for all endpoints except /health and 
/graphql. + // This is necessary because ContextConfiguredHealthCheckClient base URI is http://localhost:{port} for internal API calls + app.UseWhen( + context => !(context.Request.Path.StartsWithSegments("/health") || context.Request.Path.StartsWithSegments("/graphql")), + appBuilder => appBuilder.UseHttpsRedirection() + ); } // URL Rewrite middleware MUST be called prior to UseRouting(). @@ -1055,59 +1044,5 @@ public static void AddValidFilters() LoggerFilters.AddFilter(typeof(IAuthorizationResolver).FullName); LoggerFilters.AddFilter("default"); } - - /// - /// Get the internal port of the container. - /// - /// The HttpContext - /// The internal container port - private static int ResolveInternalPort(HttpContext? httpContext = null) - { - // Try X-Forwarded-Port if context is present - if (httpContext is not null && - httpContext.Request.Headers.TryGetValue("X-Forwarded-Port", out StringValues fwdPortVal) && - int.TryParse(fwdPortVal.ToString(), out int fwdPort) && - fwdPort > 0) - { - return fwdPort; - } - - // Infer scheme from context if available, else default to "http" - string scheme = httpContext?.Request.Scheme ?? "http"; - - // Check ASPNETCORE_URLS env var - string? aspnetcoreUrls = Environment.GetEnvironmentVariable("ASPNETCORE_URLS"); - - if (!string.IsNullOrWhiteSpace(aspnetcoreUrls)) - { - foreach (string part in aspnetcoreUrls.Split(new[] { ';', ',' }, StringSplitOptions.RemoveEmptyEntries)) - { - string trimmed = part.Trim(); - - // Handle wildcard format (e.g. http://+:5002) - if (trimmed.StartsWith($"{scheme}://+:", StringComparison.OrdinalIgnoreCase)) - { - int colonIndex = trimmed.LastIndexOf(':'); - if (colonIndex != -1 && - int.TryParse(trimmed.Substring(colonIndex + 1), out int wildcardPort) && - wildcardPort > 0) - { - return wildcardPort; - } - } - - // Handle standard URI format - if (trimmed.StartsWith($"{scheme}://", StringComparison.OrdinalIgnoreCase) && - Uri.TryCreate(trimmed, UriKind.Absolute, out Uri? 
uri)) - { - return uri.Port; - } - } - } - - // Fallback - return scheme.Equals("https", StringComparison.OrdinalIgnoreCase) ? 443 : 5000; - } - } } diff --git a/src/Service/Utilities/PortResolutionHelper.cs b/src/Service/Utilities/PortResolutionHelper.cs new file mode 100644 index 0000000000..c7e7f3befd --- /dev/null +++ b/src/Service/Utilities/PortResolutionHelper.cs @@ -0,0 +1,117 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; + +namespace Azure.DataApiBuilder.Service.Utilities +{ + /// + /// Provides methods to resolve the internal port for the application based on environment variables. + /// + public static class PortResolutionHelper + { + /// + /// Resolves the internal port used by the application based on environment variables and URL bindings. + /// + /// This method determines the port by checking the ASPNETCORE_URLS environment + /// variable for URL bindings. If a valid port is found in the URLs, it is returned. If no port is specified, + /// the method checks the DEFAULT_PORT environment variable for a fallback port. If neither is set, the + /// default port of 5000 is returned. + /// The resolved port number. Returns the port specified in ASPNETCORE_URLS, or the fallback port from + /// DEFAULT_PORT, or 5000 if no port is configured. + public static int ResolveInternalPort() + { + string? urls = Environment.GetEnvironmentVariable("ASPNETCORE_URLS"); + int? httpsPort = null; + + if (!string.IsNullOrWhiteSpace(urls)) + { + string[] parts = urls.Split(new[] { ';', ',' }, StringSplitOptions.RemoveEmptyEntries); + + foreach (string part in parts) + { + string trimmedPart = part.Trim(); + + // Try to parse as a valid URI first + if (Uri.TryCreate(trimmedPart, UriKind.Absolute, out Uri? 
uri) && + (uri.Scheme == Uri.UriSchemeHttp || uri.Scheme == Uri.UriSchemeHttps)) + { + if (uri.Scheme == Uri.UriSchemeHttp) + { + return uri.Port; + } + else if (uri.Scheme == Uri.UriSchemeHttps) + { + httpsPort ??= uri.Port; + } + + continue; + } + + // Handle known wildcard patterns (http/https with + or * as host) + // Example: http://+:1234 or http://*:1234 or https://+:1234 or https://*:1234 + if (trimmedPart.StartsWith("http://+:", StringComparison.OrdinalIgnoreCase) || + trimmedPart.StartsWith("http://*:", StringComparison.OrdinalIgnoreCase)) + { + string portString = trimmedPart.Substring(trimmedPart.LastIndexOf(':') + 1); + + if (int.TryParse(portString, out int port) && port > 0) + { + return port; + } + + continue; + } + + if (trimmedPart.StartsWith("https://+:", StringComparison.OrdinalIgnoreCase) || + trimmedPart.StartsWith("https://*:", StringComparison.OrdinalIgnoreCase)) + { + string portString = trimmedPart.Substring(trimmedPart.LastIndexOf(':') + 1); + + if (int.TryParse(portString, out int port) && port > 0) + { + httpsPort ??= port; + } + + continue; + } + } + } + + // If no HTTP, fallback to HTTPS port if present + if (httpsPort.HasValue) + { + return httpsPort.Value; + } + + // Check ASPNETCORE_HTTP_PORTS if ASPNETCORE_URLS is not set + string? httpPorts = Environment.GetEnvironmentVariable("ASPNETCORE_HTTP_PORTS"); + + if (!string.IsNullOrWhiteSpace(httpPorts)) + { + string[] portParts = httpPorts.Split(new[] { ';', ',' }, StringSplitOptions.RemoveEmptyEntries); + + foreach (string portPart in portParts) + { + string trimmedPort = portPart.Trim(); + + if (int.TryParse(trimmedPort, out int port) && port > 0) + { + return port; + } + } + } + + // Configurable fallback port + string? defaultPortEnv = Environment.GetEnvironmentVariable("DEFAULT_PORT"); + + if (int.TryParse(defaultPortEnv, out int defaultPort) && defaultPort > 0) + { + return defaultPort; + } + + // Default Kestrel port if not specified. 
+ return 5000; + } + } +} From 79ffe378256cbe7d6dfaad2233cd290a6d8ebf29 Mon Sep 17 00:00:00 2001 From: vadeveka <52937801+vadeveka@users.noreply.github.com> Date: Mon, 21 Jul 2025 15:52:02 -0700 Subject: [PATCH 40/79] Remove caseInsensitive filter operator added back by incorrect merge (#2780) ## Why make this change? caseInsensitive filter operator is not implemented in the resolvers leading to runtime error when used. Including it in schema seems to be an oversight. Removing it for schema generated until we have an approach defined for case insensitive comparisons with different source types Relevant issue #2280 This was already addressed in PR https://github.com/Azure/data-api-builder/pull/2607 but was readded unintentionally by https://github.com/Azure/data-api-builder/pull/2348/files#diff-f74a036f2a72f2b3f2642c66613b149c5dca7a9696859a26d4ce95863c124d9f ## What is this change? Remove caseInsensitive filter operator from schema generated ## How was this tested? - [x] Integration Tests - [x] Unit Tests --- src/Service.GraphQLBuilder/Queries/StandardQueryInputs.cs | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/Service.GraphQLBuilder/Queries/StandardQueryInputs.cs b/src/Service.GraphQLBuilder/Queries/StandardQueryInputs.cs index 5ae516831d..aa6423d55d 100644 --- a/src/Service.GraphQLBuilder/Queries/StandardQueryInputs.cs +++ b/src/Service.GraphQLBuilder/Queries/StandardQueryInputs.cs @@ -45,8 +45,6 @@ public sealed class StandardQueryInputs private static readonly StringValueNode _startsWithDescription = new("Starts With"); private static readonly NameNode _endsWith = new("endsWith"); private static readonly StringValueNode _endsWithDescription = new("Ends With"); - private static readonly NameNode _caseInsensitive = new("caseInsensitive"); - private static readonly StringValueNode _caseInsensitiveDescription = new("Case Insensitive"); private static readonly NameNode _in = new("in"); private static readonly StringValueNode _inDescription = new("In"); @@ 
-154,7 +152,6 @@ private static InputObjectTypeDefinitionNode CreateStringFilter( new(null, _startsWith, _startsWithDescription, type, null, []), new(null, _endsWith, _endsWithDescription, type, null, []), new(null, _neq, _neqDescription, type, null, []), - new(null, _caseInsensitive, _caseInsensitiveDescription, type, null, []), new(null, _isNull, _isNullDescription, _boolean, null, []), new(null, _in, _inDescription, new ListTypeNode(type), null, []) ] From 0696e0ba824b8975796c3fb3479e25776f47374c Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Fri, 25 Jul 2025 15:33:09 +0200 Subject: [PATCH 41/79] shizzle --- src/Service/dab-config.json | 51 +++---------------------------------- 1 file changed, 3 insertions(+), 48 deletions(-) diff --git a/src/Service/dab-config.json b/src/Service/dab-config.json index 4a97aa10e8..efe95b9324 100644 --- a/src/Service/dab-config.json +++ b/src/Service/dab-config.json @@ -2,7 +2,7 @@ "$schema": "https://github.com/Azure/data-api-builder/releases/latest/download/dab.draft.schema.json", "data-source": { "database-type": "mssql", - "connection-string": "Data Source=nqf4kgvoqm4ufazdzriupb2pay-hmnbvxar2mgu7e3ng27fsqy3we.database.fabric.microsoft.com,1433;User ID=8bfaf0d6-fa20-4ed5-a450-0005ceb77729;Password=z5y8Q~hLcfdAflVrfnYoVxdavIJXZb5tlH~tAbRn;Pooling=True;Min Pool Size=0;Max Pool Size=100;Multiple Active Result Sets=False;Connect Timeout=30;Encrypt=False;Trust Server Certificate=True;Authentication=ActiveDirectoryServicePrincipal;Initial Catalog=apiLayer-345587c4-1232-457e-9761-b6bca3d72e2e", + "connection-string": "Data Source=nqf4kgvoqm4ufazdzriupb2pay-doa2ptopus4ufglh5rxt3is4yi.database.fabric.microsoft.com,1433;User ID=8bfaf0d6-fa20-4ed5-a450-0005ceb77729;Password=z5y8Q~hLcfdAflVrfnYoVxdavIJXZb5tlH~tAbRn;Pooling=True;Min Pool Size=0;Max Pool Size=100;Multiple Active Result Sets=False;Connect Timeout=30;Encrypt=False;Trust Server Certificate=True;Authentication=ActiveDirectoryServicePrincipal;Initial 
Catalog=apiLayer-fad0b3db-cb87-4a73-9c54-e1fc417bc08c", "options": { "set-session-context": true } @@ -45,7 +45,7 @@ "entities": { "BillOfLading": { "source": { - "object": "silver_ops.v_BillOfLading", + "object": "silver_ops.BillOfLading", "type": "table", "key-fields": [ "systemId" @@ -110,7 +110,7 @@ }, "BillOfLadingParty": { "source": { - "object": "silver_ops.v_BillOfLadingParty", + "object": "silver_ops.BillOfLadingParty", "type": "table", "key-fields": [ "systemId" @@ -2846,51 +2846,6 @@ } ] }, - "DelayEvent": { - "source": { - "object": "silver_trk.ww_DelayEvent", - "type": "table", - "key-fields": [ - "delayEventId" - ] - }, - "graphql": { - "enabled": true, - "type": { - "singular": "DelayEvent", - "plural": "DelayEvents" - } - }, - "rest": { - "enabled": true - }, - "relationships": { - "ShipmentEquipment": { - "cardinality": "one", - "target.entity": "ShipmentEquipment", - "source.fields": [ - "shipmentEquipmentId" - ], - "target.fields": [ - "id" - ] - } - }, - "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - } - ], - "cache": { - "enabled": true, - "ttl-seconds": 120 - } - }, "TrackingEvent": { "source": { "object": "silver_trk.v_TrackingEvent", From 7b6259b78f367c596d74faa2e5335a38ffed8d2a Mon Sep 17 00:00:00 2001 From: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> Date: Wed, 30 Jul 2025 19:59:04 +0000 Subject: [PATCH 42/79] Adding 'Configure' Options to CLI for Azure Log Analytics (#2781) ## Why make this change? This change closes issue #2777 ## What is this change? This change extends the functionality of the `configure` CLI command by introducing support for Azure Log Analytics properties. With this enhancement, users can now configure the Azure Log Analytics properties inside of their config file without the need to directly edit it. This change also ensures that the validation of Azure Log Analytics works as intended. ## How was this tested? 
- [ ] Integration Tests - [X] Unit Tests ## Sample Request(s) CLI Updates Add support to dab configure: dab configure --runtime.telemetry.azure-log-analytics.enabled dab configure --runtime.telemetry.azure-log-analytics.auth.workspace-id dab configure --runtime.telemetry.azure-log-analytics.auth.dcr-immutable-id dab configure --runtime.telemetry.azure-log-analytics.auth.dce-endpoint dab configure --runtime.telemetry.azure-log-analytics.log-type dab configure --runtime.telemetry.azure-log-analytics.flush-interval-seconds --------- Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: aaronburtle <93220300+aaronburtle@users.noreply.github.com> --- src/Cli.Tests/ConfigureOptionsTests.cs | 40 +++++++ src/Cli.Tests/ValidateConfigTests.cs | 35 ++++++ src/Cli/Commands/ConfigureOptions.cs | 31 +++++ src/Cli/ConfigGenerator.cs | 110 +++++++++++++++++- .../AzureLogAnalyticsAuthOptionsConverter.cs | 1 - ...zureLogAnalyticsOptionsConverterFactory.cs | 2 +- 6 files changed, 214 insertions(+), 5 deletions(-) diff --git a/src/Cli.Tests/ConfigureOptionsTests.cs b/src/Cli.Tests/ConfigureOptionsTests.cs index b278661f55..b833240039 100644 --- a/src/Cli.Tests/ConfigureOptionsTests.cs +++ b/src/Cli.Tests/ConfigureOptionsTests.cs @@ -148,6 +148,46 @@ public void TestAddAKVOptions() Assert.AreEqual(1, config.AzureKeyVault?.RetryPolicy.NetworkTimeoutSeconds); } + /// + /// Tests that running the "configure --azure-log-analytics" commands on a config without Azure Log Analytics properties results + /// in a valid config being generated. 
+ [TestMethod] + public void TestAddAzureLogAnalyticsOptions() + { + // Arrange + _fileSystem!.AddFile(TEST_RUNTIME_CONFIG_FILE, new MockFileData(INITIAL_CONFIG)); + + Assert.IsTrue(_fileSystem!.File.Exists(TEST_RUNTIME_CONFIG_FILE)); + + // Act: Attempts to add Azure Log Analytics options + ConfigureOptions options = new( + azureLogAnalyticsEnabled: CliBool.True, + azureLogAnalyticsLogType: "log-type-test", + azureLogAnalyticsFlushIntervalSeconds: 1, + azureLogAnalyticsWorkspaceId: "workspace-id-test", + azureLogAnalyticsDcrImmutableId: "dcr-immutable-id-test", + azureLogAnalyticsDceEndpoint: "dce-endpoint-test", + config: TEST_RUNTIME_CONFIG_FILE + ); + + bool isSuccess = TryConfigureSettings(options, _runtimeConfigLoader!, _fileSystem!); + + // Assert: Validate the Azure Log Analytics options are added. + Assert.IsTrue(isSuccess); + string updatedConfig = _fileSystem!.File.ReadAllText(TEST_RUNTIME_CONFIG_FILE); + Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(updatedConfig, out RuntimeConfig? config)); + Assert.IsNotNull(config.Runtime); + Assert.IsNotNull(config.Runtime.Telemetry); + Assert.IsNotNull(config.Runtime.Telemetry.AzureLogAnalytics); + Assert.AreEqual(true, config.Runtime.Telemetry.AzureLogAnalytics.Enabled); + Assert.AreEqual("log-type-test", config.Runtime.Telemetry.AzureLogAnalytics.LogType); + Assert.AreEqual(1, config.Runtime.Telemetry.AzureLogAnalytics.FlushIntervalSeconds); + Assert.IsNotNull(config.Runtime.Telemetry.AzureLogAnalytics.Auth); + Assert.AreEqual("workspace-id-test", config.Runtime.Telemetry.AzureLogAnalytics.Auth.WorkspaceId); + Assert.AreEqual("dcr-immutable-id-test", config.Runtime.Telemetry.AzureLogAnalytics.Auth.DcrImmutableId); + Assert.AreEqual("dce-endpoint-test", config.Runtime.Telemetry.AzureLogAnalytics.Auth.DceEndpoint); + } + /// /// Tests that running "dab configure --runtime.graphql.enabled" on a config with various values results /// in runtime. 
Takes in updated value for graphql.enabled and diff --git a/src/Cli.Tests/ValidateConfigTests.cs b/src/Cli.Tests/ValidateConfigTests.cs index 6cbc4b54f1..a0fc0807e6 100644 --- a/src/Cli.Tests/ValidateConfigTests.cs +++ b/src/Cli.Tests/ValidateConfigTests.cs @@ -311,4 +311,39 @@ public async Task TestValidateAKVOptionsWithoutEndpointFails() JsonSchemaValidationResult result = await validator.ValidateConfigSchema(config, TEST_RUNTIME_CONFIG_FILE, mockLoggerFactory.Object); Assert.IsFalse(result.IsValid); } + + /// + /// Tests that validation fails when Azure Log Analytics options are configured without the Auth options. + /// + [TestMethod] + public async Task TestValidateAzureLogAnalyticsOptionsWithoutAuthFails() + { + // Arrange + _fileSystem!.AddFile(TEST_RUNTIME_CONFIG_FILE, new MockFileData(INITIAL_CONFIG)); + Assert.IsTrue(_fileSystem!.File.Exists(TEST_RUNTIME_CONFIG_FILE)); + Mock mockRuntimeConfigProvider = new(_runtimeConfigLoader); + RuntimeConfigValidator validator = new(mockRuntimeConfigProvider.Object, _fileSystem, new Mock>().Object); + Mock mockLoggerFactory = new(); + Mock> mockLogger = new(); + mockLoggerFactory + .Setup(factory => factory.CreateLogger(typeof(JsonConfigSchemaValidator).FullName!)) + .Returns(mockLogger.Object); + + // Act: Attempts to add Azure Log Analytics options without Auth options + ConfigureOptions options = new( + azureLogAnalyticsEnabled: CliBool.True, + azureLogAnalyticsLogType: "log-type-test", + azureLogAnalyticsFlushIntervalSeconds: 1, + config: TEST_RUNTIME_CONFIG_FILE + ); + + bool isSuccess = TryConfigureSettings(options, _runtimeConfigLoader!, _fileSystem!); + + // Assert: Settings are configured, config parses, validation fails. + Assert.IsTrue(isSuccess); + string updatedConfig = _fileSystem!.File.ReadAllText(TEST_RUNTIME_CONFIG_FILE); + Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(updatedConfig, out RuntimeConfig? 
config)); + JsonSchemaValidationResult result = await validator.ValidateConfigSchema(config, TEST_RUNTIME_CONFIG_FILE, mockLoggerFactory.Object); + Assert.IsFalse(result.IsValid); + } } diff --git a/src/Cli/Commands/ConfigureOptions.cs b/src/Cli/Commands/ConfigureOptions.cs index 24c9e54d8f..08c90ba8f9 100644 --- a/src/Cli/Commands/ConfigureOptions.cs +++ b/src/Cli/Commands/ConfigureOptions.cs @@ -48,6 +48,12 @@ public ConfigureOptions( int? azureKeyVaultRetryPolicyDelaySeconds = null, int? azureKeyVaultRetryPolicyMaxDelaySeconds = null, int? azureKeyVaultRetryPolicyNetworkTimeoutSeconds = null, + CliBool? azureLogAnalyticsEnabled = null, + string? azureLogAnalyticsLogType = null, + int? azureLogAnalyticsFlushIntervalSeconds = null, + string? azureLogAnalyticsWorkspaceId = null, + string? azureLogAnalyticsDcrImmutableId = null, + string? azureLogAnalyticsDceEndpoint = null, string? config = null) : base(config) { @@ -85,6 +91,13 @@ public ConfigureOptions( AzureKeyVaultRetryPolicyDelaySeconds = azureKeyVaultRetryPolicyDelaySeconds; AzureKeyVaultRetryPolicyMaxDelaySeconds = azureKeyVaultRetryPolicyMaxDelaySeconds; AzureKeyVaultRetryPolicyNetworkTimeoutSeconds = azureKeyVaultRetryPolicyNetworkTimeoutSeconds; + // Azure Log Analytics + AzureLogAnalyticsEnabled = azureLogAnalyticsEnabled; + AzureLogAnalyticsLogType = azureLogAnalyticsLogType; + AzureLogAnalyticsFlushIntervalSeconds = azureLogAnalyticsFlushIntervalSeconds; + AzureLogAnalyticsWorkspaceId = azureLogAnalyticsWorkspaceId; + AzureLogAnalyticsDcrImmutableId = azureLogAnalyticsDcrImmutableId; + AzureLogAnalyticsDceEndpoint = azureLogAnalyticsDceEndpoint; } [Option("data-source.database-type", Required = false, HelpText = "Database type. Allowed values: MSSQL, PostgreSQL, CosmosDB_NoSQL, MySQL.")] @@ -171,6 +184,24 @@ public ConfigureOptions( [Option("azure-key-vault.retry-policy.network-timeout-seconds", Required = false, HelpText = "Configure the network timeout for requests in seconds. 
Default: 60.")] public int? AzureKeyVaultRetryPolicyNetworkTimeoutSeconds { get; } + [Option("runtime.telemetry.azure-log-analytics.enabled", Default = CliBool.False, Required = false, HelpText = "Enable/Disable Azure Log Analytics.")] + public CliBool? AzureLogAnalyticsEnabled { get; } + + [Option("runtime.telemetry.azure-log-analytics.log-type", Required = false, HelpText = "Configure Log Type for Azure Log Analytics to find table to send telemetry data")] + public string? AzureLogAnalyticsLogType { get; } + + [Option("runtime.telemetry.azure-log-analytics.flush-interval-seconds", Required = false, HelpText = "Configure Flush Interval in seconds for Azure Log Analytics to specify the time interval to send the telemetry data")] + public int? AzureLogAnalyticsFlushIntervalSeconds { get; } + + [Option("runtime.telemetry.azure-log-analytics.auth.workspace-id", Required = false, HelpText = "Configure Workspace ID for Azure Log Analytics used to find workspace to connect")] + public string? AzureLogAnalyticsWorkspaceId { get; } + + [Option("runtime.telemetry.azure-log-analytics.auth.dcr-immutable-id", Required = false, HelpText = "Configure DCR Immutable ID for Azure Log Analytics to find the data collection rule that defines how data is collected")] + public string? AzureLogAnalyticsDcrImmutableId { get; } + + [Option("runtime.telemetry.azure-log-analytics.auth.dce-endpoint", Required = false, HelpText = "Configure DCE Endpoint for Azure Log Analytics to find table to send telemetry data")] + public string? 
AzureLogAnalyticsDceEndpoint { get; } + public int Handler(ILogger logger, FileSystemRuntimeConfigLoader loader, IFileSystem fileSystem) { logger.LogInformation("{productName} {version}", PRODUCT_NAME, ProductInfo.GetProductVersion()); diff --git a/src/Cli/ConfigGenerator.cs b/src/Cli/ConfigGenerator.cs index ced4649590..b2c36c12c7 100644 --- a/src/Cli/ConfigGenerator.cs +++ b/src/Cli/ConfigGenerator.cs @@ -778,6 +778,26 @@ private static bool TryUpdateConfiguredRuntimeOptions( } } + // Telemetry: Azure Log Analytics + if (options.AzureLogAnalyticsEnabled is not null || + options.AzureLogAnalyticsLogType is not null || + options.AzureLogAnalyticsFlushIntervalSeconds is not null || + options.AzureLogAnalyticsWorkspaceId is not null || + options.AzureLogAnalyticsDcrImmutableId is not null || + options.AzureLogAnalyticsDceEndpoint is not null) + { + AzureLogAnalyticsOptions updatedAzureLogAnalyticsOptions = runtimeConfig?.Runtime?.Telemetry?.AzureLogAnalytics ?? new(); + bool status = TryUpdateConfiguredAzureLogAnalyticsOptions(options, ref updatedAzureLogAnalyticsOptions); + if (status) + { + runtimeConfig = runtimeConfig! with { Runtime = runtimeConfig.Runtime! with { Telemetry = runtimeConfig.Runtime!.Telemetry is not null ? runtimeConfig.Runtime!.Telemetry with { AzureLogAnalytics = updatedAzureLogAnalyticsOptions } : new TelemetryOptions(AzureLogAnalytics: updatedAzureLogAnalyticsOptions) } }; + } + else + { + return false; + } + } + return runtimeConfig != null; } @@ -844,7 +864,7 @@ private static bool TryUpdateConfiguredRestValues(ConfigureOptions options, ref /// /// options. /// updatedGraphQLOptions. - /// True if the value needs to be udpated in the runtime config, else false + /// True if the value needs to be updated in the runtime config, else false private static bool TryUpdateConfiguredGraphQLValues( ConfigureOptions options, ref GraphQLRuntimeOptions? 
updatedGraphQLOptions) @@ -910,7 +930,7 @@ private static bool TryUpdateConfiguredGraphQLValues( /// /// options. /// updatedCacheOptions. - /// True if the value needs to be udpated in the runtime config, else false + /// True if the value needs to be updated in the runtime config, else false private static bool TryUpdateConfiguredCacheValues( ConfigureOptions options, ref RuntimeCacheOptions? updatedCacheOptions) @@ -959,7 +979,7 @@ private static bool TryUpdateConfiguredCacheValues( /// /// options. /// updatedHostOptions. - /// True if the value needs to be udpated in the runtime config, else false + /// True if the value needs to be updated in the runtime config, else false private static bool TryUpdateConfiguredHostValues( ConfigureOptions options, ref HostOptions? updatedHostOptions) @@ -1095,6 +1115,90 @@ private static bool TryUpdateConfiguredHostValues( } } + /// + /// Attempts to update the Azure Log Analytics configuration options based on the provided values. + /// Validates that any user-provided parameter value is valid and updates the runtime configuration accordingly. + /// + /// The configuration options provided by the user. + /// The Azure Log Analytics options to be updated. + /// True if the Azure Log Analytics options were successfully configured; otherwise, false. + private static bool TryUpdateConfiguredAzureLogAnalyticsOptions( + ConfigureOptions options, + ref AzureLogAnalyticsOptions azureLogAnalyticsOptions) + { + try + { + AzureLogAnalyticsAuthOptions? 
updatedAuthOptions = azureLogAnalyticsOptions.Auth; + + // Runtime.Telemetry.AzureLogAnalytics.Enabled + if (options.AzureLogAnalyticsEnabled is not null) + { + azureLogAnalyticsOptions = azureLogAnalyticsOptions with { Enabled = options.AzureLogAnalyticsEnabled is CliBool.True, UserProvidedEnabled = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.azure-log-analytics.enabled as '{options.AzureLogAnalyticsEnabled}'"); + } + + // Runtime.Telemetry.AzureLogAnalytics.LogType + if (options.AzureLogAnalyticsLogType is not null) + { + azureLogAnalyticsOptions = azureLogAnalyticsOptions with { LogType = options.AzureLogAnalyticsLogType, UserProvidedLogType = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.azure-log-analytics.log-type as '{options.AzureLogAnalyticsLogType}'"); + } + + // Runtime.Telemetry.AzureLogAnalytics.FlushIntervalSeconds + if (options.AzureLogAnalyticsFlushIntervalSeconds is not null) + { + if (options.AzureLogAnalyticsFlushIntervalSeconds <= 0) + { + _logger.LogError("Failed to update configuration with runtime.telemetry.azure-log-analytics.flush-interval-seconds. Value must be a positive integer greater than 0."); + return false; + } + + azureLogAnalyticsOptions = azureLogAnalyticsOptions with { FlushIntervalSeconds = options.AzureLogAnalyticsFlushIntervalSeconds, UserProvidedFlushIntervalSeconds = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.azure-log-analytics.flush-interval-seconds as '{options.AzureLogAnalyticsFlushIntervalSeconds}'"); + } + + // Runtime.Telemetry.AzureLogAnalytics.Auth.WorkspaceId + if (options.AzureLogAnalyticsWorkspaceId is not null) + { + updatedAuthOptions = updatedAuthOptions is not null + ? 
updatedAuthOptions with { WorkspaceId = options.AzureLogAnalyticsWorkspaceId, UserProvidedWorkspaceId = true } + : new AzureLogAnalyticsAuthOptions { WorkspaceId = options.AzureLogAnalyticsWorkspaceId, UserProvidedWorkspaceId = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.azure-log-analytics.auth.workspace-id as '{options.AzureLogAnalyticsWorkspaceId}'"); + } + + // Runtime.Telemetry.AzureLogAnalytics.Auth.DcrImmutableId + if (options.AzureLogAnalyticsDcrImmutableId is not null) + { + updatedAuthOptions = updatedAuthOptions is not null + ? updatedAuthOptions with { DcrImmutableId = options.AzureLogAnalyticsDcrImmutableId, UserProvidedDcrImmutableId = true } + : new AzureLogAnalyticsAuthOptions { DcrImmutableId = options.AzureLogAnalyticsDcrImmutableId, UserProvidedDcrImmutableId = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.azure-log-analytics.auth.dcr-immutable-id as '{options.AzureLogAnalyticsDcrImmutableId}'"); + } + + // Runtime.Telemetry.AzureLogAnalytics.Auth.DceEndpoint + if (options.AzureLogAnalyticsDceEndpoint is not null) + { + updatedAuthOptions = updatedAuthOptions is not null + ? updatedAuthOptions with { DceEndpoint = options.AzureLogAnalyticsDceEndpoint, UserProvidedDceEndpoint = true } + : new AzureLogAnalyticsAuthOptions { DceEndpoint = options.AzureLogAnalyticsDceEndpoint, UserProvidedDceEndpoint = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.azure-log-analytics.auth.dce-endpoint as '{options.AzureLogAnalyticsDceEndpoint}'"); + } + + // Update Azure Log Analytics options with Auth options if it was modified + if (updatedAuthOptions is not null) + { + azureLogAnalyticsOptions = azureLogAnalyticsOptions with { Auth = updatedAuthOptions }; + } + + return true; + } + catch (Exception ex) + { + _logger.LogError($"Failed to update configuration with runtime.telemetry.azure-log-analytics. 
Exception message: {ex.Message}."); + return false; + } + } + /// /// Parse permission string to create PermissionSetting array. /// diff --git a/src/Config/Converters/AzureLogAnalyticsAuthOptionsConverter.cs b/src/Config/Converters/AzureLogAnalyticsAuthOptionsConverter.cs index 29f30c8b95..1d790b125d 100644 --- a/src/Config/Converters/AzureLogAnalyticsAuthOptionsConverter.cs +++ b/src/Config/Converters/AzureLogAnalyticsAuthOptionsConverter.cs @@ -73,7 +73,6 @@ public AzureLogAnalyticsAuthOptionsConverter(bool replaceEnvVar) throw new JsonException($"Unexpected property {propertyName}"); } } - } throw new JsonException("Failed to read the Azure Log Analytics Auth Options"); diff --git a/src/Config/Converters/AzureLogAnalyticsOptionsConverterFactory.cs b/src/Config/Converters/AzureLogAnalyticsOptionsConverterFactory.cs index c327796ad6..0121cb73f8 100644 --- a/src/Config/Converters/AzureLogAnalyticsOptionsConverterFactory.cs +++ b/src/Config/Converters/AzureLogAnalyticsOptionsConverterFactory.cs @@ -141,7 +141,7 @@ public override void Write(Utf8JsonWriter writer, AzureLogAnalyticsOptions value JsonSerializer.Serialize(writer, value.Enabled, options); } - if (value?.Auth is not null) + if (value?.Auth is not null && (value.Auth.UserProvidedWorkspaceId || value.Auth.UserProvidedDcrImmutableId || value.Auth.UserProvidedDceEndpoint)) { AzureLogAnalyticsAuthOptionsConverter authOptionsConverter = options.GetConverter(typeof(AzureLogAnalyticsAuthOptions)) as AzureLogAnalyticsAuthOptionsConverter ?? throw new JsonException("Failed to get azure-log-analytics.auth options converter"); From 37343c0b92ec1d7dd279b603e39775cdb36e430e Mon Sep 17 00:00:00 2001 From: vadeveka <52937801+vadeveka@users.noreply.github.com> Date: Thu, 31 Jul 2025 18:07:59 -0700 Subject: [PATCH 43/79] Handle unauthorized fields in aggregation (#2790) ## Why make this change? 
Closes #2776 Ensure authorization error thrown if fields in the groupBy argument or in the aggregation function are not allowed for the current role. ## What is this change? During groupBy argument parsing, check if the field is allowed access for current role. During aggregation function argument parsing, check if the field is allowed access for current role If no access, then throw authorization error ## How was this tested? - [x] Integration Tests ## Sample Request(s) Samples from development mode (stack traces will not be show in production mode) image image --------- Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- config-generators/mssql-commands.txt | 1 + src/Config/DataApiBuilderException.cs | 2 + .../Sql Query Structures/SqlQueryStructure.cs | 35 ++++++++-- .../GraphQLAuthorizationHandlerTests.cs | 68 +++++++++++++++++++ ...tReadingRuntimeConfigForMsSql.verified.txt | 13 ++++ 5 files changed, 114 insertions(+), 5 deletions(-) diff --git a/config-generators/mssql-commands.txt b/config-generators/mssql-commands.txt index 158d4bd179..c36366df54 100644 --- a/config-generators/mssql-commands.txt +++ b/config-generators/mssql-commands.txt @@ -207,6 +207,7 @@ update BookNF --config "dab-config.MsSql.json" --permissions "TestNestedFilter_E update BookNF --config "dab-config.MsSql.json" --permissions "TestNestedFilter_ColumnForbidden:read" update BookNF --config "dab-config.MsSql.json" --permissions "TestNestedFilterChained_EntityReadForbidden:read" update BookNF --config "dab-config.MsSql.json" --permissions "TestNestedFilterChained_ColumnForbidden:read" +update BookNF --config "dab-config.MsSql.json" --permissions "TestFieldExcludedForAggregation:read" --fields.exclude "publisher_id" update BookNF --config "dab-config.MsSql.json" --relationship publishers --target.entity PublisherNF --cardinality one update BookNF --config "dab-config.MsSql.json" --relationship websiteplacement --target.entity BookWebsitePlacement --cardinality one update 
BookNF --config "dab-config.MsSql.json" --relationship reviews --target.entity Review --cardinality many diff --git a/src/Config/DataApiBuilderException.cs b/src/Config/DataApiBuilderException.cs index d322391b53..18b0395541 100644 --- a/src/Config/DataApiBuilderException.cs +++ b/src/Config/DataApiBuilderException.cs @@ -18,6 +18,8 @@ public class DataApiBuilderException : Exception public const string GRAPHQL_FILTER_FIELD_AUTHZ_FAILURE = "Access forbidden to a field referenced in the filter."; public const string AUTHORIZATION_FAILURE = "Authorization Failure: Access Not Allowed."; public const string GRAPHQL_MUTATION_FIELD_AUTHZ_FAILURE = "Unauthorized due to one or more fields in this mutation."; + public const string GRAPHQL_GROUPBY_FIELD_AUTHZ_FAILURE = "Access forbidden to field '{0}' referenced in the groupBy argument."; + public const string GRAPHQL_AGGREGATION_FIELD_AUTHZ_FAILURE = "Access forbidden to field '{0}' referenced in the aggregation function '{1}'."; public enum SubStatusCodes { diff --git a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs index cedb98a305..a0c65ae98a 100644 --- a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs +++ b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs @@ -459,7 +459,7 @@ private SqlQueryStructure( { if (isGroupByQuery) { - ProcessGroupByField(queryField, ctx); + ProcessGroupByField(queryField, ctx, authorizationResolver); } else { @@ -877,12 +877,14 @@ private void AddGraphQLFields(IReadOnlyList selections, RuntimeC /// } /// } /// - private void ProcessGroupByField(FieldNode groupByField, IMiddlewareContext ctx) + private void ProcessGroupByField(FieldNode groupByField, IMiddlewareContext ctx, IAuthorizationResolver authorizationResolver) { // Extract 'fields' argument ArgumentNode? 
fieldsArg = groupByField.Arguments.FirstOrDefault(a => a.Name.Value == QueryBuilder.GROUP_BY_FIELDS_FIELD_NAME); HashSet fieldsInArgument = new(); + string roleOfGraphQLRequest = Authorization.AuthorizationResolver.GetRoleOfGraphQLRequest(ctx); + if (fieldsArg is { Value: ListValueNode fieldsList }) { foreach (EnumValueNode value in fieldsList.Items) @@ -890,6 +892,18 @@ private void ProcessGroupByField(FieldNode groupByField, IMiddlewareContext ctx) string fieldName = value.Value; string columnName = MetadataProvider.TryGetBackingColumn(EntityName, fieldName, out string? backingColumn) ? backingColumn : fieldName; + // Validate that the current role has access to groupBy argument fields + IEnumerable roles = authorizationResolver.GetRolesForField(EntityName, field: columnName, operation: EntityActionOperation.Read); + if (roles != null && !roles.Contains(roleOfGraphQLRequest, StringComparer.OrdinalIgnoreCase)) + { + // raising exception for the first unauthorized groupBy field found + throw new DataApiBuilderException( + message: string.Format(DataApiBuilderException.GRAPHQL_GROUPBY_FIELD_AUTHZ_FAILURE, fieldName), + statusCode: HttpStatusCode.Forbidden, + subStatusCode: DataApiBuilderException.SubStatusCodes.AuthorizationCheckFailed + ); + } + GroupByMetadata.Fields[columnName] = new Column(DatabaseObject.SchemaName, DatabaseObject.Name, columnName, SourceAlias); AddColumn(fieldName, backingColumn ?? fieldName); fieldsInArgument.Add(fieldName); @@ -913,7 +927,7 @@ private void ProcessGroupByField(FieldNode groupByField, IMiddlewareContext ctx) case QueryBuilder.GROUP_BY_AGGREGATE_FIELD_NAME: GroupByMetadata.RequestedAggregations = true; - ProcessAggregations(field, ctx); + ProcessAggregations(field, ctx, authorizationResolver, roleOfGraphQLRequest); break; } } @@ -963,7 +977,7 @@ private void ProcessGroupByFieldSelections(FieldNode groupByFieldSelection, Hash /// /// The FieldNode representing the aggregations field in the GraphQL query. /// middleware context. 
- private void ProcessAggregations(FieldNode aggregationsField, IMiddlewareContext ctx) + private void ProcessAggregations(FieldNode aggregationsField, IMiddlewareContext ctx, IAuthorizationResolver authorizationResolver, string roleOfGraphQLRequest) { // If there are no selections in the aggregation field, exit early if (aggregationsField.SelectionSet == null) @@ -1010,7 +1024,18 @@ private void ProcessAggregations(FieldNode aggregationsField, IMiddlewareContext if (MetadataProvider.TryGetBackingColumn(EntityName, fieldName, out string? backingColumn)) { columnName = backingColumn; - fieldName = backingColumn; + } + + // Validate that the current role has access to field in the aggregation function argument + IEnumerable roles = authorizationResolver.GetRolesForField(EntityName, field: columnName, operation: EntityActionOperation.Read); + if (roles != null && !roles.Contains(roleOfGraphQLRequest, StringComparer.OrdinalIgnoreCase)) + { + // raising exception for the first unauthorized field found + throw new DataApiBuilderException( + message: string.Format(DataApiBuilderException.GRAPHQL_AGGREGATION_FIELD_AUTHZ_FAILURE, fieldName, operation), + statusCode: HttpStatusCode.Forbidden, + subStatusCode: DataApiBuilderException.SubStatusCodes.AuthorizationCheckFailed + ); } // Use the field alias if provided, otherwise default to the operation name diff --git a/src/Service.Tests/Authorization/GraphQL/GraphQLAuthorizationHandlerTests.cs b/src/Service.Tests/Authorization/GraphQL/GraphQLAuthorizationHandlerTests.cs index 92813ab105..a4d2fbb6d4 100644 --- a/src/Service.Tests/Authorization/GraphQL/GraphQLAuthorizationHandlerTests.cs +++ b/src/Service.Tests/Authorization/GraphQL/GraphQLAuthorizationHandlerTests.cs @@ -71,5 +71,73 @@ public async Task FieldAuthorizationProcessing(bool isAuthenticated, string clie SqlTestHelper.PerformTestEqualJsonStrings(expectedResult, actual.ToString()); } } + + /// + /// Tests that a GraphQL query with a groupBy operation on fields not 
allowed for aggregation results in an + /// appropriate error message. + /// + /// + [TestMethod] + public async Task Query_GroupBy_FieldNotAllowed() + { + string graphQLQueryName = "booksNF"; + string graphQLQuery = @"{ + booksNF { + groupBy (fields: [id, publisher_id]) { + fields { + id + publisher_id + } + } + } + } + "; + + JsonElement actual = await ExecuteGraphQLRequestAsync( + graphQLQuery, + graphQLQueryName, + isAuthenticated: true, + clientRoleHeader: "TestFieldExcludedForAggregation"); + + SqlTestHelper.TestForErrorInGraphQLResponse( + actual.ToString(), + message: "Access forbidden to field 'publisher_id' referenced in the groupBy argument.", + path: @"[""booksNF""]" + ); + } + + /// + /// Tests that a GraphQL query with a group by aggregation on a field not allowed for aggregation results in an + /// appropriate error message. + /// + /// + [TestMethod] + public async Task Query_GroupBy_Aggregation_FieldNotAllowed() + { + string graphQLQueryName = "booksNF"; + string graphQLQuery = @"{ + booksNF { + groupBy { + aggregations { + max (field: id) + min (field: publisher_id) + } + } + } + } + "; + + JsonElement actual = await ExecuteGraphQLRequestAsync( + graphQLQuery, + graphQLQueryName, + isAuthenticated: true, + clientRoleHeader: "TestFieldExcludedForAggregation"); + + SqlTestHelper.TestForErrorInGraphQLResponse( + actual.ToString(), + message: "Access forbidden to field 'publisher_id' referenced in the aggregation function 'min'.", + path: @"[""booksNF""]" + ); + } } } diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt index fa9a9cbcd7..541f7fc078 100644 --- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt +++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt @@ -3442,6 +3442,19 @@ Action: Read } ] + }, + { + Role: 
TestFieldExcludedForAggregation, + Actions: [ + { + Action: Read, + Fields: { + Exclude: [ + publisher_id + ] + } + } + ] } ], Mappings: { From 75d93814e2b64f0a0f16cd5fe6c581b0e37260d3 Mon Sep 17 00:00:00 2001 From: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> Date: Tue, 5 Aug 2025 16:17:35 +0000 Subject: [PATCH 44/79] Add logic to connect to Azure Log Analytics (#2787) ## Why make this change? Solves issue #2778 ## What is this change? This change adds logic to capture logs that DAB produces in order to send them to the users' Azure Log Analytics tables. It does this by creating a logger provider that is activated when the feature is enabled and sends the logs that are produced to a custom log collector. This custom log collector then flushes all of the saved logs periodically every certain amount of time to Azure Log Analytics. It is also important to note, that the name of one of the properties in the feature was changed from `workspace-id` to `custom-table-name` as I found that the feature did not need the `workspace-id` to connect to the table, but it needed the name of custom log that is created as a resource by the user. Files Created: - `AzureLogAnalyticsFlusherService` uploads the logs periodically to the Azure Log Analytics Workspace Table. - `AzureLogAnalyticsCustomLogCollector` it collects the logs from the rest of the services and pushes them to `AzureLogAnalyticsFlusherService`. - `AzureLogAnalyticsLoggerProvider` creates the `AzureLogAnalyticsLogger` to start the population of logs inside the `AzureLogAnalyticsCustomLogCollector`. - `AzureLogAnalyticsLog` is the base object that defines the structure of the logs that will be sent to Azure Log Analytics. ## How was this tested? - [ ] Integration Tests - [X] Unit Tests - [X] Manual Tests For the manual testing, I had to create various resources inside of Azure: - Create Azure Log Analytics Workspace and creating a table inside of it and decide on the `custom-table-name`. 
- Create `DCE Endpoint` which is used the entry point for DAB to send its logs - Create DCR or Data Collection Rule and set rules on the structure of the logs that it will receive, and set the workspace table and DCE Endpoint to which it will connect. - Create a VM that has a system assigned managed identity. - Assign permission on DCR to allow VM to write telemetry data. After creating all the necessary resources, you just need to run DAB inside of the VM to have Azure Log Analytics logs sent. You can follow the steps more in detail in the following link: https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-api?tabs=dce#create-data-collection-rule ## Sample Request(s) image --- schemas/dab.draft.schema.json | 10 +- src/Cli.Tests/ConfigureOptionsTests.cs | 4 +- src/Cli/Commands/ConfigureOptions.cs | 14 +- src/Cli/ConfigGenerator.cs | 12 +- .../AzureLogAnalyticsAuthOptionsConverter.cs | 14 +- ...zureLogAnalyticsOptionsConverterFactory.cs | 2 +- .../AzureLogAnalyticsAuthOptions.cs | 16 +- .../ObjectModel/AzureLogAnalyticsLogs.cs | 25 +++ .../Configurations/RuntimeConfigValidator.cs | 4 +- src/Directory.Packages.props | 149 +++++++-------- .../Configuration/ConfigurationTests.cs | 16 +- .../Telemetry/AzureLogAnalyticsTests.cs | 173 ++++++++++++++++++ .../{ => Telemetry}/OpenTelemetryTests.cs | 2 +- .../{ => Telemetry}/TelemetryTests.cs | 4 +- .../Azure.DataApiBuilder.Service.csproj | 1 + src/Service/Program.cs | 14 ++ src/Service/Startup.cs | 55 ++++++ .../AzureLogAnalyticsCustomLogCollector.cs | 74 ++++++++ .../AzureLogAnalyticsFlusherService.cs | 56 ++++++ .../Telemetry/AzureLogAnalyticsLogger.cs | 33 ++++ .../AzureLogAnalyticsLoggerProvider.cs | 26 +++ 21 files changed, 581 insertions(+), 123 deletions(-) create mode 100644 src/Config/ObjectModel/AzureLogAnalyticsLogs.cs create mode 100644 src/Service.Tests/Configuration/Telemetry/AzureLogAnalyticsTests.cs rename src/Service.Tests/Configuration/{ => Telemetry}/OpenTelemetryTests.cs 
(98%) rename src/Service.Tests/Configuration/{ => Telemetry}/TelemetryTests.cs (98%) create mode 100644 src/Service/Telemetry/AzureLogAnalyticsCustomLogCollector.cs create mode 100644 src/Service/Telemetry/AzureLogAnalyticsFlusherService.cs create mode 100644 src/Service/Telemetry/AzureLogAnalyticsLogger.cs create mode 100644 src/Service/Telemetry/AzureLogAnalyticsLoggerProvider.cs diff --git a/schemas/dab.draft.schema.json b/schemas/dab.draft.schema.json index 082e2a8de5..d713df99b2 100644 --- a/schemas/dab.draft.schema.json +++ b/schemas/dab.draft.schema.json @@ -423,9 +423,9 @@ "type": "object", "additionalProperties": false, "properties": { - "workspace-id": { + "custom-table-name": { "type": [ "string", "null" ], - "description": "Azure Log Analytics Workspace ID" + "description": "Azure Log Analytics Custom Table Name for entra-id mode" }, "dcr-immutable-id": { "type": [ "string", "null" ], @@ -459,9 +459,9 @@ "properties": { "auth": { "properties": { - "workspace-id": { + "custom-table-name": { "type": "string", - "description": "Azure Log Analytics Workspace ID" + "description": "Azure Log Analytics Custom Table Name for entra-id mode" }, "dcr-immutable-id": { "type": "string", @@ -472,7 +472,7 @@ "description": "DCE endpoint for entra-id mode" } }, - "required": [ "workspace-id", "dcr-immutable-id", "dce-endpoint" ] + "required": [ "custom-table-name", "dcr-immutable-id", "dce-endpoint" ] } }, "required": [ "auth" ] diff --git a/src/Cli.Tests/ConfigureOptionsTests.cs b/src/Cli.Tests/ConfigureOptionsTests.cs index b833240039..dfd1bfb0cf 100644 --- a/src/Cli.Tests/ConfigureOptionsTests.cs +++ b/src/Cli.Tests/ConfigureOptionsTests.cs @@ -164,7 +164,7 @@ public void TestAddAzureLogAnalyticsOptions() azureLogAnalyticsEnabled: CliBool.True, azureLogAnalyticsLogType: "log-type-test", azureLogAnalyticsFlushIntervalSeconds: 1, - azureLogAnalyticsWorkspaceId: "workspace-id-test", + azureLogAnalyticsCustomTableName: "custom-table-name-test", 
azureLogAnalyticsDcrImmutableId: "dcr-immutable-id-test", azureLogAnalyticsDceEndpoint: "dce-endpoint-test", config: TEST_RUNTIME_CONFIG_FILE @@ -183,7 +183,7 @@ public void TestAddAzureLogAnalyticsOptions() Assert.AreEqual("log-type-test", config.Runtime.Telemetry.AzureLogAnalytics.LogType); Assert.AreEqual(1, config.Runtime.Telemetry.AzureLogAnalytics.FlushIntervalSeconds); Assert.IsNotNull(config.Runtime.Telemetry.AzureLogAnalytics.Auth); - Assert.AreEqual("workspace-id-test", config.Runtime.Telemetry.AzureLogAnalytics.Auth.WorkspaceId); + Assert.AreEqual("custom-table-name-test", config.Runtime.Telemetry.AzureLogAnalytics.Auth.CustomTableName); Assert.AreEqual("dcr-immutable-id-test", config.Runtime.Telemetry.AzureLogAnalytics.Auth.DcrImmutableId); Assert.AreEqual("dce-endpoint-test", config.Runtime.Telemetry.AzureLogAnalytics.Auth.DceEndpoint); } diff --git a/src/Cli/Commands/ConfigureOptions.cs b/src/Cli/Commands/ConfigureOptions.cs index 08c90ba8f9..bb4c10a208 100644 --- a/src/Cli/Commands/ConfigureOptions.cs +++ b/src/Cli/Commands/ConfigureOptions.cs @@ -51,7 +51,7 @@ public ConfigureOptions( CliBool? azureLogAnalyticsEnabled = null, string? azureLogAnalyticsLogType = null, int? azureLogAnalyticsFlushIntervalSeconds = null, - string? azureLogAnalyticsWorkspaceId = null, + string? azureLogAnalyticsCustomTableName = null, string? azureLogAnalyticsDcrImmutableId = null, string? azureLogAnalyticsDceEndpoint = null, string? 
config = null) @@ -95,7 +95,7 @@ public ConfigureOptions( AzureLogAnalyticsEnabled = azureLogAnalyticsEnabled; AzureLogAnalyticsLogType = azureLogAnalyticsLogType; AzureLogAnalyticsFlushIntervalSeconds = azureLogAnalyticsFlushIntervalSeconds; - AzureLogAnalyticsWorkspaceId = azureLogAnalyticsWorkspaceId; + AzureLogAnalyticsCustomTableName = azureLogAnalyticsCustomTableName; AzureLogAnalyticsDcrImmutableId = azureLogAnalyticsDcrImmutableId; AzureLogAnalyticsDceEndpoint = azureLogAnalyticsDceEndpoint; } @@ -184,17 +184,17 @@ public ConfigureOptions( [Option("azure-key-vault.retry-policy.network-timeout-seconds", Required = false, HelpText = "Configure the network timeout for requests in seconds. Default: 60.")] public int? AzureKeyVaultRetryPolicyNetworkTimeoutSeconds { get; } - [Option("runtime.telemetry.azure-log-analytics.enabled", Default = CliBool.False, Required = false, HelpText = "Enable/Disable Azure Log Analytics.")] + [Option("runtime.telemetry.azure-log-analytics.enabled", Required = false, HelpText = "Enable/Disable Azure Log Analytics. Default: False (boolean)")] public CliBool? AzureLogAnalyticsEnabled { get; } - [Option("runtime.telemetry.azure-log-analytics.log-type", Required = false, HelpText = "Configure Log Type for Azure Log Analytics to find table to send telemetry data")] + [Option("runtime.telemetry.azure-log-analytics.log-type", Required = false, HelpText = "Configure Log Type for Azure Log Analytics to find table to send telemetry data. Default: DABLogs")] public string? AzureLogAnalyticsLogType { get; } - [Option("runtime.telemetry.azure-log-analytics.flush-interval-seconds", Required = false, HelpText = "Configure Flush Interval in seconds for Azure Log Analytics to specify the time interval to send the telemetry data")] + [Option("runtime.telemetry.azure-log-analytics.flush-interval-seconds", Required = false, HelpText = "Configure Flush Interval in seconds for Azure Log Analytics to specify the time interval to send the telemetry data. 
Default: 5")] public int? AzureLogAnalyticsFlushIntervalSeconds { get; } - [Option("runtime.telemetry.azure-log-analytics.auth.workspace-id", Required = false, HelpText = "Configure Workspace ID for Azure Log Analytics used to find workspace to connect")] - public string? AzureLogAnalyticsWorkspaceId { get; } + [Option("runtime.telemetry.azure-log-analytics.auth.custom-table-name", Required = false, HelpText = "Configure Custom Table Name for Azure Log Analytics used to find table to connect")] + public string? AzureLogAnalyticsCustomTableName { get; } [Option("runtime.telemetry.azure-log-analytics.auth.dcr-immutable-id", Required = false, HelpText = "Configure DCR Immutable ID for Azure Log Analytics to find the data collection rule that defines how data is collected")] public string? AzureLogAnalyticsDcrImmutableId { get; } diff --git a/src/Cli/ConfigGenerator.cs b/src/Cli/ConfigGenerator.cs index b2c36c12c7..6f1befba06 100644 --- a/src/Cli/ConfigGenerator.cs +++ b/src/Cli/ConfigGenerator.cs @@ -782,7 +782,7 @@ private static bool TryUpdateConfiguredRuntimeOptions( if (options.AzureLogAnalyticsEnabled is not null || options.AzureLogAnalyticsLogType is not null || options.AzureLogAnalyticsFlushIntervalSeconds is not null || - options.AzureLogAnalyticsWorkspaceId is not null || + options.AzureLogAnalyticsCustomTableName is not null || options.AzureLogAnalyticsDcrImmutableId is not null || options.AzureLogAnalyticsDceEndpoint is not null) { @@ -1157,13 +1157,13 @@ private static bool TryUpdateConfiguredAzureLogAnalyticsOptions( _logger.LogInformation($"Updated configuration with runtime.telemetry.azure-log-analytics.flush-interval-seconds as '{options.AzureLogAnalyticsFlushIntervalSeconds}'"); } - // Runtime.Telemetry.AzureLogAnalytics.Auth.WorkspaceId - if (options.AzureLogAnalyticsWorkspaceId is not null) + // Runtime.Telemetry.AzureLogAnalytics.Auth.CustomTableName + if (options.AzureLogAnalyticsCustomTableName is not null) { updatedAuthOptions = 
updatedAuthOptions is not null - ? updatedAuthOptions with { WorkspaceId = options.AzureLogAnalyticsWorkspaceId, UserProvidedWorkspaceId = true } - : new AzureLogAnalyticsAuthOptions { WorkspaceId = options.AzureLogAnalyticsWorkspaceId, UserProvidedWorkspaceId = true }; - _logger.LogInformation($"Updated configuration with runtime.telemetry.azure-log-analytics.auth.workspace-id as '{options.AzureLogAnalyticsWorkspaceId}'"); + ? updatedAuthOptions with { CustomTableName = options.AzureLogAnalyticsCustomTableName, UserProvidedCustomTableName = true } + : new AzureLogAnalyticsAuthOptions { CustomTableName = options.AzureLogAnalyticsCustomTableName, UserProvidedCustomTableName = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.azure-log-analytics.auth.custom-table-name as '{options.AzureLogAnalyticsCustomTableName}'"); } // Runtime.Telemetry.AzureLogAnalytics.Auth.DcrImmutableId diff --git a/src/Config/Converters/AzureLogAnalyticsAuthOptionsConverter.cs b/src/Config/Converters/AzureLogAnalyticsAuthOptionsConverter.cs index 1d790b125d..1428c0d75f 100644 --- a/src/Config/Converters/AzureLogAnalyticsAuthOptionsConverter.cs +++ b/src/Config/Converters/AzureLogAnalyticsAuthOptionsConverter.cs @@ -29,7 +29,7 @@ public AzureLogAnalyticsAuthOptionsConverter(bool replaceEnvVar) { if (reader.TokenType is JsonTokenType.StartObject) { - string? workspaceId = null; + string? customTableName = null; string? dcrImmutableId = null; string? dceEndpoint = null; @@ -37,7 +37,7 @@ public AzureLogAnalyticsAuthOptionsConverter(bool replaceEnvVar) { if (reader.TokenType == JsonTokenType.EndObject) { - return new AzureLogAnalyticsAuthOptions(workspaceId, dcrImmutableId, dceEndpoint); + return new AzureLogAnalyticsAuthOptions(customTableName, dcrImmutableId, dceEndpoint); } string? 
propertyName = reader.GetString(); @@ -45,10 +45,10 @@ public AzureLogAnalyticsAuthOptionsConverter(bool replaceEnvVar) reader.Read(); switch (propertyName) { - case "workspace-id": + case "custom-table-name": if (reader.TokenType is not JsonTokenType.Null) { - workspaceId = reader.DeserializeString(_replaceEnvVar); + customTableName = reader.DeserializeString(_replaceEnvVar); } break; @@ -88,10 +88,10 @@ public override void Write(Utf8JsonWriter writer, AzureLogAnalyticsAuthOptions v { writer.WriteStartObject(); - if (value?.UserProvidedWorkspaceId is true) + if (value?.UserProvidedCustomTableName is true) { - writer.WritePropertyName("workspace-id"); - JsonSerializer.Serialize(writer, value.WorkspaceId, options); + writer.WritePropertyName("custom-table-name"); + JsonSerializer.Serialize(writer, value.CustomTableName, options); } if (value?.UserProvidedDcrImmutableId is true) diff --git a/src/Config/Converters/AzureLogAnalyticsOptionsConverterFactory.cs b/src/Config/Converters/AzureLogAnalyticsOptionsConverterFactory.cs index 0121cb73f8..895a4abb61 100644 --- a/src/Config/Converters/AzureLogAnalyticsOptionsConverterFactory.cs +++ b/src/Config/Converters/AzureLogAnalyticsOptionsConverterFactory.cs @@ -141,7 +141,7 @@ public override void Write(Utf8JsonWriter writer, AzureLogAnalyticsOptions value JsonSerializer.Serialize(writer, value.Enabled, options); } - if (value?.Auth is not null && (value.Auth.UserProvidedWorkspaceId || value.Auth.UserProvidedDcrImmutableId || value.Auth.UserProvidedDceEndpoint)) + if (value?.Auth is not null && (value.Auth.UserProvidedCustomTableName || value.Auth.UserProvidedDcrImmutableId || value.Auth.UserProvidedDceEndpoint)) { AzureLogAnalyticsAuthOptionsConverter authOptionsConverter = options.GetConverter(typeof(AzureLogAnalyticsAuthOptions)) as AzureLogAnalyticsAuthOptionsConverter ?? 
throw new JsonException("Failed to get azure-log-analytics.auth options converter"); diff --git a/src/Config/ObjectModel/AzureLogAnalyticsAuthOptions.cs b/src/Config/ObjectModel/AzureLogAnalyticsAuthOptions.cs index cc8ed9dffa..58b83630e9 100644 --- a/src/Config/ObjectModel/AzureLogAnalyticsAuthOptions.cs +++ b/src/Config/ObjectModel/AzureLogAnalyticsAuthOptions.cs @@ -14,7 +14,7 @@ public record AzureLogAnalyticsAuthOptions /// /// Whether Azure Log Analytics is enabled. /// - public string? WorkspaceId { get; init; } + public string? CustomTableName { get; init; } /// /// Authentication options for Azure Log Analytics. @@ -27,12 +27,12 @@ public record AzureLogAnalyticsAuthOptions public string? DceEndpoint { get; init; } [JsonConstructor] - public AzureLogAnalyticsAuthOptions(string? workspaceId = null, string? dcrImmutableId = null, string? dceEndpoint = null) + public AzureLogAnalyticsAuthOptions(string? customTableName = null, string? dcrImmutableId = null, string? dceEndpoint = null) { - if (workspaceId is not null) + if (customTableName is not null) { - WorkspaceId = workspaceId; - UserProvidedWorkspaceId = true; + CustomTableName = customTableName; + UserProvidedCustomTableName = true; } if (dcrImmutableId is not null) @@ -51,12 +51,12 @@ public AzureLogAnalyticsAuthOptions(string? workspaceId = null, string? dcrImmut /// /// Flag which informs CLI and JSON serializer whether to write workspace-id /// property and value to the runtime config file. - /// When user doesn't provide the workspace-id property/value, which signals DAB to not write anything, + /// When user doesn't provide the custom-table-name property/value, which signals DAB to not write anything, /// the DAB CLI should not write the current value to a serialized config. 
/// [JsonIgnore(Condition = JsonIgnoreCondition.Always)] - [MemberNotNullWhen(true, nameof(WorkspaceId))] - public bool UserProvidedWorkspaceId { get; init; } = false; + [MemberNotNullWhen(true, nameof(CustomTableName))] + public bool UserProvidedCustomTableName { get; init; } = false; /// /// Flag which informs CLI and JSON serializer whether to write dcr-immutable-id diff --git a/src/Config/ObjectModel/AzureLogAnalyticsLogs.cs b/src/Config/ObjectModel/AzureLogAnalyticsLogs.cs new file mode 100644 index 0000000000..1aefd92804 --- /dev/null +++ b/src/Config/ObjectModel/AzureLogAnalyticsLogs.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +namespace Azure.DataApiBuilder.Config.ObjectModel; + +/// +/// Class used to save the components for the logs that are sent to Azure Log Analytics +/// +public class AzureLogAnalyticsLogs +{ + public string Time { get; set; } + public string LogLevel { get; set; } + public string? Message { get; set; } + public string? Component { get; set; } + public string? LogType { get; set; } + + public AzureLogAnalyticsLogs(string time, string logLevel, string? message, string? component, string? logType = null) + { + Time = time; + LogLevel = logLevel; + Message = message; + Component = component; + LogType = logType; + } +} diff --git a/src/Core/Configurations/RuntimeConfigValidator.cs b/src/Core/Configurations/RuntimeConfigValidator.cs index 5edce8af90..f910d5bd76 100644 --- a/src/Core/Configurations/RuntimeConfigValidator.cs +++ b/src/Core/Configurations/RuntimeConfigValidator.cs @@ -166,11 +166,11 @@ public void ValidateAzureLogAnalyticsAuth(RuntimeConfig runtimeConfig) { AzureLogAnalyticsOptions azureLogAnalyticsOptions = runtimeConfig.Runtime.Telemetry.AzureLogAnalytics; AzureLogAnalyticsAuthOptions? 
azureLogAnalyticsAuthOptions = azureLogAnalyticsOptions.Auth; - if (azureLogAnalyticsOptions.Enabled && (azureLogAnalyticsAuthOptions is null || string.IsNullOrWhiteSpace(azureLogAnalyticsAuthOptions.WorkspaceId) || + if (azureLogAnalyticsOptions.Enabled && (azureLogAnalyticsAuthOptions is null || string.IsNullOrWhiteSpace(azureLogAnalyticsAuthOptions.CustomTableName) || string.IsNullOrWhiteSpace(azureLogAnalyticsAuthOptions.DcrImmutableId) || string.IsNullOrWhiteSpace(azureLogAnalyticsAuthOptions.DceEndpoint))) { HandleOrRecordException(new DataApiBuilderException( - message: "Azure Log Analytics Auth options 'workspace-id', 'dcr-immutable-id', and 'dce-endpoint' cannot be null or empty if enabled.", + message: "Azure Log Analytics Auth options 'custom-table-name', 'dcr-immutable-id', and 'dce-endpoint' cannot be null or empty if enabled.", statusCode: HttpStatusCode.ServiceUnavailable, subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError)); } diff --git a/src/Directory.Packages.props b/src/Directory.Packages.props index ee79b16b00..d00f43c478 100644 --- a/src/Directory.Packages.props +++ b/src/Directory.Packages.props @@ -1,78 +1,79 @@ - - true - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/Service.Tests/Configuration/ConfigurationTests.cs b/src/Service.Tests/Configuration/ConfigurationTests.cs index 516ad7b917..d2b596ed81 100644 --- a/src/Service.Tests/Configuration/ConfigurationTests.cs +++ b/src/Service.Tests/Configuration/ConfigurationTests.cs @@ -4072,12 +4072,12 @@ private static RuntimeConfig InitializeRuntimeWithLogLevel(Dictionary [DataTestMethod] [TestCategory(TestCategory.MSSQL)] - [DataRow(true, "WorkspaceId", "DcrImmutableId", "DceEndpoint", "TestDabLog", 1, true, "TestDabLog", 1)] + [DataRow(true, 
"CustomTableName", "DcrImmutableId", "DceEndpoint", "TestDabLog", 1, true, "TestDabLog", 1)] [DataRow(false, "", null, "", "", 10, false, "", 10)] [DataRow(null, null, null, null, null, null, false, "DabLogs", 5)] public void AzureLogAnalyticsSerialization( bool? enabled, - string? workspaceId, + string? customTableName, string? dcrImmutableId, string? dceEndpoint, string? logType, @@ -4090,11 +4090,11 @@ public void AzureLogAnalyticsSerialization( bool expectedExistEnabled = enabled is not null; bool expectedExistLogType = logType is not null; bool expectedExistFlushIntSec = flushIntSec is not null; - bool expectedExistWorkspaceId = workspaceId is not null; + bool expectedExistCustomTableName = customTableName is not null; bool expectedExistDcrImmutableId = dcrImmutableId is not null; bool expectedExistDceEndpoint = dceEndpoint is not null; - AzureLogAnalyticsAuthOptions authOptions = new(workspaceId, dcrImmutableId, dceEndpoint); + AzureLogAnalyticsAuthOptions authOptions = new(customTableName, dcrImmutableId, dceEndpoint); AzureLogAnalyticsOptions azureLogAnalyticsOptions = new(enabled, authOptions, logType, flushIntSec); RuntimeConfig configWithCustomLogLevel = InitializeRuntimeWithAzureLogAnalytics(azureLogAnalyticsOptions); string configWithCustomLogLevelJson = configWithCustomLogLevel.ToJson(); @@ -4140,11 +4140,11 @@ public void AzureLogAnalyticsSerialization( //Validate the values inside the auth properties are of expected value if (authExists) { - bool workspaceIdExists = authElement.TryGetProperty("workspace-id", out JsonElement workspaceIdElement); - Assert.AreEqual(expectedExistWorkspaceId, workspaceIdExists); - if (workspaceIdExists) + bool customTableNameExists = authElement.TryGetProperty("custom-table-name", out JsonElement customTableNameElement); + Assert.AreEqual(expectedExistCustomTableName, customTableNameExists); + if (customTableNameExists) { - Assert.AreEqual(expected: workspaceId, workspaceIdElement.GetString()); + 
Assert.AreEqual(expected: customTableName, customTableNameElement.GetString()); } bool dcrImmutableIdExists = authElement.TryGetProperty("dcr-immutable-id", out JsonElement dcrImmutableIdElement); diff --git a/src/Service.Tests/Configuration/Telemetry/AzureLogAnalyticsTests.cs b/src/Service.Tests/Configuration/Telemetry/AzureLogAnalyticsTests.cs new file mode 100644 index 0000000000..9b133b9ed3 --- /dev/null +++ b/src/Service.Tests/Configuration/Telemetry/AzureLogAnalyticsTests.cs @@ -0,0 +1,173 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Service.Telemetry; +using Azure.Identity; +using Azure.Monitor.Ingestion; +using Microsoft.AspNetCore.TestHost; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; +using static Azure.DataApiBuilder.Service.Tests.Configuration.ConfigurationTests; + +namespace Azure.DataApiBuilder.Service.Tests.Configuration.Telemetry; + +/// +/// Contains tests for Azure Log Analytics functionality. +/// +[TestClass, TestCategory(TestCategory.MSSQL)] +public class AzureLogAnalyticsTests +{ + public TestContext TestContext { get; set; } + + private const string CONFIG_WITH_TELEMETRY = "dab-azure-log-analytics-test-config.json"; + private const string CONFIG_WITHOUT_TELEMETRY = "dab-no-azure-log-analytics-test-config.json"; + private static RuntimeConfig _configuration; + + /// + /// This is a helper function that creates runtime config file with specified telemetry options. + /// + /// Name of the config file to be created. + /// Whether telemetry is enabled or not. + /// Telemetry connection string. 
+ public static void SetUpTelemetryInConfig(string configFileName, bool isLogAnalyticsEnabled, string logAnalyticsCustomTable, string logAnalyticsDcrImmutableId, string logAnalyticsDceEndpoint) + { + DataSource dataSource = new(DatabaseType.MSSQL, + GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); + + _configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions: new(), restOptions: new()); + + TelemetryOptions _testTelemetryOptions = new(AzureLogAnalytics: new AzureLogAnalyticsOptions(isLogAnalyticsEnabled, new AzureLogAnalyticsAuthOptions(logAnalyticsCustomTable, logAnalyticsDcrImmutableId, logAnalyticsDceEndpoint))); + _configuration = _configuration with { Runtime = _configuration.Runtime with { Telemetry = _testTelemetryOptions } }; + + File.WriteAllText(configFileName, _configuration.ToJson()); + } + + /// + /// Cleans up the test environment by deleting the runtime config with telemetry options. + /// + [TestCleanup] + public void CleanUpTelemetryConfig() + { + if (File.Exists(CONFIG_WITH_TELEMETRY)) + { + File.Delete(CONFIG_WITH_TELEMETRY); + } + + if (File.Exists(CONFIG_WITHOUT_TELEMETRY)) + { + File.Delete(CONFIG_WITHOUT_TELEMETRY); + } + } + + /// + /// Tests if the services are correctly enabled for Azure Log Analytics. 
+ /// + [TestMethod] + public void TestAzureLogAnalyticsServicesEnabled() + { + // Arrange + SetUpTelemetryInConfig(CONFIG_WITH_TELEMETRY, true, "Custom-Table-Name-Test", "DCR-Immutable-ID-Test", "https://fake.dce.endpoint"); + + string[] args = new[] + { + $"--ConfigFileName={CONFIG_WITH_TELEMETRY}" + }; + using TestServer server = new(Program.CreateWebHostBuilder(args)); + + // Additional assertions to check if AzureLogAnalytics is enabled correctly in services + IServiceProvider serviceProvider = server.Services; + AzureLogAnalyticsCustomLogCollector customLogCollector = (AzureLogAnalyticsCustomLogCollector)serviceProvider.GetService(); + AzureLogAnalyticsFlusherService flusherService = serviceProvider.GetService(); + IEnumerable loggerProvidersServices = serviceProvider.GetServices(); + AzureLogAnalyticsLoggerProvider loggerProvider = loggerProvidersServices.OfType().FirstOrDefault(); + + // If customLogCollector, flusherService, and loggerProvider are not null when AzureLogAnalytics is enabled + Assert.IsNotNull(customLogCollector, "AzureLogAnalyticsCustomLogCollector should be registered."); + Assert.IsNotNull(flusherService, "AzureLogAnalyticsFlusherService should be registered."); + Assert.IsNotNull(loggerProvider, "AzureLogAnalyticsLoggerProvider should be registered."); + } + + /// + /// Tests if the logs are flushed correctly when Azure Log Analytics is enabled. 
+ /// + [DataTestMethod] + [DataRow("Information Test Message", LogLevel.Information)] + [DataRow("Trace Test Message", LogLevel.Trace)] + [DataRow("Warning Test Message", LogLevel.Warning)] + public async Task TestAzureLogAnalyticsFlushServiceSucceed(string message, LogLevel logLevel) + { + // Arrange + CancellationTokenSource tokenSource = new(); + AzureLogAnalyticsOptions azureLogAnalyticsOptions = new(true, new AzureLogAnalyticsAuthOptions("custom-table-name-test", "dcr-immutable-id-test", "https://fake.dce.endpoint"), "DABLogs", 1); + CustomLogsIngestionClient customClient = new(azureLogAnalyticsOptions.Auth.DceEndpoint); + AzureLogAnalyticsCustomLogCollector customLogCollector = new(); + + ILoggerFactory loggerFactory = new LoggerFactory(); + ILogger logger = loggerFactory.CreateLogger(); + AzureLogAnalyticsFlusherService flusherService = new(azureLogAnalyticsOptions, customLogCollector, customClient, logger); + + // Act + await customLogCollector.LogAsync(message, logLevel); + + _ = Task.Run(() => flusherService.StartAsync(tokenSource.Token)); + + await Task.Delay(1000); + + // Assert + AzureLogAnalyticsLogs actualLog = customClient.LogAnalyticsLogs[0]; + Assert.AreEqual(logLevel.ToString(), actualLog.LogLevel); + Assert.AreEqual(message, actualLog.Message); + } + + /// + /// Tests if the services are correctly disabled for Azure Log Analytics. 
+ /// + [TestMethod] + public void TestAzureLogAnalyticsServicesDisabled() + { + // Arrange + SetUpTelemetryInConfig(CONFIG_WITHOUT_TELEMETRY, false, null, null, null); + + string[] args = new[] + { + $"--ConfigFileName={CONFIG_WITHOUT_TELEMETRY}" + }; + using TestServer server = new(Program.CreateWebHostBuilder(args)); + + // Additional assertions to check if Azure Log Analytics is disabled correctly in services + IServiceProvider serviceProvider = server.Services; + AzureLogAnalyticsFlusherService flusherService = serviceProvider.GetService(); + AzureLogAnalyticsLoggerProvider loggerProvider = serviceProvider.GetService(); + + // If flusherService and loggerProvider are null, Azure Log Analytics is disabled + Assert.IsNull(flusherService, "AzureLogAnalyticsFlusherService should not be registered."); + Assert.IsNull(loggerProvider, "AzureLogAnalyticsLoggerProvider should not be registered."); + } + + /// + /// Custom logs ingestion to test that all the logs are being sent correctly to Azure Log Analytics + /// + private class CustomLogsIngestionClient : LogsIngestionClient + { + public List LogAnalyticsLogs { get; } = new(); + + public CustomLogsIngestionClient(string dceEndpoint) : base(new Uri(dceEndpoint), new DefaultAzureCredential()) { } // CodeQL [SM05137] DefaultAzureCredential will use Managed Identity if available or fallback to default. 
+ + public async override Task UploadAsync(string ruleId, string streamName, IEnumerable logs, LogsUploadOptions options = null, CancellationToken cancellationToken = default) + { + LogAnalyticsLogs.AddRange(logs.Cast()); + + Response mockResponse = Response.FromValue(Mock.Of(), Mock.Of()); + return await Task.FromResult(mockResponse); + } + } +} diff --git a/src/Service.Tests/Configuration/OpenTelemetryTests.cs b/src/Service.Tests/Configuration/Telemetry/OpenTelemetryTests.cs similarity index 98% rename from src/Service.Tests/Configuration/OpenTelemetryTests.cs rename to src/Service.Tests/Configuration/Telemetry/OpenTelemetryTests.cs index 166dc7b001..3b8e374fe2 100644 --- a/src/Service.Tests/Configuration/OpenTelemetryTests.cs +++ b/src/Service.Tests/Configuration/Telemetry/OpenTelemetryTests.cs @@ -12,7 +12,7 @@ using OpenTelemetry.Trace; using static Azure.DataApiBuilder.Service.Tests.Configuration.ConfigurationTests; -namespace Azure.DataApiBuilder.Service.Tests.Configuration; +namespace Azure.DataApiBuilder.Service.Tests.Configuration.Telemetry; /// /// Contains tests for OpenTelemetry functionality. diff --git a/src/Service.Tests/Configuration/TelemetryTests.cs b/src/Service.Tests/Configuration/Telemetry/TelemetryTests.cs similarity index 98% rename from src/Service.Tests/Configuration/TelemetryTests.cs rename to src/Service.Tests/Configuration/Telemetry/TelemetryTests.cs index 3b49ddae5d..016cac8d1c 100644 --- a/src/Service.Tests/Configuration/TelemetryTests.cs +++ b/src/Service.Tests/Configuration/Telemetry/TelemetryTests.cs @@ -17,7 +17,7 @@ using Microsoft.VisualStudio.TestTools.UnitTesting; using static Azure.DataApiBuilder.Service.Tests.Configuration.ConfigurationTests; -namespace Azure.DataApiBuilder.Service.Tests.Configuration; +namespace Azure.DataApiBuilder.Service.Tests.Configuration.Telemetry; /// /// Contains tests for telemetry functionality. 
@@ -168,7 +168,7 @@ public async Task TestNoTelemetryItemsSentWhenDisabled_NonHostedScenario(bool is List telemetryItems = ((CustomTelemetryChannel)telemetryChannel).GetTelemetryItems(); // Assert that we are not sending any Traces/Requests/Exceptions to Telemetry - Assert.IsTrue(EnumerableUtilities.IsNullOrEmpty(telemetryItems)); + Assert.IsTrue(telemetryItems.IsNullOrEmpty()); } /// diff --git a/src/Service/Azure.DataApiBuilder.Service.csproj b/src/Service/Azure.DataApiBuilder.Service.csproj index e757ca4ee8..bb21361d4b 100644 --- a/src/Service/Azure.DataApiBuilder.Service.csproj +++ b/src/Service/Azure.DataApiBuilder.Service.csproj @@ -54,6 +54,7 @@ + diff --git a/src/Service/Program.cs b/src/Service/Program.cs index 6535069d3c..7009e489ce 100644 --- a/src/Service/Program.cs +++ b/src/Service/Program.cs @@ -195,6 +195,20 @@ public static ILoggerFactory GetLoggerFactoryForLogLevel(LogLevel logLevel, Tele }); } + if (Startup.IsAzureLogAnalyticsAvailable(Startup.AzureLogAnalyticsOptions)) + { + builder.AddProvider(new AzureLogAnalyticsLoggerProvider(Startup.CustomLogCollector)); + + if (logLevelInitializer is null) + { + builder.AddFilter(category: string.Empty, logLevel); + } + else + { + builder.AddFilter(category: string.Empty, level => level >= logLevelInitializer.MinLogLevel); + } + } + builder.AddConsole(); }); } diff --git a/src/Service/Startup.cs b/src/Service/Startup.cs index e21627fa05..57858ff2f5 100644 --- a/src/Service/Startup.cs +++ b/src/Service/Startup.cs @@ -29,6 +29,8 @@ using Azure.DataApiBuilder.Service.HealthCheck; using Azure.DataApiBuilder.Service.Telemetry; using Azure.DataApiBuilder.Service.Utilities; +using Azure.Identity; +using Azure.Monitor.Ingestion; using HotChocolate; using HotChocolate.AspNetCore; using HotChocolate.Execution; @@ -70,6 +72,7 @@ public class Startup(IConfiguration configuration, ILogger logger) public static bool IsLogLevelOverriddenByCli; + public static AzureLogAnalyticsCustomLogCollector CustomLogCollector = 
new(); public static ApplicationInsightsOptions AppInsightsOptions = new(); public static OpenTelemetryOptions OpenTelemetryOptions = new(); public static AzureLogAnalyticsOptions AzureLogAnalyticsOptions = new(); @@ -173,6 +176,22 @@ public void ConfigureServices(IServiceCollection services) }); } + if (runtimeConfigAvailable + && runtimeConfig?.Runtime?.Telemetry?.AzureLogAnalytics is not null + && IsAzureLogAnalyticsAvailable(runtimeConfig.Runtime.Telemetry.AzureLogAnalytics)) + { + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(sp => + { + AzureLogAnalyticsOptions options = runtimeConfig.Runtime.Telemetry.AzureLogAnalytics; + DefaultAzureCredential credential = new(); + LogsIngestionClient logsIngestionClient = new(new Uri(options.Auth!.DceEndpoint!), credential); + return new AzureLogAnalyticsFlusherService(options, CustomLogCollector, logsIngestionClient, _logger); + }); + services.AddHostedService(sp => sp.GetRequiredService()); + } + services.AddSingleton(implementationFactory: serviceProvider => { LogLevelInitializer logLevelInit = new(MinimumLogLevel, typeof(RuntimeConfigValidator).FullName, _configProvider, _hotReloadEventHandler); @@ -892,6 +911,30 @@ private void ConfigureAzureLogAnalytics(RuntimeConfig runtimeConfig) return; } + bool isAuthIncomplete = false; + if (string.IsNullOrEmpty(AzureLogAnalyticsOptions.Auth?.CustomTableName)) + { + _logger.LogError("Logs won't be sent to Azure Log Analytics because the Custom Table Name is not available in the config file."); + isAuthIncomplete = true; + } + + if (string.IsNullOrEmpty(AzureLogAnalyticsOptions.Auth?.DcrImmutableId)) + { + _logger.LogError("Logs won't be sent to Azure Log Analytics because the DCR Immutable Id is not available in the config file."); + isAuthIncomplete = true; + } + + if (string.IsNullOrEmpty(AzureLogAnalyticsOptions.Auth?.DceEndpoint)) + { + _logger.LogError("Logs won't be sent to Azure Log Analytics because the DCE Endpoint is not available in the 
config file."); + isAuthIncomplete = true; + } + + if (isAuthIncomplete) + { + return; + } + // Updating Startup Logger to Log from Startup Class. ILoggerFactory? loggerFactory = Program.GetLoggerFactoryForLogLevel(MinimumLogLevel); _logger = loggerFactory.CreateLogger(); @@ -1044,5 +1087,17 @@ public static void AddValidFilters() LoggerFilters.AddFilter(typeof(IAuthorizationResolver).FullName); LoggerFilters.AddFilter("default"); } + + /// + /// Helper function that returns if AzureLogAnalytics feature is enabled and properly configured. + /// + public static bool IsAzureLogAnalyticsAvailable(AzureLogAnalyticsOptions azureLogAnalyticsOptions) + { + return azureLogAnalyticsOptions.Auth is not null + && azureLogAnalyticsOptions.Enabled + && !string.IsNullOrWhiteSpace(azureLogAnalyticsOptions.Auth.CustomTableName) + && !string.IsNullOrWhiteSpace(azureLogAnalyticsOptions.Auth.DcrImmutableId) + && !string.IsNullOrWhiteSpace(azureLogAnalyticsOptions.Auth.DceEndpoint); + } } } diff --git a/src/Service/Telemetry/AzureLogAnalyticsCustomLogCollector.cs b/src/Service/Telemetry/AzureLogAnalyticsCustomLogCollector.cs new file mode 100644 index 0000000000..fd02935379 --- /dev/null +++ b/src/Service/Telemetry/AzureLogAnalyticsCustomLogCollector.cs @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Threading.Channels; +using System.Threading.Tasks; +using Azure.DataApiBuilder.Config.ObjectModel; +using Microsoft.Extensions.Logging; + +namespace Azure.DataApiBuilder.Service.Telemetry; + +/// +/// Interface for customized log collector. +/// +public interface ICustomLogCollector +{ + Task LogAsync(string message, LogLevel loggingLevel, string? source = null); + Task> DequeueAllAsync(string logType, int flushIntervalSeconds); +} + +/// +/// Log collector customized to retrieve and send all of the logs created by DAB. 
+/// +public class AzureLogAnalyticsCustomLogCollector : ICustomLogCollector +{ + private readonly Channel _logs = Channel.CreateUnbounded(); + + /// + /// Adds one log to the channel asynchronously, and saves the time at which it was created. + /// + /// Structured log message. + /// Severity of log event. + /// Class from which log event originated. + public async Task LogAsync(string message, LogLevel logLevel, string? source = null) + { + DateTime dateTime = DateTime.UtcNow; + await _logs.Writer.WriteAsync( + new AzureLogAnalyticsLogs( + dateTime.ToString("o"), + logLevel.ToString(), + message, + source)); + } + + /// + /// Creates a list periodically from the logs that are currently saved. + /// + /// Custom name to distinguish the logs sent from DAB to Azure Log Analytics. + /// Period of time between each list of logs is sent. + /// List of logs structured to be sent to Azure Log Analytics. + public async Task> DequeueAllAsync(string logType, int flushIntervalSeconds) + { + List list = new(); + Stopwatch time = Stopwatch.StartNew(); + + if (await _logs.Reader.WaitToReadAsync()) + { + while (_logs.Reader.TryRead(out AzureLogAnalyticsLogs? item)) + { + item.LogType = logType; + list.Add(item); + + if (time.Elapsed >= TimeSpan.FromSeconds(flushIntervalSeconds)) + { + break; + } + } + } + + return list; + } +} diff --git a/src/Service/Telemetry/AzureLogAnalyticsFlusherService.cs b/src/Service/Telemetry/AzureLogAnalyticsFlusherService.cs new file mode 100644 index 0000000000..bfe600a095 --- /dev/null +++ b/src/Service/Telemetry/AzureLogAnalyticsFlusherService.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.Monitor.Ingestion; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; + +namespace Azure.DataApiBuilder.Service.Telemetry; + +/// +/// Service used to periodically flush logs to Azure Log Analytics +/// +public class AzureLogAnalyticsFlusherService : BackgroundService +{ + private readonly AzureLogAnalyticsOptions _options; + private readonly ICustomLogCollector _customLogCollector; + private readonly LogsIngestionClient _logsIngestionClient; + private readonly ILogger _logger; + + public AzureLogAnalyticsFlusherService(AzureLogAnalyticsOptions options, ICustomLogCollector customLogCollector, LogsIngestionClient logsIngestionClient, ILogger logger) + { + _options = options; + _customLogCollector = customLogCollector; + _logsIngestionClient = logsIngestionClient; + _logger = logger; + } + + /// + /// Function that will keep periodically flushing data logs as long as Azure Log Analytics is enabled. + /// + /// Token used to stop running service when program is shut down. + protected async override Task ExecuteAsync(CancellationToken stoppingToken) + { + while (true) + { + try + { + List logs = await _customLogCollector.DequeueAllAsync(_options.LogType!, (int)_options.FlushIntervalSeconds!); + + if (logs.Count > 0) + { + await _logsIngestionClient.UploadAsync(_options.Auth!.DcrImmutableId!, _options.Auth!.CustomTableName!, logs); + } + } + catch (Exception ex) + { + _logger.LogError($"Error uploading logs to Azure Log Analytics: {ex}"); + } + } + } +} diff --git a/src/Service/Telemetry/AzureLogAnalyticsLogger.cs b/src/Service/Telemetry/AzureLogAnalyticsLogger.cs new file mode 100644 index 0000000000..52579c3963 --- /dev/null +++ b/src/Service/Telemetry/AzureLogAnalyticsLogger.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +using System; +using Microsoft.Extensions.Logging; + +namespace Azure.DataApiBuilder.Service.Telemetry; + +/// +/// Logger used to receive all the logs that will be sent to Azure Log Analytics +/// and are created by Data API builder while it is running. +/// +public class AzureLogAnalyticsLogger : ILogger +{ + private readonly string _className; + private readonly ICustomLogCollector _customLogCollector; + + public AzureLogAnalyticsLogger(string className, ICustomLogCollector customLogCollector) + { + _className = className; + _customLogCollector = customLogCollector; + } + + public IDisposable? BeginScope(TState state) where TState : notnull => default!; + + public bool IsEnabled(LogLevel logLevel) => true; + + public async void Log(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func formatter) + { + string message = formatter(state, exception); + await _customLogCollector.LogAsync(message, logLevel, _className); + } +} diff --git a/src/Service/Telemetry/AzureLogAnalyticsLoggerProvider.cs b/src/Service/Telemetry/AzureLogAnalyticsLoggerProvider.cs new file mode 100644 index 0000000000..71e17d548d --- /dev/null +++ b/src/Service/Telemetry/AzureLogAnalyticsLoggerProvider.cs @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using Microsoft.Extensions.Logging; + +namespace Azure.DataApiBuilder.Service.Telemetry; + +/// +/// Adds an Azure Log Analytics logger named 'AzureLogAnalyticsLogger' to the . 
+/// +public class AzureLogAnalyticsLoggerProvider : ILoggerProvider +{ + private readonly ICustomLogCollector _customLogCollector; + + public AzureLogAnalyticsLoggerProvider(ICustomLogCollector customLogCollector) + { + _customLogCollector = customLogCollector; + } + + public ILogger CreateLogger(string className) + { + return new AzureLogAnalyticsLogger(className, _customLogCollector); + } + + public void Dispose() { } +} From 8ea24dba68b37921f99ff64d56c8a88b1e058b50 Mon Sep 17 00:00:00 2001 From: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> Date: Wed, 6 Aug 2025 17:04:37 +0000 Subject: [PATCH 45/79] Change property name of log-type to dab-identifier for Azure Log Analytics (#2805) ## Why make this change? - This change closes issue #2804 The property `log-type` had a change in function, which also means that there needs to be a change in name. ## What is this change? Only changes the name of the property from `log-type` to `dab-identifier` since the changes in logic related to the changes in function were already implemented beforehand. ## How was this tested? - [ ] Integration Tests - [ ] Unit Tests - [X] Manual Tests Tested if the new property is serialized and deserialized correctly and if `configure` command works as expected with new property, while leaving no traces of the old property. 
## Sample Request(s) --runtime.telemetry.azure-log-analytics.dab-identifier --- schemas/dab.draft.schema.json | 4 ++-- src/Cli.Tests/ConfigureOptionsTests.cs | 4 ++-- src/Cli.Tests/ValidateConfigTests.cs | 2 +- src/Cli/Commands/ConfigureOptions.cs | 8 +++---- src/Cli/ConfigGenerator.cs | 10 ++++---- ...zureLogAnalyticsOptionsConverterFactory.cs | 8 +++---- .../ObjectModel/AzureLogAnalyticsLogs.cs | 6 ++--- .../ObjectModel/AzureLogAnalyticsOptions.cs | 24 +++++++++---------- .../Configuration/ConfigurationTests.cs | 16 ++++++------- .../AzureLogAnalyticsCustomLogCollector.cs | 8 +++---- .../AzureLogAnalyticsFlusherService.cs | 2 +- 11 files changed, 46 insertions(+), 46 deletions(-) diff --git a/schemas/dab.draft.schema.json b/schemas/dab.draft.schema.json index d713df99b2..3c19534cba 100644 --- a/schemas/dab.draft.schema.json +++ b/schemas/dab.draft.schema.json @@ -437,9 +437,9 @@ } } }, - "log-type": { + "dab-identifier": { "type": "string", - "description": "Custom log table name in Log Analytics", + "description": "Identifier passed on to Log Analytics", "default": "DabLogs" }, "flush-interval-seconds": { diff --git a/src/Cli.Tests/ConfigureOptionsTests.cs b/src/Cli.Tests/ConfigureOptionsTests.cs index dfd1bfb0cf..ca1922508a 100644 --- a/src/Cli.Tests/ConfigureOptionsTests.cs +++ b/src/Cli.Tests/ConfigureOptionsTests.cs @@ -162,7 +162,7 @@ public void TestAddAzureLogAnalyticsOptions() // Act: Attempts to add Azure Log Analytics options ConfigureOptions options = new( azureLogAnalyticsEnabled: CliBool.True, - azureLogAnalyticsLogType: "log-type-test", + azureLogAnalyticsDabIdentifier: "dab-identifier-test", azureLogAnalyticsFlushIntervalSeconds: 1, azureLogAnalyticsCustomTableName: "custom-table-name-test", azureLogAnalyticsDcrImmutableId: "dcr-immutable-id-test", @@ -180,7 +180,7 @@ public void TestAddAzureLogAnalyticsOptions() Assert.IsNotNull(config.Runtime.Telemetry); Assert.IsNotNull(config.Runtime.Telemetry.AzureLogAnalytics); Assert.AreEqual(true, 
config.Runtime.Telemetry.AzureLogAnalytics.Enabled); - Assert.AreEqual("log-type-test", config.Runtime.Telemetry.AzureLogAnalytics.LogType); + Assert.AreEqual("dab-identifier-test", config.Runtime.Telemetry.AzureLogAnalytics.DabIdentifier); Assert.AreEqual(1, config.Runtime.Telemetry.AzureLogAnalytics.FlushIntervalSeconds); Assert.IsNotNull(config.Runtime.Telemetry.AzureLogAnalytics.Auth); Assert.AreEqual("custom-table-name-test", config.Runtime.Telemetry.AzureLogAnalytics.Auth.CustomTableName); diff --git a/src/Cli.Tests/ValidateConfigTests.cs b/src/Cli.Tests/ValidateConfigTests.cs index a0fc0807e6..aeb016f007 100644 --- a/src/Cli.Tests/ValidateConfigTests.cs +++ b/src/Cli.Tests/ValidateConfigTests.cs @@ -332,7 +332,7 @@ public async Task TestValidateAzureLogAnalyticsOptionsWithoutAuthFails() // Act: Attempts to add Azure Log Analytics options without Auth options ConfigureOptions options = new( azureLogAnalyticsEnabled: CliBool.True, - azureLogAnalyticsLogType: "log-type-test", + azureLogAnalyticsDabIdentifier: "dab-identifier-test", azureLogAnalyticsFlushIntervalSeconds: 1, config: TEST_RUNTIME_CONFIG_FILE ); diff --git a/src/Cli/Commands/ConfigureOptions.cs b/src/Cli/Commands/ConfigureOptions.cs index bb4c10a208..8e8c14f6d3 100644 --- a/src/Cli/Commands/ConfigureOptions.cs +++ b/src/Cli/Commands/ConfigureOptions.cs @@ -49,7 +49,7 @@ public ConfigureOptions( int? azureKeyVaultRetryPolicyMaxDelaySeconds = null, int? azureKeyVaultRetryPolicyNetworkTimeoutSeconds = null, CliBool? azureLogAnalyticsEnabled = null, - string? azureLogAnalyticsLogType = null, + string? azureLogAnalyticsDabIdentifier = null, int? azureLogAnalyticsFlushIntervalSeconds = null, string? azureLogAnalyticsCustomTableName = null, string? 
azureLogAnalyticsDcrImmutableId = null, @@ -93,7 +93,7 @@ public ConfigureOptions( AzureKeyVaultRetryPolicyNetworkTimeoutSeconds = azureKeyVaultRetryPolicyNetworkTimeoutSeconds; // Azure Log Analytics AzureLogAnalyticsEnabled = azureLogAnalyticsEnabled; - AzureLogAnalyticsLogType = azureLogAnalyticsLogType; + AzureLogAnalyticsDabIdentifier = azureLogAnalyticsDabIdentifier; AzureLogAnalyticsFlushIntervalSeconds = azureLogAnalyticsFlushIntervalSeconds; AzureLogAnalyticsCustomTableName = azureLogAnalyticsCustomTableName; AzureLogAnalyticsDcrImmutableId = azureLogAnalyticsDcrImmutableId; @@ -187,8 +187,8 @@ public ConfigureOptions( [Option("runtime.telemetry.azure-log-analytics.enabled", Required = false, HelpText = "Enable/Disable Azure Log Analytics. Default: False (boolean)")] public CliBool? AzureLogAnalyticsEnabled { get; } - [Option("runtime.telemetry.azure-log-analytics.log-type", Required = false, HelpText = "Configure Log Type for Azure Log Analytics to find table to send telemetry data. Default: DABLogs")] - public string? AzureLogAnalyticsLogType { get; } + [Option("runtime.telemetry.azure-log-analytics.dab-identifier", Required = false, HelpText = "Configure DAB Identifier to allow user to differentiate which logs come from DAB in Azure Log Analytics . Default: DABLogs")] + public string? AzureLogAnalyticsDabIdentifier { get; } [Option("runtime.telemetry.azure-log-analytics.flush-interval-seconds", Required = false, HelpText = "Configure Flush Interval in seconds for Azure Log Analytics to specify the time interval to send the telemetry data. Default: 5")] public int? 
AzureLogAnalyticsFlushIntervalSeconds { get; } diff --git a/src/Cli/ConfigGenerator.cs b/src/Cli/ConfigGenerator.cs index 6f1befba06..18e18f00a6 100644 --- a/src/Cli/ConfigGenerator.cs +++ b/src/Cli/ConfigGenerator.cs @@ -780,7 +780,7 @@ private static bool TryUpdateConfiguredRuntimeOptions( // Telemetry: Azure Log Analytics if (options.AzureLogAnalyticsEnabled is not null || - options.AzureLogAnalyticsLogType is not null || + options.AzureLogAnalyticsDabIdentifier is not null || options.AzureLogAnalyticsFlushIntervalSeconds is not null || options.AzureLogAnalyticsCustomTableName is not null || options.AzureLogAnalyticsDcrImmutableId is not null || @@ -1137,11 +1137,11 @@ private static bool TryUpdateConfiguredAzureLogAnalyticsOptions( _logger.LogInformation($"Updated configuration with runtime.telemetry.azure-log-analytics.enabled as '{options.AzureLogAnalyticsEnabled}'"); } - // Runtime.Telemetry.AzureLogAnalytics.LogType - if (options.AzureLogAnalyticsLogType is not null) + // Runtime.Telemetry.AzureLogAnalytics.DabIdentifier + if (options.AzureLogAnalyticsDabIdentifier is not null) { - azureLogAnalyticsOptions = azureLogAnalyticsOptions with { LogType = options.AzureLogAnalyticsLogType, UserProvidedLogType = true }; - _logger.LogInformation($"Updated configuration with runtime.telemetry.azure-log-analytics.log-type as '{options.AzureLogAnalyticsLogType}'"); + azureLogAnalyticsOptions = azureLogAnalyticsOptions with { DabIdentifier = options.AzureLogAnalyticsDabIdentifier, UserProvidedDabIdentifier = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.azure-log-analytics.dab-identifier as '{options.AzureLogAnalyticsDabIdentifier}'"); } // Runtime.Telemetry.AzureLogAnalytics.FlushIntervalSeconds diff --git a/src/Config/Converters/AzureLogAnalyticsOptionsConverterFactory.cs b/src/Config/Converters/AzureLogAnalyticsOptionsConverterFactory.cs index 895a4abb61..3fcbe8c7bd 100644 --- 
a/src/Config/Converters/AzureLogAnalyticsOptionsConverterFactory.cs +++ b/src/Config/Converters/AzureLogAnalyticsOptionsConverterFactory.cs @@ -88,7 +88,7 @@ internal AzureLogAnalyticsOptionsConverter(bool replaceEnvVar) auth = authOptionsConverter.Read(ref reader, typeToConvert, options); break; - case "log-type": + case "dab-identifier": if (reader.TokenType is not JsonTokenType.Null) { logType = reader.DeserializeString(_replaceEnvVar); @@ -150,10 +150,10 @@ public override void Write(Utf8JsonWriter writer, AzureLogAnalyticsOptions value authOptionsConverter.Write(writer, value.Auth, options); } - if (value?.UserProvidedLogType is true) + if (value?.UserProvidedDabIdentifier is true) { - writer.WritePropertyName("log-type"); - JsonSerializer.Serialize(writer, value.LogType, options); + writer.WritePropertyName("dab-identifier"); + JsonSerializer.Serialize(writer, value.DabIdentifier, options); } if (value?.UserProvidedFlushIntervalSeconds is true) diff --git a/src/Config/ObjectModel/AzureLogAnalyticsLogs.cs b/src/Config/ObjectModel/AzureLogAnalyticsLogs.cs index 1aefd92804..8b914be681 100644 --- a/src/Config/ObjectModel/AzureLogAnalyticsLogs.cs +++ b/src/Config/ObjectModel/AzureLogAnalyticsLogs.cs @@ -12,14 +12,14 @@ public class AzureLogAnalyticsLogs public string LogLevel { get; set; } public string? Message { get; set; } public string? Component { get; set; } - public string? LogType { get; set; } + public string? Identifier { get; set; } - public AzureLogAnalyticsLogs(string time, string logLevel, string? message, string? component, string? logType = null) + public AzureLogAnalyticsLogs(string time, string logLevel, string? message, string? component, string? 
identifier = null) { Time = time; LogLevel = logLevel; Message = message; Component = component; - LogType = logType; + Identifier = identifier; } } diff --git a/src/Config/ObjectModel/AzureLogAnalyticsOptions.cs b/src/Config/ObjectModel/AzureLogAnalyticsOptions.cs index 9ba7a09bbd..d67e98be3a 100644 --- a/src/Config/ObjectModel/AzureLogAnalyticsOptions.cs +++ b/src/Config/ObjectModel/AzureLogAnalyticsOptions.cs @@ -21,7 +21,7 @@ public record AzureLogAnalyticsOptions /// /// Default log type for Azure Log Analytics. /// - public const string DEFAULT_LOG_TYPE = "DabLogs"; + public const string DEFAULT_DAB_IDENTIFIER = "DabLogs"; /// /// Default flush interval in seconds. @@ -39,9 +39,9 @@ public record AzureLogAnalyticsOptions public AzureLogAnalyticsAuthOptions? Auth { get; init; } /// - /// Custom log table name in Log Analytics. + /// Custom identifier name to send to Log Analytics. /// - public string? LogType { get; init; } + public string? DabIdentifier { get; init; } /// /// Interval between log batch pushes (in seconds). @@ -49,7 +49,7 @@ public record AzureLogAnalyticsOptions public int? FlushIntervalSeconds { get; init; } [JsonConstructor] - public AzureLogAnalyticsOptions(bool? enabled = null, AzureLogAnalyticsAuthOptions? auth = null, string? logType = null, int? flushIntervalSeconds = null) + public AzureLogAnalyticsOptions(bool? enabled = null, AzureLogAnalyticsAuthOptions? auth = null, string? dabIdentifier = null, int? flushIntervalSeconds = null) { Auth = auth; @@ -63,14 +63,14 @@ public AzureLogAnalyticsOptions(bool? enabled = null, AzureLogAnalyticsAuthOptio Enabled = DEFAULT_ENABLED; } - if (logType is not null) + if (dabIdentifier is not null) { - LogType = logType; - UserProvidedLogType = true; + DabIdentifier = dabIdentifier; + UserProvidedDabIdentifier = true; } else { - LogType = DEFAULT_LOG_TYPE; + DabIdentifier = DEFAULT_DAB_IDENTIFIER; } if (flushIntervalSeconds is not null) @@ -95,14 +95,14 @@ public AzureLogAnalyticsOptions(bool? 
enabled = null, AzureLogAnalyticsAuthOptio public bool UserProvidedEnabled { get; init; } = false; /// - /// Flag which informs CLI and JSON serializer whether to write log-type + /// Flag which informs CLI and JSON serializer whether to write dab-identifier /// property and value to the runtime config file. - /// When user doesn't provide the log-type property/value, which signals DAB to use the default, + /// When user doesn't provide the dab-identifier property/value, which signals DAB to use the default, /// the DAB CLI should not write the default value to a serialized config. /// [JsonIgnore(Condition = JsonIgnoreCondition.Always)] - [MemberNotNullWhen(true, nameof(LogType))] - public bool UserProvidedLogType { get; init; } = false; + [MemberNotNullWhen(true, nameof(DabIdentifier))] + public bool UserProvidedDabIdentifier { get; init; } = false; /// /// Flag which informs CLI and JSON serializer whether to write flush-interval-seconds diff --git a/src/Service.Tests/Configuration/ConfigurationTests.cs b/src/Service.Tests/Configuration/ConfigurationTests.cs index d2b596ed81..2836aff547 100644 --- a/src/Service.Tests/Configuration/ConfigurationTests.cs +++ b/src/Service.Tests/Configuration/ConfigurationTests.cs @@ -4080,22 +4080,22 @@ public void AzureLogAnalyticsSerialization( string? customTableName, string? dcrImmutableId, string? dceEndpoint, - string? logType, + string? dabIdentifier, int? 
flushIntSec, bool expectedEnabled, - string expectedLogType, + string expectedDabIdentifier, int expectedFlushIntSec) { //Check if auth property and its values are expected to exist bool expectedExistEnabled = enabled is not null; - bool expectedExistLogType = logType is not null; + bool expectedExistDabIdentifier = dabIdentifier is not null; bool expectedExistFlushIntSec = flushIntSec is not null; bool expectedExistCustomTableName = customTableName is not null; bool expectedExistDcrImmutableId = dcrImmutableId is not null; bool expectedExistDceEndpoint = dceEndpoint is not null; AzureLogAnalyticsAuthOptions authOptions = new(customTableName, dcrImmutableId, dceEndpoint); - AzureLogAnalyticsOptions azureLogAnalyticsOptions = new(enabled, authOptions, logType, flushIntSec); + AzureLogAnalyticsOptions azureLogAnalyticsOptions = new(enabled, authOptions, dabIdentifier, flushIntSec); RuntimeConfig configWithCustomLogLevel = InitializeRuntimeWithAzureLogAnalytics(azureLogAnalyticsOptions); string configWithCustomLogLevelJson = configWithCustomLogLevel.ToJson(); Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(configWithCustomLogLevelJson, out RuntimeConfig? 
deserializedRuntimeConfig)); @@ -4120,11 +4120,11 @@ public void AzureLogAnalyticsSerialization( Assert.AreEqual(expectedEnabled, enabledElement.GetBoolean()); } - bool logTypeExists = azureLogAnalyticsElement.TryGetProperty("log-type", out JsonElement logTypeElement); - Assert.AreEqual(expected: expectedExistLogType, actual: logTypeExists); - if (logTypeExists) + bool dabIdentifierExists = azureLogAnalyticsElement.TryGetProperty("dab-identifier", out JsonElement dabIdentifierElement); + Assert.AreEqual(expected: expectedExistDabIdentifier, actual: dabIdentifierExists); + if (dabIdentifierExists) { - Assert.AreEqual(expectedLogType, logTypeElement.GetString()); + Assert.AreEqual(expectedDabIdentifier, dabIdentifierElement.GetString()); } bool flushIntSecExists = azureLogAnalyticsElement.TryGetProperty("flush-interval-seconds", out JsonElement flushIntSecElement); diff --git a/src/Service/Telemetry/AzureLogAnalyticsCustomLogCollector.cs b/src/Service/Telemetry/AzureLogAnalyticsCustomLogCollector.cs index fd02935379..6e150f64af 100644 --- a/src/Service/Telemetry/AzureLogAnalyticsCustomLogCollector.cs +++ b/src/Service/Telemetry/AzureLogAnalyticsCustomLogCollector.cs @@ -17,7 +17,7 @@ namespace Azure.DataApiBuilder.Service.Telemetry; public interface ICustomLogCollector { Task LogAsync(string message, LogLevel loggingLevel, string? source = null); - Task> DequeueAllAsync(string logType, int flushIntervalSeconds); + Task> DequeueAllAsync(string dabIdentifier, int flushIntervalSeconds); } /// @@ -47,10 +47,10 @@ await _logs.Writer.WriteAsync( /// /// Creates a list periodically from the logs that are currently saved. /// - /// Custom name to distinguish the logs sent from DAB to Azure Log Analytics. + /// Custom name to distinguish the logs sent from DAB to Azure Log Analytics. /// Period of time between each list of logs is sent. /// List of logs structured to be sent to Azure Log Analytics. 
- public async Task> DequeueAllAsync(string logType, int flushIntervalSeconds) + public async Task> DequeueAllAsync(string dabIdentifier, int flushIntervalSeconds) { List list = new(); Stopwatch time = Stopwatch.StartNew(); @@ -59,7 +59,7 @@ public async Task> DequeueAllAsync(string logType, i { while (_logs.Reader.TryRead(out AzureLogAnalyticsLogs? item)) { - item.LogType = logType; + item.Identifier = dabIdentifier; list.Add(item); if (time.Elapsed >= TimeSpan.FromSeconds(flushIntervalSeconds)) diff --git a/src/Service/Telemetry/AzureLogAnalyticsFlusherService.cs b/src/Service/Telemetry/AzureLogAnalyticsFlusherService.cs index bfe600a095..3c157211e8 100644 --- a/src/Service/Telemetry/AzureLogAnalyticsFlusherService.cs +++ b/src/Service/Telemetry/AzureLogAnalyticsFlusherService.cs @@ -40,7 +40,7 @@ protected async override Task ExecuteAsync(CancellationToken stoppingToken) { try { - List logs = await _customLogCollector.DequeueAllAsync(_options.LogType!, (int)_options.FlushIntervalSeconds!); + List logs = await _customLogCollector.DequeueAllAsync(_options.DabIdentifier!, (int)_options.FlushIntervalSeconds!); if (logs.Count > 0) { From 2885360c35f96d49cad75f42692ff12483f59b83 Mon Sep 17 00:00:00 2001 From: souvikghosh04 Date: Fri, 8 Aug 2025 00:18:00 +0530 Subject: [PATCH 46/79] Support for relative nextLink using `next-link-relative` property for Reverse Proxy Scenarios (#2788) ## Why make this change? - Closes #2677 When Data API builder (DAB) is deployed behind a reverse proxy that handles SSL termination (converting https to http), the pagination nextLink URL in REST responses was generated with the http scheme instead of https. This resulted in broken next page links for clients accessing the service securely. The reported bug highlighted that the nextLink should use the original client-facing scheme and host, or provide a relative/schema-less link, or leverage the x-forwarded-proto and x-forwarded-host headers set by the reverse proxy. ## What is this change? 
- Users can also optionally set `x-forwarded-proto` and `x-forwarded-host` headers as needed to match their deployment environment. If not set, it will pick the default path from the request context. - Also refer to [X-Forwarded-Proto](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-Forwarded-Proto) and [X-Forwarded-Host](https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/X-Forwarded-Host). ## How was this tested? - [ ] Integration Tests - [x] Unit Tests ## Sample Request(s) - GET `http://localhost:5000/api/` --- schemas/dab.draft.schema.json | 5 + src/Config/ObjectModel/PaginationOptions.cs | 10 +- src/Config/ObjectModel/RuntimeConfig.cs | 5 + src/Core/Resolvers/SqlPaginationUtil.cs | 189 +++++++++++++++--- src/Core/Resolvers/SqlResponseHelpers.cs | 32 +-- .../Configuration/ConfigurationTests.cs | 169 +++++++++++++++- .../UnitTests/ConfigValidationUnitTests.cs | 14 +- 7 files changed, 366 insertions(+), 58 deletions(-) diff --git a/schemas/dab.draft.schema.json b/schemas/dab.draft.schema.json index 3c19534cba..94ee03e500 100644 --- a/schemas/dab.draft.schema.json +++ b/schemas/dab.draft.schema.json @@ -168,6 +168,11 @@ "description": "Sets the default number of records returned in a single response. When this limit is reached, a continuation token is provided to retrieve the next page. If set to null, the default value is 100.", "default": 100, "minimum": 1 + }, + "next-link-relative": { + "type": "boolean", + "default": false, + "description": "When true, nextLink in paginated results will use a relative URL." } } }, diff --git a/src/Config/ObjectModel/PaginationOptions.cs b/src/Config/ObjectModel/PaginationOptions.cs index ab4bff29ff..ccb0808386 100644 --- a/src/Config/ObjectModel/PaginationOptions.cs +++ b/src/Config/ObjectModel/PaginationOptions.cs @@ -37,8 +37,14 @@ public record PaginationOptions [JsonPropertyName("max-page-size")] public int? 
MaxPageSize { get; init; } = null; + /// + /// When true, nextLink in paginated responses will be relative (default: false). + /// + [JsonPropertyName("next-link-relative")] + public bool? NextLinkRelative { get; init; } = false; + [JsonConstructor] - public PaginationOptions(int? DefaultPageSize = null, int? MaxPageSize = null) + public PaginationOptions(int? DefaultPageSize = null, int? MaxPageSize = null, bool? NextLinkRelative = null) { if (MaxPageSize is not null) { @@ -69,6 +75,8 @@ public PaginationOptions(int? DefaultPageSize = null, int? MaxPageSize = null) statusCode: HttpStatusCode.ServiceUnavailable, subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError); } + + this.NextLinkRelative = NextLinkRelative ?? false; } /// diff --git a/src/Config/ObjectModel/RuntimeConfig.cs b/src/Config/ObjectModel/RuntimeConfig.cs index de46a8e7d0..7cf8159952 100644 --- a/src/Config/ObjectModel/RuntimeConfig.cs +++ b/src/Config/ObjectModel/RuntimeConfig.cs @@ -592,6 +592,11 @@ public uint MaxPageSize() return (uint?)Runtime?.Pagination?.MaxPageSize ?? PaginationOptions.MAX_PAGE_SIZE; } + public bool NextLinkRelative() + { + return Runtime?.Pagination?.NextLinkRelative ?? false; + } + public int MaxResponseSizeMB() { return Runtime?.Host?.MaxResponseSizeMB ?? 
HostOptions.MAX_RESPONSE_LENGTH_DAB_ENGINE_MB; diff --git a/src/Core/Resolvers/SqlPaginationUtil.cs b/src/Core/Resolvers/SqlPaginationUtil.cs index b06c5b8aa5..bb9362015b 100644 --- a/src/Core/Resolvers/SqlPaginationUtil.cs +++ b/src/Core/Resolvers/SqlPaginationUtil.cs @@ -12,8 +12,10 @@ using Azure.DataApiBuilder.Core.Services; using Azure.DataApiBuilder.Service.Exceptions; using Azure.DataApiBuilder.Service.GraphQLBuilder.GraphQLTypes; -using Azure.DataApiBuilder.Service.GraphQLBuilder.Queries; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Http.Extensions; using Microsoft.AspNetCore.WebUtilities; +using QueryBuilder = Azure.DataApiBuilder.Service.GraphQLBuilder.Queries.QueryBuilder; namespace Azure.DataApiBuilder.Core.Resolvers { @@ -572,15 +574,50 @@ public static string Base64Decode(string base64EncodedData) } /// - /// Create the URL that will provide for the next page of results - /// using the same query options. - /// Return value formatted as a JSON array: [{"nextLink":"[base]/api/[entity]?[queryParams_URIescaped]$after=[base64encodedPaginationToken]"}] + /// Constructs the base Uri for Pagination /// - /// The request path excluding query parameters (e.g. https://localhost/api/myEntity) - /// Collection of query string parameters that are URI escaped. - /// The contents to add to the $after query parameter. Should be base64 encoded pagination token. - /// JSON element - array with nextLink. - public static JsonElement CreateNextLink(string path, NameValueCollection? queryStringParameters, string newAfterPayload) + /// + /// This method uses the "X-Forwarded-Proto" and "X-Forwarded-Host" headers to determine + /// the scheme and host of the request, falling back to the request's original scheme and host if the headers + /// are not present or invalid. The method ensures that the scheme is either "http" or "https" and that the host + /// is a valid hostname or IP address. + /// + /// The HTTP context containing the request information. 
+ /// An optional base route to prepend to the request path. If not specified, no base route is used. + /// A string representing the fully constructed Base request URL for Pagination. + public static string ConstructBaseUriForPagination(HttpContext httpContext, string? baseRoute = null) + { + HttpRequest req = httpContext.Request; + + // use scheme from X-Forwarded-Proto or fallback to request scheme + string scheme = ResolveRequestScheme(req); + + // Use host from X-Forwarded-Host or fallback to request host + string host = ResolveRequestHost(req); + + // If the base route is not empty, we need to insert it into the URI before the rest path. + // Path is of the form ....restPath/pathNameForEntity. We want to insert the base route before the restPath. + // Finally, it will be of the form: .../baseRoute/restPath/pathNameForEntity. + return UriHelper.BuildAbsolute( + scheme: scheme, + host: new HostString(host), + pathBase: string.IsNullOrWhiteSpace(baseRoute) ? PathString.Empty : new PathString(baseRoute), + path: req.Path); + } + + /// + /// Builds a query string by appending or replacing the $after token with the specified value. + /// + /// This method does not include the in the returned query + /// string. It only processes and formats the query string parameters. + /// A collection of existing query string parameters. If , an empty collection is used. + /// The $after parameter, if present, will be removed before appending the new token. + /// The new value for the $after token. If this value is , empty, or whitespace, no + /// $after token will be appended. + /// A URL-encoded query string containing the updated parameters, including the new $after token if + /// specified. If no parameters are provided and is empty, an empty string is + /// returned. + public static string BuildQueryStringWithAfterToken(NameValueCollection? 
queryStringParameters, string newAfterPayload) { if (queryStringParameters is null) { @@ -588,33 +625,50 @@ public static JsonElement CreateNextLink(string path, NameValueCollection? query } else { - // Purge old $after value so this function can replace it. queryStringParameters.Remove("$after"); } - // To prevent regression of current behavior, retain the call to FormatQueryString - // which URI escapes other query parameters. Since $after has been removed, - // this will not affect the base64 encoded paging token. - string queryString = FormatQueryString(queryStringParameters: queryStringParameters); + // Format existing query string (URL encoded) + string queryString = FormatQueryString(queryStringParameters); - // When a new $after payload is provided, append it to the query string with the - // appropriate prefix: ? if $after is the only query parameter. & if $after is one of many query parameters. + // Append new $after token if (!string.IsNullOrWhiteSpace(newAfterPayload)) { string afterPrefix = string.IsNullOrWhiteSpace(queryString) ? "?" : "&"; queryString += $"{afterPrefix}{RequestParser.AFTER_URL}={newAfterPayload}"; } - // ValueKind will be array so we can differentiate from other objects in the response - // to be returned. - // [{"nextLink":"[base]/api/[entity]?[queryParams_URIescaped]$after=[base64encodedPaginationToken]"}] + // Construct final link + // return $"{path}{queryString}"; + return queryString; + } + + /// + /// Gets a consolidated next link for pagination in JSON format. 
+ /// + /// The base Pagination Uri + /// The query string with after value + /// True, if the next link should be relative + /// + public static JsonElement GetConsolidatedNextLinkForPagination(string baseUri, string queryString, bool isNextLinkRelative = false) + { + UriBuilder uriBuilder = new(baseUri) + { + // Form final link by appending the query string + Query = queryString + }; + + // Construct final link- absolute or relative + string nextLinkValue = isNextLinkRelative + ? uriBuilder.Uri.PathAndQuery // returns just "/api/?$after...", no host + : uriBuilder.Uri.AbsoluteUri; // returns full URL + + // Return serialized JSON object string jsonString = JsonSerializer.Serialize(new[] { - new - { - nextLink = @$"{path}{queryString}" - } + new { nextLink = nextLinkValue } }); + return JsonSerializer.Deserialize(jsonString); } @@ -695,5 +749,94 @@ public static string FormatQueryString(NameValueCollection? queryStringParameter return queryString; } + + /// + /// Extracts and request scheme from "X-Forwarded-Proto" or falls back to the request scheme. + /// + /// The HTTP request. + /// The scheme string ("http" or "https"). + /// Thrown when client explicitly sets an invalid scheme. + private static string ResolveRequestScheme(HttpRequest req) + { + string? rawScheme = req.Headers["X-Forwarded-Proto"].FirstOrDefault(); + string? normalized = rawScheme?.Trim().ToLowerInvariant(); + + bool isExplicit = !string.IsNullOrEmpty(rawScheme); + bool isValid = IsValidScheme(normalized); + + if (isExplicit && !isValid) + { + // Log a warning and ignore the invalid value, fallback to request's scheme + Console.WriteLine($"Warning: Invalid scheme '{rawScheme}' in X-Forwarded-Proto header. Falling back to request scheme: '{req.Scheme}'."); + return req.Scheme; + } + + return isValid ? normalized! : req.Scheme; + } + + /// + /// Extracts the request host from "X-Forwarded-Host" or falls back to the request host. + /// + /// The HTTP request. + /// The host string. 
+ /// Thrown when client explicitly sets an invalid host. + private static string ResolveRequestHost(HttpRequest req) + { + string? rawHost = req.Headers["X-Forwarded-Host"].FirstOrDefault(); + string? trimmed = rawHost?.Trim(); + + bool isExplicit = !string.IsNullOrEmpty(rawHost); + bool isValid = IsValidHost(trimmed); + + if (isExplicit && !isValid) + { + // Log a warning and ignore the invalid value, fallback to request's host + Console.WriteLine($"Warning: Invalid host '{rawHost}' in X-Forwarded-Host header. Falling back to request host: '{req.Host}'."); + return req.Host.ToString(); + } + + return isValid ? trimmed! : req.Host.ToString(); + } + + /// + /// Checks if the provided scheme is valid. + /// + /// Scheme, e.g., "http" or "https". + /// True if valid, otherwise false. + private static bool IsValidScheme(string? scheme) + { + return scheme is "http" or "https"; + } + + /// + /// Checks if the provided host is a valid hostname or IP address. + /// + /// The host name (with optional port). + /// True if valid, otherwise false. + private static bool IsValidHost(string? 
host) + { + if (string.IsNullOrWhiteSpace(host)) + { + return false; + } + + // Reject dangerous characters + if (host.Contains('\r') || host.Contains('\n') || host.Contains(' ') || + host.Contains('<') || host.Contains('>') || host.Contains('@')) + { + return false; + } + + // Validate host part (exclude port if present) + string hostnamePart = host.Split(':')[0]; + + if (Uri.CheckHostName(hostnamePart) == UriHostNameType.Unknown) + { + return false; + } + + // Final sanity check: ensure it parses into a full URI + return Uri.TryCreate($"http://{host}", UriKind.Absolute, out _); + } } } diff --git a/src/Core/Resolvers/SqlResponseHelpers.cs b/src/Core/Resolvers/SqlResponseHelpers.cs index 7701d662d3..8b0a0edb67 100644 --- a/src/Core/Resolvers/SqlResponseHelpers.cs +++ b/src/Core/Resolvers/SqlResponseHelpers.cs @@ -89,29 +89,18 @@ public static OkObjectResult FormatFindResult( tableName: context.DatabaseObject.Name, sqlMetadataProvider: sqlMetadataProvider); - // nextLink is the URL needed to get the next page of records using the same query options - // with $after base64 encoded for opaqueness - string path = UriHelper.GetEncodedUrl(httpContext!.Request).Split('?')[0]; + string basePaginationUri = SqlPaginationUtil.ConstructBaseUriForPagination(httpContext, runtimeConfig.Runtime?.BaseRoute); - // If the base route is not empty, we need to insert it into the URI before the rest path. - string? baseRoute = runtimeConfig.Runtime?.BaseRoute; - if (!string.IsNullOrWhiteSpace(baseRoute)) - { - HttpRequest request = httpContext!.Request; - - // Path is of the form ....restPath/pathNameForEntity. We want to insert the base route before the restPath. - // Finally, it will be of the form: .../baseRoute/restPath/pathNameForEntity. - path = UriHelper.BuildAbsolute( - scheme: request.Scheme, - host: request.Host, - pathBase: baseRoute, - path: request.Path); - } + // Build the query string with the $after token. 
+ string queryString = SqlPaginationUtil.BuildQueryStringWithAfterToken( + queryStringParameters: context!.ParsedQueryString, + newAfterPayload: after); - JsonElement nextLink = SqlPaginationUtil.CreateNextLink( - path, - queryStringParameters: context!.ParsedQueryString, - after); + // Get the final consolidated nextLink for the pagination. + JsonElement nextLink = SqlPaginationUtil.GetConsolidatedNextLinkForPagination( + baseUri: basePaginationUri, + queryString: queryString, + isNextLinkRelative: runtimeConfig.NextLinkRelative()); // When there are extra fields present, they are removed before returning the response. if (extraFieldsInResponse.Count > 0) @@ -424,6 +413,5 @@ public static OkObjectResult OkMutationResponse(JsonElement jsonResult) value = resultEnumerated }); } - } } diff --git a/src/Service.Tests/Configuration/ConfigurationTests.cs b/src/Service.Tests/Configuration/ConfigurationTests.cs index 2836aff547..5f71cc4d77 100644 --- a/src/Service.Tests/Configuration/ConfigurationTests.cs +++ b/src/Service.Tests/Configuration/ConfigurationTests.cs @@ -4393,9 +4393,11 @@ public async Task OpenApi_EntityLevelRestEndpoint() /// did not come across two $after query parameters. This addresses a customer raised issue where two $after /// query parameters were returned by DAB. /// - [TestMethod] + [DataTestMethod] + [DataRow(false, DisplayName = "NextLinkRelative is false")] + [DataRow(true, DisplayName = "NextLinkRelative is true")] [TestCategory(TestCategory.MSSQL)] - public async Task ValidateNextLinkUsage() + public async Task ValidateNextLinkUsage(bool isNextLinkRelative) { // Arrange - Setup test server with entity that has >1 record so that results can be paged. 
// A short cut to using an entity with >100 records is to just include the $first=1 filter @@ -4419,7 +4421,21 @@ public async Task ValidateNextLinkUsage() { ENTITY_NAME, requiredEntity } }; - CreateCustomConfigFile(entityMap, enableGlobalRest: true); + PaginationOptions paginationOptions = null; + + if (isNextLinkRelative) + { + paginationOptions = new PaginationOptions + { + DefaultPageSize = 1, + MaxPageSize = 1, + UserProvidedDefaultPageSize = true, + UserProvidedMaxPageSize = true, + NextLinkRelative = true + }; + } + + CreateCustomConfigFile(entityMap, enableGlobalRest: true, paginationOptions: paginationOptions); string[] args = new[] { @@ -4453,7 +4469,23 @@ public async Task ValidateNextLinkUsage() Dictionary followNextLinkResponseProperties = JsonSerializer.Deserialize>(followNextLinkResponseBody); string followUpResponseNextLink = followNextLinkResponseProperties["nextLink"].ToString(); - Uri nextLink = new(uriString: followUpResponseNextLink); + + // Build the Uri from nextLink string for query parsing. + // If relative, combine with base; if absolute, use as is. 
+ Uri nextLink = null; + if (Uri.IsWellFormedUriString(followUpResponseNextLink, UriKind.Absolute)) + { + nextLink = new(followUpResponseNextLink, UriKind.Absolute); + } + else if (Uri.IsWellFormedUriString(followUpResponseNextLink, UriKind.Relative)) + { + nextLink = new(new("http://localhost:5000"), followUpResponseNextLink); + } + else + { + Assert.Fail($"Invalid nextLink URI format: {followUpResponseNextLink}"); + } + NameValueCollection parsedQueryParameters = HttpUtility.ParseQueryString(query: nextLink.Query); Assert.AreEqual(expected: false, actual: parsedQueryParameters["$after"].Contains(','), message: "nextLink erroneously contained two $after query parameters that were joined by HttpUtility.ParseQueryString(queryString)."); Assert.AreNotEqual(notExpected: nextLinkUri, actual: followUpResponseNextLink, message: "The follow up request erroneously returned the same nextLink value."); @@ -4467,6 +4499,114 @@ public async Task ValidateNextLinkUsage() { Assert.Fail(message: "$after query parameter was not a valid base64 encoded value."); } + + // Validate nextLink is relative if nextLinkRelative is true or false otherwise. + // The assertion is now done directly on the original string, not on the parsed Uri object. 
+ if (isNextLinkRelative) + { + // The server returned a relative URL, so it should NOT start with http/https + Assert.IsFalse(Uri.IsWellFormedUriString(followUpResponseNextLink, UriKind.Absolute), + $"nextLink was expected to be relative but was absolute: {followUpResponseNextLink}"); + Assert.IsTrue(followUpResponseNextLink.StartsWith("/"), + $"nextLink was expected to start with '/' (relative), got: {followUpResponseNextLink}"); + } + else + { + Assert.IsTrue(Uri.IsWellFormedUriString(followUpResponseNextLink, UriKind.Absolute), + $"nextLink was expected to be absolute but was relative: {followUpResponseNextLink}"); + Assert.IsTrue(followUpResponseNextLink.StartsWith("http"), + $"nextLink was expected to start with http/https, got: {followUpResponseNextLink}"); + } + } + + /// + /// Validates X-Forwarded headers for nextLink in Pagination + /// + /// The X-Forwarded-Host value + /// The X-Forwarded-Proto value + [DataTestMethod] + [DataRow("localhost:5000", "http", DisplayName = "Forwarded Host and HTTP Protocol")] + [DataRow("myhost.com", "https", DisplayName = "Forwarded Host and HTTPS Protocol")] + [TestCategory(TestCategory.MSSQL)] + public async Task ValidateNextLinkRespectsXForwardedHostAndProto(string forwardedHost, string forwardedProto) + { + // Arrange - Setup test server with entity that has >1 record so that results can be paged. 
+ const string ENTITY_NAME = "Bookmark"; + + Entity requiredEntity = new( + Source: new("bookmarks", EntitySourceType.Table, null, null), + Rest: new(Enabled: true), + GraphQL: new(Singular: "", Plural: "", Enabled: false), + Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, + Relationships: null, + Mappings: null); + + Dictionary entityMap = new() + { + { ENTITY_NAME, requiredEntity } + }; + + PaginationOptions paginationOptions = new() + { + DefaultPageSize = 1, + MaxPageSize = 1, + UserProvidedDefaultPageSize = true, + UserProvidedMaxPageSize = true, + NextLinkRelative = false // Absolute nextLink required for this test + }; + + CreateCustomConfigFile(entityMap, enableGlobalRest: true, paginationOptions: paginationOptions); + + string[] args = new[] + { + $"--ConfigFileName={CUSTOM_CONFIG_FILENAME}" + }; + + using TestServer server = new(Program.CreateWebHostBuilder(args)); + using HttpClient client = server.CreateClient(); + + // Setup and send GET request with X-Forwarded-* headers + HttpRequestMessage initialPaginationRequest = new(HttpMethod.Get, $"{RestRuntimeOptions.DEFAULT_PATH}/{ENTITY_NAME}?$first=1"); + initialPaginationRequest.Headers.Add("X-Forwarded-Host", forwardedHost); + initialPaginationRequest.Headers.Add("X-Forwarded-Proto", forwardedProto); + + HttpResponseMessage initialPaginationResponse = await client.SendAsync(initialPaginationRequest); + + // Assert + Assert.AreEqual(HttpStatusCode.OK, initialPaginationResponse.StatusCode, message: "Expected request to succeed."); + + // Process response body and get nextLink + string responseBody = await initialPaginationResponse.Content.ReadAsStringAsync(); + Dictionary responseProperties = JsonSerializer.Deserialize>(responseBody); + string nextLinkUri = responseProperties.ContainsKey("nextLink") ? 
responseProperties["nextLink"].ToString() : null; + + Assert.IsNotNull(nextLinkUri, "nextLink missing in initial response."); + + // Assert that nextLink uses the forwarded host and proto + Uri nextLink = new(nextLinkUri, UriKind.Absolute); + + // Split host/port if present + string expectedHost; + int expectedPort = -1; + string[] hostParts = forwardedHost.Split(':'); + + if (hostParts.Length == 2 && int.TryParse(hostParts[1], out int port)) + { + expectedHost = hostParts[0]; + expectedPort = port; + } + else + { + expectedHost = forwardedHost; + } + + Assert.AreEqual(forwardedProto, nextLink.Scheme, $"nextLink scheme should be '{forwardedProto}' but was '{nextLink.Scheme}'"); + Assert.AreEqual(expectedHost, nextLink.Host, $"nextLink host should be '{expectedHost}' but was '{nextLink.Host}'"); + + if (expectedPort != -1) + { + Assert.AreEqual(expectedPort, nextLink.Port, $"nextLink port should be '{expectedPort}' but was '{nextLink.Port}'"); + } } /// @@ -4742,22 +4882,31 @@ public async Task TestNoDepthLimitOnGrahQLInNonHostedMode(int? depthLimit) /// /// Collection of entityName -> Entity object. /// flag to enable or disabled REST globally. - private static void CreateCustomConfigFile(Dictionary entityMap, bool enableGlobalRest = true) + /// Optional pagination options to use in the runtime config. + private static void CreateCustomConfigFile(Dictionary entityMap, bool enableGlobalRest = true, PaginationOptions paginationOptions = null) { DataSource dataSource = new( DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); + HostOptions hostOptions = new(Cors: null, Authentication: new() { Provider = nameof(EasyAuthType.StaticWebApps) }); + RuntimeOptions runtime = paginationOptions != null + ? 
new( + Rest: new(Enabled: enableGlobalRest), + GraphQL: new(Enabled: true), + Host: hostOptions, + Pagination: paginationOptions) + : new( + Rest: new(Enabled: enableGlobalRest), + GraphQL: new(Enabled: true), + Host: hostOptions); + RuntimeConfig runtimeConfig = new( Schema: string.Empty, DataSource: dataSource, - Runtime: new( - Rest: new(Enabled: enableGlobalRest), - GraphQL: new(Enabled: true), - Host: hostOptions - ), + Runtime: runtime, Entities: new(entityMap)); File.WriteAllText( diff --git a/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs b/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs index 16c850b0cd..5a9d783376 100644 --- a/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs +++ b/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs @@ -2380,13 +2380,22 @@ public void TestRuntimeConfigSetupWithNonJsonConstructor() DisplayName = "DefaultPageSize cannot be 0")] [DataRow(true, 101, 100, "Pagination options invalid. The default page size cannot be greater than max page size", DisplayName = "DefaultPageSize cannot be greater than MaxPageSize")] + [DataRow(false, null, null, "", (int)PaginationOptions.DEFAULT_PAGE_SIZE, (int)PaginationOptions.MAX_PAGE_SIZE, null, + DisplayName = "NextLinkRelative should be false when no value provided in config")] + [DataRow(false, null, null, "", (int)PaginationOptions.DEFAULT_PAGE_SIZE, (int)PaginationOptions.MAX_PAGE_SIZE, true, + DisplayName = "NextLinkRelative should be true when explicitly set to true in config")] + [DataRow(false, null, null, "", (int)PaginationOptions.DEFAULT_PAGE_SIZE, (int)PaginationOptions.MAX_PAGE_SIZE, false, + DisplayName = "NextLinkRelative should be false when explicitly set to false in config")] + [DataRow(false, 1000, 10000, "", 1000, 10000, true, + DisplayName = "NextLinkRelative with custom page sizes")] public void ValidatePaginationOptionsInConfig( bool exceptionExpected, int? defaultPageSize, int? maxPageSize, string expectedExceptionMessage, int? 
expectedDefaultPageSize = null, - int? expectedMaxPageSize = null) + int? expectedMaxPageSize = null, + bool? nextLinkRelative = null) { try { @@ -2397,12 +2406,13 @@ public void ValidatePaginationOptionsInConfig( Rest: new(), GraphQL: new(), Host: new(Cors: null, Authentication: null), - Pagination: new PaginationOptions(defaultPageSize, maxPageSize) + Pagination: new PaginationOptions(defaultPageSize, maxPageSize, nextLinkRelative) ), Entities: new(new Dictionary())); Assert.AreEqual((uint)expectedDefaultPageSize, runtimeConfig.DefaultPageSize()); Assert.AreEqual((uint)expectedMaxPageSize, runtimeConfig.MaxPageSize()); + Assert.AreEqual(expected: nextLinkRelative ?? false, actual: runtimeConfig.NextLinkRelative()); } catch (DataApiBuilderException dabException) { From b032ff8e44294d08dd919d35e3bce82a0d3bc5f4 Mon Sep 17 00:00:00 2001 From: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> Date: Fri, 8 Aug 2025 06:29:08 +0000 Subject: [PATCH 47/79] Change use of DefaultAzureCredential to ManagedIdentityCredential for Azure Log Analytics (#2811) ## Why make this change? - It resolves issue #2810 `ManagedIdentityCredential` class only authenticates with Azure Managed Identity which makes it a better fit to use in AzureLogAnalytics than `DefaultAzureCredential`. ## What is this change? We stop using the `DefaultAzureCredential` class and instead start using the `ManagedIdentityCredential` class. Important Note: As of this moment with this change, the Azure Log Analytics feature can only authenticate against system-assigned identities. ## How was this tested? - [ ] Integration Tests - [ ] Unit Tests - [X] Manual Tests This change cannot be directly tested in the pipeline so it was tested by using a VM and ensuring the logs are correctly sent to Azure Log Analytics table. - See #2787 for more information on how this was set up. 
## Sample Request(s) image --- src/Service/Startup.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Service/Startup.cs b/src/Service/Startup.cs index 57858ff2f5..a98f2ab15c 100644 --- a/src/Service/Startup.cs +++ b/src/Service/Startup.cs @@ -185,7 +185,7 @@ public void ConfigureServices(IServiceCollection services) services.AddSingleton(sp => { AzureLogAnalyticsOptions options = runtimeConfig.Runtime.Telemetry.AzureLogAnalytics; - DefaultAzureCredential credential = new(); + ManagedIdentityCredential credential = new(); LogsIngestionClient logsIngestionClient = new(new Uri(options.Auth!.DceEndpoint!), credential); return new AzureLogAnalyticsFlusherService(options, CustomLogCollector, logsIngestionClient, _logger); }); From c5c7031a86275ea5b33cf11e31acac48965b9185 Mon Sep 17 00:00:00 2001 From: souvikghosh04 Date: Sat, 9 Aug 2025 04:45:23 +0530 Subject: [PATCH 48/79] Update DAB version to 1.7 (#2808) ## Why make this change? Update DAB version to 1.7 --- src/Directory.Build.props | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Directory.Build.props b/src/Directory.Build.props index 7427cfb1c2..26ad392ae8 100644 --- a/src/Directory.Build.props +++ b/src/Directory.Build.props @@ -2,7 +2,7 @@ enable ..\out - 1.6 + 1.7 From ab6f32c4d15aa8b038d47eb089cf3c8c69d8471b Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Thu, 14 Aug 2025 15:20:16 +0200 Subject: [PATCH 49/79] aaaa --- src/Service/dab-config.json | 148 +++++++++++++++++++++++++++++++++++- 1 file changed, 145 insertions(+), 3 deletions(-) diff --git a/src/Service/dab-config.json b/src/Service/dab-config.json index efe95b9324..31cde65058 100644 --- a/src/Service/dab-config.json +++ b/src/Service/dab-config.json @@ -2,7 +2,7 @@ "$schema": "https://github.com/Azure/data-api-builder/releases/latest/download/dab.draft.schema.json", "data-source": { "database-type": "mssql", - "connection-string": "Data 
Source=nqf4kgvoqm4ufazdzriupb2pay-doa2ptopus4ufglh5rxt3is4yi.database.fabric.microsoft.com,1433;User ID=8bfaf0d6-fa20-4ed5-a450-0005ceb77729;Password=z5y8Q~hLcfdAflVrfnYoVxdavIJXZb5tlH~tAbRn;Pooling=True;Min Pool Size=0;Max Pool Size=100;Multiple Active Result Sets=False;Connect Timeout=30;Encrypt=False;Trust Server Certificate=True;Authentication=ActiveDirectoryServicePrincipal;Initial Catalog=apiLayer-fad0b3db-cb87-4a73-9c54-e1fc417bc08c", + "connection-string": "Data Source=nqf4kgvoqm4ufazdzriupb2pay-hmnbvxar2mgu7e3ng27fsqy3we.database.fabric.microsoft.com,1433;User ID=8bfaf0d6-fa20-4ed5-a450-0005ceb77729;Password=z5y8Q~hLcfdAflVrfnYoVxdavIJXZb5tlH~tAbRn;Pooling=True;Min Pool Size=0;Max Pool Size=100;Multiple Active Result Sets=False;Connect Timeout=30;Encrypt=False;Trust Server Certificate=True;Authentication=ActiveDirectoryServicePrincipal;Initial Catalog=apiLayer-345587c4-1232-457e-9761-b6bca3d72e2e", "options": { "set-session-context": true } @@ -45,7 +45,7 @@ "entities": { "BillOfLading": { "source": { - "object": "silver_ops.BillOfLading", + "object": "silver_ops.v_BillOfLading", "type": "table", "key-fields": [ "systemId" @@ -110,7 +110,7 @@ }, "BillOfLadingParty": { "source": { - "object": "silver_ops.BillOfLadingParty", + "object": "silver_ops.v_BillOfLadingParty", "type": "table", "key-fields": [ "systemId" @@ -1694,6 +1694,39 @@ "ttl-seconds": 120 } }, + "geoPort": { + "source": { + "object": "silver_ops.Port", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "geoPort", + "plural": "geoPorts" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, "PurchaseDocument": { "source": { "object": "silver_ops.PurchaseDocument", @@ -2405,6 +2438,52 @@ } ] }, + "TariffCarrier": { + "source": { + "object": "silver_ops.v_TariffVendors", + "type": 
"table", + "key-fields": [ + "SpecificCustVendNo" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TariffCarrier", + "plural": "TariffCarriers" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Tariff": { + "cardinality": "many", + "target.entity": "Tariff", + "source.fields": [ + "SpecificCustVendNo" + ], + "target.fields": [ + "SpecificCustVendNo" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "Tariff": { "source": { "object": "silver_ops.v_Tariff", @@ -2424,6 +2503,16 @@ "enabled": true }, "relationships": { + "TariffVendor": { + "cardinality": "one", + "target.entity": "TariffCarrier", + "source.fields": [ + "SpecificCustVendNo" + ], + "target.fields": [ + "SpecificCustVendNo" + ] + }, "TariffSurcharge": { "cardinality": "many", "target.entity": "TariffSurcharge", @@ -2846,6 +2935,51 @@ } ] }, + "DelayEvent": { + "source": { + "object": "silver_trk.ww_DelayEvent", + "type": "table", + "key-fields": [ + "delayEventId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "DelayEvent", + "plural": "DelayEvents" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "ShipmentEquipment": { + "cardinality": "one", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentEquipmentId" + ], + "target.fields": [ + "id" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, "TrackingEvent": { "source": { "object": "silver_trk.v_TrackingEvent", @@ -2933,6 +3067,14 @@ "enabled": true, "ttl-seconds": 120 }, + "relationships": { + "geoPort": { + "cardinality": "one", + "target.entity": "geoPort", + "source.fields": [ "locode" ], + "target.fields": [ "Code" ] + } + }, "permissions": [ { "role": "anonymous", From 
feb522f71bc245cc26495124ef38d2d45f6d462a Mon Sep 17 00:00:00 2001 From: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> Date: Fri, 15 Aug 2025 18:52:14 +0000 Subject: [PATCH 50/79] Add Serialization & Deserialization of Sink File Properties (#2752) ## Why make this change? - This change solves issue #2576. We need to add the new `File Sink` properties that will later be used to send logs to `.txt` files locally. The properties need to have all the necessary components to be serialized and deserialized from the config file. ## What is this change? - This change adds the `File Sink` properties to the schema file. - Creates a new file `FileSinkConverter.cs` where the properties are serialized and deserialized. - Creates a new file `FileSinkOptions.cs` where the deserialized properties are turned to usable objects and adds the object to the `Telemetry` options. #### JSON Configuration Schema The configuration now supports the following structure under `runtime.telemetry`: ```json { "runtime": { "telemetry": { "file": { "enabled": true, "path": "/logs/dab-log.txt", "rolling-interval": "Day", "retained-file-count-limit": 7, "file-size-limit-bytes": 1048576 } } } } ``` ## How was this tested? - [ ] Integration Tests - [x] Unit Tests - [x] Manual Tests Tested that the newly created properties inside the config file are saved correctly as objects inside DAB and if the properties are written incorrectly, an exception is raised. 
--------- Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> --- schemas/dab.draft.schema.json | 34 ++++ src/Config/Converters/FileSinkConverter.cs | 165 +++++++++++++++++ src/Config/ObjectModel/FileSinkOptions.cs | 167 ++++++++++++++++++ src/Config/ObjectModel/RollingIntervalMode.cs | 44 +++++ src/Config/ObjectModel/TelemetryOptions.cs | 6 + src/Config/RuntimeConfigLoader.cs | 1 + .../Configuration/ConfigurationTests.cs | 101 ++++++++++- ...untimeConfigLoaderJsonDeserializerTests.cs | 4 +- 8 files changed, 514 insertions(+), 8 deletions(-) create mode 100644 src/Config/Converters/FileSinkConverter.cs create mode 100644 src/Config/ObjectModel/FileSinkOptions.cs create mode 100644 src/Config/ObjectModel/RollingIntervalMode.cs diff --git a/schemas/dab.draft.schema.json b/schemas/dab.draft.schema.json index 94ee03e500..fafdefc574 100644 --- a/schemas/dab.draft.schema.json +++ b/schemas/dab.draft.schema.json @@ -483,6 +483,40 @@ "required": [ "auth" ] } }, + "file": { + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "description": "Enable/disable file sink telemetry logging.", + "default": false + }, + "path": { + "type": "string", + "description": "File path for telemetry logs.", + "default": "/logs/dab-log.txt" + }, + "rolling-interval": { + "type": "string", + "description": "Rolling interval for log files.", + "default": "Day", + "enum": ["Minute", "Hour", "Day", "Month", "Year", "Infinite"] + }, + "retained-file-count-limit": { + "type": "integer", + "description": "Maximum number of retained log files.", + "default": 1, + "minimum": 1 + }, + "file-size-limit-bytes": { + "type": "integer", + "description": "Maximum file size in bytes before rolling.", + "default": 1048576, + "minimum": 1 + } + } + }, "log-level": { "type": "object", "description": "Global configuration of log level, defines logging severity levels for specific classes, when 'null' it will set logging level 
based on 'host: mode' property", diff --git a/src/Config/Converters/FileSinkConverter.cs b/src/Config/Converters/FileSinkConverter.cs new file mode 100644 index 0000000000..e5d68ca20e --- /dev/null +++ b/src/Config/Converters/FileSinkConverter.cs @@ -0,0 +1,165 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json; +using System.Text.Json.Serialization; +using Azure.DataApiBuilder.Config.ObjectModel; + +namespace Azure.DataApiBuilder.Config.Converters; +class FileSinkConverter : JsonConverter +{ + // Determines whether to replace environment variable with its + // value or not while deserializing. + private bool _replaceEnvVar; + + /// + /// Whether to replace environment variable with its value or not while deserializing. + /// + public FileSinkConverter(bool replaceEnvVar) + { + _replaceEnvVar = replaceEnvVar; + } + + /// + /// Defines how DAB reads File Sink options and defines which values are + /// used to instantiate FileSinkOptions. + /// + /// Thrown when improperly formatted File Sink options are provided. + public override FileSinkOptions? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + if (reader.TokenType == JsonTokenType.StartObject) + { + bool? enabled = null; + string? path = null; + RollingIntervalMode? rollingInterval = null; + int? retainedFileCountLimit = null; + int? fileSizeLimitBytes = null; + + while (reader.Read()) + { + if (reader.TokenType == JsonTokenType.EndObject) + { + return new FileSinkOptions(enabled, path, rollingInterval, retainedFileCountLimit, fileSizeLimitBytes); + } + + string? 
propertyName = reader.GetString(); + + reader.Read(); + switch (propertyName) + { + case "enabled": + if (reader.TokenType is not JsonTokenType.Null) + { + enabled = reader.GetBoolean(); + } + + break; + + case "path": + if (reader.TokenType is not JsonTokenType.Null) + { + path = reader.DeserializeString(_replaceEnvVar); + } + + break; + + case "rolling-interval": + if (reader.TokenType is not JsonTokenType.Null) + { + rollingInterval = EnumExtensions.Deserialize(reader.DeserializeString(_replaceEnvVar)!); + } + + break; + + case "retained-file-count-limit": + if (reader.TokenType is not JsonTokenType.Null) + { + try + { + retainedFileCountLimit = reader.GetInt32(); + } + catch (FormatException) + { + throw new JsonException($"The JSON token value is of the incorrect numeric format."); + } + + if (retainedFileCountLimit <= 0) + { + throw new JsonException($"Invalid retained-file-count-limit: {retainedFileCountLimit}. Specify a number > 0."); + } + } + + break; + + case "file-size-limit-bytes": + if (reader.TokenType is not JsonTokenType.Null) + { + try + { + fileSizeLimitBytes = reader.GetInt32(); + } + catch (FormatException) + { + throw new JsonException($"The JSON token value is of the incorrect numeric format."); + } + + if (retainedFileCountLimit <= 0) + { + throw new JsonException($"Invalid file-size-limit-bytes: {fileSizeLimitBytes}. Specify a number > 0."); + } + } + + break; + + default: + throw new JsonException($"Unexpected property {propertyName}"); + } + } + } + + throw new JsonException("Failed to read the File Sink Options"); + } + + /// + /// When writing the FileSinkOptions back to a JSON file, only write the properties + /// if they are user provided. This avoids polluting the written JSON file with properties + /// the user most likely omitted when writing the original DAB runtime config file. + /// This Write operation is only used when a RuntimeConfig object is serialized to JSON. 
+ /// + public override void Write(Utf8JsonWriter writer, FileSinkOptions value, JsonSerializerOptions options) + { + writer.WriteStartObject(); + + if (value?.UserProvidedEnabled is true) + { + writer.WritePropertyName("enabled"); + JsonSerializer.Serialize(writer, value.Enabled, options); + } + + if (value?.UserProvidedPath is true) + { + writer.WritePropertyName("path"); + JsonSerializer.Serialize(writer, value.Path, options); + } + + if (value?.UserProvidedRollingInterval is true) + { + writer.WritePropertyName("rolling-interval"); + JsonSerializer.Serialize(writer, value.RollingInterval, options); + } + + if (value?.UserProvidedRetainedFileCountLimit is true) + { + writer.WritePropertyName("retained-file-count-limit"); + JsonSerializer.Serialize(writer, value.RetainedFileCountLimit, options); + } + + if (value?.UserProvidedFileSizeLimitBytes is true) + { + writer.WritePropertyName("file-size-limit-bytes"); + JsonSerializer.Serialize(writer, value.FileSizeLimitBytes, options); + } + + writer.WriteEndObject(); + } +} diff --git a/src/Config/ObjectModel/FileSinkOptions.cs b/src/Config/ObjectModel/FileSinkOptions.cs new file mode 100644 index 0000000000..e6cd20810b --- /dev/null +++ b/src/Config/ObjectModel/FileSinkOptions.cs @@ -0,0 +1,167 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Serialization; + +namespace Azure.DataApiBuilder.Config.ObjectModel; + +/// +/// Represents the options for configuring file sink telemetry. +/// +public record FileSinkOptions +{ + /// + /// Default enabled for File Sink. + /// + public const bool DEFAULT_ENABLED = false; + + /// + /// Default path for File Sink. + /// + public const string DEFAULT_PATH = "/logs/dab-log.txt"; + + /// + /// Default rolling interval for File Sink. + /// + public const string DEFAULT_ROLLING_INTERVAL = nameof(RollingIntervalMode.Day); + + /// + /// Default retained file count limit for File Sink. 
+ /// + public const int DEFAULT_RETAINED_FILE_COUNT_LIMIT = 1; + + /// + /// Default file size limit bytes for File Sink. + /// + public const int DEFAULT_FILE_SIZE_LIMIT_BYTES = 1048576; + + /// + /// Whether File Sink is enabled. + /// + public bool Enabled { get; init; } + + /// + /// Path to the file where logs will be uploaded. + /// + public string? Path { get; init; } + + /// + /// Time it takes for files with logs to be discarded. + /// + public string? RollingInterval { get; init; } + + /// + /// Amount of files that can exist simultaneously in which logs are saved. + /// + public int? RetainedFileCountLimit { get; init; } + + /// + /// File size limit in bytes before a new file needs to be created. + /// + public int? FileSizeLimitBytes { get; init; } + + [JsonConstructor] + public FileSinkOptions(bool? enabled = null, string? path = null, RollingIntervalMode? rollingInterval = null, int? retainedFileCountLimit = null, int? fileSizeLimitBytes = null) + { + if (enabled is not null) + { + Enabled = (bool)enabled; + UserProvidedEnabled = true; + } + else + { + Enabled = DEFAULT_ENABLED; + } + + if (path is not null) + { + Path = path; + UserProvidedPath = true; + } + else + { + Path = DEFAULT_PATH; + } + + if (rollingInterval is not null) + { + RollingInterval = rollingInterval.ToString(); + UserProvidedRollingInterval = true; + } + else + { + RollingInterval = DEFAULT_ROLLING_INTERVAL; + } + + if (retainedFileCountLimit is not null) + { + RetainedFileCountLimit = retainedFileCountLimit; + UserProvidedRetainedFileCountLimit = true; + } + else + { + RetainedFileCountLimit = DEFAULT_RETAINED_FILE_COUNT_LIMIT; + } + + if (fileSizeLimitBytes is not null) + { + FileSizeLimitBytes = fileSizeLimitBytes; + UserProvidedFileSizeLimitBytes = true; + } + else + { + FileSizeLimitBytes = DEFAULT_FILE_SIZE_LIMIT_BYTES; + } + } + + /// + /// Flag which informs CLI and JSON serializer whether to write enabled + /// property/value to the runtime config file. 
+ /// When user doesn't provide the enabled property/value, which signals DAB to use the default, + /// the DAB CLI should not write the default value to a serialized config. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(Enabled))] + public bool UserProvidedEnabled { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write path + /// property/value to the runtime config file. + /// When user doesn't provide the path property/value, which signals DAB to use the default, + /// the DAB CLI should not write the default value to a serialized config. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(Path))] + public bool UserProvidedPath { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write rolling-interval + /// property/value to the runtime config file. + /// When user doesn't provide the rolling-interval property/value, which signals DAB to use the default, + /// the DAB CLI should not write the default value to a serialized config. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(RollingInterval))] + public bool UserProvidedRollingInterval { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write retained-file-count-limit + /// property/value to the runtime config file. + /// When user doesn't provide the retained-file-count-limit property/value, which signals DAB to use the default, + /// the DAB CLI should not write the default value to a serialized config. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(RetainedFileCountLimit))] + public bool UserProvidedRetainedFileCountLimit { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write file-size-limit-bytes + /// property/value to the runtime config file. 
+ /// When user doesn't provide the file-size-limit-bytes property/value, which signals DAB to use the default, + /// the DAB CLI should not write the default value to a serialized config. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(FileSizeLimitBytes))] + public bool UserProvidedFileSizeLimitBytes { get; init; } = false; +} diff --git a/src/Config/ObjectModel/RollingIntervalMode.cs b/src/Config/ObjectModel/RollingIntervalMode.cs new file mode 100644 index 0000000000..df6d77e67b --- /dev/null +++ b/src/Config/ObjectModel/RollingIntervalMode.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json.Serialization; + +namespace Azure.DataApiBuilder.Config.ObjectModel; + +/// +/// Represents the rolling interval options for file sink. +/// The time it takes between the creation of new files. +/// +[JsonConverter(typeof(JsonStringEnumConverter))] +public enum RollingIntervalMode +{ + /// + /// The log file will never roll; no time period information will be appended to the log file name. + /// + Infinite, + + /// + /// Roll every year. Filenames will have a four-digit year appended in the pattern yyyy. + /// + Year, + + /// + /// Roll every calendar month. Filenames will have yyyyMM appended. + /// + Month, + + /// + /// Roll every day. Filenames will have yyyyMMdd appended. + /// + Day, + + /// + /// Roll every hour. Filenames will have yyyyMMddHH appended. + /// + Hour, + + /// + /// Roll every minute. Filenames will have yyyyMMddHHmm appended. + /// + Minute +} diff --git a/src/Config/ObjectModel/TelemetryOptions.cs b/src/Config/ObjectModel/TelemetryOptions.cs index 157b0d03b2..b0343e53bc 100644 --- a/src/Config/ObjectModel/TelemetryOptions.cs +++ b/src/Config/ObjectModel/TelemetryOptions.cs @@ -9,10 +9,16 @@ namespace Azure.DataApiBuilder.Config.ObjectModel; /// /// Represents the options for telemetry. 
/// +/// Options for configuring Application Insights. +/// Options for configuring Open Telemetry. +/// Options for configuring Azure Log Analytics. +/// Options for configuring File Sink. +/// Options for configuring the Log Level filters. public record TelemetryOptions( ApplicationInsightsOptions? ApplicationInsights = null, OpenTelemetryOptions? OpenTelemetry = null, AzureLogAnalyticsOptions? AzureLogAnalytics = null, + FileSinkOptions? File = null, Dictionary? LoggerLevel = null) { [JsonPropertyName("log-level")] diff --git a/src/Config/RuntimeConfigLoader.cs b/src/Config/RuntimeConfigLoader.cs index 84f8a8b723..4a220af0ea 100644 --- a/src/Config/RuntimeConfigLoader.cs +++ b/src/Config/RuntimeConfigLoader.cs @@ -261,6 +261,7 @@ public static JsonSerializerOptions GetSerializationOptions( options.Converters.Add(new AKVRetryPolicyOptionsConverterFactory(replaceEnvVar)); options.Converters.Add(new AzureLogAnalyticsOptionsConverterFactory(replaceEnvVar)); options.Converters.Add(new AzureLogAnalyticsAuthOptionsConverter(replaceEnvVar)); + options.Converters.Add(new FileSinkConverter(replaceEnvVar)); if (replaceEnvVar) { diff --git a/src/Service.Tests/Configuration/ConfigurationTests.cs b/src/Service.Tests/Configuration/ConfigurationTests.cs index 5f71cc4d77..98cb89d919 100644 --- a/src/Service.Tests/Configuration/ConfigurationTests.cs +++ b/src/Service.Tests/Configuration/ConfigurationTests.cs @@ -4086,7 +4086,7 @@ public void AzureLogAnalyticsSerialization( string expectedDabIdentifier, int expectedFlushIntSec) { - //Check if auth property and its values are expected to exist + // Check if auth property and its values are expected to exist bool expectedExistEnabled = enabled is not null; bool expectedExistDabIdentifier = dabIdentifier is not null; bool expectedExistFlushIntSec = flushIntSec is not null; @@ -4096,7 +4096,8 @@ public void AzureLogAnalyticsSerialization( AzureLogAnalyticsAuthOptions authOptions = new(customTableName, dcrImmutableId, dceEndpoint); 
AzureLogAnalyticsOptions azureLogAnalyticsOptions = new(enabled, authOptions, dabIdentifier, flushIntSec); - RuntimeConfig configWithCustomLogLevel = InitializeRuntimeWithAzureLogAnalytics(azureLogAnalyticsOptions); + TelemetryOptions telemetryOptions = new(AzureLogAnalytics: azureLogAnalyticsOptions); + RuntimeConfig configWithCustomLogLevel = InitializeRuntimeWithTelemetry(telemetryOptions); string configWithCustomLogLevelJson = configWithCustomLogLevel.ToJson(); Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(configWithCustomLogLevelJson, out RuntimeConfig? deserializedRuntimeConfig)); @@ -4134,10 +4135,10 @@ public void AzureLogAnalyticsSerialization( Assert.AreEqual(expectedFlushIntSec, flushIntSecElement.GetInt32()); } - //Validate auth property exists inside of azure-log-analytics + // Validate auth property exists inside of azure-log-analytics bool authExists = azureLogAnalyticsElement.TryGetProperty("auth", out JsonElement authElement); - //Validate the values inside the auth properties are of expected value + // Validate the values inside the auth properties are of expected value if (authExists) { bool customTableNameExists = authElement.TryGetProperty("custom-table-name", out JsonElement customTableNameElement); @@ -4164,12 +4165,98 @@ public void AzureLogAnalyticsSerialization( } } + /// + /// Tests different File Sink values to see if they are serialized and deserialized correctly to the Json config + /// + [DataTestMethod] + [TestCategory(TestCategory.MSSQL)] + [DataRow(true, "/file/path/exists.txt", RollingIntervalMode.Minute, 27, 256, true, "/file/path/exists.txt", RollingIntervalMode.Minute, 27, 256)] + [DataRow(true, "/test/path.csv", RollingIntervalMode.Hour, 10, 3000, true, "/test/path.csv", RollingIntervalMode.Hour, 10, 3000)] + [DataRow(false, "C://absolute/file/path.log", RollingIntervalMode.Month, 2147483647, 2048, false, "C://absolute/file/path.log", RollingIntervalMode.Month, 2147483647, 2048)] + [DataRow(false, 
"D://absolute/test/path.txt", RollingIntervalMode.Year, 10, 2147483647, false, "D://absolute/test/path.txt", RollingIntervalMode.Year, 10, 2147483647)] + [DataRow(false, "", RollingIntervalMode.Infinite, 5, 512, false, "", RollingIntervalMode.Infinite, 5, 512)] + [DataRow(null, null, null, null, null, false, "/logs/dab-log.txt", RollingIntervalMode.Day, 1, 1048576)] + public void FileSinkSerialization( + bool? enabled, + string? path, + RollingIntervalMode? rollingInterval, + int? retainedFileCountLimit, + int? fileSizeLimitBytes, + bool expectedEnabled, + string expectedPath, + RollingIntervalMode expectedRollingInterval, + int expectedRetainedFileCountLimit, + int expectedFileSizeLimitBytes) + { + // Check if file values are expected to exist + bool isEnabledNull = enabled is null; + bool isPathNull = path is null; + bool isRollingIntervalNull = rollingInterval is null; + bool isRetainedFileCountLimitNull = retainedFileCountLimit is null; + bool isFileSizeLimitBytesNull = fileSizeLimitBytes is null; + + FileSinkOptions fileOptions = new(enabled, path, rollingInterval, retainedFileCountLimit, fileSizeLimitBytes); + TelemetryOptions telemetryOptions = new(File: fileOptions); + RuntimeConfig configWithCustomLogLevel = InitializeRuntimeWithTelemetry(telemetryOptions); + string configWithCustomLogLevelJson = configWithCustomLogLevel.ToJson(); + Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(configWithCustomLogLevelJson, out RuntimeConfig? 
deserializedRuntimeConfig)); + + string serializedConfig = deserializedRuntimeConfig.ToJson(); + + using (JsonDocument parsedDocument = JsonDocument.Parse(serializedConfig)) + { + JsonElement root = parsedDocument.RootElement; + JsonElement runtimeElement = root.GetProperty("runtime"); + + // Validate file property exists in runtime + JsonElement telemetryElement = runtimeElement.GetProperty("telemetry"); + bool filePropertyExists = telemetryElement.TryGetProperty("file", out JsonElement fileElement); + Assert.AreEqual(expected: true, actual: filePropertyExists); + + // Validate the values inside the file properties are of expected value + bool enabledExists = fileElement.TryGetProperty("enabled", out JsonElement enabledElement); + Assert.AreEqual(expected: !isEnabledNull, actual: enabledExists); + if (enabledExists) + { + Assert.AreEqual(expectedEnabled, enabledElement.GetBoolean()); + } + + bool pathExists = fileElement.TryGetProperty("path", out JsonElement pathElement); + Assert.AreEqual(expected: !isPathNull, actual: pathExists); + if (pathExists) + { + Assert.AreEqual(expectedPath, pathElement.GetString()); + } + + bool rollingIntervalExists = fileElement.TryGetProperty("rolling-interval", out JsonElement rollingIntervalElement); + Assert.AreEqual(expected: !isRollingIntervalNull, actual: rollingIntervalExists); + if (rollingIntervalExists) + { + Assert.AreEqual(expectedRollingInterval.ToString(), rollingIntervalElement.GetString()); + } + + bool retainedFileCountLimitExists = fileElement.TryGetProperty("retained-file-count-limit", out JsonElement retainedFileCountLimitElement); + Assert.AreEqual(expected: !isRetainedFileCountLimitNull, actual: retainedFileCountLimitExists); + if (retainedFileCountLimitExists) + { + Assert.AreEqual(expectedRetainedFileCountLimit, retainedFileCountLimitElement.GetInt32()); + } + + bool fileSizeLimitBytesExists = fileElement.TryGetProperty("file-size-limit-bytes", out JsonElement fileSizeLimitBytesElement); + 
Assert.AreEqual(expected: !isFileSizeLimitBytesNull, actual: fileSizeLimitBytesExists); + if (fileSizeLimitBytesExists) + { + Assert.AreEqual(expectedFileSizeLimitBytes, fileSizeLimitBytesElement.GetInt32()); + } + } + } + #nullable disable /// - /// Helper method to create RuntimeConfig with specificed LogLevel value + /// Helper method to create RuntimeConfig with specified Telemetry options /// - private static RuntimeConfig InitializeRuntimeWithAzureLogAnalytics(AzureLogAnalyticsOptions azureLogAnalyticsOptions) + private static RuntimeConfig InitializeRuntimeWithTelemetry(TelemetryOptions telemetryOptions) { TestHelper.SetupDatabaseEnvironment(MSSQL_ENVIRONMENT); @@ -4183,7 +4270,7 @@ private static RuntimeConfig InitializeRuntimeWithAzureLogAnalytics(AzureLogAnal Rest: new(), GraphQL: new(), Host: new(null, null), - Telemetry: new(AzureLogAnalytics: azureLogAnalyticsOptions) + Telemetry: telemetryOptions ), Entities: baseConfig.Entities ); diff --git a/src/Service.Tests/UnitTests/RuntimeConfigLoaderJsonDeserializerTests.cs b/src/Service.Tests/UnitTests/RuntimeConfigLoaderJsonDeserializerTests.cs index a7aaf21508..8d7dae0541 100644 --- a/src/Service.Tests/UnitTests/RuntimeConfigLoaderJsonDeserializerTests.cs +++ b/src/Service.Tests/UnitTests/RuntimeConfigLoaderJsonDeserializerTests.cs @@ -273,7 +273,7 @@ public void TestNullableOptionalProps() TryParseAndAssertOnDefaults("{" + emptyHostSubProps, out _); // Test with empty telemetry sub-properties - minJsonWithTelemetrySubProps.Append(@"{ ""application-insights"": { }, ""log-level"": { }, ""open-telemetry"": { }, ""azure-log-analytics"": { } } }"); + minJsonWithTelemetrySubProps.Append(@"{ ""application-insights"": { }, ""log-level"": { }, ""open-telemetry"": { }, ""azure-log-analytics"": { }, ""file"": { } } }"); string emptyTelemetrySubProps = minJsonWithTelemetrySubProps + "}"; TryParseAndAssertOnDefaults("{" + emptyTelemetrySubProps, out _); @@ -652,6 +652,8 @@ private static bool 
TryParseAndAssertOnDefaults(string json, out RuntimeConfig p || !parsedConfig.Runtime.Telemetry.OpenTelemetry.Enabled); Assert.IsTrue(parsedConfig.Runtime?.Telemetry?.AzureLogAnalytics is null || !parsedConfig.Runtime.Telemetry.AzureLogAnalytics.Enabled); + Assert.IsTrue(parsedConfig.Runtime?.Telemetry?.File is null + || !parsedConfig.Runtime.Telemetry.File.Enabled); return true; } From 5f3643e04401ba146b0d5e72bf958de0f8c6aee5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Aug 2025 12:40:49 -0700 Subject: [PATCH 51/79] Bump dotnet-sdk from 8.0.412 to 8.0.413 (#2806) Bumps [dotnet-sdk](https://github.com/dotnet/sdk) from 8.0.412 to 8.0.413.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=dotnet-sdk&package-manager=dotnet_sdk&previous-version=8.0.412&new-version=8.0.413)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> Co-authored-by: Aniruddh Munde --- global.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/global.json b/global.json index 5db7415f9c..49e22f8151 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "sdk": { - "version": "8.0.412", + "version": "8.0.413", "rollForward": "latestFeature" } } From 5e3e5e224a07be1e4ae3a00b89a6ab978139383e Mon Sep 17 00:00:00 2001 From: M4Al Date: Tue, 19 Aug 2025 20:02:56 +0200 Subject: [PATCH 52/79] Fix Session Context Key set as Read_Only & maintain original roles from the JWT token (#2344) ## Why make this change? - Fixes #2341 - The session context in SQL server is `read_only = 1` which prevents users from doing multiple requests on the same connection. - The row level security is not accurately implemented when using a JWT token. ## What is this change? Changes the session context from `read_only = 1` to `read_only = 0` to allow multiple requests to be done in the same connection, Creates a copy of the original 'roles' from the JWT token to use it on the SQL Filter Predicate to accurately implement row level security. ## How was this tested? - [ ] Integration Tests - [x] Unit Tests Updated `AuthorizationResolver` tests to ensure the original roles copy is working properly. ## Sample Request(s) Sample of a JWT token (only the relevant part) ``` { "aud": "api://ddcf6b31-5d01-407d-97cf-8efefc455d32", "iss": "https://sts.windows.net/9215c785-95c3-49b0-bdba-2062df5aedb5/", "roles": [ "user", "Allow_Customer_OPS025235", "Allow_Customer_OPS004095" ], "ver": "1.0" } ``` X-MS-API-ROLE: user before my change the extra 'roles' that do not match the X-MS-API-ROLE header would never reach the database context. 
With my change you can do things like this in SQL Predicates to filter out only subsets of the data: ``` CREATE FUNCTION dbo.ops_fact_order_Predicate(@CustomerNo varchar(max)) RETURNS TABLE WITH SCHEMABINDING AS RETURN SELECT 1 AS fn_securitypredicate_result WHERE @CustomerNo in ( select trim(replace(replace(replace([value], '"', ''), ']', ''), 'Allow_Customer_', '')) from STRING_SPLIT ( CAST(SESSION_CONTEXT(N'original_roles') as varchar(max)) , ',' , 0) where trim(replace(replace([value], '"', ''), ']', '')) like 'Allow_Customer%' ) CREATE SECURITY POLICY dbo.ops_fact_order_Policy ADD FILTER PREDICATE dbo.ops_fact_order_Predicate(CustomerNo) ON [gold_ops].[ops_fact_order]; ``` --------- Co-authored-by: KobeLenjou Co-authored-by: Aniruddh Munde Co-authored-by: Ruben Cerna Co-authored-by: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> --- src/Config/ObjectModel/AuthenticationOptions.cs | 1 + src/Core/Authorization/AuthorizationResolver.cs | 7 ++++++- src/Core/Resolvers/MsSqlQueryExecutor.cs | 2 +- .../Authorization/AuthorizationResolverUnitTests.cs | 6 ++++-- 4 files changed, 12 insertions(+), 4 deletions(-) diff --git a/src/Config/ObjectModel/AuthenticationOptions.cs b/src/Config/ObjectModel/AuthenticationOptions.cs index 189540fbe6..6750d6e807 100644 --- a/src/Config/ObjectModel/AuthenticationOptions.cs +++ b/src/Config/ObjectModel/AuthenticationOptions.cs @@ -17,6 +17,7 @@ public record AuthenticationOptions(string Provider = nameof(EasyAuthType.Static public const string CLIENT_PRINCIPAL_HEADER = "X-MS-CLIENT-PRINCIPAL"; public const string NAME_CLAIM_TYPE = "name"; public const string ROLE_CLAIM_TYPE = "roles"; + public const string ORIGINAL_ROLE_CLAIM_TYPE = "original_roles"; /// /// Returns whether the configured Provider matches an diff --git a/src/Core/Authorization/AuthorizationResolver.cs b/src/Core/Authorization/AuthorizationResolver.cs index 2ab6e70a4c..0f22b9cd28 100644 --- a/src/Core/Authorization/AuthorizationResolver.cs +++ 
b/src/Core/Authorization/AuthorizationResolver.cs @@ -617,9 +617,14 @@ public static Dictionary> GetAllAuthenticatedUserClaims(Http // into a list and storing that in resolvedClaims using the claimType as the key. foreach (Claim claim in identity.Claims) { - // 'roles' claim has already been processed. + // 'roles' claim has already been processed. But we preserve the original 'roles' claim. if (claim.Type.Equals(AuthenticationOptions.ROLE_CLAIM_TYPE)) { + if (!resolvedClaims.TryAdd(AuthenticationOptions.ORIGINAL_ROLE_CLAIM_TYPE, new List() { claim })) + { + resolvedClaims[AuthenticationOptions.ORIGINAL_ROLE_CLAIM_TYPE].Add(claim); + } + continue; } diff --git a/src/Core/Resolvers/MsSqlQueryExecutor.cs b/src/Core/Resolvers/MsSqlQueryExecutor.cs index 7a0260cd20..5cbe9f6a76 100644 --- a/src/Core/Resolvers/MsSqlQueryExecutor.cs +++ b/src/Core/Resolvers/MsSqlQueryExecutor.cs @@ -284,7 +284,7 @@ public override string GetSessionParamsQuery(HttpContext? httpContext, IDictiona string paramName = $"{SESSION_PARAM_NAME}{counter.Next()}"; parameters.Add(paramName, new(claimValue)); // Append statement to set read only param value - can be set only once for a connection. 
- string statementToSetReadOnlyParam = "EXEC sp_set_session_context " + $"'{claimType}', " + paramName + ", @read_only = 1;"; + string statementToSetReadOnlyParam = "EXEC sp_set_session_context " + $"'{claimType}', " + paramName + ", @read_only = 0;"; sessionMapQuery = sessionMapQuery.Append(statementToSetReadOnlyParam); } diff --git a/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs b/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs index 3c7c31a8ca..733ec15b24 100644 --- a/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs +++ b/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs @@ -1293,7 +1293,8 @@ public void UniqueClaimsResolvedForDbPolicy_SessionCtx_Usage() new("sub", "Aa_0RISCzzZ-abC1De2fGHIjKLMNo123pQ4rStUVWXY"), new("oid", "55296aad-ea7f-4c44-9a4c-bb1e8d43a005"), new(AuthenticationOptions.ROLE_CLAIM_TYPE, TEST_ROLE), - new(AuthenticationOptions.ROLE_CLAIM_TYPE, "ROLE2") + new(AuthenticationOptions.ROLE_CLAIM_TYPE, "ROLE2"), + new(AuthenticationOptions.ROLE_CLAIM_TYPE, "ROLE3") }; //Add identity object to the Mock context object. 
@@ -1315,6 +1316,7 @@ public void UniqueClaimsResolvedForDbPolicy_SessionCtx_Usage() Assert.AreEqual(expected: "Aa_0RISCzzZ-abC1De2fGHIjKLMNo123pQ4rStUVWXY", actual: claimsInRequestContext["sub"], message: "Expected the sub claim to be present."); Assert.AreEqual(expected: "55296aad-ea7f-4c44-9a4c-bb1e8d43a005", actual: claimsInRequestContext["oid"], message: "Expected the oid claim to be present."); Assert.AreEqual(claimsInRequestContext[AuthenticationOptions.ROLE_CLAIM_TYPE], actual: TEST_ROLE, message: "The roles claim should have the value:" + TEST_ROLE); + Assert.AreEqual(expected: "[\"" + TEST_ROLE + "\",\"ROLE2\",\"ROLE3\"]", actual: claimsInRequestContext[AuthenticationOptions.ORIGINAL_ROLE_CLAIM_TYPE], message: "Original roles should be preserved in a new context"); } /// @@ -1365,7 +1367,7 @@ public void ValidateUnauthenticatedUserClaimsAreNotResolvedWhenProcessingUserCla Dictionary resolvedClaims = AuthorizationResolver.GetProcessedUserClaims(context.Object); // Assert - Assert.AreEqual(expected: authenticatedUserclaims.Count, actual: resolvedClaims.Count, message: "Only two claims should be present."); + Assert.AreEqual(expected: authenticatedUserclaims.Count + 1, actual: resolvedClaims.Count, message: "Only " + (authenticatedUserclaims.Count + 1) + " claims should be present."); Assert.AreEqual(expected: "openid", actual: resolvedClaims["scp"], message: "Unexpected scp claim returned."); bool didResolveUnauthenticatedRoleClaim = resolvedClaims[AuthenticationOptions.ROLE_CLAIM_TYPE] == "Don't_Parse_This_Role"; From 51c2dcddc24eb61458866287db58176de073bac1 Mon Sep 17 00:00:00 2001 From: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> Date: Fri, 22 Aug 2025 04:37:50 +0000 Subject: [PATCH 53/79] Add logic to send logs through File Sink (#2825) ## Why make this change? - Closes issue #2578 - In order to complete the File Sink feature, the logic that sends the logs created by DAB to the file is required. ## What is this change? 
- This change implements the logic that sends all the logs from DAB to the file through the path the user requests. This is done with the help of Serilog which is a logging library that simplifies the creation of file sinks. - The `Startup.cs` program creates the Serilog logger pipeline and adds it as part of the services so that it is used later by the `Program.cs` to set the different loggers with the Serilog pipeline and allow the logs to be sent to the file sink. - We also deleted the `RollingIntervalMode.cs` since we discovered that Serilog has its own rolling interval enum class, which makes the one implemented in DAB obsolete. ## How was this tested? - [ ] Integration Tests - [X] Unit Tests Created tests that check if the services needed for the File Sink exist when the File Sink property is enabled. Also, created test to check if the file sink with the appropriate name is created when the property is enabled. ## Sample Request(s) image image --- src/Config/Azure.DataApiBuilder.Config.csproj | 3 +- src/Config/Converters/FileSinkConverter.cs | 5 +- src/Config/ObjectModel/FileSinkOptions.cs | 21 +-- src/Config/ObjectModel/RollingIntervalMode.cs | 44 ----- src/Directory.Packages.props | 2 + .../Configuration/ConfigurationTests.cs | 17 +- .../Configuration/Telemetry/FileSinkTests.cs | 163 ++++++++++++++++++ .../Azure.DataApiBuilder.Service.csproj | 2 + src/Service/Program.cs | 24 ++- src/Service/Startup.cs | 62 ++++++- 10 files changed, 275 insertions(+), 68 deletions(-) delete mode 100644 src/Config/ObjectModel/RollingIntervalMode.cs create mode 100644 src/Service.Tests/Configuration/Telemetry/FileSinkTests.cs diff --git a/src/Config/Azure.DataApiBuilder.Config.csproj b/src/Config/Azure.DataApiBuilder.Config.csproj index 25dd0716f9..a494bc38ae 100644 --- a/src/Config/Azure.DataApiBuilder.Config.csproj +++ b/src/Config/Azure.DataApiBuilder.Config.csproj @@ -18,6 +18,7 @@ + @@ -25,7 +26,7 @@ - + diff --git a/src/Config/Converters/FileSinkConverter.cs 
b/src/Config/Converters/FileSinkConverter.cs index e5d68ca20e..e0107a11f6 100644 --- a/src/Config/Converters/FileSinkConverter.cs +++ b/src/Config/Converters/FileSinkConverter.cs @@ -4,6 +4,7 @@ using System.Text.Json; using System.Text.Json.Serialization; using Azure.DataApiBuilder.Config.ObjectModel; +using Serilog; namespace Azure.DataApiBuilder.Config.Converters; class FileSinkConverter : JsonConverter @@ -31,7 +32,7 @@ public FileSinkConverter(bool replaceEnvVar) { bool? enabled = null; string? path = null; - RollingIntervalMode? rollingInterval = null; + RollingInterval? rollingInterval = null; int? retainedFileCountLimit = null; int? fileSizeLimitBytes = null; @@ -66,7 +67,7 @@ public FileSinkConverter(bool replaceEnvVar) case "rolling-interval": if (reader.TokenType is not JsonTokenType.Null) { - rollingInterval = EnumExtensions.Deserialize(reader.DeserializeString(_replaceEnvVar)!); + rollingInterval = EnumExtensions.Deserialize(reader.DeserializeString(_replaceEnvVar)!); } break; diff --git a/src/Config/ObjectModel/FileSinkOptions.cs b/src/Config/ObjectModel/FileSinkOptions.cs index e6cd20810b..7e6674fcad 100644 --- a/src/Config/ObjectModel/FileSinkOptions.cs +++ b/src/Config/ObjectModel/FileSinkOptions.cs @@ -3,6 +3,7 @@ using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; +using Serilog; namespace Azure.DataApiBuilder.Config.ObjectModel; @@ -19,12 +20,12 @@ public record FileSinkOptions /// /// Default path for File Sink. /// - public const string DEFAULT_PATH = "/logs/dab-log.txt"; + public const string DEFAULT_PATH = @"logs\dab-log.txt"; /// /// Default rolling interval for File Sink. /// - public const string DEFAULT_ROLLING_INTERVAL = nameof(RollingIntervalMode.Day); + public const string DEFAULT_ROLLING_INTERVAL = nameof(Serilog.RollingInterval.Day); /// /// Default retained file count limit for File Sink. @@ -44,25 +45,25 @@ public record FileSinkOptions /// /// Path to the file where logs will be uploaded. 
/// - public string? Path { get; init; } + public string Path { get; init; } /// /// Time it takes for files with logs to be discarded. /// - public string? RollingInterval { get; init; } + public string RollingInterval { get; init; } /// /// Amount of files that can exist simultaneously in which logs are saved. /// - public int? RetainedFileCountLimit { get; init; } + public int RetainedFileCountLimit { get; init; } /// /// File size limit in bytes before a new file needs to be created. /// - public int? FileSizeLimitBytes { get; init; } + public int FileSizeLimitBytes { get; init; } [JsonConstructor] - public FileSinkOptions(bool? enabled = null, string? path = null, RollingIntervalMode? rollingInterval = null, int? retainedFileCountLimit = null, int? fileSizeLimitBytes = null) + public FileSinkOptions(bool? enabled = null, string? path = null, RollingInterval? rollingInterval = null, int? retainedFileCountLimit = null, int? fileSizeLimitBytes = null) { if (enabled is not null) { @@ -86,7 +87,7 @@ public FileSinkOptions(bool? enabled = null, string? path = null, RollingInterva if (rollingInterval is not null) { - RollingInterval = rollingInterval.ToString(); + RollingInterval = ((RollingInterval)rollingInterval).ToString(); UserProvidedRollingInterval = true; } else @@ -96,7 +97,7 @@ public FileSinkOptions(bool? enabled = null, string? path = null, RollingInterva if (retainedFileCountLimit is not null) { - RetainedFileCountLimit = retainedFileCountLimit; + RetainedFileCountLimit = (int)retainedFileCountLimit; UserProvidedRetainedFileCountLimit = true; } else @@ -106,7 +107,7 @@ public FileSinkOptions(bool? enabled = null, string? 
path = null, RollingInterva if (fileSizeLimitBytes is not null) { - FileSizeLimitBytes = fileSizeLimitBytes; + FileSizeLimitBytes = (int)fileSizeLimitBytes; UserProvidedFileSizeLimitBytes = true; } else diff --git a/src/Config/ObjectModel/RollingIntervalMode.cs b/src/Config/ObjectModel/RollingIntervalMode.cs deleted file mode 100644 index df6d77e67b..0000000000 --- a/src/Config/ObjectModel/RollingIntervalMode.cs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -using System.Text.Json.Serialization; - -namespace Azure.DataApiBuilder.Config.ObjectModel; - -/// -/// Represents the rolling interval options for file sink. -/// The time it takes between the creation of new files. -/// -[JsonConverter(typeof(JsonStringEnumConverter))] -public enum RollingIntervalMode -{ - /// - /// The log file will never roll; no time period information will be appended to the log file name. - /// - Infinite, - - /// - /// Roll every year. Filenames will have a four-digit year appended in the pattern yyyy. - /// - Year, - - /// - /// Roll every calendar month. Filenames will have yyyyMM appended. - /// - Month, - - /// - /// Roll every day. Filenames will have yyyyMMdd appended. - /// - Day, - - /// - /// Roll every hour. Filenames will have yyyyMMddHH appended. - /// - Hour, - - /// - /// Roll every minute. Filenames will have yyyyMMddHHmm appended. 
- /// - Minute -} diff --git a/src/Directory.Packages.props b/src/Directory.Packages.props index d00f43c478..da600c9f63 100644 --- a/src/Directory.Packages.props +++ b/src/Directory.Packages.props @@ -60,6 +60,8 @@ + + diff --git a/src/Service.Tests/Configuration/ConfigurationTests.cs b/src/Service.Tests/Configuration/ConfigurationTests.cs index 98cb89d919..2522806049 100644 --- a/src/Service.Tests/Configuration/ConfigurationTests.cs +++ b/src/Service.Tests/Configuration/ConfigurationTests.cs @@ -47,6 +47,7 @@ using Microsoft.VisualStudio.TestTools.UnitTesting; using Moq; using Moq.Protected; +using Serilog; using VerifyMSTest; using static Azure.DataApiBuilder.Config.FileSystemRuntimeConfigLoader; using static Azure.DataApiBuilder.Service.Tests.Configuration.ConfigurationEndpoints; @@ -4170,21 +4171,21 @@ public void AzureLogAnalyticsSerialization( /// [DataTestMethod] [TestCategory(TestCategory.MSSQL)] - [DataRow(true, "/file/path/exists.txt", RollingIntervalMode.Minute, 27, 256, true, "/file/path/exists.txt", RollingIntervalMode.Minute, 27, 256)] - [DataRow(true, "/test/path.csv", RollingIntervalMode.Hour, 10, 3000, true, "/test/path.csv", RollingIntervalMode.Hour, 10, 3000)] - [DataRow(false, "C://absolute/file/path.log", RollingIntervalMode.Month, 2147483647, 2048, false, "C://absolute/file/path.log", RollingIntervalMode.Month, 2147483647, 2048)] - [DataRow(false, "D://absolute/test/path.txt", RollingIntervalMode.Year, 10, 2147483647, false, "D://absolute/test/path.txt", RollingIntervalMode.Year, 10, 2147483647)] - [DataRow(false, "", RollingIntervalMode.Infinite, 5, 512, false, "", RollingIntervalMode.Infinite, 5, 512)] - [DataRow(null, null, null, null, null, false, "/logs/dab-log.txt", RollingIntervalMode.Day, 1, 1048576)] + [DataRow(true, "/file/path/exists.txt", RollingInterval.Minute, 27, 256, true, "/file/path/exists.txt", RollingInterval.Minute, 27, 256)] + [DataRow(true, "/test/path.csv", RollingInterval.Hour, 10, 3000, true, "/test/path.csv", 
RollingInterval.Hour, 10, 3000)] + [DataRow(false, "C://absolute/file/path.log", RollingInterval.Month, 2147483647, 2048, false, "C://absolute/file/path.log", RollingInterval.Month, 2147483647, 2048)] + [DataRow(false, "D://absolute/test/path.txt", RollingInterval.Year, 10, 2147483647, false, "D://absolute/test/path.txt", RollingInterval.Year, 10, 2147483647)] + [DataRow(false, "", RollingInterval.Infinite, 5, 512, false, "", RollingInterval.Infinite, 5, 512)] + [DataRow(null, null, null, null, null, false, "/logs/dab-log.txt", RollingInterval.Day, 1, 1048576)] public void FileSinkSerialization( bool? enabled, string? path, - RollingIntervalMode? rollingInterval, + RollingInterval? rollingInterval, int? retainedFileCountLimit, int? fileSizeLimitBytes, bool expectedEnabled, string expectedPath, - RollingIntervalMode expectedRollingInterval, + RollingInterval expectedRollingInterval, int expectedRetainedFileCountLimit, int expectedFileSizeLimitBytes) { diff --git a/src/Service.Tests/Configuration/Telemetry/FileSinkTests.cs b/src/Service.Tests/Configuration/Telemetry/FileSinkTests.cs new file mode 100644 index 0000000000..077e0098d8 --- /dev/null +++ b/src/Service.Tests/Configuration/Telemetry/FileSinkTests.cs @@ -0,0 +1,163 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using System.IO; +using System.Net.Http; +using System.Threading.Tasks; +using Azure.DataApiBuilder.Config.ObjectModel; +using Microsoft.AspNetCore.TestHost; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Serilog; +using Serilog.Core; +using static Azure.DataApiBuilder.Service.Tests.Configuration.ConfigurationTests; + +namespace Azure.DataApiBuilder.Service.Tests.Configuration.Telemetry; + +/// +/// Contains tests for File Sink functionality. 
+/// +[TestClass, TestCategory(TestCategory.MSSQL)] +public class FileSinkTests +{ + public TestContext TestContext { get; set; } + + private const string CONFIG_WITH_TELEMETRY = "dab-file-sink-test-config.json"; + private const string CONFIG_WITHOUT_TELEMETRY = "dab-no-file-sink-test-config.json"; + private static RuntimeConfig _configuration; + + /// + /// This is a helper function that creates runtime config file with specified telemetry options. + /// + /// Name of the config file to be created. + /// Whether File Sink is enabled or not. + /// Path where logs will be sent to. + /// Time it takes for logs to roll over to next file. + private static void SetUpTelemetryInConfig(string configFileName, bool isFileSinkEnabled, string fileSinkPath, RollingInterval? rollingInterval = null) + { + DataSource dataSource = new(DatabaseType.MSSQL, + GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); + + _configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions: new(), restOptions: new()); + + TelemetryOptions _testTelemetryOptions = new(File: new FileSinkOptions(isFileSinkEnabled, fileSinkPath, rollingInterval)); + _configuration = _configuration with { Runtime = _configuration.Runtime with { Telemetry = _testTelemetryOptions } }; + + File.WriteAllText(configFileName, _configuration.ToJson()); + } + + /// + /// Cleans up the test environment by deleting the runtime config with telemetry options. + /// + [TestCleanup] + public void CleanUpTelemetryConfig() + { + if (File.Exists(CONFIG_WITH_TELEMETRY)) + { + File.Delete(CONFIG_WITH_TELEMETRY); + } + + if (File.Exists(CONFIG_WITHOUT_TELEMETRY)) + { + File.Delete(CONFIG_WITHOUT_TELEMETRY); + } + } + + /// + /// Tests if the services are correctly enabled for File Sink. 
+ /// + [TestMethod] + public void TestFileSinkServicesEnabled() + { + // Arrange + SetUpTelemetryInConfig(CONFIG_WITH_TELEMETRY, true, "/dab-log-test/file-sink-file.txt"); + + string[] args = new[] + { + $"--ConfigFileName={CONFIG_WITH_TELEMETRY}" + }; + using TestServer server = new(Program.CreateWebHostBuilder(args)); + + // Additional assertions to check if File Sink is enabled correctly in services + IServiceProvider serviceProvider = server.Services; + LoggerConfiguration serilogLoggerConfiguration = serviceProvider.GetService(); + Logger serilogLogger = serviceProvider.GetService(); + + // If serilogLoggerConfiguration and serilogLogger are not null, File Sink is enabled + Assert.IsNotNull(serilogLoggerConfiguration, "LoggerConfiguration for Serilog should be registered."); + Assert.IsNotNull(serilogLogger, "Logger for Serilog should be registered."); + } + + /// + /// Tests if the logs are flushed to the proper path when File Sink is enabled. + /// + /// + /// Tests if the logs are flushed to the proper path when File Sink is enabled. 
+ /// + [DataTestMethod] + [DataRow("file-sink-test-file.txt")] + [DataRow("file-sink-test-file.log")] + [DataRow("file-sink-test-file.csv")] + public async Task TestFileSinkSucceed(string fileName) + { + // Arrange + SetUpTelemetryInConfig(CONFIG_WITH_TELEMETRY, true, fileName, RollingInterval.Infinite); + + string[] args = new[] + { + $"--ConfigFileName={CONFIG_WITH_TELEMETRY}" + }; + using TestServer server = new(Program.CreateWebHostBuilder(args)); + + // Act + using (HttpClient client = server.CreateClient()) + { + HttpRequestMessage restRequest = new(HttpMethod.Get, "/api/Book"); + await client.SendAsync(restRequest); + } + + server.Dispose(); + + // Assert + Assert.IsTrue(File.Exists(fileName)); + + bool containsInfo = false; + string[] allLines = File.ReadAllLines(fileName); + foreach (string line in allLines) + { + containsInfo = line.Contains("INF"); + if (containsInfo) + { + break; + } + } + + Assert.IsTrue(containsInfo); + } + + /// + /// Tests if the services are correctly disabled for File Sink. 
+ /// + [TestMethod] + public void TestFileSinkServicesDisabled() + { + // Arrange + SetUpTelemetryInConfig(CONFIG_WITHOUT_TELEMETRY, false, null); + + string[] args = new[] + { + $"--ConfigFileName={CONFIG_WITHOUT_TELEMETRY}" + }; + using TestServer server = new(Program.CreateWebHostBuilder(args)); + + // Additional assertions to check if File Sink is enabled correctly in services + IServiceProvider serviceProvider = server.Services; + LoggerConfiguration serilogLoggerConfiguration = serviceProvider.GetService(); + Logger serilogLogger = serviceProvider.GetService(); + + // If serilogLoggerConfiguration and serilogLogger are null, File Sink is disabled + Assert.IsNull(serilogLoggerConfiguration, "LoggerConfiguration for Serilog should not be registered."); + Assert.IsNull(serilogLogger, "Logger for Serilog should not be registered."); + } +} diff --git a/src/Service/Azure.DataApiBuilder.Service.csproj b/src/Service/Azure.DataApiBuilder.Service.csproj index bb21361d4b..9f1558e504 100644 --- a/src/Service/Azure.DataApiBuilder.Service.csproj +++ b/src/Service/Azure.DataApiBuilder.Service.csproj @@ -75,6 +75,8 @@ + + diff --git a/src/Service/Program.cs b/src/Service/Program.cs index 7009e489ce..1059fd52ff 100644 --- a/src/Service/Program.cs +++ b/src/Service/Program.cs @@ -12,6 +12,7 @@ using Azure.DataApiBuilder.Service.Telemetry; using Microsoft.ApplicationInsights; using Microsoft.AspNetCore; +using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.Hosting; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.Hosting; @@ -20,6 +21,9 @@ using OpenTelemetry.Exporter; using OpenTelemetry.Logs; using OpenTelemetry.Resources; +using Serilog; +using Serilog.Core; +using Serilog.Extensions.Logging; namespace Azure.DataApiBuilder.Service { @@ -132,9 +136,11 @@ private static ParseResult GetParseResult(Command cmd, string[] args) /// /// Creates a LoggerFactory and add filter with the given LogLevel. /// - /// minimum log level. 
+ /// Minimum log level. /// Telemetry client - public static ILoggerFactory GetLoggerFactoryForLogLevel(LogLevel logLevel, TelemetryClient? appTelemetryClient = null, LogLevelInitializer? logLevelInitializer = null) + /// Hot-reloadable log level + /// Core Serilog logging pipeline + public static ILoggerFactory GetLoggerFactoryForLogLevel(LogLevel logLevel, TelemetryClient? appTelemetryClient = null, LogLevelInitializer? logLevelInitializer = null, Logger? serilogLogger = null) { return LoggerFactory .Create(builder => @@ -209,6 +215,20 @@ public static ILoggerFactory GetLoggerFactoryForLogLevel(LogLevel logLevel, Tele } } + if (Startup.FileSinkOptions.Enabled && serilogLogger is not null) + { + builder.AddSerilog(serilogLogger); + + if (logLevelInitializer is null) + { + builder.AddFilter(category: string.Empty, logLevel); + } + else + { + builder.AddFilter(category: string.Empty, level => level >= logLevelInitializer.MinLogLevel); + } + } + builder.AddConsole(); }); } diff --git a/src/Service/Startup.cs b/src/Service/Startup.cs index a98f2ab15c..ce6b3077a4 100644 --- a/src/Service/Startup.cs +++ b/src/Service/Startup.cs @@ -58,6 +58,8 @@ using OpenTelemetry.Metrics; using OpenTelemetry.Resources; using OpenTelemetry.Trace; +using Serilog; +using Serilog.Core; using StackExchange.Redis; using ZiggyCreatures.Caching.Fusion; using ZiggyCreatures.Caching.Fusion.Backplane.StackExchangeRedis; @@ -76,6 +78,7 @@ public class Startup(IConfiguration configuration, ILogger logger) public static ApplicationInsightsOptions AppInsightsOptions = new(); public static OpenTelemetryOptions OpenTelemetryOptions = new(); public static AzureLogAnalyticsOptions AzureLogAnalyticsOptions = new(); + public static FileSinkOptions FileSinkOptions = new(); public const string NO_HTTPS_REDIRECT_FLAG = "--no-https-redirect"; private readonly HotReloadEventHandler _hotReloadEventHandler = new(); private RuntimeConfigProvider? 
_configProvider; @@ -192,6 +195,23 @@ public void ConfigureServices(IServiceCollection services) services.AddHostedService(sp => sp.GetRequiredService()); } + if (runtimeConfigAvailable + && runtimeConfig?.Runtime?.Telemetry?.File is not null + && runtimeConfig.Runtime.Telemetry.File.Enabled) + { + services.AddSingleton(sp => + { + FileSinkOptions options = runtimeConfig.Runtime.Telemetry.File; + return new LoggerConfiguration().WriteTo.File( + path: options.Path, + rollingInterval: (RollingInterval)Enum.Parse(typeof(RollingInterval), options.RollingInterval), + retainedFileCountLimit: options.RetainedFileCountLimit, + fileSizeLimitBytes: options.FileSizeLimitBytes, + rollOnFileSizeLimit: true); + }); + services.AddSingleton(sp => sp.GetRequiredService().MinimumLevel.Verbose().CreateLogger()); + } + services.AddSingleton(implementationFactory: serviceProvider => { LogLevelInitializer logLevelInit = new(MinimumLogLevel, typeof(RuntimeConfigValidator).FullName, _configProvider, _hotReloadEventHandler); @@ -538,6 +558,7 @@ public void Configure(IApplicationBuilder app, IWebHostEnvironment env, RuntimeC ConfigureApplicationInsightsTelemetry(app, runtimeConfig); ConfigureOpenTelemetry(runtimeConfig); ConfigureAzureLogAnalytics(runtimeConfig); + ConfigureFileSink(app, runtimeConfig); // Config provided before starting the engine. isRuntimeReady = PerformOnConfigChangeAsync(app).Result; @@ -709,8 +730,9 @@ public static ILoggerFactory CreateLoggerFactoryForHostedAndNonHostedScenario(IS } TelemetryClient? appTelemetryClient = serviceProvider.GetService(); + Logger? 
serilogLogger = serviceProvider.GetService(); - return Program.GetLoggerFactoryForLogLevel(logLevelInitializer.MinLogLevel, appTelemetryClient, logLevelInitializer); + return Program.GetLoggerFactoryForLogLevel(logLevelInitializer.MinLogLevel, appTelemetryClient, logLevelInitializer, serilogLogger); } /// @@ -941,6 +963,44 @@ private void ConfigureAzureLogAnalytics(RuntimeConfig runtimeConfig) } } + /// + /// Configure File Sink based on the loaded runtime configuration. If File Sink + /// is enabled, we can track different events and metrics. + /// + /// The application builder. + /// The provider used to load runtime configuration. + private void ConfigureFileSink(IApplicationBuilder app, RuntimeConfig runtimeConfig) + { + if (runtimeConfig?.Runtime?.Telemetry is not null + && runtimeConfig.Runtime.Telemetry.File is not null) + { + FileSinkOptions = runtimeConfig.Runtime.Telemetry.File; + + if (!FileSinkOptions.Enabled) + { + _logger.LogInformation("File is disabled."); + return; + } + + if (string.IsNullOrWhiteSpace(FileSinkOptions.Path)) + { + _logger.LogError("Logs won't be sent to File because the Path is not available in the config file."); + return; + } + + Logger? serilogLogger = app.ApplicationServices.GetService(); + if (serilogLogger is null) + { + _logger.LogError("Serilog Logger Configuration is not set."); + return; + } + + // Updating Startup Logger to Log from Startup Class. + ILoggerFactory? loggerFactory = Program.GetLoggerFactoryForLogLevel(logLevel: MinimumLogLevel, serilogLogger: serilogLogger); + _logger = loggerFactory.CreateLogger(); + } + } + /// /// Sets Static Web Apps EasyAuth as the authentication scheme for the engine. /// From 576f4c53602372954162cbe53ec771cf61a1e96c Mon Sep 17 00:00:00 2001 From: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> Date: Fri, 22 Aug 2025 23:02:02 +0000 Subject: [PATCH 54/79] Adding 'Configure' options to CLI for File Sink (#2818) ## Why make this change? 
- Fixes issue #2577 - We want to allow the file sink properties to be configurable through the CLI command `dab configure`. ## What is this change? This change adds the file sink properties to the configure command and allows the user to change those properties through the CLI. It also ensures that the path property exists if file sink is enabled. - `ConfigOptions.cs`: Adds file sink properties to CLI command so that they can be configured by the user. - `ConfigGenerator.cs`: Writes the file sink properties to the config file, and errors out if the user tries to add an invalid value. - `RuntimeConfigValidator.cs`: Validates that `runtime.telemetry.file.path` is not empty or null if the file sink is enabled. ## How was this tested? - [ ] Integration Tests - [X] Unit Tests Added tests that ensure the configure commands and validation work. Also refactored the validation tests that followed the same pattern. ## Sample Request(s) --runtime.telemetry.file.enabled --runtime.telemetry.file.path --runtime.telemetry.file.rolling-interval --runtime.telemetry.file.retained-file-count-limit --runtime.telemetry.file.file-size-limit-bytes --------- Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> --- schemas/dab.draft.schema.json | 12 ++- src/Cli.Tests/ConfigureOptionsTests.cs | 45 ++++++++++ src/Cli.Tests/ValidateConfigTests.cs | 67 ++++++++------ src/Cli/Commands/ConfigureOptions.cs | 28 ++++++ src/Cli/ConfigGenerator.cs | 88 +++++++++++++++++++ src/Config/Converters/FileSinkConverter.cs | 4 +- src/Config/ObjectModel/FileSinkOptions.cs | 6 +- .../Configurations/RuntimeConfigValidator.cs | 56 ++++++++++++ .../HotReload/ConfigurationHotReloadTests.cs | 1 + 9 files changed, 274 insertions(+), 33 deletions(-) diff --git a/schemas/dab.draft.schema.json b/schemas/dab.draft.schema.json index fafdefc574..3f3004c9c6 100644 --- a/schemas/dab.draft.schema.json +++ b/schemas/dab.draft.schema.json @@ -501,7 +501,7 @@ "type": "string", "description": 
"Rolling interval for log files.", "default": "Day", - "enum": ["Minute", "Hour", "Day", "Month", "Year", "Infinite"] + "enum": [ "Minute", "Hour", "Day", "Month", "Year", "Infinite" ] }, "retained-file-count-limit": { "type": "integer", @@ -515,6 +515,16 @@ "default": 1048576, "minimum": 1 } + }, + "if": { + "properties": { + "enabled": { + "const": true + } + } + }, + "then": { + "required": [ "path" ] } }, "log-level": { diff --git a/src/Cli.Tests/ConfigureOptionsTests.cs b/src/Cli.Tests/ConfigureOptionsTests.cs index ca1922508a..073f349a67 100644 --- a/src/Cli.Tests/ConfigureOptionsTests.cs +++ b/src/Cli.Tests/ConfigureOptionsTests.cs @@ -1,6 +1,8 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. +using Serilog; + namespace Cli.Tests { /// @@ -188,6 +190,49 @@ public void TestAddAzureLogAnalyticsOptions() Assert.AreEqual("dce-endpoint-test", config.Runtime.Telemetry.AzureLogAnalytics.Auth.DceEndpoint); } + /// + /// Tests that running the "configure --file" commands on a config without file sink properties results + /// in a valid config being generated. 
+ /// + [TestMethod] + public void TestAddFileSinkOptions() + { + // Arrange + string fileSinkPath = "/custom/log/path.txt"; + RollingInterval fileSinkRollingInterval = RollingInterval.Hour; + int fileSinkRetainedFileCountLimit = 5; + int fileSinkFileSizeLimitBytes = 2097152; + + _fileSystem!.AddFile(TEST_RUNTIME_CONFIG_FILE, new MockFileData(INITIAL_CONFIG)); + + Assert.IsTrue(_fileSystem!.File.Exists(TEST_RUNTIME_CONFIG_FILE)); + + // Act: Attempts to add file options + ConfigureOptions options = new( + fileSinkEnabled: CliBool.True, + fileSinkPath: fileSinkPath, + fileSinkRollingInterval: fileSinkRollingInterval, + fileSinkRetainedFileCountLimit: fileSinkRetainedFileCountLimit, + fileSinkFileSizeLimitBytes: fileSinkFileSizeLimitBytes, + config: TEST_RUNTIME_CONFIG_FILE + ); + + bool isSuccess = TryConfigureSettings(options, _runtimeConfigLoader!, _fileSystem!); + + // Assert: Validate the file options are added. + Assert.IsTrue(isSuccess); + string updatedConfig = _fileSystem!.File.ReadAllText(TEST_RUNTIME_CONFIG_FILE); + Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(updatedConfig, out RuntimeConfig? config)); + Assert.IsNotNull(config.Runtime); + Assert.IsNotNull(config.Runtime.Telemetry); + Assert.IsNotNull(config.Runtime.Telemetry.File); + Assert.AreEqual(true, config.Runtime.Telemetry.File.Enabled); + Assert.AreEqual(fileSinkPath, config.Runtime.Telemetry.File.Path); + Assert.AreEqual(fileSinkRollingInterval.ToString(), config.Runtime.Telemetry.File.RollingInterval); + Assert.AreEqual(fileSinkRetainedFileCountLimit, config.Runtime.Telemetry.File.RetainedFileCountLimit); + Assert.AreEqual(fileSinkFileSizeLimitBytes, config.Runtime.Telemetry.File.FileSizeLimitBytes); + } + /// /// Tests that running "dab configure --runtime.graphql.enabled" on a config with various values results /// in runtime. 
Takes in updated value for graphql.enabled and diff --git a/src/Cli.Tests/ValidateConfigTests.cs b/src/Cli.Tests/ValidateConfigTests.cs index aeb016f007..78f2db1b6f 100644 --- a/src/Cli.Tests/ValidateConfigTests.cs +++ b/src/Cli.Tests/ValidateConfigTests.cs @@ -3,6 +3,7 @@ using Azure.DataApiBuilder.Core.Configurations; using Azure.DataApiBuilder.Core.Models; +using Serilog; namespace Cli.Tests; /// @@ -282,17 +283,6 @@ public void ValidateConfigSchemaWhereConfigReferencesEnvironmentVariables() public async Task TestValidateAKVOptionsWithoutEndpointFails() { // Arrange - _fileSystem!.AddFile(TEST_RUNTIME_CONFIG_FILE, new MockFileData(INITIAL_CONFIG)); - Assert.IsTrue(_fileSystem!.File.Exists(TEST_RUNTIME_CONFIG_FILE)); - Mock mockRuntimeConfigProvider = new(_runtimeConfigLoader); - RuntimeConfigValidator validator = new(mockRuntimeConfigProvider.Object, _fileSystem, new Mock>().Object); - Mock mockLoggerFactory = new(); - Mock> mockLogger = new(); - mockLoggerFactory - .Setup(factory => factory.CreateLogger(typeof(JsonConfigSchemaValidator).FullName!)) - .Returns(mockLogger.Object); - - // Act: Attempts to add AKV options ConfigureOptions options = new( azureKeyVaultRetryPolicyMaxCount: 1, azureKeyVaultRetryPolicyDelaySeconds: 1, @@ -302,14 +292,8 @@ public async Task TestValidateAKVOptionsWithoutEndpointFails() config: TEST_RUNTIME_CONFIG_FILE ); - bool isSuccess = TryConfigureSettings(options, _runtimeConfigLoader!, _fileSystem!); - - // Assert: Settings are configured, config parses, validation fails. - Assert.IsTrue(isSuccess); - string updatedConfig = _fileSystem!.File.ReadAllText(TEST_RUNTIME_CONFIG_FILE); - Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(updatedConfig, out RuntimeConfig? 
config)); - JsonSchemaValidationResult result = await validator.ValidateConfigSchema(config, TEST_RUNTIME_CONFIG_FILE, mockLoggerFactory.Object); - Assert.IsFalse(result.IsValid); + // Act + await ValidatePropertyOptionsFails(options); } /// @@ -319,24 +303,53 @@ public async Task TestValidateAKVOptionsWithoutEndpointFails() public async Task TestValidateAzureLogAnalyticsOptionsWithoutAuthFails() { // Arrange + ConfigureOptions options = new( + azureLogAnalyticsEnabled: CliBool.True, + azureLogAnalyticsDabIdentifier: "dab-identifier-test", + azureLogAnalyticsFlushIntervalSeconds: 1, + config: TEST_RUNTIME_CONFIG_FILE + ); + + // Act + await ValidatePropertyOptionsFails(options); + } + + /// + /// Tests that validation fails when File Sink options are configured without the 'path' property. + /// + [TestMethod] + public async Task TestValidateFileSinkOptionsWithoutPathFails() + { + // Arrange + ConfigureOptions options = new( + fileSinkEnabled: CliBool.True, + fileSinkRollingInterval: RollingInterval.Day, + fileSinkRetainedFileCountLimit: 1, + fileSinkFileSizeLimitBytes: 1024, + config: TEST_RUNTIME_CONFIG_FILE + ); + + // Act + await ValidatePropertyOptionsFails(options); + } + + /// + /// Helper function that ensures properties with missing options fail validation. 
+ /// + private async Task ValidatePropertyOptionsFails(ConfigureOptions options) + { _fileSystem!.AddFile(TEST_RUNTIME_CONFIG_FILE, new MockFileData(INITIAL_CONFIG)); Assert.IsTrue(_fileSystem!.File.Exists(TEST_RUNTIME_CONFIG_FILE)); Mock mockRuntimeConfigProvider = new(_runtimeConfigLoader); RuntimeConfigValidator validator = new(mockRuntimeConfigProvider.Object, _fileSystem, new Mock>().Object); + Mock mockLoggerFactory = new(); Mock> mockLogger = new(); mockLoggerFactory .Setup(factory => factory.CreateLogger(typeof(JsonConfigSchemaValidator).FullName!)) .Returns(mockLogger.Object); - // Act: Attempts to add Azure Log Analytics options without Auth options - ConfigureOptions options = new( - azureLogAnalyticsEnabled: CliBool.True, - azureLogAnalyticsDabIdentifier: "dab-identifier-test", - azureLogAnalyticsFlushIntervalSeconds: 1, - config: TEST_RUNTIME_CONFIG_FILE - ); - + // Act: Attempts to add File Sink options without empty path bool isSuccess = TryConfigureSettings(options, _runtimeConfigLoader!, _fileSystem!); // Assert: Settings are configured, config parses, validation fails. diff --git a/src/Cli/Commands/ConfigureOptions.cs b/src/Cli/Commands/ConfigureOptions.cs index 8e8c14f6d3..4f61b2007b 100644 --- a/src/Cli/Commands/ConfigureOptions.cs +++ b/src/Cli/Commands/ConfigureOptions.cs @@ -8,7 +8,9 @@ using Cli.Constants; using CommandLine; using Microsoft.Extensions.Logging; +using Serilog; using static Cli.Utils; +using ILogger = Microsoft.Extensions.Logging.ILogger; namespace Cli.Commands { @@ -54,6 +56,11 @@ public ConfigureOptions( string? azureLogAnalyticsCustomTableName = null, string? azureLogAnalyticsDcrImmutableId = null, string? azureLogAnalyticsDceEndpoint = null, + CliBool? fileSinkEnabled = null, + string? fileSinkPath = null, + RollingInterval? fileSinkRollingInterval = null, + int? fileSinkRetainedFileCountLimit = null, + long? fileSinkFileSizeLimitBytes = null, string? 
config = null) : base(config) { @@ -98,6 +105,12 @@ public ConfigureOptions( AzureLogAnalyticsCustomTableName = azureLogAnalyticsCustomTableName; AzureLogAnalyticsDcrImmutableId = azureLogAnalyticsDcrImmutableId; AzureLogAnalyticsDceEndpoint = azureLogAnalyticsDceEndpoint; + // File + FileSinkEnabled = fileSinkEnabled; + FileSinkPath = fileSinkPath; + FileSinkRollingInterval = fileSinkRollingInterval; + FileSinkRetainedFileCountLimit = fileSinkRetainedFileCountLimit; + FileSinkFileSizeLimitBytes = fileSinkFileSizeLimitBytes; } [Option("data-source.database-type", Required = false, HelpText = "Database type. Allowed values: MSSQL, PostgreSQL, CosmosDB_NoSQL, MySQL.")] @@ -202,6 +215,21 @@ public ConfigureOptions( [Option("runtime.telemetry.azure-log-analytics.auth.dce-endpoint", Required = false, HelpText = "Configure DCE Endpoint for Azure Log Analytics to find table to send telemetry data")] public string? AzureLogAnalyticsDceEndpoint { get; } + [Option("runtime.telemetry.file.enabled", Required = false, HelpText = "Enable/Disable File Sink logging. Default: False (boolean)")] + public CliBool? FileSinkEnabled { get; } + + [Option("runtime.telemetry.file.path", Required = false, HelpText = "Configure path for File Sink logging. Default: /logs/dab-log.txt")] + public string? FileSinkPath { get; } + + [Option("runtime.telemetry.file.rolling-interval", Required = false, HelpText = "Configure rolling interval for File Sink logging. Default: Day")] + public RollingInterval? FileSinkRollingInterval { get; } + + [Option("runtime.telemetry.file.retained-file-count-limit", Required = false, HelpText = "Configure maximum number of retained files. Default: 1")] + public int? FileSinkRetainedFileCountLimit { get; } + + [Option("runtime.telemetry.file.file-size-limit-bytes", Required = false, HelpText = "Configure maximum file size limit in bytes. Default: 1048576")] + public long? 
FileSinkFileSizeLimitBytes { get; } + public int Handler(ILogger logger, FileSystemRuntimeConfigLoader loader, IFileSystem fileSystem) { logger.LogInformation("{productName} {version}", PRODUCT_NAME, ProductInfo.GetProductVersion()); diff --git a/src/Cli/ConfigGenerator.cs b/src/Cli/ConfigGenerator.cs index 18e18f00a6..9cc53493fd 100644 --- a/src/Cli/ConfigGenerator.cs +++ b/src/Cli/ConfigGenerator.cs @@ -13,6 +13,7 @@ using Azure.DataApiBuilder.Service; using Cli.Commands; using Microsoft.Extensions.Logging; +using Serilog; using static Cli.Utils; namespace Cli @@ -798,6 +799,25 @@ options.AzureLogAnalyticsDcrImmutableId is not null || } } + // Telemetry: File Sink + if (options.FileSinkEnabled is not null || + options.FileSinkPath is not null || + options.FileSinkRollingInterval is not null || + options.FileSinkRetainedFileCountLimit is not null || + options.FileSinkFileSizeLimitBytes is not null) + { + FileSinkOptions updatedFileSinkOptions = runtimeConfig?.Runtime?.Telemetry?.File ?? new(); + bool status = TryUpdateConfiguredFileOptions(options, ref updatedFileSinkOptions); + if (status) + { + runtimeConfig = runtimeConfig! with { Runtime = runtimeConfig.Runtime! with { Telemetry = runtimeConfig.Runtime!.Telemetry is not null ? runtimeConfig.Runtime!.Telemetry with { File = updatedFileSinkOptions } : new TelemetryOptions(File: updatedFileSinkOptions) } }; + } + else + { + return false; + } + } + return runtimeConfig != null; } @@ -1199,6 +1219,74 @@ private static bool TryUpdateConfiguredAzureLogAnalyticsOptions( } } + /// + /// Updates the file sink options in the configuration. + /// + /// The configuration options provided by the user. + /// The file sink options to be updated. + /// True if the options were successfully updated; otherwise, false. 
+ private static bool TryUpdateConfiguredFileOptions( + ConfigureOptions options, + ref FileSinkOptions fileOptions) + { + try + { + // Runtime.Telemetry.File.Enabled + if (options.FileSinkEnabled is not null) + { + fileOptions = fileOptions with { Enabled = options.FileSinkEnabled is CliBool.True, UserProvidedEnabled = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.file.enabled as '{options.FileSinkEnabled}'"); + } + + // Runtime.Telemetry.File.Path + if (options.FileSinkPath is not null) + { + fileOptions = fileOptions with { Path = options.FileSinkPath, UserProvidedPath = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.file.path as '{options.FileSinkPath}'"); + } + + // Runtime.Telemetry.File.RollingInterval + if (options.FileSinkRollingInterval is not null) + { + fileOptions = fileOptions with { RollingInterval = ((RollingInterval)options.FileSinkRollingInterval).ToString(), UserProvidedRollingInterval = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.file.rolling-interval as '{options.FileSinkRollingInterval}'"); + } + + // Runtime.Telemetry.File.RetainedFileCountLimit + if (options.FileSinkRetainedFileCountLimit is not null) + { + if (options.FileSinkRetainedFileCountLimit <= 0) + { + _logger.LogError("Failed to update configuration with runtime.telemetry.file.retained-file-count-limit. 
Value must be a positive integer greater than 0."); + return false; + } + + fileOptions = fileOptions with { RetainedFileCountLimit = (int)options.FileSinkRetainedFileCountLimit, UserProvidedRetainedFileCountLimit = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.file.retained-file-count-limit as '{options.FileSinkRetainedFileCountLimit}'"); + } + + // Runtime.Telemetry.File.FileSizeLimitBytes + if (options.FileSinkFileSizeLimitBytes is not null) + { + if (options.FileSinkFileSizeLimitBytes <= 0) + { + _logger.LogError("Failed to update configuration with runtime.telemetry.file.file-size-limit-bytes. Value must be a positive integer greater than 0."); + return false; + } + + fileOptions = fileOptions with { FileSizeLimitBytes = (long)options.FileSinkFileSizeLimitBytes, UserProvidedFileSizeLimitBytes = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.file.file-size-limit-bytes as '{options.FileSinkFileSizeLimitBytes}'"); + } + + return true; + } + catch (Exception ex) + { + _logger.LogError($"Failed to update configuration with runtime.telemetry.file. Exception message: {ex.Message}."); + return false; + } + } + /// /// Parse permission string to create PermissionSetting array. /// diff --git a/src/Config/Converters/FileSinkConverter.cs b/src/Config/Converters/FileSinkConverter.cs index e0107a11f6..cc7d138a1b 100644 --- a/src/Config/Converters/FileSinkConverter.cs +++ b/src/Config/Converters/FileSinkConverter.cs @@ -34,7 +34,7 @@ public FileSinkConverter(bool replaceEnvVar) string? path = null; RollingInterval? rollingInterval = null; int? retainedFileCountLimit = null; - int? fileSizeLimitBytes = null; + long? 
fileSizeLimitBytes = null; while (reader.Read()) { @@ -97,7 +97,7 @@ public FileSinkConverter(bool replaceEnvVar) { try { - fileSizeLimitBytes = reader.GetInt32(); + fileSizeLimitBytes = reader.GetInt64(); } catch (FormatException) { diff --git a/src/Config/ObjectModel/FileSinkOptions.cs b/src/Config/ObjectModel/FileSinkOptions.cs index 7e6674fcad..a5de58642f 100644 --- a/src/Config/ObjectModel/FileSinkOptions.cs +++ b/src/Config/ObjectModel/FileSinkOptions.cs @@ -60,10 +60,10 @@ public record FileSinkOptions /// /// File size limit in bytes before a new file needs to be created. /// - public int FileSizeLimitBytes { get; init; } + public long FileSizeLimitBytes { get; init; } [JsonConstructor] - public FileSinkOptions(bool? enabled = null, string? path = null, RollingInterval? rollingInterval = null, int? retainedFileCountLimit = null, int? fileSizeLimitBytes = null) + public FileSinkOptions(bool? enabled = null, string? path = null, RollingInterval? rollingInterval = null, int? retainedFileCountLimit = null, long? fileSizeLimitBytes = null) { if (enabled is not null) { @@ -107,7 +107,7 @@ public FileSinkOptions(bool? enabled = null, string? path = null, RollingInterva if (fileSizeLimitBytes is not null) { - FileSizeLimitBytes = (int)fileSizeLimitBytes; + FileSizeLimitBytes = (long)fileSizeLimitBytes; UserProvidedFileSizeLimitBytes = true; } else diff --git a/src/Core/Configurations/RuntimeConfigValidator.cs b/src/Core/Configurations/RuntimeConfigValidator.cs index f910d5bd76..12a8f82aa4 100644 --- a/src/Core/Configurations/RuntimeConfigValidator.cs +++ b/src/Core/Configurations/RuntimeConfigValidator.cs @@ -82,6 +82,7 @@ public void ValidateConfigProperties() ValidateAppInsightsTelemetryConnectionString(runtimeConfig); ValidateLoggerFilters(runtimeConfig); ValidateAzureLogAnalyticsAuth(runtimeConfig); + ValidateFileSinkPath(runtimeConfig); // Running these graphQL validations only in development mode to ensure // fast startup of engine in production mode. 
@@ -177,6 +178,61 @@ public void ValidateAzureLogAnalyticsAuth(RuntimeConfig runtimeConfig) } } + /// + /// The path in File Sink is required if it is enabled. + /// + public void ValidateFileSinkPath(RuntimeConfig runtimeConfig) + { + if (runtimeConfig.Runtime!.Telemetry is not null && runtimeConfig.Runtime.Telemetry.File is not null) + { + FileSinkOptions fileSinkOptions = runtimeConfig.Runtime.Telemetry.File; + if (fileSinkOptions.Enabled && string.IsNullOrWhiteSpace(fileSinkOptions.Path)) + { + HandleOrRecordException(new DataApiBuilderException( + message: "File option 'path' cannot be null or empty if enabled.", + statusCode: HttpStatusCode.ServiceUnavailable, + subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError)); + } + + if (fileSinkOptions.Path.Length > 260) + { + _logger.LogWarning("File option 'path' exceeds 260 characters, it is recommended that the path does not exceed this limit."); + } + + // Checks if path is valid by checking if there are any invalid characters and then + // attempting to retrieve the full path, returns an exception if it is unable. + try + { + string fileName = System.IO.Path.GetFileName(fileSinkOptions.Path); + if (string.IsNullOrWhiteSpace(fileName) || fileName.IndexOfAny(System.IO.Path.GetInvalidFileNameChars()) != -1) + { + HandleOrRecordException(new DataApiBuilderException( + message: "File option 'path' cannot have invalid characters in its directory or file name.", + statusCode: HttpStatusCode.ServiceUnavailable, + subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError)); + } + + string? 
directoryName = System.IO.Path.GetDirectoryName(fileSinkOptions.Path); + if (directoryName is not null && directoryName.IndexOfAny(System.IO.Path.GetInvalidPathChars()) != -1) + { + HandleOrRecordException(new DataApiBuilderException( + message: "File option 'path' cannot have invalid characters in its directory or file name.", + statusCode: HttpStatusCode.ServiceUnavailable, + subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError)); + } + + System.IO.Path.GetFullPath(fileSinkOptions.Path); + } + catch (Exception ex) + { + HandleOrRecordException(new DataApiBuilderException( + message: ex.Message, + statusCode: HttpStatusCode.ServiceUnavailable, + subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError)); + } + } + } + /// /// This method runs several validations against the config file such as schema validation, /// validation of entities metadata, validation of permissions, validation of entity configuration. diff --git a/src/Service.Tests/Configuration/HotReload/ConfigurationHotReloadTests.cs b/src/Service.Tests/Configuration/HotReload/ConfigurationHotReloadTests.cs index d77bea21bb..d2ea7708ce 100644 --- a/src/Service.Tests/Configuration/HotReload/ConfigurationHotReloadTests.cs +++ b/src/Service.Tests/Configuration/HotReload/ConfigurationHotReloadTests.cs @@ -698,6 +698,7 @@ await ConfigurationHotReloadTests.WaitForConditionAsync( /// Invalid change that was added is a schema file that is not complete, which should be /// catched by the validator. /// + [Ignore] [TestCategory(MSSQL_ENVIRONMENT)] [TestMethod] public void HotReloadValidationFail() From c9c6ecaba36895dc6a2b2e11fe9fcb055e7a8339 Mon Sep 17 00:00:00 2001 From: Rahul Nishant <53243582+ranishan@users.noreply.github.com> Date: Wed, 3 Sep 2025 21:23:50 -0700 Subject: [PATCH 55/79] Throw user error when Stored procedures with unnamed aggregate columns fails due to column name returning as null (#2857) ## Why make this change? 
The change resolves the issue #2856 where the call to get stored procedure schema fails when the SQL Server stored procedure has columns containing values from aggregate functions that do not have an alias. e.g. CREATE TABLE books (Id INT, Name VARCHAR(255)); GO CREATE PROCEDURE [dbo].[GetMaxPrc] AS BEGIN -- This query returns an unnamed column SELECT MAX(Id) FROM dbo.books; END GO **Root Cause** The code uses the table sys.dm_exec_describe_first_result_set_for_object to identify the name of the column, type etc. As mentioned in the following doc, the "name" column in this table can be null if the column name cannot be determined. https://learn.microsoft.com/en-us/sql/relational-databases/system-dynamic-management-views/sys-dm-exec-describe-first-result-set-for-object-transact-sql?view=sql-server-ver17 ## What is this change? The change handles the failure by throwing a user exception with an appropriate message to help user fix the error. ## How was this tested? - [ ] Integration Tests - [X] Unit Tests ## Sample Request(s) - Example REST and/or GraphQL request to demonstrate modifications e.g. 
dab-config for the stored procedure mentioned earlier: { "entities": { "GetMaxId": { "source": { "object": "dbo.GetMaxPrc", "type": "stored-procedure" }, "graphql": { "type": "query" }, "permissions": [ { "role": "anonymous", "actions": [ "execute" ] } ] } } } --------- Co-authored-by: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- .../MetadataProviders/SqlMetadataProvider.cs | 13 +++ .../UnitTests/SqlMetadataProviderUnitTests.cs | 98 +++++++++++++++++++ 2 files changed, 111 insertions(+) diff --git a/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs b/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs index dd4703d241..071c44fe05 100644 --- a/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs +++ b/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs @@ -1151,6 +1151,19 @@ private async Task PopulateResultSetDefinitionsForStoredProcedureAsync( Type resultFieldType = SqlToCLRType(element.GetProperty(BaseSqlQueryBuilder.STOREDPROC_COLUMN_SYSTEMTYPENAME).ToString()); bool isResultFieldNullable = element.GetProperty(BaseSqlQueryBuilder.STOREDPROC_COLUMN_ISNULLABLE).GetBoolean(); + // Validate that the stored procedure returns columns with proper names + // This commonly occurs when using aggregate functions or expressions without aliases + if (string.IsNullOrWhiteSpace(resultFieldName)) + { + throw new DataApiBuilderException( + message: $"The stored procedure '{dbStoredProcedureName}' returns a column without a name. " + + "This typically happens when using aggregate functions (like MAX, MIN, COUNT) or expressions " + + "without providing an alias. Please add column aliases to your SELECT statement. 
" + + "For example: 'SELECT MAX(id) AS MaxId' instead of 'SELECT MAX(id)'.", + statusCode: HttpStatusCode.ServiceUnavailable, + subStatusCode: DataApiBuilderException.SubStatusCodes.ErrorInInitialization); + } + // Store the dictionary containing result set field with its type as Columns storedProcedureDefinition.Columns.TryAdd(resultFieldName, new(resultFieldType) { IsNullable = isResultFieldNullable }); } diff --git a/src/Service.Tests/UnitTests/SqlMetadataProviderUnitTests.cs b/src/Service.Tests/UnitTests/SqlMetadataProviderUnitTests.cs index 3c7427971d..f1f9c4255d 100644 --- a/src/Service.Tests/UnitTests/SqlMetadataProviderUnitTests.cs +++ b/src/Service.Tests/UnitTests/SqlMetadataProviderUnitTests.cs @@ -3,19 +3,23 @@ using System; using System.Collections.Generic; +using System.Data.Common; using System.IO; using System.Net; +using System.Text.Json.Nodes; using System.Threading.Tasks; using Azure.DataApiBuilder.Config.DatabasePrimitives; using Azure.DataApiBuilder.Config.ObjectModel; using Azure.DataApiBuilder.Core.Authorization; using Azure.DataApiBuilder.Core.Configurations; +using Azure.DataApiBuilder.Core.Models; using Azure.DataApiBuilder.Core.Resolvers; using Azure.DataApiBuilder.Core.Resolvers.Factories; using Azure.DataApiBuilder.Core.Services; using Azure.DataApiBuilder.Service.Exceptions; using Azure.DataApiBuilder.Service.Tests.Configuration; using Azure.DataApiBuilder.Service.Tests.SqlTests; +using Microsoft.AspNetCore.Http; using Microsoft.Data.SqlClient; using Microsoft.Extensions.Logging; using Microsoft.VisualStudio.TestTools.UnitTesting; @@ -399,6 +403,100 @@ public async Task ValidateInferredRelationshipInfoForPgSql() ValidateInferredRelationshipInfoForTables(); } + /// + /// Data-driven test to validate that DataApiBuilderException is thrown for various invalid resultFieldName values + /// during stored procedure result set definition population. 
+ /// + [DataTestMethod, TestCategory(TestCategory.MSSQL)] + [DataRow(null, DisplayName = "Null result field name")] + [DataRow("", DisplayName = "Empty result field name")] + [DataRow(" ", DisplayName = "Multiple spaces result field name")] + public async Task ValidateExceptionForInvalidResultFieldNames(string invalidFieldName) + { + DatabaseEngine = TestCategory.MSSQL; + TestHelper.SetupDatabaseEnvironment(DatabaseEngine); + RuntimeConfig baseConfigFromDisk = SqlTestHelper.SetupRuntimeConfig(); + + // Create a RuntimeEntities with ONLY our test stored procedure entity + Dictionary entitiesDictionary = new() + { + { + "get_book_by_id", new Entity( + Source: new("dbo.get_book_by_id", EntitySourceType.StoredProcedure, null, null), + Rest: new(Enabled: true), + GraphQL: new("get_book_by_id", "get_book_by_ids", Enabled: true), + Permissions: new EntityPermission[] { + new( + Role: "anonymous", + Actions: new EntityAction[] { + new(Action: EntityActionOperation.Execute, Fields: null, Policy: null) + }) + }, + Relationships: null, + Mappings: null + ) + } + }; + + RuntimeEntities entities = new(entitiesDictionary); + RuntimeConfig runtimeConfig = baseConfigFromDisk with { Entities = entities }; + RuntimeConfigProvider runtimeConfigProvider = TestHelper.GenerateInMemoryRuntimeConfigProvider(runtimeConfig); + ILogger sqlMetadataLogger = new Mock>().Object; + + // Setup query builder + _queryBuilder = new MsSqlQueryBuilder(); + + try + { + string dataSourceName = runtimeConfigProvider.GetConfig().DefaultDataSourceName; + + // Create mock query executor that always returns JsonArray with invalid field name + Mock mockQueryExecutor = new(); + + // Create a JsonArray that simulates the stored procedure result with invalid field name + JsonArray invalidFieldJsonArray = new(); + JsonObject jsonObject = new() + { + [BaseSqlQueryBuilder.STOREDPROC_COLUMN_NAME] = invalidFieldName, // This will be null, empty, or whitespace + [BaseSqlQueryBuilder.STOREDPROC_COLUMN_SYSTEMTYPENAME] = 
"varchar", + [BaseSqlQueryBuilder.STOREDPROC_COLUMN_ISNULLABLE] = false + }; + invalidFieldJsonArray.Add(jsonObject); + + // Setup the mock to return our malformed JsonArray for all ExecuteQueryAsync calls + mockQueryExecutor.Setup(x => x.ExecuteQueryAsync( + It.IsAny(), + It.IsAny>(), + It.IsAny, Task>>(), + It.IsAny(), + It.IsAny(), + It.IsAny>())) + .ReturnsAsync(invalidFieldJsonArray); + + // Setup Mock query manager Factory + Mock queryManagerFactory = new(); + queryManagerFactory.Setup(x => x.GetQueryBuilder(It.IsAny())).Returns(_queryBuilder); + queryManagerFactory.Setup(x => x.GetQueryExecutor(It.IsAny())).Returns(mockQueryExecutor.Object); + + ISqlMetadataProvider sqlMetadataProvider = new MsSqlMetadataProvider( + runtimeConfigProvider, + queryManagerFactory.Object, + sqlMetadataLogger, + dataSourceName); + + await sqlMetadataProvider.InitializeAsync(); + Assert.Fail($"Expected DataApiBuilderException was not thrown for invalid resultFieldName: '{invalidFieldName}'."); + } + catch (DataApiBuilderException ex) + { + Assert.AreEqual(HttpStatusCode.ServiceUnavailable, ex.StatusCode); + Assert.AreEqual(DataApiBuilderException.SubStatusCodes.ErrorInInitialization, ex.SubStatusCode); + Assert.IsTrue(ex.Message.Contains("returns a column without a name")); + } + + TestHelper.UnsetAllDABEnvironmentVariables(); + } + /// /// Helper method for test methods ValidateInferredRelationshipInfoFor{MsSql, MySql, and PgSql}. /// This helper validates that an entity's relationship data is correctly inferred based on config and database supplied relationship metadata. From 1f0c41a72c9798007d491609f0fd9f29b198ee2a Mon Sep 17 00:00:00 2001 From: Aniruddh Munde Date: Thu, 4 Sep 2025 01:43:31 -0700 Subject: [PATCH 56/79] Update CODEOWNERS (#2848) Reflecting latest owners. 
Co-authored-by: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> --- CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CODEOWNERS b/CODEOWNERS index ed2b4835ef..c31746b5e4 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1,7 +1,7 @@ # These owners will be the default owners for everything in # the repo. Unless a later match takes precedence, # review when someone opens a pull request. -* @Aniruddh25 @aaronburtle @anushakolan @RubenCerna2079 @souvikghosh04 @ravishetye @neeraj-sharma2592 @sourabh1007 @vadeveka @Alekhya-Polavarapu @rusamant +* @Aniruddh25 @aaronburtle @anushakolan @RubenCerna2079 @souvikghosh04 @akashkumar58 @neeraj-sharma2592 @sourabh1007 @vadeveka @Alekhya-Polavarapu @rusamant code_of_conduct.md @jerrynixon contributing.md @jerrynixon From 6c54b8446d8dca39c317f13a3246e655a0dc8ad4 Mon Sep 17 00:00:00 2001 From: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> Date: Thu, 4 Sep 2025 18:49:41 +0000 Subject: [PATCH 57/79] Fix Azure Log Analytics 'flush-interval' property (#2854) ## Why make this change? - This change fixes #2853 - The Azure Log Analytics feature is uploading the logs before the time interval ends. ## What is this change? - It fixes the logic that pushes the logs to the Azure Log Analytics workspace by ensuring it doesn't stop if there aren't any logs coming into the `CustomLogCollector`. ## How was this tested? 
- [ ] Integration Tests - [X] Unit Tests --- .../Telemetry/AzureLogAnalyticsTests.cs | 2 +- .../Telemetry/AzureLogAnalyticsCustomLogCollector.cs | 12 ++++++++---- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/src/Service.Tests/Configuration/Telemetry/AzureLogAnalyticsTests.cs b/src/Service.Tests/Configuration/Telemetry/AzureLogAnalyticsTests.cs index 9b133b9ed3..47bfa56cf2 100644 --- a/src/Service.Tests/Configuration/Telemetry/AzureLogAnalyticsTests.cs +++ b/src/Service.Tests/Configuration/Telemetry/AzureLogAnalyticsTests.cs @@ -120,7 +120,7 @@ public async Task TestAzureLogAnalyticsFlushServiceSucceed(string message, LogLe _ = Task.Run(() => flusherService.StartAsync(tokenSource.Token)); - await Task.Delay(1000); + await Task.Delay(2000); // Assert AzureLogAnalyticsLogs actualLog = customClient.LogAnalyticsLogs[0]; diff --git a/src/Service/Telemetry/AzureLogAnalyticsCustomLogCollector.cs b/src/Service/Telemetry/AzureLogAnalyticsCustomLogCollector.cs index 6e150f64af..130b872fb4 100644 --- a/src/Service/Telemetry/AzureLogAnalyticsCustomLogCollector.cs +++ b/src/Service/Telemetry/AzureLogAnalyticsCustomLogCollector.cs @@ -53,14 +53,18 @@ await _logs.Writer.WriteAsync( public async Task> DequeueAllAsync(string dabIdentifier, int flushIntervalSeconds) { List list = new(); - Stopwatch time = Stopwatch.StartNew(); if (await _logs.Reader.WaitToReadAsync()) { - while (_logs.Reader.TryRead(out AzureLogAnalyticsLogs? item)) + Stopwatch time = Stopwatch.StartNew(); + + while (true) { - item.Identifier = dabIdentifier; - list.Add(item); + if (_logs.Reader.TryRead(out AzureLogAnalyticsLogs? 
item)) + { + item.Identifier = dabIdentifier; + list.Add(item); + } if (time.Elapsed >= TimeSpan.FromSeconds(flushIntervalSeconds)) { From e553adf36b938ea3d963819e6bafcd66b6ccc43d Mon Sep 17 00:00:00 2001 From: vadeveka <52937801+vadeveka@users.noreply.github.com> Date: Tue, 9 Sep 2025 10:49:23 -0700 Subject: [PATCH 58/79] Ensure JsonDocuments cleaned up after request (#2865) ## Why make this change? Closes #2858 When stored procedures are applied with authorization policies in DAB configuration, then at runtime we observe that queries or mutations via stored procedures fail unexpectedly with object disposed exception. This PR fixes the bug. In the query and mutation root resolvers, the intermediate results in the form of JsonDocuments are registered for cleanup. These were getting cleaned even before the leaf resolvers executed so DAB would encounter exception accessing already disposed objects. If registerForCleanup is invoked without additional parameter specifying when to cleanup, then by default, the cleanup happens immediately after the current resolver execution. This was causing the objects to be disposed early for stored procedure flow before the leaf resolvers were executed. ## What is this change? Invoke registerForCleanup with the additional param specifying the cleanup to happen after request is complete. ## How was this tested? 
- [x] Integration Tests - [ ] Unit Tests --- config-generators/mssql-commands.txt | 1 + src/Core/Services/ExecutionHelper.cs | 7 ++- .../GraphQLAuthorizationHandlerTests.cs | 58 +++++++++++++++++++ ...tReadingRuntimeConfigForMsSql.verified.txt | 30 ++++++++++ src/Service.Tests/dab-config.MsSql.json | 43 ++++++++++++++ 5 files changed, 137 insertions(+), 2 deletions(-) diff --git a/config-generators/mssql-commands.txt b/config-generators/mssql-commands.txt index c36366df54..cecc6b522c 100644 --- a/config-generators/mssql-commands.txt +++ b/config-generators/mssql-commands.txt @@ -236,3 +236,4 @@ add dbo_DimAccount --config "dab-config.MsSql.json" --source "DimAccount" --perm update dbo_DimAccount --config "dab-config.MsSql.json" --relationship parent_account --target.entity dbo_DimAccount --cardinality one --relationship.fields "ParentAccountKey:AccountKey" update dbo_DimAccount --config "dab-config.MsSql.json" --relationship child_accounts --target.entity dbo_DimAccount --cardinality many --relationship.fields "AccountKey:ParentAccountKey" add DateOnlyTable --config "dab-config.MsSql.json" --source "date_only_table" --permissions "anonymous:*" --rest true --graphql true --source.key-fields "event_date" +add GetBooksAuth --config "dab-config.MsSql.json" --source "get_books" --source.type "stored-procedure" --permissions "teststoredprocauth:execute" --rest true --graphql true --graphql.operation "Query" --rest.methods "Get" diff --git a/src/Core/Services/ExecutionHelper.cs b/src/Core/Services/ExecutionHelper.cs index 9745adbcd5..28a22b9b39 100644 --- a/src/Core/Services/ExecutionHelper.cs +++ b/src/Core/Services/ExecutionHelper.cs @@ -78,7 +78,8 @@ public async ValueTask ExecuteQueryAsync(IMiddlewareContext context) } return ValueTask.CompletedTask; - }); + }, + cleanAfter: CleanAfter.Request); context.Result = result.Item1.Select(t => t.RootElement).ToArray(); SetNewMetadata(context, result.Item2); @@ -125,7 +126,8 @@ public async ValueTask 
ExecuteMutateAsync(IMiddlewareContext context) } return ValueTask.CompletedTask; - }); + }, + cleanAfter: CleanAfter.Request); context.Result = result.Item1.Select(t => t.RootElement).ToArray(); SetNewMetadata(context, result.Item2); @@ -312,6 +314,7 @@ private static void SetContextResult(IMiddlewareContext context, JsonDocument? r result.Dispose(); return ValueTask.CompletedTask; }); + // The disposal could occur before we were finished using the value from the jsondocument, // thus needing to ensure copying the root element. Hence, we clone the root element. context.Result = result.RootElement.Clone(); diff --git a/src/Service.Tests/Authorization/GraphQL/GraphQLAuthorizationHandlerTests.cs b/src/Service.Tests/Authorization/GraphQL/GraphQLAuthorizationHandlerTests.cs index a4d2fbb6d4..faefdebb4b 100644 --- a/src/Service.Tests/Authorization/GraphQL/GraphQLAuthorizationHandlerTests.cs +++ b/src/Service.Tests/Authorization/GraphQL/GraphQLAuthorizationHandlerTests.cs @@ -139,5 +139,63 @@ public async Task Query_GroupBy_Aggregation_FieldNotAllowed() path: @"[""booksNF""]" ); } + + /// + /// Tests that a GraphQL query backed by stored procedure with a client role is allowed access and returns results. + /// + /// + [TestMethod] + public async Task Query_StoredProc_Allowed() + { + string graphQLQueryName = "executeGetBooksAuth"; + string graphQLQuery = @"{ + executeGetBooksAuth { + id + title + publisher_id + } + }"; + + JsonElement actual = await ExecuteGraphQLRequestAsync( + graphQLQuery, + graphQLQueryName, + isAuthenticated: true, + clientRoleHeader: "teststoredprocauth"); + + string dbQuery = $"EXEC dbo.get_books"; + string expected = await GetDatabaseResultAsync(dbQuery, expectJson: false); + + SqlTestHelper.PerformTestEqualJsonStrings(expected, actual.ToString()); + } + + /// + /// Tests that a GraphQL query backed by stored procedure with a client role is not allowed access and results in an + /// appropriate error message. 
+ /// + /// + [TestMethod] + public async Task Query_StoredProc_NotAllowed() + { + string graphQLQueryName = "executeGetBooksAuth"; + string graphQLQuery = @"{ + executeGetBooksAuth { + id + title + publisher_id + } + }"; + + JsonElement actual = await ExecuteGraphQLRequestAsync( + graphQLQuery, + graphQLQueryName, + isAuthenticated: true, + clientRoleHeader: "roledoesnotexist"); + + SqlTestHelper.TestForErrorInGraphQLResponse( + actual.ToString(), + message: "The current user is not authorized to access this resource.", + path: @"[""executeGetBooksAuth""]" + ); + } } } diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt index 541f7fc078..51b733b94e 100644 --- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt +++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt @@ -3651,6 +3651,36 @@ } ] } + }, + { + GetBooksAuth: { + Source: { + Object: get_books, + Type: stored-procedure + }, + GraphQL: { + Singular: GetBooksAuth, + Plural: GetBooksAuths, + Enabled: true, + Operation: Query + }, + Rest: { + Methods: [ + Get + ], + Enabled: true + }, + Permissions: [ + { + Role: teststoredprocauth, + Actions: [ + { + Action: Execute + } + ] + } + ] + } } ] } \ No newline at end of file diff --git a/src/Service.Tests/dab-config.MsSql.json b/src/Service.Tests/dab-config.MsSql.json index 0f3b1e5f83..e57c7dce8c 100644 --- a/src/Service.Tests/dab-config.MsSql.json +++ b/src/Service.Tests/dab-config.MsSql.json @@ -3580,6 +3580,19 @@ "action": "read" } ] + }, + { + "role": "TestFieldExcludedForAggregation", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [ + "publisher_id" + ] + } + } + ] } ], "mappings": { @@ -3798,6 +3811,36 @@ ] } ] + }, + "GetBooksAuth": { + "source": { + "object": "get_books", + "type": "stored-procedure" + }, + 
"graphql": { + "enabled": true, + "operation": "query", + "type": { + "singular": "GetBooksAuth", + "plural": "GetBooksAuths" + } + }, + "rest": { + "enabled": true, + "methods": [ + "get" + ] + }, + "permissions": [ + { + "role": "teststoredprocauth", + "actions": [ + { + "action": "execute" + } + ] + } + ] } } } \ No newline at end of file From 36c270b743d7cb564c8dd159cd94babc89db25aa Mon Sep 17 00:00:00 2001 From: souvikghosh04 Date: Thu, 11 Sep 2025 23:50:58 +0530 Subject: [PATCH 59/79] Adding @stuartpa as reviewer (#2869) Adding @stuartpa as reviewer. Stuart is involved in MCP capability in DAB --- CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CODEOWNERS b/CODEOWNERS index c31746b5e4..cb1f7eb036 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1,7 +1,7 @@ # These owners will be the default owners for everything in # the repo. Unless a later match takes precedence, # review when someone opens a pull request. -* @Aniruddh25 @aaronburtle @anushakolan @RubenCerna2079 @souvikghosh04 @akashkumar58 @neeraj-sharma2592 @sourabh1007 @vadeveka @Alekhya-Polavarapu @rusamant +* @Aniruddh25 @aaronburtle @anushakolan @RubenCerna2079 @souvikghosh04 @akashkumar58 @neeraj-sharma2592 @sourabh1007 @vadeveka @Alekhya-Polavarapu @rusamant @stuartpa code_of_conduct.md @jerrynixon contributing.md @jerrynixon From 271cbf4cc3a1dcdc1f915e091bee6a2d64b8eb1e Mon Sep 17 00:00:00 2001 From: Michael Staib Date: Fri, 12 Sep 2025 23:26:41 +0200 Subject: [PATCH 60/79] Hot Chocolate 16 Upgrade (#2822) This PR will create the base for the MCP server integration that is coming with Hot Chocolate 16. 
- upgrades packages - upgrades type interceptors - upgrades executor hooks #2789 --------- Co-authored-by: souvikghosh04 Co-authored-by: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> --- src/Core/Models/GraphQLFilterParsers.cs | 22 ++--- src/Core/Resolvers/BaseQueryStructure.cs | 2 +- src/Core/Resolvers/CosmosQueryEngine.cs | 4 +- src/Core/Resolvers/IQueryEngine.cs | 4 +- .../BaseSqlQueryStructure.cs | 4 +- .../Sql Query Structures/SqlQueryStructure.cs | 18 ++-- src/Core/Resolvers/SqlMutationEngine.cs | 89 +++++++++++-------- src/Core/Resolvers/SqlQueryEngine.cs | 4 +- .../Services/BuildRequestStateMiddleware.cs | 6 +- src/Core/Services/Cache/DabCacheService.cs | 6 +- .../Services/DetermineStatusCodeMiddleware.cs | 4 +- src/Core/Services/ExecutionHelper.cs | 18 ++-- .../MultipleMutationInputValidator.cs | 2 +- src/Core/Services/ResolverTypeInterceptor.cs | 32 +++---- src/Directory.Packages.props | 17 ++-- .../Directives/RelationshipDirective.cs | 6 +- src/Service.GraphQLBuilder/GraphQLUtils.cs | 9 +- .../Queries/QueryBuilder.cs | 2 +- .../MultiSourceQueryExecutionUnitTests.cs | 2 +- src/Service/HealthCheck/HealthCheckHelper.cs | 52 +++++------ src/Service/Startup.cs | 13 +-- 21 files changed, 166 insertions(+), 150 deletions(-) diff --git a/src/Core/Models/GraphQLFilterParsers.cs b/src/Core/Models/GraphQLFilterParsers.cs index f93c7f9f3d..153def832f 100644 --- a/src/Core/Models/GraphQLFilterParsers.cs +++ b/src/Core/Models/GraphQLFilterParsers.cs @@ -43,7 +43,7 @@ public GQLFilterParser(RuntimeConfigProvider runtimeConfigProvider, IMetadataPro /// Parse a predicate for a *FilterInput input type /// /// The GraphQL context, used to get the query variables - /// An IInputField object which describes the schema of the filter argument + /// An IInputValueDefinition object which describes the schema of the filter argument /// The fields in the *FilterInput being processed /// The query structure for the entity being filtered providing /// the source 
alias of the underlying *FilterInput being processed, @@ -51,7 +51,7 @@ public GQLFilterParser(RuntimeConfigProvider runtimeConfigProvider, IMetadataPro /// and the function that parametrizes literals before they are written in string predicate operands. public Predicate Parse( IMiddlewareContext ctx, - IInputField filterArgumentSchema, + IInputValueDefinition filterArgumentSchema, List fields, BaseQueryStructure queryStructure) { @@ -285,7 +285,7 @@ public Predicate Parse( /// private void HandleNestedFilterForCosmos( IMiddlewareContext ctx, - IInputField filterField, + IInputValueDefinition filterField, List subfields, string columnName, string entityType, @@ -466,7 +466,7 @@ public HttpContext GetHttpContextFromMiddlewareContext(IMiddlewareContext ctx) /// the fields. /// /// The GraphQL context, used to get the query variables - /// An IInputField object which describes the schema of the scalar input argument (e.g. IntFilterInput) + /// An IInputValueDefinition object which describes the schema of the scalar input argument (e.g. IntFilterInput) /// The name of the field /// The subfields of the scalar field /// The db schema name to which the table belongs @@ -476,7 +476,7 @@ public HttpContext GetHttpContextFromMiddlewareContext(IMiddlewareContext ctx) /// Flag to give a hint about the node type. It is only applicable for CosmosDB private static Predicate ParseScalarType( IMiddlewareContext ctx, - IInputField argumentSchema, + IInputValueDefinition argumentSchema, string fieldName, List fields, string schemaName, @@ -498,8 +498,8 @@ private static Predicate ParseScalarType( /// If and/or is passed as empty, a predicate representing 1 != 1 is returned /// /// The GraphQL context, used to get the query variables - /// An IInputField object which describes the and/or filter input argument - /// An IInputField object which describes the base filter input argument (e.g. 
BookFilterInput) + /// An IInputValueDefinition object which describes the and/or filter input argument + /// An IInputValueDefinition object which describes the base filter input argument (e.g. BookFilterInput) /// to which the and/or belongs /// The subfields of the and/or field /// The db schema name to which the table belongs @@ -510,8 +510,8 @@ private static Predicate ParseScalarType( /// Parametrizes literals before they are written in string predicate operands private Predicate ParseAndOr( IMiddlewareContext ctx, - IInputField argumentSchema, - IInputField filterArgumentSchema, + IInputValueDefinition argumentSchema, + IInputValueDefinition filterArgumentSchema, List fields, BaseQueryStructure baseQuery, PredicateOperation op) @@ -600,14 +600,14 @@ public static class FieldFilterParser /// Parse a scalar field into a predicate /// /// The GraphQL context, used to get the query variables - /// An IInputField object which describes the schema of the scalar input argument (e.g. IntFilterInput) + /// An IInputValueDefinition object which describes the schema of the scalar input argument (e.g. IntFilterInput) /// The table column targeted by the field /// The subfields of the scalar field /// Parametrizes literals before they are written in string predicate operands /// Flag which gives a hint about the node type in the given schema. only for CosmosDB it can be of list type. Refer here. 
public static Predicate Parse( IMiddlewareContext ctx, - IInputField argumentSchema, + IInputValueDefinition argumentSchema, Column column, List fields, Func processLiterals, diff --git a/src/Core/Resolvers/BaseQueryStructure.cs b/src/Core/Resolvers/BaseQueryStructure.cs index 88d30e521c..7f5564f831 100644 --- a/src/Core/Resolvers/BaseQueryStructure.cs +++ b/src/Core/Resolvers/BaseQueryStructure.cs @@ -198,7 +198,7 @@ public virtual SourceDefinition GetUnderlyingSourceDefinition() /// /// Extracts the *Connection.items schema field from the *Connection schema field /// - internal static IObjectField ExtractItemsSchemaField(IObjectField connectionSchemaField) + internal static ObjectField ExtractItemsSchemaField(ObjectField connectionSchemaField) { return connectionSchemaField.Type.NamedType().Fields[QueryBuilder.PAGINATION_FIELD_NAME]; } diff --git a/src/Core/Resolvers/CosmosQueryEngine.cs b/src/Core/Resolvers/CosmosQueryEngine.cs index e9d4caa380..7525318089 100644 --- a/src/Core/Resolvers/CosmosQueryEngine.cs +++ b/src/Core/Resolvers/CosmosQueryEngine.cs @@ -241,14 +241,14 @@ public Task ExecuteAsync(StoredProcedureRequestContext context, s } /// - public JsonElement ResolveObject(JsonElement element, IObjectField fieldSchema, ref IMetadata metadata) + public JsonElement ResolveObject(JsonElement element, ObjectField fieldSchema, ref IMetadata metadata) { return element; } /// /// metadata is not used in this method, but it is required by the interface. - public object ResolveList(JsonElement array, IObjectField fieldSchema, ref IMetadata metadata) + public object ResolveList(JsonElement array, ObjectField fieldSchema, ref IMetadata metadata) { IType listType = fieldSchema.Type; // Is the List type nullable? [...]! vs [...] 
diff --git a/src/Core/Resolvers/IQueryEngine.cs b/src/Core/Resolvers/IQueryEngine.cs index 0350b3efd2..1b89d3df54 100644 --- a/src/Core/Resolvers/IQueryEngine.cs +++ b/src/Core/Resolvers/IQueryEngine.cs @@ -58,11 +58,11 @@ public interface IQueryEngine /// /// Resolves a jsonElement representing an inner object based on the field's schema and metadata /// - public JsonElement ResolveObject(JsonElement element, IObjectField fieldSchema, ref IMetadata metadata); + public JsonElement ResolveObject(JsonElement element, ObjectField fieldSchema, ref IMetadata metadata); /// /// Resolves a jsonElement representing a list type based on the field's schema and metadata /// - public object ResolveList(JsonElement array, IObjectField fieldSchema, ref IMetadata? metadata); + public object ResolveList(JsonElement array, ObjectField fieldSchema, ref IMetadata? metadata); } } diff --git a/src/Core/Resolvers/Sql Query Structures/BaseSqlQueryStructure.cs b/src/Core/Resolvers/Sql Query Structures/BaseSqlQueryStructure.cs index dd96d9a3fc..99a5b1e72c 100644 --- a/src/Core/Resolvers/Sql Query Structures/BaseSqlQueryStructure.cs +++ b/src/Core/Resolvers/Sql Query Structures/BaseSqlQueryStructure.cs @@ -514,8 +514,8 @@ internal static List GetSubArgumentNamesFromGQLMutArguments if (mutationParameters.TryGetValue(fieldName, out object? 
item)) { - IObjectField fieldSchema = context.Selection.Field; - IInputField itemsArgumentSchema = fieldSchema.Arguments[fieldName]; + ObjectField fieldSchema = context.Selection.Field; + IInputValueDefinition itemsArgumentSchema = fieldSchema.Arguments[fieldName]; InputObjectType itemsArgumentObject = ExecutionHelper.InputObjectTypeFromIInputField(itemsArgumentSchema); // An inline argument was set diff --git a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs index a0c65ae98a..0370b44b83 100644 --- a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs +++ b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs @@ -172,7 +172,7 @@ public SqlQueryStructure( _ctx = ctx; IsMultipleCreateOperation = isMultipleCreateOperation; - IObjectField schemaField = _ctx.Selection.Field; + ObjectField schemaField = _ctx.Selection.Field; FieldNode? queryField = _ctx.Selection.SyntaxNode; IOutputType outputType = schemaField.Type; @@ -388,7 +388,7 @@ private SqlQueryStructure( IDictionary queryParams, ISqlMetadataProvider sqlMetadataProvider, IAuthorizationResolver authorizationResolver, - IObjectField schemaField, + ObjectField schemaField, FieldNode? queryField, IncrementingInteger counter, RuntimeConfigProvider runtimeConfigProvider, @@ -408,7 +408,7 @@ private SqlQueryStructure( // extract the query argument schemas before switching schemaField to point to *Connetion.items // since the pagination arguments are not placed on the items, but on the pagination query - IFieldCollection queryArgumentSchemas = schemaField.Arguments; + ArgumentCollection queryArgumentSchemas = schemaField.Arguments; PaginationMetadata.IsPaginated = QueryBuilder.IsPaginationType(_underlyingFieldType); @@ -796,7 +796,7 @@ private void AddGraphQLFields(IReadOnlyList selections, RuntimeC } else { - IObjectField? subschemaField = _underlyingFieldType.Fields[fieldName]; + ObjectField? 
subschemaField = _underlyingFieldType.Fields[fieldName]; if (_ctx == null) { @@ -986,14 +986,14 @@ private void ProcessAggregations(FieldNode aggregationsField, IMiddlewareContext } // Retrieve the schema field from the GraphQL context - IObjectField schemaField = ctx.Selection.Field; + ObjectField schemaField = ctx.Selection.Field; // Get the 'group by' field from the schema's entity type - IObjectField groupByField = schemaField.Type.NamedType() + ObjectField groupByField = schemaField.Type.NamedType() .Fields[QueryBuilder.GROUP_BY_FIELD_NAME]; // Get the 'aggregations' field from the 'group by' entity type - IObjectField aggregationsObjectField = groupByField.Type.NamedType() + ObjectField aggregationsObjectField = groupByField.Type.NamedType() .Fields[QueryBuilder.GROUP_BY_AGGREGATE_FIELD_NAME]; // Iterate through each selection in the aggregation field @@ -1064,7 +1064,7 @@ private void ProcessAggregations(FieldNode aggregationsField, IMiddlewareContext List filterFields = (List)havingArg.Value.Value!; // Retrieve the corresponding aggregation operation field from the schema - IObjectField operationObjectField = aggregationsObjectField.Type.NamedType() + ObjectField operationObjectField = aggregationsObjectField.Type.NamedType() .Fields[operation.ToString()]; // Parse the filtering conditions and apply them to the aggregation @@ -1105,7 +1105,7 @@ private void ProcessAggregations(FieldNode aggregationsField, IMiddlewareContext /// so we find their backing column names before creating the orderBy list. /// All the remaining primary key columns are also added to ensure there are no tie breaks. 
/// - private List ProcessGqlOrderByArg(List orderByFields, IInputField orderByArgumentSchema, bool isGroupByQuery = false) + private List ProcessGqlOrderByArg(List orderByFields, IInputValueDefinition orderByArgumentSchema, bool isGroupByQuery = false) { if (_ctx is null) { diff --git a/src/Core/Resolvers/SqlMutationEngine.cs b/src/Core/Resolvers/SqlMutationEngine.cs index 493b7900e7..dfc53449f8 100644 --- a/src/Core/Resolvers/SqlMutationEngine.cs +++ b/src/Core/Resolvers/SqlMutationEngine.cs @@ -106,7 +106,7 @@ public SqlMutationEngine( mutationOperation is EntityActionOperation.Create) { // Multiple create mutation request is validated to ensure that the request is valid semantically. - IInputField schemaForArgument = context.Selection.Field.Arguments[inputArgumentName]; + IInputValueDefinition schemaForArgument = context.Selection.Field.Arguments[inputArgumentName]; MultipleMutationEntityInputValidationContext multipleMutationEntityInputValidationContext = new( entityName: entityName, parentEntityName: string.Empty, @@ -1689,8 +1689,8 @@ private static void PopulateCurrentAndLinkingEntityParams( { if (mutationParameters.TryGetValue(rootFieldName, out object? inputParameters)) { - IObjectField fieldSchema = context.Selection.Field; - IInputField itemsArgumentSchema = fieldSchema.Arguments[rootFieldName]; + ObjectField fieldSchema = context.Selection.Field; + IInputValueDefinition itemsArgumentSchema = fieldSchema.Arguments[rootFieldName]; InputObjectType inputObjectType = ExecutionHelper.InputObjectTypeFromIInputField(itemsArgumentSchema); return GQLMultipleCreateArgumentToDictParamsHelper(context, inputObjectType, inputParameters); } @@ -1871,7 +1871,7 @@ private static void PopulateCurrentAndLinkingEntityParams( /// private static InputObjectType GetInputObjectTypeForAField(string fieldName, FieldCollection fields) { - if (fields.TryGetField(fieldName, out IInputField? field)) + if (fields.TryGetField(fieldName, out InputField? 
field)) { return ExecutionHelper.InputObjectTypeFromIInputField(field); } @@ -1886,7 +1886,7 @@ private static InputObjectType GetInputObjectTypeForAField(string fieldName, Fie /// /// The name of the entity. /// The parameters for the DELETE operation. - /// Metadataprovider for db on which to perform operation. + /// Metadata provider for db on which to perform operation. /// A dictionary of properties of the Db Data Reader like RecordsAffected, HasRows. private async Task?> PerformDeleteOperation( @@ -2127,7 +2127,7 @@ private void AuthorizeEntityAndFieldsForMutation( IDictionary parametersDictionary ) { - if (context.Selection.Field.Arguments.TryGetField(inputArgumentName, out IInputField? schemaForArgument)) + if (context.Selection.Field.Arguments.TryGetField(inputArgumentName, out Argument? schemaForArgument)) { // Dictionary to store all the entities and their corresponding exposed column names referenced in the mutation. Dictionary> entityToExposedColumns = new(); @@ -2173,42 +2173,53 @@ private void AuthorizeEntityAndFieldsForMutation( /// Dictionary to store all the entities and their corresponding exposed column names referenced in the mutation. /// Schema for the input field. /// Name of the entity. - /// Middleware Context. + /// Middleware context. /// Value for the input field. - /// 1. mutation { - /// createbook( - /// item: { - /// title: "book #1", - /// reviews: [{ content: "Good book." }, { content: "Great book." }], - /// publishers: { name: "Macmillan publishers" }, - /// authors: [{ birthdate: "1997-09-03", name: "Red house authors", royal_percentage: 4.6 }] - /// }) - /// { - /// id - /// } - /// 2. mutation { - /// createbooks( - /// items: [{ - /// title: "book #1", - /// reviews: [{ content: "Good book." }, { content: "Great book." 
}], - /// publishers: { name: "Macmillan publishers" }, - /// authors: [{ birthdate: "1997-09-03", name: "Red house authors", royal_percentage: 4.9 }] - /// }, - /// { - /// title: "book #2", - /// reviews: [{ content: "Awesome book." }, { content: "Average book." }], - /// publishers: { name: "Pearson Education" }, - /// authors: [{ birthdate: "1990-11-04", name: "Penguin Random House", royal_percentage: 8.2 }] - /// }]) - /// { - /// items{ - /// id - /// title - /// } - /// } + /// + /// Example 1 - Single item creation: + /// + /// mutation { + /// createbook( + /// item: { + /// title: "book #1", + /// reviews: [{ content: "Good book." }, { content: "Great book." }], + /// publishers: { name: "Macmillan publishers" }, + /// authors: [{ birthdate: "1997-09-03", name: "Red house authors", royal_percentage: 4.6 }] + /// }) + /// { + /// id + /// } + /// } + /// + /// + /// Example 2 - Multiple items creation: + /// + /// mutation { + /// createbooks( + /// items: [{ + /// title: "book #1", + /// reviews: [{ content: "Good book." }, { content: "Great book." }], + /// publishers: { name: "Macmillan publishers" }, + /// authors: [{ birthdate: "1997-09-03", name: "Red house authors", royal_percentage: 4.9 }] + /// }, + /// { + /// title: "book #2", + /// reviews: [{ content: "Awesome book." }, { content: "Average book." 
}], + /// publishers: { name: "Pearson Education" }, + /// authors: [{ birthdate: "1990-11-04", name: "Penguin Random House", royal_percentage: 8.2 }] + /// }]) + /// { + /// items { + /// id + /// title + /// } + /// } + /// } + /// + /// private void PopulateMutationEntityAndFieldsToAuthorize( Dictionary> entityToExposedColumns, - IInputField schema, + IInputValueDefinition schema, string entityName, IMiddlewareContext context, object parameters) diff --git a/src/Core/Resolvers/SqlQueryEngine.cs b/src/Core/Resolvers/SqlQueryEngine.cs index 12a305c574..7b261ecb2b 100644 --- a/src/Core/Resolvers/SqlQueryEngine.cs +++ b/src/Core/Resolvers/SqlQueryEngine.cs @@ -212,7 +212,7 @@ public async Task ExecuteAsync(StoredProcedureRequestContext cont } /// - public JsonElement ResolveObject(JsonElement element, IObjectField fieldSchema, ref IMetadata metadata) + public JsonElement ResolveObject(JsonElement element, ObjectField fieldSchema, ref IMetadata metadata) { PaginationMetadata parentMetadata = (PaginationMetadata)metadata; @@ -259,7 +259,7 @@ public JsonElement ResolveObject(JsonElement element, IObjectField fieldSchema, /// List of JsonElements parsed from the provided JSON array. /// Return type is 'object' instead of a 'List of JsonElements' because when this function returns JsonElement, /// the HC12 engine doesn't know how to handle the JsonElement and results in requests failing at runtime. - public object ResolveList(JsonElement array, IObjectField fieldSchema, ref IMetadata? metadata) + public object ResolveList(JsonElement array, ObjectField fieldSchema, ref IMetadata? 
metadata) { if (metadata is not null) { diff --git a/src/Core/Services/BuildRequestStateMiddleware.cs b/src/Core/Services/BuildRequestStateMiddleware.cs index 0cebec29e1..395538ce7d 100644 --- a/src/Core/Services/BuildRequestStateMiddleware.cs +++ b/src/Core/Services/BuildRequestStateMiddleware.cs @@ -32,7 +32,7 @@ public BuildRequestStateMiddleware(RequestDelegate next, RuntimeConfigProvider r /// http context's "X-MS-API-ROLE" header/value to HotChocolate's request context. /// /// HotChocolate execution request context. - public async ValueTask InvokeAsync(IRequestContext context) + public async ValueTask InvokeAsync(RequestContext context) { bool isIntrospectionQuery = context.Request.OperationName == "IntrospectionQuery"; ApiType apiType = ApiType.GraphQL; @@ -77,11 +77,11 @@ public async ValueTask InvokeAsync(IRequestContext context) // There is an error in GraphQL when ContextData is not null if (context.Result!.ContextData is not null) { - if (context.Result.ContextData.ContainsKey(WellKnownContextData.ValidationErrors)) + if (context.Result.ContextData.ContainsKey(ExecutionContextData.ValidationErrors)) { statusCode = HttpStatusCode.BadRequest; } - else if (context.Result.ContextData.ContainsKey(WellKnownContextData.OperationNotAllowed)) + else if (context.Result.ContextData.ContainsKey(ExecutionContextData.OperationNotAllowed)) { statusCode = HttpStatusCode.MethodNotAllowed; } diff --git a/src/Core/Services/Cache/DabCacheService.cs b/src/Core/Services/Cache/DabCacheService.cs index 942534fbec..5fab06691f 100644 --- a/src/Core/Services/Cache/DabCacheService.cs +++ b/src/Core/Services/Cache/DabCacheService.cs @@ -93,10 +93,10 @@ public DabCacheService(IFusionCache cache, ILogger? logger, IHt /// /// Try to get cacheValue from the cache with the derived cache key. /// - /// The type of value in the cache + /// The type of value in the cache /// Metadata used to create a cache key or fetch a response from the database. /// JSON Response - public MaybeValue? 
TryGet(DatabaseQueryMetadata queryMetadata, EntityCacheLevel cacheEntryLevel) + public MaybeValue? TryGet(DatabaseQueryMetadata queryMetadata, EntityCacheLevel cacheEntryLevel) { string cacheKey = CreateCacheKey(queryMetadata); FusionCacheEntryOptions options = new(); @@ -106,7 +106,7 @@ public DabCacheService(IFusionCache cache, ILogger? logger, IHt options.SetSkipDistributedCache(true, true); } - return _cache.TryGet(key: cacheKey); + return _cache.TryGet(key: cacheKey); } /// diff --git a/src/Core/Services/DetermineStatusCodeMiddleware.cs b/src/Core/Services/DetermineStatusCodeMiddleware.cs index 01384485d9..dcddc62971 100644 --- a/src/Core/Services/DetermineStatusCodeMiddleware.cs +++ b/src/Core/Services/DetermineStatusCodeMiddleware.cs @@ -18,7 +18,7 @@ public sealed class DetermineStatusCodeMiddleware(RequestDelegate next) { private const string ERROR_CODE = nameof(DataApiBuilderException.SubStatusCodes.DatabaseInputError); - public async ValueTask InvokeAsync(IRequestContext context) + public async ValueTask InvokeAsync(RequestContext context) { await next(context).ConfigureAwait(false); @@ -34,7 +34,7 @@ public async ValueTask InvokeAsync(IRequestContext context) contextData.AddRange(singleResult.ContextData); } - contextData[WellKnownContextData.HttpStatusCode] = HttpStatusCode.BadRequest; + contextData[ExecutionContextData.HttpStatusCode] = HttpStatusCode.BadRequest; context.Result = singleResult.WithContextData(contextData.ToImmutable()); } } diff --git a/src/Core/Services/ExecutionHelper.cs b/src/Core/Services/ExecutionHelper.cs index 28a22b9b39..a0a81f02cc 100644 --- a/src/Core/Services/ExecutionHelper.cs +++ b/src/Core/Services/ExecutionHelper.cs @@ -186,7 +186,7 @@ fieldValue.ValueKind is not (JsonValueKind.Undefined or JsonValueKind.Null)) { // The selection type can be a wrapper type like NonNullType or ListType. // To get the most inner type (aka the named type) we use our named type helper. 
- INamedType namedType = context.Selection.Field.Type.NamedType(); + ITypeDefinition namedType = context.Selection.Field.Type.NamedType(); // Each scalar in HotChocolate has a runtime type representation. // In order to let scalar values flow through the GraphQL type completion @@ -356,14 +356,14 @@ private static bool TryGetPropertyFromParent( /// the request context variable values needed to resolve value nodes represented as variables public static object? ExtractValueFromIValueNode( IValueNode value, - IInputField argumentSchema, + IInputValueDefinition argumentSchema, IVariableValueCollection variables) { // extract value from the variable if the IValueNode is a variable if (value.Kind == SyntaxKind.Variable) { string variableName = ((VariableNode)value).Name.Value; - IValueNode? variableValue = variables.GetVariable(variableName); + IValueNode? variableValue = variables.GetValue(variableName); if (variableValue is null) { @@ -414,16 +414,16 @@ private static bool TryGetPropertyFromParent( /// Value: (object) argument value /// public static IDictionary GetParametersFromSchemaAndQueryFields( - IObjectField schema, + ObjectField schema, FieldNode query, IVariableValueCollection variables) { IDictionary collectedParameters = new Dictionary(); // Fill the parameters dictionary with the default argument values - IFieldCollection schemaArguments = schema.Arguments; + ArgumentCollection schemaArguments = schema.Arguments; - // Example 'argumentSchemas' IInputField objects of type 'HotChocolate.Types.Argument': + // Example 'argumentSchemas' IInputValueDefinition objects of type 'HotChocolate.Types.Argument': // These are all default arguments defined in the schema for queries. // {first:int} // {after:String} @@ -431,7 +431,7 @@ private static bool TryGetPropertyFromParent( // {orderBy:entityOrderByInput} // The values in schemaArguments will have default values when the backing // entity is a stored procedure with runtime config defined default parameter values. 
- foreach (IInputField argument in schemaArguments) + foreach (IInputValueDefinition argument in schemaArguments) { if (argument.DefaultValue != null) { @@ -453,7 +453,7 @@ private static bool TryGetPropertyFromParent( foreach (ArgumentNode argument in passedArguments) { string argumentName = argument.Name.Value; - IInputField argumentSchema = schemaArguments[argumentName]; + IInputValueDefinition argumentSchema = schemaArguments[argumentName]; object? nodeValue = ExtractValueFromIValueNode( value: argument.Value, @@ -489,7 +489,7 @@ internal static IType InnerMostType(IType type) return InnerMostType(type.InnerType()); } - public static InputObjectType InputObjectTypeFromIInputField(IInputField field) + public static InputObjectType InputObjectTypeFromIInputField(IInputValueDefinition field) { return (InputObjectType)InnerMostType(field.Type); } diff --git a/src/Core/Services/MultipleMutationInputValidator.cs b/src/Core/Services/MultipleMutationInputValidator.cs index 5723795a30..946ae49951 100644 --- a/src/Core/Services/MultipleMutationInputValidator.cs +++ b/src/Core/Services/MultipleMutationInputValidator.cs @@ -70,7 +70,7 @@ public MultipleMutationInputValidator(IMetadataProviderFactory sqlMetadataProvid /// } /// } public void ValidateGraphQLValueNode( - IInputField schema, + IInputValueDefinition schema, IMiddlewareContext context, object? 
parameters, int nestingLevel, diff --git a/src/Core/Services/ResolverTypeInterceptor.cs b/src/Core/Services/ResolverTypeInterceptor.cs index 748a61db78..81cfcb21e5 100644 --- a/src/Core/Services/ResolverTypeInterceptor.cs +++ b/src/Core/Services/ResolverTypeInterceptor.cs @@ -5,12 +5,12 @@ using HotChocolate.Configuration; using HotChocolate.Language; using HotChocolate.Resolvers; -using HotChocolate.Types.Descriptors.Definitions; +using HotChocolate.Types.Descriptors.Configurations; internal sealed class ResolverTypeInterceptor : TypeInterceptor { - private readonly FieldMiddlewareDefinition _queryMiddleware; - private readonly FieldMiddlewareDefinition _mutationMiddleware; + private readonly FieldMiddlewareConfiguration _queryMiddleware; + private readonly FieldMiddlewareConfiguration _mutationMiddleware; private readonly PureFieldDelegate _leafFieldResolver; private readonly PureFieldDelegate _objectFieldResolver; private readonly PureFieldDelegate _listFieldResolver; @@ -22,7 +22,7 @@ internal sealed class ResolverTypeInterceptor : TypeInterceptor public ResolverTypeInterceptor(ExecutionHelper executionHelper) { _queryMiddleware = - new FieldMiddlewareDefinition( + new FieldMiddlewareConfiguration( next => async context => { await executionHelper.ExecuteQueryAsync(context).ConfigureAwait(false); @@ -30,21 +30,21 @@ public ResolverTypeInterceptor(ExecutionHelper executionHelper) }); _mutationMiddleware = - new FieldMiddlewareDefinition( + new FieldMiddlewareConfiguration( next => async context => { await executionHelper.ExecuteMutateAsync(context).ConfigureAwait(false); await next(context).ConfigureAwait(false); }); - _leafFieldResolver = ctx => ExecutionHelper.ExecuteLeafField(ctx); - _objectFieldResolver = ctx => executionHelper.ExecuteObjectField(ctx); - _listFieldResolver = ctx => executionHelper.ExecuteListField(ctx); + _leafFieldResolver = ExecutionHelper.ExecuteLeafField; + _objectFieldResolver = executionHelper.ExecuteObjectField; + _listFieldResolver = 
executionHelper.ExecuteListField; } public override void OnAfterResolveRootType( ITypeCompletionContext completionContext, - ObjectTypeDefinition definition, + ObjectTypeConfiguration definition, OperationType operationType) { switch (operationType) @@ -69,26 +69,26 @@ public override void OnAfterResolveRootType( public override void OnBeforeCompleteType( ITypeCompletionContext completionContext, - DefinitionBase? definition) + TypeSystemConfiguration? definition) { // We are only interested in object types here as only object types can have resolvers. - if (definition is not ObjectTypeDefinition objectTypeDef) + if (definition is not ObjectTypeConfiguration objectTypeConfig) { return; } if (ReferenceEquals(completionContext.Type, _queryType)) { - foreach (ObjectFieldDefinition field in objectTypeDef.Fields) + foreach (ObjectFieldConfiguration field in objectTypeConfig.Fields) { - field.MiddlewareDefinitions.Add(_queryMiddleware); + field.MiddlewareConfigurations.Add(_queryMiddleware); } } else if (ReferenceEquals(completionContext.Type, _mutationType)) { - foreach (ObjectFieldDefinition field in objectTypeDef.Fields) + foreach (ObjectFieldConfiguration field in objectTypeConfig.Fields) { - field.MiddlewareDefinitions.Add(_mutationMiddleware); + field.MiddlewareConfigurations.Add(_mutationMiddleware); } } else if (ReferenceEquals(completionContext.Type, _subscriptionType)) @@ -97,7 +97,7 @@ public override void OnBeforeCompleteType( } else { - foreach (ObjectFieldDefinition field in objectTypeDef.Fields) + foreach (ObjectFieldConfiguration field in objectTypeConfig.Fields) { if (field.Type is not null && completionContext.TryGetType(field.Type, out IType? 
type)) diff --git a/src/Directory.Packages.props b/src/Directory.Packages.props index da600c9f63..9213684e6e 100644 --- a/src/Directory.Packages.props +++ b/src/Directory.Packages.props @@ -8,14 +8,15 @@ - - - - - - - - + + + + + + + + + diff --git a/src/Service.GraphQLBuilder/Directives/RelationshipDirective.cs b/src/Service.GraphQLBuilder/Directives/RelationshipDirective.cs index 049327dffb..07d96fa824 100644 --- a/src/Service.GraphQLBuilder/Directives/RelationshipDirective.cs +++ b/src/Service.GraphQLBuilder/Directives/RelationshipDirective.cs @@ -50,11 +50,11 @@ public static string Target(FieldDefinitionNode field) /// /// Gets the target object type name for an input infield with a relationship directive. /// - /// The input field that is expected to have a relationship directive defined on it. + /// The input field that is expected to have a relationship directive defined on it. /// The name of the target object if the relationship is found, null otherwise. - public static string? GetTarget(IInputField infield) + public static string? GetTarget(IInputValueDefinition inputField) { - Directive? directive = (Directive?)infield.Directives.FirstOrDefault(DirectiveName); + Directive? directive = (Directive?)inputField.Directives.FirstOrDefault(DirectiveName); return directive?.GetArgumentValue("target"); } diff --git a/src/Service.GraphQLBuilder/GraphQLUtils.cs b/src/Service.GraphQLBuilder/GraphQLUtils.cs index ff43ae68fc..44c296c12c 100644 --- a/src/Service.GraphQLBuilder/GraphQLUtils.cs +++ b/src/Service.GraphQLBuilder/GraphQLUtils.cs @@ -202,10 +202,11 @@ public static bool CreateAuthorizationDirectiveIfNecessary( /// Collection of directives on GraphQL field. /// Value of @model directive, if present. /// True when name resolution succeeded, false otherwise. 
- public static bool TryExtractGraphQLFieldModelName(IDirectiveCollection fieldDirectives, + public static bool TryExtractGraphQLFieldModelName( + DirectiveCollection fieldDirectives, [NotNullWhen(true)] out string? modelName) { - modelName = fieldDirectives.FirstOrDefault()?.AsValue().Name; + modelName = fieldDirectives.FirstOrDefault()?.ToValue().Name; return !string.IsNullOrEmpty(modelName); } @@ -276,7 +277,7 @@ public static string GetEntityNameFromContext(IResolverContext context) // Example: CustomersConnectionObject - for get all scenarios. if (QueryBuilder.IsPaginationType(underlyingFieldType)) { - IObjectField subField = context.Selection.Type.NamedType() + ObjectField subField = context.Selection.Type.NamedType() .Fields[QueryBuilder.PAGINATION_FIELD_NAME]; type = subField.Type; underlyingFieldType = type.NamedType(); @@ -332,7 +333,7 @@ fieldSyntaxKind is SyntaxKind.StringValue || fieldSyntaxKind is SyntaxKind.Boole if (value.Kind == SyntaxKind.Variable) { string variableName = ((VariableNode)value).Name.Value; - IValueNode? variableValue = variables.GetVariable(variableName); + IValueNode? 
variableValue = variables.GetValue(variableName); return GetFieldDetails(variableValue, variables); } diff --git a/src/Service.GraphQLBuilder/Queries/QueryBuilder.cs b/src/Service.GraphQLBuilder/Queries/QueryBuilder.cs index 81842f9c60..a2cc63b2c2 100644 --- a/src/Service.GraphQLBuilder/Queries/QueryBuilder.cs +++ b/src/Service.GraphQLBuilder/Queries/QueryBuilder.cs @@ -236,7 +236,7 @@ public static ObjectTypeDefinitionNode AddQueryArgumentsForRelationships(ObjectT return node; } - public static ObjectType PaginationTypeToModelType(ObjectType underlyingFieldType, IReadOnlyCollection types) + public static ObjectType PaginationTypeToModelType(ObjectType underlyingFieldType, IReadOnlyCollection types) { IEnumerable modelTypes = types.Where(t => t is ObjectType) .Cast() diff --git a/src/Service.Tests/UnitTests/MultiSourceQueryExecutionUnitTests.cs b/src/Service.Tests/UnitTests/MultiSourceQueryExecutionUnitTests.cs index 986419f228..dd76845a04 100644 --- a/src/Service.Tests/UnitTests/MultiSourceQueryExecutionUnitTests.cs +++ b/src/Service.Tests/UnitTests/MultiSourceQueryExecutionUnitTests.cs @@ -109,7 +109,7 @@ public async Task TestMultiSourceQuery() .AddType() .AddType() .TryAddTypeInterceptor(new ResolverTypeInterceptor(new ExecutionHelper(queryEngineFactory.Object, mutationEngineFactory.Object, provider))); - ISchema schema = schemaBuilder.Create(); + Schema schema = schemaBuilder.Create(); IExecutionResult result = await schema.MakeExecutable().ExecuteAsync(_query); // client is mapped as belonging to the sql data source. 
diff --git a/src/Service/HealthCheck/HealthCheckHelper.cs b/src/Service/HealthCheck/HealthCheckHelper.cs index addb6b582a..9225c3aeb0 100644 --- a/src/Service/HealthCheck/HealthCheckHelper.cs +++ b/src/Service/HealthCheck/HealthCheckHelper.cs @@ -44,7 +44,7 @@ public HealthCheckHelper(ILogger logger, HttpUtilities httpUt /// /// GetHealthCheckResponse is the main function which fetches the HttpContext and then creates the comprehensive health check report. - /// Serializes the report to JSON and returns the response. + /// Serializes the report to JSON and returns the response. /// /// RuntimeConfig /// This function returns the comprehensive health report after calculating the response time of each datasource, rest and graphql health queries. @@ -54,13 +54,13 @@ public async Task GetHealthCheckResponseAsync(Ru // If the response has already been created, it will be reused. _logger.LogTrace("Comprehensive Health check is enabled in the runtime configuration."); - ComprehensiveHealthCheckReport ComprehensiveHealthCheckReport = new(); - UpdateVersionAndAppName(ref ComprehensiveHealthCheckReport); - UpdateTimestampOfResponse(ref ComprehensiveHealthCheckReport); - UpdateDabConfigurationDetails(ref ComprehensiveHealthCheckReport, runtimeConfig); - await UpdateHealthCheckDetailsAsync(ComprehensiveHealthCheckReport, runtimeConfig); - UpdateOverallHealthStatus(ref ComprehensiveHealthCheckReport); - return ComprehensiveHealthCheckReport; + ComprehensiveHealthCheckReport comprehensiveHealthCheckReport = new(); + UpdateVersionAndAppName(ref comprehensiveHealthCheckReport); + UpdateTimestampOfResponse(ref comprehensiveHealthCheckReport); + UpdateDabConfigurationDetails(ref comprehensiveHealthCheckReport, runtimeConfig); + await UpdateHealthCheckDetailsAsync(comprehensiveHealthCheckReport, runtimeConfig); + UpdateOverallHealthStatus(ref comprehensiveHealthCheckReport); + return comprehensiveHealthCheckReport; } // Updates the incoming role header with the appropriate value from 
the request headers. @@ -134,9 +134,9 @@ private static void UpdateTimestampOfResponse(ref ComprehensiveHealthCheckReport } // Updates the DAB configuration details coming from RuntimeConfig for the Health report. - private static void UpdateDabConfigurationDetails(ref ComprehensiveHealthCheckReport ComprehensiveHealthCheckReport, RuntimeConfig runtimeConfig) + private static void UpdateDabConfigurationDetails(ref ComprehensiveHealthCheckReport comprehensiveHealthCheckReport, RuntimeConfig runtimeConfig) { - ComprehensiveHealthCheckReport.ConfigurationDetails = new ConfigurationDetails + comprehensiveHealthCheckReport.ConfigurationDetails = new ConfigurationDetails { Rest = runtimeConfig.IsRestEnabled, GraphQL = runtimeConfig.IsGraphQLEnabled, @@ -147,30 +147,30 @@ private static void UpdateDabConfigurationDetails(ref ComprehensiveHealthCheckRe } // Main function to internally call for data source and entities health check. - private async Task UpdateHealthCheckDetailsAsync(ComprehensiveHealthCheckReport ComprehensiveHealthCheckReport, RuntimeConfig runtimeConfig) + private async Task UpdateHealthCheckDetailsAsync(ComprehensiveHealthCheckReport comprehensiveHealthCheckReport, RuntimeConfig runtimeConfig) { - ComprehensiveHealthCheckReport.Checks = new List(); - await UpdateDataSourceHealthCheckResultsAsync(ComprehensiveHealthCheckReport, runtimeConfig); - await UpdateEntityHealthCheckResultsAsync(ComprehensiveHealthCheckReport, runtimeConfig); + comprehensiveHealthCheckReport.Checks = new List(); + await UpdateDataSourceHealthCheckResultsAsync(comprehensiveHealthCheckReport, runtimeConfig); + await UpdateEntityHealthCheckResultsAsync(comprehensiveHealthCheckReport, runtimeConfig); } // Updates the DataSource Health Check Results in the response. 
- private async Task UpdateDataSourceHealthCheckResultsAsync(ComprehensiveHealthCheckReport ComprehensiveHealthCheckReport, RuntimeConfig runtimeConfig) + private async Task UpdateDataSourceHealthCheckResultsAsync(ComprehensiveHealthCheckReport comprehensiveHealthCheckReport, RuntimeConfig runtimeConfig) { - if (ComprehensiveHealthCheckReport.Checks != null && runtimeConfig.DataSource.IsDatasourceHealthEnabled) + if (comprehensiveHealthCheckReport.Checks != null && runtimeConfig.DataSource.IsDatasourceHealthEnabled) { string query = Utilities.GetDatSourceQuery(runtimeConfig.DataSource.DatabaseType); (int, string?) response = await ExecuteDatasourceQueryCheckAsync(query, runtimeConfig.DataSource.ConnectionString); bool isResponseTimeWithinThreshold = response.Item1 >= 0 && response.Item1 < runtimeConfig.DataSource.DatasourceThresholdMs; // Add DataSource Health Check Results - ComprehensiveHealthCheckReport.Checks.Add(new HealthCheckResultEntry + comprehensiveHealthCheckReport.Checks.Add(new HealthCheckResultEntry { Name = runtimeConfig?.DataSource?.Health?.Name ?? runtimeConfig?.DataSource?.DatabaseType.ToString(), ResponseTimeData = new ResponseTimeData { ResponseTimeMs = response.Item1, - ThresholdMs = runtimeConfig?.DataSource.DatasourceThresholdMs + ThresholdMs = runtimeConfig?.DataSource?.DatasourceThresholdMs }, Exception = !isResponseTimeWithinThreshold ? TIME_EXCEEDED_ERROR_MESSAGE : response.Item2, Tags = [HealthCheckConstants.DATASOURCE], @@ -195,7 +195,7 @@ private async Task UpdateDataSourceHealthCheckResultsAsync(ComprehensiveHealthCh return (HealthCheckConstants.ERROR_RESPONSE_TIME_MS, errorMessage); } - // Updates the Entity Health Check Results in the response. + // Updates the Entity Health Check Results in the response. // Goes through the entities one by one and executes the rest and graphql checks (if enabled). 
private async Task UpdateEntityHealthCheckResultsAsync(ComprehensiveHealthCheckReport report, RuntimeConfig runtimeConfig) { @@ -252,7 +252,7 @@ private async Task UpdateEntityHealthCheckResultsAsync(ComprehensiveHealthCheckR // Populates the Entity Health Check Results in the response for a particular entity. // Checks for Rest enabled and executes the rest query. // Checks for GraphQL enabled and executes the graphql query. - private async Task PopulateEntityHealthAsync(ComprehensiveHealthCheckReport ComprehensiveHealthCheckReport, KeyValuePair entity, RuntimeConfig runtimeConfig) + private async Task PopulateEntityHealthAsync(ComprehensiveHealthCheckReport comprehensiveHealthCheckReport, KeyValuePair entity, RuntimeConfig runtimeConfig) { // Global Rest and GraphQL Runtime Options RuntimeOptions? runtimeOptions = runtimeConfig.Runtime; @@ -265,7 +265,7 @@ private async Task PopulateEntityHealthAsync(ComprehensiveHealthCheckReport Comp { if (runtimeOptions.IsRestEnabled && entityValue.IsRestEnabled) { - ComprehensiveHealthCheckReport.Checks ??= new List(); + comprehensiveHealthCheckReport.Checks ??= new List(); // In case of REST API, use the path specified in [entity.path] (if present). // The path is trimmed to remove the leading '/' character. 
@@ -275,7 +275,7 @@ private async Task PopulateEntityHealthAsync(ComprehensiveHealthCheckReport Comp bool isResponseTimeWithinThreshold = response.Item1 >= 0 && response.Item1 < entityValue.EntityThresholdMs; // Add Entity Health Check Results - ComprehensiveHealthCheckReport.Checks.Add(new HealthCheckResultEntry + comprehensiveHealthCheckReport.Checks.Add(new HealthCheckResultEntry { Name = entityKeyName, ResponseTimeData = new ResponseTimeData @@ -291,12 +291,12 @@ private async Task PopulateEntityHealthAsync(ComprehensiveHealthCheckReport Comp if (runtimeOptions.IsGraphQLEnabled && entityValue.IsGraphQLEnabled) { - ComprehensiveHealthCheckReport.Checks ??= new List(); + comprehensiveHealthCheckReport.Checks ??= new List(); - (int, string?) response = await ExecuteGraphQLEntityQueryAsync(runtimeConfig.GraphQLPath, entityValue, entityKeyName); + (int, string?) response = await ExecuteGraphQlEntityQueryAsync(runtimeConfig.GraphQLPath, entityValue, entityKeyName); bool isResponseTimeWithinThreshold = response.Item1 >= 0 && response.Item1 < entityValue.EntityThresholdMs; - ComprehensiveHealthCheckReport.Checks.Add(new HealthCheckResultEntry + comprehensiveHealthCheckReport.Checks.Add(new HealthCheckResultEntry { Name = entityKeyName, ResponseTimeData = new ResponseTimeData @@ -329,7 +329,7 @@ private async Task PopulateEntityHealthAsync(ComprehensiveHealthCheckReport Comp } // Executes the GraphQL Entity Query and keeps track of the response time and error message. - private async Task<(int, string?)> ExecuteGraphQLEntityQueryAsync(string graphqlUriSuffix, Entity entity, string entityName) + private async Task<(int, string?)> ExecuteGraphQlEntityQueryAsync(string graphqlUriSuffix, Entity entity, string entityName) { string? 
errorMessage = null; if (entity != null) diff --git a/src/Service/Startup.cs b/src/Service/Startup.cs index ce6b3077a4..a23c23178a 100644 --- a/src/Service/Startup.cs +++ b/src/Service/Startup.cs @@ -472,7 +472,10 @@ private void AddGraphQLService(IServiceCollection services, GraphQLRuntimeOption .AddHttpRequestInterceptor() .ConfigureSchema((serviceProvider, schemaBuilder) => { - GraphQLSchemaCreator graphQLService = serviceProvider.GetRequiredService(); + // The GraphQLSchemaCreator is an application service that is not available on + // the schema specific service provider, this means we have to get it with + // the GetRootServiceProvider helper. + GraphQLSchemaCreator graphQLService = serviceProvider.GetRootServiceProvider().GetRequiredService(); graphQLService.InitializeSchemaAndResolvers(schemaBuilder); }) .AddHttpRequestInterceptor() @@ -666,10 +669,10 @@ public void Configure(IApplicationBuilder app, IWebHostEnvironment env, RuntimeC // without proper authorization headers. app.UseClientRoleHeaderAuthorizationMiddleware(); - IRequestExecutorResolver requestExecutorResolver = app.ApplicationServices.GetRequiredService(); + IRequestExecutorManager requestExecutorManager = app.ApplicationServices.GetRequiredService(); _hotReloadEventHandler.Subscribe( "GRAPHQL_SCHEMA_EVICTION_ON_CONFIG_CHANGED", - (_, _) => EvictGraphQLSchema(requestExecutorResolver)); + (_, _) => EvictGraphQLSchema(requestExecutorManager)); app.UseEndpoints(endpoints => { @@ -706,10 +709,10 @@ public void Configure(IApplicationBuilder app, IWebHostEnvironment env, RuntimeC /// /// Evicts the GraphQL schema from the request executor resolver. 
/// - private static void EvictGraphQLSchema(IRequestExecutorResolver requestExecutorResolver) + private static void EvictGraphQLSchema(IRequestExecutorManager requestExecutorResolver) { Console.WriteLine("Evicting old GraphQL schema."); - requestExecutorResolver.EvictRequestExecutor(); + requestExecutorResolver.EvictExecutor(); } /// From 51f672f1696b999064499c603a56c3316e82b562 Mon Sep 17 00:00:00 2001 From: Anusha Kolan Date: Mon, 15 Sep 2025 09:56:32 -0700 Subject: [PATCH 61/79] [MCP] Added description property to entities and GraphQL Schema. (#2861) ## Why make this change? - Adds support for entity-level descriptions in Data API Builder GraphQL schema. - This feature request aims to surface entity descriptions from the config file in the generated GraphQL schema, improving API documentation and discoverability. - See related discussion: [https://github.com/Azure/data-api-builder/issues/2834] ## What is this change? - Adds a `description` property to the entity model and ensures it is deserialized from the config. - Updates the GraphQL schema generator and converter to include entity descriptions as comments in the SDL and as HotChocolate type descriptions. - Adds unit tests to verify that entity descriptions appear in the generated schema. ## How was this tested? - [x] Unit Tests: Added tests to check for presence of entity description in generated GraphQL schema. - [x] Manual verification: Ran GraphQL introspection queries and checked schema SDL output. ## Sample Request(s) **GraphQL Introspection Query:** ```graphql { __type(name: "Todo") { name description } } ``` **Sample Query Response:** ``` { "data": { "__type": { "name": "Todo", "description": "Represents a todo item in the system" } } } ``` **Sample SDL output:** ``` """Represents a todo item in the system""" type Todo { ... 
} ``` --- schemas/dab.draft.schema.json | 4 + src/Config/ObjectModel/Entity.cs | 6 +- src/Core/Generator/SchemaGenerator.cs | 24 +++++ .../Sql/SchemaConverter.cs | 31 +++++- .../MsSqlGraphQLQueryTests.cs | 99 +++++++++++++++++++ 5 files changed, 159 insertions(+), 5 deletions(-) diff --git a/schemas/dab.draft.schema.json b/schemas/dab.draft.schema.json index 3f3004c9c6..35f2e08270 100644 --- a/schemas/dab.draft.schema.json +++ b/schemas/dab.draft.schema.json @@ -640,6 +640,10 @@ "type": "object", "additionalProperties": false, "properties": { + "description": { + "type": "string", + "description": "Optional description for the entity. Will be surfaced in generated API documentation and GraphQL schema as comments." + }, "health": { "description": "Health check configuration for entity", "type": [ "object", "null" ], diff --git a/src/Config/ObjectModel/Entity.cs b/src/Config/ObjectModel/Entity.cs index 5660864088..4b56e0478c 100644 --- a/src/Config/ObjectModel/Entity.cs +++ b/src/Config/ObjectModel/Entity.cs @@ -23,10 +23,12 @@ namespace Azure.DataApiBuilder.Config.ObjectModel; /// how long that response should be valid in the cache. /// Defines whether to enable comprehensive health check for the entity /// and how many rows to return in query and under what threshold-ms. +/// Optional description for the entity. Used for API documentation and GraphQL schema comments. public record Entity { public const string PROPERTY_PATH = "path"; public const string PROPERTY_METHODS = "methods"; + public string? Description { get; init; } public EntitySource Source { get; init; } public EntityGraphQLOptions GraphQL { get; init; } public EntityRestOptions Rest { get; init; } @@ -50,7 +52,8 @@ public Entity( Dictionary? Relationships, EntityCacheOptions? Cache = null, bool IsLinkingEntity = false, - EntityHealthCheckConfig? Health = null) + EntityHealthCheckConfig? Health = null, + string? 
Description = null) { this.Health = Health; this.Source = Source; @@ -61,6 +64,7 @@ public Entity( this.Relationships = Relationships; this.Cache = Cache; this.IsLinkingEntity = IsLinkingEntity; + this.Description = Description; } /// diff --git a/src/Core/Generator/SchemaGenerator.cs b/src/Core/Generator/SchemaGenerator.cs index 3950da26b1..ebd2aa9be9 100644 --- a/src/Core/Generator/SchemaGenerator.cs +++ b/src/Core/Generator/SchemaGenerator.cs @@ -28,11 +28,16 @@ internal class SchemaGenerator // List of JSON documents to process. private List _data; + // Name of the Azure Cosmos DB container from which the JSON data is obtained. private string _containerName; + // Dictionary mapping plural entity names to singular names based on the provided configuration. private Dictionary _entityAndSingularNameMapping = new(); + // Entities from config for description lookup + private IReadOnlyDictionary? _entities; + /// /// Initializes a new instance of the class. /// @@ -57,6 +62,9 @@ private SchemaGenerator(List data, string containerName, RuntimeCo { _entityAndSingularNameMapping.Add(item.Value.GraphQL.Singular.Pascalize(), item.Key); } + + // Convert RuntimeEntities to Dictionary for description lookup + _entities = config.Entities.ToDictionary(x => x.Key, x => x.Value); } } @@ -129,6 +137,22 @@ private string GenerateGQLSchema() // Determine if the entity is the root entity. bool isRoot = entity.Key == _containerName.Pascalize(); + // Get description from config if available + string? description = null; + if (_entityAndSingularNameMapping.ContainsKey(entity.Key) && _entities != null) + { + string configEntityName = _entityAndSingularNameMapping[entity.Key]; + if (_entities.ContainsKey(configEntityName)) + { + description = _entities[configEntityName].Description; + } + } + + if (!string.IsNullOrWhiteSpace(description)) + { + sb.AppendLine($"\"\"\"{description}\"\"\""); + } + sb.Append($"type {entity.Key} "); // Append model directive if applicable. 
diff --git a/src/Service.GraphQLBuilder/Sql/SchemaConverter.cs b/src/Service.GraphQLBuilder/Sql/SchemaConverter.cs index 206faceeaf..f8926bb6b4 100644 --- a/src/Service.GraphQLBuilder/Sql/SchemaConverter.cs +++ b/src/Service.GraphQLBuilder/Sql/SchemaConverter.cs @@ -78,6 +78,18 @@ public static ObjectTypeDefinitionNode GenerateObjectTypeDefinitionForDatabaseOb subStatusCode: DataApiBuilderException.SubStatusCodes.NotSupported); } + StringValueNode? descriptionNode = null; + if (!string.IsNullOrWhiteSpace(configEntity.Description)) + { + descriptionNode = new StringValueNode(configEntity.Description); + } + + // Set the description node if available + if (descriptionNode != null) + { + objectDefinitionNode = objectDefinitionNode.WithDescription(descriptionNode); + } + return objectDefinitionNode; } @@ -122,6 +134,12 @@ private static ObjectTypeDefinitionNode CreateObjectTypeDefinitionForStoredProce } } + StringValueNode? descriptionNode = null; + if (!string.IsNullOrWhiteSpace(configEntity.Description)) + { + descriptionNode = new StringValueNode(configEntity.Description); + } + // Top-level object type definition name should be singular. // The singularPlural.Singular value is used, and if not configured, // the top-level entity name value is used. No singularization occurs @@ -129,7 +147,7 @@ private static ObjectTypeDefinitionNode CreateObjectTypeDefinitionForStoredProce return new ObjectTypeDefinitionNode( location: null, name: new(value: GetDefinedSingularName(entityName, configEntity)), - description: null, + description: descriptionNode, directives: GenerateObjectTypeDirectivesForEntity(entityName, configEntity, rolesAllowedForEntity), new List(), fields.Values.ToImmutableList()); @@ -213,6 +231,12 @@ private static ObjectTypeDefinitionNode CreateObjectTypeDefinitionForTableOrView } } + StringValueNode? 
descriptionNode = null; + if (!string.IsNullOrWhiteSpace(configEntity.Description)) + { + descriptionNode = new StringValueNode(configEntity.Description); + } + // Top-level object type definition name should be singular. // The singularPlural.Singular value is used, and if not configured, // the top-level entity name value is used. No singularization occurs @@ -220,7 +244,7 @@ private static ObjectTypeDefinitionNode CreateObjectTypeDefinitionForTableOrView return new ObjectTypeDefinitionNode( location: null, name: new(value: GetDefinedSingularName(entityName, configEntity)), - description: null, + description: descriptionNode, directives: GenerateObjectTypeDirectivesForEntity(entityName, configEntity, rolesAllowedForEntity), new List(), fieldDefinitionNodes.Values.ToImmutableList()); @@ -580,8 +604,7 @@ private static bool FindNullabilityOfRelationship( bool isNullableRelationship = false; SourceDefinition sourceDefinition = databaseObject.SourceDefinition; if (// Retrieve all the relationship information for the source entity which is backed by this table definition - sourceDefinition.SourceEntityRelationshipMap.TryGetValue(entityName, out RelationshipMetadata? relationshipInfo) - && + sourceDefinition.SourceEntityRelationshipMap.TryGetValue(entityName, out RelationshipMetadata? relationshipInfo) && // From the relationship information, obtain the foreign key definition for the given target entity relationshipInfo.TargetEntityToFkDefinitionMap.TryGetValue(targetEntityName, out List? 
listOfForeignKeys)) diff --git a/src/Service.Tests/SqlTests/GraphQLQueryTests/MsSqlGraphQLQueryTests.cs b/src/Service.Tests/SqlTests/GraphQLQueryTests/MsSqlGraphQLQueryTests.cs index bec185e4b4..f65e7a5088 100644 --- a/src/Service.Tests/SqlTests/GraphQLQueryTests/MsSqlGraphQLQueryTests.cs +++ b/src/Service.Tests/SqlTests/GraphQLQueryTests/MsSqlGraphQLQueryTests.cs @@ -779,6 +779,105 @@ public override async Task TestNoAggregationOptionsForTableWithoutNumericFields( await base.TestNoAggregationOptionsForTableWithoutNumericFields(); } + /// + /// Tests that the entity description is present as a GraphQL comment in the generated schema for MSSQL. + /// + [TestMethod] + public void TestEntityDescriptionInGraphQLSchema() + { + Entity entity = CreateEntityWithDescription("This is a test entity description for MSSQL."); + RuntimeConfig config = CreateRuntimeConfig(entity); + List jsonArray = [ + JsonDocument.Parse("{ \"id\": 1, \"name\": \"Test\" }") + ]; + + string actualSchema = Core.Generator.SchemaGenerator.Generate(jsonArray, "TestEntity", config); + string expectedComment = "\"\"\"This is a test entity description for MSSQL.\"\"\""; + Assert.IsTrue(actualSchema.Contains(expectedComment, StringComparison.Ordinal), "Entity description should be present as a GraphQL comment for MSSQL."); + } + + /// + /// Description = null should not emit GraphQL description block. 
+ /// + [TestMethod] + public void TestEntityDescription_Null_NotInGraphQLSchema() + { + Entity entity = CreateEntityWithDescription(null); + RuntimeConfig config = CreateRuntimeConfig(entity); + string schema = Core.Generator.SchemaGenerator.Generate( + [JsonDocument.Parse("{\"id\":1}")], + "TestEntity", + config); + + Assert.IsFalse(schema.Contains("Test entity description null", StringComparison.Ordinal), "Null description must not appear in schema."); + Assert.IsTrue(schema.Contains("type TestEntity", StringComparison.Ordinal), "Type definition should still exist."); + } + + /// + /// Description = "" (empty) should not emit GraphQL description block. + /// + [TestMethod] + public void TestEntityDescription_Empty_NotInGraphQLSchema() + { + Entity entity = CreateEntityWithDescription(string.Empty); + RuntimeConfig config = CreateRuntimeConfig(entity); + string schema = Core.Generator.SchemaGenerator.Generate( + [JsonDocument.Parse("{\"id\":1}")], + "TestEntity", + config); + + Assert.IsFalse(schema.Contains("\"\"\"\"\"\"", StringComparison.Ordinal), "Empty description triple quotes should not be emitted."); + Assert.IsTrue(schema.Contains("type TestEntity", StringComparison.Ordinal), "Type definition should still exist."); + } + + /// + /// Description = whitespace should not emit GraphQL description block. 
+ /// + [TestMethod] + public void TestEntityDescription_Whitespace_NotInGraphQLSchema() + { + Entity entity = CreateEntityWithDescription(" \t "); + RuntimeConfig config = CreateRuntimeConfig(entity); + string schema = Core.Generator.SchemaGenerator.Generate( + [JsonDocument.Parse("{\"id\":1}")], + "TestEntity", + config); + + Assert.IsFalse(schema.Contains("\"\"\"", StringComparison.Ordinal), "Whitespace-only description should not produce a GraphQL description block."); + Assert.IsTrue(schema.Contains("type TestEntity", StringComparison.Ordinal), "Type definition should still exist."); + } + + private static Entity CreateEntityWithDescription(string description) + { + EntitySource source = new("TestTable", EntitySourceType.Table, null, null); + EntityGraphQLOptions gqlOptions = new("TestEntity", "TestEntities", true); + EntityRestOptions restOptions = new(null, "/test", true); + return new( + source, + gqlOptions, + restOptions, + [], + null, + null, + null, + false, + null, + Description: description + ); + } + + private static RuntimeConfig CreateRuntimeConfig(Entity entity) + { + Dictionary entityDict = new() { { "TestEntity", entity } }; + RuntimeEntities entities = new(entityDict); + return new( + "", + new DataSource(DatabaseType.MSSQL, "", null), + entities, + null + ); + } + #endregion } } From 0eeeb2bb97ee37a052b7c7e733392c116db989e7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 Sep 2025 17:55:54 +0000 Subject: [PATCH 62/79] Bump dotnet-sdk from 8.0.413 to 8.0.414 (#2870) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [dotnet-sdk](https://github.com/dotnet/sdk) from 8.0.413 to 8.0.414.
Release notes

Sourced from dotnet-sdk's releases.

.NET 8.0.20

Release

What's Changed

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=dotnet-sdk&package-manager=dotnet_sdk&previous-version=8.0.413&new-version=8.0.414)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> --- global.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/global.json b/global.json index 49e22f8151..7e9f2f6bb4 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "sdk": { - "version": "8.0.413", + "version": "8.0.414", "rollForward": "latestFeature" } } From 2834f70315831ca8f8e19005542e30caecd2ed50 Mon Sep 17 00:00:00 2001 From: Anusha Kolan Date: Wed, 17 Sep 2025 15:39:00 -0700 Subject: [PATCH 63/79] =?UTF-8?q?[MCP]=20Added=20entity=20description=20to?= =?UTF-8?q?=20OpenApiDocumentor,=20dab=20add=20and=20dab=20update=E2=80=A6?= =?UTF-8?q?=20(#2871)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Why make this change? - Provides semantic meaning to entities. - Useful in documentation (OpenAPI, CLI help, etc.). - Supports MCP tools and improves context for AI models. ## What is this change? - Adds support for specifying an entity description when using the `dab add` and `dab update` CLI commands. - Updates the OpenAPI document generation logic to include entity descriptions in the top-level array, improving API documentation for MCP suppport. - Refactors config serialization to only include the description field when present. - Adds and updates tests to verify that descriptions are correctly handled in CLI operations and OpenAPI output. ## How was this tested? This change is tested manually and with automated tests. Manually by running the commands shared below and checking if the `dab-config.json` is being updated right. Verifying the documentation generated in http://[localhost:5000/rest/openapi](http://localhost:5000/rest/openapi) has valid tags section. - [x] Integration Tests ## Sample Request(s) CLI Add entity with description. 
`dotnet src/out/cli/net8.0/Microsoft.DataApiBuilder.dll add --source MyTable--description "Represents a todo item in the system" MyEntity` CLI Update entity with description `dotnet src/out/cli/net8.0/Microsoft.DataApiBuilder.dll update MyEntity --description "Updated description"` OpenAPI Document Sample(Tags section): ``` "tags": [ { "name": "MyEntity", "description": "Represents a todo item in the system" } ] ``` --- src/Cli.Tests/AddEntityTests.cs | 44 +++++++++++++++++++ src/Cli.Tests/UpdateEntityTests.cs | 41 ++++++++++++++++- src/Cli/Commands/AddOptions.cs | 6 ++- src/Cli/Commands/EntityOptions.cs | 7 ++- src/Cli/Commands/UpdateOptions.cs | 6 ++- src/Cli/ConfigGenerator.cs | 6 ++- .../Services/OpenAPI/OpenApiDocumentor.cs | 21 +++++++-- .../StoredProcedureGeneration.cs | 21 ++++++++- 8 files changed, 139 insertions(+), 13 deletions(-) diff --git a/src/Cli.Tests/AddEntityTests.cs b/src/Cli.Tests/AddEntityTests.cs index 5dc218dcc6..53a2379557 100644 --- a/src/Cli.Tests/AddEntityTests.cs +++ b/src/Cli.Tests/AddEntityTests.cs @@ -31,6 +31,7 @@ public Task AddNewEntityWhenEntitiesEmpty() source: "MyTable", permissions: new string[] { "anonymous", "read,update" }, entity: "FirstEntity", + description: null, sourceType: null, sourceParameters: null, sourceKeyFields: null, @@ -60,6 +61,7 @@ public Task AddNewEntityWhenEntitiesNotEmpty() source: "MyTable", permissions: new string[] { "anonymous", "*" }, entity: "SecondEntity", + description: null, sourceType: null, sourceParameters: null, sourceKeyFields: null, @@ -91,6 +93,7 @@ public void AddDuplicateEntity() source: "MyTable", permissions: new string[] { "anonymous", "*" }, entity: "FirstEntity", + description: null, sourceType: null, sourceParameters: null, sourceKeyFields: null, @@ -126,6 +129,7 @@ public Task AddEntityWithAnExistingNameButWithDifferentCase() source: "MyTable", permissions: new string[] { "anonymous", "*" }, entity: "FIRSTEntity", + description: null, sourceType: null, sourceParameters: null, 
sourceKeyFields: null, @@ -156,6 +160,7 @@ public Task AddEntityWithCachingEnabled() source: "MyTable", permissions: new string[] { "anonymous", "*" }, entity: "CachingEntity", + description: null, sourceType: null, sourceParameters: null, sourceKeyFields: null, @@ -192,6 +197,7 @@ public Task AddEntityWithPolicyAndFieldProperties( source: "MyTable", permissions: new string[] { "anonymous", "delete" }, entity: "MyEntity", + description: null, sourceType: null, sourceParameters: null, sourceKeyFields: null, @@ -224,6 +230,7 @@ public Task AddNewEntityWhenEntitiesWithSourceAsStoredProcedure() source: "s001.book", permissions: new string[] { "anonymous", "execute" }, entity: "MyEntity", + description: null, sourceType: "stored-procedure", sourceParameters: new string[] { "param1:123", "param2:hello", "param3:true" }, sourceKeyFields: null, @@ -255,6 +262,7 @@ public Task TestAddStoredProcedureWithRestMethodsAndGraphQLOperations() source: "s001.book", permissions: new string[] { "anonymous", "execute" }, entity: "MyEntity", + description: null, sourceType: "stored-procedure", sourceParameters: new string[] { "param1:123", "param2:hello", "param3:true" }, sourceKeyFields: null, @@ -274,6 +282,38 @@ public Task TestAddStoredProcedureWithRestMethodsAndGraphQLOperations() return ExecuteVerifyTest(options); } + [TestMethod] + public void AddEntityWithDescriptionAndVerifyInConfig() + { + string description = "This is a test entity description."; + AddOptions options = new( + source: "MyTable", + permissions: new string[] { "anonymous", "read" }, + entity: "EntityWithDescription", + description: description, + sourceType: null, + sourceParameters: null, + sourceKeyFields: null, + restRoute: null, + graphQLType: null, + fieldsToInclude: new string[] { }, + fieldsToExclude: new string[] { }, + policyRequest: null, + policyDatabase: null, + cacheEnabled: null, + cacheTtl: null, + config: TEST_RUNTIME_CONFIG_FILE, + restMethodsForStoredProcedure: null, + 
graphQLOperationForStoredProcedure: null + ); + + string config = INITIAL_CONFIG; + Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(config, out RuntimeConfig? runtimeConfig), "Loaded base config."); + Assert.IsTrue(TryAddNewEntity(options, runtimeConfig, out RuntimeConfig updatedRuntimeConfig), "Added entity to config."); + Assert.IsNotNull(updatedRuntimeConfig.Entities["EntityWithDescription"].Description); + Assert.AreEqual(description, updatedRuntimeConfig.Entities["EntityWithDescription"].Description); + } + /// /// Simple test to verify success on adding a new entity with source object for valid fields. /// @@ -305,6 +345,7 @@ public void TestAddNewEntityWithSourceObjectHavingValidFields( source: "testSource", permissions: new string[] { "anonymous", operations }, entity: "book", + description: null, sourceType: sourceType, sourceParameters: parameters, sourceKeyFields: keyFields, @@ -364,6 +405,7 @@ public Task TestAddNewSpWithDifferentRestAndGraphQLOptions( source: "s001.book", permissions: new string[] { "anonymous", "execute" }, entity: "MyEntity", + description: null, sourceType: "stored-procedure", sourceParameters: null, sourceKeyFields: null, @@ -399,6 +441,7 @@ public void TestAddStoredProcedureWithConflictingRestGraphQLOptions( source: "s001.book", permissions: new string[] { "anonymous", "execute" }, entity: "MyEntity", + description: null, sourceType: "stored-procedure", sourceParameters: null, sourceKeyFields: null, @@ -437,6 +480,7 @@ public void TestAddEntityPermissionWithInvalidOperation(IEnumerable perm source: "MyTable", permissions: permissions, entity: "MyEntity", + description: null, sourceType: null, sourceParameters: null, sourceKeyFields: null, diff --git a/src/Cli.Tests/UpdateEntityTests.cs b/src/Cli.Tests/UpdateEntityTests.cs index 2719cf7df7..a500858c60 100644 --- a/src/Cli.Tests/UpdateEntityTests.cs +++ b/src/Cli.Tests/UpdateEntityTests.cs @@ -1063,6 +1063,41 @@ public void EnsureFailure_AddRelationshipToEntityWithDisabledGraphQL() 
Assert.IsFalse(VerifyCanUpdateRelationship(runtimeConfig, cardinality: "one", targetEntity: "SampleEntity2")); } + /// + /// Test to verify updating the description property of an entity. + /// + [TestMethod] + public void TestUpdateEntityDescription() + { + // Initial config with an old description + string initialConfig = GetInitialConfigString() + "," + @" + ""entities"": { + ""MyEntity"": { + ""source"": ""MyTable"", + ""description"": ""Old description"", + ""permissions"": [ + { + ""role"": ""anonymous"", + ""actions"": [""read""] + } + ] + } + } + }"; + + // UpdateOptions with a new description + UpdateOptions options = GenerateBaseUpdateOptions( + entity: "MyEntity", + description: "Updated description" + ); + + Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(initialConfig, out RuntimeConfig? runtimeConfig), "Parsed config file."); + Assert.IsTrue(TryUpdateExistingEntity(options, runtimeConfig, out RuntimeConfig updatedRuntimeConfig), "Successfully updated entity in the config."); + + // Assert that the description was updated + Assert.AreEqual("Updated description", updatedRuntimeConfig.Entities["MyEntity"].Description); + } + private static string GetInitialConfigString() { return @"{" + @@ -1122,7 +1157,8 @@ private static UpdateOptions GenerateBaseUpdateOptions( IEnumerable? restMethodsForStoredProcedure = null, string? graphQLOperationForStoredProcedure = null, string? cacheEnabled = null, - string? cacheTtl = null + string? cacheTtl = null, + string? 
description = null ) { return new( @@ -1150,7 +1186,8 @@ private static UpdateOptions GenerateBaseUpdateOptions( cacheTtl: cacheTtl, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: restMethodsForStoredProcedure, - graphQLOperationForStoredProcedure: graphQLOperationForStoredProcedure + graphQLOperationForStoredProcedure: graphQLOperationForStoredProcedure, + description: description ); } diff --git a/src/Cli/Commands/AddOptions.cs b/src/Cli/Commands/AddOptions.cs index 26efee7a2f..60f575f469 100644 --- a/src/Cli/Commands/AddOptions.cs +++ b/src/Cli/Commands/AddOptions.cs @@ -34,7 +34,8 @@ public AddOptions( string? policyDatabase, string? cacheEnabled, string? cacheTtl, - string? config) + string? config, + string? description) : base(entity, sourceType, sourceParameters, @@ -49,7 +50,8 @@ public AddOptions( policyDatabase, cacheEnabled, cacheTtl, - config) + config, + description) { Source = source; Permissions = permissions; diff --git a/src/Cli/Commands/EntityOptions.cs b/src/Cli/Commands/EntityOptions.cs index 737c9bd047..d2173d1775 100644 --- a/src/Cli/Commands/EntityOptions.cs +++ b/src/Cli/Commands/EntityOptions.cs @@ -25,7 +25,8 @@ public EntityOptions( string? policyDatabase, string? cacheEnabled, string? cacheTtl, - string? config) + string? config, + string? description) : base(config) { Entity = entity; @@ -42,6 +43,7 @@ public EntityOptions( PolicyDatabase = policyDatabase; CacheEnabled = cacheEnabled; CacheTtl = cacheTtl; + Description = description; } // Entity is required but we have made required as false to have custom error message (more user friendly), if not provided. @@ -86,5 +88,8 @@ public EntityOptions( [Option("cache.ttl", Required = false, HelpText = "Specify time to live in seconds for cache entries for Entity.")] public string? CacheTtl { get; } + + [Option("description", Required = false, HelpText = "Description of the entity.")] + public string? 
Description { get; } } } diff --git a/src/Cli/Commands/UpdateOptions.cs b/src/Cli/Commands/UpdateOptions.cs index f757117f08..a98d77f66d 100644 --- a/src/Cli/Commands/UpdateOptions.cs +++ b/src/Cli/Commands/UpdateOptions.cs @@ -42,7 +42,8 @@ public UpdateOptions( string? policyDatabase, string? cacheEnabled, string? cacheTtl, - string config) + string config, + string? description) : base(entity, sourceType, sourceParameters, @@ -57,7 +58,8 @@ public UpdateOptions( policyDatabase, cacheEnabled, cacheTtl, - config) + config, + description) { Source = source; Permissions = permissions; diff --git a/src/Cli/ConfigGenerator.cs b/src/Cli/ConfigGenerator.cs index 9cc53493fd..c7027ff78c 100644 --- a/src/Cli/ConfigGenerator.cs +++ b/src/Cli/ConfigGenerator.cs @@ -444,7 +444,8 @@ public static bool TryAddNewEntity(AddOptions options, RuntimeConfig initialRunt Permissions: permissionSettings, Relationships: null, Mappings: null, - Cache: cacheOptions); + Cache: cacheOptions, + Description: string.IsNullOrWhiteSpace(options.Description) ? null : options.Description); // Add entity to existing runtime config. IDictionary entities = new Dictionary(initialRuntimeConfig.Entities.Entities) @@ -1494,7 +1495,8 @@ public static bool TryUpdateExistingEntity(UpdateOptions options, RuntimeConfig Permissions: updatedPermissions, Relationships: updatedRelationships, Mappings: updatedMappings, - Cache: updatedCacheOptions); + Cache: updatedCacheOptions, + Description: string.IsNullOrWhiteSpace(options.Description) ? 
entity.Description : options.Description); IDictionary entities = new Dictionary(initialConfig.Entities.Entities) { [options.Entity] = updatedEntity diff --git a/src/Core/Services/OpenAPI/OpenApiDocumentor.cs b/src/Core/Services/OpenAPI/OpenApiDocumentor.cs index 003d7ddd13..4b6aaac78e 100644 --- a/src/Core/Services/OpenAPI/OpenApiDocumentor.cs +++ b/src/Core/Services/OpenAPI/OpenApiDocumentor.cs @@ -137,6 +137,19 @@ public void CreateDocument(bool doOverrideExistingDocument = false) Schemas = CreateComponentSchemas(runtimeConfig.Entities, runtimeConfig.DefaultDataSourceName) }; + // Collect all entity tags and their descriptions for the top-level tags array + List globalTags = new(); + foreach (KeyValuePair kvp in runtimeConfig.Entities) + { + Entity entity = kvp.Value; + string restPath = entity.Rest?.Path ?? kvp.Key; + globalTags.Add(new OpenApiTag + { + Name = restPath, + Description = string.IsNullOrWhiteSpace(entity.Description) ? null : entity.Description + }); + } + OpenApiDocument doc = new() { Info = new OpenApiInfo @@ -149,7 +162,8 @@ public void CreateDocument(bool doOverrideExistingDocument = false) new() { Url = url } }, Paths = BuildPaths(runtimeConfig.Entities, runtimeConfig.DefaultDataSourceName), - Components = components + Components = components, + Tags = globalTags }; _openApiDocument = doc; } @@ -212,10 +226,11 @@ private OpenApiPaths BuildPaths(RuntimeEntities entities, string defaultDataSour continue; } - // Explicitly exclude setting the tag's Description property since the Name property is self-explanatory. + // Set the tag's Description property to the entity's semantic description if present. OpenApiTag openApiTag = new() { - Name = entityRestPath + Name = entityRestPath, + Description = string.IsNullOrWhiteSpace(entity.Description) ? null : entity.Description }; // The OpenApiTag will categorize all paths created using the entity's name or overridden REST path value. 
diff --git a/src/Service.Tests/OpenApiDocumentor/StoredProcedureGeneration.cs b/src/Service.Tests/OpenApiDocumentor/StoredProcedureGeneration.cs index 01ee1ac0a9..b7105dfa45 100644 --- a/src/Service.Tests/OpenApiDocumentor/StoredProcedureGeneration.cs +++ b/src/Service.Tests/OpenApiDocumentor/StoredProcedureGeneration.cs @@ -59,7 +59,8 @@ public static void CreateEntities() Rest: new(Methods: EntityRestOptions.DEFAULT_SUPPORTED_VERBS), Permissions: OpenApiTestBootstrap.CreateBasicPermissions(), Mappings: null, - Relationships: null); + Relationships: null, + Description: "Represents a stored procedure for books"); Dictionary entities = new() { @@ -129,6 +130,24 @@ public void ValidateResponseBodyContents(string entityName, string[] expectedCol ValidateOpenApiReferenceContents(schemaComponentReference, expectedSchemaReferenceId, expectedColumns, expectedColumnJsonTypes); } + /// + /// Integration tests validating that entity descriptions are included in the OpenAPI document. + /// + [TestMethod] + public void OpenApiDocumentor_TagsIncludeEntityDescription() + { + // Arrange: The entity name and expected description + string entityName = "sp1"; + string expectedDescription = "Represents a stored procedure for books"; // Set this to your actual description + + // Act: Get the tags from the OpenAPI document + IList tags = _openApiDocument.Tags; + + // Assert: There is a tag for the entity and it includes the description + Assert.IsTrue(tags.Any(t => t.Name == entityName && t.Description == expectedDescription), + $"Expected tag for '{entityName}' with description '{expectedDescription}' not found."); + } + /// /// Validates that the provided OpenApiReference object has the expected schema reference id /// and that that id is present in the list of component schema in the OpenApi document. 
From a2f779cf124d1443a05c5394c47d381e0e9d4cd6 Mon Sep 17 00:00:00 2001 From: souvikghosh04 Date: Fri, 26 Sep 2025 06:11:38 +0530 Subject: [PATCH 64/79] Adding MCP capability in DAB (#2868) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Why make this change? - The linked issue proposes integrating an MCP (Model Context Protocol) server so AI/agent tooling (e.g., VS Code / Copilot–style agents) can introspect the configured data model and perform safe, structured operations against DAB-managed data sources. - This enables richer developer tooling, lowers friction for exploratory data access, and creates a foundation for future AI-assisted authoring and governance scenarios. ## What is this change? ### Introduces an MCP service layer that exposes: Schema / entity metadata derived from the existing DAB configuration (tables, stored procedures, relationships, GraphQL entity projections). Operation capabilities (read / create / update / delete) aligned with DAB authorization rules. A capability negotiation / handshake endpoint so MCP clients can discover features. MCP endpoint can be accessed with `/mcp` Sample request to discover tools- ``` POST: http://localhost:5000/mcp { "jsonrpc": "2.0", "id": "1", "method": "tools/list" } ``` ## How was this tested? The working of the MCP endpoint and the describe-entities tool is tested manually in the local environment. - Enable MCP in dab-config.json `"mcp": { "enabled": true, "path": "/mcp", "dml-tools": { "describe-entities": true } }` - The server was started locally and confirmed to be listening on `http://localhost:5000`. - Send a POST request to the MCP endpoint, `http://localhost:5000/mcp` - Use the Sample Requests shared for the body of the request. - The tools/list request successfully returned all registered tools, confirming that the MCP server and tool registry were initialized correctly. 
- The tools/call request for describe-entities returned the expected entity metadata. ## Sample Request(s) 1. Listing all tools available. `{ "jsonrpc": "2.0", "id": "1", "method": "tools/list", "params": {} }` 2. Use the describe-entities tool `{ "jsonrpc": "2.0", "id": "1", "method": "tools/call", "params": { "name": "describe-entities" } }` --------- Co-authored-by: Jerry Nixon Co-authored-by: Rahul Nishant <53243582+ranishan@users.noreply.github.com> Co-authored-by: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> Co-authored-by: Aniruddh Munde Co-authored-by: Ruben Cerna Co-authored-by: Anusha Kolan --- schemas/dab.draft.schema.json | 89 +++++++-- .../Azure.DataApiBuilder.Mcp.csproj | 22 ++ .../BuiltInTools/CreateRecordTool.cs | 87 ++++++++ .../BuiltInTools/DescribeEntitiesTool.cs | 83 ++++++++ .../Core/McpEndpointRouteBuilderExtensions.cs | 57 ++++++ .../Core/McpServerConfiguration.cs | 94 +++++++++ .../Core/McpServiceCollectionExtensions.cs | 65 ++++++ .../Core/McpToolRegistry.cs | 41 ++++ .../Core/McpToolRegistryInitializer.cs | 41 ++++ src/Azure.DataApiBuilder.Mcp/Model/Enums.cs | 19 ++ .../Model/IMcpTool.cs | 37 ++++ src/Azure.DataApiBuilder.sln | 6 + src/Cli.Tests/ConfigGeneratorTests.cs | 4 + src/Cli.Tests/ExporterTests.cs | 6 +- src/Cli.Tests/ModuleInitializer.cs | 12 ++ ...stMethodsAndGraphQLOperations.verified.txt | 4 + ...tyWithSourceAsStoredProcedure.verified.txt | 4 + ...tityWithSourceWithDefaultType.verified.txt | 4 + ...dingEntityWithoutIEnumerables.verified.txt | 4 + ...ests.TestInitForCosmosDBNoSql.verified.txt | 4 + ...toredProcedureWithRestMethods.verified.txt | 4 + ...stMethodsAndGraphQLOperations.verified.txt | 4 + ...itTests.CosmosDbNoSqlDatabase.verified.txt | 4 + ...ts.CosmosDbPostgreSqlDatabase.verified.txt | 4 + ...ionProviders_171ea8114ff71814.verified.txt | 4 + ...ionProviders_2df7a1794712f154.verified.txt | 4 + 
...ionProviders_59fe1a10aa78899d.verified.txt | 4 + ...ionProviders_b95b637ea87f16a7.verified.txt | 4 + ...ionProviders_daacbd948b7ef72f.verified.txt | 4 + ...tStartingSlashWillHaveItAdded.verified.txt | 4 + .../InitTests.MsSQLDatabase.verified.txt | 4 + ...tStartingSlashWillHaveItAdded.verified.txt | 4 + ...ConfigWithoutConnectionString.verified.txt | 4 + ...lCharactersInConnectionString.verified.txt | 4 + ...ationOptions_0546bef37027a950.verified.txt | 4 + ...ationOptions_0ac567dd32a2e8f5.verified.txt | 4 + ...ationOptions_0c06949221514e77.verified.txt | 4 + ...ationOptions_18667ab7db033e9d.verified.txt | 4 + ...ationOptions_2f42f44c328eb020.verified.txt | 4 + ...ationOptions_3243d3f3441fdcc1.verified.txt | 4 + ...ationOptions_53350b8b47df2112.verified.txt | 4 + ...ationOptions_6584e0ec46b8a11d.verified.txt | 4 + ...ationOptions_81cc88db3d4eecfb.verified.txt | 4 + ...ationOptions_8ea187616dbb5577.verified.txt | 4 + ...ationOptions_905845c29560a3ef.verified.txt | 4 + ...ationOptions_b2fd24fab5b80917.verified.txt | 4 + ...ationOptions_bd7cd088755287c9.verified.txt | 4 + ...ationOptions_d2eccba2f836b380.verified.txt | 4 + ...ationOptions_d463eed7fe5e4bbe.verified.txt | 4 + ...ationOptions_d5520dd5c33f7b8d.verified.txt | 4 + ...ationOptions_eab4a6010e602b59.verified.txt | 4 + ...ationOptions_ecaa688829b4030e.verified.txt | 4 + src/Cli.Tests/UpdateEntityTests.cs | 4 +- src/Cli/Commands/ConfigureOptions.cs | 46 +++++ src/Cli/Commands/InitOptions.cs | 15 ++ src/Cli/ConfigGenerator.cs | 179 ++++++++++++++++- .../Converters/DmlToolsConfigConverter.cs | 188 ++++++++++++++++++ .../McpRuntimeOptionsConverterFactory.cs | 140 +++++++++++++ src/Config/DataApiBuilderException.cs | 4 + src/Config/ObjectModel/ApiType.cs | 1 + src/Config/ObjectModel/DmlToolsConfig.cs | 188 ++++++++++++++++++ src/Config/ObjectModel/McpRuntimeOptions.cs | 63 ++++++ src/Config/ObjectModel/RuntimeConfig.cs | 34 ++++ src/Config/ObjectModel/RuntimeOptions.cs | 9 + src/Config/RuntimeConfigLoader.cs | 2 + 
.../Configurations/RuntimeConfigValidator.cs | 64 +++++- src/Core/Services/RestService.cs | 8 + src/Directory.Packages.props | 46 +++-- .../Helpers/RuntimeConfigAuthHelper.cs | 1 + .../Authorization/AuthorizationHelpers.cs | 1 + .../AuthorizationResolverUnitTests.cs | 1 + .../Caching/HealthEndpointCachingTests.cs | 1 + .../AuthenticationConfigValidatorUnitTests.cs | 1 + .../Configuration/ConfigurationTests.cs | 57 ++++-- .../Configuration/HealthEndpointRolesTests.cs | 1 + .../Configuration/HealthEndpointTests.cs | 62 ++++-- .../AuthorizationResolverHotReloadTests.cs | 1 + .../Telemetry/AzureLogAnalyticsTests.cs | 2 +- .../Configuration/Telemetry/FileSinkTests.cs | 2 +- .../Telemetry/OpenTelemetryTests.cs | 2 +- .../Configuration/Telemetry/TelemetryTests.cs | 2 +- .../CosmosTests/MutationTests.cs | 6 +- src/Service.Tests/CosmosTests/QueryTests.cs | 3 +- .../SchemaGeneratorFactoryTests.cs | 2 +- .../MultipleMutationBuilderTests.cs | 1 + src/Service.Tests/ModuleInitializer.cs | 14 +- ...ReadingRuntimeConfigForCosmos.verified.txt | 4 + ...tReadingRuntimeConfigForMsSql.verified.txt | 4 + ...tReadingRuntimeConfigForMySql.verified.txt | 4 + ...ingRuntimeConfigForPostgreSql.verified.txt | 4 + .../DwSqlGraphQLQueryTests.cs | 2 + src/Service.Tests/SqlTests/SqlTestHelper.cs | 1 + .../UnitTests/ConfigValidationUnitTests.cs | 65 ++++-- .../UnitTests/DbExceptionParserUnitTests.cs | 2 + .../MultiSourceQueryExecutionUnitTests.cs | 2 + .../UnitTests/MySqlQueryExecutorUnitTests.cs | 1 + .../PostgreSqlQueryExecutorUnitTests.cs | 1 + .../UnitTests/RequestValidatorUnitTests.cs | 1 + .../UnitTests/RestServiceUnitTests.cs | 1 + ...untimeConfigLoaderJsonDeserializerTests.cs | 12 +- .../UnitTests/SqlQueryExecutorUnitTests.cs | 7 + src/Service.Tests/dab-config.MsSql.json | 10 +- .../Azure.DataApiBuilder.Service.csproj | 3 +- src/Service/HealthCheck/HealthCheckHelper.cs | 1 + .../HealthCheck/Model/ConfigurationDetails.cs | 3 + src/Service/Startup.cs | 7 + 106 files changed, 1970 
insertions(+), 127 deletions(-) create mode 100644 src/Azure.DataApiBuilder.Mcp/Azure.DataApiBuilder.Mcp.csproj create mode 100644 src/Azure.DataApiBuilder.Mcp/BuiltInTools/CreateRecordTool.cs create mode 100644 src/Azure.DataApiBuilder.Mcp/BuiltInTools/DescribeEntitiesTool.cs create mode 100644 src/Azure.DataApiBuilder.Mcp/Core/McpEndpointRouteBuilderExtensions.cs create mode 100644 src/Azure.DataApiBuilder.Mcp/Core/McpServerConfiguration.cs create mode 100644 src/Azure.DataApiBuilder.Mcp/Core/McpServiceCollectionExtensions.cs create mode 100644 src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistry.cs create mode 100644 src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistryInitializer.cs create mode 100644 src/Azure.DataApiBuilder.Mcp/Model/Enums.cs create mode 100644 src/Azure.DataApiBuilder.Mcp/Model/IMcpTool.cs create mode 100644 src/Config/Converters/DmlToolsConfigConverter.cs create mode 100644 src/Config/Converters/McpRuntimeOptionsConverterFactory.cs create mode 100644 src/Config/ObjectModel/DmlToolsConfig.cs create mode 100644 src/Config/ObjectModel/McpRuntimeOptions.cs diff --git a/schemas/dab.draft.schema.json b/schemas/dab.draft.schema.json index 35f2e08270..b348ac4a4f 100644 --- a/schemas/dab.draft.schema.json +++ b/schemas/dab.draft.schema.json @@ -158,13 +158,13 @@ "type": "object", "properties": { "max-page-size": { - "type": ["integer", "null"], + "type": [ "integer", "null" ], "description": "Defines the maximum number of records that can be returned in a single page of results. If set to null, the default value is 100,000.", "default": 100000, "minimum": 1 }, "default-page-size": { - "type": ["integer", "null"], + "type": [ "integer", "null" ], "description": "Sets the default number of records returned in a single response. When this limit is reached, a continuation token is provided to retrieve the next page. 
If set to null, the default value is 100.", "default": 100, "minimum": 1 @@ -214,7 +214,7 @@ "description": "Allow enabling/disabling GraphQL requests for all entities." }, "depth-limit": { - "type": ["integer", "null"], + "type": [ "integer", "null" ], "description": "Maximum allowed depth of a GraphQL query.", "default": null }, @@ -239,13 +239,74 @@ } } }, + "mcp": { + "type": "object", + "description": "Global MCP endpoint configuration", + "additionalProperties": false, + "properties": { + "path": { + "default": "/mcp", + "type": "string" + }, + "enabled": { + "type": "boolean", + "description": "Allow enabling/disabling MCP requests for all entities.", + "default": true + }, + "dml-tools": { + "oneOf": [ + { + "type": "boolean", + "description": "Enable/disable all DML tools with default settings." + }, + { + "type": "object", + "description": "Individual DML tools configuration", + "additionalProperties": false, + "properties": { + "describe-entities": { + "type": "boolean", + "description": "Enable/disable the describe-entities tool.", + "default": false + }, + "create-record": { + "type": "boolean", + "description": "Enable/disable the create-record tool.", + "default": false + }, + "read-records": { + "type": "boolean", + "description": "Enable/disable the read-records tool.", + "default": false + }, + "update-record": { + "type": "boolean", + "description": "Enable/disable the update-record tool.", + "default": false + }, + "delete-record": { + "type": "boolean", + "description": "Enable/disable the delete-record tool.", + "default": false + }, + "execute-entity": { + "type": "boolean", + "description": "Enable/disable the execute-entity tool.", + "default": false + } + } + } + ] + } + } + }, "host": { "type": "object", "description": "Global hosting configuration", "additionalProperties": false, "properties": { "max-response-size-mb": { - "type": ["integer", "null"], + "type": [ "integer", "null" ], "description": "Specifies the maximum size, in 
megabytes, of the database response allowed in a single result. If set to null, the default value is 158 MB.", "default": 158, "minimum": 1, @@ -253,12 +314,12 @@ }, "mode": { "description": "Set if running in Development or Production mode", - "type": ["string", "null"], + "type": [ "string", "null" ], "default": "production", - "enum": ["production", "development"] + "enum": [ "production", "development" ] }, "cors": { - "type": ["object", "null"], + "type": [ "object", "null" ], "description": "Configure CORS", "additionalProperties": false, "properties": { @@ -278,7 +339,7 @@ } }, "authentication": { - "type": ["object", "null"], + "type": [ "object", "null" ], "additionalProperties": false, "properties": { "provider": { @@ -322,7 +383,7 @@ "type": "string" } }, - "required": ["audience", "issuer"] + "required": [ "audience", "issuer" ] } }, "allOf": [ @@ -338,9 +399,9 @@ ] } }, - "required": ["provider"] + "required": [ "provider" ] }, - "then": { "required": ["jwt"] }, + "then": { "required": [ "jwt" ] }, "else": { "properties": { "jwt": false } } } ] @@ -382,7 +443,7 @@ "default": true } }, - "required": ["connection-string"] + "required": [ "connection-string" ] }, "open-telemetry": { "type": "object", @@ -405,7 +466,7 @@ "type": "string", "description": "Open Telemetry protocol", "default": "grpc", - "enum": ["grpc", "httpprotobuf"] + "enum": [ "grpc", "httpprotobuf" ] }, "enabled": { "type": "boolean", @@ -413,7 +474,7 @@ "default": true } }, - "required": ["endpoint"] + "required": [ "endpoint" ] }, "azure-log-analytics": { "type": "object", diff --git a/src/Azure.DataApiBuilder.Mcp/Azure.DataApiBuilder.Mcp.csproj b/src/Azure.DataApiBuilder.Mcp/Azure.DataApiBuilder.Mcp.csproj new file mode 100644 index 0000000000..f675f8d8d1 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Azure.DataApiBuilder.Mcp.csproj @@ -0,0 +1,22 @@ + + + + net8.0 + enable + enable + + + + + + + + + + + + + + + + diff --git 
a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/CreateRecordTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/CreateRecordTool.cs new file mode 100644 index 0000000000..ed5425c515 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/CreateRecordTool.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json; +using Azure.DataApiBuilder.Mcp.Model; +using ModelContextProtocol.Protocol; +using static Azure.DataApiBuilder.Mcp.Model.McpEnums; + +namespace Azure.DataApiBuilder.Mcp.BuiltInTools +{ + public class CreateRecordTool : IMcpTool + { + public ToolType ToolType { get; } = ToolType.BuiltIn; + + public Tool GetToolMetadata() + { + return new Tool + { + Name = "create-record", + Description = "Creates a new record in the specified entity.", + InputSchema = JsonSerializer.Deserialize( + @"{ + ""type"": ""object"", + ""properties"": { + ""entity"": { + ""type"": ""string"", + ""description"": ""The name of the entity"" + }, + ""data"": { + ""type"": ""object"", + ""description"": ""The data for the new record"" + } + }, + ""required"": [""entity"", ""data""] + }" + ) + }; + } + + public Task ExecuteAsync( + JsonDocument? arguments, + IServiceProvider serviceProvider, + CancellationToken cancellationToken = default) + { + if (arguments == null) + { + return Task.FromResult(new CallToolResult + { + Content = [new TextContentBlock { Type = "text", Text = "Error: No arguments provided" }] + }); + } + + try + { + // Extract arguments + JsonElement root = arguments.RootElement; + + if (!root.TryGetProperty("entity", out JsonElement entityElement) || + !root.TryGetProperty("data", out JsonElement dataElement)) + { + return Task.FromResult(new CallToolResult + { + Content = [new TextContentBlock { Type = "text", Text = "Error: Missing required arguments 'entity' or 'data'" }] + }); + } + + string entityName = entityElement.GetString() ?? 
string.Empty; + + // TODO: Implement actual create logic using DAB's internal services + // For now, return a placeholder response + string result = $"Would create record in entity '{entityName}' with data: {dataElement.GetRawText()}"; + + return Task.FromResult(new CallToolResult + { + Content = [new TextContentBlock { Type = "text", Text = result }] + }); + } + catch (Exception ex) + { + return Task.FromResult(new CallToolResult + { + Content = [new TextContentBlock { Type = "text", Text = $"Error: {ex.Message}" }] + }); + } + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/DescribeEntitiesTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/DescribeEntitiesTool.cs new file mode 100644 index 0000000000..3e7ade6075 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/DescribeEntitiesTool.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Core.Configurations; +using Azure.DataApiBuilder.Mcp.Model; +using Microsoft.Extensions.DependencyInjection; +using ModelContextProtocol.Protocol; +using static Azure.DataApiBuilder.Mcp.Model.McpEnums; + +namespace Azure.DataApiBuilder.Mcp.BuiltInTools +{ + public class DescribeEntitiesTool : IMcpTool + { + public ToolType ToolType { get; } = ToolType.BuiltIn; + + public Tool GetToolMetadata() + { + return new Tool + { + Name = "describe-entities", + Description = "Lists and describes all entities in the database." + }; + } + + public Task ExecuteAsync( + JsonDocument? arguments, + IServiceProvider serviceProvider, + CancellationToken cancellationToken = default) + { + try + { + // Get the runtime config provider + RuntimeConfigProvider? runtimeConfigProvider = serviceProvider.GetService(); + if (runtimeConfigProvider == null || !runtimeConfigProvider.TryGetConfig(out RuntimeConfig? 
runtimeConfig)) + { + return Task.FromResult(new CallToolResult + { + Content = [new TextContentBlock { Type = "text", Text = "Error: Runtime configuration not available." }] + }); + } + + // Extract entity information from the runtime config + Dictionary entities = new(); + + if (runtimeConfig.Entities != null) + { + foreach (KeyValuePair entity in runtimeConfig.Entities) + { + entities[entity.Key] = new + { + source = entity.Value.Source, + permissions = entity.Value.Permissions?.Select(p => new + { + role = p.Role, + actions = p.Actions + }) + }; + } + } + + string entitiesJson = JsonSerializer.Serialize(entities, new JsonSerializerOptions + { + WriteIndented = true, + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }); + + return Task.FromResult(new CallToolResult + { + Content = [new TextContentBlock { Type = "application/json", Text = entitiesJson }] + }); + } + catch (Exception ex) + { + return Task.FromResult(new CallToolResult + { + Content = [new TextContentBlock { Type = "text", Text = $"Error: {ex.Message}" }] + }); + } + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Core/McpEndpointRouteBuilderExtensions.cs b/src/Azure.DataApiBuilder.Mcp/Core/McpEndpointRouteBuilderExtensions.cs new file mode 100644 index 0000000000..6401e17e22 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Core/McpEndpointRouteBuilderExtensions.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Diagnostics.CodeAnalysis; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Core.Configurations; +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Routing; + +namespace Azure.DataApiBuilder.Mcp.Core +{ + /// + /// Extension methods for mapping MCP endpoints to an . + /// + public static class McpEndpointRouteBuilderExtensions + { + /// + /// Maps the MCP endpoint to the specified if MCP is enabled in the runtime configuration. 
+ /// + public static IEndpointRouteBuilder MapDabMcp( + this IEndpointRouteBuilder endpoints, + RuntimeConfigProvider runtimeConfigProvider, + [StringSyntax("Route")] string pattern = "") + { + if (!TryGetMcpOptions(runtimeConfigProvider, out McpRuntimeOptions? mcpOptions) || mcpOptions == null || !mcpOptions.Enabled) + { + return endpoints; + } + + string mcpPath = mcpOptions.Path ?? McpRuntimeOptions.DEFAULT_PATH; + + // Map the MCP endpoint + endpoints.MapMcp(mcpPath); + + return endpoints; + } + + /// + /// Gets MCP options from the runtime configuration + /// + /// Runtime config provider + /// MCP options + /// True if MCP options were found, false otherwise + private static bool TryGetMcpOptions(RuntimeConfigProvider runtimeConfigProvider, out McpRuntimeOptions? mcpOptions) + { + mcpOptions = null; + + if (!runtimeConfigProvider.TryGetConfig(out RuntimeConfig? runtimeConfig)) + { + return false; + } + + mcpOptions = runtimeConfig?.Runtime?.Mcp; + return mcpOptions != null; + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Core/McpServerConfiguration.cs b/src/Azure.DataApiBuilder.Mcp/Core/McpServerConfiguration.cs new file mode 100644 index 0000000000..86cccd2aaf --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Core/McpServerConfiguration.cs @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +using System.Text.Json; +using Azure.DataApiBuilder.Mcp.Model; +using Microsoft.Extensions.DependencyInjection; +using ModelContextProtocol; +using ModelContextProtocol.Protocol; + +namespace Azure.DataApiBuilder.Mcp.Core +{ + /// + /// Configuration for MCP server capabilities and handlers + /// + internal static class McpServerConfiguration + { + /// + /// Configures the MCP server with tool capabilities + /// + internal static IServiceCollection ConfigureMcpServer(this IServiceCollection services) + { + services.AddMcpServer(options => + { + options.ServerInfo = new() { Name = "Data API builder MCP Server", Version = "1.0.0" }; + options.Capabilities = new() + { + Tools = new() + { + ListToolsHandler = (request, ct) => + { + McpToolRegistry? toolRegistry = request.Services?.GetRequiredService(); + if (toolRegistry == null) + { + throw new InvalidOperationException("Tool registry is not available."); + } + + List tools = toolRegistry.GetAllTools().ToList(); + + return ValueTask.FromResult(new ListToolsResult + { + Tools = tools + }); + }, + CallToolHandler = async (request, ct) => + { + McpToolRegistry? toolRegistry = request.Services?.GetRequiredService(); + if (toolRegistry == null) + { + throw new InvalidOperationException("Tool registry is not available."); + } + + string? toolName = request.Params?.Name; + if (string.IsNullOrEmpty(toolName)) + { + throw new McpException("Tool name is required."); + } + + if (!toolRegistry.TryGetTool(toolName, out IMcpTool? tool)) + { + throw new McpException($"Unknown tool: '{toolName}'"); + } + + JsonDocument? 
arguments = null; + if (request.Params?.Arguments != null) + { + // Convert IReadOnlyDictionary to JsonDocument + Dictionary jsonObject = new(); + foreach (KeyValuePair kvp in request.Params.Arguments) + { + jsonObject[kvp.Key] = kvp.Value; + } + + string json = JsonSerializer.Serialize(jsonObject); + arguments = JsonDocument.Parse(json); + } + + try + { + return await tool!.ExecuteAsync(arguments, request.Services!, ct); + } + finally + { + arguments?.Dispose(); + } + } + } + }; + }) + .WithHttpTransport(); + + return services; + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Core/McpServiceCollectionExtensions.cs b/src/Azure.DataApiBuilder.Mcp/Core/McpServiceCollectionExtensions.cs new file mode 100644 index 0000000000..01f6015786 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Core/McpServiceCollectionExtensions.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Reflection; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Core.Configurations; +using Azure.DataApiBuilder.Mcp.Model; +using Microsoft.Extensions.DependencyInjection; + +namespace Azure.DataApiBuilder.Mcp.Core +{ + /// + /// Extension methods for configuring MCP services in the DI container + /// + public static class McpServiceCollectionExtensions + { + /// + /// Adds MCP server and related services to the service collection + /// + public static IServiceCollection AddDabMcpServer(this IServiceCollection services, RuntimeConfigProvider runtimeConfigProvider) + { + if (!runtimeConfigProvider.TryGetConfig(out RuntimeConfig? 
runtimeConfig)) + { + // If config is not available, skip MCP setup + return services; + } + + // Only add MCP server if it's enabled in the configuration + if (!runtimeConfig.IsMcpEnabled) + { + return services; + } + + // Register core MCP services + services.AddSingleton(); + services.AddHostedService(); + + // Auto-discover and register all MCP tools + RegisterAllMcpTools(services); + + // Configure MCP server + services.ConfigureMcpServer(); + + return services; + } + + /// + /// Automatically discovers and registers all classes implementing IMcpTool + /// + private static void RegisterAllMcpTools(IServiceCollection services) + { + Assembly mcpAssembly = typeof(IMcpTool).Assembly; + + IEnumerable toolTypes = mcpAssembly.GetTypes() + .Where(t => t.IsClass && + !t.IsAbstract && + typeof(IMcpTool).IsAssignableFrom(t)); + + foreach (Type toolType in toolTypes) + { + services.AddSingleton(typeof(IMcpTool), toolType); + } + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistry.cs b/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistry.cs new file mode 100644 index 0000000000..9c9b96d72b --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistry.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using Azure.DataApiBuilder.Mcp.Model; +using ModelContextProtocol.Protocol; + +namespace Azure.DataApiBuilder.Mcp.Core +{ + /// + /// Registry for managing MCP tools + /// + public class McpToolRegistry + { + private readonly Dictionary _tools = new(); + + /// + /// Registers a tool in the registry + /// + public void RegisterTool(IMcpTool tool) + { + Tool metadata = tool.GetToolMetadata(); + _tools[metadata.Name] = tool; + } + + /// + /// Gets all registered tools + /// + public IEnumerable GetAllTools() + { + return _tools.Values.Select(t => t.GetToolMetadata()); + } + + /// + /// Tries to get a tool by name + /// + public bool TryGetTool(string toolName, out IMcpTool? 
tool) + { + return _tools.TryGetValue(toolName, out tool); + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistryInitializer.cs b/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistryInitializer.cs new file mode 100644 index 0000000000..97d0dac7f3 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistryInitializer.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using Azure.DataApiBuilder.Mcp.Model; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; + +namespace Azure.DataApiBuilder.Mcp.Core +{ + /// + /// Hosted service to initialize the MCP tool registry + /// + public class McpToolRegistryInitializer : IHostedService + { + private readonly IServiceProvider _serviceProvider; + private readonly McpToolRegistry _toolRegistry; + + public McpToolRegistryInitializer(IServiceProvider serviceProvider, McpToolRegistry toolRegistry) + { + _serviceProvider = serviceProvider; + _toolRegistry = toolRegistry; + } + + public Task StartAsync(CancellationToken cancellationToken) + { + // Register all IMcpTool implementations + IEnumerable tools = _serviceProvider.GetServices(); + foreach (IMcpTool tool in tools) + { + _toolRegistry.RegisterTool(tool); + } + + return Task.CompletedTask; + } + + public Task StopAsync(CancellationToken cancellationToken) + { + return Task.CompletedTask; + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Model/Enums.cs b/src/Azure.DataApiBuilder.Mcp/Model/Enums.cs new file mode 100644 index 0000000000..84ca49e1b0 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Model/Enums.cs @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +namespace Azure.DataApiBuilder.Mcp.Model +{ + public class McpEnums + { + /// + /// Specifies the type of tool. + /// + /// This enumeration defines whether a tool is a built-in tool provided by the system or + /// a custom tool defined by the user. 
+ public enum ToolType + { + BuiltIn, + Custom + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Model/IMcpTool.cs b/src/Azure.DataApiBuilder.Mcp/Model/IMcpTool.cs new file mode 100644 index 0000000000..bbee6a9304 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Model/IMcpTool.cs @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json; +using ModelContextProtocol.Protocol; +using static Azure.DataApiBuilder.Mcp.Model.McpEnums; + +namespace Azure.DataApiBuilder.Mcp.Model +{ + /// + /// Interface for MCP tool implementations + /// + public interface IMcpTool + { + /// + /// Gets the type of the tool. + /// + ToolType ToolType { get; } + + /// + /// Gets the tool metadata + /// + Tool GetToolMetadata(); + + /// + /// Executes the tool with the provided arguments + /// + /// The JSON arguments passed to the tool + /// The service provider for resolving dependencies + /// Cancellation token + /// The tool execution result + Task ExecuteAsync( + JsonDocument? 
arguments, + IServiceProvider serviceProvider, + CancellationToken cancellationToken = default); + } +} diff --git a/src/Azure.DataApiBuilder.sln b/src/Azure.DataApiBuilder.sln index e7f61fa3ed..aa3c8e2bad 100644 --- a/src/Azure.DataApiBuilder.sln +++ b/src/Azure.DataApiBuilder.sln @@ -31,6 +31,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Azure.DataApiBuilder.Core", EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Azure.DataApiBuilder.Product", "Product\Azure.DataApiBuilder.Product.csproj", "{E3D2076C-EE49-43A0-8F92-5FC41EC99DA7}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Azure.DataApiBuilder.Mcp", "Azure.DataApiBuilder.Mcp\Azure.DataApiBuilder.Mcp.csproj", "{A287E849-A043-4F37-BC40-A87C4705F583}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -73,6 +75,10 @@ Global {E3D2076C-EE49-43A0-8F92-5FC41EC99DA7}.Debug|Any CPU.Build.0 = Debug|Any CPU {E3D2076C-EE49-43A0-8F92-5FC41EC99DA7}.Release|Any CPU.ActiveCfg = Release|Any CPU {E3D2076C-EE49-43A0-8F92-5FC41EC99DA7}.Release|Any CPU.Build.0 = Release|Any CPU + {A287E849-A043-4F37-BC40-A87C4705F583}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A287E849-A043-4F37-BC40-A87C4705F583}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A287E849-A043-4F37-BC40-A87C4705F583}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A287E849-A043-4F37-BC40-A87C4705F583}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/src/Cli.Tests/ConfigGeneratorTests.cs b/src/Cli.Tests/ConfigGeneratorTests.cs index 6094189f93..58e006b75d 100644 --- a/src/Cli.Tests/ConfigGeneratorTests.cs +++ b/src/Cli.Tests/ConfigGeneratorTests.cs @@ -163,6 +163,10 @@ public void TestSpecialCharactersInConnectionString() ""path"": ""/An_"", ""allow-introspection"": true }, + ""mcp"": { + ""enabled"": true, + ""path"": ""/mcp"" + }, ""host"": { ""cors"": { ""origins"": [], 
diff --git a/src/Cli.Tests/ExporterTests.cs b/src/Cli.Tests/ExporterTests.cs index aecd6455a3..3735dc43a1 100644 --- a/src/Cli.Tests/ExporterTests.cs +++ b/src/Cli.Tests/ExporterTests.cs @@ -21,7 +21,7 @@ public void ExportGraphQLFromDabService_LogsWhenHttpsWorks() RuntimeConfig runtimeConfig = new( Schema: "schema", DataSource: new DataSource(DatabaseType.MSSQL, "", new()), - Runtime: new(Rest: new(), GraphQL: new(), Host: new(null, null)), + Runtime: new(Rest: new(), GraphQL: new(), Mcp: new(), Host: new(null, null)), Entities: new(new Dictionary()) ); @@ -59,7 +59,7 @@ public void ExportGraphQLFromDabService_LogsFallbackToHttp_WhenHttpsFails() RuntimeConfig runtimeConfig = new( Schema: "schema", DataSource: new DataSource(DatabaseType.MSSQL, "", new()), - Runtime: new(Rest: new(), GraphQL: new(), Host: new(null, null)), + Runtime: new(Rest: new(), GraphQL: new(), Mcp: new(), Host: new(null, null)), Entities: new(new Dictionary()) ); @@ -105,7 +105,7 @@ public void ExportGraphQLFromDabService_ThrowsException_WhenBothHttpsAndHttpFail RuntimeConfig runtimeConfig = new( Schema: "schema", DataSource: new DataSource(DatabaseType.MSSQL, "", new()), - Runtime: new(Rest: new(), GraphQL: new(), Host: new(null, null)), + Runtime: new(Rest: new(), GraphQL: new(), Mcp: new(), Host: new(null, null)), Entities: new(new Dictionary()) ); diff --git a/src/Cli.Tests/ModuleInitializer.cs b/src/Cli.Tests/ModuleInitializer.cs index 2cfba899ea..e00dc00a89 100644 --- a/src/Cli.Tests/ModuleInitializer.cs +++ b/src/Cli.Tests/ModuleInitializer.cs @@ -47,6 +47,10 @@ public static void Init() VerifierSettings.IgnoreMember(options => options.IsGraphQLEnabled); // Ignore the entity IsGraphQLEnabled as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(entity => entity.IsGraphQLEnabled); + // Ignore the global IsMcpEnabled as that's unimportant from a test standpoint. 
+ VerifierSettings.IgnoreMember(config => config.IsMcpEnabled); + // Ignore the global RuntimeOptions.IsMcpEnabled as that's unimportant from a test standpoint. + VerifierSettings.IgnoreMember(options => options.IsMcpEnabled); // Ignore the global IsHealthEnabled as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.IsHealthEnabled); // Ignore the global RuntimeOptions.IsHealthCheckEnabled as that's unimportant from a test standpoint. @@ -67,12 +71,18 @@ public static void Init() VerifierSettings.IgnoreMember(config => config.IsGraphQLEnabled); // Ignore the IsRestEnabled as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.IsRestEnabled); + // Ignore the IsMcpEnabled as that's unimportant from a test standpoint. + VerifierSettings.IgnoreMember(config => config.IsMcpEnabled); + // Ignore the McpDmlTools as that's unimportant from a test standpoint. + VerifierSettings.IgnoreMember(config => config.McpDmlTools); // Ignore the IsStaticWebAppsIdentityProvider as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.IsStaticWebAppsIdentityProvider); // Ignore the RestPath as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.RestPath); // Ignore the GraphQLPath as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.GraphQLPath); + // Ignore the McpPath as that's unimportant from a test standpoint. + VerifierSettings.IgnoreMember(config => config.McpPath); // Ignore the AllowIntrospection as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.AllowIntrospection); // Ignore the EnableAggregation as that's unimportant from a test standpoint. @@ -101,6 +111,8 @@ public static void Init() VerifierSettings.IgnoreMember(options => options.UserProvidedDepthLimit); // Ignore EnableLegacyDateTimeScalar as that's not serialized in our config file. 
VerifierSettings.IgnoreMember(options => options.EnableLegacyDateTimeScalar); + // Ignore UserProvidedPath as that's not serialized in our config file. + VerifierSettings.IgnoreMember(options => options.UserProvidedPath); // Customise the path where we store snapshots, so they are easier to locate in a PR review. VerifyBase.DerivePathInfo( (sourceFile, projectDirectory, type, method) => new( diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt index a76f72b9a0..226c4e2a20 100644 --- a/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt +++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt index 95415c1685..c4eb43648c 100644 --- a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt +++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceWithDefaultType.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceWithDefaultType.verified.txt index ee8dbf6199..a77ecc134b 100644 
--- a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceWithDefaultType.verified.txt +++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceWithDefaultType.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithoutIEnumerables.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithoutIEnumerables.verified.txt index 0d0afda2bf..a19694b688 100644 --- a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithoutIEnumerables.verified.txt +++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithoutIEnumerables.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestInitForCosmosDBNoSql.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestInitForCosmosDBNoSql.verified.txt index cbb2df5fb8..081c5f8e55 100644 --- a/src/Cli.Tests/Snapshots/EndToEndTests.TestInitForCosmosDBNoSql.verified.txt +++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestInitForCosmosDBNoSql.verified.txt @@ -17,6 +17,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt index 0c20e9fc25..5a6a50d38e 100644 --- a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt +++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, 
AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt index 27b20753d3..540a1b5a1d 100644 --- a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt +++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/InitTests.CosmosDbNoSqlDatabase.verified.txt b/src/Cli.Tests/Snapshots/InitTests.CosmosDbNoSqlDatabase.verified.txt index 2af3cbc907..b3f63dd336 100644 --- a/src/Cli.Tests/Snapshots/InitTests.CosmosDbNoSqlDatabase.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.CosmosDbNoSqlDatabase.verified.txt @@ -17,6 +17,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/InitTests.CosmosDbPostgreSqlDatabase.verified.txt b/src/Cli.Tests/Snapshots/InitTests.CosmosDbPostgreSqlDatabase.verified.txt index ca3b61588b..42e0ff5e2f 100644 --- a/src/Cli.Tests/Snapshots/InitTests.CosmosDbPostgreSqlDatabase.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.CosmosDbPostgreSqlDatabase.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_171ea8114ff71814.verified.txt 
b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_171ea8114ff71814.verified.txt index 93190d1d9d..0af93023dc 100644 --- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_171ea8114ff71814.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_171ea8114ff71814.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_2df7a1794712f154.verified.txt b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_2df7a1794712f154.verified.txt index 5c52bc12c1..9e77b24d74 100644 --- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_2df7a1794712f154.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_2df7a1794712f154.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_59fe1a10aa78899d.verified.txt b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_59fe1a10aa78899d.verified.txt index 7b0a4674eb..32f72a7a54 100644 --- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_59fe1a10aa78899d.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_59fe1a10aa78899d.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: 
/mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_b95b637ea87f16a7.verified.txt b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_b95b637ea87f16a7.verified.txt index dc60d762cc..24416a0d02 100644 --- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_b95b637ea87f16a7.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_b95b637ea87f16a7.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_daacbd948b7ef72f.verified.txt b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_daacbd948b7ef72f.verified.txt index 7a67eca701..6c674a4772 100644 --- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_daacbd948b7ef72f.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_daacbd948b7ef72f.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/InitTests.GraphQLPathWithoutStartingSlashWillHaveItAdded.verified.txt b/src/Cli.Tests/Snapshots/InitTests.GraphQLPathWithoutStartingSlashWillHaveItAdded.verified.txt index 8c2ffbbcac..b6aac13236 100644 --- a/src/Cli.Tests/Snapshots/InitTests.GraphQLPathWithoutStartingSlashWillHaveItAdded.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.GraphQLPathWithoutStartingSlashWillHaveItAdded.verified.txt @@ -16,6 +16,10 @@ Path: /abc, AllowIntrospection: 
true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/InitTests.MsSQLDatabase.verified.txt b/src/Cli.Tests/Snapshots/InitTests.MsSQLDatabase.verified.txt index da7937d1d9..8841c0f326 100644 --- a/src/Cli.Tests/Snapshots/InitTests.MsSQLDatabase.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.MsSQLDatabase.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.RestPathWithoutStartingSlashWillHaveItAdded.verified.txt b/src/Cli.Tests/Snapshots/InitTests.RestPathWithoutStartingSlashWillHaveItAdded.verified.txt index ef8c7173d5..68e4d231fd 100644 --- a/src/Cli.Tests/Snapshots/InitTests.RestPathWithoutStartingSlashWillHaveItAdded.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.RestPathWithoutStartingSlashWillHaveItAdded.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/InitTests.TestInitializingConfigWithoutConnectionString.verified.txt b/src/Cli.Tests/Snapshots/InitTests.TestInitializingConfigWithoutConnectionString.verified.txt index 72f66f82c9..3c281ad6aa 100644 --- a/src/Cli.Tests/Snapshots/InitTests.TestInitializingConfigWithoutConnectionString.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.TestInitializingConfigWithoutConnectionString.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.TestSpecialCharactersInConnectionString.verified.txt b/src/Cli.Tests/Snapshots/InitTests.TestSpecialCharactersInConnectionString.verified.txt index 7b0a4674eb..32f72a7a54 100644 --- a/src/Cli.Tests/Snapshots/InitTests.TestSpecialCharactersInConnectionString.verified.txt +++ 
b/src/Cli.Tests/Snapshots/InitTests.TestSpecialCharactersInConnectionString.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0546bef37027a950.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0546bef37027a950.verified.txt index cbaaa45754..888466ab4a 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0546bef37027a950.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0546bef37027a950.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0ac567dd32a2e8f5.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0ac567dd32a2e8f5.verified.txt index da7937d1d9..8841c0f326 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0ac567dd32a2e8f5.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0ac567dd32a2e8f5.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0c06949221514e77.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0c06949221514e77.verified.txt index 62fc407842..d56e05c483 100644 --- 
a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0c06949221514e77.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0c06949221514e77.verified.txt @@ -21,6 +21,10 @@ } } }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_18667ab7db033e9d.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_18667ab7db033e9d.verified.txt index 3285438ab7..bc31484242 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_18667ab7db033e9d.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_18667ab7db033e9d.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_2f42f44c328eb020.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_2f42f44c328eb020.verified.txt index cbaaa45754..888466ab4a 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_2f42f44c328eb020.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_2f42f44c328eb020.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_3243d3f3441fdcc1.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_3243d3f3441fdcc1.verified.txt index 3285438ab7..bc31484242 
100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_3243d3f3441fdcc1.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_3243d3f3441fdcc1.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_53350b8b47df2112.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_53350b8b47df2112.verified.txt index a43e68277c..48f5e7a7c9 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_53350b8b47df2112.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_53350b8b47df2112.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_6584e0ec46b8a11d.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_6584e0ec46b8a11d.verified.txt index 9740a85a77..8fa9677f1d 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_6584e0ec46b8a11d.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_6584e0ec46b8a11d.verified.txt @@ -17,6 +17,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_81cc88db3d4eecfb.verified.txt 
b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_81cc88db3d4eecfb.verified.txt index be47d537b2..e3108801f5 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_81cc88db3d4eecfb.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_81cc88db3d4eecfb.verified.txt @@ -21,6 +21,10 @@ } } }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_8ea187616dbb5577.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_8ea187616dbb5577.verified.txt index 673c21dae4..59f6636fb2 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_8ea187616dbb5577.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_8ea187616dbb5577.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_905845c29560a3ef.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_905845c29560a3ef.verified.txt index cbaaa45754..888466ab4a 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_905845c29560a3ef.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_905845c29560a3ef.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git 
a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_b2fd24fab5b80917.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_b2fd24fab5b80917.verified.txt index 9740a85a77..8fa9677f1d 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_b2fd24fab5b80917.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_b2fd24fab5b80917.verified.txt @@ -17,6 +17,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_bd7cd088755287c9.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_bd7cd088755287c9.verified.txt index 9740a85a77..8fa9677f1d 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_bd7cd088755287c9.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_bd7cd088755287c9.verified.txt @@ -17,6 +17,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d2eccba2f836b380.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d2eccba2f836b380.verified.txt index a43e68277c..48f5e7a7c9 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d2eccba2f836b380.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d2eccba2f836b380.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp 
+ }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d463eed7fe5e4bbe.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d463eed7fe5e4bbe.verified.txt index 673c21dae4..59f6636fb2 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d463eed7fe5e4bbe.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d463eed7fe5e4bbe.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d5520dd5c33f7b8d.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d5520dd5c33f7b8d.verified.txt index a43e68277c..48f5e7a7c9 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d5520dd5c33f7b8d.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d5520dd5c33f7b8d.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_eab4a6010e602b59.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_eab4a6010e602b59.verified.txt index 3285438ab7..bc31484242 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_eab4a6010e602b59.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_eab4a6010e602b59.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: 
true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_ecaa688829b4030e.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_ecaa688829b4030e.verified.txt index 673c21dae4..59f6636fb2 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_ecaa688829b4030e.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_ecaa688829b4030e.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/UpdateEntityTests.cs b/src/Cli.Tests/UpdateEntityTests.cs index a500858c60..663334c5e8 100644 --- a/src/Cli.Tests/UpdateEntityTests.cs +++ b/src/Cli.Tests/UpdateEntityTests.cs @@ -1004,7 +1004,7 @@ public void TestVerifyCanUpdateRelationshipInvalidOptions(string db, string card RuntimeConfig runtimeConfig = new( Schema: "schema", DataSource: new DataSource(EnumExtensions.Deserialize(db), "", new()), - Runtime: new(Rest: new(), GraphQL: new(), Host: new(null, null)), + Runtime: new(Rest: new(), GraphQL: new(), Mcp: new(), Host: new(null, null)), Entities: new(new Dictionary()) ); @@ -1056,7 +1056,7 @@ public void EnsureFailure_AddRelationshipToEntityWithDisabledGraphQL() RuntimeConfig runtimeConfig = new( Schema: "schema", DataSource: new DataSource(DatabaseType.MSSQL, "", new()), - Runtime: new(Rest: new(), GraphQL: new(), Host: new(null, null)), + Runtime: new(Rest: new(), GraphQL: new(), Mcp: new(), Host: new(null, null)), Entities: new(entityMap) ); diff --git a/src/Cli/Commands/ConfigureOptions.cs b/src/Cli/Commands/ConfigureOptions.cs index 4f61b2007b..60cb12c3f8 100644 --- a/src/Cli/Commands/ConfigureOptions.cs +++ b/src/Cli/Commands/ConfigureOptions.cs @@ -36,6 +36,15 @@ public 
ConfigureOptions( bool? runtimeRestEnabled = null, string? runtimeRestPath = null, bool? runtimeRestRequestBodyStrict = null, + bool? runtimeMcpEnabled = null, + string? runtimeMcpPath = null, + bool? runtimeMcpDmlToolsEnabled = null, + bool? runtimeMcpDmlToolsDescribeEntitiesEnabled = null, + bool? runtimeMcpDmlToolsCreateRecordEnabled = null, + bool? runtimeMcpDmlToolsReadRecordsEnabled = null, + bool? runtimeMcpDmlToolsUpdateRecordEnabled = null, + bool? runtimeMcpDmlToolsDeleteRecordEnabled = null, + bool? runtimeMcpDmlToolsExecuteEntityEnabled = null, bool? runtimeCacheEnabled = null, int? runtimeCacheTtl = null, HostMode? runtimeHostMode = null, @@ -81,6 +90,16 @@ public ConfigureOptions( RuntimeRestEnabled = runtimeRestEnabled; RuntimeRestPath = runtimeRestPath; RuntimeRestRequestBodyStrict = runtimeRestRequestBodyStrict; + // Mcp + RuntimeMcpEnabled = runtimeMcpEnabled; + RuntimeMcpPath = runtimeMcpPath; + RuntimeMcpDmlToolsEnabled = runtimeMcpDmlToolsEnabled; + RuntimeMcpDmlToolsDescribeEntitiesEnabled = runtimeMcpDmlToolsDescribeEntitiesEnabled; + RuntimeMcpDmlToolsCreateRecordEnabled = runtimeMcpDmlToolsCreateRecordEnabled; + RuntimeMcpDmlToolsReadRecordsEnabled = runtimeMcpDmlToolsReadRecordsEnabled; + RuntimeMcpDmlToolsUpdateRecordEnabled = runtimeMcpDmlToolsUpdateRecordEnabled; + RuntimeMcpDmlToolsDeleteRecordEnabled = runtimeMcpDmlToolsDeleteRecordEnabled; + RuntimeMcpDmlToolsExecuteEntityEnabled = runtimeMcpDmlToolsExecuteEntityEnabled; // Cache RuntimeCacheEnabled = runtimeCacheEnabled; RuntimeCacheTTL = runtimeCacheTtl; @@ -155,6 +174,33 @@ public ConfigureOptions( [Option("runtime.rest.request-body-strict", Required = false, HelpText = "Prohibit extraneous REST request body fields. Default: true (boolean).")] public bool? RuntimeRestRequestBodyStrict { get; } + [Option("runtime.mcp.enabled", Required = false, HelpText = "Enable DAB's MCP endpoint. Default: true (boolean).")] + public bool? 
RuntimeMcpEnabled { get; } + + [Option("runtime.mcp.path", Required = false, HelpText = "Customize DAB's MCP endpoint path. Default: '/mcp' Conditions: Prefix path with '/'.")] + public string? RuntimeMcpPath { get; } + + [Option("runtime.mcp.dml-tools.enabled", Required = false, HelpText = "Enable DAB's MCP DML tools endpoint. Default: true (boolean).")] + public bool? RuntimeMcpDmlToolsEnabled { get; } + + [Option("runtime.mcp.dml-tools.describe-entities.enabled", Required = false, HelpText = "Enable DAB's MCP describe entities tool. Default: true (boolean).")] + public bool? RuntimeMcpDmlToolsDescribeEntitiesEnabled { get; } + + [Option("runtime.mcp.dml-tools.create-record.enabled", Required = false, HelpText = "Enable DAB's MCP create record tool. Default: true (boolean).")] + public bool? RuntimeMcpDmlToolsCreateRecordEnabled { get; } + + [Option("runtime.mcp.dml-tools.read-records.enabled", Required = false, HelpText = "Enable DAB's MCP read record tool. Default: true (boolean).")] + public bool? RuntimeMcpDmlToolsReadRecordsEnabled { get; } + + [Option("runtime.mcp.dml-tools.update-record.enabled", Required = false, HelpText = "Enable DAB's MCP update record tool. Default: true (boolean).")] + public bool? RuntimeMcpDmlToolsUpdateRecordEnabled { get; } + + [Option("runtime.mcp.dml-tools.delete-record.enabled", Required = false, HelpText = "Enable DAB's MCP delete record tool. Default: true (boolean).")] + public bool? RuntimeMcpDmlToolsDeleteRecordEnabled { get; } + + [Option("runtime.mcp.dml-tools.execute-entity.enabled", Required = false, HelpText = "Enable DAB's MCP execute entity tool. Default: true (boolean).")] + public bool? RuntimeMcpDmlToolsExecuteEntityEnabled { get; } + [Option("runtime.cache.enabled", Required = false, HelpText = "Enable DAB's cache globally. (You must also enable each entity's cache separately.). Default: false (boolean).")] public bool? 
RuntimeCacheEnabled { get; } diff --git a/src/Cli/Commands/InitOptions.cs b/src/Cli/Commands/InitOptions.cs index 5d5608a200..91786d99ff 100644 --- a/src/Cli/Commands/InitOptions.cs +++ b/src/Cli/Commands/InitOptions.cs @@ -35,8 +35,11 @@ public InitOptions( bool restDisabled = false, string graphQLPath = GraphQLRuntimeOptions.DEFAULT_PATH, bool graphqlDisabled = false, + string mcpPath = McpRuntimeOptions.DEFAULT_PATH, + bool mcpDisabled = false, CliBool restEnabled = CliBool.None, CliBool graphqlEnabled = CliBool.None, + CliBool mcpEnabled = CliBool.None, CliBool restRequestBodyStrict = CliBool.None, CliBool multipleCreateOperationEnabled = CliBool.None, string? config = null) @@ -58,8 +61,11 @@ public InitOptions( RestDisabled = restDisabled; GraphQLPath = graphQLPath; GraphQLDisabled = graphqlDisabled; + McpPath = mcpPath; + McpDisabled = mcpDisabled; RestEnabled = restEnabled; GraphQLEnabled = graphqlEnabled; + McpEnabled = mcpEnabled; RestRequestBodyStrict = restRequestBodyStrict; MultipleCreateOperationEnabled = multipleCreateOperationEnabled; } @@ -112,12 +118,21 @@ public InitOptions( [Option("graphql.disabled", Default = false, Required = false, HelpText = "Disables GraphQL endpoint for all entities.")] public bool GraphQLDisabled { get; } + [Option("mcp.path", Default = McpRuntimeOptions.DEFAULT_PATH, Required = false, HelpText = "Specify the MCP endpoint's default prefix.")] + public string McpPath { get; } + + [Option("mcp.disabled", Default = false, Required = false, HelpText = "Disables MCP endpoint for all entities.")] + public bool McpDisabled { get; } + [Option("rest.enabled", Required = false, HelpText = "(Default: true) Enables REST endpoint for all entities. Supported values: true, false.")] public CliBool RestEnabled { get; } [Option("graphql.enabled", Required = false, HelpText = "(Default: true) Enables GraphQL endpoint for all entities. 
Supported values: true, false.")] public CliBool GraphQLEnabled { get; } + [Option("mcp.enabled", Required = false, HelpText = "(Default: true) Enables MCP endpoint for all entities. Supported values: true, false.")] + public CliBool McpEnabled { get; } + // Since the rest.request-body-strict option does not have a default value, it is required to specify a value for this option if it is // included in the init command. [Option("rest.request-body-strict", Required = false, HelpText = "(Default: true) Allow extraneous fields in the request body for REST.")] diff --git a/src/Cli/ConfigGenerator.cs b/src/Cli/ConfigGenerator.cs index c7027ff78c..886447b256 100644 --- a/src/Cli/ConfigGenerator.cs +++ b/src/Cli/ConfigGenerator.cs @@ -89,6 +89,7 @@ public static bool TryCreateRuntimeConfig(InitOptions options, FileSystemRuntime DatabaseType dbType = options.DatabaseType; string? restPath = options.RestPath; string graphQLPath = options.GraphQLPath; + string mcpPath = options.McpPath; string? runtimeBaseRoute = options.RuntimeBaseRoute; Dictionary dbOptions = new(); @@ -108,9 +109,10 @@ public static bool TryCreateRuntimeConfig(InitOptions options, FileSystemRuntime " We recommend that you use the --graphql.enabled option instead."); } - bool restEnabled, graphQLEnabled; + bool restEnabled, graphQLEnabled, mcpEnabled; if (!TryDetermineIfApiIsEnabled(options.RestDisabled, options.RestEnabled, ApiType.REST, out restEnabled) || - !TryDetermineIfApiIsEnabled(options.GraphQLDisabled, options.GraphQLEnabled, ApiType.GraphQL, out graphQLEnabled)) + !TryDetermineIfApiIsEnabled(options.GraphQLDisabled, options.GraphQLEnabled, ApiType.GraphQL, out graphQLEnabled) || + !TryDetermineIfMcpIsEnabled(options.McpEnabled, out mcpEnabled)) { return false; } @@ -262,6 +264,7 @@ public static bool TryCreateRuntimeConfig(InitOptions options, FileSystemRuntime Runtime: new( Rest: new(restEnabled, restPath ?? RestRuntimeOptions.DEFAULT_PATH, options.RestRequestBodyStrict is CliBool.False ? 
false : true), GraphQL: new(Enabled: graphQLEnabled, Path: graphQLPath, MultipleMutationOptions: multipleMutationOptions), + Mcp: new(mcpEnabled, mcpPath ?? McpRuntimeOptions.DEFAULT_PATH), Host: new( Cors: new(options.CorsOrigin?.ToArray() ?? Array.Empty()), Authentication: new( @@ -314,6 +317,17 @@ private static bool TryDetermineIfApiIsEnabled(bool apiDisabledOptionValue, CliB return true; } + /// + /// Helper method to determine if the mcp api is enabled or not based on the enabled/disabled options in the dab init command. + /// + /// True, if MCP is enabled + /// Out param isMcpEnabled + /// True if MCP is enabled + private static bool TryDetermineIfMcpIsEnabled(CliBool mcpEnabledOptionValue, out bool isMcpEnabled) + { + return TryDetermineIfApiIsEnabled(false, mcpEnabledOptionValue, ApiType.MCP, out isMcpEnabled); + } + /// /// Helper method to determine if the multiple create operation is enabled or not based on the inputs from dab init command. /// @@ -744,6 +758,23 @@ private static bool TryUpdateConfiguredRuntimeOptions( } } + // MCP: Enabled and Path + if (options.RuntimeMcpEnabled != null || + options.RuntimeMcpPath != null) + { + McpRuntimeOptions updatedMcpOptions = runtimeConfig?.Runtime?.Mcp ?? new(); + bool status = TryUpdateConfiguredMcpValues(options, ref updatedMcpOptions); + + if (status) + { + runtimeConfig = runtimeConfig! with { Runtime = runtimeConfig.Runtime! with { Mcp = updatedMcpOptions } }; + } + else + { + return false; + } + } + // Cache: Enabled and TTL if (options.RuntimeCacheEnabled != null || options.RuntimeCacheTTL != null) @@ -944,6 +975,142 @@ private static bool TryUpdateConfiguredGraphQLValues( } } + /// + /// Attempts to update the Config parameters in the Mcp runtime settings based on the provided value. + /// Validates that any user-provided values are valid and then returns true if the updated Mcp options + /// need to be overwritten on the existing config parameters + /// + /// options. 
+ /// updatedMcpOptions + /// True if the value needs to be updated in the runtime config, else false + private static bool TryUpdateConfiguredMcpValues( + ConfigureOptions options, + ref McpRuntimeOptions updatedMcpOptions) + { + object? updatedValue; + + try + { + // Runtime.Mcp.Enabled + updatedValue = options?.RuntimeMcpEnabled; + if (updatedValue != null) + { + updatedMcpOptions = updatedMcpOptions! with { Enabled = (bool)updatedValue }; + _logger.LogInformation("Updated RuntimeConfig with Runtime.Mcp.Enabled as '{updatedValue}'", updatedValue); + } + + // Runtime.Mcp.Path + updatedValue = options?.RuntimeMcpPath; + if (updatedValue != null) + { + bool status = RuntimeConfigValidatorUtil.TryValidateUriComponent(uriComponent: (string)updatedValue, out string exceptionMessage); + if (status) + { + updatedMcpOptions = updatedMcpOptions! with { Path = (string)updatedValue }; + _logger.LogInformation("Updated RuntimeConfig with Runtime.Mcp.Path as '{updatedValue}'", updatedValue); + } + else + { + _logger.LogError("Failed to update Runtime.Mcp.Path as '{updatedValue}' due to exception message: {exceptionMessage}", updatedValue, exceptionMessage); + return false; + } + } + + // Handle DML tools configuration + bool hasToolUpdates = false; + DmlToolsConfig? currentDmlTools = updatedMcpOptions?.DmlTools; + + // If setting all tools at once + updatedValue = options?.RuntimeMcpDmlToolsEnabled; + if (updatedValue != null) + { + updatedMcpOptions = updatedMcpOptions! with { DmlTools = DmlToolsConfig.FromBoolean((bool)updatedValue) }; + _logger.LogInformation("Updated RuntimeConfig with Runtime.Mcp.Dml-Tools as '{updatedValue}'", updatedValue); + return true; // Return early since we're setting all tools at once + } + + // Handle individual tool updates + bool? describeEntities = currentDmlTools?.DescribeEntities; + bool? createRecord = currentDmlTools?.CreateRecord; + bool? readRecord = currentDmlTools?.ReadRecords; + bool? 
updateRecord = currentDmlTools?.UpdateRecord; + bool? deleteRecord = currentDmlTools?.DeleteRecord; + bool? executeEntity = currentDmlTools?.ExecuteEntity; + + updatedValue = options?.RuntimeMcpDmlToolsDescribeEntitiesEnabled; + if (updatedValue != null) + { + describeEntities = (bool)updatedValue; + hasToolUpdates = true; + _logger.LogInformation("Updated RuntimeConfig with runtime.mcp.dml-tools.describe-entities as '{updatedValue}'", updatedValue); + } + + updatedValue = options?.RuntimeMcpDmlToolsCreateRecordEnabled; + if (updatedValue != null) + { + createRecord = (bool)updatedValue; + hasToolUpdates = true; + _logger.LogInformation("Updated RuntimeConfig with runtime.mcp.dml-tools.create-record as '{updatedValue}'", updatedValue); + } + + updatedValue = options?.RuntimeMcpDmlToolsReadRecordsEnabled; + if (updatedValue != null) + { + readRecord = (bool)updatedValue; + hasToolUpdates = true; + _logger.LogInformation("Updated RuntimeConfig with runtime.mcp.dml-tools.read-records as '{updatedValue}'", updatedValue); + } + + updatedValue = options?.RuntimeMcpDmlToolsUpdateRecordEnabled; + if (updatedValue != null) + { + updateRecord = (bool)updatedValue; + hasToolUpdates = true; + _logger.LogInformation("Updated RuntimeConfig with runtime.mcp.dml-tools.update-record as '{updatedValue}'", updatedValue); + } + + updatedValue = options?.RuntimeMcpDmlToolsDeleteRecordEnabled; + if (updatedValue != null) + { + deleteRecord = (bool)updatedValue; + hasToolUpdates = true; + _logger.LogInformation("Updated RuntimeConfig with runtime.mcp.dml-tools.delete-record as '{updatedValue}'", updatedValue); + } + + updatedValue = options?.RuntimeMcpDmlToolsExecuteEntityEnabled; + if (updatedValue != null) + { + executeEntity = (bool)updatedValue; + hasToolUpdates = true; + _logger.LogInformation("Updated RuntimeConfig with runtime.mcp.dml-tools.execute-entity as '{updatedValue}'", updatedValue); + } + + if (hasToolUpdates) + { + updatedMcpOptions = updatedMcpOptions! 
with + { + DmlTools = new DmlToolsConfig + { + AllToolsEnabled = false, + DescribeEntities = describeEntities, + CreateRecord = createRecord, + ReadRecords = readRecord, + UpdateRecord = updateRecord, + DeleteRecord = deleteRecord, + ExecuteEntity = executeEntity + } + }; + } + + return true; + } + catch (Exception ex) + { + _logger.LogError("Failed to update RuntimeConfig.Mcp with exception message: {exceptionMessage}.", ex.Message); + return false; + } + } + /// /// Attempts to update the Config parameters in the Cache runtime settings based on the provided value. /// Validates user-provided parameters and then returns true if the updated Cache options @@ -2229,7 +2396,7 @@ private static bool TryUpdateConfiguredAzureKeyVaultOptions( { if (options.AzureKeyVaultRetryPolicyMaxCount.Value < 1) { - _logger.LogError("Failed to update azure-key-vault.retry-policy.max-count. Value must be at least 1."); + _logger.LogError("Failed to update configuration with runtime.azure-key-vault.retry-policy.max-count. Value must be a positive integer greater than 0."); return false; } @@ -2244,7 +2411,7 @@ private static bool TryUpdateConfiguredAzureKeyVaultOptions( { if (options.AzureKeyVaultRetryPolicyDelaySeconds.Value < 1) { - _logger.LogError("Failed to update azure-key-vault.retry-policy.delay-seconds. Value must be at least 1."); + _logger.LogError("Failed to update configuration with runtime.azure-key-vault.retry-policy.delay-seconds. Value must be a positive integer greater than 0."); return false; } @@ -2259,7 +2426,7 @@ private static bool TryUpdateConfiguredAzureKeyVaultOptions( { if (options.AzureKeyVaultRetryPolicyMaxDelaySeconds.Value < 1) { - _logger.LogError("Failed to update azure-key-vault.retry-policy.max-delay-seconds. Value must be at least 1."); + _logger.LogError("Failed to update configuration with runtime.azure-key-vault.retry-policy.max-delay-seconds. 
Value must be a positive integer greater than 0."); return false; } @@ -2274,7 +2441,7 @@ private static bool TryUpdateConfiguredAzureKeyVaultOptions( { if (options.AzureKeyVaultRetryPolicyNetworkTimeoutSeconds.Value < 1) { - _logger.LogError("Failed to update azure-key-vault.retry-policy.network-timeout-seconds. Value must be at least 1."); + _logger.LogError("Failed to update configuration with runtime.azure-key-vault.retry-policy.network-timeout-seconds. Value must be a positive integer greater than 0."); return false; } diff --git a/src/Config/Converters/DmlToolsConfigConverter.cs b/src/Config/Converters/DmlToolsConfigConverter.cs new file mode 100644 index 0000000000..9acef0f9b2 --- /dev/null +++ b/src/Config/Converters/DmlToolsConfigConverter.cs @@ -0,0 +1,188 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json; +using System.Text.Json.Serialization; +using Azure.DataApiBuilder.Config.ObjectModel; + +namespace Azure.DataApiBuilder.Config.Converters; + +/// +/// JSON converter for DmlToolsConfig that handles both boolean and object formats. +/// +internal class DmlToolsConfigConverter : JsonConverter +{ + /// + /// Reads DmlToolsConfig from JSON which can be either: + /// - A boolean: all tools are enabled/disabled + /// - An object: individual tool settings (unspecified tools default to true) + /// - Null/undefined: defaults to all tools enabled (true) + /// + public override DmlToolsConfig? 
Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + // Handle null + if (reader.TokenType is JsonTokenType.Null) + { + // Return default config with all tools enabled + return DmlToolsConfig.Default; + } + + // Handle boolean format: "dml-tools": true/false + if (reader.TokenType is JsonTokenType.True || reader.TokenType is JsonTokenType.False) + { + bool enabled = reader.GetBoolean(); + return DmlToolsConfig.FromBoolean(enabled); + } + + // Handle object format + if (reader.TokenType is JsonTokenType.StartObject) + { + // When using object format, unspecified tools default to true + bool? describeEntities = null; + bool? createRecord = null; + bool? readRecords = null; + bool? updateRecord = null; + bool? deleteRecord = null; + bool? executeEntity = null; + + while (reader.Read()) + { + if (reader.TokenType is JsonTokenType.EndObject) + { + break; + } + + if (reader.TokenType is JsonTokenType.PropertyName) + { + string? property = reader.GetString(); + reader.Read(); + + // Handle the property value + if (reader.TokenType is JsonTokenType.True || reader.TokenType is JsonTokenType.False) + { + bool value = reader.GetBoolean(); + + switch (property?.ToLowerInvariant()) + { + case "describe-entities": + describeEntities = value; + break; + case "create-record": + createRecord = value; + break; + case "read-records": + readRecords = value; + break; + case "update-record": + updateRecord = value; + break; + case "delete-record": + deleteRecord = value; + break; + case "execute-entity": + executeEntity = value; + break; + default: + // Skip unknown properties + break; + } + } + else + { + // Error on non-boolean values for known properties + if (property?.ToLowerInvariant() is "describe-entities" or "create-record" + or "read-records" or "update-record" or "delete-record" or "execute-entity") + { + throw new JsonException($"Property '{property}' must be a boolean value."); + } + + // Skip unknown properties + reader.Skip(); + } + } + 
} + + // Create the config with specified values + // Unspecified values (null) will default to true in the DmlToolsConfig constructor + return new DmlToolsConfig( + allToolsEnabled: null, + describeEntities: describeEntities, + createRecord: createRecord, + readRecords: readRecords, + updateRecord: updateRecord, + deleteRecord: deleteRecord, + executeEntity: executeEntity); + } + + // For any other unexpected token type, return default (all enabled) + return DmlToolsConfig.Default; + } + + /// + /// Writes DmlToolsConfig to JSON. + /// - If all tools have the same value, writes as boolean + /// - Otherwise writes as object with only user-provided properties + /// + public override void Write(Utf8JsonWriter writer, DmlToolsConfig? value, JsonSerializerOptions options) + { + if (value is null) + { + return; + } + + // Check if any individual settings were provided by the user + bool hasIndividualSettings = value.UserProvidedDescribeEntities || + value.UserProvidedCreateRecord || + value.UserProvidedReadRecords || + value.UserProvidedUpdateRecord || + value.UserProvidedDeleteRecord || + value.UserProvidedExecuteEntity; + + // Only write the boolean value if it's provided by user + // This prevents writing "dml-tools": true when it's the default + if (!hasIndividualSettings && value.UserProvidedAllToolsEnabled) + { + writer.WritePropertyName("dml-tools"); + writer.WriteBooleanValue(value.AllToolsEnabled); + } + else + { + writer.WritePropertyName("dml-tools"); + + // Write as object with only user-provided properties + writer.WriteStartObject(); + + if (value.UserProvidedDescribeEntities && value.DescribeEntities.HasValue) + { + writer.WriteBoolean("describe-entities", value.DescribeEntities.Value); + } + + if (value.UserProvidedCreateRecord && value.CreateRecord.HasValue) + { + writer.WriteBoolean("create-record", value.CreateRecord.Value); + } + + if (value.UserProvidedReadRecords && value.ReadRecords.HasValue) + { + writer.WriteBoolean("read-records", 
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

using System.Text.Json;
using System.Text.Json.Serialization;
using Azure.DataApiBuilder.Config.ObjectModel;

namespace Azure.DataApiBuilder.Config.Converters;

/// <summary>
/// JSON converter factory for McpRuntimeOptions that handles both the shorthand
/// boolean form ("mcp": true/false) and the full object form.
/// </summary>
internal class McpRuntimeOptionsConverterFactory : JsonConverterFactory
{
    // Determines whether to replace environment variables with their
    // values or not while deserializing.
    private bool _replaceEnvVar;

    /// <param name="replaceEnvVar">Whether to replace environment variables with their
    /// values or not while deserializing.</param>
    internal McpRuntimeOptionsConverterFactory(bool replaceEnvVar)
    {
        _replaceEnvVar = replaceEnvVar;
    }

    /// <inheritdoc/>
    public override bool CanConvert(Type typeToConvert)
    {
        return typeToConvert.IsAssignableTo(typeof(McpRuntimeOptions));
    }

    /// <inheritdoc/>
    public override JsonConverter? CreateConverter(Type typeToConvert, JsonSerializerOptions options)
    {
        return new McpRuntimeOptionsConverter(_replaceEnvVar);
    }

    private class McpRuntimeOptionsConverter : JsonConverter<McpRuntimeOptions>
    {
        // Determines whether to replace environment variables with their
        // values or not while deserializing.
        private bool _replaceEnvVar;

        /// <param name="replaceEnvVar">Whether to replace environment variables with their
        /// values or not while deserializing.</param>
        internal McpRuntimeOptionsConverter(bool replaceEnvVar)
        {
            _replaceEnvVar = replaceEnvVar;
        }

        /// <summary>
        /// Defines how DAB reads MCP options and defines which values are
        /// used to instantiate McpRuntimeOptions. Accepts either a bare boolean
        /// (maps to the "enabled" flag) or an object with "enabled", "path",
        /// and "dml-tools" properties.
        /// </summary>
        /// <exception cref="JsonException">Thrown when improperly formatted MCP options are provided.</exception>
        public override McpRuntimeOptions? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
        {
            if (reader.TokenType == JsonTokenType.True || reader.TokenType == JsonTokenType.False)
            {
                return new McpRuntimeOptions(Enabled: reader.GetBoolean());
            }

            if (reader.TokenType is JsonTokenType.StartObject)
            {
                DmlToolsConfigConverter dmlToolsConfigConverter = new();

                bool enabled = true;
                string? path = null;
                DmlToolsConfig? dmlTools = null;

                while (reader.Read())
                {
                    if (reader.TokenType == JsonTokenType.EndObject)
                    {
                        return new McpRuntimeOptions(enabled, path, dmlTools);
                    }

                    string? propertyName = reader.GetString();

                    reader.Read();
                    switch (propertyName)
                    {
                        case "enabled":
                            if (reader.TokenType is not JsonTokenType.Null)
                            {
                                enabled = reader.GetBoolean();
                            }

                            break;

                        case "path":
                            if (reader.TokenType is not JsonTokenType.Null)
                            {
                                // DeserializeString performs @env('...') substitution when enabled.
                                path = reader.DeserializeString(_replaceEnvVar);
                            }

                            break;

                        case "dml-tools":
                            dmlTools = dmlToolsConfigConverter.Read(ref reader, typeToConvert, options);
                            break;

                        default:
                            throw new JsonException($"Unexpected property {propertyName}");
                    }
                }
            }

            throw new JsonException("Failed to read the MCP Options");
        }

        /// <summary>
        /// When writing the McpRuntimeOptions back to a JSON file, only write the properties
        /// if they are user provided. This avoids polluting the written JSON file with properties
        /// the user most likely omitted when writing the original DAB runtime config file.
        /// This Write operation is only used when a RuntimeConfig object is serialized to JSON.
        /// </summary>
        public override void Write(Utf8JsonWriter writer, McpRuntimeOptions value, JsonSerializerOptions options)
        {
            writer.WriteStartObject();
            writer.WriteBoolean("enabled", value.Enabled);

            // Only persist "path" when the user explicitly provided it.
            if (value.UserProvidedPath)
            {
                writer.WritePropertyName("path");
                JsonSerializer.Serialize(writer, value.Path, options);
            }

            // Only persist "dml-tools" when the user provided a dml-tools section.
            if (value.DmlTools is not null)
            {
                DmlToolsConfigConverter dmlToolsOptionsConverter = options.GetConverter(typeof(DmlToolsConfig)) as DmlToolsConfigConverter ??
                    throw new JsonException("Failed to get mcp.dml-tools options converter");

                // A JsonConverter's Write emits only the value; the enclosing object must
                // emit the property name first or Utf8JsonWriter rejects the payload.
                writer.WritePropertyName("dml-tools");
                dmlToolsOptionsConverter.Write(writer, value.DmlTools, options);
            }

            writer.WriteEndObject();
        }
    }
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

using System.Diagnostics.CodeAnalysis;
using System.Text.Json.Serialization;

namespace Azure.DataApiBuilder.Config.ObjectModel;

/// <summary>
/// DML Tools configuration that can be either a boolean or object with individual tool settings.
/// Per-tool values are nullable: null means "not specified", falling back to AllToolsEnabled.
/// </summary>
public record DmlToolsConfig
{
    /// <summary>
    /// Default value for all tools when not specified.
    /// </summary>
    public const bool DEFAULT_ENABLED = true;

    /// <summary>
    /// Indicates if all tools are enabled/disabled uniformly.
    /// </summary>
    public bool AllToolsEnabled { get; init; }

    /// <summary>
    /// Whether describe-entities tool is enabled.
    /// </summary>
    public bool? DescribeEntities { get; init; }

    /// <summary>
    /// Whether create-record tool is enabled.
    /// </summary>
    public bool? CreateRecord { get; init; }

    /// <summary>
    /// Whether read-records tool is enabled.
    /// </summary>
    public bool? ReadRecords { get; init; }

    /// <summary>
    /// Whether update-record tool is enabled.
    /// </summary>
    public bool? UpdateRecord { get; init; }

    /// <summary>
    /// Whether delete-record tool is enabled.
    /// </summary>
    public bool? DeleteRecord { get; init; }

    /// <summary>
    /// Whether execute-entity tool is enabled.
    /// </summary>
    public bool? ExecuteEntity { get; init; }

    /// <summary>
    /// Constructs the config, recording for each argument whether the user supplied it
    /// so serialization can omit unspecified properties.
    /// </summary>
    [JsonConstructor]
    public DmlToolsConfig(
        bool? allToolsEnabled = null,
        bool? describeEntities = null,
        bool? createRecord = null,
        bool? readRecords = null,
        bool? updateRecord = null,
        bool? deleteRecord = null,
        bool? executeEntity = null)
    {
        if (allToolsEnabled is not null)
        {
            AllToolsEnabled = allToolsEnabled.Value;
            UserProvidedAllToolsEnabled = true;
        }
        else
        {
            AllToolsEnabled = DEFAULT_ENABLED;
        }

        if (describeEntities is not null)
        {
            DescribeEntities = describeEntities;
            UserProvidedDescribeEntities = true;
        }

        if (createRecord is not null)
        {
            CreateRecord = createRecord;
            UserProvidedCreateRecord = true;
        }

        if (readRecords is not null)
        {
            ReadRecords = readRecords;
            UserProvidedReadRecords = true;
        }

        if (updateRecord is not null)
        {
            UpdateRecord = updateRecord;
            UserProvidedUpdateRecord = true;
        }

        if (deleteRecord is not null)
        {
            DeleteRecord = deleteRecord;
            UserProvidedDeleteRecord = true;
        }

        if (executeEntity is not null)
        {
            ExecuteEntity = executeEntity;
            UserProvidedExecuteEntity = true;
        }
    }

    /// <summary>
    /// Creates a DmlToolsConfig with all tools set to the same state.
    /// NOTE(review): intentionally leaves UserProvidedAllToolsEnabled false — confirm
    /// callers expect shorthand boolean configs to be treated as non-user-provided on write.
    /// </summary>
    public static DmlToolsConfig FromBoolean(bool enabled)
    {
        return new DmlToolsConfig
        {
            AllToolsEnabled = enabled,
            DescribeEntities = null,
            CreateRecord = null,
            ReadRecords = null,
            UpdateRecord = null,
            DeleteRecord = null,
            ExecuteEntity = null
        };
    }

    /// <summary>
    /// Creates a default DmlToolsConfig with all tools enabled.
    /// </summary>
    public static DmlToolsConfig Default => FromBoolean(DEFAULT_ENABLED);

    /// <summary>
    /// Flag which informs CLI and JSON serializer whether to write all-tools-enabled
    /// property/value to the runtime config file.
    /// (No MemberNotNullWhen here: AllToolsEnabled is a non-nullable bool, so the
    /// attribute would be a no-op.)
    /// </summary>
    [JsonIgnore(Condition = JsonIgnoreCondition.Always)]
    public bool UserProvidedAllToolsEnabled { get; init; } = false;

    /// <summary>
    /// Flag which informs CLI and JSON serializer whether to write describe-entities
    /// property/value to the runtime config file.
    /// </summary>
    [JsonIgnore(Condition = JsonIgnoreCondition.Always)]
    [MemberNotNullWhen(true, nameof(DescribeEntities))]
    public bool UserProvidedDescribeEntities { get; init; } = false;

    /// <summary>
    /// Flag which informs CLI and JSON serializer whether to write create-record
    /// property/value to the runtime config file.
    /// </summary>
    [JsonIgnore(Condition = JsonIgnoreCondition.Always)]
    [MemberNotNullWhen(true, nameof(CreateRecord))]
    public bool UserProvidedCreateRecord { get; init; } = false;

    /// <summary>
    /// Flag which informs CLI and JSON serializer whether to write read-records
    /// property/value to the runtime config file.
    /// </summary>
    [JsonIgnore(Condition = JsonIgnoreCondition.Always)]
    [MemberNotNullWhen(true, nameof(ReadRecords))]
    public bool UserProvidedReadRecords { get; init; } = false;

    /// <summary>
    /// Flag which informs CLI and JSON serializer whether to write update-record
    /// property/value to the runtime config file.
    /// </summary>
    [JsonIgnore(Condition = JsonIgnoreCondition.Always)]
    [MemberNotNullWhen(true, nameof(UpdateRecord))]
    public bool UserProvidedUpdateRecord { get; init; } = false;

    /// <summary>
    /// Flag which informs CLI and JSON serializer whether to write delete-record
    /// property/value to the runtime config file.
    /// </summary>
    [JsonIgnore(Condition = JsonIgnoreCondition.Always)]
    [MemberNotNullWhen(true, nameof(DeleteRecord))]
    public bool UserProvidedDeleteRecord { get; init; } = false;

    /// <summary>
    /// Flag which informs CLI and JSON serializer whether to write execute-entity
    /// property/value to the runtime config file.
    /// </summary>
    [JsonIgnore(Condition = JsonIgnoreCondition.Always)]
    [MemberNotNullWhen(true, nameof(ExecuteEntity))]
    public bool UserProvidedExecuteEntity { get; init; } = false;
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

using System.Text.Json.Serialization;
using Azure.DataApiBuilder.Config.Converters;

namespace Azure.DataApiBuilder.Config.ObjectModel;

/// <summary>
/// Runtime options for the MCP (Model Context Protocol) endpoint.
/// </summary>
public record McpRuntimeOptions
{
    public const string DEFAULT_PATH = "/mcp";

    /// <summary>
    /// Whether MCP endpoints are enabled.
    /// </summary>
    [JsonPropertyName("enabled")]
    public bool Enabled { get; init; } = true;

    /// <summary>
    /// The path where MCP endpoints will be exposed.
    /// </summary>
    [JsonPropertyName("path")]
    public string Path { get; init; } = DEFAULT_PATH;

    /// <summary>
    /// Configuration for DML tools.
    /// </summary>
    [JsonPropertyName("dml-tools")]
    [JsonConverter(typeof(DmlToolsConfigConverter))]
    public DmlToolsConfig? DmlTools { get; init; }

    /// <summary>
    /// Constructs the options, recording whether the user supplied a path so
    /// serialization can omit the default.
    /// </summary>
    [JsonConstructor]
    public McpRuntimeOptions(
        bool Enabled = true,
        string? Path = null,
        DmlToolsConfig? DmlTools = null)
    {
        this.Enabled = Enabled;

        if (Path is not null)
        {
            this.Path = Path;
            UserProvidedPath = true;
        }
        else
        {
            this.Path = DEFAULT_PATH;
        }

        this.DmlTools = DmlTools;
    }

    /// <summary>
    /// Flag which informs CLI and JSON serializer whether to write path
    /// property and value to the runtime config file.
    /// When user doesn't provide the path property/value, which signals DAB to use the default,
    /// the DAB CLI should not write the default value to a serialized config.
    /// (Fixed: previously carried [MemberNotNullWhen(true, nameof(Enabled))], which
    /// referenced the wrong member and was a no-op since both Enabled and Path are
    /// non-nullable.)
    /// </summary>
    [JsonIgnore(Condition = JsonIgnoreCondition.Always)]
    public bool UserProvidedPath { get; init; } = false;
}
Telemetry { get; init; } @@ -21,6 +22,7 @@ public record RuntimeOptions public RuntimeOptions( RestRuntimeOptions? Rest, GraphQLRuntimeOptions? GraphQL, + McpRuntimeOptions? Mcp, HostOptions? Host, string? BaseRoute = null, TelemetryOptions? Telemetry = null, @@ -30,6 +32,7 @@ public RuntimeOptions( { this.Rest = Rest; this.GraphQL = GraphQL; + this.Mcp = Mcp; this.Host = Host; this.BaseRoute = BaseRoute; this.Telemetry = Telemetry; @@ -60,6 +63,12 @@ GraphQL is null || GraphQL?.Enabled is null || GraphQL?.Enabled is true; + [JsonIgnore] + public bool IsMcpEnabled => + Mcp is null || + Mcp?.Enabled is null || + Mcp?.Enabled is true; + [JsonIgnore] public bool IsHealthCheckEnabled => Health is null || diff --git a/src/Config/RuntimeConfigLoader.cs b/src/Config/RuntimeConfigLoader.cs index 4a220af0ea..f78c32ebc1 100644 --- a/src/Config/RuntimeConfigLoader.cs +++ b/src/Config/RuntimeConfigLoader.cs @@ -246,6 +246,8 @@ public static JsonSerializerOptions GetSerializationOptions( options.Converters.Add(new EntityHealthOptionsConvertorFactory()); options.Converters.Add(new RestRuntimeOptionsConverterFactory()); options.Converters.Add(new GraphQLRuntimeOptionsConverterFactory(replaceEnvVar)); + options.Converters.Add(new McpRuntimeOptionsConverterFactory(replaceEnvVar)); + options.Converters.Add(new DmlToolsConfigConverter()); options.Converters.Add(new EntitySourceConverterFactory(replaceEnvVar)); options.Converters.Add(new EntityGraphQLOptionsConverterFactory(replaceEnvVar)); options.Converters.Add(new EntityRestOptionsConverterFactory(replaceEnvVar)); diff --git a/src/Core/Configurations/RuntimeConfigValidator.cs b/src/Core/Configurations/RuntimeConfigValidator.cs index 12a8f82aa4..fd8f811c9e 100644 --- a/src/Core/Configurations/RuntimeConfigValidator.cs +++ b/src/Core/Configurations/RuntimeConfigValidator.cs @@ -702,11 +702,11 @@ private void ValidateNameRequirements(string entityName) /// The config that will be validated. 
public void ValidateGlobalEndpointRouteConfig(RuntimeConfig runtimeConfig) { - // Both REST and GraphQL endpoints cannot be disabled at the same time. - if (!runtimeConfig.IsRestEnabled && !runtimeConfig.IsGraphQLEnabled) + // REST, GraphQL and MCP endpoints cannot be disabled at the same time. + if (!runtimeConfig.IsRestEnabled && !runtimeConfig.IsGraphQLEnabled && !runtimeConfig.IsMcpEnabled) { HandleOrRecordException(new DataApiBuilderException( - message: $"Both GraphQL and REST endpoints are disabled.", + message: $"GraphQL, REST, and MCP endpoints are disabled.", statusCode: HttpStatusCode.ServiceUnavailable, subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError)); } @@ -735,19 +735,30 @@ public void ValidateGlobalEndpointRouteConfig(RuntimeConfig runtimeConfig) ValidateRestURI(runtimeConfig); ValidateGraphQLURI(runtimeConfig); - // Do not check for conflicts if GraphQL or REST endpoints are disabled. - if (!runtimeConfig.IsRestEnabled || !runtimeConfig.IsGraphQLEnabled) + ValidateMcpUri(runtimeConfig); + // Do not check for conflicts if two of the endpoints are disabled between GraphQL, REST, and MCP. 
+ if ((!runtimeConfig.IsRestEnabled && !runtimeConfig.IsGraphQLEnabled) || + (!runtimeConfig.IsRestEnabled && !runtimeConfig.IsMcpEnabled) || + (!runtimeConfig.IsGraphQLEnabled && !runtimeConfig.IsMcpEnabled)) { return; } if (string.Equals( - a: runtimeConfig.RestPath, - b: runtimeConfig.GraphQLPath, - comparisonType: StringComparison.OrdinalIgnoreCase)) + a: runtimeConfig.RestPath, + b: runtimeConfig.GraphQLPath, + comparisonType: StringComparison.OrdinalIgnoreCase) || + string.Equals( + a: runtimeConfig.RestPath, + b: runtimeConfig.McpPath, + comparisonType: StringComparison.OrdinalIgnoreCase) || + string.Equals( + a: runtimeConfig.McpPath, + b: runtimeConfig.GraphQLPath, + comparisonType: StringComparison.OrdinalIgnoreCase)) { HandleOrRecordException(new DataApiBuilderException( - message: $"Conflicting GraphQL and REST path configuration.", + message: $"Conflicting path configuration between GraphQL, REST, and MCP.", statusCode: HttpStatusCode.ServiceUnavailable, subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError)); } @@ -794,6 +805,41 @@ public void ValidateGraphQLURI(RuntimeConfig runtimeConfig) } } + /// + /// Method to validate that the MCP URI (MCP path prefix). + /// + /// + public void ValidateMcpUri(RuntimeConfig runtimeConfig) + { + // Skip validation if MCP is not configured + if (runtimeConfig.Runtime?.Mcp is null) + { + return; + } + + // Get the MCP path from the configuration + string? 
mcpPath = runtimeConfig.Runtime.Mcp.Path; + + // Validate that the path is not null or empty when MCP is configured + if (string.IsNullOrWhiteSpace(mcpPath)) + { + HandleOrRecordException(new DataApiBuilderException( + message: "MCP path cannot be null or empty when MCP is configured.", + statusCode: HttpStatusCode.ServiceUnavailable, + subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError)); + return; + } + + // Validate the MCP path using the same validation as REST and GraphQL + if (!RuntimeConfigValidatorUtil.TryValidateUriComponent(mcpPath, out string exceptionMsgSuffix)) + { + HandleOrRecordException(new DataApiBuilderException( + message: $"MCP path {exceptionMsgSuffix}", + statusCode: HttpStatusCode.ServiceUnavailable, + subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError)); + } + } + private void ValidateAuthenticationOptions(RuntimeConfig runtimeConfig) { // Bypass validation of auth if there is no auth provided diff --git a/src/Core/Services/RestService.cs b/src/Core/Services/RestService.cs index 0cf9f8a374..6a2308dd83 100644 --- a/src/Core/Services/RestService.cs +++ b/src/Core/Services/RestService.cs @@ -391,6 +391,14 @@ public string GetRouteAfterPathBase(string route) // forward slash '/'. 
configuredRestPathBase = configuredRestPathBase.Substring(1); + if (route.Equals(_runtimeConfigProvider.GetConfig().McpPath.Substring(1))) + { + throw new DataApiBuilderException( + message: $"Route {route} was not found.", + statusCode: HttpStatusCode.NotFound, + subStatusCode: DataApiBuilderException.SubStatusCodes.GlobalMcpEndpointDisabled); + } + if (!route.StartsWith(configuredRestPathBase)) { throw new DataApiBuilderException( diff --git a/src/Directory.Packages.props b/src/Directory.Packages.props index 9213684e6e..14f097915c 100644 --- a/src/Directory.Packages.props +++ b/src/Directory.Packages.props @@ -29,6 +29,8 @@ + + @@ -58,25 +60,25 @@ We use an older version of Newtonsoft.Json.Schema because newer versions depend on Newtonsoft.Json >=13.0.3 which is not (and can not be made) available in Microsoft Private Nuget Feeds --> - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/Service.Tests/Authentication/Helpers/RuntimeConfigAuthHelper.cs b/src/Service.Tests/Authentication/Helpers/RuntimeConfigAuthHelper.cs index 12c7db4fce..07a8a565ec 100644 --- a/src/Service.Tests/Authentication/Helpers/RuntimeConfigAuthHelper.cs +++ b/src/Service.Tests/Authentication/Helpers/RuntimeConfigAuthHelper.cs @@ -20,6 +20,7 @@ internal static RuntimeConfig CreateTestConfigWithAuthNProvider(AuthenticationOp Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: hostOptions ), Entities: new(new Dictionary()) diff --git a/src/Service.Tests/Authorization/AuthorizationHelpers.cs b/src/Service.Tests/Authorization/AuthorizationHelpers.cs index 7c6948b484..85f05a1c3b 100644 --- a/src/Service.Tests/Authorization/AuthorizationHelpers.cs +++ b/src/Service.Tests/Authorization/AuthorizationHelpers.cs @@ -126,6 +126,7 @@ public static RuntimeConfig InitRuntimeConfig( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new( Cors: null, Authentication: new(authProvider, null) diff --git 
a/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs b/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs index 733ec15b24..39a77bffff 100644 --- a/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs +++ b/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs @@ -1441,6 +1441,7 @@ private static RuntimeConfig BuildTestRuntimeConfig(EntityPermission[] permissio Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap) diff --git a/src/Service.Tests/Caching/HealthEndpointCachingTests.cs b/src/Service.Tests/Caching/HealthEndpointCachingTests.cs index 2dbff7cbb2..94216a4409 100644 --- a/src/Service.Tests/Caching/HealthEndpointCachingTests.cs +++ b/src/Service.Tests/Caching/HealthEndpointCachingTests.cs @@ -156,6 +156,7 @@ private static void CreateCustomConfigFile(Dictionary entityMap, Health: new(enabled: true, cacheTtlSeconds: cacheTtlSeconds), Rest: new(Enabled: true), GraphQL: new(Enabled: true), + Mcp: new(Enabled: true), Host: hostOptions ), Entities: new(entityMap)); diff --git a/src/Service.Tests/Configuration/AuthenticationConfigValidatorUnitTests.cs b/src/Service.Tests/Configuration/AuthenticationConfigValidatorUnitTests.cs index 8b01d29961..963211ae40 100644 --- a/src/Service.Tests/Configuration/AuthenticationConfigValidatorUnitTests.cs +++ b/src/Service.Tests/Configuration/AuthenticationConfigValidatorUnitTests.cs @@ -194,6 +194,7 @@ private static RuntimeConfig CreateRuntimeConfigWithOptionalAuthN(Authentication Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: hostOptions ), Entities: new(new Dictionary()) diff --git a/src/Service.Tests/Configuration/ConfigurationTests.cs b/src/Service.Tests/Configuration/ConfigurationTests.cs index 2522806049..0be24fa886 100644 --- a/src/Service.Tests/Configuration/ConfigurationTests.cs +++ b/src/Service.Tests/Configuration/ConfigurationTests.cs @@ -1608,7 +1608,7 @@ public async Task 
TestSqlMetadataForInvalidConfigEntities() GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, new(), new()); + RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, new(), new(), new()); // creating an entity with invalid table name Entity entityWithInvalidSourceName = new( @@ -1679,7 +1679,7 @@ public async Task TestSqlMetadataValidationForEntitiesWithInvalidSource() GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, new(), new()); + RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, new(), new(), new()); // creating an entity with invalid table name Entity entityWithInvalidSource = new( @@ -2214,7 +2214,7 @@ public async Task TestPathRewriteMiddlewareForGraphQL( GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, new()); + RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, new(), new()); const string CUSTOM_CONFIG = "custom-config.json"; File.WriteAllText(CUSTOM_CONFIG, configuration.ToJson()); @@ -2543,7 +2543,7 @@ public async Task TestGlobalFlagToEnableRestAndGraphQLForHostedAndNonHostedEnvir DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions); + RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, null); const string CUSTOM_CONFIG = "custom-config.json"; File.WriteAllText(CUSTOM_CONFIG, configuration.ToJson()); @@ -2618,6 +2618,7 @@ public async Task ValidateErrorMessageForMutationWithoutReadPermission() { GraphQLRuntimeOptions 
graphqlOptions = new(Enabled: true); RestRuntimeOptions restRuntimeOptions = new(Enabled: false); + McpRuntimeOptions mcpRuntimeOptions = new(Enabled: false); DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); @@ -2648,7 +2649,7 @@ public async Task ValidateErrorMessageForMutationWithoutReadPermission() Mappings: null); string entityName = "Stock"; - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, entity, entityName); + RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions, entity, entityName); const string CUSTOM_CONFIG = "custom-config.json"; File.WriteAllText(CUSTOM_CONFIG, configuration.ToJson()); @@ -2919,6 +2920,7 @@ public async Task ValidateInheritanceOfReadPermissionFromAnonymous() { GraphQLRuntimeOptions graphqlOptions = new(Enabled: true); RestRuntimeOptions restRuntimeOptions = new(Enabled: false); + McpRuntimeOptions mcpRuntimeOptions = new(Enabled: false); DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); @@ -2949,7 +2951,7 @@ public async Task ValidateInheritanceOfReadPermissionFromAnonymous() Mappings: null); string entityName = "Stock"; - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, entity, entityName); + RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions, entity, entityName); const string CUSTOM_CONFIG = "custom-config.json"; File.WriteAllText(CUSTOM_CONFIG, configuration.ToJson()); @@ -3060,6 +3062,7 @@ public async Task ValidateLocationHeaderFieldForPostRequests(EntitySourceType en GraphQLRuntimeOptions graphqlOptions = new(Enabled: false); RestRuntimeOptions restRuntimeOptions = new(Enabled: true); + McpRuntimeOptions 
mcpRuntimeOptions = new(Enabled: false); DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); @@ -3077,11 +3080,11 @@ public async Task ValidateLocationHeaderFieldForPostRequests(EntitySourceType en ); string entityName = "GetBooks"; - configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, entity, entityName); + configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions, entity, entityName); } else { - configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions); + configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions); } const string CUSTOM_CONFIG = "custom-config.json"; @@ -3158,6 +3161,7 @@ public async Task ValidateLocationHeaderWhenBaseRouteIsConfigured( { GraphQLRuntimeOptions graphqlOptions = new(Enabled: false); RestRuntimeOptions restRuntimeOptions = new(Enabled: true); + McpRuntimeOptions mcpRuntimeOptions = new(Enabled: false); DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); @@ -3175,11 +3179,11 @@ public async Task ValidateLocationHeaderWhenBaseRouteIsConfigured( ); string entityName = "GetBooks"; - configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, entity, entityName); + configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions, entity, entityName); } else { - configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions); + configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions); } const string CUSTOM_CONFIG = "custom-config.json"; @@ -3188,7 +3192,7 @@ public async Task ValidateLocationHeaderWhenBaseRouteIsConfigured( HostOptions staticWebAppsHostOptions 
= new(null, authenticationOptions); RuntimeOptions runtimeOptions = configuration.Runtime; - RuntimeOptions baseRouteEnabledRuntimeOptions = new(runtimeOptions?.Rest, runtimeOptions?.GraphQL, staticWebAppsHostOptions, "/data-api"); + RuntimeOptions baseRouteEnabledRuntimeOptions = new(runtimeOptions?.Rest, runtimeOptions?.GraphQL, runtimeOptions?.Mcp, staticWebAppsHostOptions, "/data-api"); RuntimeConfig baseRouteEnabledConfig = configuration with { Runtime = baseRouteEnabledRuntimeOptions }; File.WriteAllText(CUSTOM_CONFIG, baseRouteEnabledConfig.ToJson()); @@ -3347,7 +3351,7 @@ public async Task TestEngineSupportViewsWithoutKeyFieldsInConfigForMsSQL() Mappings: null ); - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, new(), new(), viewEntity, "books_view_all"); + RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, new(), new(), new(), viewEntity, "books_view_all"); const string CUSTOM_CONFIG = "custom-config.json"; @@ -3568,6 +3572,7 @@ public void TestProductionModeAppServiceEnvironmentCheck(HostMode hostMode, Easy RuntimeOptions runtimeOptions = new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, authenticationOptions, hostMode) ); RuntimeConfig configWithCustomHostMode = config with { Runtime = runtimeOptions }; @@ -3608,10 +3613,11 @@ public async Task TestSchemaIntrospectionQuery(bool enableIntrospection, bool ex { GraphQLRuntimeOptions graphqlOptions = new(AllowIntrospection: enableIntrospection); RestRuntimeOptions restRuntimeOptions = new(); + McpRuntimeOptions mcpRuntimeOptions = new(); DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions); + RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions); const string CUSTOM_CONFIG = "custom-config.json"; 
File.WriteAllText(CUSTOM_CONFIG, configuration.ToJson()); @@ -3660,6 +3666,7 @@ public void TestInvalidDatabaseColumnNameHandling( { GraphQLRuntimeOptions graphqlOptions = new(Enabled: globalGraphQLEnabled); RestRuntimeOptions restRuntimeOptions = new(Enabled: true); + McpRuntimeOptions mcpOptions = new(Enabled: true); DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); @@ -3683,7 +3690,7 @@ public void TestInvalidDatabaseColumnNameHandling( Mappings: mappings ); - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, entity, "graphqlNameCompat"); + RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpOptions, entity, "graphqlNameCompat"); const string CUSTOM_CONFIG = "custom-config.json"; File.WriteAllText(CUSTOM_CONFIG, configuration.ToJson()); @@ -3739,7 +3746,8 @@ public async Task OpenApi_InteractiveSwaggerUI( RuntimeConfig configuration = InitMinimalRuntimeConfig( dataSource: dataSource, graphqlOptions: new(), - restOptions: new(Path: customRestPath)); + restOptions: new(Path: customRestPath), + mcpOptions: new()); configuration = configuration with @@ -4057,6 +4065,7 @@ private static RuntimeConfig InitializeRuntimeWithLogLevel(Dictionary entityMap, ? 
new( Rest: new(Enabled: enableGlobalRest), GraphQL: new(Enabled: true), + Mcp: new(Enabled: true), Host: hostOptions, Pagination: paginationOptions) : new( Rest: new(Enabled: enableGlobalRest), GraphQL: new(Enabled: true), + Mcp: new(Enabled: true), Host: hostOptions); RuntimeConfig runtimeConfig = new( @@ -5312,6 +5324,8 @@ public static RuntimeConfig InitialzieRuntimeConfigForMultipleCreateTests(bool i RestRuntimeOptions restRuntimeOptions = new(Enabled: false); + McpRuntimeOptions mcpRuntimeOptions = new(Enabled: false); + DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); EntityAction createAction = new( @@ -5370,7 +5384,7 @@ public static RuntimeConfig InitialzieRuntimeConfigForMultipleCreateTests(bool i RuntimeConfig runtimeConfig = new(Schema: "IntegrationTestMinimalSchema", DataSource: dataSource, - Runtime: new(restRuntimeOptions, graphqlOptions, Host: new(Cors: null, Authentication: authenticationOptions, Mode: HostMode.Development), Cache: null), + Runtime: new(restRuntimeOptions, graphqlOptions, mcpRuntimeOptions, Host: new(Cors: null, Authentication: authenticationOptions, Mode: HostMode.Development), Cache: null), Entities: new(entityMap)); return runtimeConfig; } @@ -5383,6 +5397,7 @@ public static RuntimeConfig InitMinimalRuntimeConfig( DataSource dataSource, GraphQLRuntimeOptions graphqlOptions, RestRuntimeOptions restOptions, + McpRuntimeOptions mcpOptions, Entity entity = null, string entityName = null, RuntimeCacheOptions cacheOptions = null @@ -5420,7 +5435,7 @@ public static RuntimeConfig InitMinimalRuntimeConfig( return new( Schema: "IntegrationTestMinimalSchema", DataSource: dataSource, - Runtime: new(restOptions, graphqlOptions, + Runtime: new(restOptions, graphqlOptions, mcpOptions, Host: new(Cors: null, Authentication: authenticationOptions, Mode: HostMode.Development), Cache: cacheOptions ), @@ -5496,6 +5511,7 @@ private static RuntimeConfig 
CreateBasicRuntimeConfigWithNoEntity( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) @@ -5533,6 +5549,7 @@ private static RuntimeConfig CreateBasicRuntimeConfigWithSingleEntityAndAuthOpti Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, Authentication: authenticationOptions) ), Entities: new(entityMap) diff --git a/src/Service.Tests/Configuration/HealthEndpointRolesTests.cs b/src/Service.Tests/Configuration/HealthEndpointRolesTests.cs index a492f2c167..2a83697a3a 100644 --- a/src/Service.Tests/Configuration/HealthEndpointRolesTests.cs +++ b/src/Service.Tests/Configuration/HealthEndpointRolesTests.cs @@ -127,6 +127,7 @@ private static void CreateCustomConfigFile(Dictionary entityMap, Health: new(enabled: true, roles: role != null ? new HashSet { role } : null), Rest: new(Enabled: true), GraphQL: new(Enabled: true), + Mcp: new(Enabled: true), Host: hostOptions ), Entities: new(entityMap)); diff --git a/src/Service.Tests/Configuration/HealthEndpointTests.cs b/src/Service.Tests/Configuration/HealthEndpointTests.cs index 4fd2e52bf4..70e14e0108 100644 --- a/src/Service.Tests/Configuration/HealthEndpointTests.cs +++ b/src/Service.Tests/Configuration/HealthEndpointTests.cs @@ -53,19 +53,36 @@ public void CleanupAfterEachTest() /// [TestMethod] [TestCategory(TestCategory.MSSQL)] - [DataRow(true, true, true, true, true, true, true, DisplayName = "Validate Health Report all enabled.")] - [DataRow(false, true, true, true, true, true, true, DisplayName = "Validate when Comprehensive Health Report is disabled")] - [DataRow(true, true, true, false, true, true, true, DisplayName = "Validate Health Report when data-source health is disabled")] - [DataRow(true, true, true, true, false, true, true, DisplayName = "Validate Health Report when entity health is disabled")] - [DataRow(true, false, true, true, true, true, true, DisplayName = "Validate Health Report when global rest health is 
disabled")] - [DataRow(true, true, true, true, true, false, true, DisplayName = "Validate Health Report when entity rest health is disabled")] - [DataRow(true, true, false, true, true, true, true, DisplayName = "Validate Health Report when global graphql health is disabled")] - [DataRow(true, true, true, true, true, true, false, DisplayName = "Validate Health Report when entity graphql health is disabled")] - public async Task ComprehensiveHealthEndpoint_ValidateContents(bool enableGlobalHealth, bool enableGlobalRest, bool enableGlobalGraphql, bool enableDatasourceHealth, bool enableEntityHealth, bool enableEntityRest, bool enableEntityGraphQL) + [DataRow(true, true, true, true, true, true, true, true, DisplayName = "Validate Health Report all enabled.")] + [DataRow(false, true, true, true, true, true, true, true, DisplayName = "Validate when Comprehensive Health Report is disabled")] + [DataRow(true, true, true, false, true, true, true, true, DisplayName = "Validate Health Report when global MCP health is disabled")] + [DataRow(true, true, true, true, false, true, true, true, DisplayName = "Validate Health Report when data-source health is disabled")] + [DataRow(true, true, true, true, true, false, true, true, DisplayName = "Validate Health Report when entity health is disabled")] + [DataRow(true, false, true, true, true, true, true, true, DisplayName = "Validate Health Report when global REST health is disabled")] + [DataRow(true, true, false, true, true, true, true, true, DisplayName = "Validate Health Report when global GraphQL health is disabled")] + [DataRow(true, true, true, true, true, true, false, true, DisplayName = "Validate Health Report when entity REST health is disabled")] + [DataRow(true, true, true, true, true, true, true, false, DisplayName = "Validate Health Report when entity GraphQL health is disabled")] + public async Task ComprehensiveHealthEndpoint_ValidateContents( + bool enableGlobalHealth, + bool enableGlobalRest, + bool 
enableGlobalGraphql, + bool enableGlobalMcp, + bool enableDatasourceHealth, + bool enableEntityHealth, + bool enableEntityRest, + bool enableEntityGraphQL) { - // Arrange - // Create a mock entity map with a single entity for testing - RuntimeConfig runtimeConfig = SetupCustomConfigFile(enableGlobalHealth, enableGlobalRest, enableGlobalGraphql, enableDatasourceHealth, enableEntityHealth, enableEntityRest, enableEntityGraphQL); + // The body remains exactly the same except passing enableGlobalMcp + RuntimeConfig runtimeConfig = SetupCustomConfigFile( + enableGlobalHealth, + enableGlobalRest, + enableGlobalGraphql, + enableGlobalMcp, + enableDatasourceHealth, + enableEntityHealth, + enableEntityRest, + enableEntityGraphQL); + WriteToCustomConfigFile(runtimeConfig); string[] args = new[] @@ -90,7 +107,7 @@ public async Task ComprehensiveHealthEndpoint_ValidateContents(bool enableGlobal Assert.AreEqual(expected: HttpStatusCode.OK, actual: response.StatusCode, message: "Received unexpected HTTP code from health check endpoint."); ValidateBasicDetailsHealthCheckResponse(responseProperties); - ValidateConfigurationDetailsHealthCheckResponse(responseProperties, enableGlobalRest, enableGlobalGraphql); + ValidateConfigurationDetailsHealthCheckResponse(responseProperties, enableGlobalRest, enableGlobalGraphql, enableGlobalMcp); ValidateIfAttributePresentInResponse(responseProperties, enableDatasourceHealth, HealthCheckConstants.DATASOURCE); ValidateIfAttributePresentInResponse(responseProperties, enableEntityHealth, HealthCheckConstants.ENDPOINT); if (enableEntityHealth) @@ -110,7 +127,7 @@ public async Task ComprehensiveHealthEndpoint_ValidateContents(bool enableGlobal public async Task TestHealthCheckRestResponseAsync() { // Arrange - RuntimeConfig runtimeConfig = SetupCustomConfigFile(true, true, true, true, true, true, true); + RuntimeConfig runtimeConfig = SetupCustomConfigFile(true, true, true, true, true, true, true, true); HttpUtilities httpUtilities = 
SetupRestTest(runtimeConfig); // Act @@ -139,7 +156,7 @@ public async Task TestHealthCheckRestResponseAsync() public async Task TestFailureHealthCheckRestResponseAsync() { // Arrange - RuntimeConfig runtimeConfig = SetupCustomConfigFile(true, true, true, true, true, true, true); + RuntimeConfig runtimeConfig = SetupCustomConfigFile(true, true, true, true, true, true, true, true); HttpUtilities httpUtilities = SetupGraphQLTest(runtimeConfig, HttpStatusCode.BadRequest); // Act @@ -167,7 +184,7 @@ public async Task TestFailureHealthCheckRestResponseAsync() public async Task TestHealthCheckGraphQLResponseAsync() { // Arrange - RuntimeConfig runtimeConfig = SetupCustomConfigFile(true, true, true, true, true, true, true); + RuntimeConfig runtimeConfig = SetupCustomConfigFile(true, true, true, true, true, true, true, true); HttpUtilities httpUtilities = SetupGraphQLTest(runtimeConfig); // Act @@ -191,7 +208,7 @@ public async Task TestHealthCheckGraphQLResponseAsync() public async Task TestFailureHealthCheckGraphQLResponseAsync() { // Arrange - RuntimeConfig runtimeConfig = SetupCustomConfigFile(true, true, true, true, true, true, true); + RuntimeConfig runtimeConfig = SetupCustomConfigFile(true, true, true, true, true, true, true, true); HttpUtilities httpUtilities = SetupGraphQLTest(runtimeConfig, HttpStatusCode.InternalServerError); // Act @@ -427,7 +444,7 @@ private static void ValidateConfigurationIsCorrectFlag(Dictionary responseProperties, bool enableGlobalRest, bool enableGlobalGraphQL) + private static void ValidateConfigurationDetailsHealthCheckResponse(Dictionary responseProperties, bool enableGlobalRest, bool enableGlobalGraphQL, bool enableGlobalMcp) { if (responseProperties.TryGetValue("configuration", out JsonElement configElement) && configElement.ValueKind == JsonValueKind.Object) { @@ -443,6 +460,8 @@ private static void ValidateConfigurationDetailsHealthCheckResponse(Dictionary @@ -520,7 +539,7 @@ private static RuntimeConfig SetupCustomConfigFile(bool 
enableGlobalHealth, bool ///
/// Collection of entityName -> Entity object. /// flag to enable or disabled REST globally. - private static RuntimeConfig CreateRuntimeConfig(Dictionary entityMap, bool enableGlobalRest = true, bool enableGlobalGraphql = true, bool enableGlobalHealth = true, bool enableDatasourceHealth = true, HostMode hostMode = HostMode.Production) + private static RuntimeConfig CreateRuntimeConfig(Dictionary entityMap, bool enableGlobalRest = true, bool enableGlobalGraphql = true, bool enabledGlobalMcp = true, bool enableGlobalHealth = true, bool enableDatasourceHealth = true, HostMode hostMode = HostMode.Production) { DataSource dataSource = new( DatabaseType.MSSQL, @@ -536,6 +555,7 @@ private static RuntimeConfig CreateRuntimeConfig(Dictionary enti Health: new(enabled: enableGlobalHealth), Rest: new(Enabled: enableGlobalRest), GraphQL: new(Enabled: enableGlobalGraphql), + Mcp: new(Enabled: enabledGlobalMcp), Host: hostOptions ), Entities: new(entityMap)); diff --git a/src/Service.Tests/Configuration/HotReload/AuthorizationResolverHotReloadTests.cs b/src/Service.Tests/Configuration/HotReload/AuthorizationResolverHotReloadTests.cs index cc397e0ca0..b5fcb6162b 100644 --- a/src/Service.Tests/Configuration/HotReload/AuthorizationResolverHotReloadTests.cs +++ b/src/Service.Tests/Configuration/HotReload/AuthorizationResolverHotReloadTests.cs @@ -131,6 +131,7 @@ private static void CreateCustomConfigFile(string fileName, Dictionary dbOptions = new(); HyphenatedNamingPolicy namingPolicy = new(); @@ -548,7 +549,7 @@ type Planet @model(name:""Planet"") { Mappings: null); string entityName = "Planet"; - RuntimeConfig configuration = ConfigurationTests.InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, entity, entityName); + RuntimeConfig configuration = ConfigurationTests.InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions, entity, entityName); const string CUSTOM_CONFIG = "custom-config.json"; const string CUSTOM_SCHEMA = 
"custom-schema.gql"; @@ -642,6 +643,7 @@ type Planet @model(name:""Planet"") { }"; GraphQLRuntimeOptions graphqlOptions = new(Enabled: true); RestRuntimeOptions restRuntimeOptions = new(Enabled: false); + McpRuntimeOptions mcpRuntimeOptions = new(Enabled: false); Dictionary dbOptions = new(); HyphenatedNamingPolicy namingPolicy = new(); @@ -677,7 +679,7 @@ type Planet @model(name:""Planet"") { Mappings: null); string entityName = "Planet"; - RuntimeConfig configuration = ConfigurationTests.InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, entity, entityName); + RuntimeConfig configuration = ConfigurationTests.InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions, entity, entityName); const string CUSTOM_CONFIG = "custom-config.json"; const string CUSTOM_SCHEMA = "custom-schema.gql"; diff --git a/src/Service.Tests/CosmosTests/QueryTests.cs b/src/Service.Tests/CosmosTests/QueryTests.cs index 97cffa3c98..c40c95c75b 100644 --- a/src/Service.Tests/CosmosTests/QueryTests.cs +++ b/src/Service.Tests/CosmosTests/QueryTests.cs @@ -682,6 +682,7 @@ type Planet @model(name:""Planet"") { GraphQLRuntimeOptions graphqlOptions = new(Enabled: true); RestRuntimeOptions restRuntimeOptions = new(Enabled: false); + McpRuntimeOptions mcpRuntimeOptions = new(Enabled: false); Dictionary dbOptions = new(); HyphenatedNamingPolicy namingPolicy = new(); @@ -724,7 +725,7 @@ type Planet @model(name:""Planet"") { string entityName = "Planet"; // cache configuration - RuntimeConfig configuration = ConfigurationTests.InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, entity, entityName, new RuntimeCacheOptions() { Enabled = true, TtlSeconds = 5 }); + RuntimeConfig configuration = ConfigurationTests.InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions, entity, entityName, new RuntimeCacheOptions() { Enabled = true, TtlSeconds = 5 }); const string CUSTOM_CONFIG = 
"custom-config.json"; const string CUSTOM_SCHEMA = "custom-schema.gql"; diff --git a/src/Service.Tests/CosmosTests/SchemaGeneratorFactoryTests.cs b/src/Service.Tests/CosmosTests/SchemaGeneratorFactoryTests.cs index 20f415e3dc..562a5174d2 100644 --- a/src/Service.Tests/CosmosTests/SchemaGeneratorFactoryTests.cs +++ b/src/Service.Tests/CosmosTests/SchemaGeneratorFactoryTests.cs @@ -78,7 +78,7 @@ public async Task ExportGraphQLFromCosmosDB_GeneratesSchemaSuccessfully(string g {"database", globalDatabase}, {"container", globalContainer} }), - Runtime: new(Rest: null, GraphQL: new(), Host: new(null, null)), + Runtime: new(Rest: null, GraphQL: new(), Mcp: new(), Host: new(null, null)), Entities: new(new Dictionary() { {"Container1", new Entity( diff --git a/src/Service.Tests/GraphQLBuilder/MultipleMutationBuilderTests.cs b/src/Service.Tests/GraphQLBuilder/MultipleMutationBuilderTests.cs index 0ed64ca6ee..94665d7c18 100644 --- a/src/Service.Tests/GraphQLBuilder/MultipleMutationBuilderTests.cs +++ b/src/Service.Tests/GraphQLBuilder/MultipleMutationBuilderTests.cs @@ -360,6 +360,7 @@ private static RuntimeConfigProvider GetRuntimeConfigProvider() { Runtime = new RuntimeOptions(Rest: runtimeConfig.Runtime.Rest, GraphQL: new GraphQLRuntimeOptions(MultipleMutationOptions: new MultipleMutationOptions(new MultipleCreateOptions(enabled: true))), + Mcp: runtimeConfig.Runtime.Mcp, Host: runtimeConfig.Runtime.Host, BaseRoute: runtimeConfig.Runtime.BaseRoute, Telemetry: runtimeConfig.Runtime.Telemetry, diff --git a/src/Service.Tests/ModuleInitializer.cs b/src/Service.Tests/ModuleInitializer.cs index b099508604..ba0407ecd5 100644 --- a/src/Service.Tests/ModuleInitializer.cs +++ b/src/Service.Tests/ModuleInitializer.cs @@ -51,6 +51,10 @@ public static void Init() VerifierSettings.IgnoreMember(options => options.IsGraphQLEnabled); // Ignore the entity IsGraphQLEnabled as that's unimportant from a test standpoint. 
VerifierSettings.IgnoreMember(entity => entity.IsGraphQLEnabled); + // Ignore the global IsMcpEnabled as that's unimportant from a test standpoint. + VerifierSettings.IgnoreMember(config => config.IsMcpEnabled); + // Ignore the global RuntimeOptions.IsMcpEnabled as that's unimportant from a test standpoint. + VerifierSettings.IgnoreMember(options => options.IsMcpEnabled); // Ignore the global IsHealthEnabled as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.IsHealthEnabled); // Ignore the global RuntimeOptions.IsHealthCheckEnabled as that's unimportant from a test standpoint. @@ -69,16 +73,16 @@ public static void Init() VerifierSettings.IgnoreMember(config => config.CosmosDataSourceUsed); // Ignore the IsRequestBodyStrict as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.IsRequestBodyStrict); - // Ignore the IsGraphQLEnabled as that's unimportant from a test standpoint. - VerifierSettings.IgnoreMember(config => config.IsGraphQLEnabled); - // Ignore the IsRestEnabled as that's unimportant from a test standpoint. - VerifierSettings.IgnoreMember(config => config.IsRestEnabled); + // Ignore the McpDmlTools as that's unimportant from a test standpoint. + VerifierSettings.IgnoreMember(config => config.McpDmlTools); // Ignore the IsStaticWebAppsIdentityProvider as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.IsStaticWebAppsIdentityProvider); // Ignore the RestPath as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.RestPath); // Ignore the GraphQLPath as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.GraphQLPath); + // Ignore the McpPath as that's unimportant from a test standpoint. + VerifierSettings.IgnoreMember(config => config.McpPath); // Ignore the AllowIntrospection as that's unimportant from a test standpoint. 
VerifierSettings.IgnoreMember(config => config.AllowIntrospection); // Ignore the EnableAggregation as that's unimportant from a test standpoint. @@ -105,6 +109,8 @@ public static void Init() VerifierSettings.IgnoreMember(options => options.UserProvidedDepthLimit); // Ignore EnableLegacyDateTimeScalar as that's not serialized in our config file. VerifierSettings.IgnoreMember(options => options.EnableLegacyDateTimeScalar); + // Ignore UserProvidedPath as that's not serialized in our config file. + VerifierSettings.IgnoreMember(options => options.UserProvidedPath); // Customise the path where we store snapshots, so they are easier to locate in a PR review. VerifyBase.DerivePathInfo( (sourceFile, projectDirectory, type, method) => new( diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt index 51d8543ed5..420977ed26 100644 --- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt +++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt @@ -17,6 +17,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt index 51b733b94e..4283eb432e 100644 --- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt +++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt @@ -21,6 +21,10 @@ } } }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt 
b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt index 23f67259d4..f34141c964 100644 --- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt +++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt index a534867fee..75490a804b 100644 --- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt +++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Service.Tests/SqlTests/GraphQLQueryTests/DwSqlGraphQLQueryTests.cs b/src/Service.Tests/SqlTests/GraphQLQueryTests/DwSqlGraphQLQueryTests.cs index fa977e48d5..8cf55c247d 100644 --- a/src/Service.Tests/SqlTests/GraphQLQueryTests/DwSqlGraphQLQueryTests.cs +++ b/src/Service.Tests/SqlTests/GraphQLQueryTests/DwSqlGraphQLQueryTests.cs @@ -1239,6 +1239,7 @@ public void TestEnableDwNto1JoinQueryFeatureFlagLoadedFromRuntime() { EnableDwNto1JoinQueryOptimization = true }), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) @@ -1261,6 +1262,7 @@ public void TestEnableDwNto1JoinQueryFeatureFlagDefaultValueLoaded() Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) diff --git a/src/Service.Tests/SqlTests/SqlTestHelper.cs b/src/Service.Tests/SqlTests/SqlTestHelper.cs index 6193d843a0..e739f6cc8c 100644 --- 
a/src/Service.Tests/SqlTests/SqlTestHelper.cs +++ b/src/Service.Tests/SqlTests/SqlTestHelper.cs @@ -389,6 +389,7 @@ public static RuntimeConfig InitBasicRuntimeConfigWithNoEntity( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, Authentication: authenticationOptions) ), Entities: new(new Dictionary()) diff --git a/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs b/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs index 5a9d783376..16caf29b49 100644 --- a/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs +++ b/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs @@ -257,6 +257,7 @@ public void TestAddingRelationshipWithInvalidTargetEntity() Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap) @@ -317,6 +318,7 @@ public void TestAddingRelationshipWithDisabledGraphQL() Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap) @@ -373,6 +375,7 @@ string relationshipEntity Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap) @@ -461,6 +464,7 @@ public void TestRelationshipWithNoLinkingObjectAndEitherSourceOrTargetFieldIsNul Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap)); @@ -553,6 +557,7 @@ public void TestRelationshipWithoutSourceAndTargetFieldsMatching( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap)); @@ -626,6 +631,7 @@ public void TestRelationshipWithoutSourceAndTargetFieldsAsValidBackingColumns( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap)); @@ -755,6 +761,7 @@ public void TestRelationshipWithoutLinkingSourceAndTargetFieldsMatching( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap)); @@ -1011,6 +1018,7 @@ public void 
TestOperationValidityAndCasing(string operationName, bool exceptionE Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap)); @@ -1083,6 +1091,7 @@ public void ValidateGraphQLTypeNamesFromConfig(string entityNameFromConfig, bool Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap) @@ -1440,21 +1449,27 @@ public void ValidateValidEntityDefinitionsDoesNotGenerateDuplicateQueries(Databa ///
/// GraphQL global path /// REST global path + /// MCP global path /// Exception expected [DataTestMethod] - [DataRow("/graphql", "/graphql", true)] - [DataRow("/api", "/api", true)] - [DataRow("/graphql", "/api", false)] - public void TestGlobalRouteValidation(string graphQLConfiguredPath, string restConfiguredPath, bool expectError) + [DataRow("/graphql", "/graphql", "/mcp", true, DisplayName = "GraphQL and REST conflict (same path).")] + [DataRow("/api", "/api", "/mcp", true, DisplayName = "REST and GraphQL conflict (same path).")] + [DataRow("/graphql", "/api", "/mcp", false, DisplayName = "GraphQL, REST, and MCP distinct.")] + // Extra case: conflict with MCP + [DataRow("/mcp", "/api", "/mcp", true, DisplayName = "MCP and GraphQL conflict (same path).")] + [DataRow("/graphql", "/mcp", "/mcp", true, DisplayName = "MCP and REST conflict (same path).")] + public void TestGlobalRouteValidation(string graphQLConfiguredPath, string restConfiguredPath, string mcpConfiguredPath, bool expectError) { GraphQLRuntimeOptions graphQL = new(Path: graphQLConfiguredPath); RestRuntimeOptions rest = new(Path: restConfiguredPath); + McpRuntimeOptions mcp = new(Path: mcpConfiguredPath); RuntimeConfig configuration = ConfigurationTests.InitMinimalRuntimeConfig( new(DatabaseType.MSSQL, "", Options: null), graphQL, - rest); - string expectedErrorMessage = "Conflicting GraphQL and REST path configuration."; + rest, + mcp); + string expectedErrorMessage = "Conflicting path configuration between GraphQL, REST, and MCP."; try { @@ -1671,11 +1686,16 @@ public void ValidateApiURIsAreWellFormed( { string graphQLPathPrefix = GraphQLRuntimeOptions.DEFAULT_PATH; string restPathPrefix = RestRuntimeOptions.DEFAULT_PATH; + string mcpPathPrefix = McpRuntimeOptions.DEFAULT_PATH; if (apiType is ApiType.REST) { restPathPrefix = apiPathPrefix; } + else if (apiType is ApiType.MCP) + { + mcpPathPrefix = apiPathPrefix; + } else { graphQLPathPrefix = apiPathPrefix; @@ -1683,11 +1703,13 @@ public void 
ValidateApiURIsAreWellFormed( GraphQLRuntimeOptions graphQL = new(Path: graphQLPathPrefix); RestRuntimeOptions rest = new(Path: restPathPrefix); + McpRuntimeOptions mcp = new(Enabled: false); RuntimeConfig configuration = ConfigurationTests.InitMinimalRuntimeConfig( new(DatabaseType.MSSQL, "", Options: null), graphQL, - rest); + rest, + mcp); RuntimeConfigValidator configValidator = InitializeRuntimeConfigValidator(); @@ -1710,25 +1732,33 @@ public void ValidateApiURIsAreWellFormed( ///
/// Boolean flag to indicate if REST endpoints are enabled globally. /// Boolean flag to indicate if GraphQL endpoints are enabled globally. + /// Boolean flag to indicate if MCP endpoints are enabled globally. /// Boolean flag to indicate if exception is expected. - [DataRow(true, true, false, DisplayName = "Both REST and GraphQL enabled.")] - [DataRow(true, false, false, DisplayName = "REST enabled, and GraphQL disabled.")] - [DataRow(false, true, false, DisplayName = "REST disabled, and GraphQL enabled.")] - [DataRow(false, false, true, DisplayName = "Both REST and GraphQL are disabled.")] + [DataRow(true, true, true, false, DisplayName = "REST, GraphQL, and MCP enabled.")] + [DataRow(true, true, false, false, DisplayName = "REST and GraphQL enabled, MCP disabled.")] + [DataRow(true, false, true, false, DisplayName = "REST enabled, GraphQL disabled, and MCP enabled.")] + [DataRow(true, false, false, false, DisplayName = "REST enabled, GraphQL and MCP disabled.")] + [DataRow(false, true, true, false, DisplayName = "REST disabled, GraphQL and MCP enabled.")] + [DataRow(false, true, false, false, DisplayName = "REST disabled, GraphQL enabled, and MCP disabled.")] + [DataRow(false, false, true, false, DisplayName = "REST and GraphQL disabled, MCP enabled.")] + [DataRow(false, false, false, true, DisplayName = "REST, GraphQL, and MCP disabled.")] [DataTestMethod] - public void EnsureFailureWhenBothRestAndGraphQLAreDisabled( + public void EnsureFailureWhenRestAndGraphQLAndMcpAreDisabled( bool restEnabled, bool graphqlEnabled, + bool mcpEnabled, bool expectError) { GraphQLRuntimeOptions graphQL = new(Enabled: graphqlEnabled); RestRuntimeOptions rest = new(Enabled: restEnabled); + McpRuntimeOptions mcp = new(Enabled: mcpEnabled); RuntimeConfig configuration = ConfigurationTests.InitMinimalRuntimeConfig( new(DatabaseType.MSSQL, "", Options: null), graphQL, - rest); - string expectedErrorMessage = "Both GraphQL and REST endpoints are disabled."; + rest, + mcp); + string 
expectedErrorMessage = "GraphQL, REST, and MCP endpoints are disabled."; try { @@ -1995,6 +2025,7 @@ public void ValidateRestMethodsForEntityInConfig( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null)), Entities: new(entityMap)); @@ -2068,6 +2099,7 @@ public void ValidateRestPathForEntityInConfig( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap) @@ -2138,6 +2170,7 @@ public void ValidateUniqueRestPathsForEntitiesInConfig( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap) @@ -2198,6 +2231,7 @@ public void ValidateRuntimeBaseRouteSettings( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, Authentication: new(Provider: authenticationProvider, Jwt: null)), BaseRoute: runtimeBaseRoute ), @@ -2334,6 +2368,7 @@ public void TestRuntimeConfigSetupWithNonJsonConstructor() Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new RuntimeEntities(entityMap), @@ -2405,6 +2440,7 @@ public void ValidatePaginationOptionsInConfig( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, Authentication: null), Pagination: new PaginationOptions(defaultPageSize, maxPageSize, nextLinkRelative) ), @@ -2456,6 +2492,7 @@ public void ValidateMaxResponseSizeInConfig( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, Authentication: null, MaxResponseSizeMB: providedMaxResponseSizeMB) ), Entities: new(new Dictionary())); diff --git a/src/Service.Tests/UnitTests/DbExceptionParserUnitTests.cs b/src/Service.Tests/UnitTests/DbExceptionParserUnitTests.cs index ba7f05251a..02801de3e2 100644 --- a/src/Service.Tests/UnitTests/DbExceptionParserUnitTests.cs +++ b/src/Service.Tests/UnitTests/DbExceptionParserUnitTests.cs @@ -38,6 +38,7 @@ public void VerifyCorrectErrorMessage(bool isDeveloperMode, string expected) Runtime: new( Rest: new(), 
GraphQL: new(), + Mcp: new(), Host: new(null, null, isDeveloperMode ? HostMode.Development : HostMode.Production) ), Entities: new(new Dictionary()) @@ -80,6 +81,7 @@ public void TestIsTransientExceptionMethod(bool expected, int number) Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null, HostMode.Development) ), Entities: new(new Dictionary()) diff --git a/src/Service.Tests/UnitTests/MultiSourceQueryExecutionUnitTests.cs b/src/Service.Tests/UnitTests/MultiSourceQueryExecutionUnitTests.cs index dd76845a04..e06e140328 100644 --- a/src/Service.Tests/UnitTests/MultiSourceQueryExecutionUnitTests.cs +++ b/src/Service.Tests/UnitTests/MultiSourceQueryExecutionUnitTests.cs @@ -251,6 +251,7 @@ public async Task TestMultiSourceTokenSet() Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, Authentication: null) ), DefaultDataSourceName: DATA_SOURCE_NAME_1, @@ -312,6 +313,7 @@ private static RuntimeConfig GenerateMockRuntimeConfigForMultiDbScenario() Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), // use prod mode to avoid having to mock config file watcher Host: new(Cors: null, Authentication: null, HostMode.Production) ), diff --git a/src/Service.Tests/UnitTests/MySqlQueryExecutorUnitTests.cs b/src/Service.Tests/UnitTests/MySqlQueryExecutorUnitTests.cs index 423234aa73..cbfef36664 100644 --- a/src/Service.Tests/UnitTests/MySqlQueryExecutorUnitTests.cs +++ b/src/Service.Tests/UnitTests/MySqlQueryExecutorUnitTests.cs @@ -46,6 +46,7 @@ public async Task TestHandleManagedIdentityAccess( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) diff --git a/src/Service.Tests/UnitTests/PostgreSqlQueryExecutorUnitTests.cs b/src/Service.Tests/UnitTests/PostgreSqlQueryExecutorUnitTests.cs index f0db8b4742..ccaa90b353 100644 --- a/src/Service.Tests/UnitTests/PostgreSqlQueryExecutorUnitTests.cs +++ b/src/Service.Tests/UnitTests/PostgreSqlQueryExecutorUnitTests.cs @@ 
-57,6 +57,7 @@ public async Task TestHandleManagedIdentityAccess( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) diff --git a/src/Service.Tests/UnitTests/RequestValidatorUnitTests.cs b/src/Service.Tests/UnitTests/RequestValidatorUnitTests.cs index a19823df18..186f254c51 100644 --- a/src/Service.Tests/UnitTests/RequestValidatorUnitTests.cs +++ b/src/Service.Tests/UnitTests/RequestValidatorUnitTests.cs @@ -356,6 +356,7 @@ public static void PerformTest( Runtime: new( Rest: new(Path: "/api"), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, Authentication: null) ), Entities: new(new Dictionary() diff --git a/src/Service.Tests/UnitTests/RestServiceUnitTests.cs b/src/Service.Tests/UnitTests/RestServiceUnitTests.cs index 9d483bf1d2..1fa1a276ad 100644 --- a/src/Service.Tests/UnitTests/RestServiceUnitTests.cs +++ b/src/Service.Tests/UnitTests/RestServiceUnitTests.cs @@ -115,6 +115,7 @@ public static void InitializeTest(string restRoutePrefix, string entityName) Runtime: new( Rest: new(Path: restRoutePrefix), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) diff --git a/src/Service.Tests/UnitTests/RuntimeConfigLoaderJsonDeserializerTests.cs b/src/Service.Tests/UnitTests/RuntimeConfigLoaderJsonDeserializerTests.cs index 8d7dae0541..b98de993e2 100644 --- a/src/Service.Tests/UnitTests/RuntimeConfigLoaderJsonDeserializerTests.cs +++ b/src/Service.Tests/UnitTests/RuntimeConfigLoaderJsonDeserializerTests.cs @@ -259,7 +259,7 @@ public void TestNullableOptionalProps() TryParseAndAssertOnDefaults("{" + emptyRuntime, out _); // Test with empty sub properties of runtime - minJson.Append(@"{ ""rest"": { }, ""graphql"": { }, + minJson.Append(@"{ ""rest"": { }, ""graphql"": { }, ""mcp"": { }, ""base-route"" : """","); StringBuilder minJsonWithHostSubProps = new(minJson + @"""telemetry"" : { }, ""host"" : "); StringBuilder minJsonWithTelemetrySubProps = new(minJson + @"""host"" : { 
}, ""telemetry"" : "); @@ -423,6 +423,10 @@ public static string GetModifiedJsonString(string[] reps, string enumString) } } }, + ""mcp"": { + ""enabled"": true, + ""path"": """ + reps[++index % reps.Length] + @""" + }, ""host"": { ""mode"": ""development"", ""cors"": { @@ -506,6 +510,10 @@ public static string GetModifiedJsonString(string[] reps, string enumString) ""enabled"": true, ""path"": ""/graphql"" }, + ""mcp"": { + ""enabled"": true, + ""path"": ""/mcp"" + }, ""host"": { ""mode"": ""development"", ""cors"": { @@ -641,6 +649,8 @@ private static bool TryParseAndAssertOnDefaults(string json, out RuntimeConfig p Assert.AreEqual(RestRuntimeOptions.DEFAULT_PATH, parsedConfig.RestPath); Assert.IsTrue(parsedConfig.IsGraphQLEnabled); Assert.AreEqual(GraphQLRuntimeOptions.DEFAULT_PATH, parsedConfig.GraphQLPath); + Assert.IsTrue(parsedConfig.IsMcpEnabled); + Assert.AreEqual(McpRuntimeOptions.DEFAULT_PATH, parsedConfig.McpPath); Assert.IsTrue(parsedConfig.AllowIntrospection); Assert.IsFalse(parsedConfig.IsDevelopmentMode()); Assert.IsTrue(parsedConfig.IsStaticWebAppsIdentityProvider); diff --git a/src/Service.Tests/UnitTests/SqlQueryExecutorUnitTests.cs b/src/Service.Tests/UnitTests/SqlQueryExecutorUnitTests.cs index 92b076107a..908b7019c4 100644 --- a/src/Service.Tests/UnitTests/SqlQueryExecutorUnitTests.cs +++ b/src/Service.Tests/UnitTests/SqlQueryExecutorUnitTests.cs @@ -80,6 +80,7 @@ public async Task TestHandleManagedIdentityAccess( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) @@ -154,6 +155,7 @@ public async Task TestRetryPolicyExhaustingMaxAttempts() Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) @@ -229,6 +231,7 @@ public void Test_DbCommandParameter_PopulatedWithCorrectDbTypes() Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) @@ -344,6 +347,7 @@ public async 
Task TestHttpContextIsPopulatedWithDbExecutionTime() Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) @@ -446,6 +450,7 @@ public void TestToValidateLockingOfHttpContextObjectDuringCalcuationOfDbExecutio Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) @@ -512,6 +517,7 @@ public void ValidateStreamingLogicAsync(int readDataLoops, bool exceptionExpecte Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, Authentication: null, MaxResponseSizeMB: 5) ), Entities: new(new Dictionary())); @@ -573,6 +579,7 @@ public void ValidateStreamingLogicForStoredProcedures(int readDataLoops, bool ex Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, Authentication: null, MaxResponseSizeMB: 4) ), Entities: new(new Dictionary())); diff --git a/src/Service.Tests/dab-config.MsSql.json b/src/Service.Tests/dab-config.MsSql.json index e57c7dce8c..d5e903d4f3 100644 --- a/src/Service.Tests/dab-config.MsSql.json +++ b/src/Service.Tests/dab-config.MsSql.json @@ -23,6 +23,11 @@ } } }, + "mcp": { + "enabled": true, + "path": "/mcp", + "dml-tools": true + }, "host": { "cors": { "origins": [ @@ -2314,7 +2319,8 @@ "Notebook": { "source": { "object": "notebooks", - "type": "table" + "type": "table", + "object-description": "Table containing notebook information" }, "graphql": { "enabled": true, @@ -3843,4 +3849,4 @@ ] } } -} \ No newline at end of file +} diff --git a/src/Service/Azure.DataApiBuilder.Service.csproj b/src/Service/Azure.DataApiBuilder.Service.csproj index 9f1558e504..6ea9c8dad2 100644 --- a/src/Service/Azure.DataApiBuilder.Service.csproj +++ b/src/Service/Azure.DataApiBuilder.Service.csproj @@ -1,4 +1,4 @@ - + net8.0 @@ -102,6 +102,7 @@
+ diff --git a/src/Service/HealthCheck/HealthCheckHelper.cs b/src/Service/HealthCheck/HealthCheckHelper.cs index 9225c3aeb0..452cb803a9 100644 --- a/src/Service/HealthCheck/HealthCheckHelper.cs +++ b/src/Service/HealthCheck/HealthCheckHelper.cs @@ -140,6 +140,7 @@ private static void UpdateDabConfigurationDetails(ref ComprehensiveHealthCheckRe { Rest = runtimeConfig.IsRestEnabled, GraphQL = runtimeConfig.IsGraphQLEnabled, + Mcp = runtimeConfig.IsMcpEnabled, Caching = runtimeConfig.IsCachingEnabled, Telemetry = runtimeConfig?.Runtime?.Telemetry != null, Mode = runtimeConfig?.Runtime?.Host?.Mode ?? HostMode.Production, // Modify to runtimeConfig.HostMode in Roles PR diff --git a/src/Service/HealthCheck/Model/ConfigurationDetails.cs b/src/Service/HealthCheck/Model/ConfigurationDetails.cs index c3989e0167..9ff007754e 100644 --- a/src/Service/HealthCheck/Model/ConfigurationDetails.cs +++ b/src/Service/HealthCheck/Model/ConfigurationDetails.cs @@ -18,6 +18,9 @@ public record ConfigurationDetails [JsonPropertyName("graphql")] public bool GraphQL { get; init; } + [JsonPropertyName("mcp")] + public bool Mcp { get; init; } + [JsonPropertyName("caching")] public bool Caching { get; init; } diff --git a/src/Service/Startup.cs b/src/Service/Startup.cs index a23c23178a..48a39d31d0 100644 --- a/src/Service/Startup.cs +++ b/src/Service/Startup.cs @@ -24,6 +24,7 @@ using Azure.DataApiBuilder.Core.Services.MetadataProviders; using Azure.DataApiBuilder.Core.Services.OpenAPI; using Azure.DataApiBuilder.Core.Telemetry; +using Azure.DataApiBuilder.Mcp.Core; using Azure.DataApiBuilder.Service.Controllers; using Azure.DataApiBuilder.Service.Exceptions; using Azure.DataApiBuilder.Service.HealthCheck; @@ -452,6 +453,9 @@ public void ConfigureServices(IServiceCollection services) } services.AddSingleton(); + + services.AddDabMcpServer(configProvider); + services.AddControllers(); } @@ -678,6 +682,9 @@ public void Configure(IApplicationBuilder app, IWebHostEnvironment env, RuntimeC { 
endpoints.MapControllers(); + // Special for MCP + endpoints.MapDabMcp(runtimeConfigProvider); + endpoints .MapGraphQL() .WithOptions(new GraphQLServerOptions From a86996d073ffb306ecea9ae615065a4359de017d Mon Sep 17 00:00:00 2001 From: souvikghosh04 Date: Tue, 7 Oct 2025 23:55:02 +0530 Subject: [PATCH 65/79] Remove contributor- Akash (#2897) ## Why make this change? Removing Akash from contributor --- CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CODEOWNERS b/CODEOWNERS index cb1f7eb036..ac65d7bc52 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1,7 +1,7 @@ # These owners will be the default owners for everything in # the repo. Unless a later match takes precedence, # review when someone opens a pull request. -* @Aniruddh25 @aaronburtle @anushakolan @RubenCerna2079 @souvikghosh04 @akashkumar58 @neeraj-sharma2592 @sourabh1007 @vadeveka @Alekhya-Polavarapu @rusamant @stuartpa +* @Aniruddh25 @aaronburtle @anushakolan @RubenCerna2079 @souvikghosh04 @neeraj-sharma2592 @sourabh1007 @vadeveka @Alekhya-Polavarapu @rusamant @stuartpa code_of_conduct.md @jerrynixon contributing.md @jerrynixon From d6404b3902d6e114a1394035093c6ae4d08f34ad Mon Sep 17 00:00:00 2001 From: souvikghosh04 Date: Wed, 15 Oct 2025 16:35:20 +0530 Subject: [PATCH 66/79] [MCP] Adding delete_record (#2889) ## Why make this change? ### Closes on - delete_record: #2830 ## What is this change? This PR implements built-int tool `delete_record` as part of built-in tools to support delete operation on a table entity. - deletes one record at a time - Entity name and Keys should be specified - Keys supports having a single primary key or composite key (refer sample request below) - Operation is performed based on permissions as configured in dab-config - Success or Failure message response is generated on execution of the delete operation ## How was this tested? 
Functional testing using Insomnia client by running DAB in localhost and local SQL DB database - MCP endpoint: http://localhost:5000/mcp - JSON payload (details below) - Querying and validating data in local database ## Sample Request(s) Delete by single Primary Key ``` POST: http://localhost:5000/mcp { "jsonrpc": "2.0", "id": 2, "method": "tools/call", "params": { "name": "delete_record", "arguments": { "entity": "Book", "keys": { "id": 5009 } } } } ``` Delete by a composite key ``` POST http://localhost:5000/mcp { "jsonrpc": "2.0", "id": 2, "method": "tools/call", "params": { "name": "delete_record", "arguments": { "entity": "Stock", "keys": { "categoryid": 10, "pieceid": 1 } } } } ``` --------- Co-authored-by: Jerry Nixon Co-authored-by: Rahul Nishant <53243582+ranishan@users.noreply.github.com> Co-authored-by: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> Co-authored-by: Aniruddh Munde Co-authored-by: Ruben Cerna Co-authored-by: Anusha Kolan --- .../BuiltInTools/DeleteRecordTool.cs | 346 ++++++++++++++++++ .../Utils/McpArgumentParser.cs | 127 +++++++ .../Utils/McpAuthorizationHelper.cs | 84 +++++ .../Utils/McpJsonHelper.cs | 65 ++++ .../Utils/McpResponseBuilder.cs | 103 ++++++ 5 files changed, 725 insertions(+) create mode 100644 src/Azure.DataApiBuilder.Mcp/BuiltInTools/DeleteRecordTool.cs create mode 100644 src/Azure.DataApiBuilder.Mcp/Utils/McpArgumentParser.cs create mode 100644 src/Azure.DataApiBuilder.Mcp/Utils/McpAuthorizationHelper.cs create mode 100644 src/Azure.DataApiBuilder.Mcp/Utils/McpJsonHelper.cs create mode 100644 src/Azure.DataApiBuilder.Mcp/Utils/McpResponseBuilder.cs diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/DeleteRecordTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/DeleteRecordTool.cs new file mode 100644 index 0000000000..86a5ce15ec --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/DeleteRecordTool.cs @@ -0,0 +1,346 @@ +// 
Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Data.Common; +using System.Text.Json; +using Azure.DataApiBuilder.Auth; +using Azure.DataApiBuilder.Config.DatabasePrimitives; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Core.Configurations; +using Azure.DataApiBuilder.Core.Models; +using Azure.DataApiBuilder.Core.Resolvers; +using Azure.DataApiBuilder.Core.Resolvers.Factories; +using Azure.DataApiBuilder.Core.Services; +using Azure.DataApiBuilder.Core.Services.MetadataProviders; +using Azure.DataApiBuilder.Mcp.Model; +using Azure.DataApiBuilder.Mcp.Utils; +using Azure.DataApiBuilder.Service.Exceptions; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using ModelContextProtocol.Protocol; +using static Azure.DataApiBuilder.Mcp.Model.McpEnums; + +namespace Azure.DataApiBuilder.Mcp.BuiltInTools +{ + /// + /// Tool to delete records from a table/view entity configured in DAB. + /// Supports both simple and composite primary keys. + /// + public class DeleteRecordTool : IMcpTool + { + /// + /// Gets the type of the tool, which is BuiltIn for this implementation. + /// + public ToolType ToolType { get; } = ToolType.BuiltIn; + + /// + /// Gets the metadata for the delete-record tool, including its name, description, and input schema. + /// + public Tool GetToolMetadata() + { + return new Tool + { + Name = "delete_record", + Description = "Deletes a record from a table based on primary key or composite key", + InputSchema = JsonSerializer.Deserialize( + @"{ + ""type"": ""object"", + ""properties"": { + ""entity"": { + ""type"": ""string"", + ""description"": ""The name of the entity (table) as configured in dab-config. Required."" + }, + ""keys"": { + ""type"": ""object"", + ""description"": ""Primary key values to identify the record to delete. 
For composite keys, provide all key columns as properties. Required."" + } + }, + ""required"": [""entity"", ""keys""] + }" + ) + }; + } + + /// + /// Executes the delete-record tool, deleting an existing record in the specified entity using provided keys. + /// + public async Task ExecuteAsync( + JsonDocument? arguments, + IServiceProvider serviceProvider, + CancellationToken cancellationToken = default) + { + ILogger? logger = serviceProvider.GetService>(); + + try + { + // Cancellation check at the start + cancellationToken.ThrowIfCancellationRequested(); + + // 1) Resolve required services & configuration + RuntimeConfigProvider runtimeConfigProvider = serviceProvider.GetRequiredService(); + RuntimeConfig config = runtimeConfigProvider.GetConfig(); + + // 2) Check if the tool is enabled in configuration before proceeding + if (config.McpDmlTools?.DeleteRecord != true) + { + return McpResponseBuilder.BuildErrorResult( + "ToolDisabled", + $"The {this.GetToolMetadata().Name} tool is disabled in the configuration.", + logger); + } + + // 3) Parsing & basic argument validation + if (arguments is null) + { + return McpResponseBuilder.BuildErrorResult("InvalidArguments", "No arguments provided.", logger); + } + + if (!McpArgumentParser.TryParseEntityAndKeys(arguments.RootElement, out string entityName, out Dictionary keys, out string parseError)) + { + return McpResponseBuilder.BuildErrorResult("InvalidArguments", parseError, logger); + } + + IMetadataProviderFactory metadataProviderFactory = serviceProvider.GetRequiredService(); + IMutationEngineFactory mutationEngineFactory = serviceProvider.GetRequiredService(); + + // 4) Resolve metadata for entity existence check + string dataSourceName; + ISqlMetadataProvider sqlMetadataProvider; + + try + { + dataSourceName = config.GetDataSourceNameFromEntityName(entityName); + sqlMetadataProvider = metadataProviderFactory.GetMetadataProvider(dataSourceName); + } + catch (Exception) + { + return 
McpResponseBuilder.BuildErrorResult("EntityNotFound", $"Entity '{entityName}' is not defined in the configuration.", logger); + } + + if (!sqlMetadataProvider.EntityToDatabaseObject.TryGetValue(entityName, out DatabaseObject? dbObject) || dbObject is null) + { + return McpResponseBuilder.BuildErrorResult("EntityNotFound", $"Entity '{entityName}' is not defined in the configuration.", logger); + } + + // Validate it's a table or view + if (dbObject.SourceType != EntitySourceType.Table && dbObject.SourceType != EntitySourceType.View) + { + return McpResponseBuilder.BuildErrorResult("InvalidEntity", $"Entity '{entityName}' is not a table or view. Use 'execute-entity' for stored procedures.", logger); + } + + // 5) Authorization + IAuthorizationResolver authResolver = serviceProvider.GetRequiredService(); + IHttpContextAccessor httpContextAccessor = serviceProvider.GetRequiredService(); + HttpContext? httpContext = httpContextAccessor.HttpContext; + + if (!McpAuthorizationHelper.ValidateRoleContext(httpContext, authResolver, out string roleError)) + { + return McpResponseBuilder.BuildErrorResult("PermissionDenied", $"Permission denied: {roleError}", logger); + } + + if (!McpAuthorizationHelper.TryResolveAuthorizedRole( + httpContext!, + authResolver, + entityName, + EntityActionOperation.Delete, + out string? 
effectiveRole, + out string authError)) + { + return McpResponseBuilder.BuildErrorResult("PermissionDenied", $"Permission denied: {authError}", logger); + } + + // 6) Build and validate Delete context + RequestValidator requestValidator = new(metadataProviderFactory, runtimeConfigProvider); + + DeleteRequestContext context = new( + entityName: entityName, + dbo: dbObject, + isList: false); + + foreach (KeyValuePair kvp in keys) + { + if (kvp.Value is null) + { + return McpResponseBuilder.BuildErrorResult("InvalidArguments", $"Primary key value for '{kvp.Key}' cannot be null.", logger); + } + + context.PrimaryKeyValuePairs[kvp.Key] = kvp.Value; + } + + requestValidator.ValidatePrimaryKey(context); + + // 7) Execute + DatabaseType dbType = config.GetDataSourceFromDataSourceName(dataSourceName).DatabaseType; + IMutationEngine mutationEngine = mutationEngineFactory.GetMutationEngine(dbType); + + IActionResult? mutationResult = null; + try + { + // Cancellation check before executing + cancellationToken.ThrowIfCancellationRequested(); + mutationResult = await mutationEngine.ExecuteAsync(context).ConfigureAwait(false); + } + catch (DataApiBuilderException dabEx) + { + // Handle specific DAB exceptions + logger?.LogError(dabEx, "Data API Builder error deleting record from {Entity}", entityName); + + string message = dabEx.Message; + + // Check for specific error patterns + if (message.Contains("Could not find item with", StringComparison.OrdinalIgnoreCase)) + { + string keyDetails = McpJsonHelper.FormatKeyDetails(keys); + return McpResponseBuilder.BuildErrorResult( + "RecordNotFound", + $"No record found with the specified primary key: {keyDetails}", + logger); + } + else if (message.Contains("violates foreign key constraint", StringComparison.OrdinalIgnoreCase) || + message.Contains("REFERENCE constraint", StringComparison.OrdinalIgnoreCase)) + { + return McpResponseBuilder.BuildErrorResult( + "ConstraintViolation", + "Cannot delete record due to foreign key constraint. 
Other records depend on this record.", + logger); + } + else if (message.Contains("permission", StringComparison.OrdinalIgnoreCase) || + message.Contains("authorization", StringComparison.OrdinalIgnoreCase)) + { + return McpResponseBuilder.BuildErrorResult( + "PermissionDenied", + "You do not have permission to delete this record.", + logger); + } + else if (message.Contains("invalid", StringComparison.OrdinalIgnoreCase) && + message.Contains("type", StringComparison.OrdinalIgnoreCase)) + { + return McpResponseBuilder.BuildErrorResult( + "InvalidArguments", + "Invalid data type for one or more key values.", + logger); + } + + // For any other DAB exceptions, return the message as-is + return McpResponseBuilder.BuildErrorResult( + "DataApiBuilderError", + dabEx.Message, + logger); + } + catch (SqlException sqlEx) + { + // Handle SQL Server specific errors + logger?.LogError(sqlEx, "SQL Server error deleting record from {Entity}", entityName); + string errorMessage = sqlEx.Number switch + { + 547 => "Cannot delete record due to foreign key constraint. Other records depend on this record.", + 2627 or 2601 => "Cannot delete record due to unique constraint violation.", + 229 or 262 => $"Permission denied to delete from table '{dbObject.FullName}'.", + 208 => $"Table '{dbObject.FullName}' not found in the database.", + _ => $"Database error: {sqlEx.Message}" + }; + return McpResponseBuilder.BuildErrorResult("DatabaseError", errorMessage, logger); + } + catch (DbException dbEx) + { + // Handle generic database exceptions (works for PostgreSQL, MySQL, etc.) + logger?.LogError(dbEx, "Database error deleting record from {Entity}", entityName); + + // Check for common patterns in error messages + string errorMsg = dbEx.Message.ToLowerInvariant(); + if (errorMsg.Contains("foreign key") || errorMsg.Contains("constraint")) + { + return McpResponseBuilder.BuildErrorResult( + "ConstraintViolation", + "Cannot delete record due to foreign key constraint. 
Other records depend on this record.", + logger); + } + else if (errorMsg.Contains("not found") || errorMsg.Contains("does not exist")) + { + return McpResponseBuilder.BuildErrorResult( + "RecordNotFound", + "No record found with the specified primary key.", + logger); + } + + return McpResponseBuilder.BuildErrorResult("DatabaseError", $"Database error: {dbEx.Message}", logger); + } + catch (InvalidOperationException ioEx) when (ioEx.Message.Contains("connection", StringComparison.OrdinalIgnoreCase)) + { + // Handle connection-related issues + logger?.LogError(ioEx, "Database connection error"); + return McpResponseBuilder.BuildErrorResult("ConnectionError", "Failed to connect to the database.", logger); + } + catch (TimeoutException timeoutEx) + { + // Handle query timeout + logger?.LogError(timeoutEx, "Delete operation timeout for {Entity}", entityName); + return McpResponseBuilder.BuildErrorResult("TimeoutError", "The delete operation timed out.", logger); + } + catch (Exception ex) + { + string errorMsg = ex.Message ?? 
string.Empty; + + if (errorMsg.Contains("Could not find", StringComparison.OrdinalIgnoreCase) || + errorMsg.Contains("record not found", StringComparison.OrdinalIgnoreCase)) + { + string keyDetails = McpJsonHelper.FormatKeyDetails(keys); + return McpResponseBuilder.BuildErrorResult( + "RecordNotFound", + $"No entity found with the given key {keyDetails}.", + logger); + } + else + { + // Re-throw unexpected exceptions + throw; + } + } + + // 8) Build response + // Based on SqlMutationEngine, delete operations typically return NoContentResult + // We build a success response with just the operation details + Dictionary responseData = new() + { + ["entity"] = entityName, + ["keyDetails"] = McpJsonHelper.FormatKeyDetails(keys), + ["message"] = "Record deleted successfully" + }; + + // If the mutation result is OkObjectResult (which would be unusual for delete), + // include the result value directly without re-serialization + if (mutationResult is OkObjectResult okObjectResult && okObjectResult.Value is not null) + { + responseData["result"] = okObjectResult.Value; + } + + return McpResponseBuilder.BuildSuccessResult( + responseData, + logger, + $"DeleteRecordTool success for entity {entityName}." + ); + } + catch (OperationCanceledException) + { + return McpResponseBuilder.BuildErrorResult("OperationCanceled", "The delete operation was canceled.", logger); + } + catch (ArgumentException argEx) + { + return McpResponseBuilder.BuildErrorResult("InvalidArguments", argEx.Message, logger); + } + catch (Exception ex) + { + ILogger? 
innerLogger = serviceProvider.GetService>(); + innerLogger?.LogError(ex, "Unexpected error in DeleteRecordTool."); + + return McpResponseBuilder.BuildErrorResult( + "UnexpectedError", + "An unexpected error occurred during the delete operation.", + logger); + } + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Utils/McpArgumentParser.cs b/src/Azure.DataApiBuilder.Mcp/Utils/McpArgumentParser.cs new file mode 100644 index 0000000000..04d14eb5d6 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Utils/McpArgumentParser.cs @@ -0,0 +1,127 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json; + +namespace Azure.DataApiBuilder.Mcp.Utils +{ + /// + /// Utility class for parsing MCP tool arguments. + /// + public static class McpArgumentParser + { + /// + /// Parses entity and keys arguments for delete/update operations. + /// + public static bool TryParseEntityAndKeys( + JsonElement root, + out string entityName, + out Dictionary keys, + out string error) + { + entityName = string.Empty; + keys = new Dictionary(); + error = string.Empty; + + if (!root.TryGetProperty("entity", out JsonElement entityEl) || + !root.TryGetProperty("keys", out JsonElement keysEl)) + { + error = "Missing required arguments 'entity' or 'keys'."; + return false; + } + + // Parse and validate entity name + entityName = entityEl.GetString() ?? string.Empty; + if (string.IsNullOrWhiteSpace(entityName)) + { + error = "Entity is required"; + return false; + } + + // Parse and validate keys + if (keysEl.ValueKind != JsonValueKind.Object) + { + error = "'keys' must be a JSON object."; + return false; + } + + try + { + keys = JsonSerializer.Deserialize>(keysEl) ?? 
new Dictionary(); + } + catch (Exception ex) + { + error = $"Failed to parse 'keys': {ex.Message}"; + return false; + } + + if (keys.Count == 0) + { + error = "Keys are required"; + return false; + } + + // Validate key values + foreach (KeyValuePair kv in keys) + { + if (kv.Value is null || (kv.Value is string str && string.IsNullOrWhiteSpace(str))) + { + error = $"Primary key value for '{kv.Key}' cannot be null or empty"; + return false; + } + } + + return true; + } + + /// + /// Parses entity, keys, and fields arguments for update operations. + /// + public static bool TryParseEntityKeysAndFields( + JsonElement root, + out string entityName, + out Dictionary keys, + out Dictionary fields, + out string error) + { + fields = new Dictionary(); + + // First parse entity and keys + if (!TryParseEntityAndKeys(root, out entityName, out keys, out error)) + { + return false; + } + + // Then parse fields + if (!root.TryGetProperty("fields", out JsonElement fieldsEl)) + { + error = "Missing required argument 'fields'."; + return false; + } + + if (fieldsEl.ValueKind != JsonValueKind.Object) + { + error = "'fields' must be a JSON object."; + return false; + } + + try + { + fields = JsonSerializer.Deserialize>(fieldsEl) ?? new Dictionary(); + } + catch (Exception ex) + { + error = $"Failed to parse 'fields': {ex.Message}"; + return false; + } + + if (fields.Count == 0) + { + error = "At least one field must be provided"; + return false; + } + + return true; + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Utils/McpAuthorizationHelper.cs b/src/Azure.DataApiBuilder.Mcp/Utils/McpAuthorizationHelper.cs new file mode 100644 index 0000000000..1fdf7d45d3 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Utils/McpAuthorizationHelper.cs @@ -0,0 +1,84 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +using Azure.DataApiBuilder.Auth; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Core.Authorization; +using Microsoft.AspNetCore.Http; + +namespace Azure.DataApiBuilder.Mcp.Utils +{ + /// + /// Helper class for MCP tool authorization operations. + /// + public static class McpAuthorizationHelper + { + /// + /// Validates if the current request has a valid role context. + /// + public static bool ValidateRoleContext( + HttpContext? httpContext, + IAuthorizationResolver authResolver, + out string error) + { + error = string.Empty; + + if (httpContext is null || !authResolver.IsValidRoleContext(httpContext)) + { + error = "Unable to resolve a valid role context"; + return false; + } + + return true; + } + + /// + /// Tries to resolve an authorized role for the given entity and operation. + /// + public static bool TryResolveAuthorizedRole( + HttpContext httpContext, + IAuthorizationResolver authorizationResolver, + string entityName, + EntityActionOperation operation, + out string? 
effectiveRole, + out string error) + { + effectiveRole = null; + error = string.Empty; + + string roleHeader = httpContext.Request.Headers[AuthorizationResolver.CLIENT_ROLE_HEADER].ToString(); + + if (string.IsNullOrWhiteSpace(roleHeader)) + { + error = "Client role header is missing or empty."; + return false; + } + + string[] roles = roleHeader + .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + + if (roles.Length == 0) + { + error = "Client role header is missing or empty."; + return false; + } + + foreach (string role in roles) + { + bool allowed = authorizationResolver.AreRoleAndOperationDefinedForEntity( + entityName, role, operation); + + if (allowed) + { + effectiveRole = role; + return true; + } + } + + error = $"You do not have permission to perform {operation} operation for this entity."; + return false; + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Utils/McpJsonHelper.cs b/src/Azure.DataApiBuilder.Mcp/Utils/McpJsonHelper.cs new file mode 100644 index 0000000000..b3d3f11492 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Utils/McpJsonHelper.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json; + +namespace Azure.DataApiBuilder.Mcp.Utils +{ + /// + /// Helper methods for JSON operations in MCP tools. + /// + public static class McpJsonHelper + { + /// + /// Converts JsonElement to .NET object dynamically. + /// + public static object? GetJsonValue(JsonElement element) + { + return element.ValueKind switch + { + JsonValueKind.String => element.GetString(), + JsonValueKind.Number => + // Try to get as decimal first for maximum precision + element.TryGetDecimal(out decimal d) ? d : + element.TryGetInt64(out long l) ? 
l : + element.GetDouble(), + JsonValueKind.True => true, + JsonValueKind.False => false, + JsonValueKind.Null => null, + _ => element.GetRawText() // fallback for arrays/objects + }; + } + + /// + /// Extracts values from a JSON value array typically returned by DAB engine. + /// + public static Dictionary ExtractValuesFromEngineResult(JsonElement engineRootElement) + { + Dictionary resultData = new(); + + // Navigate to "value" array in the engine result + if (engineRootElement.TryGetProperty("value", out JsonElement valueArray) && + valueArray.ValueKind == JsonValueKind.Array && + valueArray.GetArrayLength() > 0) + { + JsonElement firstItem = valueArray[0]; + + // Include all properties from the result + foreach (JsonProperty prop in firstItem.EnumerateObject()) + { + resultData[prop.Name] = GetJsonValue(prop.Value); + } + } + + return resultData; + } + + /// + /// Creates a formatted key details string from a dictionary of key-value pairs. + /// + public static string FormatKeyDetails(Dictionary keys) + { + return string.Join(", ", keys.Select(k => $"{k.Key}={k.Value}")); + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Utils/McpResponseBuilder.cs b/src/Azure.DataApiBuilder.Mcp/Utils/McpResponseBuilder.cs new file mode 100644 index 0000000000..afbccbda38 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Utils/McpResponseBuilder.cs @@ -0,0 +1,103 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Logging; +using ModelContextProtocol.Protocol; + +namespace Azure.DataApiBuilder.Mcp.Utils +{ + /// + /// Utility class for building standardized MCP tool responses. + /// + public static class McpResponseBuilder + { + /// + /// Builds a success response for MCP tools. + /// + public static CallToolResult BuildSuccessResult( + Dictionary responseData, + ILogger? logger = null, + string? 
logMessage = null) + { + responseData["status"] = "success"; + + string output = JsonSerializer.Serialize(responseData, new JsonSerializerOptions { WriteIndented = true }); + + if (logger != null && !string.IsNullOrEmpty(logMessage)) + { + logger.LogInformation(logMessage); + } + + return new CallToolResult + { + Content = new List + { + new TextContentBlock { Type = "text", Text = output } + } + }; + } + + /// + /// Builds an error response for MCP tools. + /// + public static CallToolResult BuildErrorResult( + string errorType, + string message, + ILogger? logger = null) + { + Dictionary errorObj = new() + { + ["status"] = "error", + ["error"] = new Dictionary + { + ["type"] = errorType, + ["message"] = message + } + }; + + string output = JsonSerializer.Serialize(errorObj, new JsonSerializerOptions { WriteIndented = true }); + + logger?.LogWarning("MCP Tool error {ErrorType}: {Message}", errorType, message); + + return new CallToolResult + { + Content = new List + { + new TextContentBlock { Type = "text", Text = output } + }, + IsError = true + }; + } + + /// + /// Extracts a JSON string from a typical IActionResult. + /// Falls back to "{}" for unsupported/empty cases to avoid leaking internals. + /// + public static string ExtractResultJson(IActionResult? result) + { + switch (result) + { + case ObjectResult obj: + if (obj.Value is JsonElement je) + { + return je.GetRawText(); + } + + if (obj.Value is JsonDocument jd) + { + return jd.RootElement.GetRawText(); + } + + return JsonSerializer.Serialize(obj.Value ?? new object()); + + case ContentResult content: + return string.IsNullOrWhiteSpace(content.Content) ? "{}" : content.Content; + + default: + return "{}"; + } + } + } +} From 3df5d9ff7baf139e2639cfcc8adbf1d366b8d178 Mon Sep 17 00:00:00 2001 From: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> Date: Wed, 15 Oct 2025 23:56:35 +0000 Subject: [PATCH 67/79] [MCP] Added read_records tool implementation (#2893) ## Why make this change? 
- #2833 One of the main DML tools that will be used is `read_records`, which will allow the MCP to read the records of an entity based on the needs of the user. ## What is this change? - Create the `read_records` tool: - First it ensures that the MCP has the proper authorization to run the query - Then it creates the context with the parameters that were requested by the user - Lastly it calls the `SqlQueryEngine` in order to create and run the query and receive the results - The `GenerateOrderByLists` function inside the `RequestParser.cs` file was changed from private to public in order to allow the `read_records` tool to also use it to generate the proper context for the query. - Some functions inside of the `SqlResponseHelper.cs` file were changed to check if the query request comes from the `read_records` tool. This is done in order to output the correct information, right now the REST requests can also return a `nextLink` object which gives the API link necessary to get values in the case that not all of them were shown. We want to do something similar with the `read_records` tool, however we only want to return the `after` object which is the parameter that allows the query to know the exact place where it left from the previous query. This gets rid of unecessary information that can be found in the `nextLink` object. Exceptions thrown when: 1. Entity is empty or null. 2. Parameters are not of the correct type. 3. Parameters are not correctly written. 4. Values inside `orderby` parameter are empty or null. (Note: `orderby` is an optional value as a whole, but the individual values it contains need to exist) 5. Not having necessary permissions. Errors: 1. PermissionDenied - No permssions to execute. 2. InvalidArguments - No arguments provided. 6. InvalidArguments - Some missing arguments. 7. EntityNotFound - Entity not defined in the configuration. 8. UnexpectedError - Any other UnexpectedError. ## How was this tested? 
- [ ] Integration Tests - [ ] Unit Tests - [x] Manual testing via MCP Inspector These scenarios were manually tested with the MCP Inspector, as automated tests are not yet implemented. ### Valid Cases 1. Successful Read - Provided valid entity and other parameters - Verified that the received values are the same for MCP as for REST endpoint 2. Permission Enforcement - Modified role permissions and verified that access control is enforced correctly. 3. Parameter Testing - Provided different valid values for the multiple optional parameters and ensured they work the same way they do with the REST endpoint. ### Failure Cases 1. Null/Empty Entity Name - Provided an empty string for entity - Received InvalidArguments error 2. Invalid Entity Name - Provided string that is not found in config - Received InvalidArguments error 3. Invalid Parameters - Provided non-existent fields for `select` parameter - Received InvalidArguments error - Provided invalid string for `filter` parameter - Received InvalidArguments error - Provided integer values less than or equal to 0 in the `first` parameter - Received InvalidArguments error - Provided invalid string for `orderby` parameter - Received InvalidArguments error - Provided invalid string for `after` parameter - Received InvalidArguments error 4. 
Unauthorized Role Context - Removed or misconfigured role-header - Received PermissionDenied error ## Sample Request(s) `{ Entity: Book }` `{ Select: title,publisher_id }` `{ First: 3 }` `{ Orderby: ["publisher_id asc", "title desc"] }` --- .../BuiltInTools/ReadRecordsTool.cs | 422 ++++++++++++++++++ src/Core/Parsers/FilterParser.cs | 1 - src/Core/Parsers/RequestParser.cs | 2 +- src/Core/Resolvers/SqlResponseHelpers.cs | 91 ++-- 4 files changed, 485 insertions(+), 31 deletions(-) create mode 100644 src/Azure.DataApiBuilder.Mcp/BuiltInTools/ReadRecordsTool.cs diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/ReadRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/ReadRecordsTool.cs new file mode 100644 index 0000000000..db1c761d2f --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/ReadRecordsTool.cs @@ -0,0 +1,422 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Data.Common; +using System.Text.Json; +using Azure.DataApiBuilder.Auth; +using Azure.DataApiBuilder.Config.DatabasePrimitives; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Core.Authorization; +using Azure.DataApiBuilder.Core.Configurations; +using Azure.DataApiBuilder.Core.Models; +using Azure.DataApiBuilder.Core.Parsers; +using Azure.DataApiBuilder.Core.Resolvers; +using Azure.DataApiBuilder.Core.Resolvers.Factories; +using Azure.DataApiBuilder.Core.Services; +using Azure.DataApiBuilder.Core.Services.MetadataProviders; +using Azure.DataApiBuilder.Mcp.Model; +using Azure.DataApiBuilder.Service.Exceptions; +using Microsoft.AspNetCore.Authorization; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using ModelContextProtocol.Protocol; +using static Azure.DataApiBuilder.Mcp.Model.McpEnums; + +namespace Azure.DataApiBuilder.Mcp.BuiltInTools +{ + public class ReadRecordsTool : IMcpTool + { + public ToolType 
ToolType { get; } = ToolType.BuiltIn; + + public Tool GetToolMetadata() + { + return new Tool + { + Name = "read_records", + Description = "Retrieves records from a given entity.", + InputSchema = JsonSerializer.Deserialize( + @"{ + ""type"": ""object"", + ""properties"": { + ""entity"": { + ""type"": ""string"", + ""description"": ""The name of the entity to read, as provided by the describe_entities tool. Required."" + }, + ""select"": { + ""type"": ""string"", + ""description"": ""A comma-separated list of field names to include in the response. If omitted, all fields are returned. Optional."" + }, + ""filter"": { + ""type"": ""string"", + ""description"": ""A case-insensitive OData-like expression that defines a query predicate. Supports logical grouping with parentheses and the operators eq, ne, gt, ge, lt, le, and, or, not. Examples: year ge 1990, date lt 2025-01-01T00:00:00Z, (title eq 'Foundation') and (available ne false). Optional."" + }, + ""first"": { + ""type"": ""integer"", + ""description"": ""The maximum number of records to return in the current page. Optional."" + }, + ""orderby"": { + ""type"": ""array"", + ""items"": { ""type"": ""string"" }, + ""description"": ""A list of field names and directions for sorting, for example 'name asc' or 'year desc'. Optional."" + }, + ""after"": { + ""type"": ""string"", + ""description"": ""A cursor token for retrieving the next page of results. Returned as 'after' in the previous response. Optional."" + } + } + }" + ) + }; + } + + public async Task ExecuteAsync( + JsonDocument? arguments, + IServiceProvider serviceProvider, + CancellationToken cancellationToken = default) + { + ILogger? 
logger = serviceProvider.GetService>(); + + // Get runtime config + RuntimeConfigProvider runtimeConfigProvider = serviceProvider.GetRequiredService(); + RuntimeConfig runtimeConfig = runtimeConfigProvider.GetConfig(); + + if (runtimeConfig.McpDmlTools?.ReadRecords is not true) + { + return BuildErrorResult( + "ToolDisabled", + "The read_records tool is disabled in the configuration.", + logger); + } + + try + { + cancellationToken.ThrowIfCancellationRequested(); + + string entityName; + string? select = null; + string? filter = null; + int? first = null; + IEnumerable? orderby = null; + string? after = null; + + // Extract arguments + if (arguments == null) + { + return BuildErrorResult("InvalidArguments", "No arguments provided.", logger); + } + + JsonElement root = arguments.RootElement; + + if (!root.TryGetProperty("entity", out JsonElement entityElement) || string.IsNullOrWhiteSpace(entityElement.GetString())) + { + return BuildErrorResult("InvalidArguments", "Missing required argument 'entity'.", logger); + } + + entityName = entityElement.GetString()!; + + if (root.TryGetProperty("select", out JsonElement selectElement)) + { + select = selectElement.GetString(); + } + + if (root.TryGetProperty("filter", out JsonElement filterElement)) + { + filter = filterElement.GetString(); + } + + if (root.TryGetProperty("first", out JsonElement firstElement)) + { + first = firstElement.GetInt32(); + } + + if (root.TryGetProperty("orderby", out JsonElement orderbyElement)) + { + orderby = (IEnumerable?)orderbyElement.EnumerateArray().Select(e => e.GetString()); + } + + if (root.TryGetProperty("after", out JsonElement afterElement)) + { + after = afterElement.GetString(); + } + + // Get required services & configuration + IQueryEngineFactory queryEngineFactory = serviceProvider.GetRequiredService(); + IMetadataProviderFactory metadataProviderFactory = serviceProvider.GetRequiredService(); + + // Check metadata for entity exists + string dataSourceName; + 
ISqlMetadataProvider sqlMetadataProvider; + + try + { + dataSourceName = runtimeConfig.GetDataSourceNameFromEntityName(entityName); + sqlMetadataProvider = metadataProviderFactory.GetMetadataProvider(dataSourceName); + } + catch (Exception) + { + return BuildErrorResult("EntityNotFound", $"Entity '{entityName}' is not defined in the configuration.", logger); + } + + if (!sqlMetadataProvider.EntityToDatabaseObject.TryGetValue(entityName, out DatabaseObject? dbObject) || dbObject is null) + { + return BuildErrorResult("EntityNotFound", $"Entity '{entityName}' is not defined in the configuration.", logger); + } + + // Authorization check in the existing entity + IAuthorizationResolver authResolver = serviceProvider.GetRequiredService(); + IAuthorizationService authorizationService = serviceProvider.GetRequiredService(); + IHttpContextAccessor httpContextAccessor = serviceProvider.GetRequiredService(); + HttpContext? httpContext = httpContextAccessor.HttpContext; + + if (httpContext is null || !authResolver.IsValidRoleContext(httpContext)) + { + return BuildErrorResult("PermissionDenied", $"You do not have permission to read records for entity '{entityName}'.", logger); + } + + if (!TryResolveAuthorizedRole(httpContext, authResolver, entityName, out string? effectiveRole, out string authError)) + { + return BuildErrorResult("PermissionDenied", authError, logger); + } + + // Build and validate Find context + RequestValidator requestValidator = new(metadataProviderFactory, runtimeConfigProvider); + FindRequestContext context = new(entityName, dbObject, true); + httpContext.Request.Method = "GET"; + + requestValidator.ValidateEntity(entityName); + + if (!string.IsNullOrWhiteSpace(select)) + { + // Update the context to specify which fields will be returned from the entity. 
+ IEnumerable fieldsReturnedForFind = select.Split(",").ToList(); + context.UpdateReturnFields(fieldsReturnedForFind); + } + + if (!string.IsNullOrWhiteSpace(filter)) + { + string filterQueryString = $"?{RequestParser.FILTER_URL}={filter}"; + context.FilterClauseInUrl = sqlMetadataProvider.GetODataParser().GetFilterClause(filterQueryString, $"{context.EntityName}.{context.DatabaseObject.FullName}"); + } + + if (orderby is not null && orderby.Count() != 0) + { + string sortQueryString = $"?{RequestParser.SORT_URL}="; + foreach (string param in orderby) + { + if (string.IsNullOrWhiteSpace(param)) + { + return BuildErrorResult("InvalidArguments", "Parameters inside 'orderby' argument cannot be empty or null.", logger); + } + + sortQueryString += $"{param}, "; + } + + sortQueryString = sortQueryString.Substring(0, sortQueryString.Length - 2); + (context.OrderByClauseInUrl, context.OrderByClauseOfBackingColumns) = RequestParser.GenerateOrderByLists(context, sqlMetadataProvider, sortQueryString); + } + + context.First = first; + context.After = after; + + // The final authorization check on columns occurs after the request is fully parsed and validated. + requestValidator.ValidateRequestContext(context); + + AuthorizationResult authorizationResult = await authorizationService.AuthorizeAsync( + user: httpContext.User, + resource: context, + requirements: new[] { new ColumnsPermissionsRequirement() }); + if (!authorizationResult.Succeeded) + { + return BuildErrorResult("PermissionDenied", DataApiBuilderException.AUTHORIZATION_FAILURE, logger); + } + + // Execute + IQueryEngine queryEngine = queryEngineFactory.GetQueryEngine(sqlMetadataProvider.GetDatabaseType()); + JsonDocument? queryResult = await queryEngine.ExecuteAsync(context); + IActionResult actionResult = queryResult is null ? 
SqlResponseHelpers.FormatFindResult(JsonDocument.Parse("[]").RootElement.Clone(), context, metadataProviderFactory.GetMetadataProvider(dataSourceName), runtimeConfigProvider.GetConfig(), httpContext, true) + : SqlResponseHelpers.FormatFindResult(queryResult.RootElement.Clone(), context, metadataProviderFactory.GetMetadataProvider(dataSourceName), runtimeConfigProvider.GetConfig(), httpContext, true); + + // Normalize response + string rawPayloadJson = ExtractResultJson(actionResult); + JsonDocument result = JsonDocument.Parse(rawPayloadJson); + JsonElement queryRoot = result.RootElement; + + return BuildSuccessResult( + entityName, + queryRoot.Clone(), + logger); + } + catch (OperationCanceledException) + { + return BuildErrorResult("OperationCanceled", "The read operation was canceled.", logger); + } + catch (DbException argEx) + { + return BuildErrorResult("DatabaseOperationFailed", argEx.Message, logger); + } + catch (ArgumentException argEx) + { + return BuildErrorResult("InvalidArguments", argEx.Message, logger); + } + catch (DataApiBuilderException argEx) + { + return BuildErrorResult(argEx.StatusCode.ToString(), argEx.Message, logger); + } + catch (Exception) + { + return BuildErrorResult("UnexpectedError", "Unexpected error occurred in ReadRecordsTool.", logger); + } + } + + /// + /// Ensures that the role used on the request has the necessary authorizations. + /// + /// Contains request headers and metadata of the user. + /// Resolver used to check if role has necessary authorizations. + /// Name of the entity used in the request. + /// Role defined in client role header. + /// Error message given to the user. + /// True if the user role is authorized, along with the role. + private static bool TryResolveAuthorizedRole( + HttpContext httpContext, + IAuthorizationResolver authorizationResolver, + string entityName, + out string? 
effectiveRole, + out string error) + { + effectiveRole = null; + error = string.Empty; + + string roleHeader = httpContext.Request.Headers[AuthorizationResolver.CLIENT_ROLE_HEADER].ToString(); + + if (string.IsNullOrWhiteSpace(roleHeader)) + { + error = $"Client role header '{AuthorizationResolver.CLIENT_ROLE_HEADER}' is missing or empty."; + return false; + } + + string[] roles = roleHeader + .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + + if (roles.Length == 0) + { + error = $"Client role header '{AuthorizationResolver.CLIENT_ROLE_HEADER}' is missing or empty."; + return false; + } + + foreach (string role in roles) + { + bool allowed = authorizationResolver.AreRoleAndOperationDefinedForEntity( + entityName, role, EntityActionOperation.Read); + + if (allowed) + { + effectiveRole = role; + return true; + } + } + + error = $"You do not have permission to read records for entity '{entityName}'."; + return false; + } + + /// + /// Returns a result from the query in the case that it was successfully ran. + /// + /// Name of the entity used in the request. + /// Query result from engine. + /// MCP logger that returns all logged events. + private static CallToolResult BuildSuccessResult( + string entityName, + JsonElement engineRootElement, + ILogger? logger) + { + // Build normalized response + Dictionary normalized = new() + { + ["status"] = "success", + ["result"] = engineRootElement // only requested values + }; + + string output = JsonSerializer.Serialize(normalized, new JsonSerializerOptions { WriteIndented = true }); + + logger?.LogInformation("ReadRecordsTool success for entity {Entity}.", entityName); + + return new CallToolResult + { + Content = new List + { + new TextContentBlock { Type = "text", Text = output } + } + }; + } + + /// + /// Returns an error if the query failed to run at any point. + /// + /// Type of error that is encountered. 
+ /// Error message given to the user. + /// MCP logger that returns all logged events. + private static CallToolResult BuildErrorResult( + string errorType, + string message, + ILogger? logger) + { + Dictionary errorObj = new() + { + ["status"] = "error", + ["error"] = new Dictionary + { + ["type"] = errorType, + ["message"] = message + } + }; + + string output = JsonSerializer.Serialize(errorObj); + + logger?.LogError("ReadRecordsTool error {ErrorType}: {Message}", errorType, message); + + return new CallToolResult + { + Content = + [ + new TextContentBlock { Type = "text", Text = output } + ], + IsError = true + }; + } + + /// + /// Extracts a JSON string from a typical IActionResult. + /// Falls back to "{}" for unsupported/empty cases to avoid leaking internals. + /// + private static string ExtractResultJson(IActionResult? result) + { + switch (result) + { + case ObjectResult obj: + if (obj.Value is JsonElement je) + { + return je.GetRawText(); + } + + if (obj.Value is JsonDocument jd) + { + return jd.RootElement.GetRawText(); + } + + return JsonSerializer.Serialize(obj.Value ?? new object()); + + case ContentResult content: + return string.IsNullOrWhiteSpace(content.Content) ? 
"{}" : content.Content; + + default: + return "{}"; + } + } + } +} diff --git a/src/Core/Parsers/FilterParser.cs b/src/Core/Parsers/FilterParser.cs index ec765e26a6..c9cfc1eb53 100644 --- a/src/Core/Parsers/FilterParser.cs +++ b/src/Core/Parsers/FilterParser.cs @@ -44,7 +44,6 @@ public FilterClause GetFilterClause(string filterQueryString, string resourcePat { if (_model is null) { - throw new DataApiBuilderException( message: "The runtime has not been initialized with an Edm model.", statusCode: HttpStatusCode.InternalServerError, diff --git a/src/Core/Parsers/RequestParser.cs b/src/Core/Parsers/RequestParser.cs index bb4dd8d51e..6402ce4ecb 100644 --- a/src/Core/Parsers/RequestParser.cs +++ b/src/Core/Parsers/RequestParser.cs @@ -147,7 +147,7 @@ public static void ParseQueryString(RestRequestContext context, ISqlMetadataProv /// associated with the sort param. /// A List /// - private static (List?, List?) GenerateOrderByLists(RestRequestContext context, + public static (List?, List?) GenerateOrderByLists(RestRequestContext context, ISqlMetadataProvider sqlMetadataProvider, string sortQueryString) { diff --git a/src/Core/Resolvers/SqlResponseHelpers.cs b/src/Core/Resolvers/SqlResponseHelpers.cs index 8b0a0edb67..d0bf768281 100644 --- a/src/Core/Resolvers/SqlResponseHelpers.cs +++ b/src/Core/Resolvers/SqlResponseHelpers.cs @@ -23,21 +23,23 @@ public class SqlResponseHelpers /// /// Format the results from a Find operation. Check if there is a requirement - /// for a nextLink, and if so, add this value to the array of JsonElements to + /// for a nextLink/after, and if so, add this value to the array of JsonElements to /// be used as part of the response. /// /// The JsonDocument from the query. /// The RequestContext. - /// the metadataprovider. + /// The metadataprovider. 
/// Runtimeconfig object /// HTTP context associated with the API request + /// True if request is done through MCP endpoint /// An OkObjectResult from a Find operation that has been correctly formatted. public static OkObjectResult FormatFindResult( JsonElement findOperationResponse, FindRequestContext context, ISqlMetadataProvider sqlMetadataProvider, RuntimeConfig runtimeConfig, - HttpContext httpContext) + HttpContext httpContext, + bool? isMcpRequest = null) { // When there are no rows returned from the database, the jsonElement will be an empty array. @@ -55,7 +57,7 @@ public static OkObjectResult FormatFindResult( uint maxPageSize = runtimeConfig.MaxPageSize(); // If the results are not a collection or if the query does not have a next page - // no nextLink is needed. So, the response is returned after removing the extra fields. + // no nextLink/after is needed. So, the response is returned after removing the extra fields. if (findOperationResponse.ValueKind is not JsonValueKind.Array || !SqlPaginationUtil.HasNext(findOperationResponse, context.First, defaultPageSize, maxPageSize)) { // If there are no additional fields present, the response is returned directly. When there @@ -89,27 +91,43 @@ public static OkObjectResult FormatFindResult( tableName: context.DatabaseObject.Name, sqlMetadataProvider: sqlMetadataProvider); - string basePaginationUri = SqlPaginationUtil.ConstructBaseUriForPagination(httpContext, runtimeConfig.Runtime?.BaseRoute); - - // Build the query string with the $after token. - string queryString = SqlPaginationUtil.BuildQueryStringWithAfterToken( - queryStringParameters: context!.ParsedQueryString, - newAfterPayload: after); - - // Get the final consolidated nextLink for the pagination. 
- JsonElement nextLink = SqlPaginationUtil.GetConsolidatedNextLinkForPagination( - baseUri: basePaginationUri, - queryString: queryString, - isNextLinkRelative: runtimeConfig.NextLinkRelative()); - // When there are extra fields present, they are removed before returning the response. if (extraFieldsInResponse.Count > 0) { rootEnumerated = RemoveExtraFieldsInResponseWithMultipleItems(rootEnumerated, extraFieldsInResponse); } - rootEnumerated.Add(nextLink); - return OkResponse(JsonSerializer.SerializeToElement(rootEnumerated)); + // Create an 'after' object if the request comes from MCP endpoint. + if (isMcpRequest is true) + { + string jsonString = JsonSerializer.Serialize(new[] + { + new { after = after } + }); + JsonElement afterElement = JsonSerializer.Deserialize(jsonString); + + rootEnumerated.Add(afterElement); + } + // Create a 'nextLink' object if the request comes from REST endpoint. + else + { + string basePaginationUri = SqlPaginationUtil.ConstructBaseUriForPagination(httpContext, runtimeConfig.Runtime?.BaseRoute); + + // Build the query string with the $after token. + string queryString = SqlPaginationUtil.BuildQueryStringWithAfterToken( + queryStringParameters: context!.ParsedQueryString, + newAfterPayload: after); + + // Get the final consolidated nextLink for the pagination. + JsonElement nextLink = SqlPaginationUtil.GetConsolidatedNextLinkForPagination( + baseUri: basePaginationUri, + queryString: queryString, + isNextLinkRelative: runtimeConfig.NextLinkRelative()); + + rootEnumerated.Add(nextLink); + } + + return OkResponse(JsonSerializer.SerializeToElement(rootEnumerated), isMcpRequest); } /// @@ -186,8 +204,9 @@ private static JsonElement RemoveExtraFieldsInResponseWithSingleItem(JsonElement /// form that complies with vNext Api guidelines. /// /// Value representing the Json results of the client's request. + /// True if request is done through MCP endpoint. /// Correctly formatted OkObjectResult. 
- public static OkObjectResult OkResponse(JsonElement jsonResult) + public static OkObjectResult OkResponse(JsonElement jsonResult, bool? isMcpRequest = null) { // For consistency we return all values as type Array if (jsonResult.ValueKind != JsonValueKind.Array) @@ -200,20 +219,34 @@ public static OkObjectResult OkResponse(JsonElement jsonResult) // More than 0 records, and the last element is of type array, then we have pagination if (resultEnumerated.Count > 0 && resultEnumerated[resultEnumerated.Count - 1].ValueKind == JsonValueKind.Array) { - // Get the nextLink + // Get the 'nextLink' or 'after' // resultEnumerated will be an array of the form - // [{object1}, {object2},...{objectlimit}, [{nextLinkObject}]] - // if the last element is of type array, we know it is nextLink - // we strip the "[" and "]" and then save the nextLink element - // into a dictionary with a key of "nextLink" and a value that - // represents the nextLink data we require. - string nextLinkJsonString = JsonSerializer.Serialize(resultEnumerated[resultEnumerated.Count - 1]); - Dictionary nextLink = JsonSerializer.Deserialize>(nextLinkJsonString[1..^1])!; + // [{object1}, {object2},...{objectlimit}, [{nextLinkObject/afterObject}]] + // if the last element is of type array, we know it is 'nextLink' + // if the request is done through the REST endpoint and it is + // 'after' if the request is done through the MCP endpoint, + // we strip the "[" and "]" and then save the element + // into a dictionary with a key of "nextLinkAfter" and a value that + // represents the nextLink/after data we require. + string nextLinkAfterJsonString = JsonSerializer.Serialize(resultEnumerated[resultEnumerated.Count - 1]); + Dictionary nextLinkAfter = JsonSerializer.Deserialize>(nextLinkAfterJsonString[1..^1])!; IEnumerable value = resultEnumerated.Take(resultEnumerated.Count - 1); + + // Check 'after' object if request is done through MCP endpoint. 
+ if (isMcpRequest is true) + { + return new OkObjectResult(new + { + value = value, + after = nextLinkAfter["after"] + }); + } + + // Check 'nextLink' object if request is done through REST endpoint. return new OkObjectResult(new { value = value, - @nextLink = nextLink["nextLink"] + @nextLink = nextLinkAfter["nextLink"] }); } From cbd3b7ce0714588eaa24fb3b95a1850d8a5acce1 Mon Sep 17 00:00:00 2001 From: Anusha Kolan Date: Thu, 16 Oct 2025 15:52:42 -0700 Subject: [PATCH 68/79] [MCP] Added the update-record tool implementation. (#2888) ## Why make this change? This PR: - Adds the tool implementation for the `update-record` MCP tool in `Azure.DataApiBuilder.Mcp` to support updating records via MCP using keys and field payloads with full validation and authorization. - Adds a README to guide contributors on how to test MCP tools using the MCP Inspector. - Improves predicate construction logic in `SqlUpdateQueryStructure.cs` to support mapped column fields. ## What is this change? - Adds `docs/Testing/mcp-inspector-testing.md` with step-by-step instructions for running MCP Inspector locally, bypassing TLS, and connecting to MCP endpoints. - Introduces `UpdateRecordTool.cs` under BuiltInTools, enabling record updates via MCP with full validation, authorization, and mutation engine integration. - Updates `SqlUpdateQueryStructure.cs` to construct predicates using parameterized values for safer and more flexible SQL generation. Exceptions are thrown when: 1. Entity is null or empty. 2. Keys and Fields are not JSON objects. 3. Failed to parse Keys and Fields. 4. 0 keys provided. 5. 0 fields provided. 6. Key value is null or empty. 7. Field value is null or empty. Error: 1. PermissionDenied - No permissions to execute. 2. InvalidArguments - No arguments provided. 3. InvalidArguments - Some missing arguments. 4. EntityNotFound - Entity not defined in the configuration. 5. InvalidArguments - No entity found with given key. 6. UnexpectedError - Any other unexpected error.
## How was this tested? - [x] Manual testing via MCP Inspector - [ ] Integration Tests - [ ] Unit Tests These scenarios were manually tested using MCP Inspector, as automated tests are not yet implemented. ### Valid Cases ### 1. Successful Update - Provided valid entity, keys, and fields. - Verified that values were correctly updated in the database. - Confirmed that the response includes all the column values similar to REST API calls. 2. Permission Enforcement - Modified role permissions and verified that access control is enforced correctly. 3. Composite Key Handling - Tested entities requiring multiple keys. - Verified correct update behavior and response formatting. 4. Alias Mapping - Used mapped field name (e.g., order instead of position) and confirmed that only mapped names are accepted. ### Failure Cases ### 1. Empty Keys - Provided an empty keys object. - Received appropriate error indicating missing key values. 2. Empty Fields - Provided an empty fields object. - Received error indicating that at least one field is required. 3. Invalid Entity Name - Used a non-existent entity name. - Received EntityNotFound error. 4. Empty Entity String - Provided an empty string for entity. - Received InvalidArguments error. 5. Entity Not Configured - Used an entity not present in the configuration (e.g., Todos). - Received EntityNotFound error. 6. Null Entity - Provided null for the entity field. - Received InvalidArguments error. 7. Valid Key Format but Not in DB - Used correct key structure but no matching record exists. - Received error indicating no entity found with the given key. 8. Invalid or Null Key Values - Provided null or malformed key values. - Received InvalidArguments error. 9. Unauthorized Role Context - Removed or misconfigured role header. - Received PermissionDenied error.
## Sample Request(s) `{ "id": "00000000-0000-0000-0000-000000000001" }` `{ "title": "New Title", "order": 7 }` --- docs/Testing/mcp-inspector-testing.md | 23 + .../BuiltInTools/UpdateRecordTool.cs | 472 ++++++++++++++++++ .../SqlUpdateQueryStructure.cs | 4 +- 3 files changed, 497 insertions(+), 2 deletions(-) create mode 100644 docs/Testing/mcp-inspector-testing.md create mode 100644 src/Azure.DataApiBuilder.Mcp/BuiltInTools/UpdateRecordTool.cs diff --git a/docs/Testing/mcp-inspector-testing.md b/docs/Testing/mcp-inspector-testing.md new file mode 100644 index 0000000000..d6942311ea --- /dev/null +++ b/docs/Testing/mcp-inspector-testing.md @@ -0,0 +1,23 @@ + +# MCP Inspector Testing Guide + +Steps to run and test MCP tools using the https://www.npmjs.com/package/@modelcontextprotocol/inspector. +### Pre-requisite: +- Node.js must be installed on your system to run this code. +- Ensure that the DAB MCP server is running before attempting to connect with the inspector tool. + +### 1. **Install MCP Inspector** +npx @modelcontextprotocol/inspector + +### 2. ** Bypass TLS Verification (For Local Testing)** +set NODE_TLS_REJECT_UNAUTHORIZED=0 + +### 3. ** Open the inspector with pre-filled token.** +http://localhost:6274/?MCP_PROXY_AUTH_TOKEN= + +### 4. ** How to use the tool..** +- Set the transport type "Streamable HTTP". +- Set the URL "http://localhost:5000/mcp" and hit connect. +- Select a Tool from the dropdown list. +- Fill in the Parameters required for the tool. +- Click "Run" to execute the tool and view the response. \ No newline at end of file diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/UpdateRecordTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/UpdateRecordTool.cs new file mode 100644 index 0000000000..195a758454 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/UpdateRecordTool.cs @@ -0,0 +1,472 @@ +// Copyright (c) Microsoft. +// Licensed under the MIT License. 
+ +using System.Text.Json; +using Azure.DataApiBuilder.Auth; +using Azure.DataApiBuilder.Config.DatabasePrimitives; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Core.Authorization; +using Azure.DataApiBuilder.Core.Configurations; +using Azure.DataApiBuilder.Core.Models; +using Azure.DataApiBuilder.Core.Resolvers; +using Azure.DataApiBuilder.Core.Resolvers.Factories; +using Azure.DataApiBuilder.Core.Services; +using Azure.DataApiBuilder.Core.Services.MetadataProviders; +using Azure.DataApiBuilder.Mcp.Model; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using ModelContextProtocol.Protocol; +using static Azure.DataApiBuilder.Mcp.Model.McpEnums; + +namespace Azure.DataApiBuilder.Mcp.BuiltInTools +{ + /// + /// Updates an existing record in the specified entity using provided keys (PKs) and fields (new values). + /// Input schema: + /// { + /// "entity": "EntityName", + /// "keys": { "Id": 42, "TenantId": "ABC" }, + /// "fields": { "Status": "Closed", "Comment": "Done" } + /// } + /// + public class UpdateRecordTool : IMcpTool + { + /// + /// Gets the type of the tool, which is BuiltIn for this implementation. + /// + public ToolType ToolType { get; } = ToolType.BuiltIn; + + /// + /// Gets the metadata for the update_record tool, including its name, description, and input schema. + /// + public Tool GetToolMetadata() + { + return new Tool + { + Name = "update_record", + Description = "Updates an existing record in the specified entity. 
Requires 'keys' to locate the record and 'fields' to specify new values.", + InputSchema = JsonSerializer.Deserialize( + @"{ + ""type"": ""object"", + ""properties"": { + ""entity"": { + ""type"": ""string"", + ""description"": ""The name of the entity"" + }, + ""keys"": { + ""type"": ""object"", + ""description"": ""Key fields and their values to identify the record"" + }, + ""fields"": { + ""type"": ""object"", + ""description"": ""Fields and their new values to update"" + } + }, + ""required"": [""entity"", ""keys"", ""fields""] + }" + ) + }; + } + + /// + /// Executes the update_record tool, updating an existing record in the specified entity using provided keys and fields. + /// + /// The JSON arguments containing entity, keys, and fields. + /// The service provider for resolving dependencies. + /// A token to cancel the operation. + /// A representing the outcome of the update operation. + public async Task ExecuteAsync( + JsonDocument? arguments, + IServiceProvider serviceProvider, + CancellationToken cancellationToken = default) + { + ILogger? logger = serviceProvider.GetService>(); + + // 1) Resolve required services & configuration + + RuntimeConfigProvider runtimeConfigProvider = serviceProvider.GetRequiredService(); + RuntimeConfig config = runtimeConfigProvider.GetConfig(); + + // 2)Check if the tool is enabled in configuration before proceeding. 
+ if (config.McpDmlTools?.UpdateRecord != true) + { + return BuildErrorResult( + "ToolDisabled", + "The update_record tool is disabled in the configuration.", + logger); + } + + try + { + + cancellationToken.ThrowIfCancellationRequested(); + + // 3) Parsing & basic argument validation (entity, keys, fields) + if (arguments is null) + { + return BuildErrorResult("InvalidArguments", "No arguments provided.", logger); + } + + if (!TryParseArguments(arguments.RootElement, out string entityName, out Dictionary keys, out Dictionary fields, out string parseError)) + { + return BuildErrorResult("InvalidArguments", parseError, logger); + } + + IMetadataProviderFactory metadataProviderFactory = serviceProvider.GetRequiredService(); + IMutationEngineFactory mutationEngineFactory = serviceProvider.GetRequiredService(); + + // 4) Resolve metadata for entity existence check + string dataSourceName; + ISqlMetadataProvider sqlMetadataProvider; + + try + { + dataSourceName = config.GetDataSourceNameFromEntityName(entityName); + sqlMetadataProvider = metadataProviderFactory.GetMetadataProvider(dataSourceName); + } + catch (Exception) + { + return BuildErrorResult("EntityNotFound", $"Entity '{entityName}' is not defined in the configuration.", logger); + } + + if (!sqlMetadataProvider.EntityToDatabaseObject.TryGetValue(entityName, out DatabaseObject? dbObject) || dbObject is null) + { + return BuildErrorResult("EntityNotFound", $"Entity '{entityName}' is not defined in the configuration.", logger); + } + + // 5) Authorization after we have a known entity + IHttpContextAccessor httpContextAccessor = serviceProvider.GetRequiredService(); + HttpContext? 
httpContext = httpContextAccessor.HttpContext; + IAuthorizationResolver authResolver = serviceProvider.GetRequiredService(); + + if (httpContext is null || !authResolver.IsValidRoleContext(httpContext)) + { + return BuildErrorResult("PermissionDenied", "Permission denied: unable to resolve a valid role context for update operation.", logger); + } + + if (!TryResolveAuthorizedRoleHasPermission(httpContext, authResolver, entityName, out string? effectiveRole, out string authError)) + { + return BuildErrorResult("PermissionDenied", $"Permission denied: {authError}", logger); + } + + // 6) Build and validate Upsert (UpdateIncremental) context + JsonElement upsertPayloadRoot = RequestValidator.ValidateAndParseRequestBody(JsonSerializer.Serialize(fields)); + RequestValidator requestValidator = new(metadataProviderFactory, runtimeConfigProvider); + + UpsertRequestContext context = new( + entityName: entityName, + dbo: dbObject, + insertPayloadRoot: upsertPayloadRoot, + operationType: EntityActionOperation.UpdateIncremental); + + foreach (KeyValuePair kvp in keys) + { + if (kvp.Value is null) + { + return BuildErrorResult("InvalidArguments", $"Primary key value for '{kvp.Key}' cannot be null.", logger); + } + + context.PrimaryKeyValuePairs[kvp.Key] = kvp.Value; + } + + if (context.DatabaseObject.SourceType is EntitySourceType.Table) + { + requestValidator.ValidateUpsertRequestContext(context); + } + + requestValidator.ValidatePrimaryKey(context); + + // 7) Execute + DatabaseType dbType = config.GetDataSourceFromDataSourceName(dataSourceName).DatabaseType; + IMutationEngine mutationEngine = mutationEngineFactory.GetMutationEngine(dbType); + + IActionResult? mutationResult = null; + try + { + mutationResult = await mutationEngine.ExecuteAsync(context).ConfigureAwait(false); + } + catch (Exception ex) + { + string errorMsg = ex.Message ?? 
string.Empty; + + if (errorMsg.Contains("No Update could be performed, record not found", StringComparison.OrdinalIgnoreCase)) + { + return BuildErrorResult( + "InvalidArguments", + "No record found with the given key.", + logger); + } + else + { + // Unexpected error, rethrow to be handled by outer catch + throw; + } + } + + cancellationToken.ThrowIfCancellationRequested(); + + // 8) Normalize response (success or engine error payload) + string rawPayloadJson = ExtractResultJson(mutationResult); + using JsonDocument resultDoc = JsonDocument.Parse(rawPayloadJson); + JsonElement root = resultDoc.RootElement; + + return BuildSuccessResult( + entityName: entityName, + engineRootElement: root.Clone(), + logger: logger); + } + catch (OperationCanceledException) + { + return BuildErrorResult("OperationCanceled", "The update operation was canceled.", logger); + } + catch (ArgumentException argEx) + { + return BuildErrorResult("InvalidArguments", argEx.Message, logger); + } + catch (Exception ex) + { + ILogger? innerLogger = serviceProvider.GetService>(); + innerLogger?.LogError(ex, "Unexpected error in UpdateRecordTool."); + + return BuildErrorResult( + "UnexpectedError", + ex.Message ?? "An unexpected error occurred during the update operation.", + logger); + } + } + + #region Parsing & Authorization + + private static bool TryParseArguments( + JsonElement root, + out string entityName, + out Dictionary keys, + out Dictionary fields, + out string error) + { + entityName = string.Empty; + keys = new Dictionary(); + fields = new Dictionary(); + error = string.Empty; + + if (!root.TryGetProperty("entity", out JsonElement entityEl) || + !root.TryGetProperty("keys", out JsonElement keysEl) || + !root.TryGetProperty("fields", out JsonElement fieldsEl)) + { + error = "Missing required arguments 'entity', 'keys', or 'fields'."; + return false; + } + + // Parse and validate required arguments: entity, keys, fields + entityName = entityEl.GetString() ?? 
string.Empty; + if (string.IsNullOrWhiteSpace(entityName)) + { + throw new ArgumentException("Entity is required", nameof(entityName)); + } + + if (keysEl.ValueKind != JsonValueKind.Object || fieldsEl.ValueKind != JsonValueKind.Object) + { + throw new ArgumentException("'keys' and 'fields' must be JSON objects."); + } + + try + { + keys = JsonSerializer.Deserialize>(keysEl.GetRawText()) ?? new Dictionary(); + fields = JsonSerializer.Deserialize>(fieldsEl.GetRawText()) ?? new Dictionary(); + } + catch (Exception ex) + { + throw new ArgumentException("Failed to parse 'keys' or 'fields'", ex); + } + + if (keys.Count == 0) + { + throw new ArgumentException("Keys are required to update an entity"); + } + + if (fields.Count == 0) + { + throw new ArgumentException("At least one field must be provided to update an entity", nameof(fields)); + } + + foreach (KeyValuePair kv in keys) + { + if (kv.Value is null || (kv.Value is string str && string.IsNullOrWhiteSpace(str))) + { + throw new ArgumentException($"Key value for '{kv.Key}' cannot be null or empty."); + } + } + + return true; + } + + private static bool TryResolveAuthorizedRoleHasPermission( + HttpContext httpContext, + IAuthorizationResolver authorizationResolver, + string entityName, + out string? 
effectiveRole, + out string error) + { + effectiveRole = null; + error = string.Empty; + + string roleHeader = httpContext.Request.Headers[AuthorizationResolver.CLIENT_ROLE_HEADER].ToString(); + + if (string.IsNullOrWhiteSpace(roleHeader)) + { + error = "Client role header is missing or empty."; + return false; + } + + string[] roles = roleHeader + .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + + if (roles.Length == 0) + { + error = "Client role header is missing or empty."; + return false; + } + + foreach (string role in roles) + { + bool allowed = authorizationResolver.AreRoleAndOperationDefinedForEntity( + entityName, role, EntityActionOperation.Update); + + if (allowed) + { + effectiveRole = role; + return true; + } + } + + error = "You do not have permission to update records for this entity."; + return false; + } + + #endregion + + #region Response Builders & Utilities + + private static CallToolResult BuildSuccessResult( + string entityName, + JsonElement engineRootElement, + ILogger? 
logger) + { + // Extract only requested keys and updated fields from engineRootElement + Dictionary filteredResult = new(); + + // Navigate to "value" array in the engine result + if (engineRootElement.TryGetProperty("value", out JsonElement valueArray) && + valueArray.ValueKind == JsonValueKind.Array && + valueArray.GetArrayLength() > 0) + { + JsonElement firstItem = valueArray[0]; + + // Include all properties from the result + foreach (JsonProperty prop in firstItem.EnumerateObject()) + { + filteredResult[prop.Name] = GetJsonValue(prop.Value); + } + } + + // Build normalized response + Dictionary normalized = new() + { + ["status"] = "success", + ["result"] = filteredResult + }; + + string output = JsonSerializer.Serialize(normalized, new JsonSerializerOptions { WriteIndented = true }); + + logger?.LogInformation("UpdateRecordTool success for entity {Entity}.", entityName); + + return new CallToolResult + { + Content = new List + { + new TextContentBlock { Type = "text", Text = output } + } + }; + } + + /// + /// Converts JsonElement to .NET object dynamically. + /// + private static object? GetJsonValue(JsonElement element) + { + return element.ValueKind switch + { + JsonValueKind.String => element.GetString(), + JsonValueKind.Number => element.TryGetInt64(out long l) ? l : element.GetDouble(), + JsonValueKind.True => true, + JsonValueKind.False => false, + JsonValueKind.Null => null, + _ => element.GetRawText() // fallback for arrays/objects + }; + } + + private static CallToolResult BuildErrorResult( + string errorType, + string message, + ILogger? 
logger) + { + Dictionary errorObj = new() + { + ["status"] = "error", + ["error"] = new Dictionary + { + ["type"] = errorType, + ["message"] = message + } + }; + + string output = JsonSerializer.Serialize(errorObj); + + logger?.LogWarning("UpdateRecordTool error {ErrorType}: {Message}", errorType, message); + + return new CallToolResult + { + Content = + [ + new TextContentBlock { Type = "text", Text = output } + ], + IsError = true + }; + } + + /// + /// Extracts a JSON string from a typical IActionResult. + /// Falls back to "{}" for unsupported/empty cases to avoid leaking internals. + /// + private static string ExtractResultJson(IActionResult? result) + { + switch (result) + { + case ObjectResult obj: + if (obj.Value is JsonElement je) + { + return je.GetRawText(); + } + + if (obj.Value is JsonDocument jd) + { + return jd.RootElement.GetRawText(); + } + + return JsonSerializer.Serialize(obj.Value ?? new object()); + + case ContentResult content: + return string.IsNullOrWhiteSpace(content.Content) ? 
"{}" : content.Content; + + default: + return "{}"; + } + } + + #endregion + } +} diff --git a/src/Core/Resolvers/Sql Query Structures/SqlUpdateQueryStructure.cs b/src/Core/Resolvers/Sql Query Structures/SqlUpdateQueryStructure.cs index c73e72c230..ecbbf3fc5c 100644 --- a/src/Core/Resolvers/Sql Query Structures/SqlUpdateQueryStructure.cs +++ b/src/Core/Resolvers/Sql Query Structures/SqlUpdateQueryStructure.cs @@ -185,9 +185,9 @@ private Predicate CreatePredicateForParam(KeyValuePair param) { predicate = new( new PredicateOperand( - new Column(tableSchema: DatabaseObject.SchemaName, tableName: DatabaseObject.Name, param.Key)), + new Column(tableSchema: DatabaseObject.SchemaName, tableName: DatabaseObject.Name, backingColumn)), PredicateOperation.Equal, - new PredicateOperand($"{MakeDbConnectionParam(GetParamAsSystemType(param.Value.ToString()!, param.Key, GetColumnSystemType(param.Key)), param.Key)}")); + new PredicateOperand($"{MakeDbConnectionParam(GetParamAsSystemType(param.Value.ToString()!, backingColumn, GetColumnSystemType(backingColumn)), backingColumn)}")); } return predicate; From a090e37cada4712e9356b300e5e95e3faa4153f5 Mon Sep 17 00:00:00 2001 From: souvikghosh04 Date: Tue, 21 Oct 2025 02:15:12 +0530 Subject: [PATCH 69/79] [MCP] Fixes and Refactoring execute_entity (#2902) ## Why make this change? ### Closes on - `execute_entity` https://github.com/Azure/data-api-builder/issues/2831 ## What is this change? This PR implements built-int tool execute_entity as part of built-in tools to support execution of stored procedures or functions. - Performs execute operation on a stored procedure or function entity - Supports both querying and mutation - `entity` (required) name and `parameters` (optional based on entity parameters signature) should be specified (see sample below) - Operation is performed based on permissions as configured in dab-config - Success or Failure message response is generated on execution of the delete operation ## How was this tested? 
- [ ] Manual functional test using MCP inpector/HTTP client - [ ] Integration Tests - [ ] Unit Tests ## Sample Request(s) Execute stored procedure to read queries- ``` POST http://localhost:5000/mcp { "jsonrpc": "2.0", "method": "tools/call", "params": { "name": "execute_entity", "arguments": { "entity": "GetBook", "parameters": { "id": 1 } } } } ``` Execute stored procedure without parameters- ``` POST http://localhost:5000/mcp { "jsonrpc": "2.0", "method": "tools/call", "params": { "name": "execute_entity", "arguments": { "entity": "GetBooks" } } } ``` --------- Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- .../BuiltInTools/ExecuteEntityTool.cs | 453 ++++++++++++++++++ 1 file changed, 453 insertions(+) create mode 100644 src/Azure.DataApiBuilder.Mcp/BuiltInTools/ExecuteEntityTool.cs diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/ExecuteEntityTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/ExecuteEntityTool.cs new file mode 100644 index 0000000000..92f60f4703 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/ExecuteEntityTool.cs @@ -0,0 +1,453 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +using System.Data.Common; +using System.Text.Json; +using Azure.DataApiBuilder.Auth; +using Azure.DataApiBuilder.Config.DatabasePrimitives; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Core.Configurations; +using Azure.DataApiBuilder.Core.Models; +using Azure.DataApiBuilder.Core.Resolvers; +using Azure.DataApiBuilder.Core.Resolvers.Factories; +using Azure.DataApiBuilder.Core.Services; +using Azure.DataApiBuilder.Core.Services.MetadataProviders; +using Azure.DataApiBuilder.Mcp.Model; +using Azure.DataApiBuilder.Mcp.Utils; +using Azure.DataApiBuilder.Service.Exceptions; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using ModelContextProtocol.Protocol; +using static Azure.DataApiBuilder.Mcp.Model.McpEnums; + +namespace Azure.DataApiBuilder.Mcp.BuiltInTools +{ + /// + /// Tool to execute a stored procedure or function exposed as a DAB entity. + /// Behaves most like the GraphQL flow with entity permissions enforced. + /// + public class ExecuteEntityTool : IMcpTool + { + /// + /// Gets the type of the tool, which is BuiltIn for this implementation. + /// + public ToolType ToolType { get; } = ToolType.BuiltIn; + + /// + /// Gets the metadata for the execute-entity tool, including its name, description, and input schema. + /// + public Tool GetToolMetadata() + { + return new Tool + { + Name = "execute_entity", + Description = "Executes a stored procedure or function, returns the results (if any)", + InputSchema = JsonSerializer.Deserialize( + @"{ + ""type"": ""object"", + ""properties"": { + ""entity"": { + ""type"": ""string"", + ""description"": ""The entity name of the procedure or function to execute. Must match a stored-procedure entity as configured in dab-config. 
Required."" + }, + ""parameters"": { + ""type"": ""object"", + ""description"": ""A dictionary of parameter names and values to pass to the procedure. Parameters must match those defined in dab-config. Optional if no parameters."" + } + }, + ""required"": [""entity""] + }" + ) + }; + } + + /// + /// Executes a stored procedure or function, returns the results (if any). + /// + public async Task ExecuteAsync( + JsonDocument? arguments, + IServiceProvider serviceProvider, + CancellationToken cancellationToken = default) + { + ILogger? logger = serviceProvider.GetService>(); + + try + { + // Cancellation check at the start + cancellationToken.ThrowIfCancellationRequested(); + + // 1) Resolve required services & configuration + RuntimeConfigProvider runtimeConfigProvider = serviceProvider.GetRequiredService(); + RuntimeConfig config = runtimeConfigProvider.GetConfig(); + + // 2) Check if the tool is enabled in configuration before proceeding + if (config.McpDmlTools?.ExecuteEntity != true) + { + return McpResponseBuilder.BuildErrorResult( + "ToolDisabled", + $"The {this.GetToolMetadata().Name} tool is disabled in the configuration.", + logger); + } + + // 3) Parsing & basic argument validation + if (arguments is null) + { + return McpResponseBuilder.BuildErrorResult("InvalidArguments", "No arguments provided.", logger); + } + + if (!TryParseExecuteArguments(arguments.RootElement, out string entity, out Dictionary parameters, out string parseError)) + { + return McpResponseBuilder.BuildErrorResult("InvalidArguments", parseError, logger); + } + + // Entity is required + if (string.IsNullOrWhiteSpace(entity)) + { + return McpResponseBuilder.BuildErrorResult("InvalidArguments", "Entity is required", logger); + } + + IMetadataProviderFactory metadataProviderFactory = serviceProvider.GetRequiredService(); + IQueryEngineFactory queryEngineFactory = serviceProvider.GetRequiredService(); + + // 4) Validate entity exists and is a stored procedure + if 
(!config.Entities.TryGetValue(entity, out Entity? entityConfig)) + { + return McpResponseBuilder.BuildErrorResult("EntityNotFound", $"Entity '{entity}' not found in configuration.", logger); + } + + if (entityConfig.Source.Type != EntitySourceType.StoredProcedure) + { + return McpResponseBuilder.BuildErrorResult("InvalidEntity", $"Entity {entity} cannot be executed.", logger); + } + + // 5) Resolve metadata + string dataSourceName; + ISqlMetadataProvider sqlMetadataProvider; + + try + { + dataSourceName = config.GetDataSourceNameFromEntityName(entity); + sqlMetadataProvider = metadataProviderFactory.GetMetadataProvider(dataSourceName); + } + catch (Exception) + { + return McpResponseBuilder.BuildErrorResult("EntityNotFound", $"Failed to resolve entity metadata for '{entity}'.", logger); + } + + if (!sqlMetadataProvider.EntityToDatabaseObject.TryGetValue(entity, out DatabaseObject? dbObject) || dbObject is null) + { + return McpResponseBuilder.BuildErrorResult("EntityNotFound", $"Failed to resolve database object for entity '{entity}'.", logger); + } + + // 6) Authorization - Never bypass permissions + IAuthorizationResolver authResolver = serviceProvider.GetRequiredService(); + IHttpContextAccessor httpContextAccessor = serviceProvider.GetRequiredService(); + HttpContext? httpContext = httpContextAccessor.HttpContext; + + if (!McpAuthorizationHelper.ValidateRoleContext(httpContext, authResolver, out string roleError)) + { + return McpResponseBuilder.BuildErrorResult("PermissionDenied", roleError, logger); + } + + if (!McpAuthorizationHelper.TryResolveAuthorizedRole( + httpContext!, + authResolver, + entity, + EntityActionOperation.Execute, + out string? 
effectiveRole, + out string authError)) + { + return McpResponseBuilder.BuildErrorResult("PermissionDenied", authError, logger); + } + + // 7) Validate parameters against metadata + if (parameters != null && entityConfig.Source.Parameters != null) + { + // Validate all provided parameters exist in metadata + foreach (KeyValuePair param in parameters) + { + if (!entityConfig.Source.Parameters.ContainsKey(param.Key)) + { + return McpResponseBuilder.BuildErrorResult("InvalidArguments", $"Invalid parameter: {param.Key}", logger); + } + } + } + + // 8) Build request payload + JsonElement? requestPayloadRoot = null; + + if (parameters?.Count > 0) + { + string jsonPayload = JsonSerializer.Serialize(parameters); + using JsonDocument doc = JsonDocument.Parse(jsonPayload); + requestPayloadRoot = doc.RootElement.Clone(); + } + + // 9) Build stored procedure execution context + StoredProcedureRequestContext context = new( + entityName: entity, + dbo: dbObject, + requestPayloadRoot: requestPayloadRoot, + operationType: EntityActionOperation.Execute); + + // First, add user-provided parameters to the context + if (requestPayloadRoot != null) + { + foreach (JsonProperty property in requestPayloadRoot.Value.EnumerateObject()) + { + context.FieldValuePairsInBody[property.Name] = GetParameterValue(property.Value); + } + } + + // Then, add default parameters from configuration (only if not already provided by user) + if ((parameters == null || parameters.Count == 0) && entityConfig.Source.Parameters != null) + { + foreach (KeyValuePair param in entityConfig.Source.Parameters) + { + if (!context.FieldValuePairsInBody.ContainsKey(param.Key)) + { + context.FieldValuePairsInBody[param.Key] = param.Value; + } + } + } + + // Populate resolved parameters for stored procedure execution + context.PopulateResolvedParameters(); + + // 10) Execute stored procedure + DatabaseType dbType = config.GetDataSourceFromDataSourceName(dataSourceName).DatabaseType; + IQueryEngine queryEngine = 
queryEngineFactory.GetQueryEngine(dbType); + + IActionResult? queryResult = null; + + try + { + // Cancellation check before executing + cancellationToken.ThrowIfCancellationRequested(); + queryResult = await queryEngine.ExecuteAsync(context, dataSourceName).ConfigureAwait(false); + } + catch (DataApiBuilderException dabEx) + { + // Handle specific DAB exceptions + logger?.LogError(dabEx, "Data API builder error executing stored procedure {StoredProcedure}", entity); + + string message = dabEx.Message; + + // Check for specific error patterns + if (message.Contains("permission", StringComparison.OrdinalIgnoreCase) || + message.Contains("authorization", StringComparison.OrdinalIgnoreCase)) + { + return McpResponseBuilder.BuildErrorResult( + "PermissionDenied", + "You do not have permission to execute this stored procedure.", + logger); + } + else if (message.Contains("invalid", StringComparison.OrdinalIgnoreCase) && + message.Contains("type", StringComparison.OrdinalIgnoreCase)) + { + return McpResponseBuilder.BuildErrorResult( + "InvalidArguments", + "Invalid data type for one or more parameters.", + logger); + } + + // For any other DAB exceptions, return the message as-is + return McpResponseBuilder.BuildErrorResult( + "DataApiBuilderError", + dabEx.Message, + logger); + } + catch (SqlException sqlEx) + { + // Handle SQL Server specific errors + logger?.LogError(sqlEx, "SQL Server error executing stored procedure {StoredProcedure}", entity); + string errorMessage = sqlEx.Number switch + { + 2812 => $"Stored procedure '{entityConfig.Source.Object}' not found in the database.", + 8144 => $"Stored procedure '{entityConfig.Source.Object}' has too many parameters specified.", + 201 => $"Stored procedure '{entityConfig.Source.Object}' expects parameter(s) that were not supplied.", + 245 => "Type conversion failed when processing parameters.", + 229 or 262 => $"Permission denied to execute stored procedure '{entityConfig.Source.Object}'.", + _ => $"Database error: 
{sqlEx.Message}" + }; + return McpResponseBuilder.BuildErrorResult("DatabaseError", errorMessage, logger); + } + catch (DbException dbEx) + { + // Handle generic database exceptions (works for PostgreSQL, MySQL, etc.) + logger?.LogError(dbEx, "Database error executing stored procedure {StoredProcedure}", entity); + return McpResponseBuilder.BuildErrorResult("DatabaseError", $"Database error: {dbEx.Message}", logger); + } + catch (InvalidOperationException ioEx) when (ioEx.Message.Contains("connection", StringComparison.OrdinalIgnoreCase)) + { + // Handle connection-related issues + logger?.LogError(ioEx, "Database connection error"); + return McpResponseBuilder.BuildErrorResult("ConnectionError", "Failed to connect to the database.", logger); + } + catch (TimeoutException timeoutEx) + { + // Handle query timeout + logger?.LogError(timeoutEx, "Stored procedure execution timeout for {StoredProcedure}", entity); + return McpResponseBuilder.BuildErrorResult("TimeoutError", "The stored procedure execution timed out.", logger); + } + catch (Exception ex) + { + // Generic database/execution errors + logger?.LogError(ex, "Unexpected error executing stored procedure {StoredProcedure}", entity); + return McpResponseBuilder.BuildErrorResult("DatabaseError", "An error occurred while executing the stored procedure.", logger); + } + + // 11) Build response with execution result + return BuildExecuteSuccessResponse(entity, parameters, queryResult, logger); + } + catch (OperationCanceledException) + { + return McpResponseBuilder.BuildErrorResult("OperationCanceled", "The execute operation was canceled.", logger); + } + catch (ArgumentException argEx) + { + return McpResponseBuilder.BuildErrorResult("InvalidArguments", argEx.Message, logger); + } + catch (Exception ex) + { + logger?.LogError(ex, "Unexpected error in ExecuteEntityTool."); + return McpResponseBuilder.BuildErrorResult( + "UnexpectedError", + "An unexpected error occurred during the execute operation.", + logger); + } 
+ } + + /// + /// Parses the execute arguments from the JSON input. + /// + private static bool TryParseExecuteArguments( + JsonElement rootElement, + out string entity, + out Dictionary parameters, + out string parseError) + { + entity = string.Empty; + parameters = new Dictionary(); + parseError = string.Empty; + + if (rootElement.ValueKind != JsonValueKind.Object) + { + parseError = "Arguments must be an object"; + return false; + } + + // Extract entity name (required) + if (!rootElement.TryGetProperty("entity", out JsonElement entityElement) || + entityElement.ValueKind != JsonValueKind.String) + { + parseError = "Missing or invalid 'entity' parameter"; + return false; + } + + entity = entityElement.GetString() ?? string.Empty; + + // Extract parameters if provided (optional) + if (rootElement.TryGetProperty("parameters", out JsonElement parametersElement) && + parametersElement.ValueKind == JsonValueKind.Object) + { + foreach (JsonProperty property in parametersElement.EnumerateObject()) + { + parameters[property.Name] = GetParameterValue(property.Value); + } + } + + return true; + } + + /// + /// Converts a JSON element to its appropriate CLR type matching GraphQL data types. + /// + private static object? GetParameterValue(JsonElement element) + { + return element.ValueKind switch + { + JsonValueKind.String => element.GetString(), + JsonValueKind.Number => + element.TryGetInt64(out long longValue) ? longValue : + element.TryGetDecimal(out decimal decimalValue) ? decimalValue : + element.GetDouble(), + JsonValueKind.True => true, + JsonValueKind.False => false, + JsonValueKind.Null => null, + _ => element.ToString() + }; + } + + /// + /// Builds a successful response for the execute operation. + /// + private static CallToolResult BuildExecuteSuccessResponse( + string entityName, + Dictionary? parameters, + IActionResult? queryResult, + ILogger? 
logger) + { + Dictionary responseData = new() + { + ["entity"] = entityName, + ["message"] = "Stored procedure executed successfully" + }; + + // Include parameters if any were provided + if (parameters?.Count > 0) + { + responseData["parameters"] = parameters; + } + + // Handle different result types + if (queryResult is OkObjectResult okResult && okResult.Value != null) + { + // Extract the actual data from the action result + if (okResult.Value is JsonDocument jsonDoc) + { + JsonElement root = jsonDoc.RootElement; + responseData["value"] = root.ValueKind == JsonValueKind.Array ? root : JsonSerializer.SerializeToElement(new[] { root }); + } + else if (okResult.Value is JsonElement jsonElement) + { + responseData["value"] = jsonElement.ValueKind == JsonValueKind.Array ? jsonElement : JsonSerializer.SerializeToElement(new[] { jsonElement }); + } + else + { + // Serialize the value directly + JsonElement serialized = JsonSerializer.SerializeToElement(okResult.Value); + responseData["value"] = serialized; + } + } + else if (queryResult is BadRequestObjectResult badRequest) + { + return McpResponseBuilder.BuildErrorResult( + "BadRequest", + badRequest.Value?.ToString() ?? "Bad request", + logger); + } + else if (queryResult is UnauthorizedObjectResult) + { + return McpResponseBuilder.BuildErrorResult( + "PermissionDenied", + "You do not have permission to execute this entity", + logger); + } + else + { + // Empty or unknown result + responseData["value"] = JsonSerializer.SerializeToElement(Array.Empty()); + } + + return McpResponseBuilder.BuildSuccessResult( + responseData, + logger, + $"ExecuteEntityTool success for entity {entityName}." + ); + } + } +} From 505e0c2abcac02e010fcec3dc1c811558ef032b8 Mon Sep 17 00:00:00 2001 From: aaronburtle <93220300+aaronburtle@users.noreply.github.com> Date: Thu, 23 Oct 2025 02:52:41 +0000 Subject: [PATCH 70/79] [MCP] Add the Create_Record Built in tool (#2906) ## Why make this change? 
Closes https://github.com/Azure/data-api-builder/issues/2828 By adding the `Create_Record` tool to the MCP endpoint. ## What is this change? Add a built in tool, `Create_Record` which uses the `rest` infrastructure to form and validate the query needed to complete a create action. Behavior is at parity with the rest equivalent create operation. ## How was this tested? Manually tested Insomnia using post. image image image ## Sample Request(s) ``` { "jsonrpc": "2.0", "id": 2, "method": "tools/call", "params": { "name": "create_record", "arguments": { "entity": "Broker", "data": { "ID Number": 3, "First Name": "Michael", "Last Name": "Jordan" } } } } ``` --------- Co-authored-by: Aniruddh Munde --- .../BuiltInTools/CreateRecordTool.cs | 228 ++++++++++++++++-- 1 file changed, 208 insertions(+), 20 deletions(-) diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/CreateRecordTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/CreateRecordTool.cs index ed5425c515..6fbe08879b 100644 --- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/CreateRecordTool.cs +++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/CreateRecordTool.cs @@ -2,7 +2,21 @@ // Licensed under the MIT License. 
using System.Text.Json; +using Azure.DataApiBuilder.Auth; +using Azure.DataApiBuilder.Config.DatabasePrimitives; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Core.Authorization; +using Azure.DataApiBuilder.Core.Configurations; +using Azure.DataApiBuilder.Core.Models; +using Azure.DataApiBuilder.Core.Resolvers; +using Azure.DataApiBuilder.Core.Resolvers.Factories; +using Azure.DataApiBuilder.Core.Services; +using Azure.DataApiBuilder.Core.Services.MetadataProviders; using Azure.DataApiBuilder.Mcp.Model; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; using ModelContextProtocol.Protocol; using static Azure.DataApiBuilder.Mcp.Model.McpEnums; @@ -16,7 +30,7 @@ public Tool GetToolMetadata() { return new Tool { - Name = "create-record", + Name = "create_record", Description = "Creates a new record in the specified entity.", InputSchema = JsonSerializer.Deserialize( @"{ @@ -37,51 +51,225 @@ public Tool GetToolMetadata() }; } - public Task ExecuteAsync( + public async Task ExecuteAsync( JsonDocument? arguments, IServiceProvider serviceProvider, CancellationToken cancellationToken = default) { + ILogger? logger = serviceProvider.GetService>(); if (arguments == null) { - return Task.FromResult(new CallToolResult - { - Content = [new TextContentBlock { Type = "text", Text = "Error: No arguments provided" }] - }); + return Utils.McpResponseBuilder.BuildErrorResult("Invalid Arguments", "No arguments provided", logger); + } + + RuntimeConfigProvider runtimeConfigProvider = serviceProvider.GetRequiredService(); + if (!runtimeConfigProvider.TryGetConfig(out RuntimeConfig? 
runtimeConfig)) + { + return Utils.McpResponseBuilder.BuildErrorResult("Invalid Configuration", "Runtime configuration not available", logger); + } + + if (runtimeConfig.McpDmlTools?.CreateRecord != true) + { + return Utils.McpResponseBuilder.BuildErrorResult( + "ToolDisabled", + "The create_record tool is disabled in the configuration.", + logger); } try { - // Extract arguments + cancellationToken.ThrowIfCancellationRequested(); JsonElement root = arguments.RootElement; if (!root.TryGetProperty("entity", out JsonElement entityElement) || !root.TryGetProperty("data", out JsonElement dataElement)) { - return Task.FromResult(new CallToolResult - { - Content = [new TextContentBlock { Type = "text", Text = "Error: Missing required arguments 'entity' or 'data'" }] - }); + return Utils.McpResponseBuilder.BuildErrorResult("InvalidArguments", "Missing required arguments 'entity' or 'data'", logger); } string entityName = entityElement.GetString() ?? string.Empty; + if (string.IsNullOrWhiteSpace(entityName)) + { + return Utils.McpResponseBuilder.BuildErrorResult("InvalidArguments", "Entity name cannot be empty", logger); + } + + string dataSourceName; + try + { + dataSourceName = runtimeConfig.GetDataSourceNameFromEntityName(entityName); + } + catch (Exception) + { + return Utils.McpResponseBuilder.BuildErrorResult("InvalidConfiguration", $"Entity '{entityName}' not found in configuration", logger); + } + + IMetadataProviderFactory metadataProviderFactory = serviceProvider.GetRequiredService(); + ISqlMetadataProvider sqlMetadataProvider = metadataProviderFactory.GetMetadataProvider(dataSourceName); + + DatabaseObject dbObject; + try + { + dbObject = sqlMetadataProvider.GetDatabaseObjectByKey(entityName); + } + catch (Exception) + { + return Utils.McpResponseBuilder.BuildErrorResult("InvalidConfiguration", $"Database object for entity '{entityName}' not found", logger); + } - // TODO: Implement actual create logic using DAB's internal services - // For now, return a 
placeholder response - string result = $"Would create record in entity '{entityName}' with data: {dataElement.GetRawText()}"; + // Create an HTTP context for authorization + IHttpContextAccessor httpContextAccessor = serviceProvider.GetRequiredService(); + HttpContext httpContext = httpContextAccessor.HttpContext ?? new DefaultHttpContext(); + IAuthorizationResolver authorizationResolver = serviceProvider.GetRequiredService(); - return Task.FromResult(new CallToolResult + if (httpContext is null || !authorizationResolver.IsValidRoleContext(httpContext)) { - Content = [new TextContentBlock { Type = "text", Text = result }] - }); + return Utils.McpResponseBuilder.BuildErrorResult("PermissionDenied", "Permission denied: Unable to resolve a valid role context for update operation.", logger); + } + + // Validate that we have at least one role authorized for create + if (!TryResolveAuthorizedRole(httpContext, authorizationResolver, entityName, out string authError)) + { + return Utils.McpResponseBuilder.BuildErrorResult("PermissionDenied", authError, logger); + } + + JsonElement insertPayloadRoot = dataElement.Clone(); + InsertRequestContext insertRequestContext = new( + entityName, + dbObject, + insertPayloadRoot, + EntityActionOperation.Insert); + + RequestValidator requestValidator = serviceProvider.GetRequiredService(); + + // Only validate tables + if (dbObject.SourceType is EntitySourceType.Table) + { + try + { + requestValidator.ValidateInsertRequestContext(insertRequestContext); + } + catch (Exception ex) + { + return Utils.McpResponseBuilder.BuildErrorResult("ValidationFailed", $"Request validation failed: {ex.Message}", logger); + } + } + else + { + return Utils.McpResponseBuilder.BuildErrorResult( + "InvalidCreateTarget", + "The create_record tool is only available for tables.", + logger); + } + + IMutationEngineFactory mutationEngineFactory = serviceProvider.GetRequiredService(); + DatabaseType databaseType = sqlMetadataProvider.GetDatabaseType(); + 
IMutationEngine mutationEngine = mutationEngineFactory.GetMutationEngine(databaseType); + + IActionResult? result = await mutationEngine.ExecuteAsync(insertRequestContext); + + if (result is CreatedResult createdResult) + { + return Utils.McpResponseBuilder.BuildSuccessResult( + new Dictionary + { + ["entity"] = entityName, + ["result"] = createdResult.Value, + ["message"] = $"Successfully created record in entity '{entityName}'" + }, + logger, + $"Successfully created record in entity '{entityName}'"); + } + else if (result is ObjectResult objectResult) + { + bool isError = objectResult.StatusCode.HasValue && objectResult.StatusCode.Value >= 400 && objectResult.StatusCode.Value != 403; + if (isError) + { + return Utils.McpResponseBuilder.BuildErrorResult( + "CreateFailed", + $"Failed to create record in entity '{entityName}'. Error: {JsonSerializer.Serialize(objectResult.Value)}", + logger); + } + else + { + return Utils.McpResponseBuilder.BuildSuccessResult( + new Dictionary + { + ["entity"] = entityName, + ["result"] = objectResult.Value, + ["message"] = $"Successfully created record in entity '{entityName}'. Unable to perform read-back of inserted records." + }, + logger, + $"Successfully created record in entity '{entityName}'. 
Unable to perform read-back of inserted records."); + } + } + else + { + if (result is null) + { + return Utils.McpResponseBuilder.BuildErrorResult( + "UnexpectedError", + $"Mutation engine returned null result for entity '{entityName}'", + logger); + } + else + { + return Utils.McpResponseBuilder.BuildSuccessResult( + new Dictionary + { + ["entity"] = entityName, + ["message"] = $"Create operation completed with unexpected result type: {result.GetType().Name}" + }, + logger, + $"Create operation completed for entity '{entityName}' with unexpected result type: {result.GetType().Name}"); + } + } } catch (Exception ex) { - return Task.FromResult(new CallToolResult + return Utils.McpResponseBuilder.BuildErrorResult("Error", $"Error: {ex.Message}", logger); + } + } + + private static bool TryResolveAuthorizedRole( + HttpContext httpContext, + IAuthorizationResolver authorizationResolver, + string entityName, + out string error) + { + error = string.Empty; + + string roleHeader = httpContext.Request.Headers[AuthorizationResolver.CLIENT_ROLE_HEADER].ToString(); + + if (string.IsNullOrWhiteSpace(roleHeader)) + { + error = "Client role header is missing or empty."; + return false; + } + + string[] roles = roleHeader + .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + + if (roles.Length == 0) + { + error = "Client role header is missing or empty."; + return false; + } + + foreach (string role in roles) + { + bool allowed = authorizationResolver.AreRoleAndOperationDefinedForEntity( + entityName, role, EntityActionOperation.Create); + + if (allowed) { - Content = [new TextContentBlock { Type = "text", Text = $"Error: {ex.Message}" }] - }); + return true; + } } + + error = "You do not have permission to create records for this entity."; + return false; } } } From 4c5038fb7594f01783630a6dc353e1561eb88926 Mon Sep 17 00:00:00 2001 From: Anusha Kolan Date: Thu, 23 Oct 2025 23:36:18 
-0700 Subject: [PATCH 71/79] [MCP] Added parameter description. (#2904) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Why make this change? - Expands stored procedure parameter definitions in the configuration file to support richer metadata, using parameter's attributes like, `name`, `description`, `required`, and `default` values. - Enables MCP and other tooling to leverage parameter metadata for improved documentation, validation, and user experience. - Supports backward compatibility by allowing both the legacy dictionary format and the new array-of-objects format for parameters. ## What is this change? - Changes the `parameters` property in entity config from a dictionary mapping parameter names to values, to an array of parameter objects with explicit fields (`name`, `required`, `default`, `description`). - Updates the JSON schema to support the new structure and maintain support for the old syntax. - Refactors the internal model to handle both formats and expose richer metadata. - Enhances CLI commands (`dab add` and `dab update`) to accept and manage parameter metadata fields: `name`, `required`, `default`, and `description`. - Adds support for the expanded parameter object in CLI validation, including deprecation notes for the legacy format. - Includes parameter descriptions in DAB's OpenAPI and GraphQL schema outputs for improved API documentation. - This change also introduces smarter handling of optional parameters for stored procedures. If a parameter is not required and neither a user value nor a config default is provided, DAB will skip sending that parameter, allowing the database’s internal default to apply. This avoids duplication and drift between config and database defaults. The old behavior is fully supported for backward compatibility. ## How was this tested? - [x] Manual Testing: - Used CLI commands to add, validate and update parameters with metadata. 
- Verified config parsing, CLI commands, and schema generation for both legacy and expanded parameter formats. - Confirmed correct behavior in OpenAPI and GraphQL outputs. ## Sample Request(s) **Before (legacy dictionary format):** ```json { "entities": { "User": { "source": { "parameters": { "param1": "default-value" } } } } } ``` **After (expanded array format):** ```json { "entities": { "User": { "source": { "parameters": [ { "name": "Id", "required": true, "default": "default-value", "description": "The unique identifier for the user." } ] } } } } ``` **Command Line Examples:** **Before (legacy dictionary format):** ``` dotnet C:\DAB\data-api-builder\src\out\cli\net8.0\Microsoft.DataApiBuilder.dll add MyProc --source "dbo.MyProc" --source.type stored-procedure --source.params "PageSize:50,SortOrder:ASC" --permissions "anonymous:execute" "authenticated:execute" --config C:\DAB\data-api-builder\src\Service\dab-config.json ``` ``` "MyProc": { "source": { "object": "dbo.MyProc", "type": "stored-procedure", "parameters": [ { "name": "PageSize", "required": false, "default": "50" }, { "name": "SortOrder", "required": false, "default": "ASC" } ] }, "graphql": { "enabled": true, "operation": "mutation", "type": { "singular": "MyProc", "plural": "MyProcs" } }, "rest": { "enabled": true, "methods": [ "post" ] }, "permissions": [ { "role": "anonymous", "actions": [ { "action": "execute" } ] } ] } ``` **After (expanded array format):** Add: ``` dotnet C:\DAB\data-api-builder\src\out\cli\net8.0\Microsoft.DataApiBuilder.dll add entity GetTodosByOwnerAndStatus --source "dbo.GetTodosByOwnerAndStatus" --source.type stored-procedure --parameters.name "OwnerId,Completed" --parameters.description "Owner ID,Completed status" --parameters.required "true,false" --parameters.default "public,0" --permissions "anonymous:execute" --config C:\DAB\data-api-builder\src\Service\dab-config.json ``` Config created: ``` "entity": { "source": { "object": "dbo.GetTodosByOwnerAndStatus", "type": 
"stored-procedure", "parameters": [ { "name": "OwnerId", "description": "Owner ID", "required": true, "default": "public" }, { "name": "Completed", "description": "Completed status", "required": false, "default": "0" } ] }, "graphql": { "enabled": true, "operation": "mutation", "type": { "singular": "entity", "plural": "entities" } }, "rest": { "enabled": true, "methods": [ "post" ] }, "permissions": [ { "role": "anonymous", "actions": [ { "action": "execute" } ] } ] } ``` Validate: ``` dotnet C:\DAB\data-api-builder\src\out\cli\net8.0\Microsoft.DataApiBuilder.dll validate -c "C:\DAB\data-api-builder\src\Service\dab-config.json" ``` Validate when both formats are given: ``` PS C:\DAB\data-api-builder\src\Service> dotnet C:\DAB\data-api-builder\src\out\cli\net8.0\Microsoft.DataApiBuilder.dll add MyProc --source "dbo.MyProc" --source.type stored-procedure --source.params "PageSize:50,SortOrder:ASC" --permissions "anonymous:execute" "authenticated:execute" --config C:\DAB\data-api-builder\src\Service\dab-config.json --parameters.name "OwnerId,Completed" --parameters.description "Owner ID,Completed status" --parameters.required "true,false" --parameters.default "public,0" Information: Microsoft.DataApiBuilder 1.7.0 Information: User provided config file: C:\DAB\data-api-builder\src\Service\dab-config.json Loading config file from C:\DAB\data-api-builder\src\Service\dab-config.json. Error: Cannot use both --source.params and --parameters.name/description/required/default together. Please use only one format. Error: Unable to create the source object. Error: Failed to add a new entity. Error: Could not add entity: MyProc with source: dbo.MyProc and permissions: anonymous:execute. 
``` Update: ``` dotnet C:\DAB\data-api-builder\src\out\cli\net8.0\Microsoft.DataApiBuilder.dll update entity GetTodosByOwnerAndStatus --parameters.name "OwnerId,Completed" --parameters.description "Owner ID,Completed status" --parameters.required "true,false" --parameters.default "private,protected" --config "C:\DAB\data-api-builder\src\Service\dab-config.json" ``` Config updated: ``` "entity": { "source": { "object": "dbo.GetTodosByOwnerAndStatus", "type": "stored-procedure", "parameters": [ { "name": "OwnerId", "description": "Owner ID", "required": true, "default": "private" }, { "name": "Completed", "description": "Completed status", "required": false, "default": "protected" } ] }, "graphql": { "enabled": true, "operation": "mutation", "type": { "singular": "entity", "plural": "entities" } }, "rest": { "enabled": true, "methods": [ "post" ] }, "permissions": [ { "role": "anonymous", "actions": [ { "action": "execute" } ] } ] } ``` ----------------------------------------------- Create a stored procedure: ``` CREATE PROCEDURE dbo.InsertTodo @id UNIQUEIDENTIFIER, @title NVARCHAR(1000), @completed BIT = 0, @owner_id VARCHAR(128), @position INT = NULL AS BEGIN INSERT INTO dbo.todos (id, title, completed, owner_id, position) VALUES (@id, @title, @completed, @owner_id, @position); SELECT id, title, completed, owner_id, position FROM dbo.todos WHERE id = @id; END GO ``` 1. Without providing default parameter values ``` mutation { executeInsertTodo( id: "00000000-0000-0000-0000-000000000013", title: "Test DAB GraphQL", owner_id: "Anusha Kolan" # completed and position are optional ) { id title completed owner_id position } } ``` Uses DB default values ``` { "data": { "executeInsertTodo": [ { "id": "00000000-0000-0000-0000-000000000013", "title": "Test DAB GraphQL", "completed": false, "owner_id": "Anusha Kolan", "position": null } ] } } ``` 2. 
With providing default parameter values ``` mutation { executeInsertTodo( id: "00000000-0000-0000-0000-000000000014", title: "Test with completed", owner_id: "Anusha Kolan", completed: true, position: 5 ) { id title completed owner_id position } } ``` Uses the default values provided ``` { "data": { "executeInsertTodo": [ { "id": "00000000-0000-0000-0000-000000000014", "title": "Test with completed", "completed": true, "owner_id": "Anusha Kolan", "position": 5 } ] } } ``` ----------------------------------------------------------------------------------- Config: ``` "InsertTodo": { "source": { "object": "dbo.InsertTodo", "type": "stored-procedure", "parameters": [ { "name": "id", "description": "The id for the Todo.", "required": true }, { "name": "title", "description": "The title for the Todo.", "required": true }, { "name": "completed", "description": "The state of the Todo.", "required": false }, { "name": "owner_id", "required": true }, { "name": "position", "required": false } ] }, "graphql": { "enabled": true, "operation": "mutation", "type": { "singular": "InsertTodo", "plural": "InsertTodos" } }, "rest": { "enabled": true, "methods": [ "post" ] }, "permissions": [ { "role": "anonymous", "actions": [ { "action": "execute" } ] } ] } ``` GraphQL Schema Documentation ``` "Execute Stored-Procedure InsertTodo and get results from the database" executeInsertTodo( "The state of the Todo." completed: Boolean "The id for the Todo." id: UUID "parameters for InsertTodo stored-procedure" owner_id: String "parameters for InsertTodo stored-procedure" position: Int "The title for the Todo." title: String ): [InsertTodo!]! @cost(weight: "10") ``` OpenAPI Documentation ``` "InsertTodo_sp_request": { "required": [ "id", "owner_id", "title" ], "type": "object", "properties": { "completed": { "type": "boolean", "description": "The state of the Todo." }, "id": { "type": "string", "description": "The id for the Todo." 
}, "owner_id": { "type": "string" }, "position": { "type": "number" }, "title": { "type": "string", "description": "The title for the Todo." } } }, ``` --- schemas/dab.draft.schema.json | 42 ++-- .../BuiltInTools/ExecuteEntityTool.cs | 8 +- src/Cli.Tests/AddEntityTests.cs | 79 ++++++-- ...esWithSourceAsStoredProcedure.verified.txt | 22 ++- ...stMethodsAndGraphQLOperations.verified.txt | 22 ++- ...stMethodsAndGraphQLOperations.verified.txt | 22 ++- ...tyWithSourceAsStoredProcedure.verified.txt | 22 ++- ...tyWithSourceAsStoredProcedure.verified.txt | 22 ++- ...toredProcedureWithRestMethods.verified.txt | 22 ++- ...stMethodsAndGraphQLOperations.verified.txt | 22 ++- ...SourceObject_a70c086a74142c82.verified.txt | 22 ++- ...ests.UpdateDatabaseSourceName.verified.txt | 22 ++- ...pdateDatabaseSourceParameters.verified.txt | 16 +- src/Cli.Tests/UpdateEntityTests.cs | 6 +- src/Cli.Tests/UtilsTests.cs | 12 +- src/Cli.Tests/ValidateConfigTests.cs | 4 +- src/Cli/Commands/AddOptions.cs | 53 +++-- src/Cli/Commands/EntityOptions.cs | 25 ++- src/Cli/Commands/UpdateOptions.cs | 16 +- src/Cli/ConfigGenerator.cs | 185 ++++++++++++++++-- src/Cli/Utils.cs | 50 +++-- .../EntitySourceConverterFactory.cs | 60 +++++- .../DatabasePrimitives/DatabaseObject.cs | 4 + src/Config/ObjectModel/EntitySource.cs | 3 +- src/Config/ObjectModel/ParameterMetadata.cs | 28 +++ .../MsSqlMetadataProvider.cs | 14 +- .../MetadataProviders/SqlMetadataProvider.cs | 32 +-- .../Services/OpenAPI/OpenApiDocumentor.cs | 20 +- .../GraphQLStoredProcedureBuilder.cs | 26 ++- .../Helpers/GraphQLTestHelpers.cs | 2 +- .../Sql/StoredProcedureBuilderTests.cs | 15 +- .../ParameterValidationTests.cs | 19 +- ...tReadingRuntimeConfigForMsSql.verified.txt | 58 ++++-- .../SerializationDeserializationTests.cs | 2 +- 34 files changed, 753 insertions(+), 224 deletions(-) create mode 100644 src/Config/ObjectModel/ParameterMetadata.cs diff --git a/schemas/dab.draft.schema.json b/schemas/dab.draft.schema.json index 
b348ac4a4f..13d005c3ac 100644 --- a/schemas/dab.draft.schema.json +++ b/schemas/dab.draft.schema.json @@ -755,23 +755,35 @@ "description": "Database object name" }, "parameters": { - "type": "object", - "description": "Dictionary of parameters and their values", - "patternProperties": { - "^.*$": { - "oneOf": [ - { - "type": "boolean" - }, - { - "type": "string" - }, - { - "type": "number" + "oneOf": [ + { + "type": "object", + "description": "Dictionary of parameters and their values (deprecated)", + "patternProperties": { + "^.*$": { + "oneOf": [ + { "type": "boolean" }, + { "type": "string" }, + { "type": "number" } + ] } - ] + } + }, + { + "type": "array", + "description": "Array of parameter objects with metadata", + "items": { + "type": "object", + "required": ["name"], + "properties": { + "name": { "type": "string", "description": "Parameter name" }, + "required": { "type": "boolean", "description": "Is parameter required" }, + "default": { "type": ["string", "number", "boolean", "null"], "description": "Default value" }, + "description": { "type": "string", "description": "Parameter description. Since descriptions for multiple parameters are provided as a comma-separated string, individual parameter descriptions must not contain a comma (',')." 
} + } + } } - } + ] }, "key-fields": { "type": "array", diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/ExecuteEntityTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/ExecuteEntityTool.cs index 92f60f4703..c7734eea22 100644 --- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/ExecuteEntityTool.cs +++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/ExecuteEntityTool.cs @@ -169,7 +169,7 @@ public async Task ExecuteAsync( // Validate all provided parameters exist in metadata foreach (KeyValuePair param in parameters) { - if (!entityConfig.Source.Parameters.ContainsKey(param.Key)) + if (!entityConfig.Source.Parameters.Any(p => p.Name == param.Key)) { return McpResponseBuilder.BuildErrorResult("InvalidArguments", $"Invalid parameter: {param.Key}", logger); } @@ -205,11 +205,11 @@ public async Task ExecuteAsync( // Then, add default parameters from configuration (only if not already provided by user) if ((parameters == null || parameters.Count == 0) && entityConfig.Source.Parameters != null) { - foreach (KeyValuePair param in entityConfig.Source.Parameters) + foreach (ParameterMetadata param in entityConfig.Source.Parameters) { - if (!context.FieldValuePairsInBody.ContainsKey(param.Key)) + if (!context.FieldValuePairsInBody.ContainsKey(param.Name)) { - context.FieldValuePairsInBody[param.Key] = param.Value; + context.FieldValuePairsInBody[param.Name] = param.Default; } } } diff --git a/src/Cli.Tests/AddEntityTests.cs b/src/Cli.Tests/AddEntityTests.cs index 53a2379557..c12b0a2d4a 100644 --- a/src/Cli.Tests/AddEntityTests.cs +++ b/src/Cli.Tests/AddEntityTests.cs @@ -45,9 +45,12 @@ public Task AddNewEntityWhenEntitiesEmpty() cacheTtl: null, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: null, - graphQLOperationForStoredProcedure: null + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null ); - return 
ExecuteVerifyTest(options); } @@ -75,7 +78,11 @@ public Task AddNewEntityWhenEntitiesNotEmpty() cacheTtl: null, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: null, - graphQLOperationForStoredProcedure: null + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null ); string initialConfiguration = AddPropertiesToJson(INITIAL_CONFIG, GetFirstEntityConfiguration()); @@ -107,7 +114,11 @@ public void AddDuplicateEntity() cacheTtl: null, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: null, - graphQLOperationForStoredProcedure: null + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null ); string initialConfiguration = AddPropertiesToJson(INITIAL_CONFIG, GetFirstEntityConfiguration()); @@ -143,7 +154,11 @@ public Task AddEntityWithAnExistingNameButWithDifferentCase() cacheTtl: null, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: null, - graphQLOperationForStoredProcedure: null + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null ); string initialConfiguration = AddPropertiesToJson(INITIAL_CONFIG, GetFirstEntityConfiguration()); @@ -174,7 +189,11 @@ public Task AddEntityWithCachingEnabled() cacheTtl: "1", config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: null, - graphQLOperationForStoredProcedure: null + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null ); return ExecuteVerifyTest(options); @@ -211,7 +230,11 @@ public Task AddEntityWithPolicyAndFieldProperties( 
cacheEnabled: null, cacheTtl: null, restMethodsForStoredProcedure: null, - graphQLOperationForStoredProcedure: null + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null ); // Create VerifySettings and add all arguments to the method as parameters @@ -244,7 +267,11 @@ public Task AddNewEntityWhenEntitiesWithSourceAsStoredProcedure() cacheEnabled: null, cacheTtl: null, restMethodsForStoredProcedure: null, - graphQLOperationForStoredProcedure: null + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + parametersDescriptionCollection: ["This is a test parameter description."], + parametersRequiredCollection: null, + parametersDefaultCollection: null ); return ExecuteVerifyTest(options); @@ -276,7 +303,11 @@ public Task TestAddStoredProcedureWithRestMethodsAndGraphQLOperations() cacheTtl: null, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: new string[] { "Post", "Put", "Patch" }, - graphQLOperationForStoredProcedure: "Query" + graphQLOperationForStoredProcedure: "Query", + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null ); return ExecuteVerifyTest(options); @@ -304,7 +335,11 @@ public void AddEntityWithDescriptionAndVerifyInConfig() cacheTtl: null, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: null, - graphQLOperationForStoredProcedure: null + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null ); string config = INITIAL_CONFIG; @@ -359,7 +394,11 @@ public void TestAddNewEntityWithSourceObjectHavingValidFields( cacheTtl: null, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: null, - graphQLOperationForStoredProcedure: 
null + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null ); RuntimeConfigLoader.TryParseConfig(INITIAL_CONFIG, out RuntimeConfig? runtimeConfig); @@ -419,7 +458,11 @@ public Task TestAddNewSpWithDifferentRestAndGraphQLOptions( cacheTtl: null, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: restMethods, - graphQLOperationForStoredProcedure: graphQLOperation + graphQLOperationForStoredProcedure: graphQLOperation, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null ); VerifySettings settings = new(); @@ -455,7 +498,11 @@ public void TestAddStoredProcedureWithConflictingRestGraphQLOptions( cacheTtl: null, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: restMethods, - graphQLOperationForStoredProcedure: graphQLOperation + graphQLOperationForStoredProcedure: graphQLOperation, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null ); RuntimeConfigLoader.TryParseConfig(INITIAL_CONFIG, out RuntimeConfig? runtimeConfig); @@ -494,7 +541,11 @@ public void TestAddEntityPermissionWithInvalidOperation(IEnumerable perm cacheTtl: null, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: null, - graphQLOperationForStoredProcedure: null + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null ); RuntimeConfigLoader.TryParseConfig(INITIAL_CONFIG, out RuntimeConfig? 
runtimeConfig); diff --git a/src/Cli.Tests/Snapshots/AddEntityTests.AddNewEntityWhenEntitiesWithSourceAsStoredProcedure.verified.txt b/src/Cli.Tests/Snapshots/AddEntityTests.AddNewEntityWhenEntitiesWithSourceAsStoredProcedure.verified.txt index 17e8de5193..21759deeed 100644 --- a/src/Cli.Tests/Snapshots/AddEntityTests.AddNewEntityWhenEntitiesWithSourceAsStoredProcedure.verified.txt +++ b/src/Cli.Tests/Snapshots/AddEntityTests.AddNewEntityWhenEntitiesWithSourceAsStoredProcedure.verified.txt @@ -28,11 +28,23 @@ Source: { Object: s001.book, Type: stored-procedure, - Parameters: { - param1: 123, - param2: hello, - param3: true - } + Parameters: [ + { + Name: param1, + Required: false, + Default: 123 + }, + { + Name: param2, + Required: false, + Default: hello + }, + { + Name: param3, + Required: false, + Default: True + } + ] }, GraphQL: { Singular: MyEntity, diff --git a/src/Cli.Tests/Snapshots/AddEntityTests.TestAddStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt b/src/Cli.Tests/Snapshots/AddEntityTests.TestAddStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt index 9aca5ba640..83d3882a96 100644 --- a/src/Cli.Tests/Snapshots/AddEntityTests.TestAddStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt +++ b/src/Cli.Tests/Snapshots/AddEntityTests.TestAddStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt @@ -28,11 +28,23 @@ Source: { Object: s001.book, Type: stored-procedure, - Parameters: { - param1: 123, - param2: hello, - param3: true - } + Parameters: [ + { + Name: param1, + Required: false, + Default: 123 + }, + { + Name: param2, + Required: false, + Default: hello + }, + { + Name: param3, + Required: false, + Default: True + } + ] }, GraphQL: { Singular: MyEntity, diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt index 
226c4e2a20..4411b47348 100644 --- a/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt +++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt @@ -35,11 +35,23 @@ Source: { Object: s001.book, Type: stored-procedure, - Parameters: { - param1: 123, - param2: hello, - param3: true - } + Parameters: [ + { + Name: param1, + Required: false, + Default: 123 + }, + { + Name: param2, + Required: false, + Default: hello + }, + { + Name: param3, + Required: false, + Default: True + } + ] }, GraphQL: { Singular: MyEntity, diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt index c4eb43648c..636d44805e 100644 --- a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt +++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt @@ -35,11 +35,23 @@ Source: { Object: s001.book, Type: stored-procedure, - Parameters: { - param1: 123, - param2: hello, - param3: true - } + Parameters: [ + { + Name: param1, + Required: false, + Default: 123 + }, + { + Name: param2, + Required: false, + Default: hello + }, + { + Name: param3, + Required: false, + Default: True + } + ] }, GraphQL: { Singular: MyEntity, diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterUpdatingEntityWithSourceAsStoredProcedure.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterUpdatingEntityWithSourceAsStoredProcedure.verified.txt index 5c940443a5..49af50b975 100644 --- a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterUpdatingEntityWithSourceAsStoredProcedure.verified.txt +++ 
b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterUpdatingEntityWithSourceAsStoredProcedure.verified.txt @@ -2,11 +2,23 @@ Source: { Object: dbo.books, Type: stored-procedure, - Parameters: { - param1: 123, - param2: hello, - param3: true - } + Parameters: [ + { + Name: param1, + Required: false, + Default: 123 + }, + { + Name: param2, + Required: false, + Default: hello + }, + { + Name: param3, + Required: false, + Default: True + } + ] }, GraphQL: { Singular: MyEntity, diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt index 5a6a50d38e..fef1d83bf2 100644 --- a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt +++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt @@ -35,11 +35,23 @@ Source: { Object: s001.book, Type: stored-procedure, - Parameters: { - param1: 123, - param2: hello, - param3: true - } + Parameters: [ + { + Name: param1, + Required: false, + Default: 123 + }, + { + Name: param2, + Required: false, + Default: hello + }, + { + Name: param3, + Required: false, + Default: True + } + ] }, GraphQL: { Singular: MyEntity, diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt index 540a1b5a1d..09007e27f8 100644 --- a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt +++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt @@ -35,11 +35,23 @@ Source: { Object: s001.book, Type: stored-procedure, - Parameters: { - param1: 123, - param2: hello, - param3: true - } + Parameters: [ + { + Name: param1, + 
Required: false, + Default: 123 + }, + { + Name: param2, + Required: false, + Default: hello + }, + { + Name: param3, + Required: false, + Default: True + } + ] }, GraphQL: { Singular: MyEntity, diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_a70c086a74142c82.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_a70c086a74142c82.verified.txt index 17e8de5193..21759deeed 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_a70c086a74142c82.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_a70c086a74142c82.verified.txt @@ -28,11 +28,23 @@ Source: { Object: s001.book, Type: stored-procedure, - Parameters: { - param1: 123, - param2: hello, - param3: true - } + Parameters: [ + { + Name: param1, + Required: false, + Default: 123 + }, + { + Name: param2, + Required: false, + Default: hello + }, + { + Name: param3, + Required: false, + Default: True + } + ] }, GraphQL: { Singular: MyEntity, diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceName.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceName.verified.txt index 967a59f1f9..1719e1ade2 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceName.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceName.verified.txt @@ -28,11 +28,23 @@ Source: { Object: newSourceName, Type: stored-procedure, - Parameters: { - param1: 123, - param2: hello, - param3: true - } + Parameters: [ + { + Name: param1, + Required: false, + Default: 123 + }, + { + Name: param2, + Required: false, + Default: hello + }, + { + Name: param3, + Required: false, + Default: True + } + ] }, GraphQL: { Singular: MyEntity, diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceParameters.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceParameters.verified.txt index 
016527cd68..0cbdc4347f 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceParameters.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceParameters.verified.txt @@ -28,10 +28,18 @@ Source: { Object: s001.book, Type: stored-procedure, - Parameters: { - param1: dab, - param2: false - } + Parameters: [ + { + Name: param1, + Required: false, + Default: dab + }, + { + Name: param2, + Required: false, + Default: False + } + ] }, GraphQL: { Singular: MyEntity, diff --git a/src/Cli.Tests/UpdateEntityTests.cs b/src/Cli.Tests/UpdateEntityTests.cs index 663334c5e8..9acb6c6f81 100644 --- a/src/Cli.Tests/UpdateEntityTests.cs +++ b/src/Cli.Tests/UpdateEntityTests.cs @@ -1187,7 +1187,11 @@ private static UpdateOptions GenerateBaseUpdateOptions( config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: restMethodsForStoredProcedure, graphQLOperationForStoredProcedure: graphQLOperationForStoredProcedure, - description: description + description: description, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null ); } diff --git a/src/Cli.Tests/UtilsTests.cs b/src/Cli.Tests/UtilsTests.cs index 486d09f253..b02649339d 100644 --- a/src/Cli.Tests/UtilsTests.cs +++ b/src/Cli.Tests/UtilsTests.cs @@ -137,13 +137,13 @@ public void TestConfigSelectionBasedOnCliPrecedence( public void TestTryParseSourceParameterDictionary() { IEnumerable? parametersList = new string[] { "param1:123", "param2:-243", "param3:220.12", "param4:True", "param5:dab" }; - Assert.IsTrue(TryParseSourceParameterDictionary(parametersList, out Dictionary? sourceParameters)); + Assert.IsTrue(TryParseSourceParameterDictionary(parametersList, out List? 
sourceParameters)); Assert.IsNotNull(sourceParameters); - Assert.AreEqual(sourceParameters.GetValueOrDefault("param1"), 123); - Assert.AreEqual(sourceParameters.GetValueOrDefault("param2"), -243); - Assert.AreEqual(sourceParameters.GetValueOrDefault("param3"), 220.12); - Assert.AreEqual(sourceParameters.GetValueOrDefault("param4"), true); - Assert.AreEqual(sourceParameters.GetValueOrDefault("param5"), "dab"); + Assert.AreEqual(123, Convert.ToInt32(sourceParameters.First(p => p.Name == "param1").Default)); + Assert.AreEqual(-243, Convert.ToInt32(sourceParameters.First(p => p.Name == "param2").Default)); + Assert.AreEqual(220.12, Convert.ToDouble(sourceParameters.First(p => p.Name == "param3").Default)); + Assert.AreEqual(true, Convert.ToBoolean(sourceParameters.First(p => p.Name == "param4").Default)); + Assert.AreEqual("dab", Convert.ToString(sourceParameters.First(p => p.Name == "param5").Default)); } /// diff --git a/src/Cli.Tests/ValidateConfigTests.cs b/src/Cli.Tests/ValidateConfigTests.cs index 78f2db1b6f..e40a32e291 100644 --- a/src/Cli.Tests/ValidateConfigTests.cs +++ b/src/Cli.Tests/ValidateConfigTests.cs @@ -241,7 +241,7 @@ public void TestValidateConfigFailsWithNoDataSource() /// "object": "s001.book", /// "parameters": { /// "param1": "@env('sp_param1_int')", // INT - /// "param2": "@env('sp_param2_bool')" // BOOL + /// "param2": "@env('sp_param3_bool')" // BOOL /// } /// } /// @@ -252,7 +252,7 @@ public void ValidateConfigSchemaWhereConfigReferencesEnvironmentVariables() Environment.SetEnvironmentVariable($"connection-string", SAMPLE_TEST_CONN_STRING); Environment.SetEnvironmentVariable($"database-type", "mssql"); Environment.SetEnvironmentVariable($"sp_param1_int", "123"); - Environment.SetEnvironmentVariable($"sp_param2_bool", "true"); + Environment.SetEnvironmentVariable($"sp_param3_bool", "true"); // Capture console output to get error messaging. 
StringWriter writer = new(); diff --git a/src/Cli/Commands/AddOptions.cs b/src/Cli/Commands/AddOptions.cs index 60f575f469..635a438082 100644 --- a/src/Cli/Commands/AddOptions.cs +++ b/src/Cli/Commands/AddOptions.cs @@ -34,24 +34,35 @@ public AddOptions( string? policyDatabase, string? cacheEnabled, string? cacheTtl, - string? config, - string? description) - : base(entity, - sourceType, - sourceParameters, - sourceKeyFields, - restRoute, - restMethodsForStoredProcedure, - graphQLType, - graphQLOperationForStoredProcedure, - fieldsToInclude, - fieldsToExclude, - policyRequest, - policyDatabase, - cacheEnabled, - cacheTtl, - config, - description) + string? description, + IEnumerable? parametersNameCollection, + IEnumerable? parametersDescriptionCollection, + IEnumerable? parametersRequiredCollection, + IEnumerable? parametersDefaultCollection, + string? config + ) + : base( + entity, + sourceType, + sourceParameters, + sourceKeyFields, + restRoute, + restMethodsForStoredProcedure, + graphQLType, + graphQLOperationForStoredProcedure, + fieldsToInclude, + fieldsToExclude, + policyRequest, + policyDatabase, + cacheEnabled, + cacheTtl, + description, + parametersNameCollection, + parametersDescriptionCollection, + parametersRequiredCollection, + parametersDefaultCollection, + config + ) { Source = source; Permissions = permissions; @@ -74,12 +85,14 @@ public int Handler(ILogger logger, FileSystemRuntimeConfigLoader loader, IFileSy bool isSuccess = ConfigGenerator.TryAddEntityToConfigWithOptions(this, loader, fileSystem); if (isSuccess) { - logger.LogInformation("Added new entity: {Entity} with source: {Source} and permissions: {permissions}.", Entity, Source, string.Join(SEPARATOR, Permissions)); + logger.LogInformation("Added new entity: {Entity} with source: {Source} and permissions: {permissions}.", + Entity, Source, string.Join(SEPARATOR, Permissions)); logger.LogInformation("SUGGESTION: Use 'dab update [entity-name] [options]' to update any entities in your 
config."); } else { - logger.LogError("Could not add entity: {Entity} with source: {Source} and permissions: {permissions}.", Entity, Source, string.Join(SEPARATOR, Permissions)); + logger.LogError("Could not add entity: {Entity} with source: {Source} and permissions: {permissions}.", + Entity, Source, string.Join(SEPARATOR, Permissions)); } return isSuccess ? CliReturnCode.SUCCESS : CliReturnCode.GENERAL_ERROR; diff --git a/src/Cli/Commands/EntityOptions.cs b/src/Cli/Commands/EntityOptions.cs index d2173d1775..a11d6fe450 100644 --- a/src/Cli/Commands/EntityOptions.cs +++ b/src/Cli/Commands/EntityOptions.cs @@ -25,8 +25,13 @@ public EntityOptions( string? policyDatabase, string? cacheEnabled, string? cacheTtl, - string? config, - string? description) + string? description, + IEnumerable? parametersNameCollection, + IEnumerable? parametersDescriptionCollection, + IEnumerable? parametersRequiredCollection, + IEnumerable? parametersDefaultCollection, + string? config + ) : base(config) { Entity = entity; @@ -44,6 +49,10 @@ public EntityOptions( CacheEnabled = cacheEnabled; CacheTtl = cacheTtl; Description = description; + ParametersNameCollection = parametersNameCollection; + ParametersDescriptionCollection = parametersDescriptionCollection; + ParametersRequiredCollection = parametersRequiredCollection; + ParametersDefaultCollection = parametersDefaultCollection; } // Entity is required but we have made required as false to have custom error message (more user friendly), if not provided. @@ -91,5 +100,17 @@ public EntityOptions( [Option("description", Required = false, HelpText = "Description of the entity.")] public string? Description { get; } + + [Option("parameters.name", Required = false, Separator = ',', HelpText = "Comma-separated list of parameter names for stored procedure.")] + public IEnumerable? 
ParametersNameCollection { get; } + + [Option("parameters.description", Required = false, Separator = ',', HelpText = "Comma-separated list of parameter descriptions for stored procedure.")] + public IEnumerable? ParametersDescriptionCollection { get; } + + [Option("parameters.required", Required = false, Separator = ',', HelpText = "Comma-separated list of parameter required flags (true/false) for stored procedure.")] + public IEnumerable? ParametersRequiredCollection { get; } + + [Option("parameters.default", Required = false, Separator = ',', HelpText = "Comma-separated list of parameter default values for stored procedure.")] + public IEnumerable? ParametersDefaultCollection { get; } } } diff --git a/src/Cli/Commands/UpdateOptions.cs b/src/Cli/Commands/UpdateOptions.cs index a98d77f66d..2c1c0e74c7 100644 --- a/src/Cli/Commands/UpdateOptions.cs +++ b/src/Cli/Commands/UpdateOptions.cs @@ -42,8 +42,12 @@ public UpdateOptions( string? policyDatabase, string? cacheEnabled, string? cacheTtl, - string config, - string? description) + string? description, + IEnumerable? parametersNameCollection, + IEnumerable? parametersDescriptionCollection, + IEnumerable? parametersRequiredCollection, + IEnumerable? parametersDefaultCollection, + string? 
config) : base(entity, sourceType, sourceParameters, @@ -58,8 +62,12 @@ public UpdateOptions( policyDatabase, cacheEnabled, cacheTtl, - config, - description) + description, + parametersNameCollection, + parametersDescriptionCollection, + parametersRequiredCollection, + parametersDefaultCollection, + config) { Source = source; Permissions = permissions; diff --git a/src/Cli/ConfigGenerator.cs b/src/Cli/ConfigGenerator.cs index 886447b256..b54f87b2fd 100644 --- a/src/Cli/ConfigGenerator.cs +++ b/src/Cli/ConfigGenerator.cs @@ -473,6 +473,7 @@ public static bool TryAddNewEntity(AddOptions options, RuntimeConfig initialRunt /// /// This method creates the source object for a new entity /// if the given source fields specified by the user are valid. + /// Supports both old (dictionary) and new (ParameterMetadata list) parameter formats. /// public static bool TryCreateSourceObjectForNewEntity( AddOptions options, @@ -501,19 +502,55 @@ public static bool TryCreateSourceObjectForNewEntity( if (!VerifyCorrectPairingOfParameterAndKeyFieldsWithType( objectType, options.SourceParameters, + options.ParametersNameCollection, options.SourceKeyFields)) { return false; } - // Parses the string array to parameter Dictionary - if (!TryParseSourceParameterDictionary( - options.SourceParameters, - out Dictionary? parametersDictionary)) + // Check for both old and new parameter formats + bool hasOldParams = options.SourceParameters != null && options.SourceParameters.Any(); + bool hasNewParams = options.ParametersNameCollection != null && options.ParametersNameCollection.Any(); + + if (hasOldParams && hasNewParams) { + _logger.LogError("Cannot use both --source.params and --parameters.name/description/required/default together. Please use only one format."); return false; } + List? parameters = null; + if (hasNewParams) + { + // Parse new format + List names = options.ParametersNameCollection != null ? 
options.ParametersNameCollection.ToList() : new List(); + List descriptions = options.ParametersDescriptionCollection?.ToList() ?? new List(); + List requiredFlags = options.ParametersRequiredCollection?.ToList() ?? new List(); + List defaults = options.ParametersDefaultCollection?.ToList() ?? new List(); + + parameters = []; + for (int i = 0; i < names.Count; i++) + { + parameters.Add(new ParameterMetadata + { + Name = names[i], + Description = descriptions.ElementAtOrDefault(i), + Required = requiredFlags.ElementAtOrDefault(i)?.ToLower() == "true", + Default = defaults.ElementAtOrDefault(i) + }); + } + } + else if (hasOldParams) + { + // Parse old format and convert to new type + if (!TryParseSourceParameterDictionary(options.SourceParameters, out parameters)) + { + return false; + } + + _logger.LogWarning("The --source.params format is deprecated. Please use --parameters.name/description/required/default instead."); + + } + string[]? sourceKeyFields = null; if (options.SourceKeyFields is not null && options.SourceKeyFields.Any()) { @@ -524,7 +561,7 @@ public static bool TryCreateSourceObjectForNewEntity( if (!TryCreateSourceObject( options.Source, objectType, - parametersDictionary, + parameters, sourceKeyFields, out sourceObject)) { @@ -534,7 +571,6 @@ public static bool TryCreateSourceObjectForNewEntity( return true; } - /// /// Tries to update the runtime settings based on the provided runtime options. /// @@ -1820,10 +1856,12 @@ private static bool TryGetUpdatedSourceObjectWithOptions( string updatedSourceName = options.Source ?? entity.Source.Object; string[]? updatedKeyFields = entity.Source.KeyFields; EntitySourceType? updatedSourceType = entity.Source.Type; - Dictionary? updatedSourceParameters = entity.Source.Parameters; - // If SourceType provided by user is null, - // no update is required. 
+ // Support for new parameter format + bool hasOldParams = options.SourceParameters is not null && options.SourceParameters.Any(); + bool hasNewParams = options.ParametersNameCollection is not null && options.ParametersNameCollection.Any(); + + // If SourceType provided by user is not null, update type if (options.SourceType is not null) { if (!EnumExtensions.TryDeserialize(options.SourceType, out EntitySourceType? deserializedEntityType)) @@ -1833,7 +1871,6 @@ private static bool TryGetUpdatedSourceObjectWithOptions( } updatedSourceType = (EntitySourceType)deserializedEntityType; - if (IsStoredProcedureConvertedToOtherTypes(entity, options) || IsEntityBeingConvertedToStoredProcedure(entity, options)) { _logger.LogWarning( @@ -1842,13 +1879,15 @@ private static bool TryGetUpdatedSourceObjectWithOptions( } } - // No need to validate parameter and key field usage when there are no changes to the source object defined in 'options' + // Validate correct pairing of parameters and key fields if ((options.SourceType is not null - || (options.SourceParameters is not null && options.SourceParameters.Any()) - || (options.SourceKeyFields is not null && options.SourceKeyFields.Any())) + || hasOldParams + || (options.SourceKeyFields is not null && options.SourceKeyFields.Any()) + || hasNewParams) && !VerifyCorrectPairingOfParameterAndKeyFieldsWithType( updatedSourceType, options.SourceParameters, + options.ParametersNameCollection, options.SourceKeyFields)) { return false; @@ -1856,35 +1895,139 @@ private static bool TryGetUpdatedSourceObjectWithOptions( // Changing source object from stored-procedure to table/view // should automatically update the parameters to be null. - // Similarly from table/view to stored-procedure, key-fields - // should be marked null. + // Similarly from table/view to stored-procedure, key-fields should be marked null. 
if (EntitySourceType.StoredProcedure.Equals(updatedSourceType)) { updatedKeyFields = null; } else { - updatedSourceParameters = null; + hasOldParams = false; + hasNewParams = false; } - // If given SourceParameter is null or is Empty, no update is required. - // Else updatedSourceParameters will contain the parsed dictionary of parameters. - if (options.SourceParameters is not null && options.SourceParameters.Any() && - !TryParseSourceParameterDictionary(options.SourceParameters, out updatedSourceParameters)) + // Warn and error if both formats are provided + if (hasOldParams && hasNewParams) { + _logger.LogError("Cannot use both --source.params and --parameters.name/description/required/default together. Please use only one format."); return false; } + List? parameters = null; + + if (hasNewParams) + { + // Parse new format + List names = options.ParametersNameCollection != null ? options.ParametersNameCollection.ToList() : new List(); + List descriptions = options.ParametersDescriptionCollection?.ToList() ?? new List(); + List requiredFlags = options.ParametersRequiredCollection?.ToList() ?? new List(); + List defaults = options.ParametersDefaultCollection?.ToList() ?? new List(); + + parameters = []; + for (int i = 0; i < names.Count; i++) + { + parameters.Add(new ParameterMetadata + { + Name = names[i], + Description = descriptions.ElementAtOrDefault(i), + Required = requiredFlags.ElementAtOrDefault(i)?.ToLower() == "true", + Default = defaults.ElementAtOrDefault(i) + }); + } + } + else if (hasOldParams) + { + // Parse old format and convert to new type + if (!TryParseSourceParameterDictionary(options.SourceParameters, out parameters)) + { + return false; + } + + _logger.LogWarning("The --source.params format is deprecated. 
Please use --parameters.name/description/required/default instead."); + } + + // In TryGetUpdatedSourceObjectWithOptions, before TryCreateSourceObject: + if (parameters == null && EntitySourceType.StoredProcedure.Equals(updatedSourceType)) + { + parameters = entity.Source.Parameters?.ToList(); + } + if (options.SourceKeyFields is not null && options.SourceKeyFields.Any()) { updatedKeyFields = options.SourceKeyFields.ToArray(); } + if (hasNewParams && EntitySourceType.StoredProcedure.Equals(updatedSourceType)) + { + List existingParams; + if (entity.Source.Parameters != null) + { + existingParams = entity.Source.Parameters.ToList(); + } + else + { + existingParams = new List(); + } + + List mergedParams = new(); + + if (parameters != null) + { + foreach (ParameterMetadata newParam in parameters) + { + ParameterMetadata? match = null; + foreach (ParameterMetadata p in existingParams) + { + if (p.Name == newParam.Name) + { + match = p; + break; + } + } + + if (match != null) + { + mergedParams.Add(new ParameterMetadata + { + Name = newParam.Name, + Description = newParam.Description != null ? newParam.Description : match.Description, + Required = newParam.Required, + Default = newParam.Default != null ? newParam.Default : match.Default + }); + } + else + { + mergedParams.Add(newParam); + } + } + } + + foreach (ParameterMetadata param in existingParams) + { + bool found = false; + foreach (ParameterMetadata p in mergedParams) + { + if (p.Name == param.Name) + { + found = true; + break; + } + } + + if (!found) + { + mergedParams.Add(param); + } + } + + parameters = mergedParams; + } + // Try Creating Source Object with the updated values. 
if (!TryCreateSourceObject( updatedSourceName, updatedSourceType, - updatedSourceParameters, + parameters, updatedKeyFields, out updatedSourceObject)) { diff --git a/src/Cli/Utils.cs b/src/Cli/Utils.cs index ac6493ff65..451c330503 100644 --- a/src/Cli/Utils.cs +++ b/src/Cli/Utils.cs @@ -329,19 +329,24 @@ public static bool TryGetConfigFileBasedOnCliPrecedence( } /// - /// This method checks that parameter is only used with Stored Procedure, while - /// key-fields only with table/views. Also ensures that key-fields are always - /// provided for views. - /// - /// type of the source object. - /// IEnumerable string containing parameters for stored-procedure. - /// IEnumerable string containing key columns for table/view. - /// Returns true when successful else on failure, returns false. + /// Validates correct usage of parameters and key-fields based on the source type. + /// Ensures that parameters are only used with stored procedures, key-fields only with tables/views, + /// and that key-fields are always provided for views. + /// + /// Type of the source object. + /// IEnumerable of strings containing parameters for stored procedures (old format). + /// IEnumerable of strings containing parameter names for stored procedures (new format). + /// IEnumerable of strings containing key columns for tables/views. + /// True if the pairing is valid; otherwise, false. public static bool VerifyCorrectPairingOfParameterAndKeyFieldsWithType( EntitySourceType? sourceType, - IEnumerable? parameters, + IEnumerable? parameters, // old format + IEnumerable? parametersNameCollection, // new format IEnumerable? 
keyFields) { + bool hasOldParams = parameters is not null && parameters.Any(); + bool hasNewParams = parametersNameCollection is not null && parametersNameCollection.Any(); + if (sourceType is EntitySourceType.StoredProcedure) { if (keyFields is not null && keyFields.Any()) @@ -353,7 +358,7 @@ public static bool VerifyCorrectPairingOfParameterAndKeyFieldsWithType( else { // For Views and Tables - if (parameters is not null && parameters.Any()) + if (hasOldParams || hasNewParams) { _logger.LogError("Tables/Views don't support parameters."); return false; @@ -382,7 +387,7 @@ public static bool VerifyCorrectPairingOfParameterAndKeyFieldsWithType( public static bool TryCreateSourceObject( string name, EntitySourceType? type, - Dictionary? parameters, + List? parameters, string[]? keyFields, [NotNullWhen(true)] out EntitySource? sourceObject) { @@ -407,21 +412,23 @@ public static bool TryCreateSourceObject( /// Returns true when successful else on failure, returns false. public static bool TryParseSourceParameterDictionary( IEnumerable? parametersList, - out Dictionary? sourceParameters) + out List? 
parameterMetadataList) { - sourceParameters = null; + parameterMetadataList = null; + if (parametersList is null) { return true; } - sourceParameters = new(StringComparer.OrdinalIgnoreCase); + parameterMetadataList = new(); + foreach (string param in parametersList) { string[] items = param.Split(SEPARATOR); if (items.Length != 2) { - sourceParameters = null; + parameterMetadataList = null; _logger.LogError("Invalid format for --source.params"); _logger.LogError("Correct source parameter syntax: --source.params \"key1:value1,key2:value2,...\"."); return false; @@ -430,12 +437,19 @@ public static bool TryParseSourceParameterDictionary( string paramKey = items[0]; object paramValue = ParseStringValue(items[1]); - sourceParameters.Add(paramKey, paramValue); + // Add to ParameterMetadata list with default values for rich metadata + parameterMetadataList.Add(new ParameterMetadata + { + Name = paramKey, + Default = paramValue.ToString(), + Required = false, + Description = null + }); } - if (!sourceParameters.Any()) + if (!parameterMetadataList.Any()) { - sourceParameters = null; + parameterMetadataList = null; } return true; diff --git a/src/Config/Converters/EntitySourceConverterFactory.cs b/src/Config/Converters/EntitySourceConverterFactory.cs index 51af00717d..a748382e01 100644 --- a/src/Config/Converters/EntitySourceConverterFactory.cs +++ b/src/Config/Converters/EntitySourceConverterFactory.cs @@ -57,17 +57,61 @@ public EntitySourceConverter(bool replaceEnvVar) JsonSerializerOptions innerOptions = new(options); innerOptions.Converters.Remove(innerOptions.Converters.First(c => c is EntitySourceConverterFactory)); - EntitySource? 
source = JsonSerializer.Deserialize(ref reader, innerOptions); + using JsonDocument doc = JsonDocument.ParseValue(ref reader); + JsonElement root = doc.RootElement; - if (source?.Parameters is not null) + if (root.TryGetProperty("parameters", out JsonElement parametersElement) && + parametersElement.ValueKind == JsonValueKind.Object) { - // If we get parameters back the value field will be JsonElement, since that's what System.Text.Json uses for the `object` type. - // But we want to convert that to a CLR type so we can use it in our code and avoid having to do our own type checking - // and casting elsewhere. - return source with { Parameters = source.Parameters.ToDictionary(p => p.Key, p => GetClrValue((JsonElement)p.Value)) }; + // Old format detected + List paramList = []; + foreach (JsonProperty prop in parametersElement.EnumerateObject()) + { + string? defaultValue = GetClrValue(prop.Value)?.ToString(); + paramList.Add(new ParameterMetadata + { + Name = prop.Name, + Default = defaultValue, + }); + } + + // Remove "parameters" from the JSON before deserialization + Dictionary modObj = []; + foreach (JsonProperty property in root.EnumerateObject()) + { + if (!property.NameEquals("parameters")) + { + modObj[property.Name] = property.Value.Deserialize(innerOptions) ?? new object(); + } + } + + modObj["parameters"] = paramList; + + string modJson = JsonSerializer.Serialize(modObj, innerOptions); + + // Deserialize to EntitySource without parameters + EntitySource? entitySource = JsonSerializer.Deserialize(modJson, innerOptions) + ?? throw new JsonException("Failed to deserialize EntitySource from modified JSON."); + + // Use the with expression to set the correct Parameters + return entitySource with { Parameters = paramList }; + } + else + { + string rawJson = root.GetRawText(); + // If already in new format, deserialize as usual + EntitySource? 
source = JsonSerializer.Deserialize(rawJson, innerOptions); + + if (source?.Parameters is not null) + { + if (source.Parameters is IEnumerable paramList) + { + return source with { Parameters = [.. paramList] }; + } + } + + return source; } - - return source; } private static object GetClrValue(JsonElement element) diff --git a/src/Config/DatabasePrimitives/DatabaseObject.cs b/src/Config/DatabasePrimitives/DatabaseObject.cs index f3714cd1b0..8636e8c005 100644 --- a/src/Config/DatabasePrimitives/DatabaseObject.cs +++ b/src/Config/DatabasePrimitives/DatabaseObject.cs @@ -130,6 +130,10 @@ public class StoredProcedureDefinition : SourceDefinition public class ParameterDefinition { + public string Name { get; set; } = null!; + public bool? Required { get; set; } = false; + public string? Default { get; set; } + public string? Description { get; set; } public Type SystemType { get; set; } = null!; public DbType? DbType { get; set; } public SqlDbType? SqlDbType { get; set; } diff --git a/src/Config/ObjectModel/EntitySource.cs b/src/Config/ObjectModel/EntitySource.cs index e6cbd70a0b..8844796413 100644 --- a/src/Config/ObjectModel/EntitySource.cs +++ b/src/Config/ObjectModel/EntitySource.cs @@ -1,6 +1,5 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. - namespace Azure.DataApiBuilder.Config.ObjectModel; /// @@ -13,4 +12,4 @@ namespace Azure.DataApiBuilder.Config.ObjectModel; /// If Type is SourceType.StoredProcedure, /// Parameters to be passed as defaults to the procedure call /// The field(s) to be used as primary keys. -public record EntitySource(string Object, EntitySourceType? Type, Dictionary? Parameters, string[]? KeyFields); +public record EntitySource(string Object, EntitySourceType? Type, List? Parameters, string[]? 
KeyFields); diff --git a/src/Config/ObjectModel/ParameterMetadata.cs b/src/Config/ObjectModel/ParameterMetadata.cs new file mode 100644 index 0000000000..334979d728 --- /dev/null +++ b/src/Config/ObjectModel/ParameterMetadata.cs @@ -0,0 +1,28 @@ +namespace Azure.DataApiBuilder.Config.ObjectModel +{ + /// + /// Represents metadata for a parameter, including its name, description, requirement status, and default value. + /// + public class ParameterMetadata + { + /// + /// Gets or sets the name of the parameter. + /// + public required string Name { get; set; } + + /// + /// Gets or sets the description of the parameter. + /// + public string? Description { get; set; } + + /// + /// Gets or sets a value indicating whether the parameter is required. + /// + public bool Required { get; set; } + + /// + /// Gets or sets the default value of the parameter, if any. + /// + public string? Default { get; set; } + } +} diff --git a/src/Core/Services/MetadataProviders/MsSqlMetadataProvider.cs b/src/Core/Services/MetadataProviders/MsSqlMetadataProvider.cs index f03216ba89..4acb52f21b 100644 --- a/src/Core/Services/MetadataProviders/MsSqlMetadataProvider.cs +++ b/src/Core/Services/MetadataProviders/MsSqlMetadataProvider.cs @@ -200,11 +200,14 @@ protected override async Task FillSchemaForStoredProcedureAsync( // Loop through parameters specified in config, throw error if not found in schema // else set runtime config defined default values. // Note: we defer type checking of parameters specified in config until request time - Dictionary? configParameters = procedureEntity.Source.Parameters; + List? configParameters = procedureEntity.Source.Parameters; if (configParameters is not null) { - foreach ((string configParamKey, object configParamValue) in configParameters) + foreach (ParameterMetadata paramMetadata in configParameters) { + string configParamKey = paramMetadata.Name; + object? 
configParamValue = paramMetadata.Default; + if (!storedProcedureDefinition.Parameters.TryGetValue(configParamKey, out ParameterDefinition? parameterDefinition)) { throw new DataApiBuilderException( @@ -214,8 +217,11 @@ protected override async Task FillSchemaForStoredProcedureAsync( } else { - parameterDefinition.HasConfigDefault = true; - parameterDefinition.ConfigDefaultValue = configParamValue?.ToString(); + parameterDefinition.Description = paramMetadata.Description; + parameterDefinition.Required = paramMetadata.Required; + parameterDefinition.Default = paramMetadata.Default; + parameterDefinition.HasConfigDefault = paramMetadata.Default is not null; + parameterDefinition.ConfigDefaultValue = paramMetadata.Default?.ToString(); } } } diff --git a/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs b/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs index 071c44fe05..75fff1b932 100644 --- a/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs +++ b/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs @@ -455,11 +455,12 @@ protected virtual async Task FillSchemaForStoredProcedureAsync( // Loop through parameters specified in config, throw error if not found in schema // else set runtime config defined default values. // Note: we defer type checking of parameters specified in config until request time - Dictionary? configParameters = procedureEntity.Source.Parameters; + List? configParameters = procedureEntity.Source.Parameters; if (configParameters is not null) { - foreach ((string configParamKey, object configParamValue) in configParameters) + foreach (ParameterMetadata paramMeta in configParameters) { + string configParamKey = paramMeta.Name; if (!storedProcedureDefinition.Parameters.TryGetValue(configParamKey, out ParameterDefinition? 
parameterDefinition)) { HandleOrRecordException(new DataApiBuilderException( @@ -469,8 +470,12 @@ protected virtual async Task FillSchemaForStoredProcedureAsync( } else { - parameterDefinition.HasConfigDefault = true; - parameterDefinition.ConfigDefaultValue = configParamValue?.ToString(); + // Map all metadata from config + parameterDefinition.Description = paramMeta.Description; + parameterDefinition.Required = paramMeta.Required; + parameterDefinition.Default = paramMeta.Default; + parameterDefinition.HasConfigDefault = paramMeta.Default is not null; + parameterDefinition.ConfigDefaultValue = paramMeta.Default?.ToString(); } } } @@ -1169,25 +1174,6 @@ private async Task PopulateResultSetDefinitionsForStoredProcedureAsync( } } - /// - /// Helper method to create params for the query. - /// - /// Common prefix of param names. - /// Values of the param. - /// - private static Dictionary GetQueryParams( - string paramName, - object[] paramValues) - { - Dictionary parameters = new(); - for (int paramNumber = 0; paramNumber < paramValues.Length; paramNumber++) - { - parameters.Add($"{paramName}{paramNumber}", paramValues[paramNumber]); - } - - return parameters; - } - /// /// Generate the mappings of exposed names to /// backing columns, and of backing columns to diff --git a/src/Core/Services/OpenAPI/OpenApiDocumentor.cs b/src/Core/Services/OpenAPI/OpenApiDocumentor.cs index 4b6aaac78e..33105db289 100644 --- a/src/Core/Services/OpenAPI/OpenApiDocumentor.cs +++ b/src/Core/Services/OpenAPI/OpenApiDocumentor.cs @@ -16,6 +16,7 @@ using Azure.DataApiBuilder.Core.Services.OpenAPI; using Azure.DataApiBuilder.Product; using Azure.DataApiBuilder.Service.Exceptions; +using Microsoft.OpenApi.Any; using Microsoft.OpenApi.Models; using Microsoft.OpenApi.Writers; using static Azure.DataApiBuilder.Config.DabConfigEvents; @@ -1071,21 +1072,32 @@ private Dictionary CreateComponentSchemas(RuntimeEntities private static OpenApiSchema CreateSpRequestComponentSchema(Dictionary fields) { 
Dictionary properties = new(); + HashSet required = new(); - foreach (string parameter in fields.Keys) + foreach (KeyValuePair kvp in fields) { - string typeMetadata = TypeHelper.GetJsonDataTypeFromSystemType(fields[parameter].SystemType).ToString().ToLower(); + string parameter = kvp.Key; + ParameterDefinition def = kvp.Value; + string typeMetadata = TypeHelper.GetJsonDataTypeFromSystemType(def.SystemType).ToString().ToLower(); properties.Add(parameter, new OpenApiSchema() { - Type = typeMetadata + Type = typeMetadata, + Description = def.Description, + Default = def.Default is not null ? new OpenApiString(def.Default) : null }); + + if (def.Required == true) + { + required.Add(parameter); + } } OpenApiSchema schema = new() { Type = SCHEMA_OBJECT_TYPE, - Properties = properties + Properties = properties, + Required = required }; return schema; diff --git a/src/Service.GraphQLBuilder/GraphQLStoredProcedureBuilder.cs b/src/Service.GraphQLBuilder/GraphQLStoredProcedureBuilder.cs index ed72a575e7..8aca57421c 100644 --- a/src/Service.GraphQLBuilder/GraphQLStoredProcedureBuilder.cs +++ b/src/Service.GraphQLBuilder/GraphQLStoredProcedureBuilder.cs @@ -55,17 +55,25 @@ public static FieldDefinitionNode GenerateStoredProcedureSchema( // Without database metadata, there is no way to know to cast 1 to a decimal versus an integer. IValueNode? defaultValueNode = null; - if (entity.Source.Parameters is not null && entity.Source.Parameters.TryGetValue(param, out object? value)) + if (entity.Source.Parameters is not null) { - Tuple defaultGraphQLValue = ConvertValueToGraphQLType(value.ToString()!, parameterDefinition: spdef.Parameters[param]); - defaultValueNode = defaultGraphQLValue.Item2; + ParameterMetadata? 
paramMetadata = entity.Source.Parameters + .FirstOrDefault(p => p.Name == param); + + if (paramMetadata is not null && paramMetadata.Default is not null) + { + Tuple defaultGraphQLValue = ConvertValueToGraphQLType(paramMetadata.Default.ToString()!, parameterDefinition: spdef.Parameters[param]); + defaultValueNode = defaultGraphQLValue.Item2; + } } inputValues.Add( new( location: null, name: new(param), - description: new StringValueNode($"parameters for {name.Value} stored-procedure"), + description: definition.Description != null + ? new StringValueNode(definition.Description) + : new StringValueNode($"parameters for {name.Value} stored-procedure"), type: new NamedTypeNode(SchemaConverter.GetGraphQLTypeFromSystemType(type: definition.SystemType)), defaultValue: defaultValueNode, directives: new List()) @@ -157,7 +165,15 @@ private static Tuple ConvertValueToGraphQLType(string defaul FLOAT_TYPE => new(FLOAT_TYPE, new FloatValueNode(double.Parse(defaultValueFromConfig))), DECIMAL_TYPE => new(DECIMAL_TYPE, new FloatValueNode(decimal.Parse(defaultValueFromConfig))), STRING_TYPE => new(STRING_TYPE, new StringValueNode(defaultValueFromConfig)), - BOOLEAN_TYPE => new(BOOLEAN_TYPE, new BooleanValueNode(bool.Parse(defaultValueFromConfig))), + BOOLEAN_TYPE => new(BOOLEAN_TYPE, new BooleanValueNode( + defaultValueFromConfig switch + { + "1" => true, + "0" => false, + var s when s.Equals("true", StringComparison.OrdinalIgnoreCase) => true, + var s when s.Equals("false", StringComparison.OrdinalIgnoreCase) => false, + _ => throw new FormatException($"String '{defaultValueFromConfig}' was not recognized as a valid Boolean.") + })), DATETIME_TYPE => new(DATETIME_TYPE, new DateTimeType().ParseResult( DateTime.Parse(defaultValueFromConfig, DateTimeFormatInfo.InvariantInfo, DateTimeStyles.AssumeUniversal))), BYTEARRAY_TYPE => new(BYTEARRAY_TYPE, new ByteArrayType().ParseValue(Convert.FromBase64String(defaultValueFromConfig))), diff --git 
a/src/Service.Tests/GraphQLBuilder/Helpers/GraphQLTestHelpers.cs b/src/Service.Tests/GraphQLBuilder/Helpers/GraphQLTestHelpers.cs index a00885fcf3..73f71db446 100644 --- a/src/Service.Tests/GraphQLBuilder/Helpers/GraphQLTestHelpers.cs +++ b/src/Service.Tests/GraphQLBuilder/Helpers/GraphQLTestHelpers.cs @@ -101,7 +101,7 @@ public static Entity GenerateStoredProcedureEntity( GraphQLOperation? graphQLOperation, string[] permissionOperations = null, string dbObjectName = "foo", - Dictionary parameters = null + List parameters = null ) { IEnumerable actions = (permissionOperations ?? new string[] { }).Select(a => new EntityAction(EnumExtensions.Deserialize(a), null, new(null, null))); diff --git a/src/Service.Tests/GraphQLBuilder/Sql/StoredProcedureBuilderTests.cs b/src/Service.Tests/GraphQLBuilder/Sql/StoredProcedureBuilderTests.cs index 06cad3e972..b07ebcb083 100644 --- a/src/Service.Tests/GraphQLBuilder/Sql/StoredProcedureBuilderTests.cs +++ b/src/Service.Tests/GraphQLBuilder/Sql/StoredProcedureBuilderTests.cs @@ -87,17 +87,28 @@ public void StoredProcedure_ParameterValueTypeResolution( // Parameter collection used to create DatabaseObjectSource which is used to create a new entity object. Dictionary configSourcedParameters = new() { { parameterName, JsonSerializer.SerializeToElement(configParamValue) } }; + // Convert configSourcedParameters to List + List parameterMetadataList = configSourcedParameters + .Select(kvp => new ParameterMetadata + { + Name = kvp.Key, + Default = kvp.Value is JsonElement je + ? je.ValueKind == JsonValueKind.String ? je.GetString() : je.ToString() + : kvp.Value?.ToString() + }) + .ToList(); + // Create a new entity where the GraphQL type is explicitly defined as Mutation in the runtime config. 
Entity spMutationEntity = GraphQLTestHelpers.GenerateStoredProcedureEntity( graphQLTypeName: spMutationTypeName, graphQLOperation: GraphQLOperation.Mutation, - parameters: configSourcedParameters); + parameters: parameterMetadataList); // Create a new entity where the GraphQL type is explicitly defined as Query in the runtime config. Entity spQueryEntity = GraphQLTestHelpers.GenerateStoredProcedureEntity( graphQLTypeName: spQueryTypeName, graphQLOperation: GraphQLOperation.Query, - parameters: configSourcedParameters); + parameters: parameterMetadataList); // Create the GraphQL type for the stored procedure entity. string spQueryEntityName = "spquery"; diff --git a/src/Service.Tests/OpenApiDocumentor/ParameterValidationTests.cs b/src/Service.Tests/OpenApiDocumentor/ParameterValidationTests.cs index 7ef4e015c0..afed2b1f3e 100644 --- a/src/Service.Tests/OpenApiDocumentor/ParameterValidationTests.cs +++ b/src/Service.Tests/OpenApiDocumentor/ParameterValidationTests.cs @@ -144,10 +144,23 @@ public async Task TestInputParametersForStoredProcedures() string entityName = "UpdateBookTitle"; string objectName = "update_book_title"; - // Adding a parameter default value. - Dictionary parameterDefaults = new() { { "title", "Test Title" } }; + // Adding parameter metadata with a default value. 
+ List parameterMetadata = new() + { + new ParameterMetadata + { + Name = "id", + Required = false + }, + new ParameterMetadata + { + Name = "title", + Required = false, + Default = "Test Title" + } + }; - EntitySource entitySource = new(Object: objectName, EntitySourceType.StoredProcedure, parameterDefaults, null); + EntitySource entitySource = new(Object: objectName, Type: EntitySourceType.StoredProcedure, Parameters: parameterMetadata, KeyFields: null); OpenApiDocument openApiDocument = await GenerateOpenApiDocumentForGivenEntityAsync( entityName, entitySource, diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt index 4283eb432e..05fec88fff 100644 --- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt +++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt @@ -2838,10 +2838,18 @@ Source: { Object: insert_book, Type: stored-procedure, - Parameters: { - publisher_id: 1234, - title: randomX - } + Parameters: [ + { + Name: title, + Required: false, + Default: randomX + }, + { + Name: publisher_id, + Required: false, + Default: 1234 + } + ] }, GraphQL: { Singular: InsertBook, @@ -2956,10 +2964,18 @@ Source: { Object: update_book_title, Type: stored-procedure, - Parameters: { - id: 1, - title: Testing Tonight - } + Parameters: [ + { + Name: id, + Required: false, + Default: 1 + }, + { + Name: title, + Required: false, + Default: Testing Tonight + } + ] }, GraphQL: { Singular: UpdateBookTitle, @@ -2998,9 +3014,13 @@ Source: { Object: get_authors_history_by_first_name, Type: stored-procedure, - Parameters: { - firstName: Aaron - } + Parameters: [ + { + Name: firstName, + Required: false, + Default: Aaron + } + ] }, GraphQL: { Singular: SearchAuthorByFirstName, @@ -3039,10 +3059,18 @@ Source: { Object: 
insert_and_display_all_books_for_given_publisher, Type: stored-procedure, - Parameters: { - publisher_name: MyPublisher, - title: MyTitle - } + Parameters: [ + { + Name: title, + Required: false, + Default: MyTitle + }, + { + Name: publisher_name, + Required: false, + Default: MyPublisher + } + ] }, GraphQL: { Singular: InsertAndDisplayAllBooksUnderGivenPublisher, diff --git a/src/Service.Tests/UnitTests/SerializationDeserializationTests.cs b/src/Service.Tests/UnitTests/SerializationDeserializationTests.cs index 2b5e5bf3ba..44978cd6aa 100644 --- a/src/Service.Tests/UnitTests/SerializationDeserializationTests.cs +++ b/src/Service.Tests/UnitTests/SerializationDeserializationTests.cs @@ -428,7 +428,7 @@ private static void VerifyParameterDefinitionSerializationDeserialization(Parame { // test number of properties/fields defined in Column Definition int fields = typeof(ParameterDefinition).GetFields(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance).Length; - Assert.AreEqual(fields, 5); + Assert.AreEqual(fields, 9); // test values expectedParameterDefinition.Equals(deserializedParameterDefinition); } From c3515c8eca59904a8f202e1ba851cf8665589a44 Mon Sep 17 00:00:00 2001 From: Anusha Kolan Date: Fri, 24 Oct 2025 16:20:35 -0700 Subject: [PATCH 72/79] [MCP] Added fields section for entities and backward compatibility to mappings and key-fields. (#2917) ## Why make this change? This change introduces richer metadata for entity fields and keys in the Data API Builder configuration. The goal is to enable semantic metadata for documentation, client generation, and MCP tools, and to begin deprecating the legacy mappings and source.key-fields properties. ## What is this change? - Introduces a new `fields` property for entities in the config schema. 
- Each field supports: - `name`: database column name (required) - `alias`: exposed name for the field (optional) - `description`: field description (optional) - `primary-key`: whether the field is a primary key (optional) - Updates the JSON schema to enforce that `fields` cannot be used with legacy `mappings` or `source.key-fields`. - Updates CLI commands (`dab add`, `dab update`) to support specifying field alias, description, and primary-key. - Updates `dab validate` to warn if fields are missing and MCP is enabled. - Updates OpenAPI, GraphQL, and MCP `describe_entities` responses to include field descriptions. - Adds auto-migration logic to convert legacy `mappings` and `source.key-fields` to the new `fields` format when relevant CLI flags are used. - Maintains backward compatibility: legacy properties are still supported, but validation enforces that `fields` and legacy props are not mixed. ## How was this tested? All automated tests have been executed, updated as necessary, and completed successfully. Manually tested with following queries: 1. 
Update Entity with Field Mappings and Key Fields `dotnet C:\DAB\data-api-builder\src\out\cli\net8.0\Microsoft.DataApiBuilder.dll update BookAuthor --map "BookID:book_id,AuthorID:author_id" --source.key-fields "BookID,AuthorID" --config "C:\DAB\data-api-builder\src\Service\dab-config.json"` ------ Validate the Configuration `dotnet C:\DAB\data-api-builder\src\out\cli\net8.0\Microsoft.DataApiBuilder.dll validate -c "C:\DAB\data-api-builder\src\Service\dab-config.json"` ----- Update Entity Field Metadata (Primary Key) `dotnet C:\DAB\data-api-builder\src\out\cli\net8.0\Microsoft.DataApiBuilder.dll update Todo --fields.name owner_id --fields.primary-key True --config C:\DAB\data-api-builder\src\Service\dab-config.json` ---- Update Entity Key Fields Only `dotnet C:\DAB\data-api-builder\src\out\cli\net8.0\Microsoft.DataApiBuilder.dll update BookAuthor --source.key-fields "BookID"` ---- Sample new format `"fields": [ { "name": "id", "alias": "id", "description": "The unique identifier for a todo item", "primary-key": true }, { "name": "title", "alias": "title", "description": "The title of the todo item", "primary-key": false }, { "name": "completed", "alias": "completed", "description": "Indicates whether the todo item is completed", "primary-key": false }, { "name": "owner_id", "alias": "owner", "description": "Hello", "primary-key": true }, { "name": "position", "alias": "position", "description": "The position of the todo item in the list", "primary-key": false } ],` ----- GraphQL Introspection Example Query `{ __type(name: "Todo") { name description fields { name description } } }` Result `{ "data": { "__type": { "name": "Todo", "description": "Represents a todo item in the system", "fields": [ { "name": "id", "description": "The unique identifier for a todo item" }, { "name": "title", "description": "The title of the todo item" }, { "name": "completed", "description": "Indicates whether the todo item is completed" }, { "name": "owner", "description": "Hello" }, { 
"name": "position", "description": "The position of the todo item in the list" } ] } } }` ------ OpenAPI Schema Example `"Todo": { "type": "object", "properties": { "id": { "type": "string", "description": "The unique identifier for a todo item", "format": "" }, "title": { "type": "string", "description": "The title of the todo item", "format": "" }, "completed": { "type": "boolean", "description": "Indicates whether the todo item is completed", "format": "" }, "owner_id": { "type": "string", "description": "Hello", "format": "" }, "position": { "type": "number", "description": "The position of the todo item in the list", "format": "" } }, "description": "Represents a todo item in the system" }` --- schemas/dab.draft.schema.json | 32 ++- src/Cli.Tests/AddEntityTests.cs | 78 +++++- src/Cli.Tests/EndToEndTests.cs | 8 +- ...SourceObject_036a859f50ce167c.verified.txt | 16 +- ...SourceObject_103655d39b48d89f.verified.txt | 16 +- ...SourceObject_442649c7ef2176bd.verified.txt | 16 +- ...SourceObject_c26902b0e44f97cd.verified.txt | 10 + ....TestUpdateEntityWithMappings.verified.txt | 18 +- ...ithSpecialCharacterInMappings.verified.txt | 30 ++- ...ts.TestUpdateExistingMappings.verified.txt | 24 +- ...SourceObject_574e1995f787740f.verified.txt | 16 +- ...SourceObject_a13a9ca73b21f261.verified.txt | 16 +- ...SourceObject_a5ce76c8bea25cc8.verified.txt | 16 +- ...UpdateDatabaseSourceKeyFields.verified.txt | 16 +- src/Cli.Tests/UpdateEntityTests.cs | 8 +- src/Cli/Commands/AddOptions.cs | 8 + src/Cli/Commands/EntityOptions.cs | 20 ++ src/Cli/Commands/UpdateOptions.cs | 8 + src/Cli/ConfigGenerator.cs | 254 +++++++++++++++++- src/Config/ObjectModel/Entity.cs | 4 +- src/Config/ObjectModel/FieldMetadata.cs | 28 ++ .../MsSqlMetadataProvider.cs | 1 + .../MetadataProviders/SqlMetadataProvider.cs | 209 +++++++++++--- .../Services/OpenAPI/OpenApiDocumentor.cs | 28 +- .../Sql/SchemaConverter.cs | 14 +- .../Authorization/AuthorizationHelpers.cs | 1 + .../AuthorizationResolverUnitTests.cs | 
1 + .../DabCacheServiceIntegrationTests.cs | 1 + .../Caching/HealthEndpointCachingTests.cs | 1 + .../Configuration/ConfigurationTests.cs | 21 ++ .../Configuration/HealthEndpointRolesTests.cs | 1 + .../Configuration/HealthEndpointTests.cs | 1 + .../AuthorizationResolverHotReloadTests.cs | 2 + .../CosmosTests/MutationTests.cs | 2 + src/Service.Tests/CosmosTests/QueryTests.cs | 1 + .../SchemaGeneratorFactoryTests.cs | 3 + .../Helpers/GraphQLTestHelpers.cs | 4 + .../GraphQLBuilder/MutationBuilderTests.cs | 1 + .../Sql/SchemaConverterTests.cs | 1 + .../DocumentVerbosityTests.cs | 1 + .../ParameterValidationTests.cs | 1 + .../OpenApiDocumentor/PathValidationTests.cs | 1 + .../StoredProcedureGeneration.cs | 1 + ...tReadingRuntimeConfigForMsSql.verified.txt | 208 +++++++++----- ...tReadingRuntimeConfigForMySql.verified.txt | 192 +++++++++---- ...ingRuntimeConfigForPostgreSql.verified.txt | 181 +++++++++---- .../GraphQLQueryTests/GraphQLQueryTestBase.cs | 2 + .../MsSqlGraphQLQueryTests.cs | 1 + src/Service.Tests/TestHelper.cs | 13 + .../UnitTests/ConfigValidationUnitTests.cs | 5 + .../UnitTests/RequestValidatorUnitTests.cs | 2 +- .../UnitTests/SqlMetadataProviderUnitTests.cs | 2 + 52 files changed, 1248 insertions(+), 297 deletions(-) create mode 100644 src/Config/ObjectModel/FieldMetadata.cs diff --git a/schemas/dab.draft.schema.json b/schemas/dab.draft.schema.json index 13d005c3ac..80cfd953ad 100644 --- a/schemas/dab.draft.schema.json +++ b/schemas/dab.draft.schema.json @@ -797,6 +797,21 @@ } ] }, + "fields": { + "type": "array", + "description": "Defines the fields (columns) exposed for this entity, with metadata.", + "items": { + "type": "object", + "properties": { + "name": { "type": "string", "description": "Database column name." }, + "alias": { "type": "string", "description": "Exposed name for the field." }, + "description": { "type": "string", "description": "Field description." 
}, + "primary-key": { "type": "boolean", "description": "Indicates whether this field is a primary key." } + }, + "required": ["name"] + }, + "uniqueItems": true + }, "rest": { "oneOf": [ { @@ -1116,7 +1131,22 @@ } } }, - "required": ["source", "permissions"] + "required": ["source", "permissions"], + "allOf": [ + { + "if": { + "required": ["fields"] + }, + "then": { + "not": { + "anyOf": [ + { "required": ["mappings"] }, + { "properties": { "source": { "properties": { "key-fields": { } }, "required": ["key-fields"] } } } + ] + } + } + } + ] } } } diff --git a/src/Cli.Tests/AddEntityTests.cs b/src/Cli.Tests/AddEntityTests.cs index c12b0a2d4a..9386916f7f 100644 --- a/src/Cli.Tests/AddEntityTests.cs +++ b/src/Cli.Tests/AddEntityTests.cs @@ -49,7 +49,11 @@ public Task AddNewEntityWhenEntitiesEmpty() parametersNameCollection: null, parametersDescriptionCollection: null, parametersRequiredCollection: null, - parametersDefaultCollection: null + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); return ExecuteVerifyTest(options); } @@ -82,7 +86,11 @@ public Task AddNewEntityWhenEntitiesNotEmpty() parametersNameCollection: null, parametersDescriptionCollection: null, parametersRequiredCollection: null, - parametersDefaultCollection: null + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); string initialConfiguration = AddPropertiesToJson(INITIAL_CONFIG, GetFirstEntityConfiguration()); @@ -118,7 +126,11 @@ public void AddDuplicateEntity() parametersNameCollection: null, parametersDescriptionCollection: null, parametersRequiredCollection: null, - parametersDefaultCollection: null + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); string 
initialConfiguration = AddPropertiesToJson(INITIAL_CONFIG, GetFirstEntityConfiguration()); @@ -158,7 +170,11 @@ public Task AddEntityWithAnExistingNameButWithDifferentCase() parametersNameCollection: null, parametersDescriptionCollection: null, parametersRequiredCollection: null, - parametersDefaultCollection: null + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); string initialConfiguration = AddPropertiesToJson(INITIAL_CONFIG, GetFirstEntityConfiguration()); @@ -193,7 +209,11 @@ public Task AddEntityWithCachingEnabled() parametersNameCollection: null, parametersDescriptionCollection: null, parametersRequiredCollection: null, - parametersDefaultCollection: null + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); return ExecuteVerifyTest(options); @@ -234,7 +254,11 @@ public Task AddEntityWithPolicyAndFieldProperties( parametersNameCollection: null, parametersDescriptionCollection: null, parametersRequiredCollection: null, - parametersDefaultCollection: null + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); // Create VerifySettings and add all arguments to the method as parameters @@ -271,7 +295,11 @@ public Task AddNewEntityWhenEntitiesWithSourceAsStoredProcedure() parametersNameCollection: null, parametersDescriptionCollection: ["This is a test parameter description."], parametersRequiredCollection: null, - parametersDefaultCollection: null + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); return ExecuteVerifyTest(options); @@ -307,7 +335,11 @@ public Task 
TestAddStoredProcedureWithRestMethodsAndGraphQLOperations() parametersNameCollection: null, parametersDescriptionCollection: null, parametersRequiredCollection: null, - parametersDefaultCollection: null + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); return ExecuteVerifyTest(options); @@ -339,7 +371,11 @@ public void AddEntityWithDescriptionAndVerifyInConfig() parametersNameCollection: null, parametersDescriptionCollection: null, parametersRequiredCollection: null, - parametersDefaultCollection: null + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); string config = INITIAL_CONFIG; @@ -398,7 +434,11 @@ public void TestAddNewEntityWithSourceObjectHavingValidFields( parametersNameCollection: null, parametersDescriptionCollection: null, parametersRequiredCollection: null, - parametersDefaultCollection: null + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); RuntimeConfigLoader.TryParseConfig(INITIAL_CONFIG, out RuntimeConfig? 
runtimeConfig); @@ -462,7 +502,11 @@ public Task TestAddNewSpWithDifferentRestAndGraphQLOptions( parametersNameCollection: null, parametersDescriptionCollection: null, parametersRequiredCollection: null, - parametersDefaultCollection: null + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); VerifySettings settings = new(); @@ -502,7 +546,11 @@ public void TestAddStoredProcedureWithConflictingRestGraphQLOptions( parametersNameCollection: null, parametersDescriptionCollection: null, parametersRequiredCollection: null, - parametersDefaultCollection: null + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); RuntimeConfigLoader.TryParseConfig(INITIAL_CONFIG, out RuntimeConfig? runtimeConfig); @@ -545,7 +593,11 @@ public void TestAddEntityPermissionWithInvalidOperation(IEnumerable perm parametersNameCollection: null, parametersDescriptionCollection: null, parametersRequiredCollection: null, - parametersDefaultCollection: null + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); RuntimeConfigLoader.TryParseConfig(INITIAL_CONFIG, out RuntimeConfig? 
runtimeConfig); diff --git a/src/Cli.Tests/EndToEndTests.cs b/src/Cli.Tests/EndToEndTests.cs index 28fecfb2d5..7fe017501f 100644 --- a/src/Cli.Tests/EndToEndTests.cs +++ b/src/Cli.Tests/EndToEndTests.cs @@ -771,9 +771,11 @@ public void TestUpdateEntity() CollectionAssert.AreEqual(new string[] { "todo_id" }, relationship.LinkingSourceFields); CollectionAssert.AreEqual(new string[] { "id" }, relationship.LinkingTargetFields); - Assert.IsNotNull(entity.Mappings); - Assert.AreEqual("identity", entity.Mappings["id"]); - Assert.AreEqual("Company Name", entity.Mappings["name"]); + Assert.IsNotNull(entity.Fields); + Assert.AreEqual(2, entity.Fields.Count); + Assert.AreEqual(entity.Fields[0].Alias, "identity"); + Assert.AreEqual(entity.Fields[1].Alias, "Company Name"); + Assert.IsNull(entity.Mappings); } /// diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_036a859f50ce167c.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_036a859f50ce167c.verified.txt index a78465898d..260eecd0c9 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_036a859f50ce167c.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_036a859f50ce167c.verified.txt @@ -27,12 +27,18 @@ MyEntity: { Source: { Object: s001.book, - Type: View, - KeyFields: [ - col1, - col2 - ] + Type: View }, + Fields: [ + { + Name: col1, + PrimaryKey: true + }, + { + Name: col2, + PrimaryKey: true + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_103655d39b48d89f.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_103655d39b48d89f.verified.txt index d3ed32cf42..80f61e17ac 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_103655d39b48d89f.verified.txt +++ 
b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_103655d39b48d89f.verified.txt @@ -27,12 +27,18 @@ MyEntity: { Source: { Object: s001.book, - Type: Table, - KeyFields: [ - id, - name - ] + Type: Table }, + Fields: [ + { + Name: id, + PrimaryKey: true + }, + { + Name: name, + PrimaryKey: true + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_442649c7ef2176bd.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_442649c7ef2176bd.verified.txt index a78465898d..260eecd0c9 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_442649c7ef2176bd.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_442649c7ef2176bd.verified.txt @@ -27,12 +27,18 @@ MyEntity: { Source: { Object: s001.book, - Type: View, - KeyFields: [ - col1, - col2 - ] + Type: View }, + Fields: [ + { + Name: col1, + PrimaryKey: true + }, + { + Name: col2, + PrimaryKey: true + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_c26902b0e44f97cd.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_c26902b0e44f97cd.verified.txt index 2b4a7b8518..2d00804545 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_c26902b0e44f97cd.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_c26902b0e44f97cd.verified.txt @@ -29,6 +29,16 @@ Object: s001.book, Type: stored-procedure }, + Fields: [ + { + Name: id, + PrimaryKey: true + }, + { + Name: name, + PrimaryKey: true + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateEntityWithMappings.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateEntityWithMappings.verified.txt index 
63ba7e2898..54d9077f1c 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateEntityWithMappings.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateEntityWithMappings.verified.txt @@ -33,6 +33,18 @@ Object: MyTable, Type: Table }, + Fields: [ + { + Name: id, + Alias: Identity, + PrimaryKey: false + }, + { + Name: name, + Alias: Company Name, + PrimaryKey: false + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, @@ -53,11 +65,7 @@ } ] } - ], - Mappings: { - id: Identity, - name: Company Name - } + ] } } ] diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateEntityWithSpecialCharacterInMappings.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateEntityWithSpecialCharacterInMappings.verified.txt index 8dcadec7b1..1906f87425 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateEntityWithSpecialCharacterInMappings.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateEntityWithSpecialCharacterInMappings.verified.txt @@ -33,6 +33,28 @@ Object: MyTable, Type: Table }, + Fields: [ + { + Name: Macaroni, + Alias: Mac & Cheese, + PrimaryKey: false + }, + { + Name: region, + Alias: United State's Region, + PrimaryKey: false + }, + { + Name: russian, + Alias: русский, + PrimaryKey: false + }, + { + Name: chinese, + Alias: 中文, + PrimaryKey: false + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, @@ -53,13 +75,7 @@ } ] } - ], - Mappings: { - chinese: 中文, - Macaroni: Mac & Cheese, - region: United State's Region, - russian: русский - } + ] } } ] diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateExistingMappings.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateExistingMappings.verified.txt index 13e994a5cc..56ce5b55c3 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateExistingMappings.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateExistingMappings.verified.txt @@ -33,6 +33,23 @@ Object: MyTable, Type: Table 
}, + Fields: [ + { + Name: name, + Alias: Company Name, + PrimaryKey: false + }, + { + Name: addr, + Alias: Company Address, + PrimaryKey: false + }, + { + Name: number, + Alias: Contact Details, + PrimaryKey: false + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, @@ -53,12 +70,7 @@ } ] } - ], - Mappings: { - addr: Company Address, - name: Company Name, - number: Contact Details - } + ] } } ] diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_574e1995f787740f.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_574e1995f787740f.verified.txt index a78465898d..260eecd0c9 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_574e1995f787740f.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_574e1995f787740f.verified.txt @@ -27,12 +27,18 @@ MyEntity: { Source: { Object: s001.book, - Type: View, - KeyFields: [ - col1, - col2 - ] + Type: View }, + Fields: [ + { + Name: col1, + PrimaryKey: true + }, + { + Name: col2, + PrimaryKey: true + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_a13a9ca73b21f261.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_a13a9ca73b21f261.verified.txt index d3ed32cf42..80f61e17ac 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_a13a9ca73b21f261.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_a13a9ca73b21f261.verified.txt @@ -27,12 +27,18 @@ MyEntity: { Source: { Object: s001.book, - Type: Table, - KeyFields: [ - id, - name - ] + Type: Table }, + Fields: [ + { + Name: id, + PrimaryKey: true + }, + { + Name: name, + PrimaryKey: true + } + ], GraphQL: { Singular: MyEntity, 
Plural: MyEntities, diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_a5ce76c8bea25cc8.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_a5ce76c8bea25cc8.verified.txt index d3ed32cf42..80f61e17ac 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_a5ce76c8bea25cc8.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_a5ce76c8bea25cc8.verified.txt @@ -27,12 +27,18 @@ MyEntity: { Source: { Object: s001.book, - Type: Table, - KeyFields: [ - id, - name - ] + Type: Table }, + Fields: [ + { + Name: id, + PrimaryKey: true + }, + { + Name: name, + PrimaryKey: true + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceKeyFields.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceKeyFields.verified.txt index 697074cedf..544a3484f9 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceKeyFields.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceKeyFields.verified.txt @@ -27,12 +27,18 @@ MyEntity: { Source: { Object: s001.book, - Type: Table, - KeyFields: [ - col1, - col2 - ] + Type: Table }, + Fields: [ + { + Name: col1, + PrimaryKey: true + }, + { + Name: col2, + PrimaryKey: true + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, diff --git a/src/Cli.Tests/UpdateEntityTests.cs b/src/Cli.Tests/UpdateEntityTests.cs index 9acb6c6f81..3a106c0adc 100644 --- a/src/Cli.Tests/UpdateEntityTests.cs +++ b/src/Cli.Tests/UpdateEntityTests.cs @@ -1030,6 +1030,7 @@ public void EnsureFailure_AddRelationshipToEntityWithDisabledGraphQL() Entity sampleEntity1 = new( Source: new("SOURCE1", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new("SOURCE1", "SOURCE1s"), Permissions: new[] { 
permissionForEntity }, @@ -1040,6 +1041,7 @@ public void EnsureFailure_AddRelationshipToEntityWithDisabledGraphQL() // entity with graphQL disabled Entity sampleEntity2 = new( Source: new("SOURCE2", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new("SOURCE2", "SOURCE2s", false), Permissions: new[] { permissionForEntity }, @@ -1191,7 +1193,11 @@ private static UpdateOptions GenerateBaseUpdateOptions( parametersNameCollection: null, parametersDescriptionCollection: null, parametersRequiredCollection: null, - parametersDefaultCollection: null + parametersDefaultCollection: null, + fieldsNameCollection: null, + fieldsAliasCollection: null, + fieldsDescriptionCollection: null, + fieldsPrimaryKeyCollection: null ); } diff --git a/src/Cli/Commands/AddOptions.cs b/src/Cli/Commands/AddOptions.cs index 635a438082..b7d9fbeb08 100644 --- a/src/Cli/Commands/AddOptions.cs +++ b/src/Cli/Commands/AddOptions.cs @@ -39,6 +39,10 @@ public AddOptions( IEnumerable? parametersDescriptionCollection, IEnumerable? parametersRequiredCollection, IEnumerable? parametersDefaultCollection, + IEnumerable? fieldsNameCollection, + IEnumerable? fieldsAliasCollection, + IEnumerable? fieldsDescriptionCollection, + IEnumerable? fieldsPrimaryKeyCollection, string? config ) : base( @@ -61,6 +65,10 @@ public AddOptions( parametersDescriptionCollection, parametersRequiredCollection, parametersDefaultCollection, + fieldsNameCollection, + fieldsAliasCollection, + fieldsDescriptionCollection, + fieldsPrimaryKeyCollection, config ) { diff --git a/src/Cli/Commands/EntityOptions.cs b/src/Cli/Commands/EntityOptions.cs index a11d6fe450..7f26816800 100644 --- a/src/Cli/Commands/EntityOptions.cs +++ b/src/Cli/Commands/EntityOptions.cs @@ -30,6 +30,10 @@ public EntityOptions( IEnumerable? parametersDescriptionCollection, IEnumerable? parametersRequiredCollection, IEnumerable? parametersDefaultCollection, + IEnumerable? fieldsNameCollection, + IEnumerable? 
fieldsAliasCollection, + IEnumerable? fieldsDescriptionCollection, + IEnumerable? fieldsPrimaryKeyCollection, string? config ) : base(config) @@ -53,6 +57,10 @@ public EntityOptions( ParametersDescriptionCollection = parametersDescriptionCollection; ParametersRequiredCollection = parametersRequiredCollection; ParametersDefaultCollection = parametersDefaultCollection; + FieldsNameCollection = fieldsNameCollection; + FieldsAliasCollection = fieldsAliasCollection; + FieldsDescriptionCollection = fieldsDescriptionCollection; + FieldsPrimaryKeyCollection = fieldsPrimaryKeyCollection; } // Entity is required but we have made required as false to have custom error message (more user friendly), if not provided. @@ -112,5 +120,17 @@ public EntityOptions( [Option("parameters.default", Required = false, Separator = ',', HelpText = "Comma-separated list of parameter default values for stored procedure.")] public IEnumerable? ParametersDefaultCollection { get; } + + [Option("fields.name", Required = false, Separator = ',', HelpText = "Name of the database column to expose as a field.")] + public IEnumerable? FieldsNameCollection { get; } + + [Option("fields.alias", Required = false, Separator = ',', HelpText = "Alias for the field.")] + public IEnumerable? FieldsAliasCollection { get; } + + [Option("fields.description", Required = false, Separator = ',', HelpText = "Description for the field.")] + public IEnumerable? FieldsDescriptionCollection { get; } + + [Option("fields.primary-key", Required = false, Separator = ',', HelpText = "Set this field as a primary key.")] + public IEnumerable? FieldsPrimaryKeyCollection { get; } } } diff --git a/src/Cli/Commands/UpdateOptions.cs b/src/Cli/Commands/UpdateOptions.cs index 2c1c0e74c7..fe1664c5bb 100644 --- a/src/Cli/Commands/UpdateOptions.cs +++ b/src/Cli/Commands/UpdateOptions.cs @@ -47,6 +47,10 @@ public UpdateOptions( IEnumerable? parametersDescriptionCollection, IEnumerable? parametersRequiredCollection, IEnumerable? 
parametersDefaultCollection, + IEnumerable? fieldsNameCollection, + IEnumerable? fieldsAliasCollection, + IEnumerable? fieldsDescriptionCollection, + IEnumerable? fieldsPrimaryKeyCollection, string? config) : base(entity, sourceType, @@ -67,6 +71,10 @@ public UpdateOptions( parametersDescriptionCollection, parametersRequiredCollection, parametersDefaultCollection, + fieldsNameCollection, + fieldsAliasCollection, + fieldsDescriptionCollection, + fieldsPrimaryKeyCollection, config) { Source = source; diff --git a/src/Cli/ConfigGenerator.cs b/src/Cli/ConfigGenerator.cs index b54f87b2fd..9a56f83c4a 100644 --- a/src/Cli/ConfigGenerator.cs +++ b/src/Cli/ConfigGenerator.cs @@ -453,6 +453,7 @@ public static bool TryAddNewEntity(AddOptions options, RuntimeConfig initialRunt // Create new entity. Entity entity = new( Source: source, + Fields: null, Rest: restOptions, GraphQL: graphqlOptions, Permissions: permissionSettings, @@ -1682,24 +1683,182 @@ public static bool TryUpdateExistingEntity(UpdateOptions options, RuntimeConfig updatedRelationships[options.Relationship] = new_relationship; } - if (options.Map is not null && options.Map.Any()) + bool hasFields = options.FieldsNameCollection != null && options.FieldsNameCollection.Count() > 0; + bool hasMappings = options.Map != null && options.Map.Any(); + bool hasKeyFields = options.SourceKeyFields != null && options.SourceKeyFields.Any(); + + List? fields; + if (hasFields) { - // Parsing mappings dictionary from Collection - if (!TryParseMappingDictionary(options.Map, out updatedMappings)) + if (hasMappings && hasKeyFields) + { + _logger.LogError("Entity cannot define 'fields', 'mappings', and 'key-fields' together. Please use only one."); + return false; + } + + if (hasMappings) + { + _logger.LogError("Entity cannot define both 'fields' and 'mappings'. Please use only one."); + return false; + } + + if (hasKeyFields) { + _logger.LogError("Entity cannot define both 'fields' and 'key-fields'. 
Please use only one."); return false; } + + // Merge updated fields with existing fields + List existingFields = entity.Fields?.ToList() ?? []; + List updatedFieldsList = ComposeFieldsFromOptions(options); + Dictionary updatedFieldsDict = updatedFieldsList.ToDictionary(f => f.Name, f => f); + List mergedFields = []; + + foreach (FieldMetadata field in existingFields) + { + if (updatedFieldsDict.TryGetValue(field.Name, out FieldMetadata? updatedField)) + { + mergedFields.Add(new FieldMetadata + { + Name = updatedField.Name, + Alias = updatedField.Alias ?? field.Alias, + Description = updatedField.Description ?? field.Description, + PrimaryKey = updatedField.PrimaryKey + }); + updatedFieldsDict.Remove(field.Name); // Remove so only new fields remain + } + else + { + mergedFields.Add(field); // Keep existing field + } + } + + // Add any new fields that didn't exist before + mergedFields.AddRange(updatedFieldsDict.Values); + + fields = mergedFields; + + // If user didn't mark any PK in fields, carry over existing source key-fields + if (!fields.Any(f => f.PrimaryKey) && updatedSource.KeyFields is { Length: > 0 }) + { + foreach (string k in updatedSource.KeyFields) + { + FieldMetadata? f = fields.FirstOrDefault(f => string.Equals(f.Name, k, StringComparison.OrdinalIgnoreCase)); + if (f is not null) + { + f.PrimaryKey = true; + } + else + { + fields.Add(new FieldMetadata { Name = k, PrimaryKey = true }); + } + } + } + + // Remove legacy props if fields present + updatedSource = updatedSource with { KeyFields = null }; + updatedMappings = null; + } + else if (hasMappings || hasKeyFields) + { + // If mappings or key-fields are provided, convert them to fields and remove legacy props + // Start with existing fields + List existingFields = entity.Fields?.ToList() ?? 
new List(); + + // Build a dictionary for quick lookup and merging + Dictionary fieldDict = existingFields + .ToDictionary(f => f.Name, StringComparer.OrdinalIgnoreCase); + + // Parse mappings from options + if (hasMappings) + { + if (options.Map is null || !TryParseMappingDictionary(options.Map, out updatedMappings)) + { + _logger.LogError("Failed to parse mappings from --map option."); + return false; + } + + foreach (KeyValuePair mapping in updatedMappings) + { + if (fieldDict.TryGetValue(mapping.Key, out FieldMetadata? existing) && existing != null) + { + // Update alias, preserve PK and description + existing.Alias = mapping.Value ?? existing.Alias; + } + else + { + // New field from mapping + fieldDict[mapping.Key] = new FieldMetadata + { + Name = mapping.Key, + Alias = mapping.Value + }; + } + } + } + + // Always carry over existing PKs on the entity/update, not only when the user re-supplies --source.key-fields. + string[]? existingKeys = updatedSource.KeyFields; + if (existingKeys is not null && existingKeys.Length > 0) + { + foreach (string key in existingKeys) + { + if (fieldDict.TryGetValue(key, out FieldMetadata? pkField) && pkField != null) + { + pkField.PrimaryKey = true; + } + else + { + fieldDict[key] = new FieldMetadata { Name = key, PrimaryKey = true }; + } + } + } + + // Final merged list, no duplicates + fields = fieldDict.Values.ToList(); + + // Remove legacy props only after we have safely embedded PKs into fields. + updatedSource = updatedSource with { KeyFields = null }; + updatedMappings = null; + } + else if (!hasFields && !hasMappings && !hasKeyFields && entity.Source.KeyFields?.Length > 0) + { + // If no fields, mappings, or key-fields are provided with update command, use the entity's key-fields added using add command. 
+ fields = entity.Source.KeyFields.Select(k => new FieldMetadata + { + Name = k, + PrimaryKey = true + }).ToList(); + + updatedSource = updatedSource with { KeyFields = null }; + updatedMappings = null; + } + else + { + fields = entity.Fields?.ToList() ?? new List(); + if (entity.Mappings is not null || entity.Source?.KeyFields is not null) + { + _logger.LogWarning("Using legacy 'mappings' and 'key-fields' properties. Consider using 'fields' for new entities."); + } + } + + if (!ValidateFields(fields, out string errorMessage)) + { + _logger.LogError(errorMessage); + return false; } Entity updatedEntity = new( Source: updatedSource, + Fields: fields, Rest: updatedRestDetails, GraphQL: updatedGraphQLDetails, Permissions: updatedPermissions, Relationships: updatedRelationships, Mappings: updatedMappings, Cache: updatedCacheOptions, - Description: string.IsNullOrWhiteSpace(options.Description) ? entity.Description : options.Description); + Description: string.IsNullOrWhiteSpace(options.Description) ? entity.Description : options.Description + ); IDictionary entities = new Dictionary(initialConfig.Entities.Entities) { [options.Entity] = updatedEntity @@ -2220,7 +2379,29 @@ public static bool IsConfigValid(ValidateOptions options, FileSystemRuntimeConfi ILogger runtimeConfigValidatorLogger = LoggerFactoryForCli.CreateLogger(); RuntimeConfigValidator runtimeConfigValidator = new(runtimeConfigProvider, fileSystem, runtimeConfigValidatorLogger, true); - return runtimeConfigValidator.TryValidateConfig(runtimeConfigFile, LoggerFactoryForCli).Result; + bool isValid = runtimeConfigValidator.TryValidateConfig(runtimeConfigFile, LoggerFactoryForCli).Result; + + // Additional validation: warn if fields are missing and MCP is enabled + if (isValid) + { + if (runtimeConfigProvider.TryGetConfig(out RuntimeConfig? 
config) && config is not null) + { + bool mcpEnabled = config.Runtime?.Mcp?.Enabled == true; + if (mcpEnabled) + { + foreach (KeyValuePair entity in config.Entities) + { + if (entity.Value.Fields == null || !entity.Value.Fields.Any()) + { + _logger.LogWarning($"Entity '{entity.Key}' is missing 'fields' definition while MCP is enabled. " + + "It's recommended to define fields explicitly to ensure optimal performance with MCP."); + } + } + } + } + } + + return isValid; } /// @@ -2616,5 +2797,68 @@ private static bool TryUpdateConfiguredAzureKeyVaultOptions( return false; } } + + /// + /// Helper to build a list of FieldMetadata from UpdateOptions. + /// + private static List ComposeFieldsFromOptions(UpdateOptions options) + { + List fields = []; + if (options.FieldsNameCollection != null) + { + List names = options.FieldsNameCollection.ToList(); + List aliases = options.FieldsAliasCollection?.ToList() ?? []; + List descriptions = options.FieldsDescriptionCollection?.ToList() ?? []; + List keys = options.FieldsPrimaryKeyCollection?.ToList() ?? []; + + for (int i = 0; i < names.Count; i++) + { + fields.Add(new FieldMetadata + { + Name = names[i], + Alias = aliases.Count > i ? aliases[i] : null, + Description = descriptions.Count > i ? descriptions[i] : null, + PrimaryKey = keys.Count > i && keys[i], + }); + } + } + + return fields; + } + + /// + /// Validates that the provided fields are valid against the database columns and constraints. 
+ /// + private static bool ValidateFields( + List fields, + out string errorMessage) + { + errorMessage = string.Empty; + HashSet aliases = []; + HashSet keys = []; + + foreach (FieldMetadata field in fields) + { + if (!string.IsNullOrEmpty(field.Alias)) + { + if (!aliases.Add(field.Alias)) + { + errorMessage = $"Alias '{field.Alias}' is not unique within the entity."; + return false; + } + } + + if (field.PrimaryKey) + { + if (!keys.Add(field.Name)) + { + errorMessage = $"Duplicate key field '{field.Name}' detected."; + return false; + } + } + } + + return true; + } } } diff --git a/src/Config/ObjectModel/Entity.cs b/src/Config/ObjectModel/Entity.cs index 4b56e0478c..c9f247e0f6 100644 --- a/src/Config/ObjectModel/Entity.cs +++ b/src/Config/ObjectModel/Entity.cs @@ -30,13 +30,13 @@ public record Entity public const string PROPERTY_METHODS = "methods"; public string? Description { get; init; } public EntitySource Source { get; init; } + public List? Fields { get; init; } public EntityGraphQLOptions GraphQL { get; init; } public EntityRestOptions Rest { get; init; } public EntityPermission[] Permissions { get; init; } public Dictionary? Mappings { get; init; } public Dictionary? Relationships { get; init; } public EntityCacheOptions? Cache { get; init; } - public EntityHealthCheckConfig? Health { get; init; } [JsonIgnore] @@ -46,6 +46,7 @@ public record Entity public Entity( EntitySource Source, EntityGraphQLOptions GraphQL, + List? Fields, EntityRestOptions Rest, EntityPermission[] Permissions, Dictionary? 
Mappings, @@ -57,6 +58,7 @@ public Entity( { this.Health = Health; this.Source = Source; + this.Fields = Fields; this.GraphQL = GraphQL; this.Rest = Rest; this.Permissions = Permissions; diff --git a/src/Config/ObjectModel/FieldMetadata.cs b/src/Config/ObjectModel/FieldMetadata.cs new file mode 100644 index 0000000000..118f38c0c2 --- /dev/null +++ b/src/Config/ObjectModel/FieldMetadata.cs @@ -0,0 +1,28 @@ +namespace Azure.DataApiBuilder.Config.ObjectModel +{ + /// + /// Represents metadata for a field in an entity. + /// + public class FieldMetadata + { + /// + /// The name of the field (must match a database column). + /// + public string Name { get; set; } = string.Empty; + + /// + /// The alias for the field (must be unique per entity). + /// + public string? Alias { get; set; } + + /// + /// The description for the field. + /// + public string? Description { get; set; } + + /// + /// Whether this field is a key (must be unique). + /// + public bool PrimaryKey { get; set; } + } +} diff --git a/src/Core/Services/MetadataProviders/MsSqlMetadataProvider.cs b/src/Core/Services/MetadataProviders/MsSqlMetadataProvider.cs index 4acb52f21b..7d02798427 100644 --- a/src/Core/Services/MetadataProviders/MsSqlMetadataProvider.cs +++ b/src/Core/Services/MetadataProviders/MsSqlMetadataProvider.cs @@ -253,6 +253,7 @@ protected override void PopulateMetadataForLinkingObject( // GraphQL is enabled/disabled. The linking object definitions are not exposed in the schema to the user. 
Entity linkingEntity = new( Source: new EntitySource(Type: EntitySourceType.Table, Object: linkingObject, Parameters: null, KeyFields: null), + Fields: null, Rest: new(Array.Empty(), Enabled: false), GraphQL: new(Singular: linkingEntityName, Plural: linkingEntityName, Enabled: false), Permissions: Array.Empty(), diff --git a/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs b/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs index 75fff1b932..8553e08136 100644 --- a/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs +++ b/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs @@ -217,13 +217,33 @@ public StoredProcedureDefinition GetStoredProcedureDefinition(string entityName) /// public bool TryGetExposedColumnName(string entityName, string backingFieldName, [NotNullWhen(true)] out string? name) { - Dictionary? backingColumnsToExposedNamesMap; - if (!EntityBackingColumnsToExposedNames.TryGetValue(entityName, out backingColumnsToExposedNamesMap)) + if (!EntityBackingColumnsToExposedNames.TryGetValue(entityName, out Dictionary? backingToExposed)) { throw new KeyNotFoundException($"Initialization of metadata incomplete for entity: {entityName}"); } - return backingColumnsToExposedNamesMap.TryGetValue(backingFieldName, out name); + if (backingToExposed.TryGetValue(backingFieldName, out name)) + { + return true; + } + + if (_entities.TryGetValue(entityName, out Entity? entityDefinition) && entityDefinition.Fields is not null) + { + // Find the field by backing name and use its Alias if present. + FieldMetadata? 
matched = entityDefinition + .Fields + .FirstOrDefault(f => f.Name.Equals(backingFieldName, StringComparison.OrdinalIgnoreCase) + && !string.IsNullOrEmpty(f.Alias)); + + if (matched is not null) + { + name = matched.Alias!; + return true; + } + } + + name = null; + return false; } /// @@ -235,6 +255,23 @@ public bool TryGetBackingColumn(string entityName, string field, [NotNullWhen(tr throw new KeyNotFoundException($"Initialization of metadata incomplete for entity: {entityName}"); } + if (exposedNamesToBackingColumnsMap.TryGetValue(field, out name)) + { + return true; + } + + if (_entities.TryGetValue(entityName, out Entity? entityDefinition) && entityDefinition.Fields is not null) + { + FieldMetadata? matchedField = entityDefinition.Fields.FirstOrDefault(f => + f.Alias != null && f.Alias.Equals(field, StringComparison.OrdinalIgnoreCase)); + + if (matchedField is not null) + { + name = matchedField.Name; + return true; + } + } + return exposedNamesToBackingColumnsMap.TryGetValue(field, out name); } @@ -1099,22 +1136,81 @@ await PopulateResultSetDefinitionsForStoredProcedureAsync( } else if (entitySourceType is EntitySourceType.Table) { + List pkFields = new(); + + // Resolve PKs from fields first + if (entity.Fields is not null && entity.Fields.Any()) + { + pkFields = entity.Fields + .Where(f => f.PrimaryKey) + .Select(f => f.Name) + .ToList(); + } + + // Fallback to key-fields from config + if (pkFields.Count == 0 && entity.Source.KeyFields is not null) + { + pkFields = entity.Source.KeyFields.ToList(); + } + + // If still empty, fallback to DB schema PKs + if (pkFields.Count == 0) + { + DataTable dataTable = await GetTableWithSchemaFromDataSetAsync( + entityName, + GetSchemaName(entityName), + GetDatabaseObjectName(entityName)); + + pkFields = dataTable.PrimaryKey.Select(pk => pk.ColumnName).ToList(); + } + + // Final safeguard + pkFields ??= new List(); + await PopulateSourceDefinitionAsync( entityName, GetSchemaName(entityName), 
GetDatabaseObjectName(entityName), GetSourceDefinition(entityName), - entity.Source.KeyFields); + pkFields); } else { + List pkFields = new(); + + // Resolve PKs from fields first + if (entity.Fields is not null && entity.Fields.Any()) + { + pkFields = entity.Fields + .Where(f => f.PrimaryKey) + .Select(f => f.Name) + .ToList(); + } + + // Fallback to key-fields from config + if (pkFields.Count == 0 && entity.Source.KeyFields is not null) + { + pkFields = entity.Source.KeyFields.ToList(); + } + + // If still empty, fallback to DB schema PKs + if (pkFields.Count == 0) + { + DataTable dataTable = await GetTableWithSchemaFromDataSetAsync( + entityName, + GetSchemaName(entityName), + GetDatabaseObjectName(entityName)); + + pkFields = dataTable.PrimaryKey.Select(pk => pk.ColumnName).ToList(); + } + ViewDefinition viewDefinition = (ViewDefinition)GetSourceDefinition(entityName); await PopulateSourceDefinitionAsync( entityName, GetSchemaName(entityName), GetDatabaseObjectName(entityName), viewDefinition, - entity.Source.KeyFields); + pkFields); } } catch (Exception e) @@ -1215,19 +1311,66 @@ private void GenerateExposedToBackingColumnMapUtil(string entityName) { try { - // For StoredProcedures, result set definitions become the column definition. - Dictionary? mapping = GetMappingForEntity(entityName); - EntityBackingColumnsToExposedNames[entityName] = mapping is not null ? mapping : new(); - EntityExposedNamesToBackingColumnNames[entityName] = EntityBackingColumnsToExposedNames[entityName].ToDictionary(x => x.Value, x => x.Key); + // Build case-insensitive maps per entity. + Dictionary backToExposed = new(StringComparer.OrdinalIgnoreCase); + Dictionary exposedToBack = new(StringComparer.OrdinalIgnoreCase); + + // Pull definitions. + _entities.TryGetValue(entityName, out Entity? 
entity); SourceDefinition sourceDefinition = GetSourceDefinition(entityName); - foreach (string columnName in sourceDefinition.Columns.Keys) + + // 1) Prefer new-style fields (backing = f.Name, exposed = f.Alias ?? f.Name) + if (entity?.Fields is not null) { - if (!EntityExposedNamesToBackingColumnNames[entityName].ContainsKey(columnName) && !EntityBackingColumnsToExposedNames[entityName].ContainsKey(columnName)) + foreach (FieldMetadata f in entity.Fields) { - EntityBackingColumnsToExposedNames[entityName].Add(columnName, columnName); - EntityExposedNamesToBackingColumnNames[entityName].Add(columnName, columnName); + string backing = f.Name; + string exposed = string.IsNullOrWhiteSpace(f.Alias) ? backing : f.Alias!; + backToExposed[backing] = exposed; + exposedToBack[exposed] = backing; + } + } + + // 2) Overlay legacy mappings (backing -> alias) only where we don't already have an alias from fields. + if (entity?.Mappings is not null) + { + foreach (KeyValuePair kvp in entity.Mappings) + { + string backing = kvp.Key; + string exposed = kvp.Value; + + // If fields already provided an alias for this backing column, keep fields precedence. + if (!backToExposed.ContainsKey(backing)) + { + backToExposed[backing] = exposed; + } + + // Always ensure reverse map is coherent (fields still take precedence if the same exposed already exists). + if (!exposedToBack.ContainsKey(exposed)) + { + exposedToBack[exposed] = backing; + } } } + + // 3) Ensure all physical columns are mapped (identity default). 
+ foreach (string backing in sourceDefinition.Columns.Keys) + { + if (!backToExposed.ContainsKey(backing)) + { + backToExposed[backing] = backing; + } + + string exposed = backToExposed[backing]; + if (!exposedToBack.ContainsKey(exposed)) + { + exposedToBack[exposed] = backing; + } + } + + // 4) Store maps for runtime + EntityBackingColumnsToExposedNames[entityName] = backToExposed; + EntityExposedNamesToBackingColumnNames[entityName] = exposedToBack; } catch (Exception e) { @@ -1235,18 +1378,6 @@ private void GenerateExposedToBackingColumnMapUtil(string entityName) } } - /// - /// Obtains the underlying mapping that belongs - /// to a given entity. - /// - /// entity whose map we get. - /// mapping belonging to entity. - private Dictionary? GetMappingForEntity(string entityName) - { - _entities.TryGetValue(entityName, out Entity? entity); - return entity?.Mappings; - } - /// /// Initialize OData parser by building OData model. /// The parser will be used for parsing filter clause and order by clause. @@ -1269,19 +1400,9 @@ private async Task PopulateSourceDefinitionAsync( string schemaName, string tableName, SourceDefinition sourceDefinition, - string[]? runtimeConfigKeyFields) + List pkFields) { - DataTable dataTable = await GetTableWithSchemaFromDataSetAsync(entityName, schemaName, tableName); - - List primaryKeys = new(dataTable.PrimaryKey); - if (runtimeConfigKeyFields is null || runtimeConfigKeyFields.Length == 0) - { - sourceDefinition.PrimaryKey = new(primaryKeys.Select(primaryKey => primaryKey.ColumnName)); - } - else - { - sourceDefinition.PrimaryKey = new(runtimeConfigKeyFields); - } + sourceDefinition.PrimaryKey = [.. 
pkFields]; if (sourceDefinition.PrimaryKey.Count == 0) { @@ -1297,6 +1418,7 @@ private async Task PopulateSourceDefinitionAsync( await PopulateTriggerMetadataForTable(entityName, schemaName, tableName, sourceDefinition); } + DataTable dataTable = await GetTableWithSchemaFromDataSetAsync(entityName, schemaName, tableName); using DataTableReader reader = new(dataTable); DataTable schemaTable = reader.GetSchemaTable(); RuntimeConfig runtimeConfig = _runtimeConfigProvider.GetConfig(); @@ -1404,12 +1526,21 @@ public static bool IsGraphQLReservedName(Entity entity, string databaseColumnNam if (entity.GraphQL is null || (entity.GraphQL.Enabled)) { if (entity.Mappings is not null - && entity.Mappings.TryGetValue(databaseColumnName, out string? fieldAlias) - && !string.IsNullOrWhiteSpace(fieldAlias)) + && entity.Mappings.TryGetValue(databaseColumnName, out string? fieldAlias) + && !string.IsNullOrWhiteSpace(fieldAlias)) { databaseColumnName = fieldAlias; } + if (entity.Fields is not null) + { + FieldMetadata? fieldMeta = entity.Fields.FirstOrDefault(f => f.Name == databaseColumnName); + if (fieldMeta != null && !string.IsNullOrWhiteSpace(fieldMeta.Alias)) + { + databaseColumnName = fieldMeta.Alias; + } + } + return IsIntrospectionField(databaseColumnName); } } diff --git a/src/Core/Services/OpenAPI/OpenApiDocumentor.cs b/src/Core/Services/OpenAPI/OpenApiDocumentor.cs index 33105db289..87fb96bc32 100644 --- a/src/Core/Services/OpenAPI/OpenApiDocumentor.cs +++ b/src/Core/Services/OpenAPI/OpenApiDocumentor.cs @@ -1011,13 +1011,13 @@ private Dictionary CreateComponentSchemas(RuntimeEntities // Response body schema whose properties map to the stored procedure's first result set columns // as described by sys.dm_exec_describe_first_result_set. 
- schemas.Add(entityName + SP_RESPONSE_SUFFIX, CreateComponentSchema(entityName, fields: exposedColumnNames, metadataProvider)); + schemas.Add(entityName + SP_RESPONSE_SUFFIX, CreateComponentSchema(entityName, fields: exposedColumnNames, metadataProvider, entities)); } else { // Create component schema for FULL entity with all primary key columns (included auto-generated) // which will typically represent the response body of a request or a stored procedure's request body. - schemas.Add(entityName, CreateComponentSchema(entityName, fields: exposedColumnNames, metadataProvider)); + schemas.Add(entityName, CreateComponentSchema(entityName, fields: exposedColumnNames, metadataProvider, entities)); // Create an entity's request body component schema excluding autogenerated primary keys. // A POST request requires any non-autogenerated primary key references to be in the request body. @@ -1037,7 +1037,7 @@ private Dictionary CreateComponentSchemas(RuntimeEntities } } - schemas.Add($"{entityName}_NoAutoPK", CreateComponentSchema(entityName, fields: exposedColumnNames, metadataProvider)); + schemas.Add($"{entityName}_NoAutoPK", CreateComponentSchema(entityName, fields: exposedColumnNames, metadataProvider, entities)); // Create an entity's request body component schema excluding all primary keys // by removing the tracked non-autogenerated primary key column names and removing them from @@ -1053,7 +1053,7 @@ private Dictionary CreateComponentSchemas(RuntimeEntities } } - schemas.Add($"{entityName}_NoPK", CreateComponentSchema(entityName, fields: exposedColumnNames, metadataProvider)); + schemas.Add($"{entityName}_NoPK", CreateComponentSchema(entityName, fields: exposedColumnNames, metadataProvider, entities)); } } @@ -1113,10 +1113,12 @@ private static OpenApiSchema CreateSpRequestComponentSchema(Dictionary /// Name of the entity. /// List of mapped (alias) field names. + /// Metadata provider for database objects. + /// Runtime entities from configuration. 
/// Raised when an entity's database metadata can't be found, /// indicating a failure due to the provided entityName. /// Entity's OpenApiSchema representation. - private static OpenApiSchema CreateComponentSchema(string entityName, HashSet fields, ISqlMetadataProvider metadataProvider) + private static OpenApiSchema CreateComponentSchema(string entityName, HashSet fields, ISqlMetadataProvider metadataProvider, RuntimeEntities entities) { if (!metadataProvider.EntityToDatabaseObject.TryGetValue(entityName, out DatabaseObject? dbObject) || dbObject is null) { @@ -1128,6 +1130,8 @@ private static OpenApiSchema CreateComponentSchema(string entityName, HashSet properties = new(); + Entity? entityConfig = entities.TryGetValue(entityName, out Entity? ent) ? ent : null; + // Get backing column metadata to resolve the correct system type which is then // used to resolve the correct Json data type. foreach (string field in fields) @@ -1136,15 +1140,24 @@ private static OpenApiSchema CreateComponentSchema(string entityName, HashSet f.Alias == field || f.Name == field); + fieldDescription = fieldMetadata?.Description; + } + properties.Add(field, new OpenApiSchema() { Type = typeMetadata, - Format = formatMetadata + Format = formatMetadata, + Description = fieldDescription }); } } @@ -1152,7 +1165,8 @@ private static OpenApiSchema CreateComponentSchema(string entityName, HashSet f.Name == columnName); + if (fieldMetadata != null && !string.IsNullOrEmpty(fieldMetadata.Alias)) + { + exposedColumnName = fieldMetadata.Alias; + } + } + NamedTypeNode fieldType = new(GetGraphQLTypeFromSystemType(column.SystemType)); FieldDefinitionNode field = new( location: null, new(exposedColumnName), - description: null, + description: fieldMetadata?.Description is null ? null : new StringValueNode(fieldMetadata.Description), new List(), column.IsNullable ? 
fieldType : new NonNullTypeNode(fieldType), directives); diff --git a/src/Service.Tests/Authorization/AuthorizationHelpers.cs b/src/Service.Tests/Authorization/AuthorizationHelpers.cs index 85f05a1c3b..bdd5630a50 100644 --- a/src/Service.Tests/Authorization/AuthorizationHelpers.cs +++ b/src/Service.Tests/Authorization/AuthorizationHelpers.cs @@ -112,6 +112,7 @@ public static RuntimeConfig InitRuntimeConfig( Entity sampleEntity = new( Source: entitySource, + Fields: null, Rest: new(Array.Empty()), GraphQL: new(entityName.Singularize(), entityName.Pluralize()), Permissions: new EntityPermission[] { permissionForEntity }, diff --git a/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs b/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs index 39a77bffff..0dff3ac016 100644 --- a/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs +++ b/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs @@ -1424,6 +1424,7 @@ private static RuntimeConfig BuildTestRuntimeConfig(EntityPermission[] permissio { Entity sampleEntity = new( Source: new(entityName, EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new("", ""), Permissions: permissions, diff --git a/src/Service.Tests/Caching/DabCacheServiceIntegrationTests.cs b/src/Service.Tests/Caching/DabCacheServiceIntegrationTests.cs index 706a0c42ad..02b7ca6492 100644 --- a/src/Service.Tests/Caching/DabCacheServiceIntegrationTests.cs +++ b/src/Service.Tests/Caching/DabCacheServiceIntegrationTests.cs @@ -740,6 +740,7 @@ private static Mock CreateMockRuntimeConfigProvider(strin { Entity entity = new( Source: new EntitySource(string.Empty, null, null, null), + Fields: null, GraphQL: new EntityGraphQLOptions(string.Empty, string.Empty), Rest: new EntityRestOptions(), Permissions: Array.Empty(), diff --git a/src/Service.Tests/Caching/HealthEndpointCachingTests.cs b/src/Service.Tests/Caching/HealthEndpointCachingTests.cs index 94216a4409..fcc3e097e5 
100644 --- a/src/Service.Tests/Caching/HealthEndpointCachingTests.cs +++ b/src/Service.Tests/Caching/HealthEndpointCachingTests.cs @@ -119,6 +119,7 @@ private static void SetupCachingTest(int? cacheTtlSeconds) { Entity requiredEntity = new( Health: new(enabled: true), + Fields: null, Source: new("books", EntitySourceType.Table, null, null), Rest: new(Enabled: true), GraphQL: new("book", "books", true), diff --git a/src/Service.Tests/Configuration/ConfigurationTests.cs b/src/Service.Tests/Configuration/ConfigurationTests.cs index 0be24fa886..65f6e6643b 100644 --- a/src/Service.Tests/Configuration/ConfigurationTests.cs +++ b/src/Service.Tests/Configuration/ConfigurationTests.cs @@ -1613,6 +1613,7 @@ public async Task TestSqlMetadataForInvalidConfigEntities() // creating an entity with invalid table name Entity entityWithInvalidSourceName = new( Source: new("bokos", EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "book", Plural: "books"), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -1622,6 +1623,7 @@ public async Task TestSqlMetadataForInvalidConfigEntities() Entity entityWithInvalidSourceType = new( Source: new("publishers", EntitySourceType.StoredProcedure, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "publisher", Plural: "publishers"), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_AUTHENTICATED) }, @@ -1684,6 +1686,7 @@ public async Task TestSqlMetadataValidationForEntitiesWithInvalidSource() // creating an entity with invalid table name Entity entityWithInvalidSource = new( Source: new(null, EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "book", Plural: "books"), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -1694,6 +1697,7 @@ public async Task TestSqlMetadataValidationForEntitiesWithInvalidSource() // creating an entity with invalid source 
object and adding relationship with an entity with invalid source Entity entityWithInvalidSourceAndRelationship = new( Source: new(null, EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "publisher", Plural: "publishers"), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -2642,6 +2646,7 @@ public async Task ValidateErrorMessageForMutationWithoutReadPermission() new EntityPermission( Role: AuthorizationResolver.ROLE_AUTHENTICATED , Actions: new[] { readAction, createAction, deleteAction })}; Entity entity = new(Source: new("stocks", EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "Stock", Plural: "Stocks"), Permissions: permissions, @@ -2944,6 +2949,7 @@ public async Task ValidateInheritanceOfReadPermissionFromAnonymous() new EntityPermission( Role: AuthorizationResolver.ROLE_AUTHENTICATED , Actions: new[] { createAction })}; Entity entity = new(Source: new("stocks", EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "Stock", Plural: "Stocks"), Permissions: permissions, @@ -3072,6 +3078,7 @@ public async Task ValidateLocationHeaderFieldForPostRequests(EntitySourceType en if (entityType is EntitySourceType.StoredProcedure) { Entity entity = new(Source: new("get_books", EntitySourceType.StoredProcedure, null, null), + Fields: null, Rest: new(new SupportedHttpVerb[] { SupportedHttpVerb.Get, SupportedHttpVerb.Post }), GraphQL: null, Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -3171,6 +3178,7 @@ public async Task ValidateLocationHeaderWhenBaseRouteIsConfigured( if (entityType is EntitySourceType.StoredProcedure) { Entity entity = new(Source: new("get_books", EntitySourceType.StoredProcedure, null, null), + Fields: null, Rest: new(new SupportedHttpVerb[] { SupportedHttpVerb.Get, SupportedHttpVerb.Post }), GraphQL: null, Permissions: new[] { 
GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -3344,6 +3352,7 @@ public async Task TestEngineSupportViewsWithoutKeyFieldsInConfigForMsSQL() GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); Entity viewEntity = new( Source: new("books_view_all", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new("", ""), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -3683,6 +3692,7 @@ public void TestInvalidDatabaseColumnNameHandling( Entity entity = new( Source: new("graphql_incompatible", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: false), GraphQL: new("graphql_incompatible", "graphql_incompatibles", entityGraphQLEnabled), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -4308,6 +4318,7 @@ public async Task OpenApi_GlobalEntityRestPath(bool globalRestEnabled, bool expe // file creation function. Entity requiredEntity = new( Source: new("books", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: false), GraphQL: new("book", "books"), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -4369,6 +4380,7 @@ public async Task HealthEndpoint_ValidateContents() // config file creation. Entity requiredEntity = new( Source: new("books", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: false), GraphQL: new("book", "books"), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -4418,6 +4430,7 @@ public async Task OpenApi_EntityLevelRestEndpoint() // Create the entities under test. 
Entity restEnabledEntity = new( Source: new("books", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new("", "", false), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -4426,6 +4439,7 @@ public async Task OpenApi_EntityLevelRestEndpoint() Entity restDisabledEntity = new( Source: new("publishers", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: false), GraphQL: new("publisher", "publishers", true), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -4508,6 +4522,7 @@ public async Task ValidateNextLinkUsage(bool isNextLinkRelative) // file creation function. Entity requiredEntity = new( Source: new("bookmarks", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new(Singular: "", Plural: "", Enabled: false), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -4633,6 +4648,7 @@ public async Task ValidateNextLinkRespectsXForwardedHostAndProto(string forwarde Entity requiredEntity = new( Source: new("bookmarks", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new(Singular: "", Plural: "", Enabled: false), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -5349,6 +5365,7 @@ public static RuntimeConfig InitialzieRuntimeConfigForMultipleCreateTests(bool i LinkingTargetFields: null); Entity bookEntity = new(Source: new("books", EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "book", Plural: "books"), Permissions: permissions, @@ -5372,6 +5389,7 @@ public static RuntimeConfig InitialzieRuntimeConfigForMultipleCreateTests(bool i Entity publisherEntity = new( Source: new("publishers", EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "publisher", Plural: "publishers"), Permissions: permissions, @@ 
-5405,6 +5423,7 @@ public static RuntimeConfig InitMinimalRuntimeConfig( { entity ??= new( Source: new("books", EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "book", Plural: "books"), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -5422,6 +5441,7 @@ public static RuntimeConfig InitMinimalRuntimeConfig( // Adding an entity with only Authorized Access Entity anotherEntity = new( Source: new("publishers", EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "publisher", Plural: "publishers"), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_AUTHENTICATED) }, @@ -5529,6 +5549,7 @@ private static RuntimeConfig CreateBasicRuntimeConfigWithSingleEntityAndAuthOpti { Entity entity = new( Source: new("books", EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "book", Plural: "books"), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, diff --git a/src/Service.Tests/Configuration/HealthEndpointRolesTests.cs b/src/Service.Tests/Configuration/HealthEndpointRolesTests.cs index 2a83697a3a..9ad36bfa15 100644 --- a/src/Service.Tests/Configuration/HealthEndpointRolesTests.cs +++ b/src/Service.Tests/Configuration/HealthEndpointRolesTests.cs @@ -49,6 +49,7 @@ public async Task ComprehensiveHealthEndpoint_RolesTests(string role, HostMode h // config file creation. 
Entity requiredEntity = new( Health: new(enabled: true), + Fields: null, Source: new("books", EntitySourceType.Table, null, null), Rest: new(Enabled: true), GraphQL: new("book", "books", true), diff --git a/src/Service.Tests/Configuration/HealthEndpointTests.cs b/src/Service.Tests/Configuration/HealthEndpointTests.cs index 70e14e0108..1eac7416e3 100644 --- a/src/Service.Tests/Configuration/HealthEndpointTests.cs +++ b/src/Service.Tests/Configuration/HealthEndpointTests.cs @@ -519,6 +519,7 @@ private static RuntimeConfig SetupCustomConfigFile(bool enableGlobalHealth, bool Entity requiredEntity = new( Health: new(enabled: enableEntityHealth), Source: new("books", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: enableEntityRest), GraphQL: new("book", "bookLists", enableEntityGraphQL), Permissions: new[] { ConfigurationTests.GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, diff --git a/src/Service.Tests/Configuration/HotReload/AuthorizationResolverHotReloadTests.cs b/src/Service.Tests/Configuration/HotReload/AuthorizationResolverHotReloadTests.cs index b5fcb6162b..2175c8ac83 100644 --- a/src/Service.Tests/Configuration/HotReload/AuthorizationResolverHotReloadTests.cs +++ b/src/Service.Tests/Configuration/HotReload/AuthorizationResolverHotReloadTests.cs @@ -65,6 +65,7 @@ public async Task ValidateAuthorizationResolver_HotReload() Entity requiredEntityHR = new( Source: new("publishers", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new(Singular: "", Plural: "", Enabled: false), Permissions: new[] { permissionsHR }, @@ -178,6 +179,7 @@ public static async Task ClassInitializeAsync(TestContext context) // file creation function. 
Entity requiredEntity = new( Source: new("books", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new(Singular: "", Plural: "", Enabled: false), Permissions: new[] { permissions }, diff --git a/src/Service.Tests/CosmosTests/MutationTests.cs b/src/Service.Tests/CosmosTests/MutationTests.cs index 2f82541806..de931dcf22 100644 --- a/src/Service.Tests/CosmosTests/MutationTests.cs +++ b/src/Service.Tests/CosmosTests/MutationTests.cs @@ -542,6 +542,7 @@ type Planet @model(name:""Planet"") { new EntityPermission( Role: AuthorizationResolver.ROLE_AUTHENTICATED , Actions: new[] { readAction, createAction, deleteAction })}; Entity entity = new(Source: new($"graphqldb.{_containerName}", null, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "Planet", Plural: "Planets"), Permissions: permissions, @@ -672,6 +673,7 @@ type Planet @model(name:""Planet"") { new EntityPermission( Role: AuthorizationResolver.ROLE_AUTHENTICATED , Actions: new[] { createAction })}; Entity entity = new(Source: new($"graphqldb.{_containerName}", null, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "Planet", Plural: "Planets"), Permissions: permissions, diff --git a/src/Service.Tests/CosmosTests/QueryTests.cs b/src/Service.Tests/CosmosTests/QueryTests.cs index c40c95c75b..52afa8e788 100644 --- a/src/Service.Tests/CosmosTests/QueryTests.cs +++ b/src/Service.Tests/CosmosTests/QueryTests.cs @@ -710,6 +710,7 @@ type Planet @model(name:""Planet"") { EntityPermission[] permissions = new[] { new EntityPermission(Role: AuthorizationResolver.ROLE_ANONYMOUS, Actions: new[] { createAction, readAction, deleteAction }) }; Entity entity = new(Source: new($"graphqldb.{_containerName}", null, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "Planet", Plural: "Planets"), Permissions: permissions, diff --git a/src/Service.Tests/CosmosTests/SchemaGeneratorFactoryTests.cs b/src/Service.Tests/CosmosTests/SchemaGeneratorFactoryTests.cs 
index 562a5174d2..f10ac17354 100644 --- a/src/Service.Tests/CosmosTests/SchemaGeneratorFactoryTests.cs +++ b/src/Service.Tests/CosmosTests/SchemaGeneratorFactoryTests.cs @@ -83,6 +83,7 @@ public async Task ExportGraphQLFromCosmosDB_GeneratesSchemaSuccessfully(string g { {"Container1", new Entity( Source: new(entitySource, EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: false), GraphQL: new("Container1", "Container1s"), Permissions: new EntityPermission[] {}, @@ -90,6 +91,7 @@ public async Task ExportGraphQLFromCosmosDB_GeneratesSchemaSuccessfully(string g Mappings: null) }, {"Container2", new Entity( Source: new("mydb2.container2", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: false), GraphQL: new("Container2", "Container2s"), Permissions: new EntityPermission[] {}, @@ -97,6 +99,7 @@ public async Task ExportGraphQLFromCosmosDB_GeneratesSchemaSuccessfully(string g Mappings: null) }, {"Container0", new Entity( Source: new(null, EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: false), GraphQL: new("Container0", "Container0s"), Permissions: new EntityPermission[] {}, diff --git a/src/Service.Tests/GraphQLBuilder/Helpers/GraphQLTestHelpers.cs b/src/Service.Tests/GraphQLBuilder/Helpers/GraphQLTestHelpers.cs index 73f71db446..737e29f48f 100644 --- a/src/Service.Tests/GraphQLBuilder/Helpers/GraphQLTestHelpers.cs +++ b/src/Service.Tests/GraphQLBuilder/Helpers/GraphQLTestHelpers.cs @@ -80,6 +80,7 @@ public static Dictionary CreateStubEntityPermissionsMap( public static Entity GenerateEmptyEntity(EntitySourceType sourceType = EntitySourceType.Table) { return new Entity(Source: new EntitySource(Type: sourceType, Object: "foo", Parameters: null, KeyFields: null), + Fields: null, Rest: new(Array.Empty()), GraphQL: new("", ""), Permissions: Array.Empty(), @@ -106,6 +107,7 @@ public static Entity GenerateStoredProcedureEntity( { IEnumerable actions = (permissionOperations ?? 
new string[] { }).Select(a => new EntityAction(EnumExtensions.Deserialize(a), null, new(null, null))); Entity entity = new(Source: new EntitySource(Type: EntitySourceType.StoredProcedure, Object: "foo", Parameters: parameters, KeyFields: null), + Fields: null, Rest: new(Array.Empty()), GraphQL: new(Singular: graphQLTypeName, Plural: "", Enabled: true, Operation: graphQLOperation), Permissions: new[] { new EntityPermission(Role: "anonymous", Actions: actions.ToArray()) }, @@ -123,6 +125,7 @@ public static Entity GenerateStoredProcedureEntity( public static Entity GenerateEntityWithSingularPlural(string singularNameForEntity, string pluralNameForEntity, EntitySourceType sourceType = EntitySourceType.Table) { return new Entity(Source: new EntitySource(Type: sourceType, Object: "foo", Parameters: null, KeyFields: null), + Fields: null, Rest: new(Array.Empty()), GraphQL: new(singularNameForEntity, pluralNameForEntity), Permissions: Array.Empty(), @@ -139,6 +142,7 @@ public static Entity GenerateEntityWithSingularPlural(string singularNameForEnti public static Entity GenerateEntityWithStringType(string singularGraphQLName, EntitySourceType sourceType = EntitySourceType.Table) { return new Entity(Source: new EntitySource(Type: sourceType, Object: "foo", Parameters: null, KeyFields: null), + Fields: null, Rest: new(Array.Empty()), GraphQL: new(singularGraphQLName, ""), Permissions: Array.Empty(), diff --git a/src/Service.Tests/GraphQLBuilder/MutationBuilderTests.cs b/src/Service.Tests/GraphQLBuilder/MutationBuilderTests.cs index 4ebe842c36..a1478093dd 100644 --- a/src/Service.Tests/GraphQLBuilder/MutationBuilderTests.cs +++ b/src/Service.Tests/GraphQLBuilder/MutationBuilderTests.cs @@ -45,6 +45,7 @@ private static Entity GenerateEmptyEntity() { return new Entity( Source: new("dbo.entity", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: false), GraphQL: new("Foo", "Foos", Enabled: true), Permissions: Array.Empty(), diff --git 
a/src/Service.Tests/GraphQLBuilder/Sql/SchemaConverterTests.cs b/src/Service.Tests/GraphQLBuilder/Sql/SchemaConverterTests.cs index e472097fad..84806adc78 100644 --- a/src/Service.Tests/GraphQLBuilder/Sql/SchemaConverterTests.cs +++ b/src/Service.Tests/GraphQLBuilder/Sql/SchemaConverterTests.cs @@ -743,6 +743,7 @@ public static Entity GenerateEmptyEntity(string entityName) { return new Entity( Source: new($"{SCHEMA_NAME}.{TABLE_NAME}", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new(entityName, ""), Permissions: Array.Empty(), diff --git a/src/Service.Tests/OpenApiDocumentor/DocumentVerbosityTests.cs b/src/Service.Tests/OpenApiDocumentor/DocumentVerbosityTests.cs index de8a35212b..fa43617f4f 100644 --- a/src/Service.Tests/OpenApiDocumentor/DocumentVerbosityTests.cs +++ b/src/Service.Tests/OpenApiDocumentor/DocumentVerbosityTests.cs @@ -42,6 +42,7 @@ public async Task ResponseObjectSchemaIncludesTypeProperty() // Arrange Entity entity = new( Source: new(Object: "books", EntitySourceType.Table, null, null), + Fields: null, GraphQL: new(Singular: null, Plural: null, Enabled: false), Rest: new(Methods: EntityRestOptions.DEFAULT_SUPPORTED_VERBS), Permissions: OpenApiTestBootstrap.CreateBasicPermissions(), diff --git a/src/Service.Tests/OpenApiDocumentor/ParameterValidationTests.cs b/src/Service.Tests/OpenApiDocumentor/ParameterValidationTests.cs index afed2b1f3e..7c0e0225ae 100644 --- a/src/Service.Tests/OpenApiDocumentor/ParameterValidationTests.cs +++ b/src/Service.Tests/OpenApiDocumentor/ParameterValidationTests.cs @@ -234,6 +234,7 @@ private async static Task GenerateOpenApiDocumentForGivenEntity { Entity entity = new( Source: entitySource, + Fields: null, GraphQL: new(Singular: null, Plural: null, Enabled: false), Rest: new(Methods: supportedHttpMethods ?? 
EntityRestOptions.DEFAULT_SUPPORTED_VERBS), Permissions: OpenApiTestBootstrap.CreateBasicPermissions(), diff --git a/src/Service.Tests/OpenApiDocumentor/PathValidationTests.cs b/src/Service.Tests/OpenApiDocumentor/PathValidationTests.cs index bff1333497..5f478b3b80 100644 --- a/src/Service.Tests/OpenApiDocumentor/PathValidationTests.cs +++ b/src/Service.Tests/OpenApiDocumentor/PathValidationTests.cs @@ -45,6 +45,7 @@ public async Task ValidateEntityRestPath(string entityName, string configuredRes { Entity entity = new( Source: new(Object: "books", EntitySourceType.Table, null, null), + Fields: null, GraphQL: new(Singular: null, Plural: null, Enabled: false), Rest: new(Methods: EntityRestOptions.DEFAULT_SUPPORTED_VERBS, Path: configuredRestPath), Permissions: OpenApiTestBootstrap.CreateBasicPermissions(), diff --git a/src/Service.Tests/OpenApiDocumentor/StoredProcedureGeneration.cs b/src/Service.Tests/OpenApiDocumentor/StoredProcedureGeneration.cs index b7105dfa45..ffd5aaadde 100644 --- a/src/Service.Tests/OpenApiDocumentor/StoredProcedureGeneration.cs +++ b/src/Service.Tests/OpenApiDocumentor/StoredProcedureGeneration.cs @@ -55,6 +55,7 @@ public static void CreateEntities() { Entity entity1 = new( Source: new(Object: "insert_and_display_all_books_for_given_publisher", EntitySourceType.StoredProcedure, null, null), + Fields: null, GraphQL: new(Singular: null, Plural: null, Enabled: false), Rest: new(Methods: EntityRestOptions.DEFAULT_SUPPORTED_VERBS), Permissions: OpenApiTestBootstrap.CreateBasicPermissions(), diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt index 05fec88fff..b622552ef5 100644 --- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt +++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt @@ -509,6 +509,18 @@ Object: 
books, Type: Table }, + Fields: [ + { + Name: id, + Alias: id, + PrimaryKey: false + }, + { + Name: title, + Alias: title, + PrimaryKey: false + } + ], GraphQL: { Singular: book, Plural: books, @@ -893,10 +905,6 @@ ] } ], - Mappings: { - id: id, - title: title - }, Relationships: { authors: { Cardinality: Many, @@ -1624,6 +1632,13 @@ Object: type_table, Type: Table }, + Fields: [ + { + Name: id, + Alias: typeid, + PrimaryKey: false + } + ], GraphQL: { Singular: SupportedType, Plural: SupportedTypes, @@ -1667,10 +1682,7 @@ } ] } - ], - Mappings: { - id: typeid - } + ] } }, { @@ -1765,6 +1777,18 @@ Object: trees, Type: Table }, + Fields: [ + { + Name: species, + Alias: Scientific Name, + PrimaryKey: false + }, + { + Name: region, + Alias: United State's Region, + PrimaryKey: false + } + ], GraphQL: { Singular: Tree, Plural: Trees, @@ -1808,11 +1832,7 @@ } ] } - ], - Mappings: { - region: United State's Region, - species: Scientific Name - } + ] } }, { @@ -1821,6 +1841,13 @@ Object: trees, Type: Table }, + Fields: [ + { + Name: species, + Alias: fancyName, + PrimaryKey: false + } + ], GraphQL: { Singular: Shrub, Plural: Shrubs, @@ -1866,9 +1893,6 @@ ] } ], - Mappings: { - species: fancyName - }, Relationships: { fungus: { TargetEntity: Fungus, @@ -1888,6 +1912,13 @@ Object: fungi, Type: Table }, + Fields: [ + { + Name: spores, + Alias: hazards, + PrimaryKey: false + } + ], GraphQL: { Singular: fungus, Plural: fungi, @@ -1948,9 +1979,6 @@ ] } ], - Mappings: { - spores: hazards - }, Relationships: { Shrub: { TargetEntity: Shrub, @@ -1968,11 +1996,14 @@ books_view_all: { Source: { Object: books_view_all, - Type: View, - KeyFields: [ - id - ] + Type: View }, + Fields: [ + { + Name: id, + PrimaryKey: true + } + ], GraphQL: { Singular: books_view_all, Plural: books_view_alls, @@ -2014,11 +2045,15 @@ books_view_with_mapping: { Source: { Object: books_view_with_mapping, - Type: View, - KeyFields: [ - id - ] + Type: View }, + Fields: [ + { + Name: id, + Alias: book_id, + 
PrimaryKey: true + } + ], GraphQL: { Singular: books_view_with_mapping, Plural: books_view_with_mappings, @@ -2036,22 +2071,25 @@ } ] } - ], - Mappings: { - id: book_id - } + ] } }, { stocks_view_selected: { Source: { Object: stocks_view_selected, - Type: View, - KeyFields: [ - categoryid, - pieceid - ] + Type: View }, + Fields: [ + { + Name: categoryid, + PrimaryKey: true + }, + { + Name: pieceid, + PrimaryKey: true + } + ], GraphQL: { Singular: stocks_view_selected, Plural: stocks_view_selecteds, @@ -2093,12 +2131,18 @@ books_publishers_view_composite: { Source: { Object: books_publishers_view_composite, - Type: View, - KeyFields: [ - id, - pub_id - ] + Type: View }, + Fields: [ + { + Name: id, + PrimaryKey: true + }, + { + Name: pub_id, + PrimaryKey: true + } + ], GraphQL: { Singular: books_publishers_view_composite, Plural: books_publishers_view_composites, @@ -2352,6 +2396,28 @@ Object: aow, Type: Table }, + Fields: [ + { + Name: DetailAssessmentAndPlanning, + Alias: 始計, + PrimaryKey: false + }, + { + Name: WagingWar, + Alias: 作戰, + PrimaryKey: false + }, + { + Name: StrategicAttack, + Alias: 謀攻, + PrimaryKey: false + }, + { + Name: NoteNum, + Alias: ┬─┬ノ( º _ ºノ), + PrimaryKey: false + } + ], GraphQL: { Singular: ArtOfWar, Plural: ArtOfWars, @@ -2377,13 +2443,7 @@ } ] } - ], - Mappings: { - DetailAssessmentAndPlanning: 始計, - NoteNum: ┬─┬ノ( º _ ºノ), - StrategicAttack: 謀攻, - WagingWar: 作戰 - } + ] } }, { @@ -3110,6 +3170,18 @@ Object: GQLmappings, Type: Table }, + Fields: [ + { + Name: __column1, + Alias: column1, + PrimaryKey: false + }, + { + Name: __column2, + Alias: column2, + PrimaryKey: false + } + ], GraphQL: { Singular: GQLmappings, Plural: GQLmappings, @@ -3135,11 +3207,7 @@ } ] } - ], - Mappings: { - __column1: column1, - __column2: column2 - } + ] } }, { @@ -3182,6 +3250,18 @@ Object: mappedbookmarks, Type: Table }, + Fields: [ + { + Name: id, + Alias: bkid, + PrimaryKey: false + }, + { + Name: bkname, + Alias: name, + PrimaryKey: false + } + ], 
GraphQL: { Singular: MappedBookmarks, Plural: MappedBookmarks, @@ -3207,11 +3287,7 @@ } ] } - ], - Mappings: { - bkname: name, - id: bkid - } + ] } }, { @@ -3417,6 +3493,18 @@ Object: books, Type: Table }, + Fields: [ + { + Name: id, + Alias: id, + PrimaryKey: false + }, + { + Name: title, + Alias: title, + PrimaryKey: false + } + ], GraphQL: { Singular: bookNF, Plural: booksNF, @@ -3489,10 +3577,6 @@ ] } ], - Mappings: { - id: id, - title: title - }, Relationships: { authors: { Cardinality: Many, diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt index f34141c964..6c81c138ce 100644 --- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt +++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt @@ -374,6 +374,18 @@ Object: books, Type: Table }, + Fields: [ + { + Name: id, + Alias: id, + PrimaryKey: false + }, + { + Name: title, + Alias: title, + PrimaryKey: false + } + ], GraphQL: { Singular: book, Plural: books, @@ -735,10 +747,6 @@ ] } ], - Mappings: { - id: id, - title: title - }, Relationships: { authors: { Cardinality: Many, @@ -1143,6 +1151,13 @@ Object: type_table, Type: Table }, + Fields: [ + { + Name: id, + Alias: typeid, + PrimaryKey: false + } + ], GraphQL: { Singular: SupportedType, Plural: SupportedTypes, @@ -1186,10 +1201,7 @@ } ] } - ], - Mappings: { - id: typeid - } + ] } }, { @@ -1241,6 +1253,18 @@ Object: trees, Type: Table }, + Fields: [ + { + Name: species, + Alias: Scientific Name, + PrimaryKey: false + }, + { + Name: region, + Alias: United State's Region, + PrimaryKey: false + } + ], GraphQL: { Singular: Tree, Plural: Trees, @@ -1284,11 +1308,7 @@ } ] } - ], - Mappings: { - region: United State's Region, - species: Scientific Name - } + ] } }, { @@ -1297,6 +1317,13 @@ Object: trees, Type: Table }, + Fields: [ + { 
+ Name: species, + Alias: fancyName, + PrimaryKey: false + } + ], GraphQL: { Singular: Shrub, Plural: Shrubs, @@ -1342,9 +1369,6 @@ ] } ], - Mappings: { - species: fancyName - }, Relationships: { fungus: { TargetEntity: Fungus, @@ -1364,6 +1388,13 @@ Object: fungi, Type: Table }, + Fields: [ + { + Name: spores, + Alias: hazards, + PrimaryKey: false + } + ], GraphQL: { Singular: fungus, Plural: fungi, @@ -1424,9 +1455,6 @@ ] } ], - Mappings: { - spores: hazards - }, Relationships: { Shrub: { TargetEntity: Shrub, @@ -1444,11 +1472,14 @@ books_view_all: { Source: { Object: books_view_all, - Type: View, - KeyFields: [ - id - ] + Type: View }, + Fields: [ + { + Name: id, + PrimaryKey: true + } + ], GraphQL: { Singular: books_view_all, Plural: books_view_alls, @@ -1490,11 +1521,15 @@ books_view_with_mapping: { Source: { Object: books_view_with_mapping, - Type: View, - KeyFields: [ - id - ] + Type: View }, + Fields: [ + { + Name: id, + Alias: book_id, + PrimaryKey: true + } + ], GraphQL: { Singular: books_view_with_mapping, Plural: books_view_with_mappings, @@ -1512,22 +1547,25 @@ } ] } - ], - Mappings: { - id: book_id - } + ] } }, { stocks_view_selected: { Source: { Object: stocks_view_selected, - Type: View, - KeyFields: [ - categoryid, - pieceid - ] + Type: View }, + Fields: [ + { + Name: categoryid, + PrimaryKey: true + }, + { + Name: pieceid, + PrimaryKey: true + } + ], GraphQL: { Singular: stocks_view_selected, Plural: stocks_view_selecteds, @@ -1569,12 +1607,18 @@ books_publishers_view_composite: { Source: { Object: books_publishers_view_composite, - Type: View, - KeyFields: [ - id, - pub_id - ] + Type: View }, + Fields: [ + { + Name: id, + PrimaryKey: true + }, + { + Name: pub_id, + PrimaryKey: true + } + ], GraphQL: { Singular: books_publishers_view_composite, Plural: books_publishers_view_composites, @@ -1828,6 +1872,28 @@ Object: aow, Type: Table }, + Fields: [ + { + Name: DetailAssessmentAndPlanning, + Alias: 始計, + PrimaryKey: false + }, + { + Name: WagingWar, 
+ Alias: 作戰, + PrimaryKey: false + }, + { + Name: StrategicAttack, + Alias: 謀攻, + PrimaryKey: false + }, + { + Name: NoteNum, + Alias: ┬─┬ノ( º _ ºノ), + PrimaryKey: false + } + ], GraphQL: { Singular: ArtOfWar, Plural: ArtOfWars, @@ -1853,13 +1919,7 @@ } ] } - ], - Mappings: { - DetailAssessmentAndPlanning: 始計, - NoteNum: ┬─┬ノ( º _ ºノ), - StrategicAttack: 謀攻, - WagingWar: 作戰 - } + ] } }, { @@ -2073,6 +2133,18 @@ Object: GQLmappings, Type: Table }, + Fields: [ + { + Name: __column1, + Alias: column1, + PrimaryKey: false + }, + { + Name: __column2, + Alias: column2, + PrimaryKey: false + } + ], GraphQL: { Singular: GQLmappings, Plural: GQLmappings, @@ -2098,11 +2170,7 @@ } ] } - ], - Mappings: { - __column1: column1, - __column2: column2 - } + ] } }, { @@ -2145,6 +2213,18 @@ Object: mappedbookmarks, Type: Table }, + Fields: [ + { + Name: id, + Alias: bkid, + PrimaryKey: false + }, + { + Name: bkname, + Alias: name, + PrimaryKey: false + } + ], GraphQL: { Singular: MappedBookmarks, Plural: MappedBookmarks, @@ -2170,11 +2250,7 @@ } ] } - ], - Mappings: { - bkname: name, - id: bkid - } + ] } }, { diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt index 75490a804b..5e8631d46f 100644 --- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt +++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt @@ -407,6 +407,18 @@ Object: books, Type: Table }, + Fields: [ + { + Name: id, + Alias: id, + PrimaryKey: false + }, + { + Name: title, + Alias: title, + PrimaryKey: false + } + ], GraphQL: { Singular: book, Plural: books, @@ -768,10 +780,6 @@ ] } ], - Mappings: { - id: id, - title: title - }, Relationships: { authors: { Cardinality: Many, @@ -1193,6 +1201,13 @@ Object: type_table, Type: Table }, + Fields: [ + { + Name: id, + Alias: 
typeid, + PrimaryKey: false + } + ], GraphQL: { Singular: SupportedType, Plural: SupportedTypes, @@ -1236,10 +1251,7 @@ } ] } - ], - Mappings: { - id: typeid - } + ] } }, { @@ -1312,6 +1324,18 @@ Object: trees, Type: Table }, + Fields: [ + { + Name: species, + Alias: Scientific Name, + PrimaryKey: false + }, + { + Name: region, + Alias: United State's Region, + PrimaryKey: false + } + ], GraphQL: { Singular: Tree, Plural: Trees, @@ -1355,11 +1379,7 @@ } ] } - ], - Mappings: { - region: United State's Region, - species: Scientific Name - } + ] } }, { @@ -1368,6 +1388,13 @@ Object: trees, Type: Table }, + Fields: [ + { + Name: species, + Alias: fancyName, + PrimaryKey: false + } + ], GraphQL: { Singular: Shrub, Plural: Shrubs, @@ -1413,9 +1440,6 @@ ] } ], - Mappings: { - species: fancyName - }, Relationships: { fungus: { TargetEntity: Fungus, @@ -1435,6 +1459,13 @@ Object: fungi, Type: Table }, + Fields: [ + { + Name: spores, + Alias: hazards, + PrimaryKey: false + } + ], GraphQL: { Singular: fungus, Plural: fungi, @@ -1495,9 +1526,6 @@ ] } ], - Mappings: { - spores: hazards - }, Relationships: { Shrub: { TargetEntity: Shrub, @@ -1683,11 +1711,15 @@ books_view_with_mapping: { Source: { Object: books_view_with_mapping, - Type: View, - KeyFields: [ - id - ] + Type: View }, + Fields: [ + { + Name: id, + Alias: book_id, + PrimaryKey: true + } + ], GraphQL: { Singular: books_view_with_mapping, Plural: books_view_with_mappings, @@ -1705,10 +1737,7 @@ } ] } - ], - Mappings: { - id: book_id - } + ] } }, { @@ -1986,6 +2015,28 @@ Object: aow, Type: Table }, + Fields: [ + { + Name: DetailAssessmentAndPlanning, + Alias: 始計, + PrimaryKey: false + }, + { + Name: WagingWar, + Alias: 作戰, + PrimaryKey: false + }, + { + Name: StrategicAttack, + Alias: 謀攻, + PrimaryKey: false + }, + { + Name: NoteNum, + Alias: ┬─┬ノ( º _ ºノ), + PrimaryKey: false + } + ], GraphQL: { Singular: ArtOfWar, Plural: ArtOfWars, @@ -2011,13 +2062,7 @@ } ] } - ], - Mappings: { - DetailAssessmentAndPlanning: 始計, - 
NoteNum: ┬─┬ノ( º _ ºノ), - StrategicAttack: 謀攻, - WagingWar: 作戰 - } + ] } }, { @@ -2135,6 +2180,18 @@ Object: gqlmappings, Type: Table }, + Fields: [ + { + Name: __column1, + Alias: column1, + PrimaryKey: false + }, + { + Name: __column2, + Alias: column2, + PrimaryKey: false + } + ], GraphQL: { Singular: GQLmappings, Plural: GQLmappings, @@ -2160,11 +2217,7 @@ } ] } - ], - Mappings: { - __column1: column1, - __column2: column2 - } + ] } }, { @@ -2207,6 +2260,18 @@ Object: mappedbookmarks, Type: Table }, + Fields: [ + { + Name: id, + Alias: bkid, + PrimaryKey: false + }, + { + Name: bkname, + Alias: name, + PrimaryKey: false + } + ], GraphQL: { Singular: MappedBookmarks, Plural: MappedBookmarks, @@ -2232,11 +2297,7 @@ } ] } - ], - Mappings: { - bkname: name, - id: bkid - } + ] } }, { @@ -2390,6 +2451,18 @@ Object: books, Type: Table }, + Fields: [ + { + Name: id, + Alias: id, + PrimaryKey: false + }, + { + Name: title, + Alias: title, + PrimaryKey: false + } + ], GraphQL: { Singular: bookNF, Plural: booksNF, @@ -2449,10 +2522,6 @@ ] } ], - Mappings: { - id: id, - title: title - }, Relationships: { authors: { Cardinality: Many, @@ -2574,6 +2643,18 @@ Object: dimaccount, Type: Table }, + Fields: [ + { + Name: parentaccountkey, + Alias: ParentAccountKey, + PrimaryKey: false + }, + { + Name: accountkey, + Alias: AccountKey, + PrimaryKey: false + } + ], GraphQL: { Singular: dbo_DimAccount, Plural: dbo_DimAccounts, @@ -2592,10 +2673,6 @@ ] } ], - Mappings: { - accountkey: AccountKey, - parentaccountkey: ParentAccountKey - }, Relationships: { child_accounts: { Cardinality: Many, diff --git a/src/Service.Tests/SqlTests/GraphQLQueryTests/GraphQLQueryTestBase.cs b/src/Service.Tests/SqlTests/GraphQLQueryTests/GraphQLQueryTestBase.cs index 55a1becb4a..16c1a878fa 100644 --- a/src/Service.Tests/SqlTests/GraphQLQueryTests/GraphQLQueryTestBase.cs +++ b/src/Service.Tests/SqlTests/GraphQLQueryTests/GraphQLQueryTestBase.cs @@ -2295,6 +2295,7 @@ public virtual async Task 
TestConfigTakesPrecedenceForRelationshipFieldsOverDB( Entity clubEntity = new( Source: new("clubs", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new("club", "clubs"), Permissions: new[] { ConfigurationTests.GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -2304,6 +2305,7 @@ public virtual async Task TestConfigTakesPrecedenceForRelationshipFieldsOverDB( Entity playerEntity = new( Source: new("players", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new("player", "players"), Permissions: new[] { ConfigurationTests.GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, diff --git a/src/Service.Tests/SqlTests/GraphQLQueryTests/MsSqlGraphQLQueryTests.cs b/src/Service.Tests/SqlTests/GraphQLQueryTests/MsSqlGraphQLQueryTests.cs index f65e7a5088..00930103c7 100644 --- a/src/Service.Tests/SqlTests/GraphQLQueryTests/MsSqlGraphQLQueryTests.cs +++ b/src/Service.Tests/SqlTests/GraphQLQueryTests/MsSqlGraphQLQueryTests.cs @@ -855,6 +855,7 @@ private static Entity CreateEntityWithDescription(string description) return new( source, gqlOptions, + null, restOptions, [], null, diff --git a/src/Service.Tests/TestHelper.cs b/src/Service.Tests/TestHelper.cs index f1ccd7f13e..b94470b96b 100644 --- a/src/Service.Tests/TestHelper.cs +++ b/src/Service.Tests/TestHelper.cs @@ -78,8 +78,21 @@ public static RuntimeConfigProvider GetRuntimeConfigProvider(FileSystemRuntimeCo /// The source name of the entity. 
public static RuntimeConfig AddMissingEntitiesToConfig(RuntimeConfig config, string entityKey, string entityName, string[] keyfields = null) { + List fields = []; + if (keyfields != null) + { + foreach (string key in keyfields) + { + if (!string.IsNullOrWhiteSpace(key)) + { + fields.Add(new FieldMetadata { Name = key, PrimaryKey = true }); + } + } + } + Entity entity = new( Source: new(entityName, EntitySourceType.Table, null, keyfields), + Fields: fields, GraphQL: new(entityKey, entityKey.Pluralize()), Rest: new(Enabled: true), Permissions: new[] diff --git a/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs b/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs index 16caf29b49..119e6637c6 100644 --- a/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs +++ b/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs @@ -116,6 +116,7 @@ public void InvalidCRUDForStoredProcedure( Entity testEntity = new( Source: entitySource, + Fields: null, Rest: new(EntityRestOptions.DEFAULT_HTTP_VERBS_ENABLED_FOR_SP), GraphQL: new(AuthorizationHelpers.TEST_ENTITY, AuthorizationHelpers.TEST_ENTITY + "s"), Permissions: permissionSettings.ToArray(), @@ -1000,6 +1001,7 @@ public void TestOperationValidityAndCasing(string operationName, bool exceptionE Entity sampleEntity = new( Source: new(AuthorizationHelpers.TEST_ENTITY, EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: null, Permissions: new[] { permissionForEntity }, @@ -1537,6 +1539,7 @@ private static Entity GetSampleEntityUsingSourceAndRelationshipMap( Entity sampleEntity = new( Source: new(source, EntitySourceType.Table, null, null), + Fields: null, Rest: restDetails ?? 
new(Enabled: false), GraphQL: graphQLDetails, Permissions: new[] { permissionForEntity }, @@ -2012,6 +2015,7 @@ public void ValidateRestMethodsForEntityInConfig( string entityName = "EntityA"; // Sets REST method for the entity Entity entity = new(Source: new("TEST_SOURCE", sourceType, null, null), + Fields: null, Rest: new(Methods: methods), GraphQL: new(entityName, ""), Permissions: Array.Empty(), @@ -2337,6 +2341,7 @@ public void TestRuntimeConfigSetupWithNonJsonConstructor() Entity sampleEntity1 = new( Source: entitySource, + Fields: null, GraphQL: null, Rest: null, Permissions: null, diff --git a/src/Service.Tests/UnitTests/RequestValidatorUnitTests.cs b/src/Service.Tests/UnitTests/RequestValidatorUnitTests.cs index 186f254c51..5be1375c0f 100644 --- a/src/Service.Tests/UnitTests/RequestValidatorUnitTests.cs +++ b/src/Service.Tests/UnitTests/RequestValidatorUnitTests.cs @@ -361,7 +361,7 @@ public static void PerformTest( ), Entities: new(new Dictionary() { - { DEFAULT_NAME, new Entity(entitySource, new EntityGraphQLOptions(findRequestContext.EntityName, findRequestContext.EntityName), new EntityRestOptions(new SupportedHttpVerb[0]), null, null, null) } + { DEFAULT_NAME, new Entity(entitySource, new EntityGraphQLOptions(findRequestContext.EntityName, findRequestContext.EntityName), null, new EntityRestOptions(new SupportedHttpVerb[0]), null, null, null) } }) ); MockFileSystem fileSystem = new(); diff --git a/src/Service.Tests/UnitTests/SqlMetadataProviderUnitTests.cs b/src/Service.Tests/UnitTests/SqlMetadataProviderUnitTests.cs index f1f9c4255d..8b4ed68f60 100644 --- a/src/Service.Tests/UnitTests/SqlMetadataProviderUnitTests.cs +++ b/src/Service.Tests/UnitTests/SqlMetadataProviderUnitTests.cs @@ -347,6 +347,7 @@ public void ValidateGraphQLReservedNaming_DatabaseColumns(string dbColumnName, s Entity sampleEntity = new( Source: new("sampleElement", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: false), GraphQL: new("", ""), Permissions: 
new EntityPermission[] { ConfigurationTests.GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -423,6 +424,7 @@ public async Task ValidateExceptionForInvalidResultFieldNames(string invalidFiel { "get_book_by_id", new Entity( Source: new("dbo.get_book_by_id", EntitySourceType.StoredProcedure, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new("get_book_by_id", "get_book_by_ids", Enabled: true), Permissions: new EntityPermission[] { From 5ba86821a90ad1ff300149b2dec13dedeb39d957 Mon Sep 17 00:00:00 2001 From: souvikghosh04 Date: Sat, 25 Oct 2025 06:04:38 +0530 Subject: [PATCH 73/79] [MCP] describe_entities tool fixes and refactoring (#2900) ## Why make this change? ### Closes on - https://github.com/Azure/data-api-builder/issues/2827 Added fixes and refactored the describe_entities tool to support entity metadata discovery for AI agents and LLM clients before performing CRUD operations. ## What is this change? - Expose entity descriptions with proper null handling - Expose field description for entities of type stored-procedure with parameter metadata - Expose permissions for all entities with distinct values - Added nameOnly parameter to return lightweight response (entity names and descriptions only) - Added entities parameter to filter results by specific entity names - Implemented proper error handling using McpResponseBuilder pattern - Added support for relationships, mappings, and primary key information - Improved response structure with metadata section including mode and count ## How was this tested? 
Functional testing using Insomnia client by running DAB in localhost and local SQL DB database - MCP endpoint: http://localhost:5000/mcp - JSON payload (details below) - Validating the response Expected (In-progress) format of entity description in response- ``` { "entities": [ { "name": "entity-name", "description": "entity description", "type": "Table", "fields": [ { "name": "field-name", "type": "unknown", "description": "field description" } ], "permissions": [ "CREATE", "READ", "UPDATE", "DELETE" ], "primaryKey": ["id"], "relationships": [...], "mappings": {...} } ], "count": 1, "mode": "full" } ``` ## Sample Request(s) ``` POST http://localhost:5000/mcp { "jsonrpc": "2.0", "method": "tools/call", "params": { "name": "describe_entities" }, "id": 1 } ``` ``` POST http://localhost:5000/mcp { "jsonrpc": "2.0", "method": "tools/call", "params": { "name": "describe_entities", "arguments": { "nameOnly": true } }, "id": 2 } ``` ``` POST http://localhost:5000/mcp { "jsonrpc": "2.0", "method": "tools/call", "params": { "name": "describe_entities", "arguments": { "entities": ["Book", "Publisher"] } }, "id": 1 } ``` --------- Co-authored-by: Anusha Kolan --- .../BuiltInTools/DescribeEntitiesTool.cs | 364 ++++++++++++++++-- 1 file changed, 334 insertions(+), 30 deletions(-) diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/DescribeEntitiesTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/DescribeEntitiesTool.cs index 3e7ade6075..95c53d1d28 100644 --- a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/DescribeEntitiesTool.cs +++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/DescribeEntitiesTool.cs @@ -5,79 +5,383 @@ using Azure.DataApiBuilder.Config.ObjectModel; using Azure.DataApiBuilder.Core.Configurations; using Azure.DataApiBuilder.Mcp.Model; +using Azure.DataApiBuilder.Mcp.Utils; +using Azure.DataApiBuilder.Service.Exceptions; using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; using ModelContextProtocol.Protocol; using static 
Azure.DataApiBuilder.Mcp.Model.McpEnums; namespace Azure.DataApiBuilder.Mcp.BuiltInTools { + /// + /// Tool to describe all entities configured in DAB, including their types and metadata. + /// public class DescribeEntitiesTool : IMcpTool { + /// + /// Gets the type of the tool, which is BuiltIn for this implementation. + /// public ToolType ToolType { get; } = ToolType.BuiltIn; + /// + /// Gets the metadata for the delete-record tool, including its name, description, and input schema. + /// + /// public Tool GetToolMetadata() { return new Tool { - Name = "describe-entities", - Description = "Lists and describes all entities in the database." + Name = "describe_entities", + Description = "Lists and describes all entities in the database, including their types and available operations.", + InputSchema = JsonSerializer.Deserialize( + @"{ + ""type"": ""object"", + ""properties"": { + ""nameOnly"": { + ""type"": ""boolean"", + ""description"": ""If true, only entity names and descriptions will be returned. If false, full metadata including fields, parameters etc. will be included. Default is false."" + }, + ""entities"": { + ""type"": ""array"", + ""items"": { + ""type"": ""string"" + }, + ""description"": ""Optional list of specific entity names to filter by. If empty, all entities will be described."" + } + } + }" + ) }; } + /// + /// Executes the DescribeEntities tool, returning metadata about configured entities. + /// public Task ExecuteAsync( JsonDocument? arguments, IServiceProvider serviceProvider, CancellationToken cancellationToken = default) { + ILogger? logger = serviceProvider.GetService>(); + try { - // Get the runtime config provider - RuntimeConfigProvider? runtimeConfigProvider = serviceProvider.GetService(); - if (runtimeConfigProvider == null || !runtimeConfigProvider.TryGetConfig(out RuntimeConfig? 
runtimeConfig)) + cancellationToken.ThrowIfCancellationRequested(); + + RuntimeConfigProvider runtimeConfigProvider = serviceProvider.GetRequiredService(); + RuntimeConfig runtimeConfig = runtimeConfigProvider.GetConfig(); + + if (!IsToolEnabled(runtimeConfig)) { - return Task.FromResult(new CallToolResult - { - Content = [new TextContentBlock { Type = "text", Text = "Error: Runtime configuration not available." }] - }); + return Task.FromResult(McpResponseBuilder.BuildErrorResult( + "ToolDisabled", + $"The {GetToolMetadata().Name} tool is disabled in the configuration.", + logger)); } - // Extract entity information from the runtime config - Dictionary entities = new(); + (bool nameOnly, HashSet? entityFilter) = ParseArguments(arguments, logger); + + List> entityList = new(); if (runtimeConfig.Entities != null) { - foreach (KeyValuePair entity in runtimeConfig.Entities) + foreach (KeyValuePair entityEntry in runtimeConfig.Entities) { - entities[entity.Key] = new + cancellationToken.ThrowIfCancellationRequested(); + + string entityName = entityEntry.Key; + Entity entity = entityEntry.Value; + + if (!ShouldIncludeEntity(entityName, entityFilter)) { - source = entity.Value.Source, - permissions = entity.Value.Permissions?.Select(p => new - { - role = p.Role, - actions = p.Actions - }) - }; + continue; + } + + try + { + Dictionary entityInfo = nameOnly + ? 
BuildBasicEntityInfo(entityName, entity) + : BuildFullEntityInfo(entityName, entity); + + entityList.Add(entityInfo); + } + catch (Exception ex) + { + logger?.LogWarning(ex, "Failed to build info for entity {EntityName}", entityName); + } + } + } + + if (entityList.Count == 0) + { + if (entityFilter != null && entityFilter.Count > 0) + { + return Task.FromResult(McpResponseBuilder.BuildErrorResult( + "EntitiesNotFound", + $"No entities found matching the filter: {string.Join(", ", entityFilter)}", + logger)); + } + else + { + return Task.FromResult(McpResponseBuilder.BuildErrorResult( + "NoEntitiesConfigured", + "No entities are configured in the runtime configuration.", + logger)); } } - string entitiesJson = JsonSerializer.Serialize(entities, new JsonSerializerOptions + cancellationToken.ThrowIfCancellationRequested(); + + entityList = entityList.OrderBy(e => e["name"]?.ToString() ?? string.Empty).ToList(); + + List finalEntityList = entityList.Cast().ToList(); + + Dictionary responseData = new() { - WriteIndented = true, - PropertyNamingPolicy = JsonNamingPolicy.CamelCase - }); + ["entities"] = finalEntityList, + ["count"] = finalEntityList.Count, + ["mode"] = nameOnly ? "basic" : "full" + }; - return Task.FromResult(new CallToolResult + if (entityFilter != null && entityFilter.Count > 0) { - Content = [new TextContentBlock { Type = "application/json", Text = entitiesJson }] - }); + responseData["filter"] = entityFilter.ToArray(); + } + + logger?.LogInformation( + "DescribeEntitiesTool returned {EntityCount} entities in {Mode} mode.", + finalEntityList.Count, + nameOnly ? 
"basic" : "full"); + + return Task.FromResult(McpResponseBuilder.BuildSuccessResult( + responseData, + logger, + $"DescribeEntitiesTool success: {finalEntityList.Count} entities returned.")); + } + catch (OperationCanceledException) + { + return Task.FromResult(McpResponseBuilder.BuildErrorResult( + "OperationCanceled", + "The describe operation was canceled.", + logger)); + } + catch (DataApiBuilderException dabEx) + { + logger?.LogError(dabEx, "Data API Builder error in DescribeEntitiesTool"); + return Task.FromResult(McpResponseBuilder.BuildErrorResult( + "DataApiBuilderError", + dabEx.Message, + logger)); + } + catch (ArgumentException argEx) + { + return Task.FromResult(McpResponseBuilder.BuildErrorResult( + "InvalidArguments", + argEx.Message, + logger)); + } + catch (InvalidOperationException ioEx) + { + logger?.LogError(ioEx, "Invalid operation in DescribeEntitiesTool"); + return Task.FromResult(McpResponseBuilder.BuildErrorResult( + "InvalidOperation", + "Failed to retrieve entity metadata: " + ioEx.Message, + logger)); } catch (Exception ex) { - return Task.FromResult(new CallToolResult + logger?.LogError(ex, "Unexpected error in DescribeEntitiesTool"); + return Task.FromResult(McpResponseBuilder.BuildErrorResult( + "UnexpectedError", + "An unexpected error occurred while describing entities.", + logger)); + } + } + + /// + /// Determines whether the tool is enabled based on the specified runtime configuration. + /// + /// The runtime configuration to evaluate. Must not be null. + /// if the tool is enabled and the DescribeEntities property of McpDmlTools + /// is set to ; otherwise, . + private static bool IsToolEnabled(RuntimeConfig runtimeConfig) + { + return runtimeConfig.McpDmlTools?.DescribeEntities == true; + } + + /// + /// Parses the input arguments to extract the 'nameOnly' flag and the optional entity filter list. 
+ /// + /// The arguments to parse + /// The logger + /// A tuple containing the parsed 'nameOnly' flag and the optional entity filter list. + private static (bool nameOnly, HashSet? entityFilter) ParseArguments(JsonDocument? arguments, ILogger? logger) + { + bool nameOnly = false; + HashSet? entityFilter = null; + + if (arguments?.RootElement.ValueKind == JsonValueKind.Object) + { + if (arguments.RootElement.TryGetProperty("nameOnly", out JsonElement nameOnlyElement)) { - Content = [new TextContentBlock { Type = "text", Text = $"Error: {ex.Message}" }] - }); + if (nameOnlyElement.ValueKind == JsonValueKind.True || nameOnlyElement.ValueKind == JsonValueKind.False) + { + nameOnly = nameOnlyElement.GetBoolean(); + } + } + + if (arguments.RootElement.TryGetProperty("entities", out JsonElement entitiesElement) && + entitiesElement.ValueKind == JsonValueKind.Array) + { + entityFilter = new HashSet(StringComparer.OrdinalIgnoreCase); + foreach (JsonElement entityElement in entitiesElement.EnumerateArray()) + { + if (entityElement.ValueKind == JsonValueKind.String) + { + string? entityName = entityElement.GetString(); + if (!string.IsNullOrWhiteSpace(entityName)) + { + entityFilter.Add(entityName); + } + } + } + + if (entityFilter.Count == 0) + { + entityFilter = null; + } + } + } + + logger?.LogDebug("Parsed arguments - nameOnly: {NameOnly}, entityFilter: {EntityFilter}", + nameOnly, entityFilter != null ? string.Join(", ", entityFilter) : "none"); + + return (nameOnly, entityFilter); + } + + /// + /// Determines whether the specified entity should be included based on the provided entity filter. + /// + /// The name of the entity to evaluate. + /// A set of entity names to include. If or empty, all entities are included. + /// if the entity should be included; otherwise, . + private static bool ShouldIncludeEntity(string entityName, HashSet? 
entityFilter) + { + return entityFilter == null || entityFilter.Count == 0 || entityFilter.Contains(entityName); + } + + /// + /// Creates a dictionary containing basic information about an entity. + /// + /// The name of the entity to include in the dictionary. + /// The entity object from which to extract additional information. + /// A dictionary with two keys: "name", containing the entity name, and "description", containing the entity's + /// description or an empty string if the description is null. + private static Dictionary BuildBasicEntityInfo(string entityName, Entity entity) + { + return new Dictionary + { + ["name"] = entityName, + ["description"] = entity.Description ?? string.Empty + }; + } + + /// + /// Builds full entity info: name, description, fields, parameters (for stored procs), permissions. + /// + private static Dictionary BuildFullEntityInfo(string entityName, Entity entity) + { + Dictionary info = new() + { + ["name"] = entityName, + ["description"] = entity.Description ?? string.Empty, + ["fields"] = BuildFieldMetadataInfo(entity.Fields), + }; + + if (entity.Source.Type == EntitySourceType.StoredProcedure) + { + info["parameters"] = BuildParameterMetadataInfo(entity.Source.Parameters); + } + + info["permissions"] = BuildPermissionsInfo(entity); + + return info; + } + + /// + /// Builds a list of metadata information objects from the provided collection of fields. + /// + /// A list of objects representing the fields to process. Can be null. + /// A list of objects, each containing the name and description of a field. If is + /// null, an empty list is returned. + private static List BuildFieldMetadataInfo(List? fields) + { + List result = new(); + + if (fields != null) + { + foreach (FieldMetadata field in fields) + { + result.Add(new + { + name = field.Name, + description = field.Description ?? 
string.Empty + }); + } } + + return result; + } + + /// + /// Builds a list of parameter metadata objects containing information about each parameter. + /// + /// A list of objects representing the parameters to process. Can be null. + /// A list of anonymous objects, each containing the parameter's name, whether it is required, its default + /// value, and its description. Returns an empty list if is null. + private static List BuildParameterMetadataInfo(List? parameters) + { + List result = new(); + + if (parameters != null) + { + foreach (ParameterMetadata param in parameters) + { + result.Add(new + { + name = param.Name, + required = param.Default == null, // required if no default + @default = param.Default, + description = param.Description ?? string.Empty + }); + } + } + + return result; + } + + /// + /// Build a list of permission metadata info + /// + /// The entity object + /// A list of permissions available to the entity + private static string[] BuildPermissionsInfo(Entity entity) + { + HashSet permissions = new(); + + if (entity.Permissions != null) + { + foreach (EntityPermission permission in entity.Permissions) + { + foreach (EntityAction action in permission.Actions) + { + permissions.Add(action.Action.ToString().ToUpperInvariant()); + } + } + } + + return permissions.OrderBy(p => p).ToArray(); } } } From d6d0f832dea4712d75de1d30c93ee5f642776048 Mon Sep 17 00:00:00 2001 From: Jerry Nixon <1749983+JerryNixon@users.noreply.github.com> Date: Sun, 26 Oct 2025 21:53:01 -0600 Subject: [PATCH 74/79] Updating the readme for clarity (pre-MCP) (#2918) This pull request significantly improves the `README.md` by restructuring and expanding the documentation. 
--------- Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- README.md | 388 ++++++++++++++++++++++++++++++++++-------------------- 1 file changed, 244 insertions(+), 144 deletions(-) diff --git a/README.md b/README.md index 1746a62482..d8edb22a50 100644 --- a/README.md +++ b/README.md @@ -7,9 +7,9 @@ [What's new?](https://learn.microsoft.com/azure/data-api-builder/whats-new) -## Community +## Join the community -Join the Data API builder community! This sign up will help us maintain a list of interested developers to be part of our roadmap and to help us better understand the different ways DAB is being used. Sign up [here](https://forms.office.com/pages/responsepage.aspx?id=v4j5cvGGr0GRqy180BHbR1S1JdzGAxhDrefV-tBYtwZUNE1RWVo0SUVMTkRESUZLMVVOS0wwUFNVRy4u). +Want to be part of our priorities and roadmap? Sign up [here](https://forms.office.com/pages/responsepage.aspx?id=v4j5cvGGr0GRqy180BHbR1S1JdzGAxhDrefV-tBYtwZUNE1RWVo0SUVMTkRESUZLMVVOS0wwUFNVRy4u). ![](docs/media/dab-logo.png) @@ -17,199 +17,299 @@ Join the Data API builder community! This sign up will help us maintain a list o Data API builder (DAB) is an open-source, no-code tool that creates secure, full-featured REST and GraphQL endpoints for your database. It’s a CRUD data API engine that runs in a container—on Azure, any other cloud, or on-premises. DAB is built for developers with integrated tooling, telemetry, and other productivity features. -```mermaid -erDiagram - DATA_API_BUILDER ||--|{ DATA_API : "Provides" - DATA_API_BUILDER { - container true "Microsoft Container Repository" - open-source true "MIT license / any cloud or on-prem." 
- objects true "Supports: Table / View / Stored Procedure" - developer true "Swagger / Nitro (fka Banana Cake Pop)" - otel true "Open Telemetry / Structured Logs / Health Endpoints" - security true "EntraId / EasyAuth / OAuth / JWT / Anonymous" - cache true "Level1 (in-memory) / Level2 (redis)" - policy true "Item policy / Database policy / Claims policy" - hot_reload true "Dynamically controllable log levels" - } - DATA_API ||--o{ DATASOURCE : "Queries" - DATA_API { - REST true "$select / $filter / $orderby" - GraphQL true "relationships / multiple mutations" - } - DATASOURCE { - MS_SQL Supported - PostgreSQL Supported - Cosmos_DB Supported - MySQL Supported - SQL_DW Supported - } - CLIENT ||--o{ DATA_API : "Consumes" - CLIENT { - Transport HTTP "HTTP / HTTPS" - Syntax JSON "Standard payloads" - Mobile Supported "No requirement" - Web Supported "No requirement" - Desktop Supported "No requirement" - Language Any "No requirement" - Framework None "Not required" - Library None "Not required" - ORM None "Not required" - Driver None "Not required" - } -``` +> [!IMPORTANT] +> Data API builder (DAB) is open source and always free. + +### Which databases does Data API builder support? + +| | Azure SQL | SQL Server | SQLDW | Cosmos DB | PostgreSQL | MySQL | +| :-----------: | :-------: | :--------: | :---: | :-------: | :--------: | :---: | +| **Supported** | Yes | Yes | Yes | Yes | Yes | Yes | + +### Which environments does Data API builder support? + +| | On-Prem | Azure | AWS | GCP | Other | +| :-----------: | :-----: | :---: | :--: | :--: | :---: | +| **Supported** | Yes | Yes | Yes | Yes | Yes | + +### Which endpoints does Data API builder support? 
+ +| | REST | GraphQL | MCP | +| :-----------: | :--: | :-----: | :---------: | +| **Supported** | Yes | Yes | Coming soon | + +## Getting started + +Use the [Getting Started](https://learn.microsoft.com/azure/data-api-builder/get-started/get-started-with-data-api-builder) tutorial to quickly explore the core tools and concepts. -## Getting Started +### 1. Install the `dotnet` [command line](https://get.dot.net) -Use the [Getting Started](https://learn.microsoft.com/azure/data-api-builder/get-started/get-started-with-data-api-builder) tutorial to quickly explore the core tools and concepts. It gives you hands-on experience with how DAB makes you more efficient by removing boilerplate code. +https://get.dot.net -**1. Install the DAB CLI** +> [!NOTE] +> You may already have .NET installed! -The [DAB CLI](https://aka.ms/dab/docs) is a cross-platform .NET tool. Install the [.NET SDK](https://get.dot.net) before running: +The Data API builder (DAB) command line requires the .NET runtime version 8 or later. +#### Validate your installation + +```sh +dotnet --version ``` + +### 2. Install the `dab` command line + +The Data API builder (DAB) command line is cross-platform and intended for local developer use. + +```sh dotnet tool install microsoft.dataapibuilder -g ``` -**2. Create your initial configuration file** +#### Validate your installation + +```sh +dab --version +``` + +### 3. Create your database (example: Azure SQL database / T-SQL) + +This example uses a single table for simplicity. + +```sql +CREATE TABLE dbo.Todo +( + Id INT PRIMARY KEY IDENTITY, + Title NVARCHAR(500) NOT NULL, + IsCompleted BIT NOT NULL DEFAULT 0 +); +INSERT dbo.Todo (Title, IsCompleted) +VALUES + ('Walk the dog', 0), + ('Feed the fish', 0), + ('Clean the cat', 1); +``` + +### 4. Prepare your connection string -DAB requires a JSON configuration file. Edit manually or with the CLI. Use `dab --help` for syntax options. 
+Data API builder (DAB) supports `.env` files for testing process-level environment variables. +#### PowerShell (Windows) + +```ps +echo "my-connection-string=$env:database_connection_string" > .env ``` + +#### cmd.exe (Windows) + +```cmd +echo my-connection-string=%database_connection_string% > .env +``` + +#### bash (macOS/Linux) + +```bash +echo "my-connection-string=$database_connection_string" > .env +``` + +#### Resulting .env file + +The file `.env` is automatically created through this process. These are the resulting contents: + +``` +"my-connection-string=$env:database_connection_string" +``` +> [!NOTE] +> Be sure and replace `database_connection_string` with your actual database connection string. + +> [!IMPORTANT] +> Adding `.env` to your `.gitignore` file will help ensure your secrets are not added to source control. + +### 5. Create your initial configuration file + +Data API builder (DAB) requires a JSON configuration file. Use `dab --help` for syntax options. + +```sh dab init --database-type mssql --connection-string "@env('my-connection-string')" --host-mode development ``` -**3. Add your first table** +> [!NOTE] +> Including `--host-mode development` enables Swagger for REST and Nitro for GraphQL. -DAB supports tables, views, and stored procedures. It works with SQL Server, Azure Cosmos DB, PostgreSQL, MySQL, and SQL Data Warehouse. Security is engine-level, but permissions are per entity. +#### Resulting configuration +The file `dab-config.json` is automatically created through this process. 
These are the resulting contents: + +```json +{ + "$schema": "https://github.com/Azure/data-api-builder/releases/download/v1.5.56/dab.draft.schema.json", + "data-source": { + "database-type": "mssql", + "connection-string": "@env('my-connection-string')", + "options": { + "set-session-context": false + } + }, + "runtime": { + "rest": { + "enabled": true, + "path": "/api", + "request-body-strict": true + }, + "graphql": { + "enabled": true, + "path": "/graphql", + "allow-introspection": true + }, + "host": { + "cors": { + "origins": [], + "allow-credentials": false + }, + "authentication": { + "provider": "StaticWebApps" + }, + "mode": "development" + } + }, + "entities": { } +} ``` -dab add Actor - --source "dbo.Actor" +### 6. Add your table to the configuration + +```sh +dab add Todo + --source "dbo.Todo" --permissions "anonymous:*" ``` -**4. Run Data API builder** +> [!NOTE] +> DAB supports tables, views, and stored procedures. When the type is not specified, the default is `table`. + +#### Resulting configuration + +The `entities` section of the configuration is no longer empty: + +```json +{ + "entities": { + "Todo": { + "source": { + "object": "dbo.Todo", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Todo", + "plural": "Todos" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "*" + } + ] + } + ] + } + } +} +``` -In `production`, DAB runs in a container. In `development`, it’s self-hosted locally with hot reload, Swagger, and Nitro (fka Banana Cake Pop) support. +### 7. Run Data API builder -``` +In `production`, DAB runs in a container. In `development`, it’s locally self-hosted. + +```sh dab start ``` -> **Note**: Before you run `dab start`, make sure your connection string is stored in an environment variable called `my-connection-string`. This is required for `@env('my-connection-string')` in your config file to work. 
The easiest way is to create a `.env` file with `name=value` pairs—DAB will load these automatically at runtime. +> [!IMPORTANT] +> The DAB CLI assumes your configuration file is called `dab-config.json` and is in the local folder. -**5. Access your data source** +### 8. Access your data! -By default, DAB enables both REST and GraphQL. REST supports `$select`, `$filter`, and `$orderBy`. GraphQL uses config-defined relationships. +By default, DAB enables both REST and GraphQL. +```sh +GET http://localhost:5000/api/Todo ``` -GET http://localhost:5000/api/Actor -``` - -### Walk-through video - - Play Video - +> [!NOTE] +> Change the URL to match your port if it is different. -Demo source code: [startrek](https://aka.ms/dab/startrek) +#### Other things you should try -## Overview - -| Category | Features | -|----------------|----------| -| **Database Objects** | • NoSQL collections
• RDBMS tables, views, stored procedures | -| **Data Sources** | • SQL Server & Azure SQL
• Azure Cosmos DB
• PostgreSQL
• MySQL | -| **REST** | • `$select` for projection
• `$filter` for filtering
• `$orderBy` for sorting | -| **GraphQL** | • Relationship navigation
• Data aggregation
• Multiple mutations | -| **Telemetry** | • Structured logs
• OpenTelemetry
• Application Insights
• Health endpoints | -| **Advanced** | • Pagination
• Level 1 (in-memory) cache | -| **Authentication** | • OAuth2/JWT
• EasyAuth
• Entra ID | -| **Authorization** | • Role-based support
• Entity permissions
• Database policies | -| **Developer** | • Cross-platform CLI
• Swagger (REST)
• Nitro [previously Banana Cake Pop] (GraphQL)
• Open Source
• Configuration Hot Reload | +* DAB’s Health endpoint: `http://localhost:5000/health` +* DAB’s Swagger UI: `http://localhost:5000/swagger` +* DAB’s Nitro UI: `http://localhost:5000/graphql` ## How does it work? -This diagram shows how DAB works. DAB dynamically creates endpoints from your config file. It translates HTTP requests to SQL, returns JSON, and auto-pages results. +DAB dynamically creates endpoints and translates requests to SQL, returning JSON. ```mermaid sequenceDiagram - actor Client - - box Data API builder (DAB) - participant Endpoint - participant QueryBuilder - end - - participant Configuration as Configuration File - - box Data Source - participant DB - end - - Endpoint->>Endpoint: Start - activate Endpoint - Endpoint->>Configuration: Request - Configuration-->>Endpoint: Configuration - Endpoint->>DB: Request - DB-->>Endpoint: Metadata - Note over Endpoint, DB: Some configuration is validated against the metadata - Endpoint-->>Endpoint: Configure - deactivate Endpoint - Client-->>Endpoint: HTTP Request - activate Endpoint - critical - Endpoint-->>Endpoint: Authenticate - Endpoint-->>Endpoint: Authorize - end - Endpoint->>QueryBuilder: Request - QueryBuilder-->>Endpoint: SQL - alt Cache - Endpoint-->>Endpoint: Use Cache - else Query - Endpoint-->>DB: Request - Note over Endpoint, DB: Query is automatically throttled and results paginated - DB->>Endpoint: Results - Note over Endpoint, DB: Results are automatically cached for use in next request - end - Endpoint->>Client: HTTP 200 - deactivate Endpoint -``` - -Because DAB is stateless, it can scale up or out using any container size. It builds a feature-rich API like you would from scratch—but now you don’t have to. 
- -## Additional Resources - -- [Online Documentation](https://aka.ms/dab/docs) -- [Official Samples](https://aka.ms/dab/samples) -- [Known Issues](https://learn.microsoft.com/azure/data-api-builder/known-issues) -- [Feature Roadmap](https://github.com/Azure/data-api-builder/discussions/1377) + actor Client as Client + participant Endpoint as Endpoint + participant QueryBuilder as QueryBuilder + participant DB as Database + + %% Initialization / Warming up section (light grey) + rect rgba(120,120,120,0.10) + Endpoint -->>+ Endpoint: Read Config + Endpoint ->> DB: Query Metadata + DB -->> Endpoint: Metadata Response + Endpoint ->>- Endpoint: Start Engine + end + + %% Request/Response section (very light purple) + rect rgba(180,150,255,0.11) + Client ->>+ Endpoint: HTTP Request + Endpoint ->> Endpoint: Authorize + Endpoint ->> QueryBuilder: Invoke + QueryBuilder -->> Endpoint: SQL Query + Endpoint ->> DB: Submit Query + DB -->> Endpoint: Data Response + Endpoint -->>- Client: HTTP Response + end +``` -#### References +## Additional resources -- [Microsoft REST API Guidelines](https://github.com/microsoft/api-guidelines/blob/vNext/Guidelines.md) -- [Microsoft Azure REST API Guidelines](https://github.com/microsoft/api-guidelines/blob/vNext/azure/Guidelines.md) -- [GraphQL Specification](https://graphql.org/) +* [Online Documentation](https://aka.ms/dab/docs) +* [Official Samples](https://aka.ms/dab/samples) +* [Known Issues](https://learn.microsoft.com/azure/data-api-builder/known-issues) +* [Feature Roadmap](https://github.com/Azure/data-api-builder/discussions/1377) -### How to Contribute +#### References -To contribute, see these documents: +* [Microsoft REST API Guidelines](https://github.com/microsoft/api-guidelines/blob/vNext/Guidelines.md) +* [Microsoft Azure REST API Guidelines](https://github.com/microsoft/api-guidelines/blob/vNext/azure/Guidelines.md) +* [GraphQL Specification](https://graphql.org/) -- [Code of Conduct](./CODE_OF_CONDUCT.md) -- 
[Security](./SECURITY.md) -- [Contributing](./CONTRIBUTING.md) +### How to contribute -### License +To contribute, see these documents: -**Data API builder for Azure Databases** is licensed under the MIT License. See [LICENSE](./LICENSE.txt) for details. +* [Code of Conduct](./CODE_OF_CONDUCT.md) +* [Security](./SECURITY.md) +* [Contributing](./CONTRIBUTING.md) +* [MIT License](./LICENSE.txt) -### Third-Party Component Notice +### Third-party component notice -Nitro (fka Banana Cake Pop by ChilliCream, Inc.) may optionally store work in its cloud service via your ChilliCream account. Microsoft is not affiliated with or endorsing this service. Use at your discretion. +Nitro (formerly Banana Cake Pop by ChilliCream, Inc.) may optionally store work in its cloud service via your ChilliCream account. Microsoft is not affiliated with or endorsing this service. Use at your discretion. ### Trademarks -This project may use trademarks or logos. Use of Microsoft trademarks must follow Microsoft’s [Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks). Use of third-party marks is subject to their policies. +This project may use trademarks or logos. Use of Microsoft trademarks must follow Microsoft’s [Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks). Use of third-party marks is subject to their policies. \ No newline at end of file From 69b0b17392a8ce2c93aa6044f575946f5721527b Mon Sep 17 00:00:00 2001 From: Giovanna Ribeiro <44936262+gilemos@users.noreply.github.com> Date: Wed, 29 Oct 2025 19:17:55 -0700 Subject: [PATCH 75/79] Fix for stored procedure empty cell bug (#2887) ## Why make this change? We have recently noticed that, if we have a column of type NVARCHAR or VARCHAR and we try to run a stored procedure that reads a row in which that column has an empty string value, we had an internal server error. 
This error happens when we try to run the method GetChars passing in a buffer with length 0 This PR aims to fix this problem ## What is this change? We have added a small change to the method that was throwing the exception. If we find that resultFieldSize is 0 - which means that the data in the cell we are reading has a length of 0 - we will not call the method GetChars to read the data, but assume the data is empty and return the size of the data read in bytes as 0. As you can see in the example bellow, that fixes the issue. ## How was this tested? - [x] Integration Tests - [x] Unit Tests ## Sample Request(s) We have a table with a column of type NVARCHAR called "Description". In one of the rows, Description is an empty string image **Before the changes:** If we try to run a stored procedure that reads that empty cell, we get an error image **After changes** Stored procedure runs as expected image --------- Co-authored-by: Giovanna Ribeiro Co-authored-by: RubenCerna2079 <32799214+RubenCerna2079@users.noreply.github.com> --- src/Core/Resolvers/QueryExecutor.cs | 13 ++ src/Service.Tests/DatabaseSchema-DwSql.sql | 3 +- src/Service.Tests/DatabaseSchema-MsSql.sql | 3 +- src/Service.Tests/DatabaseSchema-MySql.sql | 3 +- .../DatabaseSchema-PostgreSql.sql | 3 +- .../GraphQLMutationTestBase.cs | 4 +- .../GraphQLPaginationTestBase.cs | 191 +++++++++--------- .../MsSqlGraphQLQueryTests.cs | 16 ++ .../RestApiTests/Delete/DwSqlDeleteApiTest.cs | 2 +- .../RestApiTests/Delete/MsSqlDeleteApiTest.cs | 2 +- .../UnitTests/SqlQueryExecutorUnitTests.cs | 45 +++++ 11 files changed, 187 insertions(+), 98 deletions(-) diff --git a/src/Core/Resolvers/QueryExecutor.cs b/src/Core/Resolvers/QueryExecutor.cs index 908c1bb1e8..97e2f7e8d4 100644 --- a/src/Core/Resolvers/QueryExecutor.cs +++ b/src/Core/Resolvers/QueryExecutor.cs @@ -740,6 +740,12 @@ internal int StreamCharData(DbDataReader dbDataReader, long availableSize, Strin // else we throw exception. 
ValidateSize(availableSize, resultFieldSize); + // If the cell is empty, don't append anything to the resultJsonString and return 0. + if (resultFieldSize == 0) + { + return 0; + } + char[] buffer = new char[resultFieldSize]; // read entire field into buffer and reduce available size. @@ -766,6 +772,13 @@ internal int StreamByteData(DbDataReader dbDataReader, long availableSize, int o // else we throw exception. ValidateSize(availableSize, resultFieldSize); + // If the cell is empty, set resultBytes to an empty array and return 0. + if (resultFieldSize == 0) + { + resultBytes = Array.Empty(); + return 0; + } + resultBytes = new byte[resultFieldSize]; dbDataReader.GetBytes(ordinal: ordinal, dataOffset: 0, buffer: resultBytes, bufferOffset: 0, length: resultBytes.Length); diff --git a/src/Service.Tests/DatabaseSchema-DwSql.sql b/src/Service.Tests/DatabaseSchema-DwSql.sql index 300ef7ff32..daed665949 100644 --- a/src/Service.Tests/DatabaseSchema-DwSql.sql +++ b/src/Service.Tests/DatabaseSchema-DwSql.sql @@ -336,7 +336,8 @@ VALUES (1, 'Awesome book', 1234), (17, 'CONN%_CONN', 1234), (18, '[Special Book]', 1234), (19, 'ME\YOU', 1234), -(20, 'C:\\LIFE', 1234); +(20, 'C:\\LIFE', 1234), +(21, '', 1234); INSERT INTO book_website_placements(id, book_id, price) VALUES (1, 1, 100), (2, 2, 50), (3, 3, 23), (4, 5, 33); diff --git a/src/Service.Tests/DatabaseSchema-MsSql.sql b/src/Service.Tests/DatabaseSchema-MsSql.sql index 3605b2628a..4e87394aee 100644 --- a/src/Service.Tests/DatabaseSchema-MsSql.sql +++ b/src/Service.Tests/DatabaseSchema-MsSql.sql @@ -531,7 +531,8 @@ VALUES (1, 'Awesome book', 1234), (17, 'CONN%_CONN', 1234), (18, '[Special Book]', 1234), (19, 'ME\YOU', 1234), -(20, 'C:\\LIFE', 1234); +(20, 'C:\\LIFE', 1234), +(21, '', 1234); SET IDENTITY_INSERT books OFF SET IDENTITY_INSERT books_mm ON diff --git a/src/Service.Tests/DatabaseSchema-MySql.sql b/src/Service.Tests/DatabaseSchema-MySql.sql index f746bc063a..dda93d86d1 100644 --- 
a/src/Service.Tests/DatabaseSchema-MySql.sql +++ b/src/Service.Tests/DatabaseSchema-MySql.sql @@ -388,7 +388,8 @@ INSERT INTO books(id, title, publisher_id) (17, 'CONN%_CONN', 1234), (18, '[Special Book]', 1234), (19, 'ME\\YOU', 1234), - (20, 'C:\\\\LIFE', 1234); + (20, 'C:\\\\LIFE', 1234), + (21, '', 1234); INSERT INTO book_website_placements(book_id, price) VALUES (1, 100), (2, 50), (3, 23), (5, 33); INSERT INTO website_users(id, username) VALUES (1, 'George'), (2, NULL), (3, ''), (4, 'book_lover_95'), (5, 'null'); INSERT INTO book_author_link(book_id, author_id) VALUES (1, 123), (2, 124), (3, 123), (3, 124), (4, 123), (4, 124), (5, 126); diff --git a/src/Service.Tests/DatabaseSchema-PostgreSql.sql b/src/Service.Tests/DatabaseSchema-PostgreSql.sql index 14615707b1..523e96c22f 100644 --- a/src/Service.Tests/DatabaseSchema-PostgreSql.sql +++ b/src/Service.Tests/DatabaseSchema-PostgreSql.sql @@ -391,7 +391,8 @@ INSERT INTO books(id, title, publisher_id) (17, 'CONN%_CONN', 1234), (18, '[Special Book]', 1234), (19, 'ME\YOU', 1234), - (20, 'C:\\LIFE', 1234); + (20, 'C:\\LIFE', 1234), + (21, '', 1234); INSERT INTO book_website_placements(book_id, price) VALUES (1, 100), (2, 50), (3, 23), (5, 33); INSERT INTO website_users(id, username) VALUES (1, 'George'), (2, NULL), (3, ''), (4, 'book_lover_95'), (5, 'null'); INSERT INTO book_author_link(book_id, author_id) VALUES (1, 123), (2, 124), (3, 123), (3, 124), (4, 123), (4, 124), (5, 126);; diff --git a/src/Service.Tests/SqlTests/GraphQLMutationTests/GraphQLMutationTestBase.cs b/src/Service.Tests/SqlTests/GraphQLMutationTests/GraphQLMutationTestBase.cs index c9207a6672..745d5eade3 100644 --- a/src/Service.Tests/SqlTests/GraphQLMutationTests/GraphQLMutationTestBase.cs +++ b/src/Service.Tests/SqlTests/GraphQLMutationTests/GraphQLMutationTestBase.cs @@ -257,7 +257,7 @@ public async Task TestStoredProcedureMutationForDeletion(string dbQueryToVerifyD string currentDbResponse = await 
GetDatabaseResultAsync(dbQueryToVerifyDeletion); JsonDocument currentResult = JsonDocument.Parse(currentDbResponse); - Assert.AreEqual(currentResult.RootElement.GetProperty("maxId").GetInt64(), 20); + Assert.AreEqual(currentResult.RootElement.GetProperty("maxId").GetInt64(), 21); JsonElement graphQLResponse = await ExecuteGraphQLRequestAsync(graphQLMutation, graphQLMutationName, isAuthenticated: true); // Stored Procedure didn't return anything @@ -266,7 +266,7 @@ public async Task TestStoredProcedureMutationForDeletion(string dbQueryToVerifyD // check to verify new element is inserted string updatedDbResponse = await GetDatabaseResultAsync(dbQueryToVerifyDeletion); JsonDocument updatedResult = JsonDocument.Parse(updatedDbResponse); - Assert.AreEqual(updatedResult.RootElement.GetProperty("maxId").GetInt64(), 19); + Assert.AreEqual(updatedResult.RootElement.GetProperty("maxId").GetInt64(), 20); } public async Task InsertMutationOnTableWithTriggerWithNonAutoGenPK(string dbQuery) diff --git a/src/Service.Tests/SqlTests/GraphQLPaginationTests/GraphQLPaginationTestBase.cs b/src/Service.Tests/SqlTests/GraphQLPaginationTests/GraphQLPaginationTestBase.cs index e7e18d6090..33db8f8b49 100644 --- a/src/Service.Tests/SqlTests/GraphQLPaginationTests/GraphQLPaginationTestBase.cs +++ b/src/Service.Tests/SqlTests/GraphQLPaginationTests/GraphQLPaginationTestBase.cs @@ -84,95 +84,101 @@ public async Task RequestMaxUsingNegativeOne() } }"; - // this resultset represents all books in the db. 
- JsonElement actual = await ExecuteGraphQLRequestAsync(graphQLQuery, graphQLQueryName, isAuthenticated: false); string expected = @"{ - ""items"": [ + ""items"": [ { - ""id"": 1, - ""title"": ""Awesome book"" + ""id"": 1, + ""title"": ""Awesome book"" }, { - ""id"": 2, - ""title"": ""Also Awesome book"" + ""id"": 2, + ""title"": ""Also Awesome book"" }, { - ""id"": 3, - ""title"": ""Great wall of china explained"" + ""id"": 3, + ""title"": ""Great wall of china explained"" }, { - ""id"": 4, - ""title"": ""US history in a nutshell"" + ""id"": 4, + ""title"": ""US history in a nutshell"" }, { - ""id"": 5, - ""title"": ""Chernobyl Diaries"" + ""id"": 5, + ""title"": ""Chernobyl Diaries"" }, { - ""id"": 6, - ""title"": ""The Palace Door"" + ""id"": 6, + ""title"": ""The Palace Door"" }, { - ""id"": 7, - ""title"": ""The Groovy Bar"" + ""id"": 7, + ""title"": ""The Groovy Bar"" }, { - ""id"": 8, - ""title"": ""Time to Eat"" + ""id"": 8, + ""title"": ""Time to Eat"" }, { - ""id"": 9, - ""title"": ""Policy-Test-01"" + ""id"": 9, + ""title"": ""Policy-Test-01"" }, { - ""id"": 10, - ""title"": ""Policy-Test-02"" + ""id"": 10, + ""title"": ""Policy-Test-02"" }, { - ""id"": 11, - ""title"": ""Policy-Test-04"" + ""id"": 11, + ""title"": ""Policy-Test-04"" }, { - ""id"": 12, - ""title"": ""Time to Eat 2"" + ""id"": 12, + ""title"": ""Time to Eat 2"" + }, + { + ""id"": 13, + ""title"": ""Before Sunrise"" }, { - ""id"": 13, - ""title"": ""Before Sunrise"" + ""id"": 14, + ""title"": ""Before Sunset"" }, { - ""id"": 14, - ""title"": ""Before Sunset"" + ""id"": 15, + ""title"": ""SQL_CONN"" }, { - ""id"": 15, - ""title"": ""SQL_CONN"" + ""id"": 16, + ""title"": ""SOME%CONN"" }, { - ""id"": 16, - ""title"": ""SOME%CONN"" + ""id"": 17, + ""title"": ""CONN%_CONN"" }, { - ""id"": 17, - ""title"": ""CONN%_CONN"" + ""id"": 18, + ""title"": ""[Special Book]"" }, { - ""id"": 18, - ""title"": ""[Special Book]"" + ""id"": 19, + ""title"": ""ME\\YOU"" }, { - ""id"": 19, - ""title"": 
""ME\\YOU"" + ""id"": 20, + ""title"": ""C:\\\\LIFE"" }, { - ""id"": 20, - ""title"": ""C:\\\\LIFE"" + ""id"": 21, + ""title"": """" } - ], - ""endCursor"": null, - ""hasNextPage"": false + ], + ""endCursor"": null, + ""hasNextPage"": false }"; + // Note: The max page size is 21 for MsSql and 20 for all other data sources, so when using -1 + // this resultset represents all books in the db. + JsonElement actual = await ExecuteGraphQLRequestAsync(graphQLQuery, graphQLQueryName, isAuthenticated: false); + SqlTestHelper.PerformTestEqualJsonStrings(expected, actual.ToString()); } @@ -196,91 +202,96 @@ public async Task RequestNoParamFullConnection() }"; JsonElement actual = await ExecuteGraphQLRequestAsync(graphQLQuery, graphQLQueryName, isAuthenticated: false); + string expected = @"{ - ""items"": [ + ""items"": [ { - ""id"": 1, - ""title"": ""Awesome book"" + ""id"": 1, + ""title"": ""Awesome book"" }, { - ""id"": 2, - ""title"": ""Also Awesome book"" + ""id"": 2, + ""title"": ""Also Awesome book"" }, { - ""id"": 3, - ""title"": ""Great wall of china explained"" + ""id"": 3, + ""title"": ""Great wall of china explained"" }, { - ""id"": 4, - ""title"": ""US history in a nutshell"" + ""id"": 4, + ""title"": ""US history in a nutshell"" }, { - ""id"": 5, - ""title"": ""Chernobyl Diaries"" + ""id"": 5, + ""title"": ""Chernobyl Diaries"" }, { - ""id"": 6, - ""title"": ""The Palace Door"" + ""id"": 6, + ""title"": ""The Palace Door"" }, { - ""id"": 7, - ""title"": ""The Groovy Bar"" + ""id"": 7, + ""title"": ""The Groovy Bar"" }, { - ""id"": 8, - ""title"": ""Time to Eat"" + ""id"": 8, + ""title"": ""Time to Eat"" }, { - ""id"": 9, - ""title"": ""Policy-Test-01"" + ""id"": 9, + ""title"": ""Policy-Test-01"" }, { - ""id"": 10, - ""title"": ""Policy-Test-02"" + ""id"": 10, + ""title"": ""Policy-Test-02"" }, { - ""id"": 11, - ""title"": ""Policy-Test-04"" + ""id"": 11, + ""title"": ""Policy-Test-04"" }, { - ""id"": 12, - ""title"": ""Time to Eat 2"" + ""id"": 12, + ""title"": 
""Time to Eat 2"" }, { - ""id"": 13, - ""title"": ""Before Sunrise"" + ""id"": 13, + ""title"": ""Before Sunrise"" }, { - ""id"": 14, - ""title"": ""Before Sunset"" + ""id"": 14, + ""title"": ""Before Sunset"" }, { - ""id"": 15, - ""title"": ""SQL_CONN"" + ""id"": 15, + ""title"": ""SQL_CONN"" }, { - ""id"": 16, - ""title"": ""SOME%CONN"" + ""id"": 16, + ""title"": ""SOME%CONN"" }, { - ""id"": 17, - ""title"": ""CONN%_CONN"" + ""id"": 17, + ""title"": ""CONN%_CONN"" }, { - ""id"": 18, - ""title"": ""[Special Book]"" + ""id"": 18, + ""title"": ""[Special Book]"" }, { - ""id"": 19, - ""title"": ""ME\\YOU"" + ""id"": 19, + ""title"": ""ME\\YOU"" }, { - ""id"": 20, - ""title"": ""C:\\\\LIFE"" + ""id"": 20, + ""title"": ""C:\\\\LIFE"" + }, + { + ""id"": 21, + ""title"": """" } - ], - ""endCursor"": null, - ""hasNextPage"": false + ], + ""endCursor"": null, + ""hasNextPage"": false }"; SqlTestHelper.PerformTestEqualJsonStrings(expected, actual.ToString()); diff --git a/src/Service.Tests/SqlTests/GraphQLQueryTests/MsSqlGraphQLQueryTests.cs b/src/Service.Tests/SqlTests/GraphQLQueryTests/MsSqlGraphQLQueryTests.cs index 00930103c7..1d90a4c6f1 100644 --- a/src/Service.Tests/SqlTests/GraphQLQueryTests/MsSqlGraphQLQueryTests.cs +++ b/src/Service.Tests/SqlTests/GraphQLQueryTests/MsSqlGraphQLQueryTests.cs @@ -268,6 +268,22 @@ SELECT title FROM books await QueryWithSingleColumnPrimaryKey(msSqlQuery); } + [TestMethod] + public virtual async Task QueryWithEmptyStringResult() + { + string graphQLQueryName = "book_by_pk"; + string graphQLQuery = @"{ + book_by_pk(id: 21) { + title + } + }"; + + JsonElement actual = await ExecuteGraphQLRequestAsync(graphQLQuery, graphQLQueryName, isAuthenticated: false); + + string title = actual.GetProperty("title").GetString(); + Assert.AreEqual("", title); + } + [TestMethod] public async Task QueryWithSingleColumnPrimaryKeyAndMappings() { diff --git a/src/Service.Tests/SqlTests/RestApiTests/Delete/DwSqlDeleteApiTest.cs 
b/src/Service.Tests/SqlTests/RestApiTests/Delete/DwSqlDeleteApiTest.cs index c574db540f..984b252727 100644 --- a/src/Service.Tests/SqlTests/RestApiTests/Delete/DwSqlDeleteApiTest.cs +++ b/src/Service.Tests/SqlTests/RestApiTests/Delete/DwSqlDeleteApiTest.cs @@ -20,7 +20,7 @@ public class DwSqlDeleteApiTests : DeleteApiTestBase { "DeleteOneWithStoredProcedureTest", $"SELECT [id] FROM { _integrationTableName } " + - $"WHERE id = 20" + $"WHERE id = 21" } }; #region Test Fixture Setup diff --git a/src/Service.Tests/SqlTests/RestApiTests/Delete/MsSqlDeleteApiTest.cs b/src/Service.Tests/SqlTests/RestApiTests/Delete/MsSqlDeleteApiTest.cs index 13f4d31cf2..cf8a1f6fc5 100644 --- a/src/Service.Tests/SqlTests/RestApiTests/Delete/MsSqlDeleteApiTest.cs +++ b/src/Service.Tests/SqlTests/RestApiTests/Delete/MsSqlDeleteApiTest.cs @@ -29,7 +29,7 @@ public class MsSqlDeleteApiTests : DeleteApiTestBase // This query is used to confirm that the item no longer exists, not the // actual delete query. $"SELECT [id] FROM { _integrationTableName } " + - $"WHERE id = 20 FOR JSON PATH, INCLUDE_NULL_VALUES, WITHOUT_ARRAY_WRAPPER" + $"WHERE id = 21 FOR JSON PATH, INCLUDE_NULL_VALUES, WITHOUT_ARRAY_WRAPPER" } }; #region Test Fixture Setup diff --git a/src/Service.Tests/UnitTests/SqlQueryExecutorUnitTests.cs b/src/Service.Tests/UnitTests/SqlQueryExecutorUnitTests.cs index 908b7019c4..2b62c6b444 100644 --- a/src/Service.Tests/UnitTests/SqlQueryExecutorUnitTests.cs +++ b/src/Service.Tests/UnitTests/SqlQueryExecutorUnitTests.cs @@ -623,6 +623,51 @@ public void ValidateStreamingLogicForStoredProcedures(int readDataLoops, bool ex } } + /// + /// Makes sure the stream logic handles cells with empty strings correctly. 
+ /// + [DataTestMethod, TestCategory(TestCategory.MSSQL)] + public void ValidateStreamingLogicForEmptyCellsAsync() + { + TestHelper.SetupDatabaseEnvironment(TestCategory.MSSQL); + FileSystem fileSystem = new(); + FileSystemRuntimeConfigLoader loader = new(fileSystem); + RuntimeConfig runtimeConfig = new( + Schema: "UnitTestSchema", + DataSource: new DataSource(DatabaseType: DatabaseType.MSSQL, "", Options: null), + Runtime: new( + Rest: new(), + GraphQL: new(), + Mcp: new(), + Host: new(Cors: null, Authentication: null, MaxResponseSizeMB: 5) + ), + Entities: new(new Dictionary())); + + RuntimeConfigProvider runtimeConfigProvider = TestHelper.GenerateInMemoryRuntimeConfigProvider(runtimeConfig); + + Mock>> queryExecutorLogger = new(); + Mock httpContextAccessor = new(); + DbExceptionParser dbExceptionParser = new MsSqlDbExceptionParser(runtimeConfigProvider); + + // Instantiate the MsSqlQueryExecutor and Setup parameters for the query + MsSqlQueryExecutor msSqlQueryExecutor = new(runtimeConfigProvider, dbExceptionParser, queryExecutorLogger.Object, httpContextAccessor.Object); + + Mock dbDataReader = new(); + dbDataReader.Setup(d => d.HasRows).Returns(true); + + // Make sure GetChars returns 0 when buffer is null + dbDataReader.Setup(x => x.GetChars(It.IsAny(), It.IsAny(), null, It.IsAny(), It.IsAny())).Returns(0); + + // Make sure available size is set to > 0 + int availableSize = (int)runtimeConfig.MaxResponseSizeMB() * 1024 * 1024; + + // Stream char data should not return an exception + availableSize -= msSqlQueryExecutor.StreamCharData( + dbDataReader: dbDataReader.Object, availableSize: availableSize, resultJsonString: new(), ordinal: 0); + + Assert.AreEqual(availableSize, (int)runtimeConfig.MaxResponseSizeMB() * 1024 * 1024); + } + [TestCleanup] public void CleanupAfterEachTest() { From a20c6f68b30b395b3e6c833c215b3ab9ce48a337 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Thu, 30 Oct 2025 13:24:24 +0100 Subject: [PATCH 76/79] Merge errors --- 
src/Core/Resolvers/SqlResponseHelpers.cs | 14 +++++--------- src/Core/Services/ExecutionHelper.cs | 13 ++++++++----- 2 files changed, 13 insertions(+), 14 deletions(-) diff --git a/src/Core/Resolvers/SqlResponseHelpers.cs b/src/Core/Resolvers/SqlResponseHelpers.cs index 44c1a65691..6c8ce21f01 100644 --- a/src/Core/Resolvers/SqlResponseHelpers.cs +++ b/src/Core/Resolvers/SqlResponseHelpers.cs @@ -131,8 +131,11 @@ public static OkObjectResult FormatFindResult( queryString: queryString, isNextLinkRelative: runtimeConfig.NextLinkRelative()); - //Get the element RecordCount from the first element of the array - //JsonElement recordCountElement = rootEnumerated[0].GetProperty("RecordCount"); + rootEnumerated.Add(nextLink); + } + + //Get the element RecordCount from the first element of the array + //JsonElement recordCountElement = rootEnumerated[0].GetProperty("RecordCount"); string jsonRecordCount = JsonSerializer.Serialize(new[] { new @@ -141,13 +144,6 @@ public static OkObjectResult FormatFindResult( } }); - // When there are extra fields present, they are removed before returning the response. 
- if (extraFieldsInResponse.Count > 0) - { - rootEnumerated = RemoveExtraFieldsInResponseWithMultipleItems(rootEnumerated, extraFieldsInResponse); - } - - rootEnumerated.Add(nextLink); rootEnumerated.Add(JsonSerializer.Deserialize(jsonRecordCount)); return OkResponse(JsonSerializer.SerializeToElement(rootEnumerated)); } diff --git a/src/Core/Services/ExecutionHelper.cs b/src/Core/Services/ExecutionHelper.cs index 300a7f6cd3..2ef961081a 100644 --- a/src/Core/Services/ExecutionHelper.cs +++ b/src/Core/Services/ExecutionHelper.cs @@ -20,7 +20,6 @@ using HotChocolate.Execution.Processing; using HotChocolate.Language; using HotChocolate.Resolvers; -using HotChocolate.Types.Descriptors.Definitions; using NodaTime.Text; using Kestral = Microsoft.AspNetCore.Server.Kestrel.Core.Internal.Http.HttpMethod; @@ -427,10 +426,10 @@ private static bool TryGetPropertyFromParent( // CNEXT: This is a nasty exernal dependancy // Add one extra allowable parameter to schemaArgument: offset - InputFieldDefinition offsetDef = new("offset", "Offset for the query", null, null, null); - IInputField offS = new InputField(offsetDef, 5); + //HotChocolate.Types.InputFieldDefinition offsetDef = new("offset", "Offset for the query", null, null, null); + //IInputField offS = new InputField(offsetDef, 5); - IEnumerable ss = schemaArguments.Append(offS); + //IEnumerable ss = schemaArguments.Append(offS); //IInputField i = new InputField("offset", new IntType()) @@ -442,7 +441,7 @@ private static bool TryGetPropertyFromParent( // {orderBy:entityOrderByInput} // The values in schemaArguments will have default values when the backing // entity is a stored procedure with runtime config defined default parameter values. 
- foreach (IInputField argument in ss) + foreach (IInputValueDefinition argument in schemaArguments) { if (argument.DefaultValue != null) { @@ -455,6 +454,10 @@ private static bool TryGetPropertyFromParent( } } + collectedParameters.Add( + "offset", + 0); + // Overwrite the default values with the passed in arguments // Example: { myEntity(first: $first, orderBy: {entityField: ASC) { items { entityField } } } // User supplied $first filter variable overwrites the default value of 'first'. From 0383b1fae2b4b429aed0d11fa59712f5fd6bac23 Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Fri, 31 Oct 2025 09:46:13 +0100 Subject: [PATCH 77/79] Fix the things --- .dockerignore | 19 ++++++ Dockerfile | 31 +++++++-- .../Sql Query Structures/SqlQueryStructure.cs | 1 - src/Core/Services/ExecutionHelper.cs | 15 +---- src/Service/Startup.cs | 9 ++- src/Service/dab-config.json | 66 ++++++++++++++++++- 6 files changed, 118 insertions(+), 23 deletions(-) create mode 100644 .dockerignore diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000..6f711721f7 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,19 @@ +# Exclude repo metadata and local build outputs from Docker context +.git/ +.vs/ +.vscode/ +TestResults/ + +# Dotnet build artifacts +**/bin/ +**/obj/ +**/out/ + +# IDE and tooling miscellany +*.user +*.suo +*.swp +.DS_Store + +# Local docker artifacts +Dockerfile.*.local diff --git a/Dockerfile b/Dockerfile index e31cb72052..1387981858 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,15 +1,34 @@ -# Version values referenced from https://hub.docker.com/_/microsoft-dotnet-aspnet +# Stage image versions mirror https://hub.docker.com/_/microsoft-dotnet-aspnet -FROM mcr.microsoft.com/dotnet/sdk:8.0-cbl-mariner2.0. 
AS build +FROM mcr.microsoft.com/dotnet/sdk:8.0-cbl-mariner2.0 AS build WORKDIR /src -COPY [".", "./"] -RUN dotnet build "./src/Service/Azure.DataApiBuilder.Service.csproj" -f net8.0 -o /out -r linux-x64 --self-contained + +# Copy project files first to maximize layer caching during restore +COPY ["Nuget.config", "."] +COPY ["global.json", "."] +COPY ["src/Directory.Build.props", "src/"] +COPY ["src/Directory.Packages.props", "src/"] +COPY ["src/Azure.DataApiBuilder.sln", "src/"] +COPY ["src/Service/Azure.DataApiBuilder.Service.csproj", "src/Service/"] +COPY ["src/Azure.DataApiBuilder.Mcp/Azure.DataApiBuilder.Mcp.csproj", "src/Azure.DataApiBuilder.Mcp/"] +COPY ["src/Core/Azure.DataApiBuilder.Core.csproj", "src/Core/"] +COPY ["src/Auth/Azure.DataApiBuilder.Auth.csproj", "src/Auth/"] +COPY ["src/Config/Azure.DataApiBuilder.Config.csproj", "src/Config/"] +COPY ["src/Product/Azure.DataApiBuilder.Product.csproj", "src/Product/"] +COPY ["src/Service.GraphQLBuilder/Azure.DataApiBuilder.Service.GraphQLBuilder.csproj", "src/Service.GraphQLBuilder/"] +RUN dotnet restore "src/Service/Azure.DataApiBuilder.Service.csproj" -r linux-x64 + +# Copy the remaining source and publish the service +COPY . . +RUN dotnet publish "src/Service/Azure.DataApiBuilder.Service.csproj" -c Release -f net8.0 -o /app/publish --no-restore FROM mcr.microsoft.com/dotnet/aspnet:8.0-cbl-mariner2.0 AS runtime -COPY --from=build /out /App +WORKDIR /app +COPY --from=build /app/publish . 
-WORKDIR /App ENV ASPNETCORE_URLS=http://+:5000 +EXPOSE 5000 + ENTRYPOINT ["dotnet", "Azure.DataApiBuilder.Service.dll"] diff --git a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs index 92685fa83f..10284048ff 100644 --- a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs +++ b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs @@ -508,7 +508,6 @@ private SqlQueryStructure( } else { - // if first is not passed, we should use the default page size. _offset = 0; } } diff --git a/src/Core/Services/ExecutionHelper.cs b/src/Core/Services/ExecutionHelper.cs index 2ef961081a..525152d8bb 100644 --- a/src/Core/Services/ExecutionHelper.cs +++ b/src/Core/Services/ExecutionHelper.cs @@ -423,15 +423,6 @@ private static bool TryGetPropertyFromParent( // Fill the parameters dictionary with the default argument values ArgumentCollection schemaArguments = schema.Arguments; - // CNEXT: This is a nasty exernal dependancy - // Add one extra allowable parameter to schemaArgument: offset - - //HotChocolate.Types.InputFieldDefinition offsetDef = new("offset", "Offset for the query", null, null, null); - //IInputField offS = new InputField(offsetDef, 5); - - //IEnumerable ss = schemaArguments.Append(offS); - - //IInputField i = new InputField("offset", new IntType()) // Example 'argumentSchemas' IInputField objects of type 'HotChocolate.Types.Argument': // These are all default arguments defined in the schema for queries. @@ -453,11 +444,7 @@ private static bool TryGetPropertyFromParent( variables: variables)); } } - - collectedParameters.Add( - "offset", - 0); - + // Overwrite the default values with the passed in arguments // Example: { myEntity(first: $first, orderBy: {entityField: ASC) { items { entityField } } } // User supplied $first filter variable overwrites the default value of 'first'. 
diff --git a/src/Service/Startup.cs b/src/Service/Startup.cs index c36fab29ae..92726367a8 100644 --- a/src/Service/Startup.cs +++ b/src/Service/Startup.cs @@ -495,7 +495,14 @@ private void AddGraphQLService(IServiceCollection services, GraphQLRuntimeOption .AddTypeConverter( from => new TimeOnly(from.Hour, from.Minute, from.Second, from.Millisecond)) .AddTypeConverter( - from => new LocalTime(from.Hour, from.Minute, from.Second, from.Millisecond)); + from => new LocalTime(from.Hour, from.Minute, from.Second, from.Millisecond)) + .ModifyCostOptions(options => + { + options.MaxFieldCost = 10000; + options.MaxTypeCost = 10000; + options.EnforceCostLimits = false; + options.ApplyCostDefaults = false; + }); // Conditionally adds a maximum depth rule to the GraphQL queries/mutation selection set. // This rule is only added if a positive depth limit is specified, ensuring that the server diff --git a/src/Service/dab-config.json b/src/Service/dab-config.json index 31cde65058..6e20350b74 100644 --- a/src/Service/dab-config.json +++ b/src/Service/dab-config.json @@ -2,7 +2,7 @@ "$schema": "https://github.com/Azure/data-api-builder/releases/latest/download/dab.draft.schema.json", "data-source": { "database-type": "mssql", - "connection-string": "Data Source=nqf4kgvoqm4ufazdzriupb2pay-hmnbvxar2mgu7e3ng27fsqy3we.database.fabric.microsoft.com,1433;User ID=8bfaf0d6-fa20-4ed5-a450-0005ceb77729;Password=z5y8Q~hLcfdAflVrfnYoVxdavIJXZb5tlH~tAbRn;Pooling=True;Min Pool Size=0;Max Pool Size=100;Multiple Active Result Sets=False;Connect Timeout=30;Encrypt=False;Trust Server Certificate=True;Authentication=ActiveDirectoryServicePrincipal;Initial Catalog=apiLayer-345587c4-1232-457e-9761-b6bca3d72e2e", + "connection-string": "Data Source=nqf4kgvoqm4ufazdzriupb2pay-hmnbvxar2mgu7e3ng27fsqy3we.database.fabric.microsoft.com,1433;User ID=8bfaf0d6-fa20-4ed5-a450-0005ceb77729;Password=z5y8Q~hLcfdAflVrfnYoVxdavIJXZb5tlH~tAbRn;Pooling=True;Min Pool Size=0;Max Pool Size=100;Multiple Active Result 
Sets=False;Connect Timeout=30;Encrypt=False;Trust Server Certificate=True;Authentication=ActiveDirectoryServicePrincipal;Initial Catalog=apiLayer-345587c4-1232-457e-9761-b6bca3d72e2e; Command Timeout=60", "options": { "set-session-context": true } @@ -3293,6 +3293,70 @@ "enabled": true, "ttl-seconds": 120 } + }, + "OperationalUnitOfMeasure": { + "source": { + "object": "silver_ops.OperationalUnitOfMeasure", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "OperationalUnitOfMeasure", + "plural": "OperationalUnitsOfMeasure" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 600 + } + }, + "ContainerType": { + "source": { + "object": "silver_ops.ContainerType", + "type": "table", + "key-fields": [ + "ISOCode" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ContainerType", + "plural": "ContainerTypes" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { "action": "read" } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 600 + } } } } From 15c59fe0e42f5924dbdd7026bfabcc7d3e20df1c Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Fri, 31 Oct 2025 13:50:51 +0100 Subject: [PATCH 78/79] h --- src/Core/Services/ExecutionHelper.cs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/Core/Services/ExecutionHelper.cs b/src/Core/Services/ExecutionHelper.cs index 525152d8bb..fe527d9f0c 100644 --- a/src/Core/Services/ExecutionHelper.cs +++ b/src/Core/Services/ExecutionHelper.cs @@ -423,7 +423,6 @@ private static bool TryGetPropertyFromParent( // Fill the parameters dictionary with the default argument values ArgumentCollection schemaArguments = schema.Arguments; - // Example 'argumentSchemas' IInputField objects of type 'HotChocolate.Types.Argument': // These are all default arguments 
defined in the schema for queries. // {first:int} From 467c1f67b4641505d7d6d07207cfa64537a0415f Mon Sep 17 00:00:00 2001 From: KobeLenjou Date: Mon, 10 Nov 2025 08:10:28 +0100 Subject: [PATCH 79/79] re-add filtering and sorting on one-relationships --- .../Queries/InputTypeBuilder.cs | 22 +++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/src/Service.GraphQLBuilder/Queries/InputTypeBuilder.cs b/src/Service.GraphQLBuilder/Queries/InputTypeBuilder.cs index 58ff41c504..ba57bde446 100644 --- a/src/Service.GraphQLBuilder/Queries/InputTypeBuilder.cs +++ b/src/Service.GraphQLBuilder/Queries/InputTypeBuilder.cs @@ -24,7 +24,7 @@ IDictionary inputTypes { List inputFields = GenerateFilterInputFieldsForBuiltInFields(node, inputTypes); string filterInputName = GenerateObjectInputFilterName(node); - GenerateFilterInputTypeFromInputFields(inputTypes, inputFields, filterInputName, $"Filter input for {node.Name} GraphQL type"); + GenerateInputTypeFromInputFields(inputTypes, inputFields, filterInputName, $"Filter input for {node.Name} GraphQL type"); } internal static void GenerateOrderByInputTypeForObjectType(ObjectTypeDefinitionNode node, IDictionary inputTypes) @@ -32,7 +32,10 @@ internal static void GenerateOrderByInputTypeForObjectType(ObjectTypeDefinitionN List inputFields = GenerateOrderByInputFieldsForBuiltInFields(node); string orderByInputName = GenerateObjectInputOrderByName(node); + GenerateInputTypeFromInputFields(inputTypes, inputFields, orderByInputName, $"Order by input for {node.Name} GraphQL type"); + // OrderBy does not include "and" and "or" input types so we add only the orderByInputName here. 
+ /* inputTypes.Add( orderByInputName, new( @@ -43,6 +46,7 @@ internal static void GenerateOrderByInputTypeForObjectType(ObjectTypeDefinitionN inputFields ) ); + */ } private static List GenerateOrderByInputFieldsForBuiltInFields(ObjectTypeDefinitionNode node) @@ -62,12 +66,26 @@ private static List GenerateOrderByInputFieldsForBuilt new List()) ); } + else if (RelationshipDirectiveType.Cardinality(field) == Config.ObjectModel.Cardinality.One) + { + string targetEntityName = RelationshipDirectiveType.Target(field); + + inputFields.Add( + new( + location: null, + field.Name, + new StringValueNode($"Order by options for {field.Name}"), + new NamedTypeNode(GenerateObjectInputOrderByName(targetEntityName)), + defaultValue: null, + new List()) + ); + } } return inputFields; } - private static void GenerateFilterInputTypeFromInputFields( + private static void GenerateInputTypeFromInputFields( IDictionary inputTypes, List inputFields, string inputTypeName,