diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000..6f711721f7 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,19 @@ +# Exclude repo metadata and local build outputs from Docker context +.git/ +.vs/ +.vscode/ +TestResults/ + +# Dotnet build artifacts +**/bin/ +**/obj/ +**/out/ + +# IDE and tooling miscellany +*.user +*.suo +*.swp +.DS_Store + +# Local docker artifacts +Dockerfile.*.local diff --git a/CODEOWNERS b/CODEOWNERS index ed2b4835ef..ac65d7bc52 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1,7 +1,7 @@ # These owners will be the default owners for everything in # the repo. Unless a later match takes precedence, # review when someone opens a pull request. -* @Aniruddh25 @aaronburtle @anushakolan @RubenCerna2079 @souvikghosh04 @ravishetye @neeraj-sharma2592 @sourabh1007 @vadeveka @Alekhya-Polavarapu @rusamant +* @Aniruddh25 @aaronburtle @anushakolan @RubenCerna2079 @souvikghosh04 @neeraj-sharma2592 @sourabh1007 @vadeveka @Alekhya-Polavarapu @rusamant @stuartpa code_of_conduct.md @jerrynixon contributing.md @jerrynixon diff --git a/Dockerfile b/Dockerfile index d6d950733c..1387981858 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,15 +1,34 @@ -# Version values referenced from https://hub.docker.com/_/microsoft-dotnet-aspnet - -FROM mcr.microsoft.com/dotnet/sdk:8.0-cbl-mariner2.0. 
AS build +# Stage image versions mirror https://hub.docker.com/_/microsoft-dotnet-aspnet +FROM mcr.microsoft.com/dotnet/sdk:8.0-cbl-mariner2.0 AS build WORKDIR /src -COPY [".", "./"] -RUN dotnet build "./src/Service/Azure.DataApiBuilder.Service.csproj" -c Docker -o /out -r linux-x64 + +# Copy project files first to maximize layer caching during restore +COPY ["Nuget.config", "."] +COPY ["global.json", "."] +COPY ["src/Directory.Build.props", "src/"] +COPY ["src/Directory.Packages.props", "src/"] +COPY ["src/Azure.DataApiBuilder.sln", "src/"] +COPY ["src/Service/Azure.DataApiBuilder.Service.csproj", "src/Service/"] +COPY ["src/Azure.DataApiBuilder.Mcp/Azure.DataApiBuilder.Mcp.csproj", "src/Azure.DataApiBuilder.Mcp/"] +COPY ["src/Core/Azure.DataApiBuilder.Core.csproj", "src/Core/"] +COPY ["src/Auth/Azure.DataApiBuilder.Auth.csproj", "src/Auth/"] +COPY ["src/Config/Azure.DataApiBuilder.Config.csproj", "src/Config/"] +COPY ["src/Product/Azure.DataApiBuilder.Product.csproj", "src/Product/"] +COPY ["src/Service.GraphQLBuilder/Azure.DataApiBuilder.Service.GraphQLBuilder.csproj", "src/Service.GraphQLBuilder/"] +RUN dotnet restore "src/Service/Azure.DataApiBuilder.Service.csproj" -r linux-x64 + +# Copy the remaining source and publish the service +COPY . . +RUN dotnet publish "src/Service/Azure.DataApiBuilder.Service.csproj" -c Release -f net8.0 -o /app/publish --no-restore FROM mcr.microsoft.com/dotnet/aspnet:8.0-cbl-mariner2.0 AS runtime -COPY --from=build /out /App -WORKDIR /App +WORKDIR /app +COPY --from=build /app/publish . + ENV ASPNETCORE_URLS=http://+:5000 +EXPOSE 5000 + ENTRYPOINT ["dotnet", "Azure.DataApiBuilder.Service.dll"] diff --git a/README.md b/README.md index 1746a62482..d8edb22a50 100644 --- a/README.md +++ b/README.md @@ -7,9 +7,9 @@ [What's new?](https://learn.microsoft.com/azure/data-api-builder/whats-new) -## Community +## Join the community -Join the Data API builder community! 
This sign up will help us maintain a list of interested developers to be part of our roadmap and to help us better understand the different ways DAB is being used. Sign up [here](https://forms.office.com/pages/responsepage.aspx?id=v4j5cvGGr0GRqy180BHbR1S1JdzGAxhDrefV-tBYtwZUNE1RWVo0SUVMTkRESUZLMVVOS0wwUFNVRy4u). +Want to be part of our priorities and roadmap? Sign up [here](https://forms.office.com/pages/responsepage.aspx?id=v4j5cvGGr0GRqy180BHbR1S1JdzGAxhDrefV-tBYtwZUNE1RWVo0SUVMTkRESUZLMVVOS0wwUFNVRy4u). ![](docs/media/dab-logo.png) @@ -17,199 +17,299 @@ Join the Data API builder community! This sign up will help us maintain a list o Data API builder (DAB) is an open-source, no-code tool that creates secure, full-featured REST and GraphQL endpoints for your database. It’s a CRUD data API engine that runs in a container—on Azure, any other cloud, or on-premises. DAB is built for developers with integrated tooling, telemetry, and other productivity features. -```mermaid -erDiagram - DATA_API_BUILDER ||--|{ DATA_API : "Provides" - DATA_API_BUILDER { - container true "Microsoft Container Repository" - open-source true "MIT license / any cloud or on-prem." 
- objects true "Supports: Table / View / Stored Procedure" - developer true "Swagger / Nitro (fka Banana Cake Pop)" - otel true "Open Telemetry / Structured Logs / Health Endpoints" - security true "EntraId / EasyAuth / OAuth / JWT / Anonymous" - cache true "Level1 (in-memory) / Level2 (redis)" - policy true "Item policy / Database policy / Claims policy" - hot_reload true "Dynamically controllable log levels" - } - DATA_API ||--o{ DATASOURCE : "Queries" - DATA_API { - REST true "$select / $filter / $orderby" - GraphQL true "relationships / multiple mutations" - } - DATASOURCE { - MS_SQL Supported - PostgreSQL Supported - Cosmos_DB Supported - MySQL Supported - SQL_DW Supported - } - CLIENT ||--o{ DATA_API : "Consumes" - CLIENT { - Transport HTTP "HTTP / HTTPS" - Syntax JSON "Standard payloads" - Mobile Supported "No requirement" - Web Supported "No requirement" - Desktop Supported "No requirement" - Language Any "No requirement" - Framework None "Not required" - Library None "Not required" - ORM None "Not required" - Driver None "Not required" - } -``` +> [!IMPORTANT] +> Data API builder (DAB) is open source and always free. + +### Which databases does Data API builder support? + +| | Azure SQL | SQL Server | SQLDW | Cosmos DB | PostgreSQL | MySQL | +| :-----------: | :-------: | :--------: | :---: | :-------: | :--------: | :---: | +| **Supported** | Yes | Yes | Yes | Yes | Yes | Yes | + +### Which environments does Data API builder support? + +| | On-Prem | Azure | AWS | GCP | Other | +| :-----------: | :-----: | :---: | :--: | :--: | :---: | +| **Supported** | Yes | Yes | Yes | Yes | Yes | + +### Which endpoints does Data API builder support? 
+ +| | REST | GraphQL | MCP | +| :-----------: | :--: | :-----: | :---------: | +| **Supported** | Yes | Yes | Coming soon | + +## Getting started + +Use the [Getting Started](https://learn.microsoft.com/azure/data-api-builder/get-started/get-started-with-data-api-builder) tutorial to quickly explore the core tools and concepts. -## Getting Started +### 1. Install the `dotnet` [command line](https://get.dot.net) -Use the [Getting Started](https://learn.microsoft.com/azure/data-api-builder/get-started/get-started-with-data-api-builder) tutorial to quickly explore the core tools and concepts. It gives you hands-on experience with how DAB makes you more efficient by removing boilerplate code. +https://get.dot.net -**1. Install the DAB CLI** +> [!NOTE] +> You may already have .NET installed! -The [DAB CLI](https://aka.ms/dab/docs) is a cross-platform .NET tool. Install the [.NET SDK](https://get.dot.net) before running: +The Data API builder (DAB) command line requires the .NET runtime version 8 or later. +#### Validate your installation + +```sh +dotnet --version ``` + +### 2. Install the `dab` command line + +The Data API builder (DAB) command line is cross-platform and intended for local developer use. + +```sh dotnet tool install microsoft.dataapibuilder -g ``` -**2. Create your initial configuration file** +#### Validate your installation + +```sh +dab --version +``` + +### 3. Create your database (example: Azure SQL database / T-SQL) + +This example uses a single table for simplicity. + +```sql +CREATE TABLE dbo.Todo +( + Id INT PRIMARY KEY IDENTITY, + Title NVARCHAR(500) NOT NULL, + IsCompleted BIT NOT NULL DEFAULT 0 +); +INSERT dbo.Todo (Title, IsCompleted) +VALUES + ('Walk the dog', 0), + ('Feed the fish', 0), + ('Clean the cat', 1); +``` + +### 4. Prepare your connection string -DAB requires a JSON configuration file. Edit manually or with the CLI. Use `dab --help` for syntax options. 
+Data API builder (DAB) supports `.env` files for testing process-level environment variables. +#### PowerShell (Windows) + +```ps +echo "my-connection-string=$env:database_connection_string" > .env ``` + +#### cmd.exe (Windows) + +```cmd +echo my-connection-string=%database_connection_string% > .env +``` + +#### bash (macOS/Linux) + +```bash +echo "my-connection-string=$database_connection_string" > .env +``` + +#### Resulting .env file + +The file `.env` is automatically created through this process. These are the resulting contents: + +``` +"my-connection-string=$env:database_connection_string" +``` +> [!NOTE] +> Be sure and replace `database_connection_string` with your actual database connection string. + +> [!IMPORTANT] +> Adding `.env` to your `.gitignore` file will help ensure your secrets are not added to source control. + +### 5. Create your initial configuration file + +Data API builder (DAB) requires a JSON configuration file. Use `dab --help` for syntax options. + +```sh dab init --database-type mssql --connection-string "@env('my-connection-string')" --host-mode development ``` -**3. Add your first table** +> [!NOTE] +> Including `--host-mode development` enables Swagger for REST and Nitro for GraphQL. -DAB supports tables, views, and stored procedures. It works with SQL Server, Azure Cosmos DB, PostgreSQL, MySQL, and SQL Data Warehouse. Security is engine-level, but permissions are per entity. +#### Resulting configuration +The file `dab-config.json` is automatically created through this process. 
These are the resulting contents: + +```json +{ + "$schema": "https://github.com/Azure/data-api-builder/releases/download/v1.5.56/dab.draft.schema.json", + "data-source": { + "database-type": "mssql", + "connection-string": "@env('my-connection-string')", + "options": { + "set-session-context": false + } + }, + "runtime": { + "rest": { + "enabled": true, + "path": "/api", + "request-body-strict": true + }, + "graphql": { + "enabled": true, + "path": "/graphql", + "allow-introspection": true + }, + "host": { + "cors": { + "origins": [], + "allow-credentials": false + }, + "authentication": { + "provider": "StaticWebApps" + }, + "mode": "development" + } + }, + "entities": { } +} ``` -dab add Actor - --source "dbo.Actor" +### 6. Add your table to the configuration + +```sh +dab add Todo + --source "dbo.Todo" --permissions "anonymous:*" ``` -**4. Run Data API builder** +> [!NOTE] +> DAB supports tables, views, and stored procedures. When the type is not specified, the default is `table`. + +#### Resulting configuration + +The `entities` section of the configuration is no longer empty: + +```json +{ + "entities": { + "Todo": { + "source": { + "object": "dbo.Todo", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Todo", + "plural": "Todos" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "*" + } + ] + } + ] + } + } +} +``` -In `production`, DAB runs in a container. In `development`, it’s self-hosted locally with hot reload, Swagger, and Nitro (fka Banana Cake Pop) support. +### 7. Run Data API builder -``` +In `production`, DAB runs in a container. In `development`, it’s locally self-hosted. + +```sh dab start ``` -> **Note**: Before you run `dab start`, make sure your connection string is stored in an environment variable called `my-connection-string`. This is required for `@env('my-connection-string')` in your config file to work. 
The easiest way is to create a `.env` file with `name=value` pairs—DAB will load these automatically at runtime. +> [!IMPORTANT] +> The DAB CLI assumes your configuration file is called `dab-config.json` and is in the local folder. -**5. Access your data source** +### 8. Access your data! -By default, DAB enables both REST and GraphQL. REST supports `$select`, `$filter`, and `$orderBy`. GraphQL uses config-defined relationships. +By default, DAB enables both REST and GraphQL. +```sh +GET http://localhost:5000/api/Todo ``` -GET http://localhost:5000/api/Actor -``` - -### Walk-through video - - Play Video - +> [!NOTE] +> Change the URL to match your port if it is different. -Demo source code: [startrek](https://aka.ms/dab/startrek) +#### Other things you should try -## Overview - -| Category | Features | -|----------------|----------| -| **Database Objects** | • NoSQL collections
• RDBMS tables, views, stored procedures | -| **Data Sources** | • SQL Server & Azure SQL
• Azure Cosmos DB
• PostgreSQL
• MySQL | -| **REST** | • `$select` for projection
• `$filter` for filtering
• `$orderBy` for sorting | -| **GraphQL** | • Relationship navigation
• Data aggregation
• Multiple mutations | -| **Telemetry** | • Structured logs
• OpenTelemetry
• Application Insights
• Health endpoints | -| **Advanced** | • Pagination
• Level 1 (in-memory) cache | -| **Authentication** | • OAuth2/JWT
• EasyAuth
• Entra ID | -| **Authorization** | • Role-based support
• Entity permissions
• Database policies | -| **Developer** | • Cross-platform CLI
• Swagger (REST)
• Nitro [previously Banana Cake Pop] (GraphQL)
• Open Source
• Configuration Hot Reload | +* DAB’s Health endpoint: `http://localhost:5000/health` +* DAB’s Swagger UI: `http://localhost:5000/swagger` +* DAB’s Nitro UI: `http://localhost:5000/graphql` ## How does it work? -This diagram shows how DAB works. DAB dynamically creates endpoints from your config file. It translates HTTP requests to SQL, returns JSON, and auto-pages results. +DAB dynamically creates endpoints and translates requests to SQL, returning JSON. ```mermaid sequenceDiagram - actor Client - - box Data API builder (DAB) - participant Endpoint - participant QueryBuilder - end - - participant Configuration as Configuration File - - box Data Source - participant DB - end - - Endpoint->>Endpoint: Start - activate Endpoint - Endpoint->>Configuration: Request - Configuration-->>Endpoint: Configuration - Endpoint->>DB: Request - DB-->>Endpoint: Metadata - Note over Endpoint, DB: Some configuration is validated against the metadata - Endpoint-->>Endpoint: Configure - deactivate Endpoint - Client-->>Endpoint: HTTP Request - activate Endpoint - critical - Endpoint-->>Endpoint: Authenticate - Endpoint-->>Endpoint: Authorize - end - Endpoint->>QueryBuilder: Request - QueryBuilder-->>Endpoint: SQL - alt Cache - Endpoint-->>Endpoint: Use Cache - else Query - Endpoint-->>DB: Request - Note over Endpoint, DB: Query is automatically throttled and results paginated - DB->>Endpoint: Results - Note over Endpoint, DB: Results are automatically cached for use in next request - end - Endpoint->>Client: HTTP 200 - deactivate Endpoint -``` - -Because DAB is stateless, it can scale up or out using any container size. It builds a feature-rich API like you would from scratch—but now you don’t have to. 
- -## Additional Resources - -- [Online Documentation](https://aka.ms/dab/docs) -- [Official Samples](https://aka.ms/dab/samples) -- [Known Issues](https://learn.microsoft.com/azure/data-api-builder/known-issues) -- [Feature Roadmap](https://github.com/Azure/data-api-builder/discussions/1377) + actor Client as Client + participant Endpoint as Endpoint + participant QueryBuilder as QueryBuilder + participant DB as Database + + %% Initialization / Warming up section (light grey) + rect rgba(120,120,120,0.10) + Endpoint -->>+ Endpoint: Read Config + Endpoint ->> DB: Query Metadata + DB -->> Endpoint: Metadata Response + Endpoint ->>- Endpoint: Start Engine + end + + %% Request/Response section (very light purple) + rect rgba(180,150,255,0.11) + Client ->>+ Endpoint: HTTP Request + Endpoint ->> Endpoint: Authorize + Endpoint ->> QueryBuilder: Invoke + QueryBuilder -->> Endpoint: SQL Query + Endpoint ->> DB: Submit Query + DB -->> Endpoint: Data Response + Endpoint -->>- Client: HTTP Response + end +``` -#### References +## Additional resources -- [Microsoft REST API Guidelines](https://github.com/microsoft/api-guidelines/blob/vNext/Guidelines.md) -- [Microsoft Azure REST API Guidelines](https://github.com/microsoft/api-guidelines/blob/vNext/azure/Guidelines.md) -- [GraphQL Specification](https://graphql.org/) +* [Online Documentation](https://aka.ms/dab/docs) +* [Official Samples](https://aka.ms/dab/samples) +* [Known Issues](https://learn.microsoft.com/azure/data-api-builder/known-issues) +* [Feature Roadmap](https://github.com/Azure/data-api-builder/discussions/1377) -### How to Contribute +#### References -To contribute, see these documents: +* [Microsoft REST API Guidelines](https://github.com/microsoft/api-guidelines/blob/vNext/Guidelines.md) +* [Microsoft Azure REST API Guidelines](https://github.com/microsoft/api-guidelines/blob/vNext/azure/Guidelines.md) +* [GraphQL Specification](https://graphql.org/) -- [Code of Conduct](./CODE_OF_CONDUCT.md) -- 
[Security](./SECURITY.md) -- [Contributing](./CONTRIBUTING.md) +### How to contribute -### License +To contribute, see these documents: -**Data API builder for Azure Databases** is licensed under the MIT License. See [LICENSE](./LICENSE.txt) for details. +* [Code of Conduct](./CODE_OF_CONDUCT.md) +* [Security](./SECURITY.md) +* [Contributing](./CONTRIBUTING.md) +* [MIT License](./LICENSE.txt) -### Third-Party Component Notice +### Third-party component notice -Nitro (fka Banana Cake Pop by ChilliCream, Inc.) may optionally store work in its cloud service via your ChilliCream account. Microsoft is not affiliated with or endorsing this service. Use at your discretion. +Nitro (formerly Banana Cake Pop by ChilliCream, Inc.) may optionally store work in its cloud service via your ChilliCream account. Microsoft is not affiliated with or endorsing this service. Use at your discretion. ### Trademarks -This project may use trademarks or logos. Use of Microsoft trademarks must follow Microsoft’s [Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks). Use of third-party marks is subject to their policies. +This project may use trademarks or logos. Use of Microsoft trademarks must follow Microsoft’s [Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks). Use of third-party marks is subject to their policies. 
\ No newline at end of file diff --git a/config-generators/dwsql-commands.txt b/config-generators/dwsql-commands.txt index 1f613d7f48..df4940ae59 100644 --- a/config-generators/dwsql-commands.txt +++ b/config-generators/dwsql-commands.txt @@ -57,6 +57,7 @@ update Publisher --config "dab-config.DwSql.json" --permissions "policy_tester_0 update Publisher --config "dab-config.DwSql.json" --permissions "policy_tester_06:read" --fields.include "*" --policy-database "@item.id eq 1940" update Publisher --config "dab-config.DwSql.json" --permissions "database_policy_tester:read" --policy-database "@item.id ne 1234 or @item.id gt 1940" update Publisher --config "dab-config.DwSql.json" --permissions "database_policy_tester:update" --policy-database "@item.id ne 1234" +update Publisher --config "dab-config.DwSql.json" --permissions "database_policy_tester:create" --policy-database "@item.name ne 'New publisher'" update Stock --config "dab-config.DwSql.json" --permissions "authenticated:create,read,update,delete" update Stock --config "dab-config.DwSql.json" --rest commodities --graphql true update Stock --config "dab-config.DwSql.json" --permissions "TestNestedFilterFieldIsNull_ColumnForbidden:read" diff --git a/config-generators/mssql-commands.txt b/config-generators/mssql-commands.txt index 158d4bd179..cecc6b522c 100644 --- a/config-generators/mssql-commands.txt +++ b/config-generators/mssql-commands.txt @@ -207,6 +207,7 @@ update BookNF --config "dab-config.MsSql.json" --permissions "TestNestedFilter_E update BookNF --config "dab-config.MsSql.json" --permissions "TestNestedFilter_ColumnForbidden:read" update BookNF --config "dab-config.MsSql.json" --permissions "TestNestedFilterChained_EntityReadForbidden:read" update BookNF --config "dab-config.MsSql.json" --permissions "TestNestedFilterChained_ColumnForbidden:read" +update BookNF --config "dab-config.MsSql.json" --permissions "TestFieldExcludedForAggregation:read" --fields.exclude "publisher_id" update BookNF --config 
"dab-config.MsSql.json" --relationship publishers --target.entity PublisherNF --cardinality one update BookNF --config "dab-config.MsSql.json" --relationship websiteplacement --target.entity BookWebsitePlacement --cardinality one update BookNF --config "dab-config.MsSql.json" --relationship reviews --target.entity Review --cardinality many @@ -235,3 +236,4 @@ add dbo_DimAccount --config "dab-config.MsSql.json" --source "DimAccount" --perm update dbo_DimAccount --config "dab-config.MsSql.json" --relationship parent_account --target.entity dbo_DimAccount --cardinality one --relationship.fields "ParentAccountKey:AccountKey" update dbo_DimAccount --config "dab-config.MsSql.json" --relationship child_accounts --target.entity dbo_DimAccount --cardinality many --relationship.fields "AccountKey:ParentAccountKey" add DateOnlyTable --config "dab-config.MsSql.json" --source "date_only_table" --permissions "anonymous:*" --rest true --graphql true --source.key-fields "event_date" +add GetBooksAuth --config "dab-config.MsSql.json" --source "get_books" --source.type "stored-procedure" --permissions "teststoredprocauth:execute" --rest true --graphql true --graphql.operation "Query" --rest.methods "Get" diff --git a/docs/Testing/mcp-inspector-testing.md b/docs/Testing/mcp-inspector-testing.md new file mode 100644 index 0000000000..d6942311ea --- /dev/null +++ b/docs/Testing/mcp-inspector-testing.md @@ -0,0 +1,23 @@ + +# MCP Inspector Testing Guide + +Steps to run and test MCP tools using the https://www.npmjs.com/package/@modelcontextprotocol/inspector. +### Pre-requisite: +- Node.js must be installed on your system to run this code. +- Ensure that the DAB MCP server is running before attempting to connect with the inspector tool. + +### 1. **Install MCP Inspector** +npx @modelcontextprotocol/inspector + +### 2. ** Bypass TLS Verification (For Local Testing)** +set NODE_TLS_REJECT_UNAUTHORIZED=0 + +### 3. 
** Open the inspector with pre-filled token.** +http://localhost:6274/?MCP_PROXY_AUTH_TOKEN= + +### 4. ** How to use the tool..** +- Set the transport type "Streamable HTTP". +- Set the URL "http://localhost:5000/mcp" and hit connect. +- Select a Tool from the dropdown list. +- Fill in the Parameters required for the tool. +- Click "Run" to execute the tool and view the response. \ No newline at end of file diff --git a/global.json b/global.json index 391ba3c2a3..7e9f2f6bb4 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "sdk": { - "version": "8.0.100", + "version": "8.0.414", "rollForward": "latestFeature" } } diff --git a/schemas/dab.draft.schema.json b/schemas/dab.draft.schema.json index 20903284b0..80cfd953ad 100644 --- a/schemas/dab.draft.schema.json +++ b/schemas/dab.draft.schema.json @@ -158,16 +158,21 @@ "type": "object", "properties": { "max-page-size": { - "type": ["integer", "null"], + "type": [ "integer", "null" ], "description": "Defines the maximum number of records that can be returned in a single page of results. If set to null, the default value is 100,000.", "default": 100000, "minimum": 1 }, "default-page-size": { - "type": ["integer", "null"], + "type": [ "integer", "null" ], "description": "Sets the default number of records returned in a single response. When this limit is reached, a continuation token is provided to retrieve the next page. If set to null, the default value is 100.", "default": 100, "minimum": 1 + }, + "next-link-relative": { + "type": "boolean", + "default": false, + "description": "When true, nextLink in paginated results will use a relative URL." } } }, @@ -209,7 +214,7 @@ "description": "Allow enabling/disabling GraphQL requests for all entities." 
}, "depth-limit": { - "type": ["integer", "null"], + "type": [ "integer", "null" ], "description": "Maximum allowed depth of a GraphQL query.", "default": null }, @@ -234,13 +239,74 @@ } } }, + "mcp": { + "type": "object", + "description": "Global MCP endpoint configuration", + "additionalProperties": false, + "properties": { + "path": { + "default": "/mcp", + "type": "string" + }, + "enabled": { + "type": "boolean", + "description": "Allow enabling/disabling MCP requests for all entities.", + "default": true + }, + "dml-tools": { + "oneOf": [ + { + "type": "boolean", + "description": "Enable/disable all DML tools with default settings." + }, + { + "type": "object", + "description": "Individual DML tools configuration", + "additionalProperties": false, + "properties": { + "describe-entities": { + "type": "boolean", + "description": "Enable/disable the describe-entities tool.", + "default": false + }, + "create-record": { + "type": "boolean", + "description": "Enable/disable the create-record tool.", + "default": false + }, + "read-records": { + "type": "boolean", + "description": "Enable/disable the read-records tool.", + "default": false + }, + "update-record": { + "type": "boolean", + "description": "Enable/disable the update-record tool.", + "default": false + }, + "delete-record": { + "type": "boolean", + "description": "Enable/disable the delete-record tool.", + "default": false + }, + "execute-entity": { + "type": "boolean", + "description": "Enable/disable the execute-entity tool.", + "default": false + } + } + } + ] + } + } + }, "host": { "type": "object", "description": "Global hosting configuration", "additionalProperties": false, "properties": { "max-response-size-mb": { - "type": ["integer", "null"], + "type": [ "integer", "null" ], "description": "Specifies the maximum size, in megabytes, of the database response allowed in a single result. 
If set to null, the default value is 158 MB.", "default": 158, "minimum": 1, @@ -248,12 +314,12 @@ }, "mode": { "description": "Set if running in Development or Production mode", - "type": ["string", "null"], + "type": [ "string", "null" ], "default": "production", - "enum": ["production", "development"] + "enum": [ "production", "development" ] }, "cors": { - "type": ["object", "null"], + "type": [ "object", "null" ], "description": "Configure CORS", "additionalProperties": false, "properties": { @@ -273,7 +339,7 @@ } }, "authentication": { - "type": ["object", "null"], + "type": [ "object", "null" ], "additionalProperties": false, "properties": { "provider": { @@ -317,7 +383,7 @@ "type": "string" } }, - "required": ["audience", "issuer"] + "required": [ "audience", "issuer" ] } }, "allOf": [ @@ -333,9 +399,9 @@ ] } }, - "required": ["provider"] + "required": [ "provider" ] }, - "then": { "required": ["jwt"] }, + "then": { "required": [ "jwt" ] }, "else": { "properties": { "jwt": false } } } ] @@ -377,7 +443,7 @@ "default": true } }, - "required": ["connection-string"] + "required": [ "connection-string" ] }, "open-telemetry": { "type": "object", @@ -400,7 +466,7 @@ "type": "string", "description": "Open Telemetry protocol", "default": "grpc", - "enum": ["grpc", "httpprotobuf"] + "enum": [ "grpc", "httpprotobuf" ] }, "enabled": { "type": "boolean", @@ -408,7 +474,119 @@ "default": true } }, - "required": ["endpoint"] + "required": [ "endpoint" ] + }, + "azure-log-analytics": { + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "description": "Allow enabling/disabling Azure Log Analytics.", + "default": false + }, + "auth": { + "type": "object", + "additionalProperties": false, + "properties": { + "custom-table-name": { + "type": [ "string", "null" ], + "description": "Azure Log Analytics Custom Table Name for entra-id mode" + }, + "dcr-immutable-id": { + "type": [ "string", "null" ], + "description": "DCR 
ID for entra-id mode" + }, + "dce-endpoint": { + "type": [ "string", "null" ], + "description": "DCE endpoint for entra-id mode" + } + } + }, + "dab-identifier": { + "type": "string", + "description": "Identifier passed on to Log Analytics", + "default": "DabLogs" + }, + "flush-interval-seconds": { + "type": "integer", + "description": "Interval between log batch pushes (in seconds)", + "default": 5 + } + }, + "if": { + "properties": { + "enabled": { + "const": true + } + } + }, + "then": { + "properties": { + "auth": { + "properties": { + "custom-table-name": { + "type": "string", + "description": "Azure Log Analytics Custom Table Name for entra-id mode" + }, + "dcr-immutable-id": { + "type": "string", + "description": "DCR ID for entra-id mode" + }, + "dce-endpoint": { + "type": "string", + "description": "DCE endpoint for entra-id mode" + } + }, + "required": [ "custom-table-name", "dcr-immutable-id", "dce-endpoint" ] + } + }, + "required": [ "auth" ] + } + }, + "file": { + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean", + "description": "Enable/disable file sink telemetry logging.", + "default": false + }, + "path": { + "type": "string", + "description": "File path for telemetry logs.", + "default": "/logs/dab-log.txt" + }, + "rolling-interval": { + "type": "string", + "description": "Rolling interval for log files.", + "default": "Day", + "enum": [ "Minute", "Hour", "Day", "Month", "Year", "Infinite" ] + }, + "retained-file-count-limit": { + "type": "integer", + "description": "Maximum number of retained log files.", + "default": 1, + "minimum": 1 + }, + "file-size-limit-bytes": { + "type": "integer", + "description": "Maximum file size in bytes before rolling.", + "default": 1048576, + "minimum": 1 + } + }, + "if": { + "properties": { + "enabled": { + "const": true + } + } + }, + "then": { + "required": [ "path" ] + } }, "log-level": { "type": "object", @@ -457,6 +635,59 @@ "type": "integer", 
"description": "Time to live in seconds for the Comprehensive Health Check Report cache entry.", "default": 5 + }, + "max-query-parallelism": { + "type": "integer", + "description": "The max degree of parallelism for running parallel health check queries.", + "default": 4 + } + } + } + } + }, + "azure-key-vault": { + "type": "object", + "description": "Azure Key Vault configuration for storing secrets", + "additionalProperties": false, + "properties": { + "endpoint": { + "type": "string", + "description": "Azure Key Vault endpoint URL" + }, + "retry-policy": { + "type": "object", + "description": "Retry policy configuration for Azure Key Vault operations", + "additionalProperties": false, + "properties": { + "mode": { + "type": "string", + "enum": ["fixed", "exponential"], + "default": "exponential", + "description": "Retry mode: fixed or exponential backoff" + }, + "max-count": { + "type": "integer", + "default": 3, + "minimum": 0, + "description": "Maximum number of retry attempts" + }, + "delay-seconds": { + "type": "integer", + "default": 1, + "minimum": 1, + "description": "Initial delay between retries in seconds" + }, + "max-delay-seconds": { + "type": "integer", + "default": 60, + "minimum": 1, + "description": "Maximum delay between retries in seconds (for exponential mode)" + }, + "network-timeout-seconds": { + "type": "integer", + "default": 60, + "minimum": 1, + "description": "Network timeout for requests in seconds" } } } @@ -470,6 +701,10 @@ "type": "object", "additionalProperties": false, "properties": { + "description": { + "type": "string", + "description": "Optional description for the entity. Will be surfaced in generated API documentation and GraphQL schema as comments." 
+ }, "health": { "description": "Health check configuration for entity", "type": [ "object", "null" ], @@ -520,23 +755,35 @@ "description": "Database object name" }, "parameters": { - "type": "object", - "description": "Dictionary of parameters and their values", - "patternProperties": { - "^.*$": { - "oneOf": [ - { - "type": "boolean" - }, - { - "type": "string" - }, - { - "type": "number" + "oneOf": [ + { + "type": "object", + "description": "Dictionary of parameters and their values (deprecated)", + "patternProperties": { + "^.*$": { + "oneOf": [ + { "type": "boolean" }, + { "type": "string" }, + { "type": "number" } + ] + } + } + }, + { + "type": "array", + "description": "Array of parameter objects with metadata", + "items": { + "type": "object", + "required": ["name"], + "properties": { + "name": { "type": "string", "description": "Parameter name" }, + "required": { "type": "boolean", "description": "Is parameter required" }, + "default": { "type": ["string", "number", "boolean", "null"], "description": "Default value" }, + "description": { "type": "string", "description": "Parameter description. Since descriptions for multiple parameters are provided as a comma-separated string, individual parameter descriptions must not contain a comma (',')." } } - ] + } } - } + ] }, "key-fields": { "type": "array", @@ -550,6 +797,21 @@ } ] }, + "fields": { + "type": "array", + "description": "Defines the fields (columns) exposed for this entity, with metadata.", + "items": { + "type": "object", + "properties": { + "name": { "type": "string", "description": "Database column name." }, + "alias": { "type": "string", "description": "Exposed name for the field." }, + "description": { "type": "string", "description": "Field description." }, + "primary-key": { "type": "boolean", "description": "Indicates whether this field is a primary key." 
} + }, + "required": ["name"] + }, + "uniqueItems": true + }, "rest": { "oneOf": [ { @@ -869,11 +1131,36 @@ } } }, - "required": ["source", "permissions"] + "required": ["source", "permissions"], + "allOf": [ + { + "if": { + "required": ["fields"] + }, + "then": { + "not": { + "anyOf": [ + { "required": ["mappings"] }, + { "properties": { "source": { "properties": { "key-fields": { } }, "required": ["key-fields"] } } } + ] + } + } + } + ] } } } }, + "if": { + "required": ["azure-key-vault"] + }, + "then": { + "properties": { + "azure-key-vault": { + "required": ["endpoint"] + } + } + }, "required": ["data-source", "entities"], "$defs": { "singular-plural": { diff --git a/src/Azure.DataApiBuilder.Mcp/Azure.DataApiBuilder.Mcp.csproj b/src/Azure.DataApiBuilder.Mcp/Azure.DataApiBuilder.Mcp.csproj new file mode 100644 index 0000000000..f675f8d8d1 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Azure.DataApiBuilder.Mcp.csproj @@ -0,0 +1,22 @@ + + + + net8.0 + enable + enable + + + + + + + + + + + + + + + + diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/CreateRecordTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/CreateRecordTool.cs new file mode 100644 index 0000000000..6fbe08879b --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/CreateRecordTool.cs @@ -0,0 +1,275 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +using System.Text.Json; +using Azure.DataApiBuilder.Auth; +using Azure.DataApiBuilder.Config.DatabasePrimitives; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Core.Authorization; +using Azure.DataApiBuilder.Core.Configurations; +using Azure.DataApiBuilder.Core.Models; +using Azure.DataApiBuilder.Core.Resolvers; +using Azure.DataApiBuilder.Core.Resolvers.Factories; +using Azure.DataApiBuilder.Core.Services; +using Azure.DataApiBuilder.Core.Services.MetadataProviders; +using Azure.DataApiBuilder.Mcp.Model; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using ModelContextProtocol.Protocol; +using static Azure.DataApiBuilder.Mcp.Model.McpEnums; + +namespace Azure.DataApiBuilder.Mcp.BuiltInTools +{ + public class CreateRecordTool : IMcpTool + { + public ToolType ToolType { get; } = ToolType.BuiltIn; + + public Tool GetToolMetadata() + { + return new Tool + { + Name = "create_record", + Description = "Creates a new record in the specified entity.", + InputSchema = JsonSerializer.Deserialize( + @"{ + ""type"": ""object"", + ""properties"": { + ""entity"": { + ""type"": ""string"", + ""description"": ""The name of the entity"" + }, + ""data"": { + ""type"": ""object"", + ""description"": ""The data for the new record"" + } + }, + ""required"": [""entity"", ""data""] + }" + ) + }; + } + + public async Task ExecuteAsync( + JsonDocument? arguments, + IServiceProvider serviceProvider, + CancellationToken cancellationToken = default) + { + ILogger? logger = serviceProvider.GetService>(); + if (arguments == null) + { + return Utils.McpResponseBuilder.BuildErrorResult("Invalid Arguments", "No arguments provided", logger); + } + + RuntimeConfigProvider runtimeConfigProvider = serviceProvider.GetRequiredService(); + if (!runtimeConfigProvider.TryGetConfig(out RuntimeConfig? 
runtimeConfig)) + { + return Utils.McpResponseBuilder.BuildErrorResult("Invalid Configuration", "Runtime configuration not available", logger); + } + + if (runtimeConfig.McpDmlTools?.CreateRecord != true) + { + return Utils.McpResponseBuilder.BuildErrorResult( + "ToolDisabled", + "The create_record tool is disabled in the configuration.", + logger); + } + + try + { + cancellationToken.ThrowIfCancellationRequested(); + JsonElement root = arguments.RootElement; + + if (!root.TryGetProperty("entity", out JsonElement entityElement) || + !root.TryGetProperty("data", out JsonElement dataElement)) + { + return Utils.McpResponseBuilder.BuildErrorResult("InvalidArguments", "Missing required arguments 'entity' or 'data'", logger); + } + + string entityName = entityElement.GetString() ?? string.Empty; + if (string.IsNullOrWhiteSpace(entityName)) + { + return Utils.McpResponseBuilder.BuildErrorResult("InvalidArguments", "Entity name cannot be empty", logger); + } + + string dataSourceName; + try + { + dataSourceName = runtimeConfig.GetDataSourceNameFromEntityName(entityName); + } + catch (Exception) + { + return Utils.McpResponseBuilder.BuildErrorResult("InvalidConfiguration", $"Entity '{entityName}' not found in configuration", logger); + } + + IMetadataProviderFactory metadataProviderFactory = serviceProvider.GetRequiredService(); + ISqlMetadataProvider sqlMetadataProvider = metadataProviderFactory.GetMetadataProvider(dataSourceName); + + DatabaseObject dbObject; + try + { + dbObject = sqlMetadataProvider.GetDatabaseObjectByKey(entityName); + } + catch (Exception) + { + return Utils.McpResponseBuilder.BuildErrorResult("InvalidConfiguration", $"Database object for entity '{entityName}' not found", logger); + } + + // Create an HTTP context for authorization + IHttpContextAccessor httpContextAccessor = serviceProvider.GetRequiredService(); + HttpContext httpContext = httpContextAccessor.HttpContext ?? 
new DefaultHttpContext(); + IAuthorizationResolver authorizationResolver = serviceProvider.GetRequiredService(); + + if (httpContext is null || !authorizationResolver.IsValidRoleContext(httpContext)) + { + return Utils.McpResponseBuilder.BuildErrorResult("PermissionDenied", "Permission denied: Unable to resolve a valid role context for create operation.", logger); + } + + // Validate that we have at least one role authorized for create + if (!TryResolveAuthorizedRole(httpContext, authorizationResolver, entityName, out string authError)) + { + return Utils.McpResponseBuilder.BuildErrorResult("PermissionDenied", authError, logger); + } + + JsonElement insertPayloadRoot = dataElement.Clone(); + InsertRequestContext insertRequestContext = new( + entityName, + dbObject, + insertPayloadRoot, + EntityActionOperation.Insert); + + RequestValidator requestValidator = serviceProvider.GetRequiredService(); + + // Only validate tables + if (dbObject.SourceType is EntitySourceType.Table) + { + try + { + requestValidator.ValidateInsertRequestContext(insertRequestContext); + } + catch (Exception ex) + { + return Utils.McpResponseBuilder.BuildErrorResult("ValidationFailed", $"Request validation failed: {ex.Message}", logger); + } + } + else + { + return Utils.McpResponseBuilder.BuildErrorResult( + "InvalidCreateTarget", + "The create_record tool is only available for tables.", + logger); + } + + IMutationEngineFactory mutationEngineFactory = serviceProvider.GetRequiredService(); + DatabaseType databaseType = sqlMetadataProvider.GetDatabaseType(); + IMutationEngine mutationEngine = mutationEngineFactory.GetMutationEngine(databaseType); + + IActionResult?
result = await mutationEngine.ExecuteAsync(insertRequestContext); + + if (result is CreatedResult createdResult) + { + return Utils.McpResponseBuilder.BuildSuccessResult( + new Dictionary + { + ["entity"] = entityName, + ["result"] = createdResult.Value, + ["message"] = $"Successfully created record in entity '{entityName}'" + }, + logger, + $"Successfully created record in entity '{entityName}'"); + } + else if (result is ObjectResult objectResult) + { + bool isError = objectResult.StatusCode.HasValue && objectResult.StatusCode.Value >= 400 && objectResult.StatusCode.Value != 403; + if (isError) + { + return Utils.McpResponseBuilder.BuildErrorResult( + "CreateFailed", + $"Failed to create record in entity '{entityName}'. Error: {JsonSerializer.Serialize(objectResult.Value)}", + logger); + } + else + { + return Utils.McpResponseBuilder.BuildSuccessResult( + new Dictionary + { + ["entity"] = entityName, + ["result"] = objectResult.Value, + ["message"] = $"Successfully created record in entity '{entityName}'. Unable to perform read-back of inserted records." + }, + logger, + $"Successfully created record in entity '{entityName}'. 
Unable to perform read-back of inserted records."); + } + } + else + { + if (result is null) + { + return Utils.McpResponseBuilder.BuildErrorResult( + "UnexpectedError", + $"Mutation engine returned null result for entity '{entityName}'", + logger); + } + else + { + return Utils.McpResponseBuilder.BuildSuccessResult( + new Dictionary + { + ["entity"] = entityName, + ["message"] = $"Create operation completed with unexpected result type: {result.GetType().Name}" + }, + logger, + $"Create operation completed for entity '{entityName}' with unexpected result type: {result.GetType().Name}"); + } + } + } + catch (Exception ex) + { + return Utils.McpResponseBuilder.BuildErrorResult("Error", $"Error: {ex.Message}", logger); + } + } + + private static bool TryResolveAuthorizedRole( + HttpContext httpContext, + IAuthorizationResolver authorizationResolver, + string entityName, + out string error) + { + error = string.Empty; + + string roleHeader = httpContext.Request.Headers[AuthorizationResolver.CLIENT_ROLE_HEADER].ToString(); + + if (string.IsNullOrWhiteSpace(roleHeader)) + { + error = "Client role header is missing or empty."; + return false; + } + + string[] roles = roleHeader + .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + + if (roles.Length == 0) + { + error = "Client role header is missing or empty."; + return false; + } + + foreach (string role in roles) + { + bool allowed = authorizationResolver.AreRoleAndOperationDefinedForEntity( + entityName, role, EntityActionOperation.Create); + + if (allowed) + { + return true; + } + } + + error = "You do not have permission to create records for this entity."; + return false; + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/DeleteRecordTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/DeleteRecordTool.cs new file mode 100644 index 0000000000..86a5ce15ec --- /dev/null +++ 
b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/DeleteRecordTool.cs @@ -0,0 +1,346 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Data.Common; +using System.Text.Json; +using Azure.DataApiBuilder.Auth; +using Azure.DataApiBuilder.Config.DatabasePrimitives; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Core.Configurations; +using Azure.DataApiBuilder.Core.Models; +using Azure.DataApiBuilder.Core.Resolvers; +using Azure.DataApiBuilder.Core.Resolvers.Factories; +using Azure.DataApiBuilder.Core.Services; +using Azure.DataApiBuilder.Core.Services.MetadataProviders; +using Azure.DataApiBuilder.Mcp.Model; +using Azure.DataApiBuilder.Mcp.Utils; +using Azure.DataApiBuilder.Service.Exceptions; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using ModelContextProtocol.Protocol; +using static Azure.DataApiBuilder.Mcp.Model.McpEnums; + +namespace Azure.DataApiBuilder.Mcp.BuiltInTools +{ + /// + /// Tool to delete records from a table/view entity configured in DAB. + /// Supports both simple and composite primary keys. + /// + public class DeleteRecordTool : IMcpTool + { + /// + /// Gets the type of the tool, which is BuiltIn for this implementation. + /// + public ToolType ToolType { get; } = ToolType.BuiltIn; + + /// + /// Gets the metadata for the delete-record tool, including its name, description, and input schema. + /// + public Tool GetToolMetadata() + { + return new Tool + { + Name = "delete_record", + Description = "Deletes a record from a table based on primary key or composite key", + InputSchema = JsonSerializer.Deserialize( + @"{ + ""type"": ""object"", + ""properties"": { + ""entity"": { + ""type"": ""string"", + ""description"": ""The name of the entity (table) as configured in dab-config. 
Required."" + }, + ""keys"": { + ""type"": ""object"", + ""description"": ""Primary key values to identify the record to delete. For composite keys, provide all key columns as properties. Required."" + } + }, + ""required"": [""entity"", ""keys""] + }" + ) + }; + } + + /// + /// Executes the delete-record tool, deleting an existing record in the specified entity using provided keys. + /// + public async Task ExecuteAsync( + JsonDocument? arguments, + IServiceProvider serviceProvider, + CancellationToken cancellationToken = default) + { + ILogger? logger = serviceProvider.GetService>(); + + try + { + // Cancellation check at the start + cancellationToken.ThrowIfCancellationRequested(); + + // 1) Resolve required services & configuration + RuntimeConfigProvider runtimeConfigProvider = serviceProvider.GetRequiredService(); + RuntimeConfig config = runtimeConfigProvider.GetConfig(); + + // 2) Check if the tool is enabled in configuration before proceeding + if (config.McpDmlTools?.DeleteRecord != true) + { + return McpResponseBuilder.BuildErrorResult( + "ToolDisabled", + $"The {this.GetToolMetadata().Name} tool is disabled in the configuration.", + logger); + } + + // 3) Parsing & basic argument validation + if (arguments is null) + { + return McpResponseBuilder.BuildErrorResult("InvalidArguments", "No arguments provided.", logger); + } + + if (!McpArgumentParser.TryParseEntityAndKeys(arguments.RootElement, out string entityName, out Dictionary keys, out string parseError)) + { + return McpResponseBuilder.BuildErrorResult("InvalidArguments", parseError, logger); + } + + IMetadataProviderFactory metadataProviderFactory = serviceProvider.GetRequiredService(); + IMutationEngineFactory mutationEngineFactory = serviceProvider.GetRequiredService(); + + // 4) Resolve metadata for entity existence check + string dataSourceName; + ISqlMetadataProvider sqlMetadataProvider; + + try + { + dataSourceName = config.GetDataSourceNameFromEntityName(entityName); + sqlMetadataProvider = 
metadataProviderFactory.GetMetadataProvider(dataSourceName); + } + catch (Exception) + { + return McpResponseBuilder.BuildErrorResult("EntityNotFound", $"Entity '{entityName}' is not defined in the configuration.", logger); + } + + if (!sqlMetadataProvider.EntityToDatabaseObject.TryGetValue(entityName, out DatabaseObject? dbObject) || dbObject is null) + { + return McpResponseBuilder.BuildErrorResult("EntityNotFound", $"Entity '{entityName}' is not defined in the configuration.", logger); + } + + // Validate it's a table or view + if (dbObject.SourceType != EntitySourceType.Table && dbObject.SourceType != EntitySourceType.View) + { + return McpResponseBuilder.BuildErrorResult("InvalidEntity", $"Entity '{entityName}' is not a table or view. Use 'execute_entity' for stored procedures.", logger); + } + + // 5) Authorization + IAuthorizationResolver authResolver = serviceProvider.GetRequiredService(); + IHttpContextAccessor httpContextAccessor = serviceProvider.GetRequiredService(); + HttpContext? httpContext = httpContextAccessor.HttpContext; + + if (!McpAuthorizationHelper.ValidateRoleContext(httpContext, authResolver, out string roleError)) + { + return McpResponseBuilder.BuildErrorResult("PermissionDenied", $"Permission denied: {roleError}", logger); + } + + if (!McpAuthorizationHelper.TryResolveAuthorizedRole( + httpContext!, + authResolver, + entityName, + EntityActionOperation.Delete, + out string?
effectiveRole, + out string authError)) + { + return McpResponseBuilder.BuildErrorResult("PermissionDenied", $"Permission denied: {authError}", logger); + } + + // 6) Build and validate Delete context + RequestValidator requestValidator = new(metadataProviderFactory, runtimeConfigProvider); + + DeleteRequestContext context = new( + entityName: entityName, + dbo: dbObject, + isList: false); + + foreach (KeyValuePair kvp in keys) + { + if (kvp.Value is null) + { + return McpResponseBuilder.BuildErrorResult("InvalidArguments", $"Primary key value for '{kvp.Key}' cannot be null.", logger); + } + + context.PrimaryKeyValuePairs[kvp.Key] = kvp.Value; + } + + requestValidator.ValidatePrimaryKey(context); + + // 7) Execute + DatabaseType dbType = config.GetDataSourceFromDataSourceName(dataSourceName).DatabaseType; + IMutationEngine mutationEngine = mutationEngineFactory.GetMutationEngine(dbType); + + IActionResult? mutationResult = null; + try + { + // Cancellation check before executing + cancellationToken.ThrowIfCancellationRequested(); + mutationResult = await mutationEngine.ExecuteAsync(context).ConfigureAwait(false); + } + catch (DataApiBuilderException dabEx) + { + // Handle specific DAB exceptions + logger?.LogError(dabEx, "Data API Builder error deleting record from {Entity}", entityName); + + string message = dabEx.Message; + + // Check for specific error patterns + if (message.Contains("Could not find item with", StringComparison.OrdinalIgnoreCase)) + { + string keyDetails = McpJsonHelper.FormatKeyDetails(keys); + return McpResponseBuilder.BuildErrorResult( + "RecordNotFound", + $"No record found with the specified primary key: {keyDetails}", + logger); + } + else if (message.Contains("violates foreign key constraint", StringComparison.OrdinalIgnoreCase) || + message.Contains("REFERENCE constraint", StringComparison.OrdinalIgnoreCase)) + { + return McpResponseBuilder.BuildErrorResult( + "ConstraintViolation", + "Cannot delete record due to foreign key constraint. 
Other records depend on this record.", + logger); + } + else if (message.Contains("permission", StringComparison.OrdinalIgnoreCase) || + message.Contains("authorization", StringComparison.OrdinalIgnoreCase)) + { + return McpResponseBuilder.BuildErrorResult( + "PermissionDenied", + "You do not have permission to delete this record.", + logger); + } + else if (message.Contains("invalid", StringComparison.OrdinalIgnoreCase) && + message.Contains("type", StringComparison.OrdinalIgnoreCase)) + { + return McpResponseBuilder.BuildErrorResult( + "InvalidArguments", + "Invalid data type for one or more key values.", + logger); + } + + // For any other DAB exceptions, return the message as-is + return McpResponseBuilder.BuildErrorResult( + "DataApiBuilderError", + dabEx.Message, + logger); + } + catch (SqlException sqlEx) + { + // Handle SQL Server specific errors + logger?.LogError(sqlEx, "SQL Server error deleting record from {Entity}", entityName); + string errorMessage = sqlEx.Number switch + { + 547 => "Cannot delete record due to foreign key constraint. Other records depend on this record.", + 2627 or 2601 => "Cannot delete record due to unique constraint violation.", + 229 or 262 => $"Permission denied to delete from table '{dbObject.FullName}'.", + 208 => $"Table '{dbObject.FullName}' not found in the database.", + _ => $"Database error: {sqlEx.Message}" + }; + return McpResponseBuilder.BuildErrorResult("DatabaseError", errorMessage, logger); + } + catch (DbException dbEx) + { + // Handle generic database exceptions (works for PostgreSQL, MySQL, etc.) + logger?.LogError(dbEx, "Database error deleting record from {Entity}", entityName); + + // Check for common patterns in error messages + string errorMsg = dbEx.Message.ToLowerInvariant(); + if (errorMsg.Contains("foreign key") || errorMsg.Contains("constraint")) + { + return McpResponseBuilder.BuildErrorResult( + "ConstraintViolation", + "Cannot delete record due to foreign key constraint. 
Other records depend on this record.", + logger); + } + else if (errorMsg.Contains("not found") || errorMsg.Contains("does not exist")) + { + return McpResponseBuilder.BuildErrorResult( + "RecordNotFound", + "No record found with the specified primary key.", + logger); + } + + return McpResponseBuilder.BuildErrorResult("DatabaseError", $"Database error: {dbEx.Message}", logger); + } + catch (InvalidOperationException ioEx) when (ioEx.Message.Contains("connection", StringComparison.OrdinalIgnoreCase)) + { + // Handle connection-related issues + logger?.LogError(ioEx, "Database connection error"); + return McpResponseBuilder.BuildErrorResult("ConnectionError", "Failed to connect to the database.", logger); + } + catch (TimeoutException timeoutEx) + { + // Handle query timeout + logger?.LogError(timeoutEx, "Delete operation timeout for {Entity}", entityName); + return McpResponseBuilder.BuildErrorResult("TimeoutError", "The delete operation timed out.", logger); + } + catch (Exception ex) + { + string errorMsg = ex.Message ?? 
string.Empty; + + if (errorMsg.Contains("Could not find", StringComparison.OrdinalIgnoreCase) || + errorMsg.Contains("record not found", StringComparison.OrdinalIgnoreCase)) + { + string keyDetails = McpJsonHelper.FormatKeyDetails(keys); + return McpResponseBuilder.BuildErrorResult( + "RecordNotFound", + $"No entity found with the given key {keyDetails}.", + logger); + } + else + { + // Re-throw unexpected exceptions + throw; + } + } + + // 8) Build response + // Based on SqlMutationEngine, delete operations typically return NoContentResult + // We build a success response with just the operation details + Dictionary responseData = new() + { + ["entity"] = entityName, + ["keyDetails"] = McpJsonHelper.FormatKeyDetails(keys), + ["message"] = "Record deleted successfully" + }; + + // If the mutation result is OkObjectResult (which would be unusual for delete), + // include the result value directly without re-serialization + if (mutationResult is OkObjectResult okObjectResult && okObjectResult.Value is not null) + { + responseData["result"] = okObjectResult.Value; + } + + return McpResponseBuilder.BuildSuccessResult( + responseData, + logger, + $"DeleteRecordTool success for entity {entityName}." + ); + } + catch (OperationCanceledException) + { + return McpResponseBuilder.BuildErrorResult("OperationCanceled", "The delete operation was canceled.", logger); + } + catch (ArgumentException argEx) + { + return McpResponseBuilder.BuildErrorResult("InvalidArguments", argEx.Message, logger); + } + catch (Exception ex) + { + ILogger? 
innerLogger = serviceProvider.GetService>(); + innerLogger?.LogError(ex, "Unexpected error in DeleteRecordTool."); + + return McpResponseBuilder.BuildErrorResult( + "UnexpectedError", + "An unexpected error occurred during the delete operation.", + logger); + } + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/DescribeEntitiesTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/DescribeEntitiesTool.cs new file mode 100644 index 0000000000..95c53d1d28 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/DescribeEntitiesTool.cs @@ -0,0 +1,387 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Core.Configurations; +using Azure.DataApiBuilder.Mcp.Model; +using Azure.DataApiBuilder.Mcp.Utils; +using Azure.DataApiBuilder.Service.Exceptions; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using ModelContextProtocol.Protocol; +using static Azure.DataApiBuilder.Mcp.Model.McpEnums; + +namespace Azure.DataApiBuilder.Mcp.BuiltInTools +{ + /// + /// Tool to describe all entities configured in DAB, including their types and metadata. + /// + public class DescribeEntitiesTool : IMcpTool + { + /// + /// Gets the type of the tool, which is BuiltIn for this implementation. + /// + public ToolType ToolType { get; } = ToolType.BuiltIn; + + /// + /// Gets the metadata for the describe_entities tool, including its name, description, and input schema. + /// + /// + public Tool GetToolMetadata() + { + return new Tool + { + Name = "describe_entities", + Description = "Lists and describes all entities in the database, including their types and available operations.", + InputSchema = JsonSerializer.Deserialize( + @"{ + ""type"": ""object"", + ""properties"": { + ""nameOnly"": { + ""type"": ""boolean"", + ""description"": ""If true, only entity names and descriptions will be returned. 
If false, full metadata including fields, parameters etc. will be included. Default is false."" + }, + ""entities"": { + ""type"": ""array"", + ""items"": { + ""type"": ""string"" + }, + ""description"": ""Optional list of specific entity names to filter by. If empty, all entities will be described."" + } + } + }" + ) + }; + } + + /// + /// Executes the DescribeEntities tool, returning metadata about configured entities. + /// + public Task ExecuteAsync( + JsonDocument? arguments, + IServiceProvider serviceProvider, + CancellationToken cancellationToken = default) + { + ILogger? logger = serviceProvider.GetService>(); + + try + { + cancellationToken.ThrowIfCancellationRequested(); + + RuntimeConfigProvider runtimeConfigProvider = serviceProvider.GetRequiredService(); + RuntimeConfig runtimeConfig = runtimeConfigProvider.GetConfig(); + + if (!IsToolEnabled(runtimeConfig)) + { + return Task.FromResult(McpResponseBuilder.BuildErrorResult( + "ToolDisabled", + $"The {GetToolMetadata().Name} tool is disabled in the configuration.", + logger)); + } + + (bool nameOnly, HashSet? entityFilter) = ParseArguments(arguments, logger); + + List> entityList = new(); + + if (runtimeConfig.Entities != null) + { + foreach (KeyValuePair entityEntry in runtimeConfig.Entities) + { + cancellationToken.ThrowIfCancellationRequested(); + + string entityName = entityEntry.Key; + Entity entity = entityEntry.Value; + + if (!ShouldIncludeEntity(entityName, entityFilter)) + { + continue; + } + + try + { + Dictionary entityInfo = nameOnly + ? 
BuildBasicEntityInfo(entityName, entity) + : BuildFullEntityInfo(entityName, entity); + + entityList.Add(entityInfo); + } + catch (Exception ex) + { + logger?.LogWarning(ex, "Failed to build info for entity {EntityName}", entityName); + } + } + } + + if (entityList.Count == 0) + { + if (entityFilter != null && entityFilter.Count > 0) + { + return Task.FromResult(McpResponseBuilder.BuildErrorResult( + "EntitiesNotFound", + $"No entities found matching the filter: {string.Join(", ", entityFilter)}", + logger)); + } + else + { + return Task.FromResult(McpResponseBuilder.BuildErrorResult( + "NoEntitiesConfigured", + "No entities are configured in the runtime configuration.", + logger)); + } + } + + cancellationToken.ThrowIfCancellationRequested(); + + entityList = entityList.OrderBy(e => e["name"]?.ToString() ?? string.Empty).ToList(); + + List finalEntityList = entityList.Cast().ToList(); + + Dictionary responseData = new() + { + ["entities"] = finalEntityList, + ["count"] = finalEntityList.Count, + ["mode"] = nameOnly ? "basic" : "full" + }; + + if (entityFilter != null && entityFilter.Count > 0) + { + responseData["filter"] = entityFilter.ToArray(); + } + + logger?.LogInformation( + "DescribeEntitiesTool returned {EntityCount} entities in {Mode} mode.", + finalEntityList.Count, + nameOnly ? 
"basic" : "full"); + + return Task.FromResult(McpResponseBuilder.BuildSuccessResult( + responseData, + logger, + $"DescribeEntitiesTool success: {finalEntityList.Count} entities returned.")); + } + catch (OperationCanceledException) + { + return Task.FromResult(McpResponseBuilder.BuildErrorResult( + "OperationCanceled", + "The describe operation was canceled.", + logger)); + } + catch (DataApiBuilderException dabEx) + { + logger?.LogError(dabEx, "Data API Builder error in DescribeEntitiesTool"); + return Task.FromResult(McpResponseBuilder.BuildErrorResult( + "DataApiBuilderError", + dabEx.Message, + logger)); + } + catch (ArgumentException argEx) + { + return Task.FromResult(McpResponseBuilder.BuildErrorResult( + "InvalidArguments", + argEx.Message, + logger)); + } + catch (InvalidOperationException ioEx) + { + logger?.LogError(ioEx, "Invalid operation in DescribeEntitiesTool"); + return Task.FromResult(McpResponseBuilder.BuildErrorResult( + "InvalidOperation", + "Failed to retrieve entity metadata: " + ioEx.Message, + logger)); + } + catch (Exception ex) + { + logger?.LogError(ex, "Unexpected error in DescribeEntitiesTool"); + return Task.FromResult(McpResponseBuilder.BuildErrorResult( + "UnexpectedError", + "An unexpected error occurred while describing entities.", + logger)); + } + } + + /// + /// Determines whether the tool is enabled based on the specified runtime configuration. + /// + /// The runtime configuration to evaluate. Must not be null. + /// if the tool is enabled and the DescribeEntities property of McpDmlTools + /// is set to ; otherwise, . + private static bool IsToolEnabled(RuntimeConfig runtimeConfig) + { + return runtimeConfig.McpDmlTools?.DescribeEntities == true; + } + + /// + /// Parses the input arguments to extract the 'nameOnly' flag and the optional entity filter list. + /// + /// The arguments to parse + /// The logger + /// A tuple containing the parsed 'nameOnly' flag and the optional entity filter list. 
+ private static (bool nameOnly, HashSet? entityFilter) ParseArguments(JsonDocument? arguments, ILogger? logger) + { + bool nameOnly = false; + HashSet? entityFilter = null; + + if (arguments?.RootElement.ValueKind == JsonValueKind.Object) + { + if (arguments.RootElement.TryGetProperty("nameOnly", out JsonElement nameOnlyElement)) + { + if (nameOnlyElement.ValueKind == JsonValueKind.True || nameOnlyElement.ValueKind == JsonValueKind.False) + { + nameOnly = nameOnlyElement.GetBoolean(); + } + } + + if (arguments.RootElement.TryGetProperty("entities", out JsonElement entitiesElement) && + entitiesElement.ValueKind == JsonValueKind.Array) + { + entityFilter = new HashSet(StringComparer.OrdinalIgnoreCase); + foreach (JsonElement entityElement in entitiesElement.EnumerateArray()) + { + if (entityElement.ValueKind == JsonValueKind.String) + { + string? entityName = entityElement.GetString(); + if (!string.IsNullOrWhiteSpace(entityName)) + { + entityFilter.Add(entityName); + } + } + } + + if (entityFilter.Count == 0) + { + entityFilter = null; + } + } + } + + logger?.LogDebug("Parsed arguments - nameOnly: {NameOnly}, entityFilter: {EntityFilter}", + nameOnly, entityFilter != null ? string.Join(", ", entityFilter) : "none"); + + return (nameOnly, entityFilter); + } + + /// + /// Determines whether the specified entity should be included based on the provided entity filter. + /// + /// The name of the entity to evaluate. + /// A set of entity names to include. If or empty, all entities are included. + /// if the entity should be included; otherwise, . + private static bool ShouldIncludeEntity(string entityName, HashSet? entityFilter) + { + return entityFilter == null || entityFilter.Count == 0 || entityFilter.Contains(entityName); + } + + /// + /// Creates a dictionary containing basic information about an entity. + /// + /// The name of the entity to include in the dictionary. + /// The entity object from which to extract additional information. 
+ /// A dictionary with two keys: "name", containing the entity name, and "description", containing the entity's + /// description or an empty string if the description is null. + private static Dictionary BuildBasicEntityInfo(string entityName, Entity entity) + { + return new Dictionary + { + ["name"] = entityName, + ["description"] = entity.Description ?? string.Empty + }; + } + + /// + /// Builds full entity info: name, description, fields, parameters (for stored procs), permissions. + /// + private static Dictionary BuildFullEntityInfo(string entityName, Entity entity) + { + Dictionary info = new() + { + ["name"] = entityName, + ["description"] = entity.Description ?? string.Empty, + ["fields"] = BuildFieldMetadataInfo(entity.Fields), + }; + + if (entity.Source.Type == EntitySourceType.StoredProcedure) + { + info["parameters"] = BuildParameterMetadataInfo(entity.Source.Parameters); + } + + info["permissions"] = BuildPermissionsInfo(entity); + + return info; + } + + /// + /// Builds a list of metadata information objects from the provided collection of fields. + /// + /// A list of objects representing the fields to process. Can be null. + /// A list of objects, each containing the name and description of a field. If is + /// null, an empty list is returned. + private static List BuildFieldMetadataInfo(List? fields) + { + List result = new(); + + if (fields != null) + { + foreach (FieldMetadata field in fields) + { + result.Add(new + { + name = field.Name, + description = field.Description ?? string.Empty + }); + } + } + + return result; + } + + /// + /// Builds a list of parameter metadata objects containing information about each parameter. + /// + /// A list of objects representing the parameters to process. Can be null. + /// A list of anonymous objects, each containing the parameter's name, whether it is required, its default + /// value, and its description. Returns an empty list if is null. + private static List BuildParameterMetadataInfo(List? 
parameters) + { + List result = new(); + + if (parameters != null) + { + foreach (ParameterMetadata param in parameters) + { + result.Add(new + { + name = param.Name, + required = param.Default == null, // required if no default + @default = param.Default, + description = param.Description ?? string.Empty + }); + } + } + + return result; + } + + /// + /// Build a list of permission metadata info + /// + /// The entity object + /// A list of permissions available to the entity + private static string[] BuildPermissionsInfo(Entity entity) + { + HashSet permissions = new(); + + if (entity.Permissions != null) + { + foreach (EntityPermission permission in entity.Permissions) + { + foreach (EntityAction action in permission.Actions) + { + permissions.Add(action.Action.ToString().ToUpperInvariant()); + } + } + } + + return permissions.OrderBy(p => p).ToArray(); + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/ExecuteEntityTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/ExecuteEntityTool.cs new file mode 100644 index 0000000000..c7734eea22 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/ExecuteEntityTool.cs @@ -0,0 +1,453 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +using System.Data.Common; +using System.Text.Json; +using Azure.DataApiBuilder.Auth; +using Azure.DataApiBuilder.Config.DatabasePrimitives; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Core.Configurations; +using Azure.DataApiBuilder.Core.Models; +using Azure.DataApiBuilder.Core.Resolvers; +using Azure.DataApiBuilder.Core.Resolvers.Factories; +using Azure.DataApiBuilder.Core.Services; +using Azure.DataApiBuilder.Core.Services.MetadataProviders; +using Azure.DataApiBuilder.Mcp.Model; +using Azure.DataApiBuilder.Mcp.Utils; +using Azure.DataApiBuilder.Service.Exceptions; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using ModelContextProtocol.Protocol; +using static Azure.DataApiBuilder.Mcp.Model.McpEnums; + +namespace Azure.DataApiBuilder.Mcp.BuiltInTools +{ + /// + /// Tool to execute a stored procedure or function exposed as a DAB entity. + /// Behaves most like the GraphQL flow with entity permissions enforced. + /// + public class ExecuteEntityTool : IMcpTool + { + /// + /// Gets the type of the tool, which is BuiltIn for this implementation. + /// + public ToolType ToolType { get; } = ToolType.BuiltIn; + + /// + /// Gets the metadata for the execute-entity tool, including its name, description, and input schema. + /// + public Tool GetToolMetadata() + { + return new Tool + { + Name = "execute_entity", + Description = "Executes a stored procedure or function, returns the results (if any)", + InputSchema = JsonSerializer.Deserialize( + @"{ + ""type"": ""object"", + ""properties"": { + ""entity"": { + ""type"": ""string"", + ""description"": ""The entity name of the procedure or function to execute. Must match a stored-procedure entity as configured in dab-config. 
Required."" + }, + ""parameters"": { + ""type"": ""object"", + ""description"": ""A dictionary of parameter names and values to pass to the procedure. Parameters must match those defined in dab-config. Optional if no parameters."" + } + }, + ""required"": [""entity""] + }" + ) + }; + } + + /// + /// Executes a stored procedure or function, returns the results (if any). + /// + public async Task ExecuteAsync( + JsonDocument? arguments, + IServiceProvider serviceProvider, + CancellationToken cancellationToken = default) + { + ILogger? logger = serviceProvider.GetService>(); + + try + { + // Cancellation check at the start + cancellationToken.ThrowIfCancellationRequested(); + + // 1) Resolve required services & configuration + RuntimeConfigProvider runtimeConfigProvider = serviceProvider.GetRequiredService(); + RuntimeConfig config = runtimeConfigProvider.GetConfig(); + + // 2) Check if the tool is enabled in configuration before proceeding + if (config.McpDmlTools?.ExecuteEntity != true) + { + return McpResponseBuilder.BuildErrorResult( + "ToolDisabled", + $"The {this.GetToolMetadata().Name} tool is disabled in the configuration.", + logger); + } + + // 3) Parsing & basic argument validation + if (arguments is null) + { + return McpResponseBuilder.BuildErrorResult("InvalidArguments", "No arguments provided.", logger); + } + + if (!TryParseExecuteArguments(arguments.RootElement, out string entity, out Dictionary parameters, out string parseError)) + { + return McpResponseBuilder.BuildErrorResult("InvalidArguments", parseError, logger); + } + + // Entity is required + if (string.IsNullOrWhiteSpace(entity)) + { + return McpResponseBuilder.BuildErrorResult("InvalidArguments", "Entity is required", logger); + } + + IMetadataProviderFactory metadataProviderFactory = serviceProvider.GetRequiredService(); + IQueryEngineFactory queryEngineFactory = serviceProvider.GetRequiredService(); + + // 4) Validate entity exists and is a stored procedure + if 
(!config.Entities.TryGetValue(entity, out Entity? entityConfig)) + { + return McpResponseBuilder.BuildErrorResult("EntityNotFound", $"Entity '{entity}' not found in configuration.", logger); + } + + if (entityConfig.Source.Type != EntitySourceType.StoredProcedure) + { + return McpResponseBuilder.BuildErrorResult("InvalidEntity", $"Entity {entity} cannot be executed.", logger); + } + + // 5) Resolve metadata + string dataSourceName; + ISqlMetadataProvider sqlMetadataProvider; + + try + { + dataSourceName = config.GetDataSourceNameFromEntityName(entity); + sqlMetadataProvider = metadataProviderFactory.GetMetadataProvider(dataSourceName); + } + catch (Exception) + { + return McpResponseBuilder.BuildErrorResult("EntityNotFound", $"Failed to resolve entity metadata for '{entity}'.", logger); + } + + if (!sqlMetadataProvider.EntityToDatabaseObject.TryGetValue(entity, out DatabaseObject? dbObject) || dbObject is null) + { + return McpResponseBuilder.BuildErrorResult("EntityNotFound", $"Failed to resolve database object for entity '{entity}'.", logger); + } + + // 6) Authorization - Never bypass permissions + IAuthorizationResolver authResolver = serviceProvider.GetRequiredService(); + IHttpContextAccessor httpContextAccessor = serviceProvider.GetRequiredService(); + HttpContext? httpContext = httpContextAccessor.HttpContext; + + if (!McpAuthorizationHelper.ValidateRoleContext(httpContext, authResolver, out string roleError)) + { + return McpResponseBuilder.BuildErrorResult("PermissionDenied", roleError, logger); + } + + if (!McpAuthorizationHelper.TryResolveAuthorizedRole( + httpContext!, + authResolver, + entity, + EntityActionOperation.Execute, + out string? 
effectiveRole, + out string authError)) + { + return McpResponseBuilder.BuildErrorResult("PermissionDenied", authError, logger); + } + + // 7) Validate parameters against metadata + if (parameters != null && entityConfig.Source.Parameters != null) + { + // Validate all provided parameters exist in metadata + foreach (KeyValuePair param in parameters) + { + if (!entityConfig.Source.Parameters.Any(p => p.Name == param.Key)) + { + return McpResponseBuilder.BuildErrorResult("InvalidArguments", $"Invalid parameter: {param.Key}", logger); + } + } + } + + // 8) Build request payload + JsonElement? requestPayloadRoot = null; + + if (parameters?.Count > 0) + { + string jsonPayload = JsonSerializer.Serialize(parameters); + using JsonDocument doc = JsonDocument.Parse(jsonPayload); + requestPayloadRoot = doc.RootElement.Clone(); + } + + // 9) Build stored procedure execution context + StoredProcedureRequestContext context = new( + entityName: entity, + dbo: dbObject, + requestPayloadRoot: requestPayloadRoot, + operationType: EntityActionOperation.Execute); + + // First, add user-provided parameters to the context + if (requestPayloadRoot != null) + { + foreach (JsonProperty property in requestPayloadRoot.Value.EnumerateObject()) + { + context.FieldValuePairsInBody[property.Name] = GetParameterValue(property.Value); + } + } + + // Then, add default parameters from configuration (only if not already provided by user) + if ((parameters == null || parameters.Count == 0) && entityConfig.Source.Parameters != null) + { + foreach (ParameterMetadata param in entityConfig.Source.Parameters) + { + if (!context.FieldValuePairsInBody.ContainsKey(param.Name)) + { + context.FieldValuePairsInBody[param.Name] = param.Default; + } + } + } + + // Populate resolved parameters for stored procedure execution + context.PopulateResolvedParameters(); + + // 10) Execute stored procedure + DatabaseType dbType = config.GetDataSourceFromDataSourceName(dataSourceName).DatabaseType; + IQueryEngine 
queryEngine = queryEngineFactory.GetQueryEngine(dbType); + + IActionResult? queryResult = null; + + try + { + // Cancellation check before executing + cancellationToken.ThrowIfCancellationRequested(); + queryResult = await queryEngine.ExecuteAsync(context, dataSourceName).ConfigureAwait(false); + } + catch (DataApiBuilderException dabEx) + { + // Handle specific DAB exceptions + logger?.LogError(dabEx, "Data API builder error executing stored procedure {StoredProcedure}", entity); + + string message = dabEx.Message; + + // Check for specific error patterns + if (message.Contains("permission", StringComparison.OrdinalIgnoreCase) || + message.Contains("authorization", StringComparison.OrdinalIgnoreCase)) + { + return McpResponseBuilder.BuildErrorResult( + "PermissionDenied", + "You do not have permission to execute this stored procedure.", + logger); + } + else if (message.Contains("invalid", StringComparison.OrdinalIgnoreCase) && + message.Contains("type", StringComparison.OrdinalIgnoreCase)) + { + return McpResponseBuilder.BuildErrorResult( + "InvalidArguments", + "Invalid data type for one or more parameters.", + logger); + } + + // For any other DAB exceptions, return the message as-is + return McpResponseBuilder.BuildErrorResult( + "DataApiBuilderError", + dabEx.Message, + logger); + } + catch (SqlException sqlEx) + { + // Handle SQL Server specific errors + logger?.LogError(sqlEx, "SQL Server error executing stored procedure {StoredProcedure}", entity); + string errorMessage = sqlEx.Number switch + { + 2812 => $"Stored procedure '{entityConfig.Source.Object}' not found in the database.", + 8144 => $"Stored procedure '{entityConfig.Source.Object}' has too many parameters specified.", + 201 => $"Stored procedure '{entityConfig.Source.Object}' expects parameter(s) that were not supplied.", + 245 => "Type conversion failed when processing parameters.", + 229 or 262 => $"Permission denied to execute stored procedure '{entityConfig.Source.Object}'.", + _ => 
$"Database error: {sqlEx.Message}" + }; + return McpResponseBuilder.BuildErrorResult("DatabaseError", errorMessage, logger); + } + catch (DbException dbEx) + { + // Handle generic database exceptions (works for PostgreSQL, MySQL, etc.) + logger?.LogError(dbEx, "Database error executing stored procedure {StoredProcedure}", entity); + return McpResponseBuilder.BuildErrorResult("DatabaseError", $"Database error: {dbEx.Message}", logger); + } + catch (InvalidOperationException ioEx) when (ioEx.Message.Contains("connection", StringComparison.OrdinalIgnoreCase)) + { + // Handle connection-related issues + logger?.LogError(ioEx, "Database connection error"); + return McpResponseBuilder.BuildErrorResult("ConnectionError", "Failed to connect to the database.", logger); + } + catch (TimeoutException timeoutEx) + { + // Handle query timeout + logger?.LogError(timeoutEx, "Stored procedure execution timeout for {StoredProcedure}", entity); + return McpResponseBuilder.BuildErrorResult("TimeoutError", "The stored procedure execution timed out.", logger); + } + catch (Exception ex) + { + // Generic database/execution errors + logger?.LogError(ex, "Unexpected error executing stored procedure {StoredProcedure}", entity); + return McpResponseBuilder.BuildErrorResult("DatabaseError", "An error occurred while executing the stored procedure.", logger); + } + + // 11) Build response with execution result + return BuildExecuteSuccessResponse(entity, parameters, queryResult, logger); + } + catch (OperationCanceledException) + { + return McpResponseBuilder.BuildErrorResult("OperationCanceled", "The execute operation was canceled.", logger); + } + catch (ArgumentException argEx) + { + return McpResponseBuilder.BuildErrorResult("InvalidArguments", argEx.Message, logger); + } + catch (Exception ex) + { + logger?.LogError(ex, "Unexpected error in ExecuteEntityTool."); + return McpResponseBuilder.BuildErrorResult( + "UnexpectedError", + "An unexpected error occurred during the execute 
operation.", + logger); + } + } + + /// + /// Parses the execute arguments from the JSON input. + /// + private static bool TryParseExecuteArguments( + JsonElement rootElement, + out string entity, + out Dictionary parameters, + out string parseError) + { + entity = string.Empty; + parameters = new Dictionary(); + parseError = string.Empty; + + if (rootElement.ValueKind != JsonValueKind.Object) + { + parseError = "Arguments must be an object"; + return false; + } + + // Extract entity name (required) + if (!rootElement.TryGetProperty("entity", out JsonElement entityElement) || + entityElement.ValueKind != JsonValueKind.String) + { + parseError = "Missing or invalid 'entity' parameter"; + return false; + } + + entity = entityElement.GetString() ?? string.Empty; + + // Extract parameters if provided (optional) + if (rootElement.TryGetProperty("parameters", out JsonElement parametersElement) && + parametersElement.ValueKind == JsonValueKind.Object) + { + foreach (JsonProperty property in parametersElement.EnumerateObject()) + { + parameters[property.Name] = GetParameterValue(property.Value); + } + } + + return true; + } + + /// + /// Converts a JSON element to its appropriate CLR type matching GraphQL data types. + /// + private static object? GetParameterValue(JsonElement element) + { + return element.ValueKind switch + { + JsonValueKind.String => element.GetString(), + JsonValueKind.Number => + element.TryGetInt64(out long longValue) ? longValue : + element.TryGetDecimal(out decimal decimalValue) ? decimalValue : + element.GetDouble(), + JsonValueKind.True => true, + JsonValueKind.False => false, + JsonValueKind.Null => null, + _ => element.ToString() + }; + } + + /// + /// Builds a successful response for the execute operation. + /// + private static CallToolResult BuildExecuteSuccessResponse( + string entityName, + Dictionary? parameters, + IActionResult? queryResult, + ILogger? 
logger) + { + Dictionary responseData = new() + { + ["entity"] = entityName, + ["message"] = "Stored procedure executed successfully" + }; + + // Include parameters if any were provided + if (parameters?.Count > 0) + { + responseData["parameters"] = parameters; + } + + // Handle different result types + if (queryResult is OkObjectResult okResult && okResult.Value != null) + { + // Extract the actual data from the action result + if (okResult.Value is JsonDocument jsonDoc) + { + JsonElement root = jsonDoc.RootElement; + responseData["value"] = root.ValueKind == JsonValueKind.Array ? root : JsonSerializer.SerializeToElement(new[] { root }); + } + else if (okResult.Value is JsonElement jsonElement) + { + responseData["value"] = jsonElement.ValueKind == JsonValueKind.Array ? jsonElement : JsonSerializer.SerializeToElement(new[] { jsonElement }); + } + else + { + // Serialize the value directly + JsonElement serialized = JsonSerializer.SerializeToElement(okResult.Value); + responseData["value"] = serialized; + } + } + else if (queryResult is BadRequestObjectResult badRequest) + { + return McpResponseBuilder.BuildErrorResult( + "BadRequest", + badRequest.Value?.ToString() ?? "Bad request", + logger); + } + else if (queryResult is UnauthorizedObjectResult) + { + return McpResponseBuilder.BuildErrorResult( + "PermissionDenied", + "You do not have permission to execute this entity", + logger); + } + else + { + // Empty or unknown result + responseData["value"] = JsonSerializer.SerializeToElement(Array.Empty()); + } + + return McpResponseBuilder.BuildSuccessResult( + responseData, + logger, + $"ExecuteEntityTool success for entity {entityName}." 
+ ); + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/ReadRecordsTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/ReadRecordsTool.cs new file mode 100644 index 0000000000..db1c761d2f --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/ReadRecordsTool.cs @@ -0,0 +1,422 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Data.Common; +using System.Text.Json; +using Azure.DataApiBuilder.Auth; +using Azure.DataApiBuilder.Config.DatabasePrimitives; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Core.Authorization; +using Azure.DataApiBuilder.Core.Configurations; +using Azure.DataApiBuilder.Core.Models; +using Azure.DataApiBuilder.Core.Parsers; +using Azure.DataApiBuilder.Core.Resolvers; +using Azure.DataApiBuilder.Core.Resolvers.Factories; +using Azure.DataApiBuilder.Core.Services; +using Azure.DataApiBuilder.Core.Services.MetadataProviders; +using Azure.DataApiBuilder.Mcp.Model; +using Azure.DataApiBuilder.Service.Exceptions; +using Microsoft.AspNetCore.Authorization; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using ModelContextProtocol.Protocol; +using static Azure.DataApiBuilder.Mcp.Model.McpEnums; + +namespace Azure.DataApiBuilder.Mcp.BuiltInTools +{ + public class ReadRecordsTool : IMcpTool + { + public ToolType ToolType { get; } = ToolType.BuiltIn; + + public Tool GetToolMetadata() + { + return new Tool + { + Name = "read_records", + Description = "Retrieves records from a given entity.", + InputSchema = JsonSerializer.Deserialize( + @"{ + ""type"": ""object"", + ""properties"": { + ""entity"": { + ""type"": ""string"", + ""description"": ""The name of the entity to read, as provided by the describe_entities tool. 
Required."" + }, + ""select"": { + ""type"": ""string"", + ""description"": ""A comma-separated list of field names to include in the response. If omitted, all fields are returned. Optional."" + }, + ""filter"": { + ""type"": ""string"", + ""description"": ""A case-insensitive OData-like expression that defines a query predicate. Supports logical grouping with parentheses and the operators eq, ne, gt, ge, lt, le, and, or, not. Examples: year ge 1990, date lt 2025-01-01T00:00:00Z, (title eq 'Foundation') and (available ne false). Optional."" + }, + ""first"": { + ""type"": ""integer"", + ""description"": ""The maximum number of records to return in the current page. Optional."" + }, + ""orderby"": { + ""type"": ""array"", + ""items"": { ""type"": ""string"" }, + ""description"": ""A list of field names and directions for sorting, for example 'name asc' or 'year desc'. Optional."" + }, + ""after"": { + ""type"": ""string"", + ""description"": ""A cursor token for retrieving the next page of results. Returned as 'after' in the previous response. Optional."" + } + } + }" + ) + }; + } + + public async Task ExecuteAsync( + JsonDocument? arguments, + IServiceProvider serviceProvider, + CancellationToken cancellationToken = default) + { + ILogger? logger = serviceProvider.GetService>(); + + // Get runtime config + RuntimeConfigProvider runtimeConfigProvider = serviceProvider.GetRequiredService(); + RuntimeConfig runtimeConfig = runtimeConfigProvider.GetConfig(); + + if (runtimeConfig.McpDmlTools?.ReadRecords is not true) + { + return BuildErrorResult( + "ToolDisabled", + "The read_records tool is disabled in the configuration.", + logger); + } + + try + { + cancellationToken.ThrowIfCancellationRequested(); + + string entityName; + string? select = null; + string? filter = null; + int? first = null; + IEnumerable? orderby = null; + string? 
after = null; + + // Extract arguments + if (arguments == null) + { + return BuildErrorResult("InvalidArguments", "No arguments provided.", logger); + } + + JsonElement root = arguments.RootElement; + + if (!root.TryGetProperty("entity", out JsonElement entityElement) || string.IsNullOrWhiteSpace(entityElement.GetString())) + { + return BuildErrorResult("InvalidArguments", "Missing required argument 'entity'.", logger); + } + + entityName = entityElement.GetString()!; + + if (root.TryGetProperty("select", out JsonElement selectElement)) + { + select = selectElement.GetString(); + } + + if (root.TryGetProperty("filter", out JsonElement filterElement)) + { + filter = filterElement.GetString(); + } + + if (root.TryGetProperty("first", out JsonElement firstElement)) + { + first = firstElement.GetInt32(); + } + + if (root.TryGetProperty("orderby", out JsonElement orderbyElement)) + { + orderby = (IEnumerable?)orderbyElement.EnumerateArray().Select(e => e.GetString()); + } + + if (root.TryGetProperty("after", out JsonElement afterElement)) + { + after = afterElement.GetString(); + } + + // Get required services & configuration + IQueryEngineFactory queryEngineFactory = serviceProvider.GetRequiredService(); + IMetadataProviderFactory metadataProviderFactory = serviceProvider.GetRequiredService(); + + // Check metadata for entity exists + string dataSourceName; + ISqlMetadataProvider sqlMetadataProvider; + + try + { + dataSourceName = runtimeConfig.GetDataSourceNameFromEntityName(entityName); + sqlMetadataProvider = metadataProviderFactory.GetMetadataProvider(dataSourceName); + } + catch (Exception) + { + return BuildErrorResult("EntityNotFound", $"Entity '{entityName}' is not defined in the configuration.", logger); + } + + if (!sqlMetadataProvider.EntityToDatabaseObject.TryGetValue(entityName, out DatabaseObject? 
dbObject) || dbObject is null) + { + return BuildErrorResult("EntityNotFound", $"Entity '{entityName}' is not defined in the configuration.", logger); + } + + // Authorization check in the existing entity + IAuthorizationResolver authResolver = serviceProvider.GetRequiredService(); + IAuthorizationService authorizationService = serviceProvider.GetRequiredService(); + IHttpContextAccessor httpContextAccessor = serviceProvider.GetRequiredService(); + HttpContext? httpContext = httpContextAccessor.HttpContext; + + if (httpContext is null || !authResolver.IsValidRoleContext(httpContext)) + { + return BuildErrorResult("PermissionDenied", $"You do not have permission to read records for entity '{entityName}'.", logger); + } + + if (!TryResolveAuthorizedRole(httpContext, authResolver, entityName, out string? effectiveRole, out string authError)) + { + return BuildErrorResult("PermissionDenied", authError, logger); + } + + // Build and validate Find context + RequestValidator requestValidator = new(metadataProviderFactory, runtimeConfigProvider); + FindRequestContext context = new(entityName, dbObject, true); + httpContext.Request.Method = "GET"; + + requestValidator.ValidateEntity(entityName); + + if (!string.IsNullOrWhiteSpace(select)) + { + // Update the context to specify which fields will be returned from the entity. 
+ IEnumerable fieldsReturnedForFind = select.Split(",").ToList(); + context.UpdateReturnFields(fieldsReturnedForFind); + } + + if (!string.IsNullOrWhiteSpace(filter)) + { + string filterQueryString = $"?{RequestParser.FILTER_URL}={filter}"; + context.FilterClauseInUrl = sqlMetadataProvider.GetODataParser().GetFilterClause(filterQueryString, $"{context.EntityName}.{context.DatabaseObject.FullName}"); + } + + if (orderby is not null && orderby.Count() != 0) + { + string sortQueryString = $"?{RequestParser.SORT_URL}="; + foreach (string param in orderby) + { + if (string.IsNullOrWhiteSpace(param)) + { + return BuildErrorResult("InvalidArguments", "Parameters inside 'orderby' argument cannot be empty or null.", logger); + } + + sortQueryString += $"{param}, "; + } + + sortQueryString = sortQueryString.Substring(0, sortQueryString.Length - 2); + (context.OrderByClauseInUrl, context.OrderByClauseOfBackingColumns) = RequestParser.GenerateOrderByLists(context, sqlMetadataProvider, sortQueryString); + } + + context.First = first; + context.After = after; + + // The final authorization check on columns occurs after the request is fully parsed and validated. + requestValidator.ValidateRequestContext(context); + + AuthorizationResult authorizationResult = await authorizationService.AuthorizeAsync( + user: httpContext.User, + resource: context, + requirements: new[] { new ColumnsPermissionsRequirement() }); + if (!authorizationResult.Succeeded) + { + return BuildErrorResult("PermissionDenied", DataApiBuilderException.AUTHORIZATION_FAILURE, logger); + } + + // Execute + IQueryEngine queryEngine = queryEngineFactory.GetQueryEngine(sqlMetadataProvider.GetDatabaseType()); + JsonDocument? queryResult = await queryEngine.ExecuteAsync(context); + IActionResult actionResult = queryResult is null ? 
SqlResponseHelpers.FormatFindResult(JsonDocument.Parse("[]").RootElement.Clone(), context, metadataProviderFactory.GetMetadataProvider(dataSourceName), runtimeConfigProvider.GetConfig(), httpContext, true) + : SqlResponseHelpers.FormatFindResult(queryResult.RootElement.Clone(), context, metadataProviderFactory.GetMetadataProvider(dataSourceName), runtimeConfigProvider.GetConfig(), httpContext, true); + + // Normalize response + string rawPayloadJson = ExtractResultJson(actionResult); + JsonDocument result = JsonDocument.Parse(rawPayloadJson); + JsonElement queryRoot = result.RootElement; + + return BuildSuccessResult( + entityName, + queryRoot.Clone(), + logger); + } + catch (OperationCanceledException) + { + return BuildErrorResult("OperationCanceled", "The read operation was canceled.", logger); + } + catch (DbException argEx) + { + return BuildErrorResult("DatabaseOperationFailed", argEx.Message, logger); + } + catch (ArgumentException argEx) + { + return BuildErrorResult("InvalidArguments", argEx.Message, logger); + } + catch (DataApiBuilderException argEx) + { + return BuildErrorResult(argEx.StatusCode.ToString(), argEx.Message, logger); + } + catch (Exception) + { + return BuildErrorResult("UnexpectedError", "Unexpected error occurred in ReadRecordsTool.", logger); + } + } + + /// + /// Ensures that the role used on the request has the necessary authorizations. + /// + /// Contains request headers and metadata of the user. + /// Resolver used to check if role has necessary authorizations. + /// Name of the entity used in the request. + /// Role defined in client role header. + /// Error message given to the user. + /// True if the user role is authorized, along with the role. + private static bool TryResolveAuthorizedRole( + HttpContext httpContext, + IAuthorizationResolver authorizationResolver, + string entityName, + out string? 
effectiveRole, + out string error) + { + effectiveRole = null; + error = string.Empty; + + string roleHeader = httpContext.Request.Headers[AuthorizationResolver.CLIENT_ROLE_HEADER].ToString(); + + if (string.IsNullOrWhiteSpace(roleHeader)) + { + error = $"Client role header '{AuthorizationResolver.CLIENT_ROLE_HEADER}' is missing or empty."; + return false; + } + + string[] roles = roleHeader + .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + + if (roles.Length == 0) + { + error = $"Client role header '{AuthorizationResolver.CLIENT_ROLE_HEADER}' is missing or empty."; + return false; + } + + foreach (string role in roles) + { + bool allowed = authorizationResolver.AreRoleAndOperationDefinedForEntity( + entityName, role, EntityActionOperation.Read); + + if (allowed) + { + effectiveRole = role; + return true; + } + } + + error = $"You do not have permission to read records for entity '{entityName}'."; + return false; + } + + /// + /// Returns a result from the query in the case that it was successfully ran. + /// + /// Name of the entity used in the request. + /// Query result from engine. + /// MCP logger that returns all logged events. + private static CallToolResult BuildSuccessResult( + string entityName, + JsonElement engineRootElement, + ILogger? logger) + { + // Build normalized response + Dictionary normalized = new() + { + ["status"] = "success", + ["result"] = engineRootElement // only requested values + }; + + string output = JsonSerializer.Serialize(normalized, new JsonSerializerOptions { WriteIndented = true }); + + logger?.LogInformation("ReadRecordsTool success for entity {Entity}.", entityName); + + return new CallToolResult + { + Content = new List + { + new TextContentBlock { Type = "text", Text = output } + } + }; + } + + /// + /// Returns an error if the query failed to run at any point. + /// + /// Type of error that is encountered. 
+ /// Error message given to the user. + /// MCP logger that returns all logged events. + private static CallToolResult BuildErrorResult( + string errorType, + string message, + ILogger? logger) + { + Dictionary errorObj = new() + { + ["status"] = "error", + ["error"] = new Dictionary + { + ["type"] = errorType, + ["message"] = message + } + }; + + string output = JsonSerializer.Serialize(errorObj); + + logger?.LogError("ReadRecordsTool error {ErrorType}: {Message}", errorType, message); + + return new CallToolResult + { + Content = + [ + new TextContentBlock { Type = "text", Text = output } + ], + IsError = true + }; + } + + /// + /// Extracts a JSON string from a typical IActionResult. + /// Falls back to "{}" for unsupported/empty cases to avoid leaking internals. + /// + private static string ExtractResultJson(IActionResult? result) + { + switch (result) + { + case ObjectResult obj: + if (obj.Value is JsonElement je) + { + return je.GetRawText(); + } + + if (obj.Value is JsonDocument jd) + { + return jd.RootElement.GetRawText(); + } + + return JsonSerializer.Serialize(obj.Value ?? new object()); + + case ContentResult content: + return string.IsNullOrWhiteSpace(content.Content) ? "{}" : content.Content; + + default: + return "{}"; + } + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/BuiltInTools/UpdateRecordTool.cs b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/UpdateRecordTool.cs new file mode 100644 index 0000000000..195a758454 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/BuiltInTools/UpdateRecordTool.cs @@ -0,0 +1,472 @@ +// Copyright (c) Microsoft. +// Licensed under the MIT License. 
+ +using System.Text.Json; +using Azure.DataApiBuilder.Auth; +using Azure.DataApiBuilder.Config.DatabasePrimitives; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Core.Authorization; +using Azure.DataApiBuilder.Core.Configurations; +using Azure.DataApiBuilder.Core.Models; +using Azure.DataApiBuilder.Core.Resolvers; +using Azure.DataApiBuilder.Core.Resolvers.Factories; +using Azure.DataApiBuilder.Core.Services; +using Azure.DataApiBuilder.Core.Services.MetadataProviders; +using Azure.DataApiBuilder.Mcp.Model; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using ModelContextProtocol.Protocol; +using static Azure.DataApiBuilder.Mcp.Model.McpEnums; + +namespace Azure.DataApiBuilder.Mcp.BuiltInTools +{ + /// + /// Updates an existing record in the specified entity using provided keys (PKs) and fields (new values). + /// Input schema: + /// { + /// "entity": "EntityName", + /// "keys": { "Id": 42, "TenantId": "ABC" }, + /// "fields": { "Status": "Closed", "Comment": "Done" } + /// } + /// + public class UpdateRecordTool : IMcpTool + { + /// + /// Gets the type of the tool, which is BuiltIn for this implementation. + /// + public ToolType ToolType { get; } = ToolType.BuiltIn; + + /// + /// Gets the metadata for the update_record tool, including its name, description, and input schema. + /// + public Tool GetToolMetadata() + { + return new Tool + { + Name = "update_record", + Description = "Updates an existing record in the specified entity. 
Requires 'keys' to locate the record and 'fields' to specify new values.", + InputSchema = JsonSerializer.Deserialize( + @"{ + ""type"": ""object"", + ""properties"": { + ""entity"": { + ""type"": ""string"", + ""description"": ""The name of the entity"" + }, + ""keys"": { + ""type"": ""object"", + ""description"": ""Key fields and their values to identify the record"" + }, + ""fields"": { + ""type"": ""object"", + ""description"": ""Fields and their new values to update"" + } + }, + ""required"": [""entity"", ""keys"", ""fields""] + }" + ) + }; + } + + /// + /// Executes the update_record tool, updating an existing record in the specified entity using provided keys and fields. + /// + /// The JSON arguments containing entity, keys, and fields. + /// The service provider for resolving dependencies. + /// A token to cancel the operation. + /// A representing the outcome of the update operation. + public async Task ExecuteAsync( + JsonDocument? arguments, + IServiceProvider serviceProvider, + CancellationToken cancellationToken = default) + { + ILogger? logger = serviceProvider.GetService>(); + + // 1) Resolve required services & configuration + + RuntimeConfigProvider runtimeConfigProvider = serviceProvider.GetRequiredService(); + RuntimeConfig config = runtimeConfigProvider.GetConfig(); + + // 2)Check if the tool is enabled in configuration before proceeding. 
+ if (config.McpDmlTools?.UpdateRecord != true) + { + return BuildErrorResult( + "ToolDisabled", + "The update_record tool is disabled in the configuration.", + logger); + } + + try + { + + cancellationToken.ThrowIfCancellationRequested(); + + // 3) Parsing & basic argument validation (entity, keys, fields) + if (arguments is null) + { + return BuildErrorResult("InvalidArguments", "No arguments provided.", logger); + } + + if (!TryParseArguments(arguments.RootElement, out string entityName, out Dictionary keys, out Dictionary fields, out string parseError)) + { + return BuildErrorResult("InvalidArguments", parseError, logger); + } + + IMetadataProviderFactory metadataProviderFactory = serviceProvider.GetRequiredService(); + IMutationEngineFactory mutationEngineFactory = serviceProvider.GetRequiredService(); + + // 4) Resolve metadata for entity existence check + string dataSourceName; + ISqlMetadataProvider sqlMetadataProvider; + + try + { + dataSourceName = config.GetDataSourceNameFromEntityName(entityName); + sqlMetadataProvider = metadataProviderFactory.GetMetadataProvider(dataSourceName); + } + catch (Exception) + { + return BuildErrorResult("EntityNotFound", $"Entity '{entityName}' is not defined in the configuration.", logger); + } + + if (!sqlMetadataProvider.EntityToDatabaseObject.TryGetValue(entityName, out DatabaseObject? dbObject) || dbObject is null) + { + return BuildErrorResult("EntityNotFound", $"Entity '{entityName}' is not defined in the configuration.", logger); + } + + // 5) Authorization after we have a known entity + IHttpContextAccessor httpContextAccessor = serviceProvider.GetRequiredService(); + HttpContext? 
httpContext = httpContextAccessor.HttpContext; + IAuthorizationResolver authResolver = serviceProvider.GetRequiredService(); + + if (httpContext is null || !authResolver.IsValidRoleContext(httpContext)) + { + return BuildErrorResult("PermissionDenied", "Permission denied: unable to resolve a valid role context for update operation.", logger); + } + + if (!TryResolveAuthorizedRoleHasPermission(httpContext, authResolver, entityName, out string? effectiveRole, out string authError)) + { + return BuildErrorResult("PermissionDenied", $"Permission denied: {authError}", logger); + } + + // 6) Build and validate Upsert (UpdateIncremental) context + JsonElement upsertPayloadRoot = RequestValidator.ValidateAndParseRequestBody(JsonSerializer.Serialize(fields)); + RequestValidator requestValidator = new(metadataProviderFactory, runtimeConfigProvider); + + UpsertRequestContext context = new( + entityName: entityName, + dbo: dbObject, + insertPayloadRoot: upsertPayloadRoot, + operationType: EntityActionOperation.UpdateIncremental); + + foreach (KeyValuePair kvp in keys) + { + if (kvp.Value is null) + { + return BuildErrorResult("InvalidArguments", $"Primary key value for '{kvp.Key}' cannot be null.", logger); + } + + context.PrimaryKeyValuePairs[kvp.Key] = kvp.Value; + } + + if (context.DatabaseObject.SourceType is EntitySourceType.Table) + { + requestValidator.ValidateUpsertRequestContext(context); + } + + requestValidator.ValidatePrimaryKey(context); + + // 7) Execute + DatabaseType dbType = config.GetDataSourceFromDataSourceName(dataSourceName).DatabaseType; + IMutationEngine mutationEngine = mutationEngineFactory.GetMutationEngine(dbType); + + IActionResult? mutationResult = null; + try + { + mutationResult = await mutationEngine.ExecuteAsync(context).ConfigureAwait(false); + } + catch (Exception ex) + { + string errorMsg = ex.Message ?? 
string.Empty; + + if (errorMsg.Contains("No Update could be performed, record not found", StringComparison.OrdinalIgnoreCase)) + { + return BuildErrorResult( + "InvalidArguments", + "No record found with the given key.", + logger); + } + else + { + // Unexpected error, rethrow to be handled by outer catch + throw; + } + } + + cancellationToken.ThrowIfCancellationRequested(); + + // 8) Normalize response (success or engine error payload) + string rawPayloadJson = ExtractResultJson(mutationResult); + using JsonDocument resultDoc = JsonDocument.Parse(rawPayloadJson); + JsonElement root = resultDoc.RootElement; + + return BuildSuccessResult( + entityName: entityName, + engineRootElement: root.Clone(), + logger: logger); + } + catch (OperationCanceledException) + { + return BuildErrorResult("OperationCanceled", "The update operation was canceled.", logger); + } + catch (ArgumentException argEx) + { + return BuildErrorResult("InvalidArguments", argEx.Message, logger); + } + catch (Exception ex) + { + ILogger? innerLogger = serviceProvider.GetService>(); + innerLogger?.LogError(ex, "Unexpected error in UpdateRecordTool."); + + return BuildErrorResult( + "UnexpectedError", + ex.Message ?? "An unexpected error occurred during the update operation.", + logger); + } + } + + #region Parsing & Authorization + + private static bool TryParseArguments( + JsonElement root, + out string entityName, + out Dictionary keys, + out Dictionary fields, + out string error) + { + entityName = string.Empty; + keys = new Dictionary(); + fields = new Dictionary(); + error = string.Empty; + + if (!root.TryGetProperty("entity", out JsonElement entityEl) || + !root.TryGetProperty("keys", out JsonElement keysEl) || + !root.TryGetProperty("fields", out JsonElement fieldsEl)) + { + error = "Missing required arguments 'entity', 'keys', or 'fields'."; + return false; + } + + // Parse and validate required arguments: entity, keys, fields + entityName = entityEl.GetString() ?? 
string.Empty; + if (string.IsNullOrWhiteSpace(entityName)) + { + throw new ArgumentException("Entity is required", nameof(entityName)); + } + + if (keysEl.ValueKind != JsonValueKind.Object || fieldsEl.ValueKind != JsonValueKind.Object) + { + throw new ArgumentException("'keys' and 'fields' must be JSON objects."); + } + + try + { + keys = JsonSerializer.Deserialize>(keysEl.GetRawText()) ?? new Dictionary(); + fields = JsonSerializer.Deserialize>(fieldsEl.GetRawText()) ?? new Dictionary(); + } + catch (Exception ex) + { + throw new ArgumentException("Failed to parse 'keys' or 'fields'", ex); + } + + if (keys.Count == 0) + { + throw new ArgumentException("Keys are required to update an entity"); + } + + if (fields.Count == 0) + { + throw new ArgumentException("At least one field must be provided to update an entity", nameof(fields)); + } + + foreach (KeyValuePair kv in keys) + { + if (kv.Value is null || (kv.Value is string str && string.IsNullOrWhiteSpace(str))) + { + throw new ArgumentException($"Key value for '{kv.Key}' cannot be null or empty."); + } + } + + return true; + } + + private static bool TryResolveAuthorizedRoleHasPermission( + HttpContext httpContext, + IAuthorizationResolver authorizationResolver, + string entityName, + out string? 
effectiveRole, + out string error) + { + effectiveRole = null; + error = string.Empty; + + string roleHeader = httpContext.Request.Headers[AuthorizationResolver.CLIENT_ROLE_HEADER].ToString(); + + if (string.IsNullOrWhiteSpace(roleHeader)) + { + error = "Client role header is missing or empty."; + return false; + } + + string[] roles = roleHeader + .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + + if (roles.Length == 0) + { + error = "Client role header is missing or empty."; + return false; + } + + foreach (string role in roles) + { + bool allowed = authorizationResolver.AreRoleAndOperationDefinedForEntity( + entityName, role, EntityActionOperation.Update); + + if (allowed) + { + effectiveRole = role; + return true; + } + } + + error = "You do not have permission to update records for this entity."; + return false; + } + + #endregion + + #region Response Builders & Utilities + + private static CallToolResult BuildSuccessResult( + string entityName, + JsonElement engineRootElement, + ILogger? 
logger) + { + // Extract only requested keys and updated fields from engineRootElement + Dictionary filteredResult = new(); + + // Navigate to "value" array in the engine result + if (engineRootElement.TryGetProperty("value", out JsonElement valueArray) && + valueArray.ValueKind == JsonValueKind.Array && + valueArray.GetArrayLength() > 0) + { + JsonElement firstItem = valueArray[0]; + + // Include all properties from the result + foreach (JsonProperty prop in firstItem.EnumerateObject()) + { + filteredResult[prop.Name] = GetJsonValue(prop.Value); + } + } + + // Build normalized response + Dictionary normalized = new() + { + ["status"] = "success", + ["result"] = filteredResult + }; + + string output = JsonSerializer.Serialize(normalized, new JsonSerializerOptions { WriteIndented = true }); + + logger?.LogInformation("UpdateRecordTool success for entity {Entity}.", entityName); + + return new CallToolResult + { + Content = new List + { + new TextContentBlock { Type = "text", Text = output } + } + }; + } + + /// + /// Converts JsonElement to .NET object dynamically. + /// + private static object? GetJsonValue(JsonElement element) + { + return element.ValueKind switch + { + JsonValueKind.String => element.GetString(), + JsonValueKind.Number => element.TryGetInt64(out long l) ? l : element.GetDouble(), + JsonValueKind.True => true, + JsonValueKind.False => false, + JsonValueKind.Null => null, + _ => element.GetRawText() // fallback for arrays/objects + }; + } + + private static CallToolResult BuildErrorResult( + string errorType, + string message, + ILogger? 
logger) + { + Dictionary errorObj = new() + { + ["status"] = "error", + ["error"] = new Dictionary + { + ["type"] = errorType, + ["message"] = message + } + }; + + string output = JsonSerializer.Serialize(errorObj); + + logger?.LogWarning("UpdateRecordTool error {ErrorType}: {Message}", errorType, message); + + return new CallToolResult + { + Content = + [ + new TextContentBlock { Type = "text", Text = output } + ], + IsError = true + }; + } + + /// + /// Extracts a JSON string from a typical IActionResult. + /// Falls back to "{}" for unsupported/empty cases to avoid leaking internals. + /// + private static string ExtractResultJson(IActionResult? result) + { + switch (result) + { + case ObjectResult obj: + if (obj.Value is JsonElement je) + { + return je.GetRawText(); + } + + if (obj.Value is JsonDocument jd) + { + return jd.RootElement.GetRawText(); + } + + return JsonSerializer.Serialize(obj.Value ?? new object()); + + case ContentResult content: + return string.IsNullOrWhiteSpace(content.Content) ? "{}" : content.Content; + + default: + return "{}"; + } + } + + #endregion + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Core/McpEndpointRouteBuilderExtensions.cs b/src/Azure.DataApiBuilder.Mcp/Core/McpEndpointRouteBuilderExtensions.cs new file mode 100644 index 0000000000..6401e17e22 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Core/McpEndpointRouteBuilderExtensions.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Diagnostics.CodeAnalysis; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Core.Configurations; +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Routing; + +namespace Azure.DataApiBuilder.Mcp.Core +{ + /// + /// Extension methods for mapping MCP endpoints to an . + /// + public static class McpEndpointRouteBuilderExtensions + { + /// + /// Maps the MCP endpoint to the specified if MCP is enabled in the runtime configuration. 
+ /// + public static IEndpointRouteBuilder MapDabMcp( + this IEndpointRouteBuilder endpoints, + RuntimeConfigProvider runtimeConfigProvider, + [StringSyntax("Route")] string pattern = "") + { + if (!TryGetMcpOptions(runtimeConfigProvider, out McpRuntimeOptions? mcpOptions) || mcpOptions == null || !mcpOptions.Enabled) + { + return endpoints; + } + + string mcpPath = mcpOptions.Path ?? McpRuntimeOptions.DEFAULT_PATH; + + // Map the MCP endpoint + endpoints.MapMcp(mcpPath); + + return endpoints; + } + + /// + /// Gets MCP options from the runtime configuration + /// + /// Runtime config provider + /// MCP options + /// True if MCP options were found, false otherwise + private static bool TryGetMcpOptions(RuntimeConfigProvider runtimeConfigProvider, out McpRuntimeOptions? mcpOptions) + { + mcpOptions = null; + + if (!runtimeConfigProvider.TryGetConfig(out RuntimeConfig? runtimeConfig)) + { + return false; + } + + mcpOptions = runtimeConfig?.Runtime?.Mcp; + return mcpOptions != null; + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Core/McpServerConfiguration.cs b/src/Azure.DataApiBuilder.Mcp/Core/McpServerConfiguration.cs new file mode 100644 index 0000000000..86cccd2aaf --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Core/McpServerConfiguration.cs @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +using System.Text.Json; +using Azure.DataApiBuilder.Mcp.Model; +using Microsoft.Extensions.DependencyInjection; +using ModelContextProtocol; +using ModelContextProtocol.Protocol; + +namespace Azure.DataApiBuilder.Mcp.Core +{ + /// + /// Configuration for MCP server capabilities and handlers + /// + internal static class McpServerConfiguration + { + /// + /// Configures the MCP server with tool capabilities + /// + internal static IServiceCollection ConfigureMcpServer(this IServiceCollection services) + { + services.AddMcpServer(options => + { + options.ServerInfo = new() { Name = "Data API builder MCP Server", Version = "1.0.0" }; + options.Capabilities = new() + { + Tools = new() + { + ListToolsHandler = (request, ct) => + { + McpToolRegistry? toolRegistry = request.Services?.GetRequiredService(); + if (toolRegistry == null) + { + throw new InvalidOperationException("Tool registry is not available."); + } + + List tools = toolRegistry.GetAllTools().ToList(); + + return ValueTask.FromResult(new ListToolsResult + { + Tools = tools + }); + }, + CallToolHandler = async (request, ct) => + { + McpToolRegistry? toolRegistry = request.Services?.GetRequiredService(); + if (toolRegistry == null) + { + throw new InvalidOperationException("Tool registry is not available."); + } + + string? toolName = request.Params?.Name; + if (string.IsNullOrEmpty(toolName)) + { + throw new McpException("Tool name is required."); + } + + if (!toolRegistry.TryGetTool(toolName, out IMcpTool? tool)) + { + throw new McpException($"Unknown tool: '{toolName}'"); + } + + JsonDocument? 
arguments = null; + if (request.Params?.Arguments != null) + { + // Convert IReadOnlyDictionary to JsonDocument + Dictionary jsonObject = new(); + foreach (KeyValuePair kvp in request.Params.Arguments) + { + jsonObject[kvp.Key] = kvp.Value; + } + + string json = JsonSerializer.Serialize(jsonObject); + arguments = JsonDocument.Parse(json); + } + + try + { + return await tool!.ExecuteAsync(arguments, request.Services!, ct); + } + finally + { + arguments?.Dispose(); + } + } + } + }; + }) + .WithHttpTransport(); + + return services; + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Core/McpServiceCollectionExtensions.cs b/src/Azure.DataApiBuilder.Mcp/Core/McpServiceCollectionExtensions.cs new file mode 100644 index 0000000000..01f6015786 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Core/McpServiceCollectionExtensions.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Reflection; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Core.Configurations; +using Azure.DataApiBuilder.Mcp.Model; +using Microsoft.Extensions.DependencyInjection; + +namespace Azure.DataApiBuilder.Mcp.Core +{ + /// + /// Extension methods for configuring MCP services in the DI container + /// + public static class McpServiceCollectionExtensions + { + /// + /// Adds MCP server and related services to the service collection + /// + public static IServiceCollection AddDabMcpServer(this IServiceCollection services, RuntimeConfigProvider runtimeConfigProvider) + { + if (!runtimeConfigProvider.TryGetConfig(out RuntimeConfig? 
runtimeConfig)) + { + // If config is not available, skip MCP setup + return services; + } + + // Only add MCP server if it's enabled in the configuration + if (!runtimeConfig.IsMcpEnabled) + { + return services; + } + + // Register core MCP services + services.AddSingleton(); + services.AddHostedService(); + + // Auto-discover and register all MCP tools + RegisterAllMcpTools(services); + + // Configure MCP server + services.ConfigureMcpServer(); + + return services; + } + + /// + /// Automatically discovers and registers all classes implementing IMcpTool + /// + private static void RegisterAllMcpTools(IServiceCollection services) + { + Assembly mcpAssembly = typeof(IMcpTool).Assembly; + + IEnumerable toolTypes = mcpAssembly.GetTypes() + .Where(t => t.IsClass && + !t.IsAbstract && + typeof(IMcpTool).IsAssignableFrom(t)); + + foreach (Type toolType in toolTypes) + { + services.AddSingleton(typeof(IMcpTool), toolType); + } + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistry.cs b/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistry.cs new file mode 100644 index 0000000000..9c9b96d72b --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistry.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using Azure.DataApiBuilder.Mcp.Model; +using ModelContextProtocol.Protocol; + +namespace Azure.DataApiBuilder.Mcp.Core +{ + /// + /// Registry for managing MCP tools + /// + public class McpToolRegistry + { + private readonly Dictionary _tools = new(); + + /// + /// Registers a tool in the registry + /// + public void RegisterTool(IMcpTool tool) + { + Tool metadata = tool.GetToolMetadata(); + _tools[metadata.Name] = tool; + } + + /// + /// Gets all registered tools + /// + public IEnumerable GetAllTools() + { + return _tools.Values.Select(t => t.GetToolMetadata()); + } + + /// + /// Tries to get a tool by name + /// + public bool TryGetTool(string toolName, out IMcpTool? 
tool) + { + return _tools.TryGetValue(toolName, out tool); + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistryInitializer.cs b/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistryInitializer.cs new file mode 100644 index 0000000000..97d0dac7f3 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Core/McpToolRegistryInitializer.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using Azure.DataApiBuilder.Mcp.Model; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; + +namespace Azure.DataApiBuilder.Mcp.Core +{ + /// + /// Hosted service to initialize the MCP tool registry + /// + public class McpToolRegistryInitializer : IHostedService + { + private readonly IServiceProvider _serviceProvider; + private readonly McpToolRegistry _toolRegistry; + + public McpToolRegistryInitializer(IServiceProvider serviceProvider, McpToolRegistry toolRegistry) + { + _serviceProvider = serviceProvider; + _toolRegistry = toolRegistry; + } + + public Task StartAsync(CancellationToken cancellationToken) + { + // Register all IMcpTool implementations + IEnumerable tools = _serviceProvider.GetServices(); + foreach (IMcpTool tool in tools) + { + _toolRegistry.RegisterTool(tool); + } + + return Task.CompletedTask; + } + + public Task StopAsync(CancellationToken cancellationToken) + { + return Task.CompletedTask; + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Model/Enums.cs b/src/Azure.DataApiBuilder.Mcp/Model/Enums.cs new file mode 100644 index 0000000000..84ca49e1b0 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Model/Enums.cs @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +namespace Azure.DataApiBuilder.Mcp.Model +{ + public class McpEnums + { + /// + /// Specifies the type of tool. + /// + /// This enumeration defines whether a tool is a built-in tool provided by the system or + /// a custom tool defined by the user. 
+ public enum ToolType + { + BuiltIn, + Custom + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Model/IMcpTool.cs b/src/Azure.DataApiBuilder.Mcp/Model/IMcpTool.cs new file mode 100644 index 0000000000..bbee6a9304 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Model/IMcpTool.cs @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json; +using ModelContextProtocol.Protocol; +using static Azure.DataApiBuilder.Mcp.Model.McpEnums; + +namespace Azure.DataApiBuilder.Mcp.Model +{ + /// + /// Interface for MCP tool implementations + /// + public interface IMcpTool + { + /// + /// Gets the type of the tool. + /// + ToolType ToolType { get; } + + /// + /// Gets the tool metadata + /// + Tool GetToolMetadata(); + + /// + /// Executes the tool with the provided arguments + /// + /// The JSON arguments passed to the tool + /// The service provider for resolving dependencies + /// Cancellation token + /// The tool execution result + Task ExecuteAsync( + JsonDocument? arguments, + IServiceProvider serviceProvider, + CancellationToken cancellationToken = default); + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Utils/McpArgumentParser.cs b/src/Azure.DataApiBuilder.Mcp/Utils/McpArgumentParser.cs new file mode 100644 index 0000000000..04d14eb5d6 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Utils/McpArgumentParser.cs @@ -0,0 +1,127 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json; + +namespace Azure.DataApiBuilder.Mcp.Utils +{ + /// + /// Utility class for parsing MCP tool arguments. + /// + public static class McpArgumentParser + { + /// + /// Parses entity and keys arguments for delete/update operations. 
+ /// + public static bool TryParseEntityAndKeys( + JsonElement root, + out string entityName, + out Dictionary keys, + out string error) + { + entityName = string.Empty; + keys = new Dictionary(); + error = string.Empty; + + if (!root.TryGetProperty("entity", out JsonElement entityEl) || + !root.TryGetProperty("keys", out JsonElement keysEl)) + { + error = "Missing required arguments 'entity' or 'keys'."; + return false; + } + + // Parse and validate entity name + entityName = entityEl.GetString() ?? string.Empty; + if (string.IsNullOrWhiteSpace(entityName)) + { + error = "Entity is required"; + return false; + } + + // Parse and validate keys + if (keysEl.ValueKind != JsonValueKind.Object) + { + error = "'keys' must be a JSON object."; + return false; + } + + try + { + keys = JsonSerializer.Deserialize>(keysEl) ?? new Dictionary(); + } + catch (Exception ex) + { + error = $"Failed to parse 'keys': {ex.Message}"; + return false; + } + + if (keys.Count == 0) + { + error = "Keys are required"; + return false; + } + + // Validate key values + foreach (KeyValuePair kv in keys) + { + if (kv.Value is null || (kv.Value is string str && string.IsNullOrWhiteSpace(str))) + { + error = $"Primary key value for '{kv.Key}' cannot be null or empty"; + return false; + } + } + + return true; + } + + /// + /// Parses entity, keys, and fields arguments for update operations. 
+ /// + public static bool TryParseEntityKeysAndFields( + JsonElement root, + out string entityName, + out Dictionary keys, + out Dictionary fields, + out string error) + { + fields = new Dictionary(); + + // First parse entity and keys + if (!TryParseEntityAndKeys(root, out entityName, out keys, out error)) + { + return false; + } + + // Then parse fields + if (!root.TryGetProperty("fields", out JsonElement fieldsEl)) + { + error = "Missing required argument 'fields'."; + return false; + } + + if (fieldsEl.ValueKind != JsonValueKind.Object) + { + error = "'fields' must be a JSON object."; + return false; + } + + try + { + fields = JsonSerializer.Deserialize>(fieldsEl) ?? new Dictionary(); + } + catch (Exception ex) + { + error = $"Failed to parse 'fields': {ex.Message}"; + return false; + } + + if (fields.Count == 0) + { + error = "At least one field must be provided"; + return false; + } + + return true; + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Utils/McpAuthorizationHelper.cs b/src/Azure.DataApiBuilder.Mcp/Utils/McpAuthorizationHelper.cs new file mode 100644 index 0000000000..1fdf7d45d3 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Utils/McpAuthorizationHelper.cs @@ -0,0 +1,84 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using Azure.DataApiBuilder.Auth; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Core.Authorization; +using Microsoft.AspNetCore.Http; + +namespace Azure.DataApiBuilder.Mcp.Utils +{ + /// + /// Helper class for MCP tool authorization operations. + /// + public static class McpAuthorizationHelper + { + /// + /// Validates if the current request has a valid role context. + /// + public static bool ValidateRoleContext( + HttpContext? 
httpContext, + IAuthorizationResolver authResolver, + out string error) + { + error = string.Empty; + + if (httpContext is null || !authResolver.IsValidRoleContext(httpContext)) + { + error = "Unable to resolve a valid role context"; + return false; + } + + return true; + } + + /// + /// Tries to resolve an authorized role for the given entity and operation. + /// + public static bool TryResolveAuthorizedRole( + HttpContext httpContext, + IAuthorizationResolver authorizationResolver, + string entityName, + EntityActionOperation operation, + out string? effectiveRole, + out string error) + { + effectiveRole = null; + error = string.Empty; + + string roleHeader = httpContext.Request.Headers[AuthorizationResolver.CLIENT_ROLE_HEADER].ToString(); + + if (string.IsNullOrWhiteSpace(roleHeader)) + { + error = "Client role header is missing or empty."; + return false; + } + + string[] roles = roleHeader + .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + + if (roles.Length == 0) + { + error = "Client role header is missing or empty."; + return false; + } + + foreach (string role in roles) + { + bool allowed = authorizationResolver.AreRoleAndOperationDefinedForEntity( + entityName, role, operation); + + if (allowed) + { + effectiveRole = role; + return true; + } + } + + error = $"You do not have permission to perform {operation} operation for this entity."; + return false; + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Utils/McpJsonHelper.cs b/src/Azure.DataApiBuilder.Mcp/Utils/McpJsonHelper.cs new file mode 100644 index 0000000000..b3d3f11492 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Utils/McpJsonHelper.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json; + +namespace Azure.DataApiBuilder.Mcp.Utils +{ + /// + /// Helper methods for JSON operations in MCP tools. 
+ /// + public static class McpJsonHelper + { + /// + /// Converts JsonElement to .NET object dynamically. + /// + public static object? GetJsonValue(JsonElement element) + { + return element.ValueKind switch + { + JsonValueKind.String => element.GetString(), + JsonValueKind.Number => + // Try to get as decimal first for maximum precision + element.TryGetDecimal(out decimal d) ? d : + element.TryGetInt64(out long l) ? l : + element.GetDouble(), + JsonValueKind.True => true, + JsonValueKind.False => false, + JsonValueKind.Null => null, + _ => element.GetRawText() // fallback for arrays/objects + }; + } + + /// + /// Extracts values from a JSON value array typically returned by DAB engine. + /// + public static Dictionary ExtractValuesFromEngineResult(JsonElement engineRootElement) + { + Dictionary resultData = new(); + + // Navigate to "value" array in the engine result + if (engineRootElement.TryGetProperty("value", out JsonElement valueArray) && + valueArray.ValueKind == JsonValueKind.Array && + valueArray.GetArrayLength() > 0) + { + JsonElement firstItem = valueArray[0]; + + // Include all properties from the result + foreach (JsonProperty prop in firstItem.EnumerateObject()) + { + resultData[prop.Name] = GetJsonValue(prop.Value); + } + } + + return resultData; + } + + /// + /// Creates a formatted key details string from a dictionary of key-value pairs. + /// + public static string FormatKeyDetails(Dictionary keys) + { + return string.Join(", ", keys.Select(k => $"{k.Key}={k.Value}")); + } + } +} diff --git a/src/Azure.DataApiBuilder.Mcp/Utils/McpResponseBuilder.cs b/src/Azure.DataApiBuilder.Mcp/Utils/McpResponseBuilder.cs new file mode 100644 index 0000000000..afbccbda38 --- /dev/null +++ b/src/Azure.DataApiBuilder.Mcp/Utils/McpResponseBuilder.cs @@ -0,0 +1,103 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +using System.Text.Json; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Logging; +using ModelContextProtocol.Protocol; + +namespace Azure.DataApiBuilder.Mcp.Utils +{ + /// + /// Utility class for building standardized MCP tool responses. + /// + public static class McpResponseBuilder + { + /// + /// Builds a success response for MCP tools. + /// + public static CallToolResult BuildSuccessResult( + Dictionary responseData, + ILogger? logger = null, + string? logMessage = null) + { + responseData["status"] = "success"; + + string output = JsonSerializer.Serialize(responseData, new JsonSerializerOptions { WriteIndented = true }); + + if (logger != null && !string.IsNullOrEmpty(logMessage)) + { + logger.LogInformation(logMessage); + } + + return new CallToolResult + { + Content = new List + { + new TextContentBlock { Type = "text", Text = output } + } + }; + } + + /// + /// Builds an error response for MCP tools. + /// + public static CallToolResult BuildErrorResult( + string errorType, + string message, + ILogger? logger = null) + { + Dictionary errorObj = new() + { + ["status"] = "error", + ["error"] = new Dictionary + { + ["type"] = errorType, + ["message"] = message + } + }; + + string output = JsonSerializer.Serialize(errorObj, new JsonSerializerOptions { WriteIndented = true }); + + logger?.LogWarning("MCP Tool error {ErrorType}: {Message}", errorType, message); + + return new CallToolResult + { + Content = new List + { + new TextContentBlock { Type = "text", Text = output } + }, + IsError = true + }; + } + + /// + /// Extracts a JSON string from a typical IActionResult. + /// Falls back to "{}" for unsupported/empty cases to avoid leaking internals. + /// + public static string ExtractResultJson(IActionResult? 
result) + { + switch (result) + { + case ObjectResult obj: + if (obj.Value is JsonElement je) + { + return je.GetRawText(); + } + + if (obj.Value is JsonDocument jd) + { + return jd.RootElement.GetRawText(); + } + + return JsonSerializer.Serialize(obj.Value ?? new object()); + + case ContentResult content: + return string.IsNullOrWhiteSpace(content.Content) ? "{}" : content.Content; + + default: + return "{}"; + } + } + } +} diff --git a/src/Azure.DataApiBuilder.sln b/src/Azure.DataApiBuilder.sln index e7f61fa3ed..aa3c8e2bad 100644 --- a/src/Azure.DataApiBuilder.sln +++ b/src/Azure.DataApiBuilder.sln @@ -31,6 +31,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Azure.DataApiBuilder.Core", EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Azure.DataApiBuilder.Product", "Product\Azure.DataApiBuilder.Product.csproj", "{E3D2076C-EE49-43A0-8F92-5FC41EC99DA7}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Azure.DataApiBuilder.Mcp", "Azure.DataApiBuilder.Mcp\Azure.DataApiBuilder.Mcp.csproj", "{A287E849-A043-4F37-BC40-A87C4705F583}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -73,6 +75,10 @@ Global {E3D2076C-EE49-43A0-8F92-5FC41EC99DA7}.Debug|Any CPU.Build.0 = Debug|Any CPU {E3D2076C-EE49-43A0-8F92-5FC41EC99DA7}.Release|Any CPU.ActiveCfg = Release|Any CPU {E3D2076C-EE49-43A0-8F92-5FC41EC99DA7}.Release|Any CPU.Build.0 = Release|Any CPU + {A287E849-A043-4F37-BC40-A87C4705F583}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A287E849-A043-4F37-BC40-A87C4705F583}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A287E849-A043-4F37-BC40-A87C4705F583}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A287E849-A043-4F37-BC40-A87C4705F583}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/src/Cli.Tests/AddEntityTests.cs b/src/Cli.Tests/AddEntityTests.cs index 5dc218dcc6..9386916f7f 100644 --- 
a/src/Cli.Tests/AddEntityTests.cs +++ b/src/Cli.Tests/AddEntityTests.cs @@ -31,6 +31,7 @@ public Task AddNewEntityWhenEntitiesEmpty() source: "MyTable", permissions: new string[] { "anonymous", "read,update" }, entity: "FirstEntity", + description: null, sourceType: null, sourceParameters: null, sourceKeyFields: null, @@ -44,9 +45,16 @@ public Task AddNewEntityWhenEntitiesEmpty() cacheTtl: null, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: null, - graphQLOperationForStoredProcedure: null + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); - return ExecuteVerifyTest(options); } @@ -60,6 +68,7 @@ public Task AddNewEntityWhenEntitiesNotEmpty() source: "MyTable", permissions: new string[] { "anonymous", "*" }, entity: "SecondEntity", + description: null, sourceType: null, sourceParameters: null, sourceKeyFields: null, @@ -73,7 +82,15 @@ public Task AddNewEntityWhenEntitiesNotEmpty() cacheTtl: null, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: null, - graphQLOperationForStoredProcedure: null + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); string initialConfiguration = AddPropertiesToJson(INITIAL_CONFIG, GetFirstEntityConfiguration()); @@ -91,6 +108,7 @@ public void AddDuplicateEntity() source: "MyTable", permissions: new string[] { "anonymous", "*" }, entity: "FirstEntity", + description: null, sourceType: null, sourceParameters: null, sourceKeyFields: null, @@ -104,7 +122,15 @@ public void 
AddDuplicateEntity() cacheTtl: null, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: null, - graphQLOperationForStoredProcedure: null + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); string initialConfiguration = AddPropertiesToJson(INITIAL_CONFIG, GetFirstEntityConfiguration()); @@ -126,6 +152,7 @@ public Task AddEntityWithAnExistingNameButWithDifferentCase() source: "MyTable", permissions: new string[] { "anonymous", "*" }, entity: "FIRSTEntity", + description: null, sourceType: null, sourceParameters: null, sourceKeyFields: null, @@ -139,7 +166,15 @@ public Task AddEntityWithAnExistingNameButWithDifferentCase() cacheTtl: null, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: null, - graphQLOperationForStoredProcedure: null + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); string initialConfiguration = AddPropertiesToJson(INITIAL_CONFIG, GetFirstEntityConfiguration()); @@ -156,6 +191,7 @@ public Task AddEntityWithCachingEnabled() source: "MyTable", permissions: new string[] { "anonymous", "*" }, entity: "CachingEntity", + description: null, sourceType: null, sourceParameters: null, sourceKeyFields: null, @@ -169,7 +205,15 @@ public Task AddEntityWithCachingEnabled() cacheTtl: "1", config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: null, - graphQLOperationForStoredProcedure: null + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + 
parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); return ExecuteVerifyTest(options); @@ -192,6 +236,7 @@ public Task AddEntityWithPolicyAndFieldProperties( source: "MyTable", permissions: new string[] { "anonymous", "delete" }, entity: "MyEntity", + description: null, sourceType: null, sourceParameters: null, sourceKeyFields: null, @@ -205,7 +250,15 @@ public Task AddEntityWithPolicyAndFieldProperties( cacheEnabled: null, cacheTtl: null, restMethodsForStoredProcedure: null, - graphQLOperationForStoredProcedure: null + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); // Create VerifySettings and add all arguments to the method as parameters @@ -224,6 +277,7 @@ public Task AddNewEntityWhenEntitiesWithSourceAsStoredProcedure() source: "s001.book", permissions: new string[] { "anonymous", "execute" }, entity: "MyEntity", + description: null, sourceType: "stored-procedure", sourceParameters: new string[] { "param1:123", "param2:hello", "param3:true" }, sourceKeyFields: null, @@ -237,7 +291,15 @@ public Task AddNewEntityWhenEntitiesWithSourceAsStoredProcedure() cacheEnabled: null, cacheTtl: null, restMethodsForStoredProcedure: null, - graphQLOperationForStoredProcedure: null + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + parametersDescriptionCollection: ["This is a test parameter description."], + parametersRequiredCollection: null, + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + 
fieldsPrimaryKeyCollection: [] ); return ExecuteVerifyTest(options); @@ -255,6 +317,7 @@ public Task TestAddStoredProcedureWithRestMethodsAndGraphQLOperations() source: "s001.book", permissions: new string[] { "anonymous", "execute" }, entity: "MyEntity", + description: null, sourceType: "stored-procedure", sourceParameters: new string[] { "param1:123", "param2:hello", "param3:true" }, sourceKeyFields: null, @@ -268,12 +331,60 @@ public Task TestAddStoredProcedureWithRestMethodsAndGraphQLOperations() cacheTtl: null, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: new string[] { "Post", "Put", "Patch" }, - graphQLOperationForStoredProcedure: "Query" + graphQLOperationForStoredProcedure: "Query", + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); return ExecuteVerifyTest(options); } + [TestMethod] + public void AddEntityWithDescriptionAndVerifyInConfig() + { + string description = "This is a test entity description."; + AddOptions options = new( + source: "MyTable", + permissions: new string[] { "anonymous", "read" }, + entity: "EntityWithDescription", + description: description, + sourceType: null, + sourceParameters: null, + sourceKeyFields: null, + restRoute: null, + graphQLType: null, + fieldsToInclude: new string[] { }, + fieldsToExclude: new string[] { }, + policyRequest: null, + policyDatabase: null, + cacheEnabled: null, + cacheTtl: null, + config: TEST_RUNTIME_CONFIG_FILE, + restMethodsForStoredProcedure: null, + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + 
fieldsPrimaryKeyCollection: [] + ); + + string config = INITIAL_CONFIG; + Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(config, out RuntimeConfig? runtimeConfig), "Loaded base config."); + Assert.IsTrue(TryAddNewEntity(options, runtimeConfig, out RuntimeConfig updatedRuntimeConfig), "Added entity to config."); + Assert.IsNotNull(updatedRuntimeConfig.Entities["EntityWithDescription"].Description); + Assert.AreEqual(description, updatedRuntimeConfig.Entities["EntityWithDescription"].Description); + } + /// /// Simple test to verify success on adding a new entity with source object for valid fields. /// @@ -305,6 +416,7 @@ public void TestAddNewEntityWithSourceObjectHavingValidFields( source: "testSource", permissions: new string[] { "anonymous", operations }, entity: "book", + description: null, sourceType: sourceType, sourceParameters: parameters, sourceKeyFields: keyFields, @@ -318,7 +430,15 @@ public void TestAddNewEntityWithSourceObjectHavingValidFields( cacheTtl: null, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: null, - graphQLOperationForStoredProcedure: null + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); RuntimeConfigLoader.TryParseConfig(INITIAL_CONFIG, out RuntimeConfig? 
runtimeConfig); @@ -364,6 +484,7 @@ public Task TestAddNewSpWithDifferentRestAndGraphQLOptions( source: "s001.book", permissions: new string[] { "anonymous", "execute" }, entity: "MyEntity", + description: null, sourceType: "stored-procedure", sourceParameters: null, sourceKeyFields: null, @@ -377,7 +498,15 @@ public Task TestAddNewSpWithDifferentRestAndGraphQLOptions( cacheTtl: null, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: restMethods, - graphQLOperationForStoredProcedure: graphQLOperation + graphQLOperationForStoredProcedure: graphQLOperation, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); VerifySettings settings = new(); @@ -399,6 +528,7 @@ public void TestAddStoredProcedureWithConflictingRestGraphQLOptions( source: "s001.book", permissions: new string[] { "anonymous", "execute" }, entity: "MyEntity", + description: null, sourceType: "stored-procedure", sourceParameters: null, sourceKeyFields: null, @@ -412,7 +542,15 @@ public void TestAddStoredProcedureWithConflictingRestGraphQLOptions( cacheTtl: null, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: restMethods, - graphQLOperationForStoredProcedure: graphQLOperation + graphQLOperationForStoredProcedure: graphQLOperation, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); RuntimeConfigLoader.TryParseConfig(INITIAL_CONFIG, out RuntimeConfig? 
runtimeConfig); @@ -437,6 +575,7 @@ public void TestAddEntityPermissionWithInvalidOperation(IEnumerable perm source: "MyTable", permissions: permissions, entity: "MyEntity", + description: null, sourceType: null, sourceParameters: null, sourceKeyFields: null, @@ -450,7 +589,15 @@ public void TestAddEntityPermissionWithInvalidOperation(IEnumerable perm cacheTtl: null, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: null, - graphQLOperationForStoredProcedure: null + graphQLOperationForStoredProcedure: null, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null, + fieldsNameCollection: [], + fieldsAliasCollection: [], + fieldsDescriptionCollection: [], + fieldsPrimaryKeyCollection: [] ); RuntimeConfigLoader.TryParseConfig(INITIAL_CONFIG, out RuntimeConfig? runtimeConfig); diff --git a/src/Cli.Tests/ConfigGeneratorTests.cs b/src/Cli.Tests/ConfigGeneratorTests.cs index 6094189f93..58e006b75d 100644 --- a/src/Cli.Tests/ConfigGeneratorTests.cs +++ b/src/Cli.Tests/ConfigGeneratorTests.cs @@ -163,6 +163,10 @@ public void TestSpecialCharactersInConnectionString() ""path"": ""/An_"", ""allow-introspection"": true }, + ""mcp"": { + ""enabled"": true, + ""path"": ""/mcp"" + }, ""host"": { ""cors"": { ""origins"": [], diff --git a/src/Cli.Tests/ConfigureOptionsTests.cs b/src/Cli.Tests/ConfigureOptionsTests.cs index 8ee064e262..073f349a67 100644 --- a/src/Cli.Tests/ConfigureOptionsTests.cs +++ b/src/Cli.Tests/ConfigureOptionsTests.cs @@ -1,6 +1,8 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. +using Serilog; + namespace Cli.Tests { /// @@ -111,6 +113,126 @@ public void TestAddDepthLimitForGraphQL() Assert.AreEqual(maxDepthLimit, config.Runtime.GraphQL.DepthLimit); } + /// + /// Tests that running the "configure --azure-key-vault" commands on a config without AKV properties results + /// in a valid config being generated. 
+ [TestMethod] + public void TestAddAKVOptions() + { + // Arrange + _fileSystem!.AddFile(TEST_RUNTIME_CONFIG_FILE, new MockFileData(INITIAL_CONFIG)); + + Assert.IsTrue(_fileSystem!.File.Exists(TEST_RUNTIME_CONFIG_FILE)); + + // Act: Attempts to add AKV options + ConfigureOptions options = new( + azureKeyVaultEndpoint: "foo", + azureKeyVaultRetryPolicyMaxCount: 1, + azureKeyVaultRetryPolicyDelaySeconds: 1, + azureKeyVaultRetryPolicyMaxDelaySeconds: 1, + azureKeyVaultRetryPolicyMode: AKVRetryPolicyMode.Exponential, + azureKeyVaultRetryPolicyNetworkTimeoutSeconds: 1, + config: TEST_RUNTIME_CONFIG_FILE + ); + bool isSuccess = TryConfigureSettings(options, _runtimeConfigLoader!, _fileSystem!); + + // Assert: Validate the AKV options are added. + Assert.IsTrue(isSuccess); + string updatedConfig = _fileSystem!.File.ReadAllText(TEST_RUNTIME_CONFIG_FILE); + Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(updatedConfig, out RuntimeConfig? config)); + Assert.IsNotNull(config.AzureKeyVault); + Assert.IsNotNull(config.AzureKeyVault?.RetryPolicy); + Assert.AreEqual("foo", config.AzureKeyVault?.Endpoint); + Assert.AreEqual(AKVRetryPolicyMode.Exponential, config.AzureKeyVault?.RetryPolicy.Mode); + Assert.AreEqual(1, config.AzureKeyVault?.RetryPolicy.MaxCount); + Assert.AreEqual(1, config.AzureKeyVault?.RetryPolicy.DelaySeconds); + Assert.AreEqual(1, config.AzureKeyVault?.RetryPolicy.MaxDelaySeconds); + Assert.AreEqual(1, config.AzureKeyVault?.RetryPolicy.NetworkTimeoutSeconds); + } + + /// + /// Tests that running the "configure --azure-log-analytics" commands on a config without Azure Log Analytics properties results + /// in a valid config being generated. 
+ [TestMethod] + public void TestAddAzureLogAnalyticsOptions() + { + // Arrange + _fileSystem!.AddFile(TEST_RUNTIME_CONFIG_FILE, new MockFileData(INITIAL_CONFIG)); + + Assert.IsTrue(_fileSystem!.File.Exists(TEST_RUNTIME_CONFIG_FILE)); + + // Act: Attempts to add Azure Log Analytics options + ConfigureOptions options = new( + azureLogAnalyticsEnabled: CliBool.True, + azureLogAnalyticsDabIdentifier: "dab-identifier-test", + azureLogAnalyticsFlushIntervalSeconds: 1, + azureLogAnalyticsCustomTableName: "custom-table-name-test", + azureLogAnalyticsDcrImmutableId: "dcr-immutable-id-test", + azureLogAnalyticsDceEndpoint: "dce-endpoint-test", + config: TEST_RUNTIME_CONFIG_FILE + ); + + bool isSuccess = TryConfigureSettings(options, _runtimeConfigLoader!, _fileSystem!); + + // Assert: Validate the Azure Log Analytics options are added. + Assert.IsTrue(isSuccess); + string updatedConfig = _fileSystem!.File.ReadAllText(TEST_RUNTIME_CONFIG_FILE); + Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(updatedConfig, out RuntimeConfig? 
config)); + Assert.IsNotNull(config.Runtime); + Assert.IsNotNull(config.Runtime.Telemetry); + Assert.IsNotNull(config.Runtime.Telemetry.AzureLogAnalytics); + Assert.AreEqual(true, config.Runtime.Telemetry.AzureLogAnalytics.Enabled); + Assert.AreEqual("dab-identifier-test", config.Runtime.Telemetry.AzureLogAnalytics.DabIdentifier); + Assert.AreEqual(1, config.Runtime.Telemetry.AzureLogAnalytics.FlushIntervalSeconds); + Assert.IsNotNull(config.Runtime.Telemetry.AzureLogAnalytics.Auth); + Assert.AreEqual("custom-table-name-test", config.Runtime.Telemetry.AzureLogAnalytics.Auth.CustomTableName); + Assert.AreEqual("dcr-immutable-id-test", config.Runtime.Telemetry.AzureLogAnalytics.Auth.DcrImmutableId); + Assert.AreEqual("dce-endpoint-test", config.Runtime.Telemetry.AzureLogAnalytics.Auth.DceEndpoint); + } + + /// + /// Tests that running the "configure --file" commands on a config without file sink properties results + /// in a valid config being generated. + /// + [TestMethod] + public void TestAddFileSinkOptions() + { + // Arrange + string fileSinkPath = "/custom/log/path.txt"; + RollingInterval fileSinkRollingInterval = RollingInterval.Hour; + int fileSinkRetainedFileCountLimit = 5; + int fileSinkFileSizeLimitBytes = 2097152; + + _fileSystem!.AddFile(TEST_RUNTIME_CONFIG_FILE, new MockFileData(INITIAL_CONFIG)); + + Assert.IsTrue(_fileSystem!.File.Exists(TEST_RUNTIME_CONFIG_FILE)); + + // Act: Attempts to add file options + ConfigureOptions options = new( + fileSinkEnabled: CliBool.True, + fileSinkPath: fileSinkPath, + fileSinkRollingInterval: fileSinkRollingInterval, + fileSinkRetainedFileCountLimit: fileSinkRetainedFileCountLimit, + fileSinkFileSizeLimitBytes: fileSinkFileSizeLimitBytes, + config: TEST_RUNTIME_CONFIG_FILE + ); + + bool isSuccess = TryConfigureSettings(options, _runtimeConfigLoader!, _fileSystem!); + + // Assert: Validate the file options are added. 
+ Assert.IsTrue(isSuccess); + string updatedConfig = _fileSystem!.File.ReadAllText(TEST_RUNTIME_CONFIG_FILE); + Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(updatedConfig, out RuntimeConfig? config)); + Assert.IsNotNull(config.Runtime); + Assert.IsNotNull(config.Runtime.Telemetry); + Assert.IsNotNull(config.Runtime.Telemetry.File); + Assert.AreEqual(true, config.Runtime.Telemetry.File.Enabled); + Assert.AreEqual(fileSinkPath, config.Runtime.Telemetry.File.Path); + Assert.AreEqual(fileSinkRollingInterval.ToString(), config.Runtime.Telemetry.File.RollingInterval); + Assert.AreEqual(fileSinkRetainedFileCountLimit, config.Runtime.Telemetry.File.RetainedFileCountLimit); + Assert.AreEqual(fileSinkFileSizeLimitBytes, config.Runtime.Telemetry.File.FileSizeLimitBytes); + } + /// /// Tests that running "dab configure --runtime.graphql.enabled" on a config with various values results /// in runtime. Takes in updated value for graphql.enabled and diff --git a/src/Cli.Tests/EndToEndTests.cs b/src/Cli.Tests/EndToEndTests.cs index 28fecfb2d5..7fe017501f 100644 --- a/src/Cli.Tests/EndToEndTests.cs +++ b/src/Cli.Tests/EndToEndTests.cs @@ -771,9 +771,11 @@ public void TestUpdateEntity() CollectionAssert.AreEqual(new string[] { "todo_id" }, relationship.LinkingSourceFields); CollectionAssert.AreEqual(new string[] { "id" }, relationship.LinkingTargetFields); - Assert.IsNotNull(entity.Mappings); - Assert.AreEqual("identity", entity.Mappings["id"]); - Assert.AreEqual("Company Name", entity.Mappings["name"]); + Assert.IsNotNull(entity.Fields); + Assert.AreEqual(2, entity.Fields.Count); + Assert.AreEqual(entity.Fields[0].Alias, "identity"); + Assert.AreEqual(entity.Fields[1].Alias, "Company Name"); + Assert.IsNull(entity.Mappings); } /// diff --git a/src/Cli.Tests/ExporterTests.cs b/src/Cli.Tests/ExporterTests.cs index aecd6455a3..3735dc43a1 100644 --- a/src/Cli.Tests/ExporterTests.cs +++ b/src/Cli.Tests/ExporterTests.cs @@ -21,7 +21,7 @@ public void 
ExportGraphQLFromDabService_LogsWhenHttpsWorks() RuntimeConfig runtimeConfig = new( Schema: "schema", DataSource: new DataSource(DatabaseType.MSSQL, "", new()), - Runtime: new(Rest: new(), GraphQL: new(), Host: new(null, null)), + Runtime: new(Rest: new(), GraphQL: new(), Mcp: new(), Host: new(null, null)), Entities: new(new Dictionary()) ); @@ -59,7 +59,7 @@ public void ExportGraphQLFromDabService_LogsFallbackToHttp_WhenHttpsFails() RuntimeConfig runtimeConfig = new( Schema: "schema", DataSource: new DataSource(DatabaseType.MSSQL, "", new()), - Runtime: new(Rest: new(), GraphQL: new(), Host: new(null, null)), + Runtime: new(Rest: new(), GraphQL: new(), Mcp: new(), Host: new(null, null)), Entities: new(new Dictionary()) ); @@ -105,7 +105,7 @@ public void ExportGraphQLFromDabService_ThrowsException_WhenBothHttpsAndHttpFail RuntimeConfig runtimeConfig = new( Schema: "schema", DataSource: new DataSource(DatabaseType.MSSQL, "", new()), - Runtime: new(Rest: new(), GraphQL: new(), Host: new(null, null)), + Runtime: new(Rest: new(), GraphQL: new(), Mcp: new(), Host: new(null, null)), Entities: new(new Dictionary()) ); diff --git a/src/Cli.Tests/ModuleInitializer.cs b/src/Cli.Tests/ModuleInitializer.cs index 2cfba899ea..e00dc00a89 100644 --- a/src/Cli.Tests/ModuleInitializer.cs +++ b/src/Cli.Tests/ModuleInitializer.cs @@ -47,6 +47,10 @@ public static void Init() VerifierSettings.IgnoreMember(options => options.IsGraphQLEnabled); // Ignore the entity IsGraphQLEnabled as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(entity => entity.IsGraphQLEnabled); + // Ignore the global IsMcpEnabled as that's unimportant from a test standpoint. + VerifierSettings.IgnoreMember(config => config.IsMcpEnabled); + // Ignore the global RuntimeOptions.IsMcpEnabled as that's unimportant from a test standpoint. + VerifierSettings.IgnoreMember(options => options.IsMcpEnabled); // Ignore the global IsHealthEnabled as that's unimportant from a test standpoint. 
VerifierSettings.IgnoreMember(config => config.IsHealthEnabled); // Ignore the global RuntimeOptions.IsHealthCheckEnabled as that's unimportant from a test standpoint. @@ -67,12 +71,18 @@ public static void Init() VerifierSettings.IgnoreMember(config => config.IsGraphQLEnabled); // Ignore the IsRestEnabled as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.IsRestEnabled); + // Ignore the IsMcpEnabled as that's unimportant from a test standpoint. + VerifierSettings.IgnoreMember(config => config.IsMcpEnabled); + // Ignore the McpDmlTools as that's unimportant from a test standpoint. + VerifierSettings.IgnoreMember(config => config.McpDmlTools); // Ignore the IsStaticWebAppsIdentityProvider as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.IsStaticWebAppsIdentityProvider); // Ignore the RestPath as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.RestPath); // Ignore the GraphQLPath as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.GraphQLPath); + // Ignore the McpPath as that's unimportant from a test standpoint. + VerifierSettings.IgnoreMember(config => config.McpPath); // Ignore the AllowIntrospection as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.AllowIntrospection); // Ignore the EnableAggregation as that's unimportant from a test standpoint. @@ -101,6 +111,8 @@ public static void Init() VerifierSettings.IgnoreMember(options => options.UserProvidedDepthLimit); // Ignore EnableLegacyDateTimeScalar as that's not serialized in our config file. VerifierSettings.IgnoreMember(options => options.EnableLegacyDateTimeScalar); + // Ignore UserProvidedPath as that's not serialized in our config file. + VerifierSettings.IgnoreMember(options => options.UserProvidedPath); // Customise the path where we store snapshots, so they are easier to locate in a PR review. 
VerifyBase.DerivePathInfo( (sourceFile, projectDirectory, type, method) => new( diff --git a/src/Cli.Tests/Snapshots/AddEntityTests.AddNewEntityWhenEntitiesWithSourceAsStoredProcedure.verified.txt b/src/Cli.Tests/Snapshots/AddEntityTests.AddNewEntityWhenEntitiesWithSourceAsStoredProcedure.verified.txt index 17e8de5193..21759deeed 100644 --- a/src/Cli.Tests/Snapshots/AddEntityTests.AddNewEntityWhenEntitiesWithSourceAsStoredProcedure.verified.txt +++ b/src/Cli.Tests/Snapshots/AddEntityTests.AddNewEntityWhenEntitiesWithSourceAsStoredProcedure.verified.txt @@ -28,11 +28,23 @@ Source: { Object: s001.book, Type: stored-procedure, - Parameters: { - param1: 123, - param2: hello, - param3: true - } + Parameters: [ + { + Name: param1, + Required: false, + Default: 123 + }, + { + Name: param2, + Required: false, + Default: hello + }, + { + Name: param3, + Required: false, + Default: True + } + ] }, GraphQL: { Singular: MyEntity, diff --git a/src/Cli.Tests/Snapshots/AddEntityTests.TestAddStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt b/src/Cli.Tests/Snapshots/AddEntityTests.TestAddStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt index 9aca5ba640..83d3882a96 100644 --- a/src/Cli.Tests/Snapshots/AddEntityTests.TestAddStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt +++ b/src/Cli.Tests/Snapshots/AddEntityTests.TestAddStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt @@ -28,11 +28,23 @@ Source: { Object: s001.book, Type: stored-procedure, - Parameters: { - param1: 123, - param2: hello, - param3: true - } + Parameters: [ + { + Name: param1, + Required: false, + Default: 123 + }, + { + Name: param2, + Required: false, + Default: hello + }, + { + Name: param3, + Required: false, + Default: True + } + ] }, GraphQL: { Singular: MyEntity, diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt 
b/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt index a76f72b9a0..4411b47348 100644 --- a/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt +++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestAddingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false @@ -31,11 +35,23 @@ Source: { Object: s001.book, Type: stored-procedure, - Parameters: { - param1: 123, - param2: hello, - param3: true - } + Parameters: [ + { + Name: param1, + Required: false, + Default: 123 + }, + { + Name: param2, + Required: false, + Default: hello + }, + { + Name: param3, + Required: false, + Default: True + } + ] }, GraphQL: { Singular: MyEntity, diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt index 95415c1685..636d44805e 100644 --- a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt +++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceAsStoredProcedure.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false @@ -31,11 +35,23 @@ Source: { Object: s001.book, Type: stored-procedure, - Parameters: { - param1: 123, - param2: hello, - param3: true - } + Parameters: [ + { + Name: param1, + Required: false, + Default: 123 + }, + { + Name: param2, + Required: false, + Default: hello + }, + { + Name: param3, + Required: false, + Default: True + } + ] }, GraphQL: { Singular: MyEntity, diff --git 
a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceWithDefaultType.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceWithDefaultType.verified.txt index ee8dbf6199..a77ecc134b 100644 --- a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceWithDefaultType.verified.txt +++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithSourceWithDefaultType.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithoutIEnumerables.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithoutIEnumerables.verified.txt index 0d0afda2bf..a19694b688 100644 --- a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithoutIEnumerables.verified.txt +++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterAddingEntityWithoutIEnumerables.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterUpdatingEntityWithSourceAsStoredProcedure.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterUpdatingEntityWithSourceAsStoredProcedure.verified.txt index 5c940443a5..49af50b975 100644 --- a/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterUpdatingEntityWithSourceAsStoredProcedure.verified.txt +++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestConfigGeneratedAfterUpdatingEntityWithSourceAsStoredProcedure.verified.txt @@ -2,11 +2,23 @@ Source: { Object: dbo.books, Type: stored-procedure, - Parameters: { - param1: 123, - param2: hello, - param3: true - } + Parameters: [ + { + Name: param1, + 
Required: false, + Default: 123 + }, + { + Name: param2, + Required: false, + Default: hello + }, + { + Name: param3, + Required: false, + Default: True + } + ] }, GraphQL: { Singular: MyEntity, diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestInitForCosmosDBNoSql.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestInitForCosmosDBNoSql.verified.txt index cbb2df5fb8..081c5f8e55 100644 --- a/src/Cli.Tests/Snapshots/EndToEndTests.TestInitForCosmosDBNoSql.verified.txt +++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestInitForCosmosDBNoSql.verified.txt @@ -17,6 +17,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt index 0c20e9fc25..fef1d83bf2 100644 --- a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt +++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethods.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false @@ -31,11 +35,23 @@ Source: { Object: s001.book, Type: stored-procedure, - Parameters: { - param1: 123, - param2: hello, - param3: true - } + Parameters: [ + { + Name: param1, + Required: false, + Default: 123 + }, + { + Name: param2, + Required: false, + Default: hello + }, + { + Name: param3, + Required: false, + Default: True + } + ] }, GraphQL: { Singular: MyEntity, diff --git a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt index 27b20753d3..09007e27f8 100644 --- 
a/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt +++ b/src/Cli.Tests/Snapshots/EndToEndTests.TestUpdatingStoredProcedureWithRestMethodsAndGraphQLOperations.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false @@ -31,11 +35,23 @@ Source: { Object: s001.book, Type: stored-procedure, - Parameters: { - param1: 123, - param2: hello, - param3: true - } + Parameters: [ + { + Name: param1, + Required: false, + Default: 123 + }, + { + Name: param2, + Required: false, + Default: hello + }, + { + Name: param3, + Required: false, + Default: True + } + ] }, GraphQL: { Singular: MyEntity, diff --git a/src/Cli.Tests/Snapshots/InitTests.CosmosDbNoSqlDatabase.verified.txt b/src/Cli.Tests/Snapshots/InitTests.CosmosDbNoSqlDatabase.verified.txt index 2af3cbc907..b3f63dd336 100644 --- a/src/Cli.Tests/Snapshots/InitTests.CosmosDbNoSqlDatabase.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.CosmosDbNoSqlDatabase.verified.txt @@ -17,6 +17,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/InitTests.CosmosDbPostgreSqlDatabase.verified.txt b/src/Cli.Tests/Snapshots/InitTests.CosmosDbPostgreSqlDatabase.verified.txt index ca3b61588b..42e0ff5e2f 100644 --- a/src/Cli.Tests/Snapshots/InitTests.CosmosDbPostgreSqlDatabase.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.CosmosDbPostgreSqlDatabase.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_171ea8114ff71814.verified.txt 
b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_171ea8114ff71814.verified.txt index 93190d1d9d..0af93023dc 100644 --- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_171ea8114ff71814.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_171ea8114ff71814.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_2df7a1794712f154.verified.txt b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_2df7a1794712f154.verified.txt index 5c52bc12c1..9e77b24d74 100644 --- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_2df7a1794712f154.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_2df7a1794712f154.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_59fe1a10aa78899d.verified.txt b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_59fe1a10aa78899d.verified.txt index 7b0a4674eb..32f72a7a54 100644 --- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_59fe1a10aa78899d.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_59fe1a10aa78899d.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: 
/mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_b95b637ea87f16a7.verified.txt b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_b95b637ea87f16a7.verified.txt index dc60d762cc..24416a0d02 100644 --- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_b95b637ea87f16a7.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_b95b637ea87f16a7.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_daacbd948b7ef72f.verified.txt b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_daacbd948b7ef72f.verified.txt index 7a67eca701..6c674a4772 100644 --- a/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_daacbd948b7ef72f.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.EnsureCorrectConfigGenerationWithDifferentAuthenticationProviders_daacbd948b7ef72f.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/InitTests.GraphQLPathWithoutStartingSlashWillHaveItAdded.verified.txt b/src/Cli.Tests/Snapshots/InitTests.GraphQLPathWithoutStartingSlashWillHaveItAdded.verified.txt index 8c2ffbbcac..b6aac13236 100644 --- a/src/Cli.Tests/Snapshots/InitTests.GraphQLPathWithoutStartingSlashWillHaveItAdded.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.GraphQLPathWithoutStartingSlashWillHaveItAdded.verified.txt @@ -16,6 +16,10 @@ Path: /abc, AllowIntrospection: 
true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/InitTests.MsSQLDatabase.verified.txt b/src/Cli.Tests/Snapshots/InitTests.MsSQLDatabase.verified.txt index da7937d1d9..8841c0f326 100644 --- a/src/Cli.Tests/Snapshots/InitTests.MsSQLDatabase.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.MsSQLDatabase.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.RestPathWithoutStartingSlashWillHaveItAdded.verified.txt b/src/Cli.Tests/Snapshots/InitTests.RestPathWithoutStartingSlashWillHaveItAdded.verified.txt index ef8c7173d5..68e4d231fd 100644 --- a/src/Cli.Tests/Snapshots/InitTests.RestPathWithoutStartingSlashWillHaveItAdded.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.RestPathWithoutStartingSlashWillHaveItAdded.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/InitTests.TestInitializingConfigWithoutConnectionString.verified.txt b/src/Cli.Tests/Snapshots/InitTests.TestInitializingConfigWithoutConnectionString.verified.txt index 72f66f82c9..3c281ad6aa 100644 --- a/src/Cli.Tests/Snapshots/InitTests.TestInitializingConfigWithoutConnectionString.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.TestInitializingConfigWithoutConnectionString.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.TestSpecialCharactersInConnectionString.verified.txt b/src/Cli.Tests/Snapshots/InitTests.TestSpecialCharactersInConnectionString.verified.txt index 7b0a4674eb..32f72a7a54 100644 --- a/src/Cli.Tests/Snapshots/InitTests.TestSpecialCharactersInConnectionString.verified.txt +++ 
b/src/Cli.Tests/Snapshots/InitTests.TestSpecialCharactersInConnectionString.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { AllowCredentials: false diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0546bef37027a950.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0546bef37027a950.verified.txt index cbaaa45754..888466ab4a 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0546bef37027a950.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0546bef37027a950.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0ac567dd32a2e8f5.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0ac567dd32a2e8f5.verified.txt index da7937d1d9..8841c0f326 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0ac567dd32a2e8f5.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0ac567dd32a2e8f5.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0c06949221514e77.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0c06949221514e77.verified.txt index 62fc407842..d56e05c483 100644 --- 
a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0c06949221514e77.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_0c06949221514e77.verified.txt @@ -21,6 +21,10 @@ } } }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_18667ab7db033e9d.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_18667ab7db033e9d.verified.txt index 3285438ab7..bc31484242 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_18667ab7db033e9d.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_18667ab7db033e9d.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_2f42f44c328eb020.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_2f42f44c328eb020.verified.txt index cbaaa45754..888466ab4a 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_2f42f44c328eb020.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_2f42f44c328eb020.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_3243d3f3441fdcc1.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_3243d3f3441fdcc1.verified.txt index 3285438ab7..bc31484242 
100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_3243d3f3441fdcc1.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_3243d3f3441fdcc1.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_53350b8b47df2112.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_53350b8b47df2112.verified.txt index a43e68277c..48f5e7a7c9 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_53350b8b47df2112.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_53350b8b47df2112.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_6584e0ec46b8a11d.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_6584e0ec46b8a11d.verified.txt index 9740a85a77..8fa9677f1d 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_6584e0ec46b8a11d.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_6584e0ec46b8a11d.verified.txt @@ -17,6 +17,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_81cc88db3d4eecfb.verified.txt 
b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_81cc88db3d4eecfb.verified.txt index be47d537b2..e3108801f5 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_81cc88db3d4eecfb.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_81cc88db3d4eecfb.verified.txt @@ -21,6 +21,10 @@ } } }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_8ea187616dbb5577.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_8ea187616dbb5577.verified.txt index 673c21dae4..59f6636fb2 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_8ea187616dbb5577.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_8ea187616dbb5577.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_905845c29560a3ef.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_905845c29560a3ef.verified.txt index cbaaa45754..888466ab4a 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_905845c29560a3ef.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_905845c29560a3ef.verified.txt @@ -16,6 +16,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git 
a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_b2fd24fab5b80917.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_b2fd24fab5b80917.verified.txt index 9740a85a77..8fa9677f1d 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_b2fd24fab5b80917.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_b2fd24fab5b80917.verified.txt @@ -17,6 +17,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_bd7cd088755287c9.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_bd7cd088755287c9.verified.txt index 9740a85a77..8fa9677f1d 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_bd7cd088755287c9.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_bd7cd088755287c9.verified.txt @@ -17,6 +17,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d2eccba2f836b380.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d2eccba2f836b380.verified.txt index a43e68277c..48f5e7a7c9 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d2eccba2f836b380.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d2eccba2f836b380.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp 
+ }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d463eed7fe5e4bbe.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d463eed7fe5e4bbe.verified.txt index 673c21dae4..59f6636fb2 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d463eed7fe5e4bbe.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d463eed7fe5e4bbe.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d5520dd5c33f7b8d.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d5520dd5c33f7b8d.verified.txt index a43e68277c..48f5e7a7c9 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d5520dd5c33f7b8d.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_d5520dd5c33f7b8d.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_eab4a6010e602b59.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_eab4a6010e602b59.verified.txt index 3285438ab7..bc31484242 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_eab4a6010e602b59.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_eab4a6010e602b59.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: 
true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_ecaa688829b4030e.verified.txt b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_ecaa688829b4030e.verified.txt index 673c21dae4..59f6636fb2 100644 --- a/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_ecaa688829b4030e.verified.txt +++ b/src/Cli.Tests/Snapshots/InitTests.VerifyCorrectConfigGenerationWithMultipleMutationOptions_ecaa688829b4030e.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_036a859f50ce167c.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_036a859f50ce167c.verified.txt index a78465898d..260eecd0c9 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_036a859f50ce167c.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_036a859f50ce167c.verified.txt @@ -27,12 +27,18 @@ MyEntity: { Source: { Object: s001.book, - Type: View, - KeyFields: [ - col1, - col2 - ] + Type: View }, + Fields: [ + { + Name: col1, + PrimaryKey: true + }, + { + Name: col2, + PrimaryKey: true + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_103655d39b48d89f.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_103655d39b48d89f.verified.txt index d3ed32cf42..80f61e17ac 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_103655d39b48d89f.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_103655d39b48d89f.verified.txt @@ -27,12 +27,18 @@ MyEntity: 
{ Source: { Object: s001.book, - Type: Table, - KeyFields: [ - id, - name - ] + Type: Table }, + Fields: [ + { + Name: id, + PrimaryKey: true + }, + { + Name: name, + PrimaryKey: true + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_442649c7ef2176bd.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_442649c7ef2176bd.verified.txt index a78465898d..260eecd0c9 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_442649c7ef2176bd.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_442649c7ef2176bd.verified.txt @@ -27,12 +27,18 @@ MyEntity: { Source: { Object: s001.book, - Type: View, - KeyFields: [ - col1, - col2 - ] + Type: View }, + Fields: [ + { + Name: col1, + PrimaryKey: true + }, + { + Name: col2, + PrimaryKey: true + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_a70c086a74142c82.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_a70c086a74142c82.verified.txt index 17e8de5193..21759deeed 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_a70c086a74142c82.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_a70c086a74142c82.verified.txt @@ -28,11 +28,23 @@ Source: { Object: s001.book, Type: stored-procedure, - Parameters: { - param1: 123, - param2: hello, - param3: true - } + Parameters: [ + { + Name: param1, + Required: false, + Default: 123 + }, + { + Name: param2, + Required: false, + Default: hello + }, + { + Name: param3, + Required: false, + Default: True + } + ] }, GraphQL: { Singular: MyEntity, diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_c26902b0e44f97cd.verified.txt 
b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_c26902b0e44f97cd.verified.txt index 2b4a7b8518..2d00804545 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_c26902b0e44f97cd.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestConversionOfSourceObject_c26902b0e44f97cd.verified.txt @@ -29,6 +29,16 @@ Object: s001.book, Type: stored-procedure }, + Fields: [ + { + Name: id, + PrimaryKey: true + }, + { + Name: name, + PrimaryKey: true + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateEntityWithMappings.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateEntityWithMappings.verified.txt index 63ba7e2898..54d9077f1c 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateEntityWithMappings.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateEntityWithMappings.verified.txt @@ -33,6 +33,18 @@ Object: MyTable, Type: Table }, + Fields: [ + { + Name: id, + Alias: Identity, + PrimaryKey: false + }, + { + Name: name, + Alias: Company Name, + PrimaryKey: false + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, @@ -53,11 +65,7 @@ } ] } - ], - Mappings: { - id: Identity, - name: Company Name - } + ] } } ] diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateEntityWithSpecialCharacterInMappings.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateEntityWithSpecialCharacterInMappings.verified.txt index 8dcadec7b1..1906f87425 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateEntityWithSpecialCharacterInMappings.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateEntityWithSpecialCharacterInMappings.verified.txt @@ -33,6 +33,28 @@ Object: MyTable, Type: Table }, + Fields: [ + { + Name: Macaroni, + Alias: Mac & Cheese, + PrimaryKey: false + }, + { + Name: region, + Alias: United State's Region, + PrimaryKey: false + }, + { + Name: 
russian, + Alias: русский, + PrimaryKey: false + }, + { + Name: chinese, + Alias: 中文, + PrimaryKey: false + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, @@ -53,13 +75,7 @@ } ] } - ], - Mappings: { - chinese: 中文, - Macaroni: Mac & Cheese, - region: United State's Region, - russian: русский - } + ] } } ] diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateExistingMappings.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateExistingMappings.verified.txt index 13e994a5cc..56ce5b55c3 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateExistingMappings.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateExistingMappings.verified.txt @@ -33,6 +33,23 @@ Object: MyTable, Type: Table }, + Fields: [ + { + Name: name, + Alias: Company Name, + PrimaryKey: false + }, + { + Name: addr, + Alias: Company Address, + PrimaryKey: false + }, + { + Name: number, + Alias: Contact Details, + PrimaryKey: false + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, @@ -53,12 +70,7 @@ } ] } - ], - Mappings: { - addr: Company Address, - name: Company Name, - number: Contact Details - } + ] } } ] diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_574e1995f787740f.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_574e1995f787740f.verified.txt index a78465898d..260eecd0c9 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_574e1995f787740f.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_574e1995f787740f.verified.txt @@ -27,12 +27,18 @@ MyEntity: { Source: { Object: s001.book, - Type: View, - KeyFields: [ - col1, - col2 - ] + Type: View }, + Fields: [ + { + Name: col1, + PrimaryKey: true + }, + { + Name: col2, + PrimaryKey: true + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, diff --git 
a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_a13a9ca73b21f261.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_a13a9ca73b21f261.verified.txt index d3ed32cf42..80f61e17ac 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_a13a9ca73b21f261.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_a13a9ca73b21f261.verified.txt @@ -27,12 +27,18 @@ MyEntity: { Source: { Object: s001.book, - Type: Table, - KeyFields: [ - id, - name - ] + Type: Table }, + Fields: [ + { + Name: id, + PrimaryKey: true + }, + { + Name: name, + PrimaryKey: true + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_a5ce76c8bea25cc8.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_a5ce76c8bea25cc8.verified.txt index d3ed32cf42..80f61e17ac 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_a5ce76c8bea25cc8.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.TestUpdateSourceStringToDatabaseSourceObject_a5ce76c8bea25cc8.verified.txt @@ -27,12 +27,18 @@ MyEntity: { Source: { Object: s001.book, - Type: Table, - KeyFields: [ - id, - name - ] + Type: Table }, + Fields: [ + { + Name: id, + PrimaryKey: true + }, + { + Name: name, + PrimaryKey: true + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceKeyFields.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceKeyFields.verified.txt index 697074cedf..544a3484f9 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceKeyFields.verified.txt +++ 
b/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceKeyFields.verified.txt @@ -27,12 +27,18 @@ MyEntity: { Source: { Object: s001.book, - Type: Table, - KeyFields: [ - col1, - col2 - ] + Type: Table }, + Fields: [ + { + Name: col1, + PrimaryKey: true + }, + { + Name: col2, + PrimaryKey: true + } + ], GraphQL: { Singular: MyEntity, Plural: MyEntities, diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceName.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceName.verified.txt index 967a59f1f9..1719e1ade2 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceName.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceName.verified.txt @@ -28,11 +28,23 @@ Source: { Object: newSourceName, Type: stored-procedure, - Parameters: { - param1: 123, - param2: hello, - param3: true - } + Parameters: [ + { + Name: param1, + Required: false, + Default: 123 + }, + { + Name: param2, + Required: false, + Default: hello + }, + { + Name: param3, + Required: false, + Default: True + } + ] }, GraphQL: { Singular: MyEntity, diff --git a/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceParameters.verified.txt b/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceParameters.verified.txt index 016527cd68..0cbdc4347f 100644 --- a/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceParameters.verified.txt +++ b/src/Cli.Tests/Snapshots/UpdateEntityTests.UpdateDatabaseSourceParameters.verified.txt @@ -28,10 +28,18 @@ Source: { Object: s001.book, Type: stored-procedure, - Parameters: { - param1: dab, - param2: false - } + Parameters: [ + { + Name: param1, + Required: false, + Default: dab + }, + { + Name: param2, + Required: false, + Default: False + } + ] }, GraphQL: { Singular: MyEntity, diff --git a/src/Cli.Tests/UpdateEntityTests.cs b/src/Cli.Tests/UpdateEntityTests.cs index 2719cf7df7..3a106c0adc 100644 --- a/src/Cli.Tests/UpdateEntityTests.cs +++ 
b/src/Cli.Tests/UpdateEntityTests.cs @@ -1004,7 +1004,7 @@ public void TestVerifyCanUpdateRelationshipInvalidOptions(string db, string card RuntimeConfig runtimeConfig = new( Schema: "schema", DataSource: new DataSource(EnumExtensions.Deserialize(db), "", new()), - Runtime: new(Rest: new(), GraphQL: new(), Host: new(null, null)), + Runtime: new(Rest: new(), GraphQL: new(), Mcp: new(), Host: new(null, null)), Entities: new(new Dictionary()) ); @@ -1030,6 +1030,7 @@ public void EnsureFailure_AddRelationshipToEntityWithDisabledGraphQL() Entity sampleEntity1 = new( Source: new("SOURCE1", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new("SOURCE1", "SOURCE1s"), Permissions: new[] { permissionForEntity }, @@ -1040,6 +1041,7 @@ public void EnsureFailure_AddRelationshipToEntityWithDisabledGraphQL() // entity with graphQL disabled Entity sampleEntity2 = new( Source: new("SOURCE2", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new("SOURCE2", "SOURCE2s", false), Permissions: new[] { permissionForEntity }, @@ -1056,13 +1058,48 @@ public void EnsureFailure_AddRelationshipToEntityWithDisabledGraphQL() RuntimeConfig runtimeConfig = new( Schema: "schema", DataSource: new DataSource(DatabaseType.MSSQL, "", new()), - Runtime: new(Rest: new(), GraphQL: new(), Host: new(null, null)), + Runtime: new(Rest: new(), GraphQL: new(), Mcp: new(), Host: new(null, null)), Entities: new(entityMap) ); Assert.IsFalse(VerifyCanUpdateRelationship(runtimeConfig, cardinality: "one", targetEntity: "SampleEntity2")); } + /// + /// Test to verify updating the description property of an entity. 
+ /// + [TestMethod] + public void TestUpdateEntityDescription() + { + // Initial config with an old description + string initialConfig = GetInitialConfigString() + "," + @" + ""entities"": { + ""MyEntity"": { + ""source"": ""MyTable"", + ""description"": ""Old description"", + ""permissions"": [ + { + ""role"": ""anonymous"", + ""actions"": [""read""] + } + ] + } + } + }"; + + // UpdateOptions with a new description + UpdateOptions options = GenerateBaseUpdateOptions( + entity: "MyEntity", + description: "Updated description" + ); + + Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(initialConfig, out RuntimeConfig? runtimeConfig), "Parsed config file."); + Assert.IsTrue(TryUpdateExistingEntity(options, runtimeConfig, out RuntimeConfig updatedRuntimeConfig), "Successfully updated entity in the config."); + + // Assert that the description was updated + Assert.AreEqual("Updated description", updatedRuntimeConfig.Entities["MyEntity"].Description); + } + private static string GetInitialConfigString() { return @"{" + @@ -1122,7 +1159,8 @@ private static UpdateOptions GenerateBaseUpdateOptions( IEnumerable? restMethodsForStoredProcedure = null, string? graphQLOperationForStoredProcedure = null, string? cacheEnabled = null, - string? cacheTtl = null + string? cacheTtl = null, + string? 
description = null ) { return new( @@ -1150,7 +1188,16 @@ private static UpdateOptions GenerateBaseUpdateOptions( cacheTtl: cacheTtl, config: TEST_RUNTIME_CONFIG_FILE, restMethodsForStoredProcedure: restMethodsForStoredProcedure, - graphQLOperationForStoredProcedure: graphQLOperationForStoredProcedure + graphQLOperationForStoredProcedure: graphQLOperationForStoredProcedure, + description: description, + parametersNameCollection: null, + parametersDescriptionCollection: null, + parametersRequiredCollection: null, + parametersDefaultCollection: null, + fieldsNameCollection: null, + fieldsAliasCollection: null, + fieldsDescriptionCollection: null, + fieldsPrimaryKeyCollection: null ); } diff --git a/src/Cli.Tests/UtilsTests.cs b/src/Cli.Tests/UtilsTests.cs index 486d09f253..b02649339d 100644 --- a/src/Cli.Tests/UtilsTests.cs +++ b/src/Cli.Tests/UtilsTests.cs @@ -137,13 +137,13 @@ public void TestConfigSelectionBasedOnCliPrecedence( public void TestTryParseSourceParameterDictionary() { IEnumerable? parametersList = new string[] { "param1:123", "param2:-243", "param3:220.12", "param4:True", "param5:dab" }; - Assert.IsTrue(TryParseSourceParameterDictionary(parametersList, out Dictionary? sourceParameters)); + Assert.IsTrue(TryParseSourceParameterDictionary(parametersList, out List? 
sourceParameters)); Assert.IsNotNull(sourceParameters); - Assert.AreEqual(sourceParameters.GetValueOrDefault("param1"), 123); - Assert.AreEqual(sourceParameters.GetValueOrDefault("param2"), -243); - Assert.AreEqual(sourceParameters.GetValueOrDefault("param3"), 220.12); - Assert.AreEqual(sourceParameters.GetValueOrDefault("param4"), true); - Assert.AreEqual(sourceParameters.GetValueOrDefault("param5"), "dab"); + Assert.AreEqual(123, Convert.ToInt32(sourceParameters.First(p => p.Name == "param1").Default)); + Assert.AreEqual(-243, Convert.ToInt32(sourceParameters.First(p => p.Name == "param2").Default)); + Assert.AreEqual(220.12, Convert.ToDouble(sourceParameters.First(p => p.Name == "param3").Default)); + Assert.AreEqual(true, Convert.ToBoolean(sourceParameters.First(p => p.Name == "param4").Default)); + Assert.AreEqual("dab", Convert.ToString(sourceParameters.First(p => p.Name == "param5").Default)); } /// diff --git a/src/Cli.Tests/ValidateConfigTests.cs b/src/Cli.Tests/ValidateConfigTests.cs index 29826635b1..e40a32e291 100644 --- a/src/Cli.Tests/ValidateConfigTests.cs +++ b/src/Cli.Tests/ValidateConfigTests.cs @@ -1,6 +1,10 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. +using Azure.DataApiBuilder.Core.Configurations; +using Azure.DataApiBuilder.Core.Models; +using Serilog; + namespace Cli.Tests; /// /// Test for config file initialization. @@ -9,7 +13,7 @@ namespace Cli.Tests; public class ValidateConfigTests : VerifyBase { - private IFileSystem? _fileSystem; + private MockFileSystem? _fileSystem; private FileSystemRuntimeConfigLoader? 
_runtimeConfigLoader; [TestInitialize] @@ -237,7 +241,7 @@ public void TestValidateConfigFailsWithNoDataSource() /// "object": "s001.book", /// "parameters": { /// "param1": "@env('sp_param1_int')", // INT - /// "param2": "@env('sp_param2_bool')" // BOOL + /// "param2": "@env('sp_param3_bool')" // BOOL /// } /// } /// @@ -248,7 +252,7 @@ public void ValidateConfigSchemaWhereConfigReferencesEnvironmentVariables() Environment.SetEnvironmentVariable($"connection-string", SAMPLE_TEST_CONN_STRING); Environment.SetEnvironmentVariable($"database-type", "mssql"); Environment.SetEnvironmentVariable($"sp_param1_int", "123"); - Environment.SetEnvironmentVariable($"sp_param2_bool", "true"); + Environment.SetEnvironmentVariable($"sp_param3_bool", "true"); // Capture console output to get error messaging. StringWriter writer = new(); @@ -271,4 +275,88 @@ public void ValidateConfigSchemaWhereConfigReferencesEnvironmentVariables() condition: loggerOutput.Contains("The config satisfies the schema requirements."), message: "RuntimeConfigValidator::ValidateConfigSchema(...) didn't communicate successful config schema validation."); } + + /// + /// Tests that validation fails when AKV options are configured without an endpoint. + /// + [TestMethod] + public async Task TestValidateAKVOptionsWithoutEndpointFails() + { + // Arrange + ConfigureOptions options = new( + azureKeyVaultRetryPolicyMaxCount: 1, + azureKeyVaultRetryPolicyDelaySeconds: 1, + azureKeyVaultRetryPolicyMaxDelaySeconds: 1, + azureKeyVaultRetryPolicyMode: AKVRetryPolicyMode.Exponential, + azureKeyVaultRetryPolicyNetworkTimeoutSeconds: 1, + config: TEST_RUNTIME_CONFIG_FILE + ); + + // Act + await ValidatePropertyOptionsFails(options); + } + + /// + /// Tests that validation fails when Azure Log Analytics options are configured without the Auth options. 
+ /// + [TestMethod] + public async Task TestValidateAzureLogAnalyticsOptionsWithoutAuthFails() + { + // Arrange + ConfigureOptions options = new( + azureLogAnalyticsEnabled: CliBool.True, + azureLogAnalyticsDabIdentifier: "dab-identifier-test", + azureLogAnalyticsFlushIntervalSeconds: 1, + config: TEST_RUNTIME_CONFIG_FILE + ); + + // Act + await ValidatePropertyOptionsFails(options); + } + + /// + /// Tests that validation fails when File Sink options are configured without the 'path' property. + /// + [TestMethod] + public async Task TestValidateFileSinkOptionsWithoutPathFails() + { + // Arrange + ConfigureOptions options = new( + fileSinkEnabled: CliBool.True, + fileSinkRollingInterval: RollingInterval.Day, + fileSinkRetainedFileCountLimit: 1, + fileSinkFileSizeLimitBytes: 1024, + config: TEST_RUNTIME_CONFIG_FILE + ); + + // Act + await ValidatePropertyOptionsFails(options); + } + + /// + /// Helper function that ensures properties with missing options fail validation. + /// + private async Task ValidatePropertyOptionsFails(ConfigureOptions options) + { + _fileSystem!.AddFile(TEST_RUNTIME_CONFIG_FILE, new MockFileData(INITIAL_CONFIG)); + Assert.IsTrue(_fileSystem!.File.Exists(TEST_RUNTIME_CONFIG_FILE)); + Mock mockRuntimeConfigProvider = new(_runtimeConfigLoader); + RuntimeConfigValidator validator = new(mockRuntimeConfigProvider.Object, _fileSystem, new Mock>().Object); + + Mock mockLoggerFactory = new(); + Mock> mockLogger = new(); + mockLoggerFactory + .Setup(factory => factory.CreateLogger(typeof(JsonConfigSchemaValidator).FullName!)) + .Returns(mockLogger.Object); + + // Act: Attempts to add File Sink options without empty path + bool isSuccess = TryConfigureSettings(options, _runtimeConfigLoader!, _fileSystem!); + + // Assert: Settings are configured, config parses, validation fails. 
+ Assert.IsTrue(isSuccess); + string updatedConfig = _fileSystem!.File.ReadAllText(TEST_RUNTIME_CONFIG_FILE); + Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(updatedConfig, out RuntimeConfig? config)); + JsonSchemaValidationResult result = await validator.ValidateConfigSchema(config, TEST_RUNTIME_CONFIG_FILE, mockLoggerFactory.Object); + Assert.IsFalse(result.IsValid); + } } diff --git a/src/Cli/Commands/AddOptions.cs b/src/Cli/Commands/AddOptions.cs index 26efee7a2f..b7d9fbeb08 100644 --- a/src/Cli/Commands/AddOptions.cs +++ b/src/Cli/Commands/AddOptions.cs @@ -34,22 +34,43 @@ public AddOptions( string? policyDatabase, string? cacheEnabled, string? cacheTtl, - string? config) - : base(entity, - sourceType, - sourceParameters, - sourceKeyFields, - restRoute, - restMethodsForStoredProcedure, - graphQLType, - graphQLOperationForStoredProcedure, - fieldsToInclude, - fieldsToExclude, - policyRequest, - policyDatabase, - cacheEnabled, - cacheTtl, - config) + string? description, + IEnumerable? parametersNameCollection, + IEnumerable? parametersDescriptionCollection, + IEnumerable? parametersRequiredCollection, + IEnumerable? parametersDefaultCollection, + IEnumerable? fieldsNameCollection, + IEnumerable? fieldsAliasCollection, + IEnumerable? fieldsDescriptionCollection, + IEnumerable? fieldsPrimaryKeyCollection, + string? 
config + ) + : base( + entity, + sourceType, + sourceParameters, + sourceKeyFields, + restRoute, + restMethodsForStoredProcedure, + graphQLType, + graphQLOperationForStoredProcedure, + fieldsToInclude, + fieldsToExclude, + policyRequest, + policyDatabase, + cacheEnabled, + cacheTtl, + description, + parametersNameCollection, + parametersDescriptionCollection, + parametersRequiredCollection, + parametersDefaultCollection, + fieldsNameCollection, + fieldsAliasCollection, + fieldsDescriptionCollection, + fieldsPrimaryKeyCollection, + config + ) { Source = source; Permissions = permissions; @@ -72,12 +93,14 @@ public int Handler(ILogger logger, FileSystemRuntimeConfigLoader loader, IFileSy bool isSuccess = ConfigGenerator.TryAddEntityToConfigWithOptions(this, loader, fileSystem); if (isSuccess) { - logger.LogInformation("Added new entity: {Entity} with source: {Source} and permissions: {permissions}.", Entity, Source, string.Join(SEPARATOR, Permissions)); + logger.LogInformation("Added new entity: {Entity} with source: {Source} and permissions: {permissions}.", + Entity, Source, string.Join(SEPARATOR, Permissions)); logger.LogInformation("SUGGESTION: Use 'dab update [entity-name] [options]' to update any entities in your config."); } else { - logger.LogError("Could not add entity: {Entity} with source: {Source} and permissions: {permissions}.", Entity, Source, string.Join(SEPARATOR, Permissions)); + logger.LogError("Could not add entity: {Entity} with source: {Source} and permissions: {permissions}.", + Entity, Source, string.Join(SEPARATOR, Permissions)); } return isSuccess ? 
CliReturnCode.SUCCESS : CliReturnCode.GENERAL_ERROR; diff --git a/src/Cli/Commands/ConfigureOptions.cs b/src/Cli/Commands/ConfigureOptions.cs index a11d6b65f9..60cb12c3f8 100644 --- a/src/Cli/Commands/ConfigureOptions.cs +++ b/src/Cli/Commands/ConfigureOptions.cs @@ -8,7 +8,9 @@ using Cli.Constants; using CommandLine; using Microsoft.Extensions.Logging; +using Serilog; using static Cli.Utils; +using ILogger = Microsoft.Extensions.Logging.ILogger; namespace Cli.Commands { @@ -34,6 +36,15 @@ public ConfigureOptions( bool? runtimeRestEnabled = null, string? runtimeRestPath = null, bool? runtimeRestRequestBodyStrict = null, + bool? runtimeMcpEnabled = null, + string? runtimeMcpPath = null, + bool? runtimeMcpDmlToolsEnabled = null, + bool? runtimeMcpDmlToolsDescribeEntitiesEnabled = null, + bool? runtimeMcpDmlToolsCreateRecordEnabled = null, + bool? runtimeMcpDmlToolsReadRecordsEnabled = null, + bool? runtimeMcpDmlToolsUpdateRecordEnabled = null, + bool? runtimeMcpDmlToolsDeleteRecordEnabled = null, + bool? runtimeMcpDmlToolsExecuteEntityEnabled = null, bool? runtimeCacheEnabled = null, int? runtimeCacheTtl = null, HostMode? runtimeHostMode = null, @@ -42,6 +53,23 @@ public ConfigureOptions( string? runtimeHostAuthenticationProvider = null, string? runtimeHostAuthenticationJwtAudience = null, string? runtimeHostAuthenticationJwtIssuer = null, + string? azureKeyVaultEndpoint = null, + AKVRetryPolicyMode? azureKeyVaultRetryPolicyMode = null, + int? azureKeyVaultRetryPolicyMaxCount = null, + int? azureKeyVaultRetryPolicyDelaySeconds = null, + int? azureKeyVaultRetryPolicyMaxDelaySeconds = null, + int? azureKeyVaultRetryPolicyNetworkTimeoutSeconds = null, + CliBool? azureLogAnalyticsEnabled = null, + string? azureLogAnalyticsDabIdentifier = null, + int? azureLogAnalyticsFlushIntervalSeconds = null, + string? azureLogAnalyticsCustomTableName = null, + string? azureLogAnalyticsDcrImmutableId = null, + string? azureLogAnalyticsDceEndpoint = null, + CliBool? 
fileSinkEnabled = null, + string? fileSinkPath = null, + RollingInterval? fileSinkRollingInterval = null, + int? fileSinkRetainedFileCountLimit = null, + long? fileSinkFileSizeLimitBytes = null, string? config = null) : base(config) { @@ -62,6 +90,16 @@ public ConfigureOptions( RuntimeRestEnabled = runtimeRestEnabled; RuntimeRestPath = runtimeRestPath; RuntimeRestRequestBodyStrict = runtimeRestRequestBodyStrict; + // Mcp + RuntimeMcpEnabled = runtimeMcpEnabled; + RuntimeMcpPath = runtimeMcpPath; + RuntimeMcpDmlToolsEnabled = runtimeMcpDmlToolsEnabled; + RuntimeMcpDmlToolsDescribeEntitiesEnabled = runtimeMcpDmlToolsDescribeEntitiesEnabled; + RuntimeMcpDmlToolsCreateRecordEnabled = runtimeMcpDmlToolsCreateRecordEnabled; + RuntimeMcpDmlToolsReadRecordsEnabled = runtimeMcpDmlToolsReadRecordsEnabled; + RuntimeMcpDmlToolsUpdateRecordEnabled = runtimeMcpDmlToolsUpdateRecordEnabled; + RuntimeMcpDmlToolsDeleteRecordEnabled = runtimeMcpDmlToolsDeleteRecordEnabled; + RuntimeMcpDmlToolsExecuteEntityEnabled = runtimeMcpDmlToolsExecuteEntityEnabled; // Cache RuntimeCacheEnabled = runtimeCacheEnabled; RuntimeCacheTTL = runtimeCacheTtl; @@ -72,6 +110,26 @@ public ConfigureOptions( RuntimeHostAuthenticationProvider = runtimeHostAuthenticationProvider; RuntimeHostAuthenticationJwtAudience = runtimeHostAuthenticationJwtAudience; RuntimeHostAuthenticationJwtIssuer = runtimeHostAuthenticationJwtIssuer; + // Azure Key Vault + AzureKeyVaultEndpoint = azureKeyVaultEndpoint; + AzureKeyVaultRetryPolicyMode = azureKeyVaultRetryPolicyMode; + AzureKeyVaultRetryPolicyMaxCount = azureKeyVaultRetryPolicyMaxCount; + AzureKeyVaultRetryPolicyDelaySeconds = azureKeyVaultRetryPolicyDelaySeconds; + AzureKeyVaultRetryPolicyMaxDelaySeconds = azureKeyVaultRetryPolicyMaxDelaySeconds; + AzureKeyVaultRetryPolicyNetworkTimeoutSeconds = azureKeyVaultRetryPolicyNetworkTimeoutSeconds; + // Azure Log Analytics + AzureLogAnalyticsEnabled = azureLogAnalyticsEnabled; + AzureLogAnalyticsDabIdentifier = 
azureLogAnalyticsDabIdentifier; + AzureLogAnalyticsFlushIntervalSeconds = azureLogAnalyticsFlushIntervalSeconds; + AzureLogAnalyticsCustomTableName = azureLogAnalyticsCustomTableName; + AzureLogAnalyticsDcrImmutableId = azureLogAnalyticsDcrImmutableId; + AzureLogAnalyticsDceEndpoint = azureLogAnalyticsDceEndpoint; + // File + FileSinkEnabled = fileSinkEnabled; + FileSinkPath = fileSinkPath; + FileSinkRollingInterval = fileSinkRollingInterval; + FileSinkRetainedFileCountLimit = fileSinkRetainedFileCountLimit; + FileSinkFileSizeLimitBytes = fileSinkFileSizeLimitBytes; } [Option("data-source.database-type", Required = false, HelpText = "Database type. Allowed values: MSSQL, PostgreSQL, CosmosDB_NoSQL, MySQL.")] @@ -116,6 +174,33 @@ public ConfigureOptions( [Option("runtime.rest.request-body-strict", Required = false, HelpText = "Prohibit extraneous REST request body fields. Default: true (boolean).")] public bool? RuntimeRestRequestBodyStrict { get; } + [Option("runtime.mcp.enabled", Required = false, HelpText = "Enable DAB's MCP endpoint. Default: true (boolean).")] + public bool? RuntimeMcpEnabled { get; } + + [Option("runtime.mcp.path", Required = false, HelpText = "Customize DAB's MCP endpoint path. Default: '/mcp' Conditions: Prefix path with '/'.")] + public string? RuntimeMcpPath { get; } + + [Option("runtime.mcp.dml-tools.enabled", Required = false, HelpText = "Enable DAB's MCP DML tools endpoint. Default: true (boolean).")] + public bool? RuntimeMcpDmlToolsEnabled { get; } + + [Option("runtime.mcp.dml-tools.describe-entities.enabled", Required = false, HelpText = "Enable DAB's MCP describe entities tool. Default: true (boolean).")] + public bool? RuntimeMcpDmlToolsDescribeEntitiesEnabled { get; } + + [Option("runtime.mcp.dml-tools.create-record.enabled", Required = false, HelpText = "Enable DAB's MCP create record tool. Default: true (boolean).")] + public bool? 
RuntimeMcpDmlToolsCreateRecordEnabled { get; } + + [Option("runtime.mcp.dml-tools.read-records.enabled", Required = false, HelpText = "Enable DAB's MCP read record tool. Default: true (boolean).")] + public bool? RuntimeMcpDmlToolsReadRecordsEnabled { get; } + + [Option("runtime.mcp.dml-tools.update-record.enabled", Required = false, HelpText = "Enable DAB's MCP update record tool. Default: true (boolean).")] + public bool? RuntimeMcpDmlToolsUpdateRecordEnabled { get; } + + [Option("runtime.mcp.dml-tools.delete-record.enabled", Required = false, HelpText = "Enable DAB's MCP delete record tool. Default: true (boolean).")] + public bool? RuntimeMcpDmlToolsDeleteRecordEnabled { get; } + + [Option("runtime.mcp.dml-tools.execute-entity.enabled", Required = false, HelpText = "Enable DAB's MCP execute entity tool. Default: true (boolean).")] + public bool? RuntimeMcpDmlToolsExecuteEntityEnabled { get; } + [Option("runtime.cache.enabled", Required = false, HelpText = "Enable DAB's cache globally. (You must also enable each entity's cache separately.). Default: false (boolean).")] public bool? RuntimeCacheEnabled { get; } @@ -140,6 +225,57 @@ public ConfigureOptions( [Option("runtime.host.authentication.jwt.issuer", Required = false, HelpText = "Configure the entity that issued the Jwt Token.")] public string? RuntimeHostAuthenticationJwtIssuer { get; } + [Option("azure-key-vault.endpoint", Required = false, HelpText = "Configure the Azure Key Vault endpoint URL.")] + public string? AzureKeyVaultEndpoint { get; } + + [Option("azure-key-vault.retry-policy.mode", Required = false, HelpText = "Configure the retry policy mode. Allowed values: fixed, exponential. Default: exponential.")] + public AKVRetryPolicyMode? AzureKeyVaultRetryPolicyMode { get; } + + [Option("azure-key-vault.retry-policy.max-count", Required = false, HelpText = "Configure the maximum number of retry attempts. Default: 3.")] + public int? 
AzureKeyVaultRetryPolicyMaxCount { get; } + + [Option("azure-key-vault.retry-policy.delay-seconds", Required = false, HelpText = "Configure the initial delay between retries in seconds. Default: 1.")] + public int? AzureKeyVaultRetryPolicyDelaySeconds { get; } + + [Option("azure-key-vault.retry-policy.max-delay-seconds", Required = false, HelpText = "Configure the maximum delay between retries in seconds (for exponential mode). Default: 60.")] + public int? AzureKeyVaultRetryPolicyMaxDelaySeconds { get; } + + [Option("azure-key-vault.retry-policy.network-timeout-seconds", Required = false, HelpText = "Configure the network timeout for requests in seconds. Default: 60.")] + public int? AzureKeyVaultRetryPolicyNetworkTimeoutSeconds { get; } + + [Option("runtime.telemetry.azure-log-analytics.enabled", Required = false, HelpText = "Enable/Disable Azure Log Analytics. Default: False (boolean)")] + public CliBool? AzureLogAnalyticsEnabled { get; } + + [Option("runtime.telemetry.azure-log-analytics.dab-identifier", Required = false, HelpText = "Configure DAB Identifier to allow user to differentiate which logs come from DAB in Azure Log Analytics. Default: DABLogs")] + public string? AzureLogAnalyticsDabIdentifier { get; } + + [Option("runtime.telemetry.azure-log-analytics.flush-interval-seconds", Required = false, HelpText = "Configure Flush Interval in seconds for Azure Log Analytics to specify the time interval to send the telemetry data. Default: 5")] + public int? AzureLogAnalyticsFlushIntervalSeconds { get; } + + [Option("runtime.telemetry.azure-log-analytics.auth.custom-table-name", Required = false, HelpText = "Configure Custom Table Name for Azure Log Analytics used to find table to connect")] + public string?
AzureLogAnalyticsCustomTableName { get; } + + [Option("runtime.telemetry.azure-log-analytics.auth.dcr-immutable-id", Required = false, HelpText = "Configure DCR Immutable ID for Azure Log Analytics to find the data collection rule that defines how data is collected")] + public string? AzureLogAnalyticsDcrImmutableId { get; } + + [Option("runtime.telemetry.azure-log-analytics.auth.dce-endpoint", Required = false, HelpText = "Configure DCE Endpoint for Azure Log Analytics to find table to send telemetry data")] + public string? AzureLogAnalyticsDceEndpoint { get; } + + [Option("runtime.telemetry.file.enabled", Required = false, HelpText = "Enable/Disable File Sink logging. Default: False (boolean)")] + public CliBool? FileSinkEnabled { get; } + + [Option("runtime.telemetry.file.path", Required = false, HelpText = "Configure path for File Sink logging. Default: /logs/dab-log.txt")] + public string? FileSinkPath { get; } + + [Option("runtime.telemetry.file.rolling-interval", Required = false, HelpText = "Configure rolling interval for File Sink logging. Default: Day")] + public RollingInterval? FileSinkRollingInterval { get; } + + [Option("runtime.telemetry.file.retained-file-count-limit", Required = false, HelpText = "Configure maximum number of retained files. Default: 1")] + public int? FileSinkRetainedFileCountLimit { get; } + + [Option("runtime.telemetry.file.file-size-limit-bytes", Required = false, HelpText = "Configure maximum file size limit in bytes. Default: 1048576")] + public long? FileSinkFileSizeLimitBytes { get; } + public int Handler(ILogger logger, FileSystemRuntimeConfigLoader loader, IFileSystem fileSystem) { logger.LogInformation("{productName} {version}", PRODUCT_NAME, ProductInfo.GetProductVersion()); diff --git a/src/Cli/Commands/EntityOptions.cs b/src/Cli/Commands/EntityOptions.cs index 737c9bd047..7f26816800 100644 --- a/src/Cli/Commands/EntityOptions.cs +++ b/src/Cli/Commands/EntityOptions.cs @@ -25,7 +25,17 @@ public EntityOptions( string? 
policyDatabase, string? cacheEnabled, string? cacheTtl, - string? config) + string? description, + IEnumerable? parametersNameCollection, + IEnumerable? parametersDescriptionCollection, + IEnumerable? parametersRequiredCollection, + IEnumerable? parametersDefaultCollection, + IEnumerable? fieldsNameCollection, + IEnumerable? fieldsAliasCollection, + IEnumerable? fieldsDescriptionCollection, + IEnumerable? fieldsPrimaryKeyCollection, + string? config + ) : base(config) { Entity = entity; @@ -42,6 +52,15 @@ public EntityOptions( PolicyDatabase = policyDatabase; CacheEnabled = cacheEnabled; CacheTtl = cacheTtl; + Description = description; + ParametersNameCollection = parametersNameCollection; + ParametersDescriptionCollection = parametersDescriptionCollection; + ParametersRequiredCollection = parametersRequiredCollection; + ParametersDefaultCollection = parametersDefaultCollection; + FieldsNameCollection = fieldsNameCollection; + FieldsAliasCollection = fieldsAliasCollection; + FieldsDescriptionCollection = fieldsDescriptionCollection; + FieldsPrimaryKeyCollection = fieldsPrimaryKeyCollection; } // Entity is required but we have made required as false to have custom error message (more user friendly), if not provided. @@ -86,5 +105,32 @@ public EntityOptions( [Option("cache.ttl", Required = false, HelpText = "Specify time to live in seconds for cache entries for Entity.")] public string? CacheTtl { get; } + + [Option("description", Required = false, HelpText = "Description of the entity.")] + public string? Description { get; } + + [Option("parameters.name", Required = false, Separator = ',', HelpText = "Comma-separated list of parameter names for stored procedure.")] + public IEnumerable? ParametersNameCollection { get; } + + [Option("parameters.description", Required = false, Separator = ',', HelpText = "Comma-separated list of parameter descriptions for stored procedure.")] + public IEnumerable? 
ParametersDescriptionCollection { get; } + + [Option("parameters.required", Required = false, Separator = ',', HelpText = "Comma-separated list of parameter required flags (true/false) for stored procedure.")] + public IEnumerable? ParametersRequiredCollection { get; } + + [Option("parameters.default", Required = false, Separator = ',', HelpText = "Comma-separated list of parameter default values for stored procedure.")] + public IEnumerable? ParametersDefaultCollection { get; } + + [Option("fields.name", Required = false, Separator = ',', HelpText = "Name of the database column to expose as a field.")] + public IEnumerable? FieldsNameCollection { get; } + + [Option("fields.alias", Required = false, Separator = ',', HelpText = "Alias for the field.")] + public IEnumerable? FieldsAliasCollection { get; } + + [Option("fields.description", Required = false, Separator = ',', HelpText = "Description for the field.")] + public IEnumerable? FieldsDescriptionCollection { get; } + + [Option("fields.primary-key", Required = false, Separator = ',', HelpText = "Set this field as a primary key.")] + public IEnumerable? FieldsPrimaryKeyCollection { get; } } } diff --git a/src/Cli/Commands/InitOptions.cs b/src/Cli/Commands/InitOptions.cs index 5d5608a200..91786d99ff 100644 --- a/src/Cli/Commands/InitOptions.cs +++ b/src/Cli/Commands/InitOptions.cs @@ -35,8 +35,11 @@ public InitOptions( bool restDisabled = false, string graphQLPath = GraphQLRuntimeOptions.DEFAULT_PATH, bool graphqlDisabled = false, + string mcpPath = McpRuntimeOptions.DEFAULT_PATH, + bool mcpDisabled = false, CliBool restEnabled = CliBool.None, CliBool graphqlEnabled = CliBool.None, + CliBool mcpEnabled = CliBool.None, CliBool restRequestBodyStrict = CliBool.None, CliBool multipleCreateOperationEnabled = CliBool.None, string? 
config = null) @@ -58,8 +61,11 @@ public InitOptions( RestDisabled = restDisabled; GraphQLPath = graphQLPath; GraphQLDisabled = graphqlDisabled; + McpPath = mcpPath; + McpDisabled = mcpDisabled; RestEnabled = restEnabled; GraphQLEnabled = graphqlEnabled; + McpEnabled = mcpEnabled; RestRequestBodyStrict = restRequestBodyStrict; MultipleCreateOperationEnabled = multipleCreateOperationEnabled; } @@ -112,12 +118,21 @@ public InitOptions( [Option("graphql.disabled", Default = false, Required = false, HelpText = "Disables GraphQL endpoint for all entities.")] public bool GraphQLDisabled { get; } + [Option("mcp.path", Default = McpRuntimeOptions.DEFAULT_PATH, Required = false, HelpText = "Specify the MCP endpoint's default prefix.")] + public string McpPath { get; } + + [Option("mcp.disabled", Default = false, Required = false, HelpText = "Disables MCP endpoint for all entities.")] + public bool McpDisabled { get; } + [Option("rest.enabled", Required = false, HelpText = "(Default: true) Enables REST endpoint for all entities. Supported values: true, false.")] public CliBool RestEnabled { get; } [Option("graphql.enabled", Required = false, HelpText = "(Default: true) Enables GraphQL endpoint for all entities. Supported values: true, false.")] public CliBool GraphQLEnabled { get; } + [Option("mcp.enabled", Required = false, HelpText = "(Default: true) Enables MCP endpoint for all entities. Supported values: true, false.")] + public CliBool McpEnabled { get; } + // Since the rest.request-body-strict option does not have a default value, it is required to specify a value for this option if it is // included in the init command. 
[Option("rest.request-body-strict", Required = false, HelpText = "(Default: true) Allow extraneous fields in the request body for REST.")] diff --git a/src/Cli/Commands/UpdateOptions.cs b/src/Cli/Commands/UpdateOptions.cs index f757117f08..fe1664c5bb 100644 --- a/src/Cli/Commands/UpdateOptions.cs +++ b/src/Cli/Commands/UpdateOptions.cs @@ -42,7 +42,16 @@ public UpdateOptions( string? policyDatabase, string? cacheEnabled, string? cacheTtl, - string config) + string? description, + IEnumerable? parametersNameCollection, + IEnumerable? parametersDescriptionCollection, + IEnumerable? parametersRequiredCollection, + IEnumerable? parametersDefaultCollection, + IEnumerable? fieldsNameCollection, + IEnumerable? fieldsAliasCollection, + IEnumerable? fieldsDescriptionCollection, + IEnumerable? fieldsPrimaryKeyCollection, + string? config) : base(entity, sourceType, sourceParameters, @@ -57,6 +66,15 @@ public UpdateOptions( policyDatabase, cacheEnabled, cacheTtl, + description, + parametersNameCollection, + parametersDescriptionCollection, + parametersRequiredCollection, + parametersDefaultCollection, + fieldsNameCollection, + fieldsAliasCollection, + fieldsDescriptionCollection, + fieldsPrimaryKeyCollection, config) { Source = source; diff --git a/src/Cli/ConfigGenerator.cs b/src/Cli/ConfigGenerator.cs index 7655b84cee..9a56f83c4a 100644 --- a/src/Cli/ConfigGenerator.cs +++ b/src/Cli/ConfigGenerator.cs @@ -13,6 +13,7 @@ using Azure.DataApiBuilder.Service; using Cli.Commands; using Microsoft.Extensions.Logging; +using Serilog; using static Cli.Utils; namespace Cli @@ -88,6 +89,7 @@ public static bool TryCreateRuntimeConfig(InitOptions options, FileSystemRuntime DatabaseType dbType = options.DatabaseType; string? restPath = options.RestPath; string graphQLPath = options.GraphQLPath; + string mcpPath = options.McpPath; string? 
runtimeBaseRoute = options.RuntimeBaseRoute; Dictionary dbOptions = new(); @@ -107,9 +109,10 @@ public static bool TryCreateRuntimeConfig(InitOptions options, FileSystemRuntime " We recommend that you use the --graphql.enabled option instead."); } - bool restEnabled, graphQLEnabled; + bool restEnabled, graphQLEnabled, mcpEnabled; if (!TryDetermineIfApiIsEnabled(options.RestDisabled, options.RestEnabled, ApiType.REST, out restEnabled) || - !TryDetermineIfApiIsEnabled(options.GraphQLDisabled, options.GraphQLEnabled, ApiType.GraphQL, out graphQLEnabled)) + !TryDetermineIfApiIsEnabled(options.GraphQLDisabled, options.GraphQLEnabled, ApiType.GraphQL, out graphQLEnabled) || + !TryDetermineIfMcpIsEnabled(options.McpEnabled, out mcpEnabled)) { return false; } @@ -261,6 +264,7 @@ public static bool TryCreateRuntimeConfig(InitOptions options, FileSystemRuntime Runtime: new( Rest: new(restEnabled, restPath ?? RestRuntimeOptions.DEFAULT_PATH, options.RestRequestBodyStrict is CliBool.False ? false : true), GraphQL: new(Enabled: graphQLEnabled, Path: graphQLPath, MultipleMutationOptions: multipleMutationOptions), + Mcp: new(mcpEnabled, mcpPath ?? McpRuntimeOptions.DEFAULT_PATH), Host: new( Cors: new(options.CorsOrigin?.ToArray() ?? Array.Empty()), Authentication: new( @@ -313,6 +317,17 @@ private static bool TryDetermineIfApiIsEnabled(bool apiDisabledOptionValue, CliB return true; } + /// + /// Helper method to determine if the mcp api is enabled or not based on the enabled/disabled options in the dab init command. + /// + /// True, if MCP is enabled + /// Out param isMcpEnabled + /// True if MCP is enabled + private static bool TryDetermineIfMcpIsEnabled(CliBool mcpEnabledOptionValue, out bool isMcpEnabled) + { + return TryDetermineIfApiIsEnabled(false, mcpEnabledOptionValue, ApiType.MCP, out isMcpEnabled); + } + /// /// Helper method to determine if the multiple create operation is enabled or not based on the inputs from dab init command. 
/// @@ -438,12 +453,14 @@ public static bool TryAddNewEntity(AddOptions options, RuntimeConfig initialRunt // Create new entity. Entity entity = new( Source: source, + Fields: null, Rest: restOptions, GraphQL: graphqlOptions, Permissions: permissionSettings, Relationships: null, Mappings: null, - Cache: cacheOptions); + Cache: cacheOptions, + Description: string.IsNullOrWhiteSpace(options.Description) ? null : options.Description); // Add entity to existing runtime config. IDictionary entities = new Dictionary(initialRuntimeConfig.Entities.Entities) @@ -457,6 +474,7 @@ public static bool TryAddNewEntity(AddOptions options, RuntimeConfig initialRunt /// /// This method creates the source object for a new entity /// if the given source fields specified by the user are valid. + /// Supports both old (dictionary) and new (ParameterMetadata list) parameter formats. /// public static bool TryCreateSourceObjectForNewEntity( AddOptions options, @@ -485,19 +503,55 @@ public static bool TryCreateSourceObjectForNewEntity( if (!VerifyCorrectPairingOfParameterAndKeyFieldsWithType( objectType, options.SourceParameters, + options.ParametersNameCollection, options.SourceKeyFields)) { return false; } - // Parses the string array to parameter Dictionary - if (!TryParseSourceParameterDictionary( - options.SourceParameters, - out Dictionary? parametersDictionary)) + // Check for both old and new parameter formats + bool hasOldParams = options.SourceParameters != null && options.SourceParameters.Any(); + bool hasNewParams = options.ParametersNameCollection != null && options.ParametersNameCollection.Any(); + + if (hasOldParams && hasNewParams) { + _logger.LogError("Cannot use both --source.params and --parameters.name/description/required/default together. Please use only one format."); return false; } + List? parameters = null; + if (hasNewParams) + { + // Parse new format + List names = options.ParametersNameCollection != null ? 
options.ParametersNameCollection.ToList() : new List(); + List descriptions = options.ParametersDescriptionCollection?.ToList() ?? new List(); + List requiredFlags = options.ParametersRequiredCollection?.ToList() ?? new List(); + List defaults = options.ParametersDefaultCollection?.ToList() ?? new List(); + + parameters = []; + for (int i = 0; i < names.Count; i++) + { + parameters.Add(new ParameterMetadata + { + Name = names[i], + Description = descriptions.ElementAtOrDefault(i), + Required = requiredFlags.ElementAtOrDefault(i)?.ToLower() == "true", + Default = defaults.ElementAtOrDefault(i) + }); + } + } + else if (hasOldParams) + { + // Parse old format and convert to new type + if (!TryParseSourceParameterDictionary(options.SourceParameters, out parameters)) + { + return false; + } + + _logger.LogWarning("The --source.params format is deprecated. Please use --parameters.name/description/required/default instead."); + + } + string[]? sourceKeyFields = null; if (options.SourceKeyFields is not null && options.SourceKeyFields.Any()) { @@ -508,7 +562,7 @@ public static bool TryCreateSourceObjectForNewEntity( if (!TryCreateSourceObject( options.Source, objectType, - parametersDictionary, + parameters, sourceKeyFields, out sourceObject)) { @@ -518,7 +572,6 @@ public static bool TryCreateSourceObjectForNewEntity( return true; } - /// /// Tries to update the runtime settings based on the provided runtime options. /// @@ -551,6 +604,11 @@ public static bool TryConfigureSettings(ConfigureOptions options, FileSystemRunt return false; } + if (!TryUpdateConfiguredAzureKeyVaultOptions(options, ref runtimeConfig)) + { + return false; + } + return WriteRuntimeConfigToFile(runtimeConfigFile, runtimeConfig, fileSystem); } @@ -737,6 +795,23 @@ private static bool TryUpdateConfiguredRuntimeOptions( } } + // MCP: Enabled and Path + if (options.RuntimeMcpEnabled != null || + options.RuntimeMcpPath != null) + { + McpRuntimeOptions updatedMcpOptions = runtimeConfig?.Runtime?.Mcp ?? 
new(); + bool status = TryUpdateConfiguredMcpValues(options, ref updatedMcpOptions); + + if (status) + { + runtimeConfig = runtimeConfig! with { Runtime = runtimeConfig.Runtime! with { Mcp = updatedMcpOptions } }; + } + else + { + return false; + } + } + // Cache: Enabled and TTL if (options.RuntimeCacheEnabled != null || options.RuntimeCacheTTL != null) @@ -773,6 +848,45 @@ private static bool TryUpdateConfiguredRuntimeOptions( } } + // Telemetry: Azure Log Analytics + if (options.AzureLogAnalyticsEnabled is not null || + options.AzureLogAnalyticsDabIdentifier is not null || + options.AzureLogAnalyticsFlushIntervalSeconds is not null || + options.AzureLogAnalyticsCustomTableName is not null || + options.AzureLogAnalyticsDcrImmutableId is not null || + options.AzureLogAnalyticsDceEndpoint is not null) + { + AzureLogAnalyticsOptions updatedAzureLogAnalyticsOptions = runtimeConfig?.Runtime?.Telemetry?.AzureLogAnalytics ?? new(); + bool status = TryUpdateConfiguredAzureLogAnalyticsOptions(options, ref updatedAzureLogAnalyticsOptions); + if (status) + { + runtimeConfig = runtimeConfig! with { Runtime = runtimeConfig.Runtime! with { Telemetry = runtimeConfig.Runtime!.Telemetry is not null ? runtimeConfig.Runtime!.Telemetry with { AzureLogAnalytics = updatedAzureLogAnalyticsOptions } : new TelemetryOptions(AzureLogAnalytics: updatedAzureLogAnalyticsOptions) } }; + } + else + { + return false; + } + } + + // Telemetry: File Sink + if (options.FileSinkEnabled is not null || + options.FileSinkPath is not null || + options.FileSinkRollingInterval is not null || + options.FileSinkRetainedFileCountLimit is not null || + options.FileSinkFileSizeLimitBytes is not null) + { + FileSinkOptions updatedFileSinkOptions = runtimeConfig?.Runtime?.Telemetry?.File ?? new(); + bool status = TryUpdateConfiguredFileOptions(options, ref updatedFileSinkOptions); + if (status) + { + runtimeConfig = runtimeConfig! with { Runtime = runtimeConfig.Runtime! 
with { Telemetry = runtimeConfig.Runtime!.Telemetry is not null ? runtimeConfig.Runtime!.Telemetry with { File = updatedFileSinkOptions } : new TelemetryOptions(File: updatedFileSinkOptions) } }; + } + else + { + return false; + } + } + return runtimeConfig != null; } @@ -839,7 +953,7 @@ private static bool TryUpdateConfiguredRestValues(ConfigureOptions options, ref /// /// options. /// updatedGraphQLOptions. - /// True if the value needs to be udpated in the runtime config, else false + /// True if the value needs to be updated in the runtime config, else false private static bool TryUpdateConfiguredGraphQLValues( ConfigureOptions options, ref GraphQLRuntimeOptions? updatedGraphQLOptions) @@ -898,6 +1012,142 @@ private static bool TryUpdateConfiguredGraphQLValues( } } + /// + /// Attempts to update the Config parameters in the Mcp runtime settings based on the provided value. + /// Validates that any user-provided values are valid and then returns true if the updated Mcp options + /// need to be overwritten on the existing config parameters + /// + /// options. + /// updatedMcpOptions + /// True if the value needs to be updated in the runtime config, else false + private static bool TryUpdateConfiguredMcpValues( + ConfigureOptions options, + ref McpRuntimeOptions updatedMcpOptions) + { + object? updatedValue; + + try + { + // Runtime.Mcp.Enabled + updatedValue = options?.RuntimeMcpEnabled; + if (updatedValue != null) + { + updatedMcpOptions = updatedMcpOptions! with { Enabled = (bool)updatedValue }; + _logger.LogInformation("Updated RuntimeConfig with Runtime.Mcp.Enabled as '{updatedValue}'", updatedValue); + } + + // Runtime.Mcp.Path + updatedValue = options?.RuntimeMcpPath; + if (updatedValue != null) + { + bool status = RuntimeConfigValidatorUtil.TryValidateUriComponent(uriComponent: (string)updatedValue, out string exceptionMessage); + if (status) + { + updatedMcpOptions = updatedMcpOptions! 
with { Path = (string)updatedValue }; + _logger.LogInformation("Updated RuntimeConfig with Runtime.Mcp.Path as '{updatedValue}'", updatedValue); + } + else + { + _logger.LogError("Failed to update Runtime.Mcp.Path as '{updatedValue}' due to exception message: {exceptionMessage}", updatedValue, exceptionMessage); + return false; + } + } + + // Handle DML tools configuration + bool hasToolUpdates = false; + DmlToolsConfig? currentDmlTools = updatedMcpOptions?.DmlTools; + + // If setting all tools at once + updatedValue = options?.RuntimeMcpDmlToolsEnabled; + if (updatedValue != null) + { + updatedMcpOptions = updatedMcpOptions! with { DmlTools = DmlToolsConfig.FromBoolean((bool)updatedValue) }; + _logger.LogInformation("Updated RuntimeConfig with Runtime.Mcp.Dml-Tools as '{updatedValue}'", updatedValue); + return true; // Return early since we're setting all tools at once + } + + // Handle individual tool updates + bool? describeEntities = currentDmlTools?.DescribeEntities; + bool? createRecord = currentDmlTools?.CreateRecord; + bool? readRecord = currentDmlTools?.ReadRecords; + bool? updateRecord = currentDmlTools?.UpdateRecord; + bool? deleteRecord = currentDmlTools?.DeleteRecord; + bool? 
executeEntity = currentDmlTools?.ExecuteEntity; + + updatedValue = options?.RuntimeMcpDmlToolsDescribeEntitiesEnabled; + if (updatedValue != null) + { + describeEntities = (bool)updatedValue; + hasToolUpdates = true; + _logger.LogInformation("Updated RuntimeConfig with runtime.mcp.dml-tools.describe-entities as '{updatedValue}'", updatedValue); + } + + updatedValue = options?.RuntimeMcpDmlToolsCreateRecordEnabled; + if (updatedValue != null) + { + createRecord = (bool)updatedValue; + hasToolUpdates = true; + _logger.LogInformation("Updated RuntimeConfig with runtime.mcp.dml-tools.create-record as '{updatedValue}'", updatedValue); + } + + updatedValue = options?.RuntimeMcpDmlToolsReadRecordsEnabled; + if (updatedValue != null) + { + readRecord = (bool)updatedValue; + hasToolUpdates = true; + _logger.LogInformation("Updated RuntimeConfig with runtime.mcp.dml-tools.read-records as '{updatedValue}'", updatedValue); + } + + updatedValue = options?.RuntimeMcpDmlToolsUpdateRecordEnabled; + if (updatedValue != null) + { + updateRecord = (bool)updatedValue; + hasToolUpdates = true; + _logger.LogInformation("Updated RuntimeConfig with runtime.mcp.dml-tools.update-record as '{updatedValue}'", updatedValue); + } + + updatedValue = options?.RuntimeMcpDmlToolsDeleteRecordEnabled; + if (updatedValue != null) + { + deleteRecord = (bool)updatedValue; + hasToolUpdates = true; + _logger.LogInformation("Updated RuntimeConfig with runtime.mcp.dml-tools.delete-record as '{updatedValue}'", updatedValue); + } + + updatedValue = options?.RuntimeMcpDmlToolsExecuteEntityEnabled; + if (updatedValue != null) + { + executeEntity = (bool)updatedValue; + hasToolUpdates = true; + _logger.LogInformation("Updated RuntimeConfig with runtime.mcp.dml-tools.execute-entity as '{updatedValue}'", updatedValue); + } + + if (hasToolUpdates) + { + updatedMcpOptions = updatedMcpOptions! 
with + { + DmlTools = new DmlToolsConfig + { + AllToolsEnabled = false, + DescribeEntities = describeEntities, + CreateRecord = createRecord, + ReadRecords = readRecord, + UpdateRecord = updateRecord, + DeleteRecord = deleteRecord, + ExecuteEntity = executeEntity + } + }; + } + + return true; + } + catch (Exception ex) + { + _logger.LogError("Failed to update RuntimeConfig.Mcp with exception message: {exceptionMessage}.", ex.Message); + return false; + } + } + /// /// Attempts to update the Config parameters in the Cache runtime settings based on the provided value. /// Validates user-provided parameters and then returns true if the updated Cache options @@ -905,7 +1155,7 @@ private static bool TryUpdateConfiguredGraphQLValues( /// /// options. /// updatedCacheOptions. - /// True if the value needs to be udpated in the runtime config, else false + /// True if the value needs to be updated in the runtime config, else false private static bool TryUpdateConfiguredCacheValues( ConfigureOptions options, ref RuntimeCacheOptions? updatedCacheOptions) @@ -954,7 +1204,7 @@ private static bool TryUpdateConfiguredCacheValues( /// /// options. /// updatedHostOptions. - /// True if the value needs to be udpated in the runtime config, else false + /// True if the value needs to be updated in the runtime config, else false private static bool TryUpdateConfiguredHostValues( ConfigureOptions options, ref HostOptions? updatedHostOptions) @@ -1090,6 +1340,158 @@ private static bool TryUpdateConfiguredHostValues( } } + /// + /// Attempts to update the Azure Log Analytics configuration options based on the provided values. + /// Validates that any user-provided parameter value is valid and updates the runtime configuration accordingly. + /// + /// The configuration options provided by the user. + /// The Azure Log Analytics options to be updated. + /// True if the Azure Log Analytics options were successfully configured; otherwise, false. 
+ private static bool TryUpdateConfiguredAzureLogAnalyticsOptions( + ConfigureOptions options, + ref AzureLogAnalyticsOptions azureLogAnalyticsOptions) + { + try + { + AzureLogAnalyticsAuthOptions? updatedAuthOptions = azureLogAnalyticsOptions.Auth; + + // Runtime.Telemetry.AzureLogAnalytics.Enabled + if (options.AzureLogAnalyticsEnabled is not null) + { + azureLogAnalyticsOptions = azureLogAnalyticsOptions with { Enabled = options.AzureLogAnalyticsEnabled is CliBool.True, UserProvidedEnabled = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.azure-log-analytics.enabled as '{options.AzureLogAnalyticsEnabled}'"); + } + + // Runtime.Telemetry.AzureLogAnalytics.DabIdentifier + if (options.AzureLogAnalyticsDabIdentifier is not null) + { + azureLogAnalyticsOptions = azureLogAnalyticsOptions with { DabIdentifier = options.AzureLogAnalyticsDabIdentifier, UserProvidedDabIdentifier = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.azure-log-analytics.dab-identifier as '{options.AzureLogAnalyticsDabIdentifier}'"); + } + + // Runtime.Telemetry.AzureLogAnalytics.FlushIntervalSeconds + if (options.AzureLogAnalyticsFlushIntervalSeconds is not null) + { + if (options.AzureLogAnalyticsFlushIntervalSeconds <= 0) + { + _logger.LogError("Failed to update configuration with runtime.telemetry.azure-log-analytics.flush-interval-seconds. 
Value must be a positive integer greater than 0."); + return false; + } + + azureLogAnalyticsOptions = azureLogAnalyticsOptions with { FlushIntervalSeconds = options.AzureLogAnalyticsFlushIntervalSeconds, UserProvidedFlushIntervalSeconds = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.azure-log-analytics.flush-interval-seconds as '{options.AzureLogAnalyticsFlushIntervalSeconds}'"); + } + + // Runtime.Telemetry.AzureLogAnalytics.Auth.CustomTableName + if (options.AzureLogAnalyticsCustomTableName is not null) + { + updatedAuthOptions = updatedAuthOptions is not null + ? updatedAuthOptions with { CustomTableName = options.AzureLogAnalyticsCustomTableName, UserProvidedCustomTableName = true } + : new AzureLogAnalyticsAuthOptions { CustomTableName = options.AzureLogAnalyticsCustomTableName, UserProvidedCustomTableName = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.azure-log-analytics.auth.custom-table-name as '{options.AzureLogAnalyticsCustomTableName}'"); + } + + // Runtime.Telemetry.AzureLogAnalytics.Auth.DcrImmutableId + if (options.AzureLogAnalyticsDcrImmutableId is not null) + { + updatedAuthOptions = updatedAuthOptions is not null + ? updatedAuthOptions with { DcrImmutableId = options.AzureLogAnalyticsDcrImmutableId, UserProvidedDcrImmutableId = true } + : new AzureLogAnalyticsAuthOptions { DcrImmutableId = options.AzureLogAnalyticsDcrImmutableId, UserProvidedDcrImmutableId = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.azure-log-analytics.auth.dcr-immutable-id as '{options.AzureLogAnalyticsDcrImmutableId}'"); + } + + // Runtime.Telemetry.AzureLogAnalytics.Auth.DceEndpoint + if (options.AzureLogAnalyticsDceEndpoint is not null) + { + updatedAuthOptions = updatedAuthOptions is not null + ? 
updatedAuthOptions with { DceEndpoint = options.AzureLogAnalyticsDceEndpoint, UserProvidedDceEndpoint = true } + : new AzureLogAnalyticsAuthOptions { DceEndpoint = options.AzureLogAnalyticsDceEndpoint, UserProvidedDceEndpoint = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.azure-log-analytics.auth.dce-endpoint as '{options.AzureLogAnalyticsDceEndpoint}'"); + } + + // Update Azure Log Analytics options with Auth options if it was modified + if (updatedAuthOptions is not null) + { + azureLogAnalyticsOptions = azureLogAnalyticsOptions with { Auth = updatedAuthOptions }; + } + + return true; + } + catch (Exception ex) + { + _logger.LogError($"Failed to update configuration with runtime.telemetry.azure-log-analytics. Exception message: {ex.Message}."); + return false; + } + } + + /// + /// Updates the file sink options in the configuration. + /// + /// The configuration options provided by the user. + /// The file sink options to be updated. + /// True if the options were successfully updated; otherwise, false. 
+ private static bool TryUpdateConfiguredFileOptions( + ConfigureOptions options, + ref FileSinkOptions fileOptions) + { + try + { + // Runtime.Telemetry.File.Enabled + if (options.FileSinkEnabled is not null) + { + fileOptions = fileOptions with { Enabled = options.FileSinkEnabled is CliBool.True, UserProvidedEnabled = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.file.enabled as '{options.FileSinkEnabled}'"); + } + + // Runtime.Telemetry.File.Path + if (options.FileSinkPath is not null) + { + fileOptions = fileOptions with { Path = options.FileSinkPath, UserProvidedPath = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.file.path as '{options.FileSinkPath}'"); + } + + // Runtime.Telemetry.File.RollingInterval + if (options.FileSinkRollingInterval is not null) + { + fileOptions = fileOptions with { RollingInterval = ((RollingInterval)options.FileSinkRollingInterval).ToString(), UserProvidedRollingInterval = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.file.rolling-interval as '{options.FileSinkRollingInterval}'"); + } + + // Runtime.Telemetry.File.RetainedFileCountLimit + if (options.FileSinkRetainedFileCountLimit is not null) + { + if (options.FileSinkRetainedFileCountLimit <= 0) + { + _logger.LogError("Failed to update configuration with runtime.telemetry.file.retained-file-count-limit. 
Value must be a positive integer greater than 0."); + return false; + } + + fileOptions = fileOptions with { RetainedFileCountLimit = (int)options.FileSinkRetainedFileCountLimit, UserProvidedRetainedFileCountLimit = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.file.retained-file-count-limit as '{options.FileSinkRetainedFileCountLimit}'"); + } + + // Runtime.Telemetry.File.FileSizeLimitBytes + if (options.FileSinkFileSizeLimitBytes is not null) + { + if (options.FileSinkFileSizeLimitBytes <= 0) + { + _logger.LogError("Failed to update configuration with runtime.telemetry.file.file-size-limit-bytes. Value must be a positive integer greater than 0."); + return false; + } + + fileOptions = fileOptions with { FileSizeLimitBytes = (long)options.FileSinkFileSizeLimitBytes, UserProvidedFileSizeLimitBytes = true }; + _logger.LogInformation($"Updated configuration with runtime.telemetry.file.file-size-limit-bytes as '{options.FileSinkFileSizeLimitBytes}'"); + } + + return true; + } + catch (Exception ex) + { + _logger.LogError($"Failed to update configuration with runtime.telemetry.file. Exception message: {ex.Message}."); + return false; + } + } + /// /// Parse permission string to create PermissionSetting array. /// @@ -1281,23 +1683,182 @@ public static bool TryUpdateExistingEntity(UpdateOptions options, RuntimeConfig updatedRelationships[options.Relationship] = new_relationship; } - if (options.Map is not null && options.Map.Any()) + bool hasFields = options.FieldsNameCollection != null && options.FieldsNameCollection.Count() > 0; + bool hasMappings = options.Map != null && options.Map.Any(); + bool hasKeyFields = options.SourceKeyFields != null && options.SourceKeyFields.Any(); + + List? 
fields; + if (hasFields) { - // Parsing mappings dictionary from Collection - if (!TryParseMappingDictionary(options.Map, out updatedMappings)) + if (hasMappings && hasKeyFields) + { + _logger.LogError("Entity cannot define 'fields', 'mappings', and 'key-fields' together. Please use only one."); + return false; + } + + if (hasMappings) + { + _logger.LogError("Entity cannot define both 'fields' and 'mappings'. Please use only one."); + return false; + } + + if (hasKeyFields) { + _logger.LogError("Entity cannot define both 'fields' and 'key-fields'. Please use only one."); return false; } + + // Merge updated fields with existing fields + List existingFields = entity.Fields?.ToList() ?? []; + List updatedFieldsList = ComposeFieldsFromOptions(options); + Dictionary updatedFieldsDict = updatedFieldsList.ToDictionary(f => f.Name, f => f); + List mergedFields = []; + + foreach (FieldMetadata field in existingFields) + { + if (updatedFieldsDict.TryGetValue(field.Name, out FieldMetadata? updatedField)) + { + mergedFields.Add(new FieldMetadata + { + Name = updatedField.Name, + Alias = updatedField.Alias ?? field.Alias, + Description = updatedField.Description ?? field.Description, + PrimaryKey = updatedField.PrimaryKey + }); + updatedFieldsDict.Remove(field.Name); // Remove so only new fields remain + } + else + { + mergedFields.Add(field); // Keep existing field + } + } + + // Add any new fields that didn't exist before + mergedFields.AddRange(updatedFieldsDict.Values); + + fields = mergedFields; + + // If user didn't mark any PK in fields, carry over existing source key-fields + if (!fields.Any(f => f.PrimaryKey) && updatedSource.KeyFields is { Length: > 0 }) + { + foreach (string k in updatedSource.KeyFields) + { + FieldMetadata? 
f = fields.FirstOrDefault(f => string.Equals(f.Name, k, StringComparison.OrdinalIgnoreCase)); + if (f is not null) + { + f.PrimaryKey = true; + } + else + { + fields.Add(new FieldMetadata { Name = k, PrimaryKey = true }); + } + } + } + + // Remove legacy props if fields present + updatedSource = updatedSource with { KeyFields = null }; + updatedMappings = null; + } + else if (hasMappings || hasKeyFields) + { + // If mappings or key-fields are provided, convert them to fields and remove legacy props + // Start with existing fields + List existingFields = entity.Fields?.ToList() ?? new List(); + + // Build a dictionary for quick lookup and merging + Dictionary fieldDict = existingFields + .ToDictionary(f => f.Name, StringComparer.OrdinalIgnoreCase); + + // Parse mappings from options + if (hasMappings) + { + if (options.Map is null || !TryParseMappingDictionary(options.Map, out updatedMappings)) + { + _logger.LogError("Failed to parse mappings from --map option."); + return false; + } + + foreach (KeyValuePair mapping in updatedMappings) + { + if (fieldDict.TryGetValue(mapping.Key, out FieldMetadata? existing) && existing != null) + { + // Update alias, preserve PK and description + existing.Alias = mapping.Value ?? existing.Alias; + } + else + { + // New field from mapping + fieldDict[mapping.Key] = new FieldMetadata + { + Name = mapping.Key, + Alias = mapping.Value + }; + } + } + } + + // Always carry over existing PKs on the entity/update, not only when the user re-supplies --source.key-fields. + string[]? existingKeys = updatedSource.KeyFields; + if (existingKeys is not null && existingKeys.Length > 0) + { + foreach (string key in existingKeys) + { + if (fieldDict.TryGetValue(key, out FieldMetadata? 
pkField) && pkField != null) + { + pkField.PrimaryKey = true; + } + else + { + fieldDict[key] = new FieldMetadata { Name = key, PrimaryKey = true }; + } + } + } + + // Final merged list, no duplicates + fields = fieldDict.Values.ToList(); + + // Remove legacy props only after we have safely embedded PKs into fields. + updatedSource = updatedSource with { KeyFields = null }; + updatedMappings = null; + } + else if (!hasFields && !hasMappings && !hasKeyFields && entity.Source.KeyFields?.Length > 0) + { + // If no fields, mappings, or key-fields are provided with update command, use the entity's key-fields added using add command. + fields = entity.Source.KeyFields.Select(k => new FieldMetadata + { + Name = k, + PrimaryKey = true + }).ToList(); + + updatedSource = updatedSource with { KeyFields = null }; + updatedMappings = null; + } + else + { + fields = entity.Fields?.ToList() ?? new List(); + if (entity.Mappings is not null || entity.Source?.KeyFields is not null) + { + _logger.LogWarning("Using legacy 'mappings' and 'key-fields' properties. Consider using 'fields' for new entities."); + } + } + + if (!ValidateFields(fields, out string errorMessage)) + { + _logger.LogError(errorMessage); + return false; } Entity updatedEntity = new( Source: updatedSource, + Fields: fields, Rest: updatedRestDetails, GraphQL: updatedGraphQLDetails, Permissions: updatedPermissions, Relationships: updatedRelationships, Mappings: updatedMappings, - Cache: updatedCacheOptions); + Cache: updatedCacheOptions, + Description: string.IsNullOrWhiteSpace(options.Description) ? entity.Description : options.Description + ); IDictionary entities = new Dictionary(initialConfig.Entities.Entities) { [options.Entity] = updatedEntity @@ -1454,10 +2015,12 @@ private static bool TryGetUpdatedSourceObjectWithOptions( string updatedSourceName = options.Source ?? entity.Source.Object; string[]? updatedKeyFields = entity.Source.KeyFields; EntitySourceType? 
updatedSourceType = entity.Source.Type; - Dictionary? updatedSourceParameters = entity.Source.Parameters; - // If SourceType provided by user is null, - // no update is required. + // Support for new parameter format + bool hasOldParams = options.SourceParameters is not null && options.SourceParameters.Any(); + bool hasNewParams = options.ParametersNameCollection is not null && options.ParametersNameCollection.Any(); + + // If SourceType provided by user is not null, update type if (options.SourceType is not null) { if (!EnumExtensions.TryDeserialize(options.SourceType, out EntitySourceType? deserializedEntityType)) @@ -1467,7 +2030,6 @@ private static bool TryGetUpdatedSourceObjectWithOptions( } updatedSourceType = (EntitySourceType)deserializedEntityType; - if (IsStoredProcedureConvertedToOtherTypes(entity, options) || IsEntityBeingConvertedToStoredProcedure(entity, options)) { _logger.LogWarning( @@ -1476,13 +2038,15 @@ private static bool TryGetUpdatedSourceObjectWithOptions( } } - // No need to validate parameter and key field usage when there are no changes to the source object defined in 'options' + // Validate correct pairing of parameters and key fields if ((options.SourceType is not null - || (options.SourceParameters is not null && options.SourceParameters.Any()) - || (options.SourceKeyFields is not null && options.SourceKeyFields.Any())) + || hasOldParams + || (options.SourceKeyFields is not null && options.SourceKeyFields.Any()) + || hasNewParams) && !VerifyCorrectPairingOfParameterAndKeyFieldsWithType( updatedSourceType, options.SourceParameters, + options.ParametersNameCollection, options.SourceKeyFields)) { return false; @@ -1490,35 +2054,139 @@ private static bool TryGetUpdatedSourceObjectWithOptions( // Changing source object from stored-procedure to table/view // should automatically update the parameters to be null. - // Similarly from table/view to stored-procedure, key-fields - // should be marked null. 
+ // Similarly from table/view to stored-procedure, key-fields should be marked null. if (EntitySourceType.StoredProcedure.Equals(updatedSourceType)) { updatedKeyFields = null; } else { - updatedSourceParameters = null; + hasOldParams = false; + hasNewParams = false; } - // If given SourceParameter is null or is Empty, no update is required. - // Else updatedSourceParameters will contain the parsed dictionary of parameters. - if (options.SourceParameters is not null && options.SourceParameters.Any() && - !TryParseSourceParameterDictionary(options.SourceParameters, out updatedSourceParameters)) + // Warn and error if both formats are provided + if (hasOldParams && hasNewParams) { + _logger.LogError("Cannot use both --source.params and --parameters.name/description/required/default together. Please use only one format."); return false; } + List? parameters = null; + + if (hasNewParams) + { + // Parse new format + List names = options.ParametersNameCollection != null ? options.ParametersNameCollection.ToList() : new List(); + List descriptions = options.ParametersDescriptionCollection?.ToList() ?? new List(); + List requiredFlags = options.ParametersRequiredCollection?.ToList() ?? new List(); + List defaults = options.ParametersDefaultCollection?.ToList() ?? new List(); + + parameters = []; + for (int i = 0; i < names.Count; i++) + { + parameters.Add(new ParameterMetadata + { + Name = names[i], + Description = descriptions.ElementAtOrDefault(i), + Required = requiredFlags.ElementAtOrDefault(i)?.ToLower() == "true", + Default = defaults.ElementAtOrDefault(i) + }); + } + } + else if (hasOldParams) + { + // Parse old format and convert to new type + if (!TryParseSourceParameterDictionary(options.SourceParameters, out parameters)) + { + return false; + } + + _logger.LogWarning("The --source.params format is deprecated. 
Please use --parameters.name/description/required/default instead."); + } + + // In TryGetUpdatedSourceObjectWithOptions, before TryCreateSourceObject: + if (parameters == null && EntitySourceType.StoredProcedure.Equals(updatedSourceType)) + { + parameters = entity.Source.Parameters?.ToList(); + } + if (options.SourceKeyFields is not null && options.SourceKeyFields.Any()) { updatedKeyFields = options.SourceKeyFields.ToArray(); } + if (hasNewParams && EntitySourceType.StoredProcedure.Equals(updatedSourceType)) + { + List existingParams; + if (entity.Source.Parameters != null) + { + existingParams = entity.Source.Parameters.ToList(); + } + else + { + existingParams = new List(); + } + + List mergedParams = new(); + + if (parameters != null) + { + foreach (ParameterMetadata newParam in parameters) + { + ParameterMetadata? match = null; + foreach (ParameterMetadata p in existingParams) + { + if (p.Name == newParam.Name) + { + match = p; + break; + } + } + + if (match != null) + { + mergedParams.Add(new ParameterMetadata + { + Name = newParam.Name, + Description = newParam.Description != null ? newParam.Description : match.Description, + Required = newParam.Required, + Default = newParam.Default != null ? newParam.Default : match.Default + }); + } + else + { + mergedParams.Add(newParam); + } + } + } + + foreach (ParameterMetadata param in existingParams) + { + bool found = false; + foreach (ParameterMetadata p in mergedParams) + { + if (p.Name == param.Name) + { + found = true; + break; + } + } + + if (!found) + { + mergedParams.Add(param); + } + } + + parameters = mergedParams; + } + // Try Creating Source Object with the updated values. 
if (!TryCreateSourceObject( updatedSourceName, updatedSourceType, - updatedSourceParameters, + parameters, updatedKeyFields, out updatedSourceObject)) { @@ -1711,7 +2379,29 @@ public static bool IsConfigValid(ValidateOptions options, FileSystemRuntimeConfi ILogger runtimeConfigValidatorLogger = LoggerFactoryForCli.CreateLogger(); RuntimeConfigValidator runtimeConfigValidator = new(runtimeConfigProvider, fileSystem, runtimeConfigValidatorLogger, true); - return runtimeConfigValidator.TryValidateConfig(runtimeConfigFile, LoggerFactoryForCli).Result; + bool isValid = runtimeConfigValidator.TryValidateConfig(runtimeConfigFile, LoggerFactoryForCli).Result; + + // Additional validation: warn if fields are missing and MCP is enabled + if (isValid) + { + if (runtimeConfigProvider.TryGetConfig(out RuntimeConfig? config) && config is not null) + { + bool mcpEnabled = config.Runtime?.Mcp?.Enabled == true; + if (mcpEnabled) + { + foreach (KeyValuePair entity in config.Entities) + { + if (entity.Value.Fields == null || !entity.Value.Fields.Any()) + { + _logger.LogWarning($"Entity '{entity.Key}' is missing 'fields' definition while MCP is enabled. " + + "It's recommended to define fields explicitly to ensure optimal performance with MCP."); + } + } + } + } + } + + return isValid; } /// @@ -1990,5 +2680,185 @@ public static bool TryAddTelemetry(AddTelemetryOptions options, FileSystemRuntim return WriteRuntimeConfigToFile(runtimeConfigFile, runtimeConfig, fileSystem); } + + /// + /// Attempts to update the Azure Key Vault configuration options based on the provided values. + /// Validates that any user-provided parameter value is valid and updates the runtime configuration accordingly. + /// + /// The configuration options provided by the user. + /// The runtime configuration to be updated. + /// True if the Azure Key Vault options were successfully configured; otherwise, false. 
+ private static bool TryUpdateConfiguredAzureKeyVaultOptions( + ConfigureOptions options, + [NotNullWhen(true)] ref RuntimeConfig runtimeConfig) + { + try + { + AzureKeyVaultOptions? updatedAzureKeyVaultOptions = runtimeConfig.AzureKeyVault; + AKVRetryPolicyOptions? updatedRetryPolicyOptions = updatedAzureKeyVaultOptions?.RetryPolicy; + + // Azure Key Vault Endpoint + if (options.AzureKeyVaultEndpoint is not null) + { + updatedAzureKeyVaultOptions = updatedAzureKeyVaultOptions is not null + ? updatedAzureKeyVaultOptions with { Endpoint = options.AzureKeyVaultEndpoint } + : new AzureKeyVaultOptions { Endpoint = options.AzureKeyVaultEndpoint }; + _logger.LogInformation("Updated RuntimeConfig with azure-key-vault.endpoint as '{endpoint}'", options.AzureKeyVaultEndpoint); + } + + // Retry Policy Mode + if (options.AzureKeyVaultRetryPolicyMode is not null) + { + updatedRetryPolicyOptions = updatedRetryPolicyOptions is not null + ? updatedRetryPolicyOptions with { Mode = options.AzureKeyVaultRetryPolicyMode.Value, UserProvidedMode = true } + : new AKVRetryPolicyOptions { Mode = options.AzureKeyVaultRetryPolicyMode.Value, UserProvidedMode = true }; + _logger.LogInformation("Updated RuntimeConfig with azure-key-vault.retry-policy.mode as '{mode}'", options.AzureKeyVaultRetryPolicyMode.Value); + } + + // Retry Policy Max Count + if (options.AzureKeyVaultRetryPolicyMaxCount is not null) + { + if (options.AzureKeyVaultRetryPolicyMaxCount.Value < 1) + { + _logger.LogError("Failed to update configuration with runtime.azure-key-vault.retry-policy.max-count. Value must be a positive integer greater than 0."); + return false; + } + + updatedRetryPolicyOptions = updatedRetryPolicyOptions is not null + ? 
updatedRetryPolicyOptions with { MaxCount = options.AzureKeyVaultRetryPolicyMaxCount.Value, UserProvidedMaxCount = true } + : new AKVRetryPolicyOptions { MaxCount = options.AzureKeyVaultRetryPolicyMaxCount.Value, UserProvidedMaxCount = true }; + _logger.LogInformation("Updated RuntimeConfig with azure-key-vault.retry-policy.max-count as '{maxCount}'", options.AzureKeyVaultRetryPolicyMaxCount.Value); + } + + // Retry Policy Delay Seconds + if (options.AzureKeyVaultRetryPolicyDelaySeconds is not null) + { + if (options.AzureKeyVaultRetryPolicyDelaySeconds.Value < 1) + { + _logger.LogError("Failed to update configuration with runtime.azure-key-vault.retry-policy.delay-seconds. Value must be a positive integer greater than 0."); + return false; + } + + updatedRetryPolicyOptions = updatedRetryPolicyOptions is not null + ? updatedRetryPolicyOptions with { DelaySeconds = options.AzureKeyVaultRetryPolicyDelaySeconds.Value, UserProvidedDelaySeconds = true } + : new AKVRetryPolicyOptions { DelaySeconds = options.AzureKeyVaultRetryPolicyDelaySeconds.Value, UserProvidedDelaySeconds = true }; + _logger.LogInformation("Updated RuntimeConfig with azure-key-vault.retry-policy.delay-seconds as '{delaySeconds}'", options.AzureKeyVaultRetryPolicyDelaySeconds.Value); + } + + // Retry Policy Max Delay Seconds + if (options.AzureKeyVaultRetryPolicyMaxDelaySeconds is not null) + { + if (options.AzureKeyVaultRetryPolicyMaxDelaySeconds.Value < 1) + { + _logger.LogError("Failed to update configuration with runtime.azure-key-vault.retry-policy.max-delay-seconds. Value must be a positive integer greater than 0."); + return false; + } + + updatedRetryPolicyOptions = updatedRetryPolicyOptions is not null + ? 
updatedRetryPolicyOptions with { MaxDelaySeconds = options.AzureKeyVaultRetryPolicyMaxDelaySeconds.Value, UserProvidedMaxDelaySeconds = true } + : new AKVRetryPolicyOptions { MaxDelaySeconds = options.AzureKeyVaultRetryPolicyMaxDelaySeconds.Value, UserProvidedMaxDelaySeconds = true }; + _logger.LogInformation("Updated RuntimeConfig with azure-key-vault.retry-policy.max-delay-seconds as '{maxDelaySeconds}'", options.AzureKeyVaultRetryPolicyMaxDelaySeconds.Value); + } + + // Retry Policy Network Timeout Seconds + if (options.AzureKeyVaultRetryPolicyNetworkTimeoutSeconds is not null) + { + if (options.AzureKeyVaultRetryPolicyNetworkTimeoutSeconds.Value < 1) + { + _logger.LogError("Failed to update configuration with runtime.azure-key-vault.retry-policy.network-timeout-seconds. Value must be a positive integer greater than 0."); + return false; + } + + updatedRetryPolicyOptions = updatedRetryPolicyOptions is not null + ? updatedRetryPolicyOptions with { NetworkTimeoutSeconds = options.AzureKeyVaultRetryPolicyNetworkTimeoutSeconds.Value, UserProvidedNetworkTimeoutSeconds = true } + : new AKVRetryPolicyOptions { NetworkTimeoutSeconds = options.AzureKeyVaultRetryPolicyNetworkTimeoutSeconds.Value, UserProvidedNetworkTimeoutSeconds = true }; + _logger.LogInformation("Updated RuntimeConfig with azure-key-vault.retry-policy.network-timeout-seconds as '{networkTimeoutSeconds}'", options.AzureKeyVaultRetryPolicyNetworkTimeoutSeconds.Value); + } + + // Update Azure Key Vault options with retry policy if retry policy was modified + if (updatedRetryPolicyOptions is not null) + { + updatedAzureKeyVaultOptions = updatedAzureKeyVaultOptions is not null + ? 
updatedAzureKeyVaultOptions with { RetryPolicy = updatedRetryPolicyOptions } + : new AzureKeyVaultOptions { RetryPolicy = updatedRetryPolicyOptions }; + } + + // Update runtime config if Azure Key Vault options were modified + if (updatedAzureKeyVaultOptions is not null) + { + runtimeConfig = runtimeConfig with { AzureKeyVault = updatedAzureKeyVaultOptions }; + } + + return true; + } + catch (Exception ex) + { + _logger.LogError("Failed to update RuntimeConfig.AzureKeyVault with exception message: {exceptionMessage}.", ex.Message); + return false; + } + } + + /// + /// Helper to build a list of FieldMetadata from UpdateOptions. + /// + private static List ComposeFieldsFromOptions(UpdateOptions options) + { + List fields = []; + if (options.FieldsNameCollection != null) + { + List names = options.FieldsNameCollection.ToList(); + List aliases = options.FieldsAliasCollection?.ToList() ?? []; + List descriptions = options.FieldsDescriptionCollection?.ToList() ?? []; + List keys = options.FieldsPrimaryKeyCollection?.ToList() ?? []; + + for (int i = 0; i < names.Count; i++) + { + fields.Add(new FieldMetadata + { + Name = names[i], + Alias = aliases.Count > i ? aliases[i] : null, + Description = descriptions.Count > i ? descriptions[i] : null, + PrimaryKey = keys.Count > i && keys[i], + }); + } + } + + return fields; + } + + /// + /// Validates that the provided fields are valid against the database columns and constraints. 
+ /// + private static bool ValidateFields( + List fields, + out string errorMessage) + { + errorMessage = string.Empty; + HashSet aliases = []; + HashSet keys = []; + + foreach (FieldMetadata field in fields) + { + if (!string.IsNullOrEmpty(field.Alias)) + { + if (!aliases.Add(field.Alias)) + { + errorMessage = $"Alias '{field.Alias}' is not unique within the entity."; + return false; + } + } + + if (field.PrimaryKey) + { + if (!keys.Add(field.Name)) + { + errorMessage = $"Duplicate key field '{field.Name}' detected."; + return false; + } + } + } + + return true; + } } } diff --git a/src/Cli/Utils.cs b/src/Cli/Utils.cs index ac6493ff65..451c330503 100644 --- a/src/Cli/Utils.cs +++ b/src/Cli/Utils.cs @@ -329,19 +329,24 @@ public static bool TryGetConfigFileBasedOnCliPrecedence( } /// - /// This method checks that parameter is only used with Stored Procedure, while - /// key-fields only with table/views. Also ensures that key-fields are always - /// provided for views. - /// - /// type of the source object. - /// IEnumerable string containing parameters for stored-procedure. - /// IEnumerable string containing key columns for table/view. - /// Returns true when successful else on failure, returns false. + /// Validates correct usage of parameters and key-fields based on the source type. + /// Ensures that parameters are only used with stored procedures, key-fields only with tables/views, + /// and that key-fields are always provided for views. + /// + /// Type of the source object. + /// IEnumerable of strings containing parameters for stored procedures (old format). + /// IEnumerable of strings containing parameter names for stored procedures (new format). + /// IEnumerable of strings containing key columns for tables/views. + /// True if the pairing is valid; otherwise, false. public static bool VerifyCorrectPairingOfParameterAndKeyFieldsWithType( EntitySourceType? sourceType, - IEnumerable? parameters, + IEnumerable? parameters, // old format + IEnumerable? 
parametersNameCollection, // new format IEnumerable? keyFields) { + bool hasOldParams = parameters is not null && parameters.Any(); + bool hasNewParams = parametersNameCollection is not null && parametersNameCollection.Any(); + if (sourceType is EntitySourceType.StoredProcedure) { if (keyFields is not null && keyFields.Any()) @@ -353,7 +358,7 @@ public static bool VerifyCorrectPairingOfParameterAndKeyFieldsWithType( else { // For Views and Tables - if (parameters is not null && parameters.Any()) + if (hasOldParams || hasNewParams) { _logger.LogError("Tables/Views don't support parameters."); return false; @@ -382,7 +387,7 @@ public static bool VerifyCorrectPairingOfParameterAndKeyFieldsWithType( public static bool TryCreateSourceObject( string name, EntitySourceType? type, - Dictionary? parameters, + List? parameters, string[]? keyFields, [NotNullWhen(true)] out EntitySource? sourceObject) { @@ -407,21 +412,23 @@ public static bool TryCreateSourceObject( /// Returns true when successful else on failure, returns false. public static bool TryParseSourceParameterDictionary( IEnumerable? parametersList, - out Dictionary? sourceParameters) + out List? 
parameterMetadataList) { - sourceParameters = null; + parameterMetadataList = null; + if (parametersList is null) { return true; } - sourceParameters = new(StringComparer.OrdinalIgnoreCase); + parameterMetadataList = new(); + foreach (string param in parametersList) { string[] items = param.Split(SEPARATOR); if (items.Length != 2) { - sourceParameters = null; + parameterMetadataList = null; _logger.LogError("Invalid format for --source.params"); _logger.LogError("Correct source parameter syntax: --source.params \"key1:value1,key2:value2,...\"."); return false; @@ -430,12 +437,19 @@ public static bool TryParseSourceParameterDictionary( string paramKey = items[0]; object paramValue = ParseStringValue(items[1]); - sourceParameters.Add(paramKey, paramValue); + // Add to ParameterMetadata list with default values for rich metadata + parameterMetadataList.Add(new ParameterMetadata + { + Name = paramKey, + Default = paramValue.ToString(), + Required = false, + Description = null + }); } - if (!sourceParameters.Any()) + if (!parameterMetadataList.Any()) { - sourceParameters = null; + parameterMetadataList = null; } return true; diff --git a/src/Config/Azure.DataApiBuilder.Config.csproj b/src/Config/Azure.DataApiBuilder.Config.csproj index 25dd0716f9..a494bc38ae 100644 --- a/src/Config/Azure.DataApiBuilder.Config.csproj +++ b/src/Config/Azure.DataApiBuilder.Config.csproj @@ -18,6 +18,7 @@ + @@ -25,7 +26,7 @@ - + diff --git a/src/Config/Converters/AKVRetryPolicyOptionsConverterFactory.cs b/src/Config/Converters/AKVRetryPolicyOptionsConverterFactory.cs new file mode 100644 index 0000000000..06d00b64d3 --- /dev/null +++ b/src/Config/Converters/AKVRetryPolicyOptionsConverterFactory.cs @@ -0,0 +1,208 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +using System.Text.Json; +using System.Text.Json.Serialization; +using Azure.DataApiBuilder.Config.ObjectModel; + +namespace Azure.DataApiBuilder.Config.Converters; + +/// +/// Defines how DAB reads and writes Azure Key Vault Retry Policies (JSON). +/// +internal class AKVRetryPolicyOptionsConverterFactory : JsonConverterFactory +{ + // Determines whether to replace environment variable with its + // value or not while deserializing. + private bool _replaceEnvVar; + + /// + public override bool CanConvert(Type typeToConvert) + { + return typeToConvert.IsAssignableTo(typeof(AKVRetryPolicyOptions)); + } + + /// + public override JsonConverter? CreateConverter(Type typeToConvert, JsonSerializerOptions options) + { + return new AKVRetryPolicyOptionsConverter(_replaceEnvVar); + } + + /// Whether to replace environment variable with its + /// value or not while deserializing. + internal AKVRetryPolicyOptionsConverterFactory(bool replaceEnvVar) + { + _replaceEnvVar = replaceEnvVar; + } + + private class AKVRetryPolicyOptionsConverter : JsonConverter + { + // Determines whether to replace environment variable with its + // value or not while deserializing. + private bool _replaceEnvVar; + + /// Whether to replace environment variable with its + /// value or not while deserializing. + public AKVRetryPolicyOptionsConverter(bool replaceEnvVar) + { + _replaceEnvVar = replaceEnvVar; + } + + /// + /// Defines how DAB reads AKV Retry Policy options and defines which values are + /// used to instantiate those options. + /// + /// Thrown when improperly formatted cache options are provided. + public override AKVRetryPolicyOptions? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + if (reader.TokenType is JsonTokenType.StartObject) + { + AKVRetryPolicyMode? mode = null; + int? maxCount = null; + int? delaySeconds = null; + int? maxDelaySeconds = null; + int? 
networkTimeoutSeconds = null; + + while (reader.Read()) + { + if (reader.TokenType is JsonTokenType.EndObject) + { + return new AKVRetryPolicyOptions(mode, maxCount, delaySeconds, maxDelaySeconds, networkTimeoutSeconds); + } + + string? property = reader.GetString(); + reader.Read(); + + switch (property) + { + case "mode": + if (reader.TokenType is JsonTokenType.Null) + { + mode = null; + } + else + { + mode = EnumExtensions.Deserialize(reader.DeserializeString(_replaceEnvVar)!); + } + + break; + case "max-count": + if (reader.TokenType is JsonTokenType.Null) + { + maxCount = null; + } + else + { + int parseMaxCount = reader.GetInt32(); + if (parseMaxCount < 0) + { + throw new JsonException($"Invalid value for max-count: {parseMaxCount}. Value must not be negative."); + } + + maxCount = parseMaxCount; + } + + break; + case "delay-seconds": + if (reader.TokenType is JsonTokenType.Null) + { + delaySeconds = null; + } + else + { + int parseDelaySeconds = reader.GetInt32(); + if (parseDelaySeconds <= 0) + { + throw new JsonException($"Invalid value for delay-seconds: {parseDelaySeconds}. Value must be greater than 0."); + } + + delaySeconds = parseDelaySeconds; + } + + break; + case "max-delay-seconds": + if (reader.TokenType is JsonTokenType.Null) + { + maxDelaySeconds = null; + } + else + { + int parseMaxDelaySeconds = reader.GetInt32(); + if (parseMaxDelaySeconds <= 0) + { + throw new JsonException($"Invalid value for max-delay-seconds: {parseMaxDelaySeconds}. Value must be greater than 0."); + } + + maxDelaySeconds = parseMaxDelaySeconds; + } + + break; + case "network-timeout-seconds": + if (reader.TokenType is JsonTokenType.Null) + { + networkTimeoutSeconds = null; + } + else + { + int parseNetworkTimeoutSeconds = reader.GetInt32(); + if (parseNetworkTimeoutSeconds <= 0) + { + throw new JsonException($"Invalid value for network-timeout-seconds: {parseNetworkTimeoutSeconds}. 
Value must be greater than 0."); + } + + networkTimeoutSeconds = parseNetworkTimeoutSeconds; + } + + break; + } + } + } + + throw new JsonException("Failed to read the Azure Key Vault Retry Policy Options"); + } + + /// + /// When writing the AKVRetryPolicyOptions back to a JSON file, only write the properties and values + /// when those AKVRetryPolicyOptions are user provided. + /// This avoids polluting the written JSON file with a property the user most likely + /// omitted when writing the original DAB runtime config file. + /// This Write operation is only used when a RuntimeConfig object is serialized to JSON. + /// + public override void Write(Utf8JsonWriter writer, AKVRetryPolicyOptions value, JsonSerializerOptions options) + { + writer.WriteStartObject(); + + if (value?.UserProvidedMode is true) + { + writer.WritePropertyName("mode"); + JsonSerializer.Serialize(writer, value.Mode, options); + } + + if (value?.UserProvidedMaxCount is true) + { + writer.WritePropertyName("max-count"); + JsonSerializer.Serialize(writer, value.MaxCount, options); + } + + if (value?.UserProvidedDelaySeconds is true) + { + writer.WritePropertyName("delay-seconds"); + JsonSerializer.Serialize(writer, value.DelaySeconds, options); + } + + if (value?.UserProvidedMaxDelaySeconds is true) + { + writer.WritePropertyName("max-delay-seconds"); + JsonSerializer.Serialize(writer, value.MaxDelaySeconds, options); + } + + if (value?.UserProvidedNetworkTimeoutSeconds is true) + { + writer.WritePropertyName("network-timeout-seconds"); + JsonSerializer.Serialize(writer, value.NetworkTimeoutSeconds, options); + } + + writer.WriteEndObject(); + } + } +} diff --git a/src/Config/Converters/AzureLogAnalyticsAuthOptionsConverter.cs b/src/Config/Converters/AzureLogAnalyticsAuthOptionsConverter.cs new file mode 100644 index 0000000000..1428c0d75f --- /dev/null +++ b/src/Config/Converters/AzureLogAnalyticsAuthOptionsConverter.cs @@ -0,0 +1,111 @@ +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT License. + +using System.Text.Json; +using System.Text.Json.Serialization; +using Azure.DataApiBuilder.Config.ObjectModel; + +namespace Azure.DataApiBuilder.Config.Converters; + +internal class AzureLogAnalyticsAuthOptionsConverter : JsonConverter +{ + // Determines whether to replace environment variable with its + // value or not while deserializing. + private bool _replaceEnvVar; + + /// Whether to replace environment variable with its + /// value or not while deserializing. + public AzureLogAnalyticsAuthOptionsConverter(bool replaceEnvVar) + { + _replaceEnvVar = replaceEnvVar; + } + + /// + /// Defines how DAB reads Azure Log Analytics Auth options and defines which values are + /// used to instantiate AzureLogAnalyticsAuthOptions. + /// + /// Thrown when improperly formatted Azure Log Analytics Auth options are provided. + public override AzureLogAnalyticsAuthOptions? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + if (reader.TokenType is JsonTokenType.StartObject) + { + string? customTableName = null; + string? dcrImmutableId = null; + string? dceEndpoint = null; + + while (reader.Read()) + { + if (reader.TokenType == JsonTokenType.EndObject) + { + return new AzureLogAnalyticsAuthOptions(customTableName, dcrImmutableId, dceEndpoint); + } + + string? 
propertyName = reader.GetString(); + + reader.Read(); + switch (propertyName) + { + case "custom-table-name": + if (reader.TokenType is not JsonTokenType.Null) + { + customTableName = reader.DeserializeString(_replaceEnvVar); + } + + break; + + case "dcr-immutable-id": + if (reader.TokenType is not JsonTokenType.Null) + { + dcrImmutableId = reader.DeserializeString(_replaceEnvVar); + } + + break; + + case "dce-endpoint": + if (reader.TokenType is not JsonTokenType.Null) + { + dceEndpoint = reader.DeserializeString(_replaceEnvVar); + } + + break; + + default: + throw new JsonException($"Unexpected property {propertyName}"); + } + } + } + + throw new JsonException("Failed to read the Azure Log Analytics Auth Options"); + } + + /// + /// When writing the AzureLogAnalyticsAuthOptions back to a JSON file, only write the properties + /// if they are user provided. This avoids polluting the written JSON file with properties + /// the user most likely omitted when writing the original DAB runtime config file. + /// This Write operation is only used when a RuntimeConfig object is serialized to JSON. 
+ /// + public override void Write(Utf8JsonWriter writer, AzureLogAnalyticsAuthOptions value, JsonSerializerOptions options) + { + writer.WriteStartObject(); + + if (value?.UserProvidedCustomTableName is true) + { + writer.WritePropertyName("custom-table-name"); + JsonSerializer.Serialize(writer, value.CustomTableName, options); + } + + if (value?.UserProvidedDcrImmutableId is true) + { + writer.WritePropertyName("dcr-immutable-id"); + JsonSerializer.Serialize(writer, value.DcrImmutableId, options); + } + + if (value?.UserProvidedDceEndpoint is true) + { + writer.WritePropertyName("dce-endpoint"); + JsonSerializer.Serialize(writer, value.DceEndpoint, options); + } + + writer.WriteEndObject(); + } +} diff --git a/src/Config/Converters/AzureLogAnalyticsOptionsConverterFactory.cs b/src/Config/Converters/AzureLogAnalyticsOptionsConverterFactory.cs new file mode 100644 index 0000000000..3fcbe8c7bd --- /dev/null +++ b/src/Config/Converters/AzureLogAnalyticsOptionsConverterFactory.cs @@ -0,0 +1,168 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json; +using System.Text.Json.Serialization; +using Azure.DataApiBuilder.Config.ObjectModel; + +namespace Azure.DataApiBuilder.Config.Converters; + +/// +/// Defines how DAB reads and writes Azure Log Analytics options. +/// +internal class AzureLogAnalyticsOptionsConverterFactory : JsonConverterFactory +{ + // Determines whether to replace environment variable with its + // value or not while deserializing. + private bool _replaceEnvVar; + + /// + public override bool CanConvert(Type typeToConvert) + { + return typeToConvert.IsAssignableTo(typeof(AzureLogAnalyticsOptions)); + } + + /// + public override JsonConverter? CreateConverter(Type typeToConvert, JsonSerializerOptions options) + { + return new AzureLogAnalyticsOptionsConverter(_replaceEnvVar); + } + + /// Whether to replace environment variable with its + /// value or not while deserializing. 
+ internal AzureLogAnalyticsOptionsConverterFactory(bool replaceEnvVar) + { + _replaceEnvVar = replaceEnvVar; + } + + private class AzureLogAnalyticsOptionsConverter : JsonConverter + { + // Determines whether to replace environment variable with its + // value or not while deserializing. + private bool _replaceEnvVar; + + /// Whether to replace environment variable with its + /// value or not while deserializing. + internal AzureLogAnalyticsOptionsConverter(bool replaceEnvVar) + { + _replaceEnvVar = replaceEnvVar; + } + + /// + /// Defines how DAB reads Azure Log Analytics options and defines which values are + /// used to instantiate AzureLogAnalyticsOptions. + /// + /// Thrown when improperly formatted Azure Log Analytics options are provided. + public override AzureLogAnalyticsOptions? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + if (reader.TokenType is JsonTokenType.StartObject) + { + AzureLogAnalyticsAuthOptionsConverter authOptionsConverter = new(_replaceEnvVar); + + bool? enabled = null; + AzureLogAnalyticsAuthOptions? auth = null; + string? logType = null; + int? flushIntervalSeconds = null; + + while (reader.Read()) + { + if (reader.TokenType == JsonTokenType.EndObject) + { + return new AzureLogAnalyticsOptions(enabled, auth, logType, flushIntervalSeconds); + } + + string? 
propertyName = reader.GetString(); + + reader.Read(); + switch (propertyName) + { + case "enabled": + if (reader.TokenType is not JsonTokenType.Null) + { + enabled = reader.GetBoolean(); + } + + break; + + case "auth": + auth = authOptionsConverter.Read(ref reader, typeToConvert, options); + break; + + case "dab-identifier": + if (reader.TokenType is not JsonTokenType.Null) + { + logType = reader.DeserializeString(_replaceEnvVar); + } + + break; + + case "flush-interval-seconds": + if (reader.TokenType is not JsonTokenType.Null) + { + try + { + flushIntervalSeconds = reader.GetInt32(); + } + catch (FormatException) + { + throw new JsonException($"The JSON token value is of the incorrect numeric format."); + } + + if (flushIntervalSeconds <= 0) + { + throw new JsonException($"Invalid flush-interval-seconds: {flushIntervalSeconds}. Specify a number > 0."); + } + } + + break; + + default: + throw new JsonException($"Unexpected property {propertyName}"); + } + } + } + + throw new JsonException("Failed to read the Azure Log Analytics Options"); + } + + /// + /// When writing the AzureLogAnalyticsOptions back to a JSON file, only write the properties + /// if they are user provided. This avoids polluting the written JSON file with properties + /// the user most likely omitted when writing the original DAB runtime config file. + /// This Write operation is only used when a RuntimeConfig object is serialized to JSON. 
+ /// + public override void Write(Utf8JsonWriter writer, AzureLogAnalyticsOptions value, JsonSerializerOptions options) + { + writer.WriteStartObject(); + + if (value?.UserProvidedEnabled is true) + { + writer.WritePropertyName("enabled"); + JsonSerializer.Serialize(writer, value.Enabled, options); + } + + if (value?.Auth is not null && (value.Auth.UserProvidedCustomTableName || value.Auth.UserProvidedDcrImmutableId || value.Auth.UserProvidedDceEndpoint)) + { + AzureLogAnalyticsAuthOptionsConverter authOptionsConverter = options.GetConverter(typeof(AzureLogAnalyticsAuthOptions)) as AzureLogAnalyticsAuthOptionsConverter ?? + throw new JsonException("Failed to get azure-log-analytics.auth options converter"); + + writer.WritePropertyName("auth"); + authOptionsConverter.Write(writer, value.Auth, options); + } + + if (value?.UserProvidedDabIdentifier is true) + { + writer.WritePropertyName("dab-identifier"); + JsonSerializer.Serialize(writer, value.DabIdentifier, options); + } + + if (value?.UserProvidedFlushIntervalSeconds is true) + { + writer.WritePropertyName("flush-interval-seconds"); + JsonSerializer.Serialize(writer, value.FlushIntervalSeconds, options); + } + + writer.WriteEndObject(); + } + } +} diff --git a/src/Config/Converters/DmlToolsConfigConverter.cs b/src/Config/Converters/DmlToolsConfigConverter.cs new file mode 100644 index 0000000000..9acef0f9b2 --- /dev/null +++ b/src/Config/Converters/DmlToolsConfigConverter.cs @@ -0,0 +1,188 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json; +using System.Text.Json.Serialization; +using Azure.DataApiBuilder.Config.ObjectModel; + +namespace Azure.DataApiBuilder.Config.Converters; + +/// +/// JSON converter for DmlToolsConfig that handles both boolean and object formats. 
+/// +internal class DmlToolsConfigConverter : JsonConverter +{ + /// + /// Reads DmlToolsConfig from JSON which can be either: + /// - A boolean: all tools are enabled/disabled + /// - An object: individual tool settings (unspecified tools default to true) + /// - Null/undefined: defaults to all tools enabled (true) + /// + public override DmlToolsConfig? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + // Handle null + if (reader.TokenType is JsonTokenType.Null) + { + // Return default config with all tools enabled + return DmlToolsConfig.Default; + } + + // Handle boolean format: "dml-tools": true/false + if (reader.TokenType is JsonTokenType.True || reader.TokenType is JsonTokenType.False) + { + bool enabled = reader.GetBoolean(); + return DmlToolsConfig.FromBoolean(enabled); + } + + // Handle object format + if (reader.TokenType is JsonTokenType.StartObject) + { + // When using object format, unspecified tools default to true + bool? describeEntities = null; + bool? createRecord = null; + bool? readRecords = null; + bool? updateRecord = null; + bool? deleteRecord = null; + bool? executeEntity = null; + + while (reader.Read()) + { + if (reader.TokenType is JsonTokenType.EndObject) + { + break; + } + + if (reader.TokenType is JsonTokenType.PropertyName) + { + string? 
property = reader.GetString(); + reader.Read(); + + // Handle the property value + if (reader.TokenType is JsonTokenType.True || reader.TokenType is JsonTokenType.False) + { + bool value = reader.GetBoolean(); + + switch (property?.ToLowerInvariant()) + { + case "describe-entities": + describeEntities = value; + break; + case "create-record": + createRecord = value; + break; + case "read-records": + readRecords = value; + break; + case "update-record": + updateRecord = value; + break; + case "delete-record": + deleteRecord = value; + break; + case "execute-entity": + executeEntity = value; + break; + default: + // Skip unknown properties + break; + } + } + else + { + // Error on non-boolean values for known properties + if (property?.ToLowerInvariant() is "describe-entities" or "create-record" + or "read-records" or "update-record" or "delete-record" or "execute-entity") + { + throw new JsonException($"Property '{property}' must be a boolean value."); + } + + // Skip unknown properties + reader.Skip(); + } + } + } + + // Create the config with specified values + // Unspecified values (null) will default to true in the DmlToolsConfig constructor + return new DmlToolsConfig( + allToolsEnabled: null, + describeEntities: describeEntities, + createRecord: createRecord, + readRecords: readRecords, + updateRecord: updateRecord, + deleteRecord: deleteRecord, + executeEntity: executeEntity); + } + + // For any other unexpected token type, return default (all enabled) + return DmlToolsConfig.Default; + } + + /// + /// Writes DmlToolsConfig to JSON. + /// - If all tools have the same value, writes as boolean + /// - Otherwise writes as object with only user-provided properties + /// + public override void Write(Utf8JsonWriter writer, DmlToolsConfig? 
value, JsonSerializerOptions options) + { + if (value is null) + { + return; + } + + // Check if any individual settings were provided by the user + bool hasIndividualSettings = value.UserProvidedDescribeEntities || + value.UserProvidedCreateRecord || + value.UserProvidedReadRecords || + value.UserProvidedUpdateRecord || + value.UserProvidedDeleteRecord || + value.UserProvidedExecuteEntity; + + // Only write the boolean value if it's provided by user + // This prevents writing "dml-tools": true when it's the default + if (!hasIndividualSettings && value.UserProvidedAllToolsEnabled) + { + writer.WritePropertyName("dml-tools"); + writer.WriteBooleanValue(value.AllToolsEnabled); + } + else + { + writer.WritePropertyName("dml-tools"); + + // Write as object with only user-provided properties + writer.WriteStartObject(); + + if (value.UserProvidedDescribeEntities && value.DescribeEntities.HasValue) + { + writer.WriteBoolean("describe-entities", value.DescribeEntities.Value); + } + + if (value.UserProvidedCreateRecord && value.CreateRecord.HasValue) + { + writer.WriteBoolean("create-record", value.CreateRecord.Value); + } + + if (value.UserProvidedReadRecords && value.ReadRecords.HasValue) + { + writer.WriteBoolean("read-records", value.ReadRecords.Value); + } + + if (value.UserProvidedUpdateRecord && value.UpdateRecord.HasValue) + { + writer.WriteBoolean("update-record", value.UpdateRecord.Value); + } + + if (value.UserProvidedDeleteRecord && value.DeleteRecord.HasValue) + { + writer.WriteBoolean("delete-record", value.DeleteRecord.Value); + } + + if (value.UserProvidedExecuteEntity && value.ExecuteEntity.HasValue) + { + writer.WriteBoolean("execute-entity", value.ExecuteEntity.Value); + } + + writer.WriteEndObject(); + } + } +} diff --git a/src/Config/Converters/EntitySourceConverterFactory.cs b/src/Config/Converters/EntitySourceConverterFactory.cs index 51af00717d..a748382e01 100644 --- a/src/Config/Converters/EntitySourceConverterFactory.cs +++ 
b/src/Config/Converters/EntitySourceConverterFactory.cs @@ -57,17 +57,61 @@ public EntitySourceConverter(bool replaceEnvVar) JsonSerializerOptions innerOptions = new(options); innerOptions.Converters.Remove(innerOptions.Converters.First(c => c is EntitySourceConverterFactory)); - EntitySource? source = JsonSerializer.Deserialize(ref reader, innerOptions); + using JsonDocument doc = JsonDocument.ParseValue(ref reader); + JsonElement root = doc.RootElement; - if (source?.Parameters is not null) + if (root.TryGetProperty("parameters", out JsonElement parametersElement) && + parametersElement.ValueKind == JsonValueKind.Object) { - // If we get parameters back the value field will be JsonElement, since that's what System.Text.Json uses for the `object` type. - // But we want to convert that to a CLR type so we can use it in our code and avoid having to do our own type checking - // and casting elsewhere. - return source with { Parameters = source.Parameters.ToDictionary(p => p.Key, p => GetClrValue((JsonElement)p.Value)) }; + // Old format detected + List paramList = []; + foreach (JsonProperty prop in parametersElement.EnumerateObject()) + { + string? defaultValue = GetClrValue(prop.Value)?.ToString(); + paramList.Add(new ParameterMetadata + { + Name = prop.Name, + Default = defaultValue, + }); + } + + // Remove "parameters" from the JSON before deserialization + Dictionary modObj = []; + foreach (JsonProperty property in root.EnumerateObject()) + { + if (!property.NameEquals("parameters")) + { + modObj[property.Name] = property.Value.Deserialize(innerOptions) ?? new object(); + } + } + + modObj["parameters"] = paramList; + + string modJson = JsonSerializer.Serialize(modObj, innerOptions); + + // Deserialize to EntitySource without parameters + EntitySource? entitySource = JsonSerializer.Deserialize(modJson, innerOptions) + ?? 
throw new JsonException("Failed to deserialize EntitySource from modified JSON."); + + // Use the with expression to set the correct Parameters + return entitySource with { Parameters = paramList }; + } + else + { + string rawJson = root.GetRawText(); + // If already in new format, deserialize as usual + EntitySource? source = JsonSerializer.Deserialize(rawJson, innerOptions); + + if (source?.Parameters is not null) + { + if (source.Parameters is IEnumerable paramList) + { + return source with { Parameters = [.. paramList] }; + } + } + + return source; } - - return source; } private static object GetClrValue(JsonElement element) diff --git a/src/Config/Converters/FileSinkConverter.cs b/src/Config/Converters/FileSinkConverter.cs new file mode 100644 index 0000000000..cc7d138a1b --- /dev/null +++ b/src/Config/Converters/FileSinkConverter.cs @@ -0,0 +1,166 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json; +using System.Text.Json.Serialization; +using Azure.DataApiBuilder.Config.ObjectModel; +using Serilog; + +namespace Azure.DataApiBuilder.Config.Converters; +class FileSinkConverter : JsonConverter +{ + // Determines whether to replace environment variable with its + // value or not while deserializing. + private bool _replaceEnvVar; + + /// + /// Whether to replace environment variable with its value or not while deserializing. + /// + public FileSinkConverter(bool replaceEnvVar) + { + _replaceEnvVar = replaceEnvVar; + } + + /// + /// Defines how DAB reads File Sink options and defines which values are + /// used to instantiate FileSinkOptions. + /// + /// Thrown when improperly formatted File Sink options are provided. + public override FileSinkOptions? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + if (reader.TokenType == JsonTokenType.StartObject) + { + bool? enabled = null; + string? path = null; + RollingInterval? rollingInterval = null; + int? 
retainedFileCountLimit = null; + long? fileSizeLimitBytes = null; + + while (reader.Read()) + { + if (reader.TokenType == JsonTokenType.EndObject) + { + return new FileSinkOptions(enabled, path, rollingInterval, retainedFileCountLimit, fileSizeLimitBytes); + } + + string? propertyName = reader.GetString(); + + reader.Read(); + switch (propertyName) + { + case "enabled": + if (reader.TokenType is not JsonTokenType.Null) + { + enabled = reader.GetBoolean(); + } + + break; + + case "path": + if (reader.TokenType is not JsonTokenType.Null) + { + path = reader.DeserializeString(_replaceEnvVar); + } + + break; + + case "rolling-interval": + if (reader.TokenType is not JsonTokenType.Null) + { + rollingInterval = EnumExtensions.Deserialize(reader.DeserializeString(_replaceEnvVar)!); + } + + break; + + case "retained-file-count-limit": + if (reader.TokenType is not JsonTokenType.Null) + { + try + { + retainedFileCountLimit = reader.GetInt32(); + } + catch (FormatException) + { + throw new JsonException($"The JSON token value is of the incorrect numeric format."); + } + + if (retainedFileCountLimit <= 0) + { + throw new JsonException($"Invalid retained-file-count-limit: {retainedFileCountLimit}. Specify a number > 0."); + } + } + + break; + + case "file-size-limit-bytes": + if (reader.TokenType is not JsonTokenType.Null) + { + try + { + fileSizeLimitBytes = reader.GetInt64(); + } + catch (FormatException) + { + throw new JsonException($"The JSON token value is of the incorrect numeric format."); + } + + if (fileSizeLimitBytes <= 0) + { + throw new JsonException($"Invalid file-size-limit-bytes: {fileSizeLimitBytes}. Specify a number > 0."); + } + } + + break; + + default: + throw new JsonException($"Unexpected property {propertyName}"); + } + } + } + + throw new JsonException("Failed to read the File Sink Options"); + } + + /// + /// When writing the FileSinkOptions back to a JSON file, only write the properties + /// if they are user provided. 
This avoids polluting the written JSON file with properties + /// the user most likely omitted when writing the original DAB runtime config file. + /// This Write operation is only used when a RuntimeConfig object is serialized to JSON. + /// + public override void Write(Utf8JsonWriter writer, FileSinkOptions value, JsonSerializerOptions options) + { + writer.WriteStartObject(); + + if (value?.UserProvidedEnabled is true) + { + writer.WritePropertyName("enabled"); + JsonSerializer.Serialize(writer, value.Enabled, options); + } + + if (value?.UserProvidedPath is true) + { + writer.WritePropertyName("path"); + JsonSerializer.Serialize(writer, value.Path, options); + } + + if (value?.UserProvidedRollingInterval is true) + { + writer.WritePropertyName("rolling-interval"); + JsonSerializer.Serialize(writer, value.RollingInterval, options); + } + + if (value?.UserProvidedRetainedFileCountLimit is true) + { + writer.WritePropertyName("retained-file-count-limit"); + JsonSerializer.Serialize(writer, value.RetainedFileCountLimit, options); + } + + if (value?.UserProvidedFileSizeLimitBytes is true) + { + writer.WritePropertyName("file-size-limit-bytes"); + JsonSerializer.Serialize(writer, value.FileSizeLimitBytes, options); + } + + writer.WriteEndObject(); + } +} diff --git a/src/Config/Converters/McpRuntimeOptionsConverterFactory.cs b/src/Config/Converters/McpRuntimeOptionsConverterFactory.cs new file mode 100644 index 0000000000..db9acfa603 --- /dev/null +++ b/src/Config/Converters/McpRuntimeOptionsConverterFactory.cs @@ -0,0 +1,140 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json; +using System.Text.Json.Serialization; +using Azure.DataApiBuilder.Config.ObjectModel; + +namespace Azure.DataApiBuilder.Config.Converters; + +/// +/// JSON converter factory for McpRuntimeOptions that handles both boolean and object formats. 
+/// +internal class McpRuntimeOptionsConverterFactory : JsonConverterFactory +{ + // Determines whether to replace environment variable with its + // value or not while deserializing. + private bool _replaceEnvVar; + + /// + public override bool CanConvert(Type typeToConvert) + { + return typeToConvert.IsAssignableTo(typeof(McpRuntimeOptions)); + } + + /// + public override JsonConverter? CreateConverter(Type typeToConvert, JsonSerializerOptions options) + { + return new McpRuntimeOptionsConverter(_replaceEnvVar); + } + + internal McpRuntimeOptionsConverterFactory(bool replaceEnvVar) + { + _replaceEnvVar = replaceEnvVar; + } + + private class McpRuntimeOptionsConverter : JsonConverter + { + // Determines whether to replace environment variable with its + // value or not while deserializing. + private bool _replaceEnvVar; + + /// Whether to replace environment variable with its + /// value or not while deserializing. + internal McpRuntimeOptionsConverter(bool replaceEnvVar) + { + _replaceEnvVar = replaceEnvVar; + } + + /// + /// Defines how DAB reads MCP options and defines which values are + /// used to instantiate McpRuntimeOptions. + /// + /// Thrown when improperly formatted MCP options are provided. + public override McpRuntimeOptions? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + if (reader.TokenType == JsonTokenType.True || reader.TokenType == JsonTokenType.False) + { + return new McpRuntimeOptions(Enabled: reader.GetBoolean()); + } + + if (reader.TokenType is JsonTokenType.StartObject) + { + DmlToolsConfigConverter dmlToolsConfigConverter = new(); + + bool enabled = true; + string? path = null; + DmlToolsConfig? dmlTools = null; + + while (reader.Read()) + { + if (reader.TokenType == JsonTokenType.EndObject) + { + return new McpRuntimeOptions(enabled, path, dmlTools); + } + + string? 
propertyName = reader.GetString(); + + reader.Read(); + switch (propertyName) + { + case "enabled": + if (reader.TokenType is not JsonTokenType.Null) + { + enabled = reader.GetBoolean(); + } + + break; + + case "path": + if (reader.TokenType is not JsonTokenType.Null) + { + path = reader.DeserializeString(_replaceEnvVar); + } + + break; + + case "dml-tools": + dmlTools = dmlToolsConfigConverter.Read(ref reader, typeToConvert, options); + break; + + default: + throw new JsonException($"Unexpected property {propertyName}"); + } + } + } + + throw new JsonException("Failed to read the MCP Options"); + } + + /// + /// When writing the McpRuntimeOptions back to a JSON file, only write the properties + /// if they are user provided. This avoids polluting the written JSON file with properties + /// the user most likely omitted when writing the original DAB runtime config file. + /// This Write operation is only used when a RuntimeConfig object is serialized to JSON. + /// + public override void Write(Utf8JsonWriter writer, McpRuntimeOptions value, JsonSerializerOptions options) + { + writer.WriteStartObject(); + writer.WriteBoolean("enabled", value.Enabled); + + if (value?.UserProvidedPath is true) + { + writer.WritePropertyName("path"); + JsonSerializer.Serialize(writer, value.Path, options); + } + + // Only write the boolean value if it's not the default (true) + // This prevents writing "dml-tools": true when it's the default + if (value?.DmlTools is not null) + { + DmlToolsConfigConverter dmlToolsOptionsConverter = options.GetConverter(typeof(DmlToolsConfig)) as DmlToolsConfigConverter ?? 
+ throw new JsonException("Failed to get mcp.dml-tools options converter"); + + dmlToolsOptionsConverter.Write(writer, value.DmlTools, options); + } + + writer.WriteEndObject(); + } + } +} diff --git a/src/Config/Converters/RuntimeHealthOptionsConvertorFactory.cs b/src/Config/Converters/RuntimeHealthOptionsConvertorFactory.cs index cca3366cc4..d49cc264e7 100644 --- a/src/Config/Converters/RuntimeHealthOptionsConvertorFactory.cs +++ b/src/Config/Converters/RuntimeHealthOptionsConvertorFactory.cs @@ -55,12 +55,13 @@ internal HealthCheckOptionsConverter(bool replaceEnvVar) bool? enabled = null; int? cacheTtlSeconds = null; HashSet? roles = null; + int? maxQueryParallelism = null; while (reader.Read()) { if (reader.TokenType is JsonTokenType.EndObject) { - return new RuntimeHealthCheckConfig(enabled, roles, cacheTtlSeconds); + return new RuntimeHealthCheckConfig(enabled, roles, cacheTtlSeconds, maxQueryParallelism); } string? property = reader.GetString(); @@ -120,7 +121,17 @@ internal HealthCheckOptionsConverter(bool replaceEnvVar) } break; + case "max-query-parallelism": + if (reader.TokenType is not JsonTokenType.Null) + { + // Allow user to set values between 1 and 8 (inclusive). If not set, the value will be set to 4 during health check. 
+ int userValue = reader.GetInt32(); + int parseMaxQueryParallelism = Math.Clamp(userValue, RuntimeHealthCheckConfig.LOWEST_MAX_QUERY_PARALLELISM, + RuntimeHealthCheckConfig.UPPER_LIMIT_MAX_QUERY_PARALLELISM); + maxQueryParallelism = parseMaxQueryParallelism; + } + break; default: throw new JsonException($"Unexpected property {property}"); } @@ -149,6 +160,12 @@ public override void Write(Utf8JsonWriter writer, RuntimeHealthCheckConfig value JsonSerializer.Serialize(writer, value.Roles, options); } + if (value?.UserProvidedMaxQueryParallelism is true) + { + writer.WritePropertyName("max-query-parallelism"); + JsonSerializer.Serialize(writer, value.MaxQueryParallelism, options); + } + writer.WriteEndObject(); } else diff --git a/src/Config/DataApiBuilderException.cs b/src/Config/DataApiBuilderException.cs index d322391b53..b7696c4deb 100644 --- a/src/Config/DataApiBuilderException.cs +++ b/src/Config/DataApiBuilderException.cs @@ -18,6 +18,8 @@ public class DataApiBuilderException : Exception public const string GRAPHQL_FILTER_FIELD_AUTHZ_FAILURE = "Access forbidden to a field referenced in the filter."; public const string AUTHORIZATION_FAILURE = "Authorization Failure: Access Not Allowed."; public const string GRAPHQL_MUTATION_FIELD_AUTHZ_FAILURE = "Unauthorized due to one or more fields in this mutation."; + public const string GRAPHQL_GROUPBY_FIELD_AUTHZ_FAILURE = "Access forbidden to field '{0}' referenced in the groupBy argument."; + public const string GRAPHQL_AGGREGATION_FIELD_AUTHZ_FAILURE = "Access forbidden to field '{0}' referenced in the aggregation function '{1}'."; public enum SubStatusCodes { @@ -103,6 +105,10 @@ public enum SubStatusCodes /// GlobalRestEndpointDisabled, /// + /// Global MCP endpoint disabled in runtime configuration. + /// + GlobalMcpEndpointDisabled, + /// /// DataSource not found for multiple db scenario. 
/// DataSourceNotFound, diff --git a/src/Config/DatabasePrimitives/DatabaseObject.cs b/src/Config/DatabasePrimitives/DatabaseObject.cs index f3714cd1b0..8636e8c005 100644 --- a/src/Config/DatabasePrimitives/DatabaseObject.cs +++ b/src/Config/DatabasePrimitives/DatabaseObject.cs @@ -130,6 +130,10 @@ public class StoredProcedureDefinition : SourceDefinition public class ParameterDefinition { + public string Name { get; set; } = null!; + public bool? Required { get; set; } = false; + public string? Default { get; set; } + public string? Description { get; set; } public Type SystemType { get; set; } = null!; public DbType? DbType { get; set; } public SqlDbType? SqlDbType { get; set; } diff --git a/src/Config/HealthCheck/RuntimeHealthCheckConfig.cs b/src/Config/HealthCheck/RuntimeHealthCheckConfig.cs index 02c6cf42f2..fef45cd7f9 100644 --- a/src/Config/HealthCheck/RuntimeHealthCheckConfig.cs +++ b/src/Config/HealthCheck/RuntimeHealthCheckConfig.cs @@ -7,22 +7,46 @@ namespace Azure.DataApiBuilder.Config.ObjectModel; public record RuntimeHealthCheckConfig : HealthCheckConfig { + /// + /// Represents the lowest maximum query parallelism for health check. + /// + public const int LOWEST_MAX_QUERY_PARALLELISM = 1; + + /// + /// Default maximum query parallelism for health check. + /// + public const int DEFAULT_MAX_QUERY_PARALLELISM = 4; + + /// + /// Upper limit of maximum query parallelism for health check. + /// + public const int UPPER_LIMIT_MAX_QUERY_PARALLELISM = 8; + [JsonPropertyName("cache-ttl-seconds")] public int CacheTtlSeconds { get; set; } public HashSet? Roles { get; set; } - // TODO: Add support for parallel stream to run the health check query in upcoming PRs - // public int MaxDop { get; set; } = 1; // Parallelized streams to run Health Check (Default: 1) - [JsonIgnore(Condition = JsonIgnoreCondition.Always)] public bool UserProvidedTtlOptions { get; init; } = false; + /// + /// Flag to indicate if the user has provided a value for MaxQueryParallelism. 
+ /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + public bool UserProvidedMaxQueryParallelism { get; init; } = false; + + /// + /// Gets or sets the maximum number of queries that can be executed in parallel. + /// + [JsonPropertyName("max-query-parallelism")] + public int? MaxQueryParallelism { get; set; } + public RuntimeHealthCheckConfig() : base() { } - public RuntimeHealthCheckConfig(bool? enabled, HashSet? roles = null, int? cacheTtlSeconds = null) : base(enabled) + public RuntimeHealthCheckConfig(bool? enabled, HashSet? roles = null, int? cacheTtlSeconds = null, int? maxQueryParallelism = null) : base(enabled) { this.Roles = roles; @@ -35,5 +59,17 @@ public RuntimeHealthCheckConfig(bool? enabled, HashSet? roles = null, in { this.CacheTtlSeconds = EntityCacheOptions.DEFAULT_TTL_SECONDS; } + + // Allow user to set values between 1 and 8 (inclusive). If not set, the value will be set to 4 during health check. + if (maxQueryParallelism is not null) + { + this.MaxQueryParallelism = maxQueryParallelism; + UserProvidedMaxQueryParallelism = true; + } + else + { + this.MaxQueryParallelism = DEFAULT_MAX_QUERY_PARALLELISM; + } + } } diff --git a/src/Config/ObjectModel/AKVRetryPolicyMode.cs b/src/Config/ObjectModel/AKVRetryPolicyMode.cs new file mode 100644 index 0000000000..ed9fafc792 --- /dev/null +++ b/src/Config/ObjectModel/AKVRetryPolicyMode.cs @@ -0,0 +1,15 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +using System.Text.Json.Serialization; + +namespace Azure.DataApiBuilder.Config.ObjectModel; + +[JsonConverter(typeof(JsonStringEnumConverter))] +public enum AKVRetryPolicyMode +{ + // Fixed retry policy mode will use a fixed value when waiting on retries + Fixed, + // Exponential retry policy mode will use exponential back-off when waiting on retries + Exponential +} diff --git a/src/Config/ObjectModel/AKVRetryPolicyOptions.cs b/src/Config/ObjectModel/AKVRetryPolicyOptions.cs new file mode 100644 index 0000000000..4b62a5bd1e --- /dev/null +++ b/src/Config/ObjectModel/AKVRetryPolicyOptions.cs @@ -0,0 +1,113 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Serialization; + +namespace Azure.DataApiBuilder.Config.ObjectModel; + +public record AKVRetryPolicyOptions +{ + public const AKVRetryPolicyMode DEFAULT_MODE = AKVRetryPolicyMode.Exponential; + + public const int DEFAULT_MAX_COUNT = 3; + + public const int DEFAULT_DELAY_SECONDS = 1; + + public const int DEFAULT_MAX_DELAY_SECONDS = 60; + + public const int DEFAULT_NETWORK_TIMEOUT_SECONDS = 60; + + [JsonPropertyName("mode")] + public AKVRetryPolicyMode? Mode { get; init; } = null; + + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(Mode))] + public bool UserProvidedMode { get; init; } = false; + + [JsonPropertyName("max-count")] + public int? MaxCount { get; init; } = null; + + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(MaxCount))] + public bool UserProvidedMaxCount { get; init; } = false; + + [JsonPropertyName("delay-seconds")] + public int? DelaySeconds { get; init; } = null; + + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(DelaySeconds))] + public bool UserProvidedDelaySeconds { get; init; } = false; + + [JsonPropertyName("max-delay-seconds")] + public int? 
MaxDelaySeconds { get; init; } = null; + + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(MaxDelaySeconds))] + public bool UserProvidedMaxDelaySeconds { get; init; } = false; + + [JsonPropertyName("network-timeout-seconds")] + public int? NetworkTimeoutSeconds { get; init; } = null; + + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(NetworkTimeoutSeconds))] + public bool UserProvidedNetworkTimeoutSeconds { get; init; } = false; + + public AKVRetryPolicyOptions( + AKVRetryPolicyMode? mode = null, + int? maxCount = null, + int? delaySeconds = null, + int? maxDelaySeconds = null, + int? networkTimeoutSeconds = null) + { + if (mode is not null) + { + this.Mode = mode; + UserProvidedMode = true; + } + else + { + this.Mode = DEFAULT_MODE; + } + + if (maxCount is not null) + { + this.MaxCount = maxCount; + UserProvidedMaxCount = true; + } + else + { + this.MaxCount = DEFAULT_MAX_COUNT; + } + + if (delaySeconds is not null) + { + this.DelaySeconds = delaySeconds; + UserProvidedDelaySeconds = true; + } + else + { + this.DelaySeconds = DEFAULT_DELAY_SECONDS; + } + + if (maxDelaySeconds is not null) + { + this.MaxDelaySeconds = maxDelaySeconds; + UserProvidedMaxDelaySeconds = true; + } + else + { + this.MaxDelaySeconds = DEFAULT_MAX_DELAY_SECONDS; + } + + if (networkTimeoutSeconds is not null) + { + this.NetworkTimeoutSeconds = networkTimeoutSeconds; + UserProvidedNetworkTimeoutSeconds = true; + } + else + { + this.NetworkTimeoutSeconds = DEFAULT_NETWORK_TIMEOUT_SECONDS; + } + } +} diff --git a/src/Config/ObjectModel/ApiType.cs b/src/Config/ObjectModel/ApiType.cs index 5583e67098..fb57fe2859 100644 --- a/src/Config/ObjectModel/ApiType.cs +++ b/src/Config/ObjectModel/ApiType.cs @@ -10,6 +10,7 @@ public enum ApiType { REST, GraphQL, + MCP, // This is required to indicate features common between all APIs. 
All } diff --git a/src/Config/ObjectModel/AuthenticationOptions.cs b/src/Config/ObjectModel/AuthenticationOptions.cs index 189540fbe6..6750d6e807 100644 --- a/src/Config/ObjectModel/AuthenticationOptions.cs +++ b/src/Config/ObjectModel/AuthenticationOptions.cs @@ -17,6 +17,7 @@ public record AuthenticationOptions(string Provider = nameof(EasyAuthType.Static public const string CLIENT_PRINCIPAL_HEADER = "X-MS-CLIENT-PRINCIPAL"; public const string NAME_CLAIM_TYPE = "name"; public const string ROLE_CLAIM_TYPE = "roles"; + public const string ORIGINAL_ROLE_CLAIM_TYPE = "original_roles"; /// /// Returns whether the configured Provider matches an diff --git a/src/Config/ObjectModel/AzureKeyVaultOptions.cs b/src/Config/ObjectModel/AzureKeyVaultOptions.cs new file mode 100644 index 0000000000..27094cd16f --- /dev/null +++ b/src/Config/ObjectModel/AzureKeyVaultOptions.cs @@ -0,0 +1,15 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Text.Json.Serialization; + +namespace Azure.DataApiBuilder.Config.ObjectModel; + +public record AzureKeyVaultOptions +{ + [JsonPropertyName("endpoint")] + public string? Endpoint { get; init; } + + [JsonPropertyName("retry-policy")] + public AKVRetryPolicyOptions? RetryPolicy { get; init; } +} diff --git a/src/Config/ObjectModel/AzureLogAnalyticsAuthOptions.cs b/src/Config/ObjectModel/AzureLogAnalyticsAuthOptions.cs new file mode 100644 index 0000000000..58b83630e9 --- /dev/null +++ b/src/Config/ObjectModel/AzureLogAnalyticsAuthOptions.cs @@ -0,0 +1,80 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Serialization; + +namespace Azure.DataApiBuilder.Config.ObjectModel; + +/// +/// Represents the authentication options for Azure Log Analytics. +/// +public record AzureLogAnalyticsAuthOptions +{ + /// + /// Custom log table name in Azure Log Analytics. + /// + public string? 
CustomTableName { get; init; } + + /// + /// Immutable ID of the data collection rule (DCR) for Azure Log Analytics. + /// + public string? DcrImmutableId { get; init; } + + /// + /// Data collection endpoint (DCE) for Azure Log Analytics. + /// + public string? DceEndpoint { get; init; } + + [JsonConstructor] + public AzureLogAnalyticsAuthOptions(string? customTableName = null, string? dcrImmutableId = null, string? dceEndpoint = null) + { + if (customTableName is not null) + { + CustomTableName = customTableName; + UserProvidedCustomTableName = true; + } + + if (dcrImmutableId is not null) + { + DcrImmutableId = dcrImmutableId; + UserProvidedDcrImmutableId = true; + } + + if (dceEndpoint is not null) + { + DceEndpoint = dceEndpoint; + UserProvidedDceEndpoint = true; + } + } + + /// + /// Flag which informs CLI and JSON serializer whether to write custom-table-name + /// property and value to the runtime config file. + /// When user doesn't provide the custom-table-name property/value, which signals DAB to not write anything, + /// the DAB CLI should not write the current value to a serialized config. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(CustomTableName))] + public bool UserProvidedCustomTableName { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write dcr-immutable-id + /// property and value to the runtime config file. + /// When user doesn't provide the dcr-immutable-id property/value, which signals DAB to not write anything, + /// the DAB CLI should not write the current value to a serialized config. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(DcrImmutableId))] + public bool UserProvidedDcrImmutableId { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write dce-endpoint + /// property and value to the runtime config file. 
+ /// When user doesn't provide the dce-endpoint property/value, which signals DAB to not write anything, + /// the DAB CLI should not write the current value to a serialized config. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(DceEndpoint))] + public bool UserProvidedDceEndpoint { get; init; } = false; +} diff --git a/src/Config/ObjectModel/AzureLogAnalyticsLogs.cs b/src/Config/ObjectModel/AzureLogAnalyticsLogs.cs new file mode 100644 index 0000000000..8b914be681 --- /dev/null +++ b/src/Config/ObjectModel/AzureLogAnalyticsLogs.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +namespace Azure.DataApiBuilder.Config.ObjectModel; + +/// +/// Class used to save the components for the logs that are sent to Azure Log Analytics +/// +public class AzureLogAnalyticsLogs +{ + public string Time { get; set; } + public string LogLevel { get; set; } + public string? Message { get; set; } + public string? Component { get; set; } + public string? Identifier { get; set; } + + public AzureLogAnalyticsLogs(string time, string logLevel, string? message, string? component, string? identifier = null) + { + Time = time; + LogLevel = logLevel; + Message = message; + Component = component; + Identifier = identifier; + } +} diff --git a/src/Config/ObjectModel/AzureLogAnalyticsOptions.cs b/src/Config/ObjectModel/AzureLogAnalyticsOptions.cs new file mode 100644 index 0000000000..d67e98be3a --- /dev/null +++ b/src/Config/ObjectModel/AzureLogAnalyticsOptions.cs @@ -0,0 +1,116 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Serialization; + +namespace Azure.DataApiBuilder.Config.ObjectModel; + +/// +/// Represents the options for configuring Azure Log Analytics. +/// Properties are nullable to support DAB CLI merge config +/// expected behavior. 
+/// +public record AzureLogAnalyticsOptions +{ + /// + /// Default enabled for Azure Log Analytics. + /// + public const bool DEFAULT_ENABLED = false; + + /// + /// Default log type for Azure Log Analytics. + /// + public const string DEFAULT_DAB_IDENTIFIER = "DabLogs"; + + /// + /// Default flush interval in seconds. + /// + public const int DEFAULT_FLUSH_INTERVAL_SECONDS = 5; + + /// + /// Whether Azure Log Analytics is enabled. + /// + public bool Enabled { get; init; } + + /// + /// Authentication options for Azure Log Analytics. + /// + public AzureLogAnalyticsAuthOptions? Auth { get; init; } + + /// + /// Custom identifier name to send to Log Analytics. + /// + public string? DabIdentifier { get; init; } + + /// + /// Interval between log batch pushes (in seconds). + /// + public int? FlushIntervalSeconds { get; init; } + + [JsonConstructor] + public AzureLogAnalyticsOptions(bool? enabled = null, AzureLogAnalyticsAuthOptions? auth = null, string? dabIdentifier = null, int? flushIntervalSeconds = null) + { + Auth = auth; + + if (enabled is not null) + { + Enabled = (bool)enabled; + UserProvidedEnabled = true; + } + else + { + Enabled = DEFAULT_ENABLED; + } + + if (dabIdentifier is not null) + { + DabIdentifier = dabIdentifier; + UserProvidedDabIdentifier = true; + } + else + { + DabIdentifier = DEFAULT_DAB_IDENTIFIER; + } + + if (flushIntervalSeconds is not null) + { + FlushIntervalSeconds = flushIntervalSeconds; + UserProvidedFlushIntervalSeconds = true; + } + else + { + FlushIntervalSeconds = DEFAULT_FLUSH_INTERVAL_SECONDS; + } + } + + /// + /// Flag which informs CLI and JSON serializer whether to write enabled + /// property and value to the runtime config file. + /// When user doesn't provide the enabled property/value, which signals DAB to use the default, + /// the DAB CLI should not write the default value to a serialized config. 
+ /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(Enabled))] + public bool UserProvidedEnabled { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write dab-identifier + /// property and value to the runtime config file. + /// When user doesn't provide the dab-identifier property/value, which signals DAB to use the default, + /// the DAB CLI should not write the default value to a serialized config. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(DabIdentifier))] + public bool UserProvidedDabIdentifier { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write flush-interval-seconds + /// property and value to the runtime config file. + /// When user doesn't provide the flush-interval-seconds property/value, which signals DAB to use the default, + /// the DAB CLI should not write the default value to a serialized config. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(FlushIntervalSeconds))] + public bool UserProvidedFlushIntervalSeconds { get; init; } = false; +} diff --git a/src/Config/ObjectModel/DmlToolsConfig.cs b/src/Config/ObjectModel/DmlToolsConfig.cs new file mode 100644 index 0000000000..c14f8e49ed --- /dev/null +++ b/src/Config/ObjectModel/DmlToolsConfig.cs @@ -0,0 +1,188 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Serialization; + +namespace Azure.DataApiBuilder.Config.ObjectModel; + +/// +/// DML Tools configuration that can be either a boolean or object with individual tool settings +/// +public record DmlToolsConfig +{ + /// + /// Default value for all tools when not specified + /// + public const bool DEFAULT_ENABLED = true; + + /// + /// Indicates if all tools are enabled/disabled uniformly + /// + public bool AllToolsEnabled { get; init; } + + /// + /// Whether describe-entities tool is enabled + /// + public bool? DescribeEntities { get; init; } + + /// + /// Whether create-record tool is enabled + /// + public bool? CreateRecord { get; init; } + + /// + /// Whether read-records tool is enabled + /// + public bool? ReadRecords { get; init; } + + /// + /// Whether update-record tool is enabled + /// + public bool? UpdateRecord { get; init; } + + /// + /// Whether delete-record tool is enabled + /// + public bool? DeleteRecord { get; init; } + + /// + /// Whether execute-entity tool is enabled + /// + public bool? ExecuteEntity { get; init; } + + [JsonConstructor] + public DmlToolsConfig( + bool? allToolsEnabled = null, + bool? describeEntities = null, + bool? createRecord = null, + bool? readRecords = null, + bool? updateRecord = null, + bool? deleteRecord = null, + bool? 
executeEntity = null) + { + if (allToolsEnabled is not null) + { + AllToolsEnabled = allToolsEnabled.Value; + UserProvidedAllToolsEnabled = true; + } + else + { + AllToolsEnabled = DEFAULT_ENABLED; + } + + if (describeEntities is not null) + { + DescribeEntities = describeEntities; + UserProvidedDescribeEntities = true; + } + + if (createRecord is not null) + { + CreateRecord = createRecord; + UserProvidedCreateRecord = true; + } + + if (readRecords is not null) + { + ReadRecords = readRecords; + UserProvidedReadRecords = true; + } + + if (updateRecord is not null) + { + UpdateRecord = updateRecord; + UserProvidedUpdateRecord = true; + } + + if (deleteRecord is not null) + { + DeleteRecord = deleteRecord; + UserProvidedDeleteRecord = true; + } + + if (executeEntity is not null) + { + ExecuteEntity = executeEntity; + UserProvidedExecuteEntity = true; + } + } + + /// + /// Creates a DmlToolsConfig with all tools set to the same state + /// + public static DmlToolsConfig FromBoolean(bool enabled) + { + return new DmlToolsConfig + { + AllToolsEnabled = enabled, + DescribeEntities = null, + CreateRecord = null, + ReadRecords = null, + UpdateRecord = null, + DeleteRecord = null, + ExecuteEntity = null + }; + } + + /// + /// Creates a default DmlToolsConfig with all tools enabled + /// + public static DmlToolsConfig Default => FromBoolean(DEFAULT_ENABLED); + + /// + /// Flag which informs CLI and JSON serializer whether to write all-tools-enabled + /// property/value to the runtime config file. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(AllToolsEnabled))] + public bool UserProvidedAllToolsEnabled { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write describe-entities + /// property/value to the runtime config file. 
+ /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(DescribeEntities))] + public bool UserProvidedDescribeEntities { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write create-record + /// property/value to the runtime config file. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(CreateRecord))] + public bool UserProvidedCreateRecord { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write read-records + /// property/value to the runtime config file. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(ReadRecords))] + public bool UserProvidedReadRecords { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write update-record + /// property/value to the runtime config file. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(UpdateRecord))] + public bool UserProvidedUpdateRecord { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write delete-record + /// property/value to the runtime config file. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(DeleteRecord))] + public bool UserProvidedDeleteRecord { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write execute-entity + /// property/value to the runtime config file. 
+ /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(ExecuteEntity))] + public bool UserProvidedExecuteEntity { get; init; } = false; +} diff --git a/src/Config/ObjectModel/Entity.cs b/src/Config/ObjectModel/Entity.cs index 5660864088..c9f247e0f6 100644 --- a/src/Config/ObjectModel/Entity.cs +++ b/src/Config/ObjectModel/Entity.cs @@ -23,18 +23,20 @@ namespace Azure.DataApiBuilder.Config.ObjectModel; /// how long that response should be valid in the cache. /// Defines whether to enable comprehensive health check for the entity /// and how many rows to return in query and under what threshold-ms. +/// Optional description for the entity. Used for API documentation and GraphQL schema comments. public record Entity { public const string PROPERTY_PATH = "path"; public const string PROPERTY_METHODS = "methods"; + public string? Description { get; init; } public EntitySource Source { get; init; } + public List? Fields { get; init; } public EntityGraphQLOptions GraphQL { get; init; } public EntityRestOptions Rest { get; init; } public EntityPermission[] Permissions { get; init; } public Dictionary? Mappings { get; init; } public Dictionary? Relationships { get; init; } public EntityCacheOptions? Cache { get; init; } - public EntityHealthCheckConfig? Health { get; init; } [JsonIgnore] @@ -44,16 +46,19 @@ public record Entity public Entity( EntitySource Source, EntityGraphQLOptions GraphQL, + List? Fields, EntityRestOptions Rest, EntityPermission[] Permissions, Dictionary? Mappings, Dictionary? Relationships, EntityCacheOptions? Cache = null, bool IsLinkingEntity = false, - EntityHealthCheckConfig? Health = null) + EntityHealthCheckConfig? Health = null, + string? 
Description = null) { this.Health = Health; this.Source = Source; + this.Fields = Fields; this.GraphQL = GraphQL; this.Rest = Rest; this.Permissions = Permissions; @@ -61,6 +66,7 @@ public Entity( this.Relationships = Relationships; this.Cache = Cache; this.IsLinkingEntity = IsLinkingEntity; + this.Description = Description; } /// diff --git a/src/Config/ObjectModel/EntitySource.cs b/src/Config/ObjectModel/EntitySource.cs index e6cbd70a0b..8844796413 100644 --- a/src/Config/ObjectModel/EntitySource.cs +++ b/src/Config/ObjectModel/EntitySource.cs @@ -1,6 +1,5 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. - namespace Azure.DataApiBuilder.Config.ObjectModel; /// @@ -13,4 +12,4 @@ namespace Azure.DataApiBuilder.Config.ObjectModel; /// If Type is SourceType.StoredProcedure, /// Parameters to be passed as defaults to the procedure call /// The field(s) to be used as primary keys. -public record EntitySource(string Object, EntitySourceType? Type, Dictionary? Parameters, string[]? KeyFields); +public record EntitySource(string Object, EntitySourceType? Type, List? Parameters, string[]? KeyFields); diff --git a/src/Config/ObjectModel/FieldMetadata.cs b/src/Config/ObjectModel/FieldMetadata.cs new file mode 100644 index 0000000000..118f38c0c2 --- /dev/null +++ b/src/Config/ObjectModel/FieldMetadata.cs @@ -0,0 +1,28 @@ +namespace Azure.DataApiBuilder.Config.ObjectModel +{ + /// + /// Represents metadata for a field in an entity. + /// + public class FieldMetadata + { + /// + /// The name of the field (must match a database column). + /// + public string Name { get; set; } = string.Empty; + + /// + /// The alias for the field (must be unique per entity). + /// + public string? Alias { get; set; } + + /// + /// The description for the field. + /// + public string? Description { get; set; } + + /// + /// Whether this field is a key (must be unique). 
+ /// + public bool PrimaryKey { get; set; } + } +} diff --git a/src/Config/ObjectModel/FileSinkOptions.cs b/src/Config/ObjectModel/FileSinkOptions.cs new file mode 100644 index 0000000000..a5de58642f --- /dev/null +++ b/src/Config/ObjectModel/FileSinkOptions.cs @@ -0,0 +1,168 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Serialization; +using Serilog; + +namespace Azure.DataApiBuilder.Config.ObjectModel; + +/// +/// Represents the options for configuring file sink telemetry. +/// +public record FileSinkOptions +{ + /// + /// Default enabled for File Sink. + /// + public const bool DEFAULT_ENABLED = false; + + /// + /// Default path for File Sink. + /// + public const string DEFAULT_PATH = @"logs\dab-log.txt"; + + /// + /// Default rolling interval for File Sink. + /// + public const string DEFAULT_ROLLING_INTERVAL = nameof(Serilog.RollingInterval.Day); + + /// + /// Default retained file count limit for File Sink. + /// + public const int DEFAULT_RETAINED_FILE_COUNT_LIMIT = 1; + + /// + /// Default file size limit bytes for File Sink. + /// + public const int DEFAULT_FILE_SIZE_LIMIT_BYTES = 1048576; + + /// + /// Whether File Sink is enabled. + /// + public bool Enabled { get; init; } + + /// + /// Path to the file where logs will be uploaded. + /// + public string Path { get; init; } + + /// + /// Time it takes for files with logs to be discarded. + /// + public string RollingInterval { get; init; } + + /// + /// Amount of files that can exist simultaneously in which logs are saved. + /// + public int RetainedFileCountLimit { get; init; } + + /// + /// File size limit in bytes before a new file needs to be created. + /// + public long FileSizeLimitBytes { get; init; } + + [JsonConstructor] + public FileSinkOptions(bool? enabled = null, string? path = null, RollingInterval? rollingInterval = null, int? retainedFileCountLimit = null, long? 
fileSizeLimitBytes = null) + { + if (enabled is not null) + { + Enabled = (bool)enabled; + UserProvidedEnabled = true; + } + else + { + Enabled = DEFAULT_ENABLED; + } + + if (path is not null) + { + Path = path; + UserProvidedPath = true; + } + else + { + Path = DEFAULT_PATH; + } + + if (rollingInterval is not null) + { + RollingInterval = ((RollingInterval)rollingInterval).ToString(); + UserProvidedRollingInterval = true; + } + else + { + RollingInterval = DEFAULT_ROLLING_INTERVAL; + } + + if (retainedFileCountLimit is not null) + { + RetainedFileCountLimit = (int)retainedFileCountLimit; + UserProvidedRetainedFileCountLimit = true; + } + else + { + RetainedFileCountLimit = DEFAULT_RETAINED_FILE_COUNT_LIMIT; + } + + if (fileSizeLimitBytes is not null) + { + FileSizeLimitBytes = (long)fileSizeLimitBytes; + UserProvidedFileSizeLimitBytes = true; + } + else + { + FileSizeLimitBytes = DEFAULT_FILE_SIZE_LIMIT_BYTES; + } + } + + /// + /// Flag which informs CLI and JSON serializer whether to write enabled + /// property/value to the runtime config file. + /// When user doesn't provide the enabled property/value, which signals DAB to use the default, + /// the DAB CLI should not write the default value to a serialized config. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(Enabled))] + public bool UserProvidedEnabled { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write path + /// property/value to the runtime config file. + /// When user doesn't provide the path property/value, which signals DAB to use the default, + /// the DAB CLI should not write the default value to a serialized config. 
+ /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(Path))] + public bool UserProvidedPath { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write rolling-interval + /// property/value to the runtime config file. + /// When user doesn't provide the rolling-interval property/value, which signals DAB to use the default, + /// the DAB CLI should not write the default value to a serialized config. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(RollingInterval))] + public bool UserProvidedRollingInterval { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write retained-file-count-limit + /// property/value to the runtime config file. + /// When user doesn't provide the retained-file-count-limit property/value, which signals DAB to use the default, + /// the DAB CLI should not write the default value to a serialized config. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(RetainedFileCountLimit))] + public bool UserProvidedRetainedFileCountLimit { get; init; } = false; + + /// + /// Flag which informs CLI and JSON serializer whether to write file-size-limit-bytes + /// property/value to the runtime config file. + /// When user doesn't provide the file-size-limit-bytes property/value, which signals DAB to use the default, + /// the DAB CLI should not write the default value to a serialized config. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.Always)] + [MemberNotNullWhen(true, nameof(FileSizeLimitBytes))] + public bool UserProvidedFileSizeLimitBytes { get; init; } = false; +} diff --git a/src/Config/ObjectModel/McpRuntimeOptions.cs b/src/Config/ObjectModel/McpRuntimeOptions.cs new file mode 100644 index 0000000000..73d695ee4a --- /dev/null +++ b/src/Config/ObjectModel/McpRuntimeOptions.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT License.
+
+using System.Diagnostics.CodeAnalysis;
+using System.Text.Json.Serialization;
+using Azure.DataApiBuilder.Config.Converters;
+
+namespace Azure.DataApiBuilder.Config.ObjectModel;
+
+public record McpRuntimeOptions
+{
+    public const string DEFAULT_PATH = "/mcp";
+
+    /// <summary>
+    /// Whether MCP endpoints are enabled
+    /// </summary>
+    [JsonPropertyName("enabled")]
+    public bool Enabled { get; init; } = true;
+
+    /// <summary>
+    /// The path where MCP endpoints will be exposed
+    /// </summary>
+    [JsonPropertyName("path")]
+    public string Path { get; init; } = DEFAULT_PATH;
+
+    /// <summary>
+    /// Configuration for DML tools
+    /// </summary>
+    [JsonPropertyName("dml-tools")]
+    [JsonConverter(typeof(DmlToolsConfigConverter))]
+    public DmlToolsConfig? DmlTools { get; init; }
+
+    [JsonConstructor]
+    public McpRuntimeOptions(
+        bool Enabled = true,
+        string? Path = null,
+        DmlToolsConfig? DmlTools = null)
+    {
+        this.Enabled = Enabled;
+
+        if (Path is not null)
+        {
+            this.Path = Path;
+            UserProvidedPath = true;
+        }
+        else
+        {
+            this.Path = DEFAULT_PATH;
+        }
+
+        this.DmlTools = DmlTools;
+    }
+
+    /// <summary>
+    /// Flag which informs CLI and JSON serializer whether to write path
+    /// property and value to the runtime config file.
+    /// When user doesn't provide the path property/value, which signals DAB to use the default,
+    /// the DAB CLI should not write the default value to a serialized config.
+    /// </summary>
+    [JsonIgnore(Condition = JsonIgnoreCondition.Always)]
+    [MemberNotNullWhen(true, nameof(Path))]
+    public bool UserProvidedPath { get; init; } = false;
+}
diff --git a/src/Config/ObjectModel/PaginationOptions.cs b/src/Config/ObjectModel/PaginationOptions.cs
index ab4bff29ff..ccb0808386 100644
--- a/src/Config/ObjectModel/PaginationOptions.cs
+++ b/src/Config/ObjectModel/PaginationOptions.cs
@@ -37,8 +37,14 @@ public record PaginationOptions
     [JsonPropertyName("max-page-size")]
     public int?
MaxPageSize { get; init; } = null; + /// + /// When true, nextLink in paginated responses will be relative (default: false). + /// + [JsonPropertyName("next-link-relative")] + public bool? NextLinkRelative { get; init; } = false; + [JsonConstructor] - public PaginationOptions(int? DefaultPageSize = null, int? MaxPageSize = null) + public PaginationOptions(int? DefaultPageSize = null, int? MaxPageSize = null, bool? NextLinkRelative = null) { if (MaxPageSize is not null) { @@ -69,6 +75,8 @@ public PaginationOptions(int? DefaultPageSize = null, int? MaxPageSize = null) statusCode: HttpStatusCode.ServiceUnavailable, subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError); } + + this.NextLinkRelative = NextLinkRelative ?? false; } /// diff --git a/src/Config/ObjectModel/ParameterMetadata.cs b/src/Config/ObjectModel/ParameterMetadata.cs new file mode 100644 index 0000000000..334979d728 --- /dev/null +++ b/src/Config/ObjectModel/ParameterMetadata.cs @@ -0,0 +1,28 @@ +namespace Azure.DataApiBuilder.Config.ObjectModel +{ + /// + /// Represents metadata for a parameter, including its name, description, requirement status, and default value. + /// + public class ParameterMetadata + { + /// + /// Gets or sets the name of the parameter. + /// + public required string Name { get; set; } + + /// + /// Gets or sets the description of the parameter. + /// + public string? Description { get; set; } + + /// + /// Gets or sets a value indicating whether the parameter is required. + /// + public bool Required { get; set; } + + /// + /// Gets or sets the default value of the parameter, if any. + /// + public string? Default { get; set; } + } +} diff --git a/src/Config/ObjectModel/RuntimeConfig.cs b/src/Config/ObjectModel/RuntimeConfig.cs index 1172b60a8f..a450e1265c 100644 --- a/src/Config/ObjectModel/RuntimeConfig.cs +++ b/src/Config/ObjectModel/RuntimeConfig.cs @@ -22,6 +22,9 @@ public record RuntimeConfig public RuntimeOptions? 
Runtime { get; init; } + [JsonPropertyName("azure-key-vault")] + public AzureKeyVaultOptions? AzureKeyVault { get; init; } + public virtual RuntimeEntities Entities { get; init; } public DataSourceFiles? DataSourceFiles { get; init; } @@ -69,6 +72,15 @@ Runtime.Rest is null || Runtime.Rest.Enabled) && DataSource.DatabaseType != DatabaseType.CosmosDB_NoSQL; + /// + /// Retrieves the value of runtime.mcp.enabled property if present, default is true. + /// + [JsonIgnore] + public bool IsMcpEnabled => + Runtime is null || + Runtime.Mcp is null || + Runtime.Mcp.Enabled; + [JsonIgnore] public bool IsHealthEnabled => Runtime is null || @@ -124,6 +136,25 @@ public string GraphQLPath } } + /// + /// The path at which MCP API is available + /// + [JsonIgnore] + public string McpPath + { + get + { + if (Runtime is null || Runtime.Mcp is null || Runtime.Mcp.Path is null) + { + return McpRuntimeOptions.DEFAULT_PATH; + } + else + { + return Runtime.Mcp.Path; + } + } + } + /// /// Indicates whether introspection is allowed or not. /// @@ -216,11 +247,13 @@ public RuntimeConfig( DataSource DataSource, RuntimeEntities Entities, RuntimeOptions? Runtime = null, - DataSourceFiles? DataSourceFiles = null) + DataSourceFiles? DataSourceFiles = null, + AzureKeyVaultOptions? AzureKeyVault = null) { this.Schema = Schema ?? DEFAULT_CONFIG_SCHEMA_LINK; this.DataSource = DataSource; this.Runtime = Runtime; + this.AzureKeyVault = AzureKeyVault; this.Entities = Entities; this.DefaultDataSourceName = Guid.NewGuid().ToString(); @@ -305,7 +338,7 @@ public RuntimeConfig( /// Dictionary mapping datasourceName to datasource object. /// Dictionary mapping entityName to datasourceName. /// Datasource files which represent list of child runtimeconfigs for multi-db scenario. 
- public RuntimeConfig(string Schema, DataSource DataSource, RuntimeOptions Runtime, RuntimeEntities Entities, string DefaultDataSourceName, Dictionary DataSourceNameToDataSource, Dictionary EntityNameToDataSourceName, DataSourceFiles? DataSourceFiles = null) + public RuntimeConfig(string Schema, DataSource DataSource, RuntimeOptions Runtime, RuntimeEntities Entities, string DefaultDataSourceName, Dictionary DataSourceNameToDataSource, Dictionary EntityNameToDataSourceName, DataSourceFiles? DataSourceFiles = null, AzureKeyVaultOptions? AzureKeyVault = null) { this.Schema = Schema; this.DataSource = DataSource; @@ -315,6 +348,7 @@ public RuntimeConfig(string Schema, DataSource DataSource, RuntimeOptions Runtim _dataSourceNameToDataSource = DataSourceNameToDataSource; _entityNameToDataSourceName = EntityNameToDataSourceName; this.DataSourceFiles = DataSourceFiles; + this.AzureKeyVault = AzureKeyVault; SetupDataSourcesUsed(); } @@ -586,6 +620,11 @@ public uint MaxPageSize() return (uint?)Runtime?.Pagination?.MaxPageSize ?? PaginationOptions.MAX_PAGE_SIZE; } + public bool NextLinkRelative() + { + return Runtime?.Pagination?.NextLinkRelative ?? false; + } + public int MaxResponseSizeMB() { return Runtime?.Host?.MaxResponseSizeMB ?? HostOptions.MAX_RESPONSE_LENGTH_DAB_ENGINE_MB; @@ -696,4 +735,10 @@ public LogLevel GetConfiguredLogLevel(string loggerFilter = "") return LogLevel.Error; } + + /// + /// Gets the MCP DML tools configuration + /// + [JsonIgnore] + public DmlToolsConfig? McpDmlTools => Runtime?.Mcp?.DmlTools; } diff --git a/src/Config/ObjectModel/RuntimeOptions.cs b/src/Config/ObjectModel/RuntimeOptions.cs index 8e05df4b62..6f6c046651 100644 --- a/src/Config/ObjectModel/RuntimeOptions.cs +++ b/src/Config/ObjectModel/RuntimeOptions.cs @@ -10,6 +10,7 @@ public record RuntimeOptions { public RestRuntimeOptions? Rest { get; init; } public GraphQLRuntimeOptions? GraphQL { get; init; } + public McpRuntimeOptions? Mcp { get; init; } public HostOptions? 
Host { get; set; } public string? BaseRoute { get; init; } public TelemetryOptions? Telemetry { get; init; } @@ -21,6 +22,7 @@ public record RuntimeOptions public RuntimeOptions( RestRuntimeOptions? Rest, GraphQLRuntimeOptions? GraphQL, + McpRuntimeOptions? Mcp, HostOptions? Host, string? BaseRoute = null, TelemetryOptions? Telemetry = null, @@ -30,6 +32,7 @@ public RuntimeOptions( { this.Rest = Rest; this.GraphQL = GraphQL; + this.Mcp = Mcp; this.Host = Host; this.BaseRoute = BaseRoute; this.Telemetry = Telemetry; @@ -60,6 +63,12 @@ GraphQL is null || GraphQL?.Enabled is null || GraphQL?.Enabled is true; + [JsonIgnore] + public bool IsMcpEnabled => + Mcp is null || + Mcp?.Enabled is null || + Mcp?.Enabled is true; + [JsonIgnore] public bool IsHealthCheckEnabled => Health is null || diff --git a/src/Config/ObjectModel/TelemetryOptions.cs b/src/Config/ObjectModel/TelemetryOptions.cs index ed2099f2a4..b0343e53bc 100644 --- a/src/Config/ObjectModel/TelemetryOptions.cs +++ b/src/Config/ObjectModel/TelemetryOptions.cs @@ -9,7 +9,17 @@ namespace Azure.DataApiBuilder.Config.ObjectModel; /// /// Represents the options for telemetry. /// -public record TelemetryOptions(ApplicationInsightsOptions? ApplicationInsights = null, OpenTelemetryOptions? OpenTelemetry = null, Dictionary? LoggerLevel = null) +/// Options for configuring Application Insights. +/// Options for configuring Open Telemetry. +/// Options for configuring Azure Log Analytics. +/// Options for configuring File Sink. +/// Options for configuring the Log Level filters. +public record TelemetryOptions( + ApplicationInsightsOptions? ApplicationInsights = null, + OpenTelemetryOptions? OpenTelemetry = null, + AzureLogAnalyticsOptions? AzureLogAnalytics = null, + FileSinkOptions? File = null, + Dictionary? LoggerLevel = null) { [JsonPropertyName("log-level")] public Dictionary? 
LoggerLevel { get; init; } = LoggerLevel; diff --git a/src/Config/RuntimeConfigLoader.cs b/src/Config/RuntimeConfigLoader.cs index b4f72335c3..f78c32ebc1 100644 --- a/src/Config/RuntimeConfigLoader.cs +++ b/src/Config/RuntimeConfigLoader.cs @@ -246,6 +246,8 @@ public static JsonSerializerOptions GetSerializationOptions( options.Converters.Add(new EntityHealthOptionsConvertorFactory()); options.Converters.Add(new RestRuntimeOptionsConverterFactory()); options.Converters.Add(new GraphQLRuntimeOptionsConverterFactory(replaceEnvVar)); + options.Converters.Add(new McpRuntimeOptionsConverterFactory(replaceEnvVar)); + options.Converters.Add(new DmlToolsConfigConverter()); options.Converters.Add(new EntitySourceConverterFactory(replaceEnvVar)); options.Converters.Add(new EntityGraphQLOptionsConverterFactory(replaceEnvVar)); options.Converters.Add(new EntityRestOptionsConverterFactory(replaceEnvVar)); @@ -258,6 +260,10 @@ public static JsonSerializerOptions GetSerializationOptions( options.Converters.Add(new MultipleMutationOptionsConverter(options)); options.Converters.Add(new DataSourceConverterFactory(replaceEnvVar)); options.Converters.Add(new HostOptionsConvertorFactory()); + options.Converters.Add(new AKVRetryPolicyOptionsConverterFactory(replaceEnvVar)); + options.Converters.Add(new AzureLogAnalyticsOptionsConverterFactory(replaceEnvVar)); + options.Converters.Add(new AzureLogAnalyticsAuthOptionsConverter(replaceEnvVar)); + options.Converters.Add(new FileSinkConverter(replaceEnvVar)); if (replaceEnvVar) { diff --git a/src/Core/Authorization/AuthorizationResolver.cs b/src/Core/Authorization/AuthorizationResolver.cs index 2ab6e70a4c..f0c73fafcf 100644 --- a/src/Core/Authorization/AuthorizationResolver.cs +++ b/src/Core/Authorization/AuthorizationResolver.cs @@ -620,6 +620,11 @@ public static Dictionary> GetAllAuthenticatedUserClaims(Http // 'roles' claim has already been processed. 
if (claim.Type.Equals(AuthenticationOptions.ROLE_CLAIM_TYPE)) { + if (!resolvedClaims.TryAdd(AuthenticationOptions.ORIGINAL_ROLE_CLAIM_TYPE, new List() { claim })) + { + resolvedClaims[AuthenticationOptions.ORIGINAL_ROLE_CLAIM_TYPE].Add(claim); + } + continue; } diff --git a/src/Core/Configurations/RuntimeConfigValidator.cs b/src/Core/Configurations/RuntimeConfigValidator.cs index 4d293d0cd2..fd8f811c9e 100644 --- a/src/Core/Configurations/RuntimeConfigValidator.cs +++ b/src/Core/Configurations/RuntimeConfigValidator.cs @@ -42,6 +42,13 @@ public class RuntimeConfigValidator : IConfigValidator // of the form @claims.*** delimited by space character,end of the line or end of the string. private static readonly string _claimChars = @"@claims\.[^\s\)]*"; + // List of databases that support row level policy with create action + private static readonly HashSet _databaseTypesSupportingCreatePolicy = + [ + DatabaseType.MSSQL, + DatabaseType.DWSQL + ]; + // Error messages. public const string INVALID_CLAIMS_IN_POLICY_ERR_MSG = "One or more claim types supplied in the database policy are not supported."; @@ -74,6 +81,8 @@ public void ValidateConfigProperties() ValidateGlobalEndpointRouteConfig(runtimeConfig); ValidateAppInsightsTelemetryConnectionString(runtimeConfig); ValidateLoggerFilters(runtimeConfig); + ValidateAzureLogAnalyticsAuth(runtimeConfig); + ValidateFileSinkPath(runtimeConfig); // Running these graphQL validations only in development mode to ensure // fast startup of engine in production mode. @@ -149,6 +158,81 @@ public static void ValidateLoggerFilters(RuntimeConfig runtimeConfig) } } + /// + /// The auth options in Azure Log Analytics are required if it is enabled. 
+ /// + public void ValidateAzureLogAnalyticsAuth(RuntimeConfig runtimeConfig) + { + if (runtimeConfig.Runtime!.Telemetry is not null && runtimeConfig.Runtime.Telemetry.AzureLogAnalytics is not null) + { + AzureLogAnalyticsOptions azureLogAnalyticsOptions = runtimeConfig.Runtime.Telemetry.AzureLogAnalytics; + AzureLogAnalyticsAuthOptions? azureLogAnalyticsAuthOptions = azureLogAnalyticsOptions.Auth; + if (azureLogAnalyticsOptions.Enabled && (azureLogAnalyticsAuthOptions is null || string.IsNullOrWhiteSpace(azureLogAnalyticsAuthOptions.CustomTableName) || + string.IsNullOrWhiteSpace(azureLogAnalyticsAuthOptions.DcrImmutableId) || string.IsNullOrWhiteSpace(azureLogAnalyticsAuthOptions.DceEndpoint))) + { + HandleOrRecordException(new DataApiBuilderException( + message: "Azure Log Analytics Auth options 'custom-table-name', 'dcr-immutable-id', and 'dce-endpoint' cannot be null or empty if enabled.", + statusCode: HttpStatusCode.ServiceUnavailable, + subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError)); + } + } + } + + /// + /// The path in File Sink is required if it is enabled. 
+ /// + public void ValidateFileSinkPath(RuntimeConfig runtimeConfig) + { + if (runtimeConfig.Runtime!.Telemetry is not null && runtimeConfig.Runtime.Telemetry.File is not null) + { + FileSinkOptions fileSinkOptions = runtimeConfig.Runtime.Telemetry.File; + if (fileSinkOptions.Enabled && string.IsNullOrWhiteSpace(fileSinkOptions.Path)) + { + HandleOrRecordException(new DataApiBuilderException( + message: "File option 'path' cannot be null or empty if enabled.", + statusCode: HttpStatusCode.ServiceUnavailable, + subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError)); + } + + if (fileSinkOptions.Path.Length > 260) + { + _logger.LogWarning("File option 'path' exceeds 260 characters, it is recommended that the path does not exceed this limit."); + } + + // Checks if path is valid by checking if there are any invalid characters and then + // attempting to retrieve the full path, returns an exception if it is unable. + try + { + string fileName = System.IO.Path.GetFileName(fileSinkOptions.Path); + if (string.IsNullOrWhiteSpace(fileName) || fileName.IndexOfAny(System.IO.Path.GetInvalidFileNameChars()) != -1) + { + HandleOrRecordException(new DataApiBuilderException( + message: "File option 'path' cannot have invalid characters in its directory or file name.", + statusCode: HttpStatusCode.ServiceUnavailable, + subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError)); + } + + string? 
directoryName = System.IO.Path.GetDirectoryName(fileSinkOptions.Path); + if (directoryName is not null && directoryName.IndexOfAny(System.IO.Path.GetInvalidPathChars()) != -1) + { + HandleOrRecordException(new DataApiBuilderException( + message: "File option 'path' cannot have invalid characters in its directory or file name.", + statusCode: HttpStatusCode.ServiceUnavailable, + subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError)); + } + + System.IO.Path.GetFullPath(fileSinkOptions.Path); + } + catch (Exception ex) + { + HandleOrRecordException(new DataApiBuilderException( + message: ex.Message, + statusCode: HttpStatusCode.ServiceUnavailable, + subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError)); + } + } + } + /// /// This method runs several validations against the config file such as schema validation, /// validation of entities metadata, validation of permissions, validation of entity configuration. @@ -618,11 +702,11 @@ private void ValidateNameRequirements(string entityName) /// The config that will be validated. public void ValidateGlobalEndpointRouteConfig(RuntimeConfig runtimeConfig) { - // Both REST and GraphQL endpoints cannot be disabled at the same time. - if (!runtimeConfig.IsRestEnabled && !runtimeConfig.IsGraphQLEnabled) + // REST, GraphQL and MCP endpoints cannot be disabled at the same time. 
+ if (!runtimeConfig.IsRestEnabled && !runtimeConfig.IsGraphQLEnabled && !runtimeConfig.IsMcpEnabled) { HandleOrRecordException(new DataApiBuilderException( - message: $"Both GraphQL and REST endpoints are disabled.", + message: $"GraphQL, REST, and MCP endpoints are disabled.", statusCode: HttpStatusCode.ServiceUnavailable, subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError)); } @@ -651,19 +735,30 @@ public void ValidateGlobalEndpointRouteConfig(RuntimeConfig runtimeConfig) ValidateRestURI(runtimeConfig); ValidateGraphQLURI(runtimeConfig); - // Do not check for conflicts if GraphQL or REST endpoints are disabled. - if (!runtimeConfig.IsRestEnabled || !runtimeConfig.IsGraphQLEnabled) + ValidateMcpUri(runtimeConfig); + // Do not check for conflicts if two of the endpoints are disabled between GraphQL, REST, and MCP. + if ((!runtimeConfig.IsRestEnabled && !runtimeConfig.IsGraphQLEnabled) || + (!runtimeConfig.IsRestEnabled && !runtimeConfig.IsMcpEnabled) || + (!runtimeConfig.IsGraphQLEnabled && !runtimeConfig.IsMcpEnabled)) { return; } if (string.Equals( - a: runtimeConfig.RestPath, - b: runtimeConfig.GraphQLPath, - comparisonType: StringComparison.OrdinalIgnoreCase)) + a: runtimeConfig.RestPath, + b: runtimeConfig.GraphQLPath, + comparisonType: StringComparison.OrdinalIgnoreCase) || + string.Equals( + a: runtimeConfig.RestPath, + b: runtimeConfig.McpPath, + comparisonType: StringComparison.OrdinalIgnoreCase) || + string.Equals( + a: runtimeConfig.McpPath, + b: runtimeConfig.GraphQLPath, + comparisonType: StringComparison.OrdinalIgnoreCase)) { HandleOrRecordException(new DataApiBuilderException( - message: $"Conflicting GraphQL and REST path configuration.", + message: $"Conflicting path configuration between GraphQL, REST, and MCP.", statusCode: HttpStatusCode.ServiceUnavailable, subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError)); } @@ -710,6 +805,41 @@ public void ValidateGraphQLURI(RuntimeConfig runtimeConfig) } } 
+ /// + /// Method to validate that the MCP URI (MCP path prefix). + /// + /// + public void ValidateMcpUri(RuntimeConfig runtimeConfig) + { + // Skip validation if MCP is not configured + if (runtimeConfig.Runtime?.Mcp is null) + { + return; + } + + // Get the MCP path from the configuration + string? mcpPath = runtimeConfig.Runtime.Mcp.Path; + + // Validate that the path is not null or empty when MCP is configured + if (string.IsNullOrWhiteSpace(mcpPath)) + { + HandleOrRecordException(new DataApiBuilderException( + message: "MCP path cannot be null or empty when MCP is configured.", + statusCode: HttpStatusCode.ServiceUnavailable, + subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError)); + return; + } + + // Validate the MCP path using the same validation as REST and GraphQL + if (!RuntimeConfigValidatorUtil.TryValidateUriComponent(mcpPath, out string exceptionMsgSuffix)) + { + HandleOrRecordException(new DataApiBuilderException( + message: $"MCP path {exceptionMsgSuffix}", + statusCode: HttpStatusCode.ServiceUnavailable, + subStatusCode: DataApiBuilderException.SubStatusCodes.ConfigValidationError)); + } + } + private void ValidateAuthenticationOptions(RuntimeConfig runtimeConfig) { // Bypass validation of auth if there is no auth provided @@ -808,7 +938,8 @@ public void ValidatePermissionsInConfig(RuntimeConfig runtimeConfig) DataSource entityDataSource = runtimeConfig.GetDataSourceFromEntityName(entityName); - if (entityDataSource.DatabaseType is not DatabaseType.MSSQL && !IsValidDatabasePolicyForAction(action)) + // Create operation does not support defining a database policy for certain database types. + if (!_databaseTypesSupportingCreatePolicy.Contains(entityDataSource.DatabaseType) && !IsValidDatabasePolicyForAction(action)) { throw new DataApiBuilderException( message: $"The Create action does not support defining a database policy." 
+ diff --git a/src/Core/Generator/SchemaGenerator.cs b/src/Core/Generator/SchemaGenerator.cs index 3950da26b1..ebd2aa9be9 100644 --- a/src/Core/Generator/SchemaGenerator.cs +++ b/src/Core/Generator/SchemaGenerator.cs @@ -28,11 +28,16 @@ internal class SchemaGenerator // List of JSON documents to process. private List _data; + // Name of the Azure Cosmos DB container from which the JSON data is obtained. private string _containerName; + // Dictionary mapping plural entity names to singular names based on the provided configuration. private Dictionary _entityAndSingularNameMapping = new(); + // Entities from config for description lookup + private IReadOnlyDictionary? _entities; + /// /// Initializes a new instance of the class. /// @@ -57,6 +62,9 @@ private SchemaGenerator(List data, string containerName, RuntimeCo { _entityAndSingularNameMapping.Add(item.Value.GraphQL.Singular.Pascalize(), item.Key); } + + // Convert RuntimeEntities to Dictionary for description lookup + _entities = config.Entities.ToDictionary(x => x.Key, x => x.Value); } } @@ -129,6 +137,22 @@ private string GenerateGQLSchema() // Determine if the entity is the root entity. bool isRoot = entity.Key == _containerName.Pascalize(); + // Get description from config if available + string? description = null; + if (_entityAndSingularNameMapping.ContainsKey(entity.Key) && _entities != null) + { + string configEntityName = _entityAndSingularNameMapping[entity.Key]; + if (_entities.ContainsKey(configEntityName)) + { + description = _entities[configEntityName].Description; + } + } + + if (!string.IsNullOrWhiteSpace(description)) + { + sb.AppendLine($"\"\"\"{description}\"\"\""); + } + sb.Append($"type {entity.Key} "); // Append model directive if applicable. 
diff --git a/src/Core/Models/GraphQLFilterParsers.cs b/src/Core/Models/GraphQLFilterParsers.cs index f93c7f9f3d..153def832f 100644 --- a/src/Core/Models/GraphQLFilterParsers.cs +++ b/src/Core/Models/GraphQLFilterParsers.cs @@ -43,7 +43,7 @@ public GQLFilterParser(RuntimeConfigProvider runtimeConfigProvider, IMetadataPro /// Parse a predicate for a *FilterInput input type /// /// The GraphQL context, used to get the query variables - /// An IInputField object which describes the schema of the filter argument + /// An IInputValueDefinition object which describes the schema of the filter argument /// The fields in the *FilterInput being processed /// The query structure for the entity being filtered providing /// the source alias of the underlying *FilterInput being processed, @@ -51,7 +51,7 @@ public GQLFilterParser(RuntimeConfigProvider runtimeConfigProvider, IMetadataPro /// and the function that parametrizes literals before they are written in string predicate operands. public Predicate Parse( IMiddlewareContext ctx, - IInputField filterArgumentSchema, + IInputValueDefinition filterArgumentSchema, List fields, BaseQueryStructure queryStructure) { @@ -285,7 +285,7 @@ public Predicate Parse( /// private void HandleNestedFilterForCosmos( IMiddlewareContext ctx, - IInputField filterField, + IInputValueDefinition filterField, List subfields, string columnName, string entityType, @@ -466,7 +466,7 @@ public HttpContext GetHttpContextFromMiddlewareContext(IMiddlewareContext ctx) /// the fields. /// /// The GraphQL context, used to get the query variables - /// An IInputField object which describes the schema of the scalar input argument (e.g. IntFilterInput) + /// An IInputValueDefinition object which describes the schema of the scalar input argument (e.g. 
IntFilterInput) /// The name of the field /// The subfields of the scalar field /// The db schema name to which the table belongs @@ -476,7 +476,7 @@ public HttpContext GetHttpContextFromMiddlewareContext(IMiddlewareContext ctx) /// Flag to give a hint about the node type. It is only applicable for CosmosDB private static Predicate ParseScalarType( IMiddlewareContext ctx, - IInputField argumentSchema, + IInputValueDefinition argumentSchema, string fieldName, List fields, string schemaName, @@ -498,8 +498,8 @@ private static Predicate ParseScalarType( /// If and/or is passed as empty, a predicate representing 1 != 1 is returned /// /// The GraphQL context, used to get the query variables - /// An IInputField object which describes the and/or filter input argument - /// An IInputField object which describes the base filter input argument (e.g. BookFilterInput) + /// An IInputValueDefinition object which describes the and/or filter input argument + /// An IInputValueDefinition object which describes the base filter input argument (e.g. BookFilterInput) /// to which the and/or belongs /// The subfields of the and/or field /// The db schema name to which the table belongs @@ -510,8 +510,8 @@ private static Predicate ParseScalarType( /// Parametrizes literals before they are written in string predicate operands private Predicate ParseAndOr( IMiddlewareContext ctx, - IInputField argumentSchema, - IInputField filterArgumentSchema, + IInputValueDefinition argumentSchema, + IInputValueDefinition filterArgumentSchema, List fields, BaseQueryStructure baseQuery, PredicateOperation op) @@ -600,14 +600,14 @@ public static class FieldFilterParser /// Parse a scalar field into a predicate /// /// The GraphQL context, used to get the query variables - /// An IInputField object which describes the schema of the scalar input argument (e.g. IntFilterInput) + /// An IInputValueDefinition object which describes the schema of the scalar input argument (e.g. 
IntFilterInput) /// The table column targeted by the field /// The subfields of the scalar field /// Parametrizes literals before they are written in string predicate operands /// Flag which gives a hint about the node type in the given schema. only for CosmosDB it can be of list type. Refer here. public static Predicate Parse( IMiddlewareContext ctx, - IInputField argumentSchema, + IInputValueDefinition argumentSchema, Column column, List fields, Func processLiterals, diff --git a/src/Core/Models/PaginationMetadata.cs b/src/Core/Models/PaginationMetadata.cs index 2d00c06fd7..32ce7d8b46 100644 --- a/src/Core/Models/PaginationMetadata.cs +++ b/src/Core/Models/PaginationMetadata.cs @@ -17,6 +17,8 @@ public class PaginationMetadata : IMetadata /// public bool IsPaginated { get; set; } = DEFAULT_PAGINATION_FLAGS_VALUE; + public int TotalCount { get; set; } + /// /// Shows if items is requested from the pagination result /// @@ -37,6 +39,8 @@ public class PaginationMetadata : IMetadata /// public bool RequestedHasNextPage { get; set; } = DEFAULT_PAGINATION_FLAGS_VALUE; + public bool RequestedTotalCount { get; set; } = DEFAULT_PAGINATION_FLAGS_VALUE; + /// /// Keeps a reference to the SqlQueryStructure the pagination metadata is associated with /// diff --git a/src/Core/Parsers/FilterParser.cs b/src/Core/Parsers/FilterParser.cs index ec765e26a6..c9cfc1eb53 100644 --- a/src/Core/Parsers/FilterParser.cs +++ b/src/Core/Parsers/FilterParser.cs @@ -44,7 +44,6 @@ public FilterClause GetFilterClause(string filterQueryString, string resourcePat { if (_model is null) { - throw new DataApiBuilderException( message: "The runtime has not been initialized with an Edm model.", statusCode: HttpStatusCode.InternalServerError, diff --git a/src/Core/Parsers/RequestParser.cs b/src/Core/Parsers/RequestParser.cs index bb4dd8d51e..8d2d2d8a34 100644 --- a/src/Core/Parsers/RequestParser.cs +++ b/src/Core/Parsers/RequestParser.cs @@ -30,7 +30,7 @@ public class RequestParser /// /// Prefix used 
for specifying limit in the query string of the URL. /// - public const string FIRST_URL = "$first"; + public const string FIRST_URL = "$top"; /// /// Prefix used for specifying paging in the query string of the URL. /// @@ -147,7 +147,7 @@ public static void ParseQueryString(RestRequestContext context, ISqlMetadataProv /// associated with the sort param. /// A List /// - private static (List?, List?) GenerateOrderByLists(RestRequestContext context, + public static (List?, List?) GenerateOrderByLists(RestRequestContext context, ISqlMetadataProvider sqlMetadataProvider, string sortQueryString) { diff --git a/src/Core/Resolvers/BaseQueryStructure.cs b/src/Core/Resolvers/BaseQueryStructure.cs index 88d30e521c..7f5564f831 100644 --- a/src/Core/Resolvers/BaseQueryStructure.cs +++ b/src/Core/Resolvers/BaseQueryStructure.cs @@ -198,7 +198,7 @@ public virtual SourceDefinition GetUnderlyingSourceDefinition() /// /// Extracts the *Connection.items schema field from the *Connection schema field /// - internal static IObjectField ExtractItemsSchemaField(IObjectField connectionSchemaField) + internal static ObjectField ExtractItemsSchemaField(ObjectField connectionSchemaField) { return connectionSchemaField.Type.NamedType().Fields[QueryBuilder.PAGINATION_FIELD_NAME]; } diff --git a/src/Core/Resolvers/BaseSqlQueryBuilder.cs b/src/Core/Resolvers/BaseSqlQueryBuilder.cs index a509e9d842..7ae3d80028 100644 --- a/src/Core/Resolvers/BaseSqlQueryBuilder.cs +++ b/src/Core/Resolvers/BaseSqlQueryBuilder.cs @@ -143,6 +143,13 @@ private static string GetComparisonFromDirection(OrderBy direction) /// protected virtual string Build(Column column) { + // If the table is a subQUery, we return some fancy JSON_VALUE + + if(column.TableAlias != null && column.TableAlias.Contains("_subq") && column is OrderByColumn) + { + return $"JSON_VALUE({QuoteIdentifier(column.TableAlias)}.[data], '$.{column.ColumnName}')"; + } + // If the table alias is not empty, we return [{SourceAlias}].[{Column}] if 
(!string.IsNullOrEmpty(column.TableAlias)) { diff --git a/src/Core/Resolvers/CosmosClientProvider.cs b/src/Core/Resolvers/CosmosClientProvider.cs index 374492325c..82e2b5e9c7 100644 --- a/src/Core/Resolvers/CosmosClientProvider.cs +++ b/src/Core/Resolvers/CosmosClientProvider.cs @@ -79,7 +79,7 @@ private void InitializeClient(RuntimeConfig? configuration) } else if (!_accessToken.ContainsKey(dataSourceName)) { - client = new CosmosClient(accountEndPoint, new DefaultAzureCredential(), options); + client = new CosmosClient(accountEndPoint, new DefaultAzureCredential(), options); // CodeQL [SM05137] DefaultAzureCredential will use Managed Identity if available or fallback to default. } else { diff --git a/src/Core/Resolvers/CosmosQueryEngine.cs b/src/Core/Resolvers/CosmosQueryEngine.cs index e9d4caa380..7525318089 100644 --- a/src/Core/Resolvers/CosmosQueryEngine.cs +++ b/src/Core/Resolvers/CosmosQueryEngine.cs @@ -241,14 +241,14 @@ public Task ExecuteAsync(StoredProcedureRequestContext context, s } /// - public JsonElement ResolveObject(JsonElement element, IObjectField fieldSchema, ref IMetadata metadata) + public JsonElement ResolveObject(JsonElement element, ObjectField fieldSchema, ref IMetadata metadata) { return element; } /// /// metadata is not used in this method, but it is required by the interface. - public object ResolveList(JsonElement array, IObjectField fieldSchema, ref IMetadata metadata) + public object ResolveList(JsonElement array, ObjectField fieldSchema, ref IMetadata metadata) { IType listType = fieldSchema.Type; // Is the List type nullable? [...]! vs [...] 
diff --git a/src/Core/Resolvers/IQueryEngine.cs b/src/Core/Resolvers/IQueryEngine.cs index 0350b3efd2..1b89d3df54 100644 --- a/src/Core/Resolvers/IQueryEngine.cs +++ b/src/Core/Resolvers/IQueryEngine.cs @@ -58,11 +58,11 @@ public interface IQueryEngine /// /// Resolves a jsonElement representing an inner object based on the field's schema and metadata /// - public JsonElement ResolveObject(JsonElement element, IObjectField fieldSchema, ref IMetadata metadata); + public JsonElement ResolveObject(JsonElement element, ObjectField fieldSchema, ref IMetadata metadata); /// /// Resolves a jsonElement representing a list type based on the field's schema and metadata /// - public object ResolveList(JsonElement array, IObjectField fieldSchema, ref IMetadata? metadata); + public object ResolveList(JsonElement array, ObjectField fieldSchema, ref IMetadata? metadata); } } diff --git a/src/Core/Resolvers/MsSqlQueryBuilder.cs b/src/Core/Resolvers/MsSqlQueryBuilder.cs index 798c20975a..60f7e26b88 100644 --- a/src/Core/Resolvers/MsSqlQueryBuilder.cs +++ b/src/Core/Resolvers/MsSqlQueryBuilder.cs @@ -32,6 +32,8 @@ public override string QuoteIdentifier(string ident) /// public string Build(SqlQueryStructure structure) { + StringBuilder query = new(); + string dataIdent = QuoteIdentifier(SqlQueryStructure.DATA_IDENT); string fromSql = $"{QuoteIdentifier(structure.DatabaseObject.SchemaName)}.{QuoteIdentifier(structure.DatabaseObject.Name)} " + $"AS {QuoteIdentifier($"{structure.SourceAlias}")}{Build(structure.Joins)}"; @@ -45,16 +47,47 @@ public string Build(SqlQueryStructure structure) string aggregations = BuildAggregationColumns(structure); - StringBuilder query = new(); - - query.Append($"SELECT TOP {structure.Limit()} {WrappedColumns(structure)} {aggregations}") - .Append($" FROM {fromSql}") - .Append($" WHERE {predicates}") - .Append(BuildGroupBy(structure)) - .Append(BuildHaving(structure)) - .Append(BuildOrderBy(structure)) - .Append(BuildJsonPath(structure)); + //Add 
recordcount if needed + if (structure.IsListQuery) + { + StringBuilder recordCountSql = new(); + + recordCountSql.Append($"SELECT cast(count(1) as int) as RecordCount ") + .Append($" FROM {fromSql}") + .Append($" WHERE {predicates}") + .Append(BuildGroupBy(structure)) + .Append(BuildHaving(structure)); + + fromSql += $" OUTER APPLY ({recordCountSql.ToString()}) RecordCountQuery"; + + query.Append ($"SELECT {WrappedColumns(structure)} {aggregations}, RecordCountQuery.RecordCount") + .Append($" FROM {fromSql}") + .Append($" WHERE {predicates}") + .Append(BuildGroupBy(structure)) + .Append(BuildHaving(structure)) + .Append(BuildOrderBy(structure)) + .Append($" OFFSET {structure.Offset()} ROWS FETCH NEXT {structure.Limit()} ROWS ONLY") + .Append(BuildJsonPath(structure)); + } else + { + query.Append ($"SELECT {WrappedColumns(structure)} {aggregations}") + .Append($" FROM {fromSql}") + .Append($" WHERE {predicates}") + .Append(BuildGroupBy(structure)) + .Append(BuildHaving(structure)) + .Append(BuildOrderBy(structure)) + .Append(BuildJsonPath(structure)); + } + /* + query.Append($" FROM {fromSql}") + .Append($" WHERE {predicates}") + .Append(BuildGroupBy(structure)) + .Append(BuildHaving(structure)) + .Append(BuildOrderBy(structure)) + .Append(" OFFSET {structure.Offset()} ROWS FETCH NEXT {structure.Limit()} ROWS ONLY") + .Append(BuildJsonPath(structure)); + */ return query.ToString(); } diff --git a/src/Core/Resolvers/MsSqlQueryExecutor.cs b/src/Core/Resolvers/MsSqlQueryExecutor.cs index 45d641bb32..5cbe9f6a76 100644 --- a/src/Core/Resolvers/MsSqlQueryExecutor.cs +++ b/src/Core/Resolvers/MsSqlQueryExecutor.cs @@ -44,7 +44,7 @@ public class MsSqlQueryExecutor : QueryExecutor public override IDictionary ConnectionStringBuilders => base.ConnectionStringBuilders; - public DefaultAzureCredential AzureCredential { get; set; } = new(); + public DefaultAzureCredential AzureCredential { get; set; } = new(); // CodeQL [SM05137] DefaultAzureCredential will use Managed Identity if 
available or fallback to default. /// /// The saved cached access token obtained from DefaultAzureCredentials @@ -284,7 +284,7 @@ public override string GetSessionParamsQuery(HttpContext? httpContext, IDictiona string paramName = $"{SESSION_PARAM_NAME}{counter.Next()}"; parameters.Add(paramName, new(claimValue)); // Append statement to set read only param value - can be set only once for a connection. - string statementToSetReadOnlyParam = "EXEC sp_set_session_context " + $"'{claimType}', " + paramName + ", @read_only = 1;"; + string statementToSetReadOnlyParam = "EXEC sp_set_session_context " + $"'{claimType}', " + paramName + ", @read_only = 0;"; sessionMapQuery = sessionMapQuery.Append(statementToSetReadOnlyParam); } diff --git a/src/Core/Resolvers/MySqlQueryExecutor.cs b/src/Core/Resolvers/MySqlQueryExecutor.cs index b28620b09a..670232b826 100644 --- a/src/Core/Resolvers/MySqlQueryExecutor.cs +++ b/src/Core/Resolvers/MySqlQueryExecutor.cs @@ -32,7 +32,7 @@ public class MySqlQueryExecutor : QueryExecutor /// private Dictionary _accessTokensFromConfiguration; - public DefaultAzureCredential AzureCredential { get; set; } = new(); + public DefaultAzureCredential AzureCredential { get; set; } = new(); // CodeQL [SM05137] DefaultAzureCredential will use Managed Identity if available or fallback to default. /// /// The MySql specific connection string builders. diff --git a/src/Core/Resolvers/PostgreSqlExecutor.cs b/src/Core/Resolvers/PostgreSqlExecutor.cs index 9bab1f8a39..70fa0f1079 100644 --- a/src/Core/Resolvers/PostgreSqlExecutor.cs +++ b/src/Core/Resolvers/PostgreSqlExecutor.cs @@ -33,7 +33,7 @@ public class PostgreSqlQueryExecutor : QueryExecutor /// private Dictionary _accessTokensFromConfiguration; - public DefaultAzureCredential AzureCredential { get; set; } = new(); + public DefaultAzureCredential AzureCredential { get; set; } = new(); // CodeQL [SM05137]: DefaultAzureCredential will use Managed Identity if available or fallback to default. 
/// /// The PostgreSql specific connection string builders. diff --git a/src/Core/Resolvers/QueryExecutor.cs b/src/Core/Resolvers/QueryExecutor.cs index 908c1bb1e8..82d20ae722 100644 --- a/src/Core/Resolvers/QueryExecutor.cs +++ b/src/Core/Resolvers/QueryExecutor.cs @@ -124,7 +124,8 @@ public QueryExecutor(DbExceptionParser dbExceptionParser, if (!ConfigProvider.IsLateConfigured) { string correlationId = HttpContextExtensions.GetLoggerCorrelationId(httpContext); - QueryExecutorLogger.LogDebug("{correlationId} Executing query: {queryText}", correlationId, sqltext); + QueryExecutorLogger.LogDebug("{correlationId} Executing query : {queryText}", correlationId, sqltext); + QueryExecutorLogger.LogDebug($"Paramaters: {string.Join(", ", parameters.Select(param => $"{param.Key}: {param.Value?.Value} (DbType: {param.Value?.DbType}, SqlDbType: {param.Value?.SqlDbType})"))}"); } TResult? result = ExecuteQueryAgainstDb(conn, sqltext, parameters, dataReaderHandler, httpContext, dataSourceName, args); @@ -202,6 +203,7 @@ public QueryExecutor(DbExceptionParser dbExceptionParser, if (!ConfigProvider.IsLateConfigured) { string correlationId = HttpContextExtensions.GetLoggerCorrelationId(httpContext); + QueryExecutorLogger.LogDebug("{correlationId} {ts} Executing query: {queryText}", correlationId, DateTime.Now.ToString() , sqltext); QueryExecutorLogger.LogDebug("{correlationId} Executing query: {queryText}", correlationId, sqltext); } @@ -474,6 +476,12 @@ public bool Read(DbDataReader reader) public async Task ExtractResultSetFromDbDataReaderAsync(DbDataReader dbDataReader, List? args = null) { + // If the first dataset has no records, try if there is a second one ... + if (!dbDataReader.HasRows) + { + dbDataReader.NextResult(); + } + DbResultSet dbResultSet = new(resultProperties: GetResultPropertiesAsync(dbDataReader).Result ?? 
new()); long availableBytes = _maxResponseSizeBytes; while (await ReadAsync(dbDataReader)) @@ -740,6 +748,12 @@ internal int StreamCharData(DbDataReader dbDataReader, long availableSize, Strin // else we throw exception. ValidateSize(availableSize, resultFieldSize); + // If the cell is empty, don't append anything to the resultJsonString and return 0. + if (resultFieldSize == 0) + { + return 0; + } + char[] buffer = new char[resultFieldSize]; // read entire field into buffer and reduce available size. @@ -766,6 +780,13 @@ internal int StreamByteData(DbDataReader dbDataReader, long availableSize, int o // else we throw exception. ValidateSize(availableSize, resultFieldSize); + // If the cell is empty, set resultBytes to an empty array and return 0. + if (resultFieldSize == 0) + { + resultBytes = Array.Empty(); + return 0; + } + resultBytes = new byte[resultFieldSize]; dbDataReader.GetBytes(ordinal: ordinal, dataOffset: 0, buffer: resultBytes, bufferOffset: 0, length: resultBytes.Length); diff --git a/src/Core/Resolvers/Sql Query Structures/BaseSqlQueryStructure.cs b/src/Core/Resolvers/Sql Query Structures/BaseSqlQueryStructure.cs index dd96d9a3fc..99a5b1e72c 100644 --- a/src/Core/Resolvers/Sql Query Structures/BaseSqlQueryStructure.cs +++ b/src/Core/Resolvers/Sql Query Structures/BaseSqlQueryStructure.cs @@ -514,8 +514,8 @@ internal static List GetSubArgumentNamesFromGQLMutArguments if (mutationParameters.TryGetValue(fieldName, out object? 
item)) { - IObjectField fieldSchema = context.Selection.Field; - IInputField itemsArgumentSchema = fieldSchema.Arguments[fieldName]; + ObjectField fieldSchema = context.Selection.Field; + IInputValueDefinition itemsArgumentSchema = fieldSchema.Arguments[fieldName]; InputObjectType itemsArgumentObject = ExecutionHelper.InputObjectTypeFromIInputField(itemsArgumentSchema); // An inline argument was set diff --git a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs index cedb98a305..10284048ff 100644 --- a/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs +++ b/src/Core/Resolvers/Sql Query Structures/SqlQueryStructure.cs @@ -3,6 +3,7 @@ using System.Data; using System.Net; +using System.Text.RegularExpressions; using Azure.DataApiBuilder.Auth; using Azure.DataApiBuilder.Config.ObjectModel; using Azure.DataApiBuilder.Core.Configurations; @@ -64,6 +65,8 @@ public class SqlQueryStructure : BaseSqlQueryStructure /// private uint? _limit = PaginationOptions.DEFAULT_PAGE_SIZE; + private int? _offset; + /// /// If this query is built because of a GraphQL query (as opposed to /// REST), then this is set to the resolver context of that query. @@ -172,7 +175,7 @@ public SqlQueryStructure( _ctx = ctx; IsMultipleCreateOperation = isMultipleCreateOperation; - IObjectField schemaField = _ctx.Selection.Field; + ObjectField schemaField = _ctx.Selection.Field; FieldNode? queryField = _ctx.Selection.SyntaxNode; IOutputType outputType = schemaField.Type; @@ -388,7 +391,7 @@ private SqlQueryStructure( IDictionary queryParams, ISqlMetadataProvider sqlMetadataProvider, IAuthorizationResolver authorizationResolver, - IObjectField schemaField, + ObjectField schemaField, FieldNode? 
queryField, IncrementingInteger counter, RuntimeConfigProvider runtimeConfigProvider, @@ -408,7 +411,7 @@ private SqlQueryStructure( // extract the query argument schemas before switching schemaField to point to *Connetion.items // since the pagination arguments are not placed on the items, but on the pagination query - IFieldCollection queryArgumentSchemas = schemaField.Arguments; + ArgumentCollection queryArgumentSchemas = schemaField.Arguments; PaginationMetadata.IsPaginated = QueryBuilder.IsPaginationType(_underlyingFieldType); @@ -459,7 +462,7 @@ private SqlQueryStructure( { if (isGroupByQuery) { - ProcessGroupByField(queryField, ctx); + ProcessGroupByField(queryField, ctx, authorizationResolver); } else { @@ -488,12 +491,25 @@ private SqlQueryStructure( // parse first parameter for all list queries object? firstObject = queryParams[QueryBuilder.PAGE_START_ARGUMENT_NAME]; _limit = runtimeConfig?.GetPaginationLimit((int?)firstObject); + } else { // if first is not passed, we should use the default page size. _limit = runtimeConfig?.DefaultPageSize(); } + + if (queryParams.ContainsKey(QueryBuilder.OFFSET_FIELD_NAME)) + { + // parse the offset parameter for all list queries + object? offsetObject = queryParams[QueryBuilder.OFFSET_FIELD_NAME]; + _offset = (int?)offsetObject; + + } + else + { + _offset = 0; + } } if (IsListQuery && queryParams.ContainsKey(QueryBuilder.FILTER_FIELD_NAME)) @@ -714,6 +730,9 @@ void ProcessPaginationFields(IReadOnlyList paginationSelections) case QueryBuilder.HAS_NEXT_PAGE_FIELD_NAME: PaginationMetadata.RequestedHasNextPage = true; break; + case QueryBuilder.TOTAL_COUNT_FIELD_NAME: + PaginationMetadata.RequestedTotalCount = true; + break; case QueryBuilder.GROUP_BY_FIELD_NAME: PaginationMetadata.RequestedGroupBy = true; break; @@ -796,7 +815,7 @@ private void AddGraphQLFields(IReadOnlyList selections, RuntimeC } else { - IObjectField? subschemaField = _underlyingFieldType.Fields[fieldName]; + ObjectField? 
subschemaField = _underlyingFieldType.Fields[fieldName]; if (_ctx == null) { @@ -877,12 +896,14 @@ private void AddGraphQLFields(IReadOnlyList selections, RuntimeC /// } /// } /// - private void ProcessGroupByField(FieldNode groupByField, IMiddlewareContext ctx) + private void ProcessGroupByField(FieldNode groupByField, IMiddlewareContext ctx, IAuthorizationResolver authorizationResolver) { // Extract 'fields' argument ArgumentNode? fieldsArg = groupByField.Arguments.FirstOrDefault(a => a.Name.Value == QueryBuilder.GROUP_BY_FIELDS_FIELD_NAME); HashSet fieldsInArgument = new(); + string roleOfGraphQLRequest = Authorization.AuthorizationResolver.GetRoleOfGraphQLRequest(ctx); + if (fieldsArg is { Value: ListValueNode fieldsList }) { foreach (EnumValueNode value in fieldsList.Items) @@ -890,6 +911,18 @@ private void ProcessGroupByField(FieldNode groupByField, IMiddlewareContext ctx) string fieldName = value.Value; string columnName = MetadataProvider.TryGetBackingColumn(EntityName, fieldName, out string? backingColumn) ? backingColumn : fieldName; + // Validate that the current role has access to groupBy argument fields + IEnumerable roles = authorizationResolver.GetRolesForField(EntityName, field: columnName, operation: EntityActionOperation.Read); + if (roles != null && !roles.Contains(roleOfGraphQLRequest, StringComparer.OrdinalIgnoreCase)) + { + // raising exception for the first unauthorized groupBy field found + throw new DataApiBuilderException( + message: string.Format(DataApiBuilderException.GRAPHQL_GROUPBY_FIELD_AUTHZ_FAILURE, fieldName), + statusCode: HttpStatusCode.Forbidden, + subStatusCode: DataApiBuilderException.SubStatusCodes.AuthorizationCheckFailed + ); + } + GroupByMetadata.Fields[columnName] = new Column(DatabaseObject.SchemaName, DatabaseObject.Name, columnName, SourceAlias); AddColumn(fieldName, backingColumn ?? 
fieldName); fieldsInArgument.Add(fieldName); @@ -913,7 +946,7 @@ private void ProcessGroupByField(FieldNode groupByField, IMiddlewareContext ctx) case QueryBuilder.GROUP_BY_AGGREGATE_FIELD_NAME: GroupByMetadata.RequestedAggregations = true; - ProcessAggregations(field, ctx); + ProcessAggregations(field, ctx, authorizationResolver, roleOfGraphQLRequest); break; } } @@ -963,7 +996,7 @@ private void ProcessGroupByFieldSelections(FieldNode groupByFieldSelection, Hash /// /// The FieldNode representing the aggregations field in the GraphQL query. /// middleware context. - private void ProcessAggregations(FieldNode aggregationsField, IMiddlewareContext ctx) + private void ProcessAggregations(FieldNode aggregationsField, IMiddlewareContext ctx, IAuthorizationResolver authorizationResolver, string roleOfGraphQLRequest) { // If there are no selections in the aggregation field, exit early if (aggregationsField.SelectionSet == null) @@ -972,14 +1005,14 @@ private void ProcessAggregations(FieldNode aggregationsField, IMiddlewareContext } // Retrieve the schema field from the GraphQL context - IObjectField schemaField = ctx.Selection.Field; + ObjectField schemaField = ctx.Selection.Field; // Get the 'group by' field from the schema's entity type - IObjectField groupByField = schemaField.Type.NamedType() + ObjectField groupByField = schemaField.Type.NamedType() .Fields[QueryBuilder.GROUP_BY_FIELD_NAME]; // Get the 'aggregations' field from the 'group by' entity type - IObjectField aggregationsObjectField = groupByField.Type.NamedType() + ObjectField aggregationsObjectField = groupByField.Type.NamedType() .Fields[QueryBuilder.GROUP_BY_AGGREGATE_FIELD_NAME]; // Iterate through each selection in the aggregation field @@ -1010,7 +1043,18 @@ private void ProcessAggregations(FieldNode aggregationsField, IMiddlewareContext if (MetadataProvider.TryGetBackingColumn(EntityName, fieldName, out string? 
backingColumn)) { columnName = backingColumn; - fieldName = backingColumn; + } + + // Validate that the current role has access to field in the aggregation function argument + IEnumerable roles = authorizationResolver.GetRolesForField(EntityName, field: columnName, operation: EntityActionOperation.Read); + if (roles != null && !roles.Contains(roleOfGraphQLRequest, StringComparer.OrdinalIgnoreCase)) + { + // raising exception for the first unauthorized field found + throw new DataApiBuilderException( + message: string.Format(DataApiBuilderException.GRAPHQL_AGGREGATION_FIELD_AUTHZ_FAILURE, fieldName, operation), + statusCode: HttpStatusCode.Forbidden, + subStatusCode: DataApiBuilderException.SubStatusCodes.AuthorizationCheckFailed + ); } // Use the field alias if provided, otherwise default to the operation name @@ -1039,7 +1083,7 @@ private void ProcessAggregations(FieldNode aggregationsField, IMiddlewareContext List filterFields = (List)havingArg.Value.Value!; // Retrieve the corresponding aggregation operation field from the schema - IObjectField operationObjectField = aggregationsObjectField.Type.NamedType() + ObjectField operationObjectField = aggregationsObjectField.Type.NamedType() .Fields[operation.ToString()]; // Parse the filtering conditions and apply them to the aggregation @@ -1074,13 +1118,57 @@ private void ProcessAggregations(FieldNode aggregationsField, IMiddlewareContext } } + public int? Offset() + { + // Check if the offset argument is present in the query, if not, return 0 + try + { + //return this._ctx?.ArgumentValue("offset") ?? 
0; + return _offset; + } + catch (HotChocolate.GraphQLException) + { + return 0; // This is a stop-gat and indicated a very fishy situation + } + } + + private static string ExtractColumnName(string fieldValue) + { + string pattern = @"\{\s*([^:]+)\s*:"; + Match match = Regex.Match(fieldValue, pattern); + if (match.Success) + { + string columnName = match.Groups[1].Value.Trim(); + return columnName; + } + else + { + return ""; + } + } + + private static string ExtractValue(string fieldValue) + { + string pattern = @"\{\s*([^:]+)\s*:\s*(.*?)\s*\}"; + Match match = Regex.Match(fieldValue, pattern); + if (match.Success) + { + string value = match.Groups[2].Value.Trim(); + return value; + } + else + { + return ""; + } + } + /// /// Create a list of orderBy columns from the orderBy argument /// passed to the gql query. The orderBy argument could contain mapped field names /// so we find their backing column names before creating the orderBy list. /// All the remaining primary key columns are also added to ensure there are no tie breaks. /// - private List ProcessGqlOrderByArg(List orderByFields, IInputField orderByArgumentSchema, bool isGroupByQuery = false) + private List ProcessGqlOrderByArg(List orderByFields, IInputValueDefinition orderByArgumentSchema, bool isGroupByQuery = false) { if (_ctx is null) { @@ -1110,6 +1198,38 @@ private List ProcessGqlOrderByArg(List orderByFi string fieldName = field.Name.ToString(); + // Let's check if we're trying to sort on a child object. If tgis is a 'one' relationship this will just work + if (field.Value.ToString().Contains(':')) + { + // Check if the fieldName is a relationship element + if (MetadataProvider.TryGetEntityDefenition(EntityName, out Entity? baseEntity)) + { + if (baseEntity!.Relationships!.ContainsKey(fieldName)) + { + // Look up out alias in the JoinQueries + //myJoin = this.JoinQueries; + //stuff + Column? 
linkColumn = FindColumnByLabel(fieldName); + if (linkColumn == null) + { + throw new DataApiBuilderException(message: "Unable to resolve relation " + fieldName, + statusCode: HttpStatusCode.InternalServerError, + subStatusCode: DataApiBuilderException.SubStatusCodes.UnexpectedError); + } + + orderByColumnsList.Add(new OrderByColumn(tableSchema: linkColumn.TableSchema, + tableName: linkColumn.TableName, + columnName: ExtractColumnName(field.Value.ToString()), + tableAlias: linkColumn.TableAlias, + direction: Enum.Parse(ExtractValue(field.Value.ToString())) + )); + + } + } + + continue; + } + if (!MetadataProvider.TryGetBackingColumn(EntityName, fieldName, out string? backingColumnName)) { throw new DataApiBuilderException(message: "Mapped fieldname could not be found.", @@ -1221,6 +1341,11 @@ public bool IsSubqueryColumn(Column column) return column.TableAlias == null ? false : JoinQueries.ContainsKey(column.TableAlias); } + public LabelledColumn? FindColumnByLabel(string fieldName) + { + return Columns.FirstOrDefault(column => column.Label.Equals(fieldName, StringComparison.OrdinalIgnoreCase)); + } + /// /// Add column label string literals as parameters to the query structure /// diff --git a/src/Core/Resolvers/Sql Query Structures/SqlUpdateQueryStructure.cs b/src/Core/Resolvers/Sql Query Structures/SqlUpdateQueryStructure.cs index c73e72c230..ecbbf3fc5c 100644 --- a/src/Core/Resolvers/Sql Query Structures/SqlUpdateQueryStructure.cs +++ b/src/Core/Resolvers/Sql Query Structures/SqlUpdateQueryStructure.cs @@ -185,9 +185,9 @@ private Predicate CreatePredicateForParam(KeyValuePair param) { predicate = new( new PredicateOperand( - new Column(tableSchema: DatabaseObject.SchemaName, tableName: DatabaseObject.Name, param.Key)), + new Column(tableSchema: DatabaseObject.SchemaName, tableName: DatabaseObject.Name, backingColumn)), PredicateOperation.Equal, - new PredicateOperand($"{MakeDbConnectionParam(GetParamAsSystemType(param.Value.ToString()!, param.Key, 
GetColumnSystemType(param.Key)), param.Key)}")); + new PredicateOperand($"{MakeDbConnectionParam(GetParamAsSystemType(param.Value.ToString()!, backingColumn, GetColumnSystemType(backingColumn)), backingColumn)}")); } return predicate; diff --git a/src/Core/Resolvers/SqlMutationEngine.cs b/src/Core/Resolvers/SqlMutationEngine.cs index 493b7900e7..dfc53449f8 100644 --- a/src/Core/Resolvers/SqlMutationEngine.cs +++ b/src/Core/Resolvers/SqlMutationEngine.cs @@ -106,7 +106,7 @@ public SqlMutationEngine( mutationOperation is EntityActionOperation.Create) { // Multiple create mutation request is validated to ensure that the request is valid semantically. - IInputField schemaForArgument = context.Selection.Field.Arguments[inputArgumentName]; + IInputValueDefinition schemaForArgument = context.Selection.Field.Arguments[inputArgumentName]; MultipleMutationEntityInputValidationContext multipleMutationEntityInputValidationContext = new( entityName: entityName, parentEntityName: string.Empty, @@ -1689,8 +1689,8 @@ private static void PopulateCurrentAndLinkingEntityParams( { if (mutationParameters.TryGetValue(rootFieldName, out object? inputParameters)) { - IObjectField fieldSchema = context.Selection.Field; - IInputField itemsArgumentSchema = fieldSchema.Arguments[rootFieldName]; + ObjectField fieldSchema = context.Selection.Field; + IInputValueDefinition itemsArgumentSchema = fieldSchema.Arguments[rootFieldName]; InputObjectType inputObjectType = ExecutionHelper.InputObjectTypeFromIInputField(itemsArgumentSchema); return GQLMultipleCreateArgumentToDictParamsHelper(context, inputObjectType, inputParameters); } @@ -1871,7 +1871,7 @@ private static void PopulateCurrentAndLinkingEntityParams( /// private static InputObjectType GetInputObjectTypeForAField(string fieldName, FieldCollection fields) { - if (fields.TryGetField(fieldName, out IInputField? field)) + if (fields.TryGetField(fieldName, out InputField? 
field)) { return ExecutionHelper.InputObjectTypeFromIInputField(field); } @@ -1886,7 +1886,7 @@ private static InputObjectType GetInputObjectTypeForAField(string fieldName, Fie /// /// The name of the entity. /// The parameters for the DELETE operation. - /// Metadataprovider for db on which to perform operation. + /// Metadata provider for db on which to perform operation. /// A dictionary of properties of the Db Data Reader like RecordsAffected, HasRows. private async Task?> PerformDeleteOperation( @@ -2127,7 +2127,7 @@ private void AuthorizeEntityAndFieldsForMutation( IDictionary parametersDictionary ) { - if (context.Selection.Field.Arguments.TryGetField(inputArgumentName, out IInputField? schemaForArgument)) + if (context.Selection.Field.Arguments.TryGetField(inputArgumentName, out Argument? schemaForArgument)) { // Dictionary to store all the entities and their corresponding exposed column names referenced in the mutation. Dictionary> entityToExposedColumns = new(); @@ -2173,42 +2173,53 @@ private void AuthorizeEntityAndFieldsForMutation( /// Dictionary to store all the entities and their corresponding exposed column names referenced in the mutation. /// Schema for the input field. /// Name of the entity. - /// Middleware Context. + /// Middleware context. /// Value for the input field. - /// 1. mutation { - /// createbook( - /// item: { - /// title: "book #1", - /// reviews: [{ content: "Good book." }, { content: "Great book." }], - /// publishers: { name: "Macmillan publishers" }, - /// authors: [{ birthdate: "1997-09-03", name: "Red house authors", royal_percentage: 4.6 }] - /// }) - /// { - /// id - /// } - /// 2. mutation { - /// createbooks( - /// items: [{ - /// title: "book #1", - /// reviews: [{ content: "Good book." }, { content: "Great book." 
}], - /// publishers: { name: "Macmillan publishers" }, - /// authors: [{ birthdate: "1997-09-03", name: "Red house authors", royal_percentage: 4.9 }] - /// }, - /// { - /// title: "book #2", - /// reviews: [{ content: "Awesome book." }, { content: "Average book." }], - /// publishers: { name: "Pearson Education" }, - /// authors: [{ birthdate: "1990-11-04", name: "Penguin Random House", royal_percentage: 8.2 }] - /// }]) - /// { - /// items{ - /// id - /// title - /// } - /// } + /// + /// Example 1 - Single item creation: + /// + /// mutation { + /// createbook( + /// item: { + /// title: "book #1", + /// reviews: [{ content: "Good book." }, { content: "Great book." }], + /// publishers: { name: "Macmillan publishers" }, + /// authors: [{ birthdate: "1997-09-03", name: "Red house authors", royal_percentage: 4.6 }] + /// }) + /// { + /// id + /// } + /// } + /// + /// + /// Example 2 - Multiple items creation: + /// + /// mutation { + /// createbooks( + /// items: [{ + /// title: "book #1", + /// reviews: [{ content: "Good book." }, { content: "Great book." }], + /// publishers: { name: "Macmillan publishers" }, + /// authors: [{ birthdate: "1997-09-03", name: "Red house authors", royal_percentage: 4.9 }] + /// }, + /// { + /// title: "book #2", + /// reviews: [{ content: "Awesome book." }, { content: "Average book." 
}], + /// publishers: { name: "Pearson Education" }, + /// authors: [{ birthdate: "1990-11-04", name: "Penguin Random House", royal_percentage: 8.2 }] + /// }]) + /// { + /// items { + /// id + /// title + /// } + /// } + /// } + /// + /// private void PopulateMutationEntityAndFieldsToAuthorize( Dictionary> entityToExposedColumns, - IInputField schema, + IInputValueDefinition schema, string entityName, IMiddlewareContext context, object parameters) diff --git a/src/Core/Resolvers/SqlPaginationUtil.cs b/src/Core/Resolvers/SqlPaginationUtil.cs index b06c5b8aa5..e6d278d7a1 100644 --- a/src/Core/Resolvers/SqlPaginationUtil.cs +++ b/src/Core/Resolvers/SqlPaginationUtil.cs @@ -12,8 +12,10 @@ using Azure.DataApiBuilder.Core.Services; using Azure.DataApiBuilder.Service.Exceptions; using Azure.DataApiBuilder.Service.GraphQLBuilder.GraphQLTypes; -using Azure.DataApiBuilder.Service.GraphQLBuilder.Queries; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Http.Extensions; using Microsoft.AspNetCore.WebUtilities; +using QueryBuilder = Azure.DataApiBuilder.Service.GraphQLBuilder.Queries.QueryBuilder; namespace Azure.DataApiBuilder.Core.Resolvers { @@ -97,6 +99,21 @@ private static JsonObject CreatePaginationConnection(JsonElement root, Paginatio // values we need to determine the correct pagination logic bool isPaginationRequested = paginationMetadata.RequestedHasNextPage || paginationMetadata.RequestedEndCursor; + // Id the request Includes a totalCount, make sure we always add it to the connection + + if (paginationMetadata.RequestedTotalCount) + { + if (root.GetArrayLength() > 0 && int.TryParse(root[0].GetProperty("RecordCount").ToString(), out int recordCount)) + { + connection.Add(QueryBuilder.TOTAL_COUNT_FIELD_NAME, recordCount); + } + else + { + // Handle the case where parsing fails, if necessary + connection.Add(QueryBuilder.TOTAL_COUNT_FIELD_NAME, 0); // or some default value + } + } + IEnumerable rootEnumerated = root.EnumerateArray(); int 
returnedElementCount = rootEnumerated.Count(); bool hasExtraElement = false; @@ -163,7 +180,7 @@ private static JsonObject CreatePaginationConnection(JsonElement root, Paginatio paginationMetadata.RequestedGroupBy)); } } - + return connection; } @@ -572,15 +589,50 @@ public static string Base64Decode(string base64EncodedData) } /// - /// Create the URL that will provide for the next page of results - /// using the same query options. - /// Return value formatted as a JSON array: [{"nextLink":"[base]/api/[entity]?[queryParams_URIescaped]$after=[base64encodedPaginationToken]"}] + /// Constructs the base Uri for Pagination + /// + /// + /// This method uses the "X-Forwarded-Proto" and "X-Forwarded-Host" headers to determine + /// the scheme and host of the request, falling back to the request's original scheme and host if the headers + /// are not present or invalid. The method ensures that the scheme is either "http" or "https" and that the host + /// is a valid hostname or IP address. + /// + /// The HTTP context containing the request information. + /// An optional base route to prepend to the request path. If not specified, no base route is used. + /// A string representing the fully constructed Base request URL for Pagination. + public static string ConstructBaseUriForPagination(HttpContext httpContext, string? baseRoute = null) + { + HttpRequest req = httpContext.Request; + + // use scheme from X-Forwarded-Proto or fallback to request scheme + string scheme = ResolveRequestScheme(req); + + // Use host from X-Forwarded-Host or fallback to request host + string host = ResolveRequestHost(req); + + // If the base route is not empty, we need to insert it into the URI before the rest path. + // Path is of the form ....restPath/pathNameForEntity. We want to insert the base route before the restPath. + // Finally, it will be of the form: .../baseRoute/restPath/pathNameForEntity. 
+ return UriHelper.BuildAbsolute( + scheme: scheme, + host: new HostString(host), + pathBase: string.IsNullOrWhiteSpace(baseRoute) ? PathString.Empty : new PathString(baseRoute), + path: req.Path); + } + + /// + /// Builds a query string by appending or replacing the $after token with the specified value. /// - /// The request path excluding query parameters (e.g. https://localhost/api/myEntity) - /// Collection of query string parameters that are URI escaped. - /// The contents to add to the $after query parameter. Should be base64 encoded pagination token. - /// JSON element - array with nextLink. - public static JsonElement CreateNextLink(string path, NameValueCollection? queryStringParameters, string newAfterPayload) + /// This method does not include the in the returned query + /// string. It only processes and formats the query string parameters. + /// A collection of existing query string parameters. If , an empty collection is used. + /// The $after parameter, if present, will be removed before appending the new token. + /// The new value for the $after token. If this value is , empty, or whitespace, no + /// $after token will be appended. + /// A URL-encoded query string containing the updated parameters, including the new $after token if + /// specified. If no parameters are provided and is empty, an empty string is + /// returned. + public static string BuildQueryStringWithAfterToken(NameValueCollection? queryStringParameters, string newAfterPayload) { if (queryStringParameters is null) { @@ -588,33 +640,50 @@ public static JsonElement CreateNextLink(string path, NameValueCollection? query } else { - // Purge old $after value so this function can replace it. queryStringParameters.Remove("$after"); } - // To prevent regression of current behavior, retain the call to FormatQueryString - // which URI escapes other query parameters. Since $after has been removed, - // this will not affect the base64 encoded paging token. 
- string queryString = FormatQueryString(queryStringParameters: queryStringParameters); + // Format existing query string (URL encoded) + string queryString = FormatQueryString(queryStringParameters); - // When a new $after payload is provided, append it to the query string with the - // appropriate prefix: ? if $after is the only query parameter. & if $after is one of many query parameters. + // Append new $after token if (!string.IsNullOrWhiteSpace(newAfterPayload)) { string afterPrefix = string.IsNullOrWhiteSpace(queryString) ? "?" : "&"; queryString += $"{afterPrefix}{RequestParser.AFTER_URL}={newAfterPayload}"; } - // ValueKind will be array so we can differentiate from other objects in the response - // to be returned. - // [{"nextLink":"[base]/api/[entity]?[queryParams_URIescaped]$after=[base64encodedPaginationToken]"}] + // Construct final link + // return $"{path}{queryString}"; + return queryString; + } + + /// + /// Gets a consolidated next link for pagination in JSON format. + /// + /// The base Pagination Uri + /// The query string with after value + /// True, if the next link should be relative + /// + public static JsonElement GetConsolidatedNextLinkForPagination(string baseUri, string queryString, bool isNextLinkRelative = false) + { + UriBuilder uriBuilder = new(baseUri) + { + // Form final link by appending the query string + Query = queryString + }; + + // Construct final link- absolute or relative + string nextLinkValue = isNextLinkRelative + ? uriBuilder.Uri.PathAndQuery // returns just "/api/?$after...", no host + : uriBuilder.Uri.AbsoluteUri; // returns full URL + + // Return serialized JSON object string jsonString = JsonSerializer.Serialize(new[] { - new - { - nextLink = @$"{path}{queryString}" - } + new { nextLink = nextLinkValue } }); + return JsonSerializer.Deserialize(jsonString); } @@ -695,5 +764,94 @@ public static string FormatQueryString(NameValueCollection? 
queryStringParameter return queryString; } + + /// + /// Extracts and request scheme from "X-Forwarded-Proto" or falls back to the request scheme. + /// + /// The HTTP request. + /// The scheme string ("http" or "https"). + /// Thrown when client explicitly sets an invalid scheme. + private static string ResolveRequestScheme(HttpRequest req) + { + string? rawScheme = req.Headers["X-Forwarded-Proto"].FirstOrDefault(); + string? normalized = rawScheme?.Trim().ToLowerInvariant(); + + bool isExplicit = !string.IsNullOrEmpty(rawScheme); + bool isValid = IsValidScheme(normalized); + + if (isExplicit && !isValid) + { + // Log a warning and ignore the invalid value, fallback to request's scheme + Console.WriteLine($"Warning: Invalid scheme '{rawScheme}' in X-Forwarded-Proto header. Falling back to request scheme: '{req.Scheme}'."); + return req.Scheme; + } + + return isValid ? normalized! : req.Scheme; + } + + /// + /// Extracts the request host from "X-Forwarded-Host" or falls back to the request host. + /// + /// The HTTP request. + /// The host string. + /// Thrown when client explicitly sets an invalid host. + private static string ResolveRequestHost(HttpRequest req) + { + string? rawHost = req.Headers["X-Forwarded-Host"].FirstOrDefault(); + string? trimmed = rawHost?.Trim(); + + bool isExplicit = !string.IsNullOrEmpty(rawHost); + bool isValid = IsValidHost(trimmed); + + if (isExplicit && !isValid) + { + // Log a warning and ignore the invalid value, fallback to request's host + Console.WriteLine($"Warning: Invalid host '{rawHost}' in X-Forwarded-Host header. Falling back to request host: '{req.Host}'."); + return req.Host.ToString(); + } + + return isValid ? trimmed! : req.Host.ToString(); + } + + /// + /// Checks if the provided scheme is valid. + /// + /// Scheme, e.g., "http" or "https". + /// True if valid, otherwise false. + private static bool IsValidScheme(string? 
scheme) + { + return scheme is "http" or "https"; + } + + /// + /// Checks if the provided host is a valid hostname or IP address. + /// + /// The host name (with optional port). + /// True if valid, otherwise false. + private static bool IsValidHost(string? host) + { + if (string.IsNullOrWhiteSpace(host)) + { + return false; + } + + // Reject dangerous characters + if (host.Contains('\r') || host.Contains('\n') || host.Contains(' ') || + host.Contains('<') || host.Contains('>') || host.Contains('@')) + { + return false; + } + + // Validate host part (exclude port if present) + string hostnamePart = host.Split(':')[0]; + + if (Uri.CheckHostName(hostnamePart) == UriHostNameType.Unknown) + { + return false; + } + + // Final sanity check: ensure it parses into a full URI + return Uri.TryCreate($"http://{host}", UriKind.Absolute, out _); + } } } diff --git a/src/Core/Resolvers/SqlQueryEngine.cs b/src/Core/Resolvers/SqlQueryEngine.cs index 12a305c574..f183050b8c 100644 --- a/src/Core/Resolvers/SqlQueryEngine.cs +++ b/src/Core/Resolvers/SqlQueryEngine.cs @@ -212,7 +212,7 @@ public async Task ExecuteAsync(StoredProcedureRequestContext cont } /// - public JsonElement ResolveObject(JsonElement element, IObjectField fieldSchema, ref IMetadata metadata) + public JsonElement ResolveObject(JsonElement element, ObjectField fieldSchema, ref IMetadata metadata) { PaginationMetadata parentMetadata = (PaginationMetadata)metadata; @@ -259,7 +259,7 @@ public JsonElement ResolveObject(JsonElement element, IObjectField fieldSchema, /// List of JsonElements parsed from the provided JSON array. /// Return type is 'object' instead of a 'List of JsonElements' because when this function returns JsonElement, /// the HC12 engine doesn't know how to handle the JsonElement and results in requests failing at runtime. - public object ResolveList(JsonElement array, IObjectField fieldSchema, ref IMetadata? 
metadata) + public object ResolveList(JsonElement array, ObjectField fieldSchema, ref IMetadata? metadata) { if (metadata is not null) { @@ -330,6 +330,7 @@ public object ResolveList(JsonElement array, IObjectField fieldSchema, ref IMeta // We want to avoid caching token metadata because token metadata can change frequently and we want to avoid caching it. if (!dbPolicyConfigured && entityCacheEnabled) { + return await GetResultInCacheScenario( runtimeConfig, structure, diff --git a/src/Core/Resolvers/SqlResponseHelpers.cs b/src/Core/Resolvers/SqlResponseHelpers.cs index 7701d662d3..6c8ce21f01 100644 --- a/src/Core/Resolvers/SqlResponseHelpers.cs +++ b/src/Core/Resolvers/SqlResponseHelpers.cs @@ -23,21 +23,23 @@ public class SqlResponseHelpers /// /// Format the results from a Find operation. Check if there is a requirement - /// for a nextLink, and if so, add this value to the array of JsonElements to + /// for a nextLink/after, and if so, add this value to the array of JsonElements to /// be used as part of the response. /// /// The JsonDocument from the query. /// The RequestContext. - /// the metadataprovider. + /// The metadataprovider. /// Runtimeconfig object /// HTTP context associated with the API request + /// True if request is done through MCP endpoint /// An OkObjectResult from a Find operation that has been correctly formatted. public static OkObjectResult FormatFindResult( JsonElement findOperationResponse, FindRequestContext context, ISqlMetadataProvider sqlMetadataProvider, RuntimeConfig runtimeConfig, - HttpContext httpContext) + HttpContext httpContext, + bool? isMcpRequest = null) { // When there are no rows returned from the database, the jsonElement will be an empty array. @@ -51,11 +53,18 @@ public static OkObjectResult FormatFindResult( ? 
DetermineExtraFieldsInResponse(findOperationResponse, context.FieldsToBeReturned) : DetermineExtraFieldsInResponse(findOperationResponse.EnumerateArray().First(), context.FieldsToBeReturned); + //Remove RecordCOunt from extraFieldsInResponse if present + /* + if (extraFieldsInResponse.Contains("RecordCount")) + { + extraFieldsInResponse.Remove("RecordCount"); + } + */ uint defaultPageSize = runtimeConfig.DefaultPageSize(); uint maxPageSize = runtimeConfig.MaxPageSize(); // If the results are not a collection or if the query does not have a next page - // no nextLink is needed. So, the response is returned after removing the extra fields. + // no nextLink/after is needed. So, the response is returned after removing the extra fields. if (findOperationResponse.ValueKind is not JsonValueKind.Array || !SqlPaginationUtil.HasNext(findOperationResponse, context.First, defaultPageSize, maxPageSize)) { // If there are no additional fields present, the response is returned directly. When there @@ -89,37 +98,53 @@ public static OkObjectResult FormatFindResult( tableName: context.DatabaseObject.Name, sqlMetadataProvider: sqlMetadataProvider); - // nextLink is the URL needed to get the next page of records using the same query options - // with $after base64 encoded for opaqueness - string path = UriHelper.GetEncodedUrl(httpContext!.Request).Split('?')[0]; - - // If the base route is not empty, we need to insert it into the URI before the rest path. - string? baseRoute = runtimeConfig.Runtime?.BaseRoute; - if (!string.IsNullOrWhiteSpace(baseRoute)) + // When there are extra fields present, they are removed before returning the response. + if (extraFieldsInResponse.Count > 0) { - HttpRequest request = httpContext!.Request; - - // Path is of the form ....restPath/pathNameForEntity. We want to insert the base route before the restPath. - // Finally, it will be of the form: .../baseRoute/restPath/pathNameForEntity. 
- path = UriHelper.BuildAbsolute( - scheme: request.Scheme, - host: request.Host, - pathBase: baseRoute, - path: request.Path); + rootEnumerated = RemoveExtraFieldsInResponseWithMultipleItems(rootEnumerated, extraFieldsInResponse); } - JsonElement nextLink = SqlPaginationUtil.CreateNextLink( - path, - queryStringParameters: context!.ParsedQueryString, - after); + // Create an 'after' object if the request comes from MCP endpoint. + if (isMcpRequest is true) + { + string jsonString = JsonSerializer.Serialize(new[] + { + new { after = after } + }); + JsonElement afterElement = JsonSerializer.Deserialize(jsonString); - // When there are extra fields present, they are removed before returning the response. - if (extraFieldsInResponse.Count > 0) + rootEnumerated.Add(afterElement); + } + // Create a 'nextLink' object if the request comes from REST endpoint. + else { - rootEnumerated = RemoveExtraFieldsInResponseWithMultipleItems(rootEnumerated, extraFieldsInResponse); + string basePaginationUri = SqlPaginationUtil.ConstructBaseUriForPagination(httpContext, runtimeConfig.Runtime?.BaseRoute); + + // Build the query string with the $after token. + string queryString = SqlPaginationUtil.BuildQueryStringWithAfterToken( + queryStringParameters: context!.ParsedQueryString, + newAfterPayload: after); + + // Get the final consolidated nextLink for the pagination. 
+ JsonElement nextLink = SqlPaginationUtil.GetConsolidatedNextLinkForPagination( + baseUri: basePaginationUri, + queryString: queryString, + isNextLinkRelative: runtimeConfig.NextLinkRelative()); + + rootEnumerated.Add(nextLink); } - rootEnumerated.Add(nextLink); + //Get the element RecordCount from the first element of the array + //JsonElement recordCountElement = rootEnumerated[0].GetProperty("RecordCount"); + string jsonRecordCount = JsonSerializer.Serialize(new[] + { + new + { + recordCount = @$"{rootEnumerated[0].GetProperty("RecordCount")}" + } + }); + + rootEnumerated.Add(JsonSerializer.Deserialize(jsonRecordCount)); return OkResponse(JsonSerializer.SerializeToElement(rootEnumerated)); } @@ -197,8 +222,9 @@ private static JsonElement RemoveExtraFieldsInResponseWithSingleItem(JsonElement /// form that complies with vNext Api guidelines. /// /// Value representing the Json results of the client's request. + /// True if request is done through MCP endpoint. /// Correctly formatted OkObjectResult. - public static OkObjectResult OkResponse(JsonElement jsonResult) + public static OkObjectResult OkResponse(JsonElement jsonResult, bool? isMcpRequest = null) { // For consistency we return all values as type Array if (jsonResult.ValueKind != JsonValueKind.Array) @@ -211,20 +237,34 @@ public static OkObjectResult OkResponse(JsonElement jsonResult) // More than 0 records, and the last element is of type array, then we have pagination if (resultEnumerated.Count > 0 && resultEnumerated[resultEnumerated.Count - 1].ValueKind == JsonValueKind.Array) { - // Get the nextLink + // Get the 'nextLink' or 'after' // resultEnumerated will be an array of the form - // [{object1}, {object2},...{objectlimit}, [{nextLinkObject}]] - // if the last element is of type array, we know it is nextLink - // we strip the "[" and "]" and then save the nextLink element - // into a dictionary with a key of "nextLink" and a value that - // represents the nextLink data we require. 
- string nextLinkJsonString = JsonSerializer.Serialize(resultEnumerated[resultEnumerated.Count - 1]); - Dictionary nextLink = JsonSerializer.Deserialize>(nextLinkJsonString[1..^1])!; + // [{object1}, {object2},...{objectlimit}, [{nextLinkObject/afterObject}]] + // if the last element is of type array, we know it is 'nextLink' + // if the request is done through the REST endpoint and it is + // 'after' if the request is done through the MCP endpoint, + // we strip the "[" and "]" and then save the element + // into a dictionary with a key of "nextLinkAfter" and a value that + // represents the nextLink/after data we require. + string nextLinkAfterJsonString = JsonSerializer.Serialize(resultEnumerated[resultEnumerated.Count - 1]); + Dictionary nextLinkAfter = JsonSerializer.Deserialize>(nextLinkAfterJsonString[1..^1])!; IEnumerable value = resultEnumerated.Take(resultEnumerated.Count - 1); + + // Check 'after' object if request is done through MCP endpoint. + if (isMcpRequest is true) + { + return new OkObjectResult(new + { + value = value, + after = nextLinkAfter["after"] + }); + } + + // Check 'nextLink' object if request is done through REST endpoint. return new OkObjectResult(new { value = value, - @nextLink = nextLink["nextLink"] + @nextLink = nextLinkAfter["nextLink"] }); } @@ -424,6 +464,5 @@ public static OkObjectResult OkMutationResponse(JsonElement jsonResult) value = resultEnumerated }); } - } } diff --git a/src/Core/Services/BuildRequestStateMiddleware.cs b/src/Core/Services/BuildRequestStateMiddleware.cs index 0cebec29e1..395538ce7d 100644 --- a/src/Core/Services/BuildRequestStateMiddleware.cs +++ b/src/Core/Services/BuildRequestStateMiddleware.cs @@ -32,7 +32,7 @@ public BuildRequestStateMiddleware(RequestDelegate next, RuntimeConfigProvider r /// http context's "X-MS-API-ROLE" header/value to HotChocolate's request context. /// /// HotChocolate execution request context. 
- public async ValueTask InvokeAsync(IRequestContext context) + public async ValueTask InvokeAsync(RequestContext context) { bool isIntrospectionQuery = context.Request.OperationName == "IntrospectionQuery"; ApiType apiType = ApiType.GraphQL; @@ -77,11 +77,11 @@ public async ValueTask InvokeAsync(IRequestContext context) // There is an error in GraphQL when ContextData is not null if (context.Result!.ContextData is not null) { - if (context.Result.ContextData.ContainsKey(WellKnownContextData.ValidationErrors)) + if (context.Result.ContextData.ContainsKey(ExecutionContextData.ValidationErrors)) { statusCode = HttpStatusCode.BadRequest; } - else if (context.Result.ContextData.ContainsKey(WellKnownContextData.OperationNotAllowed)) + else if (context.Result.ContextData.ContainsKey(ExecutionContextData.OperationNotAllowed)) { statusCode = HttpStatusCode.MethodNotAllowed; } diff --git a/src/Core/Services/Cache/DabCacheService.cs b/src/Core/Services/Cache/DabCacheService.cs index 942534fbec..5fab06691f 100644 --- a/src/Core/Services/Cache/DabCacheService.cs +++ b/src/Core/Services/Cache/DabCacheService.cs @@ -93,10 +93,10 @@ public DabCacheService(IFusionCache cache, ILogger? logger, IHt /// /// Try to get cacheValue from the cache with the derived cache key. /// - /// The type of value in the cache + /// The type of value in the cache /// Metadata used to create a cache key or fetch a response from the database. /// JSON Response - public MaybeValue? TryGet(DatabaseQueryMetadata queryMetadata, EntityCacheLevel cacheEntryLevel) + public MaybeValue? TryGet(DatabaseQueryMetadata queryMetadata, EntityCacheLevel cacheEntryLevel) { string cacheKey = CreateCacheKey(queryMetadata); FusionCacheEntryOptions options = new(); @@ -106,7 +106,7 @@ public DabCacheService(IFusionCache cache, ILogger? 
logger, IHt options.SetSkipDistributedCache(true, true); } - return _cache.TryGet(key: cacheKey); + return _cache.TryGet(key: cacheKey); } /// diff --git a/src/Core/Services/DetermineStatusCodeMiddleware.cs b/src/Core/Services/DetermineStatusCodeMiddleware.cs index 01384485d9..dcddc62971 100644 --- a/src/Core/Services/DetermineStatusCodeMiddleware.cs +++ b/src/Core/Services/DetermineStatusCodeMiddleware.cs @@ -18,7 +18,7 @@ public sealed class DetermineStatusCodeMiddleware(RequestDelegate next) { private const string ERROR_CODE = nameof(DataApiBuilderException.SubStatusCodes.DatabaseInputError); - public async ValueTask InvokeAsync(IRequestContext context) + public async ValueTask InvokeAsync(RequestContext context) { await next(context).ConfigureAwait(false); @@ -34,7 +34,7 @@ public async ValueTask InvokeAsync(IRequestContext context) contextData.AddRange(singleResult.ContextData); } - contextData[WellKnownContextData.HttpStatusCode] = HttpStatusCode.BadRequest; + contextData[ExecutionContextData.HttpStatusCode] = HttpStatusCode.BadRequest; context.Result = singleResult.WithContextData(contextData.ToImmutable()); } } diff --git a/src/Core/Services/ExecutionHelper.cs b/src/Core/Services/ExecutionHelper.cs index 9745adbcd5..fe527d9f0c 100644 --- a/src/Core/Services/ExecutionHelper.cs +++ b/src/Core/Services/ExecutionHelper.cs @@ -78,7 +78,8 @@ public async ValueTask ExecuteQueryAsync(IMiddlewareContext context) } return ValueTask.CompletedTask; - }); + }, + cleanAfter: CleanAfter.Request); context.Result = result.Item1.Select(t => t.RootElement).ToArray(); SetNewMetadata(context, result.Item2); @@ -125,7 +126,8 @@ public async ValueTask ExecuteMutateAsync(IMiddlewareContext context) } return ValueTask.CompletedTask; - }); + }, + cleanAfter: CleanAfter.Request); context.Result = result.Item1.Select(t => t.RootElement).ToArray(); SetNewMetadata(context, result.Item2); @@ -184,7 +186,7 @@ fieldValue.ValueKind is not (JsonValueKind.Undefined or JsonValueKind.Null)) { 
// The selection type can be a wrapper type like NonNullType or ListType. // To get the most inner type (aka the named type) we use our named type helper. - INamedType namedType = context.Selection.Field.Type.NamedType(); + ITypeDefinition namedType = context.Selection.Field.Type.NamedType(); // Each scalar in HotChocolate has a runtime type representation. // In order to let scalar values flow through the GraphQL type completion @@ -312,6 +314,7 @@ private static void SetContextResult(IMiddlewareContext context, JsonDocument? r result.Dispose(); return ValueTask.CompletedTask; }); + // The disposal could occur before we were finished using the value from the jsondocument, // thus needing to ensure copying the root element. Hence, we clone the root element. context.Result = result.RootElement.Clone(); @@ -353,14 +356,14 @@ private static bool TryGetPropertyFromParent( /// the request context variable values needed to resolve value nodes represented as variables public static object? ExtractValueFromIValueNode( IValueNode value, - IInputField argumentSchema, + IInputValueDefinition argumentSchema, IVariableValueCollection variables) { // extract value from the variable if the IValueNode is a variable if (value.Kind == SyntaxKind.Variable) { string variableName = ((VariableNode)value).Name.Value; - IValueNode? variableValue = variables.GetVariable(variableName); + IValueNode? 
variableValue = variables.GetValue(variableName); if (variableValue is null) { @@ -411,14 +414,14 @@ private static bool TryGetPropertyFromParent( /// Value: (object) argument value /// public static IDictionary GetParametersFromSchemaAndQueryFields( - IObjectField schema, + ObjectField schema, FieldNode query, IVariableValueCollection variables) { IDictionary collectedParameters = new Dictionary(); // Fill the parameters dictionary with the default argument values - IFieldCollection schemaArguments = schema.Arguments; + ArgumentCollection schemaArguments = schema.Arguments; // Example 'argumentSchemas' IInputField objects of type 'HotChocolate.Types.Argument': // These are all default arguments defined in the schema for queries. @@ -428,7 +431,7 @@ private static bool TryGetPropertyFromParent( // {orderBy:entityOrderByInput} // The values in schemaArguments will have default values when the backing // entity is a stored procedure with runtime config defined default parameter values. - foreach (IInputField argument in schemaArguments) + foreach (IInputValueDefinition argument in schemaArguments) { if (argument.DefaultValue != null) { @@ -440,7 +443,7 @@ private static bool TryGetPropertyFromParent( variables: variables)); } } - + // Overwrite the default values with the passed in arguments // Example: { myEntity(first: $first, orderBy: {entityField: ASC) { items { entityField } } } // User supplied $first filter variable overwrites the default value of 'first'. @@ -450,7 +453,7 @@ private static bool TryGetPropertyFromParent( foreach (ArgumentNode argument in passedArguments) { string argumentName = argument.Name.Value; - IInputField argumentSchema = schemaArguments[argumentName]; + IInputValueDefinition argumentSchema = schemaArguments[argumentName]; object? 
nodeValue = ExtractValueFromIValueNode( value: argument.Value, @@ -486,7 +489,7 @@ internal static IType InnerMostType(IType type) return InnerMostType(type.InnerType()); } - public static InputObjectType InputObjectTypeFromIInputField(IInputField field) + public static InputObjectType InputObjectTypeFromIInputField(IInputValueDefinition field) { return (InputObjectType)InnerMostType(field.Type); } diff --git a/src/Core/Services/GraphQLSchemaCreator.cs b/src/Core/Services/GraphQLSchemaCreator.cs index 90e918c833..a96ebfa271 100644 --- a/src/Core/Services/GraphQLSchemaCreator.cs +++ b/src/Core/Services/GraphQLSchemaCreator.cs @@ -285,9 +285,11 @@ private DocumentNode GenerateSqlGraphQLObjects(RuntimeEntities entities, Diction } else { + Environment.Exit(-777); throw new DataApiBuilderException(message: $"Database Object definition for {entityName} has not been inferred.", statusCode: HttpStatusCode.InternalServerError, subStatusCode: DataApiBuilderException.SubStatusCodes.ErrorInInitialization); + } } diff --git a/src/Core/Services/MetadataProviders/CosmosSqlMetadataProvider.cs b/src/Core/Services/MetadataProviders/CosmosSqlMetadataProvider.cs index 61ffeeab09..082e96b435 100644 --- a/src/Core/Services/MetadataProviders/CosmosSqlMetadataProvider.cs +++ b/src/Core/Services/MetadataProviders/CosmosSqlMetadataProvider.cs @@ -626,5 +626,10 @@ public void InitializeAsync( { throw new NotImplementedException(); } + + public bool TryGetEntityDefenition(string entityName, out Entity? entityDefenition) + { + throw new NotImplementedException(); + } } } diff --git a/src/Core/Services/MetadataProviders/ISqlMetadataProvider.cs b/src/Core/Services/MetadataProviders/ISqlMetadataProvider.cs index 83989b645a..6f73043c18 100644 --- a/src/Core/Services/MetadataProviders/ISqlMetadataProvider.cs +++ b/src/Core/Services/MetadataProviders/ISqlMetadataProvider.cs @@ -251,5 +251,6 @@ public bool TryGetFKDefinition( string referencedEntityName, [NotNullWhen(true)] out ForeignKeyDefinition? 
foreignKeyDefinition, bool isMToNRelationship) => throw new NotImplementedException(); + public bool TryGetEntityDefenition(string entityName, out Entity? entityDefenition); } } diff --git a/src/Core/Services/MetadataProviders/MsSqlMetadataProvider.cs b/src/Core/Services/MetadataProviders/MsSqlMetadataProvider.cs index f03216ba89..7d02798427 100644 --- a/src/Core/Services/MetadataProviders/MsSqlMetadataProvider.cs +++ b/src/Core/Services/MetadataProviders/MsSqlMetadataProvider.cs @@ -200,11 +200,14 @@ protected override async Task FillSchemaForStoredProcedureAsync( // Loop through parameters specified in config, throw error if not found in schema // else set runtime config defined default values. // Note: we defer type checking of parameters specified in config until request time - Dictionary? configParameters = procedureEntity.Source.Parameters; + List? configParameters = procedureEntity.Source.Parameters; if (configParameters is not null) { - foreach ((string configParamKey, object configParamValue) in configParameters) + foreach (ParameterMetadata paramMetadata in configParameters) { + string configParamKey = paramMetadata.Name; + object? configParamValue = paramMetadata.Default; + if (!storedProcedureDefinition.Parameters.TryGetValue(configParamKey, out ParameterDefinition? 
parameterDefinition)) { throw new DataApiBuilderException( @@ -214,8 +217,11 @@ protected override async Task FillSchemaForStoredProcedureAsync( } else { - parameterDefinition.HasConfigDefault = true; - parameterDefinition.ConfigDefaultValue = configParamValue?.ToString(); + parameterDefinition.Description = paramMetadata.Description; + parameterDefinition.Required = paramMetadata.Required; + parameterDefinition.Default = paramMetadata.Default; + parameterDefinition.HasConfigDefault = paramMetadata.Default is not null; + parameterDefinition.ConfigDefaultValue = paramMetadata.Default?.ToString(); } } } @@ -247,6 +253,7 @@ protected override void PopulateMetadataForLinkingObject( // GraphQL is enabled/disabled. The linking object definitions are not exposed in the schema to the user. Entity linkingEntity = new( Source: new EntitySource(Type: EntitySourceType.Table, Object: linkingObject, Parameters: null, KeyFields: null), + Fields: null, Rest: new(Array.Empty(), Enabled: false), GraphQL: new(Singular: linkingEntityName, Plural: linkingEntityName, Enabled: false), Permissions: Array.Empty(), diff --git a/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs b/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs index dd4703d241..2df24a9b4d 100644 --- a/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs +++ b/src/Core/Services/MetadataProviders/SqlMetadataProvider.cs @@ -102,6 +102,16 @@ private void HandleOrRecordException(Exception e) } } + public bool TryGetEntityDefenition(string entityName, out Entity? 
entityDefenition) + { + if (!_entities.TryGetValue(entityName, out entityDefenition)) + { + throw new KeyNotFoundException($"Initialization of metadata incomplete for entity: {entityName}"); + } + + return true; + } + public SqlMetadataProvider( RuntimeConfigProvider runtimeConfigProvider, IAbstractQueryManagerFactory engineFactory, @@ -162,6 +172,7 @@ public virtual string GetSchemaName(string entityName) { if (!EntityToDatabaseObject.TryGetValue(entityName, out DatabaseObject? databaseObject)) { + Environment.Exit(-777); throw new DataApiBuilderException(message: $"Table Definition for {entityName} has not been inferred.", statusCode: HttpStatusCode.InternalServerError, subStatusCode: DataApiBuilderException.SubStatusCodes.EntityNotFound); @@ -180,6 +191,7 @@ public string GetDatabaseObjectName(string entityName) { if (!EntityToDatabaseObject.TryGetValue(entityName, out DatabaseObject? databaseObject)) { + Environment.Exit(-777); throw new DataApiBuilderException(message: $"Table Definition for {entityName} has not been inferred.", statusCode: HttpStatusCode.InternalServerError, subStatusCode: DataApiBuilderException.SubStatusCodes.EntityNotFound); @@ -193,6 +205,7 @@ public SourceDefinition GetSourceDefinition(string entityName) { if (!EntityToDatabaseObject.TryGetValue(entityName, out DatabaseObject? databaseObject)) { + Environment.Exit(-777); throw new DataApiBuilderException(message: $"Table Definition for {entityName} has not been inferred.", statusCode: HttpStatusCode.InternalServerError, subStatusCode: DataApiBuilderException.SubStatusCodes.EntityNotFound); @@ -217,13 +230,33 @@ public StoredProcedureDefinition GetStoredProcedureDefinition(string entityName) /// public bool TryGetExposedColumnName(string entityName, string backingFieldName, [NotNullWhen(true)] out string? name) { - Dictionary? 
backingColumnsToExposedNamesMap; - if (!EntityBackingColumnsToExposedNames.TryGetValue(entityName, out backingColumnsToExposedNamesMap)) + if (!EntityBackingColumnsToExposedNames.TryGetValue(entityName, out Dictionary? backingToExposed)) { throw new KeyNotFoundException($"Initialization of metadata incomplete for entity: {entityName}"); } - return backingColumnsToExposedNamesMap.TryGetValue(backingFieldName, out name); + if (backingToExposed.TryGetValue(backingFieldName, out name)) + { + return true; + } + + if (_entities.TryGetValue(entityName, out Entity? entityDefinition) && entityDefinition.Fields is not null) + { + // Find the field by backing name and use its Alias if present. + FieldMetadata? matched = entityDefinition + .Fields + .FirstOrDefault(f => f.Name.Equals(backingFieldName, StringComparison.OrdinalIgnoreCase) + && !string.IsNullOrEmpty(f.Alias)); + + if (matched is not null) + { + name = matched.Alias!; + return true; + } + } + + name = null; + return false; } /// @@ -235,6 +268,23 @@ public bool TryGetBackingColumn(string entityName, string field, [NotNullWhen(tr throw new KeyNotFoundException($"Initialization of metadata incomplete for entity: {entityName}"); } + if (exposedNamesToBackingColumnsMap.TryGetValue(field, out name)) + { + return true; + } + + if (_entities.TryGetValue(entityName, out Entity? entityDefinition) && entityDefinition.Fields is not null) + { + FieldMetadata? matchedField = entityDefinition.Fields.FirstOrDefault(f => + f.Alias != null && f.Alias.Equals(field, StringComparison.OrdinalIgnoreCase)); + + if (matchedField is not null) + { + name = matchedField.Name; + return true; + } + } + return exposedNamesToBackingColumnsMap.TryGetValue(field, out name); } @@ -455,11 +505,12 @@ protected virtual async Task FillSchemaForStoredProcedureAsync( // Loop through parameters specified in config, throw error if not found in schema // else set runtime config defined default values. 
// Note: we defer type checking of parameters specified in config until request time - Dictionary? configParameters = procedureEntity.Source.Parameters; + List? configParameters = procedureEntity.Source.Parameters; if (configParameters is not null) { - foreach ((string configParamKey, object configParamValue) in configParameters) + foreach (ParameterMetadata paramMeta in configParameters) { + string configParamKey = paramMeta.Name; if (!storedProcedureDefinition.Parameters.TryGetValue(configParamKey, out ParameterDefinition? parameterDefinition)) { HandleOrRecordException(new DataApiBuilderException( @@ -469,8 +520,12 @@ protected virtual async Task FillSchemaForStoredProcedureAsync( } else { - parameterDefinition.HasConfigDefault = true; - parameterDefinition.ConfigDefaultValue = configParamValue?.ToString(); + // Map all metadata from config + parameterDefinition.Description = paramMeta.Description; + parameterDefinition.Required = paramMeta.Required; + parameterDefinition.Default = paramMeta.Default; + parameterDefinition.HasConfigDefault = paramMeta.Default is not null; + parameterDefinition.ConfigDefaultValue = paramMeta.Default?.ToString(); } } } @@ -1094,22 +1149,81 @@ await PopulateResultSetDefinitionsForStoredProcedureAsync( } else if (entitySourceType is EntitySourceType.Table) { + List pkFields = new(); + + // Resolve PKs from fields first + if (entity.Fields is not null && entity.Fields.Any()) + { + pkFields = entity.Fields + .Where(f => f.PrimaryKey) + .Select(f => f.Name) + .ToList(); + } + + // Fallback to key-fields from config + if (pkFields.Count == 0 && entity.Source.KeyFields is not null) + { + pkFields = entity.Source.KeyFields.ToList(); + } + + // If still empty, fallback to DB schema PKs + if (pkFields.Count == 0) + { + DataTable dataTable = await GetTableWithSchemaFromDataSetAsync( + entityName, + GetSchemaName(entityName), + GetDatabaseObjectName(entityName)); + + pkFields = dataTable.PrimaryKey.Select(pk => pk.ColumnName).ToList(); + } + + 
// Final safeguard + pkFields ??= new List(); + await PopulateSourceDefinitionAsync( entityName, GetSchemaName(entityName), GetDatabaseObjectName(entityName), GetSourceDefinition(entityName), - entity.Source.KeyFields); + pkFields); } else { + List pkFields = new(); + + // Resolve PKs from fields first + if (entity.Fields is not null && entity.Fields.Any()) + { + pkFields = entity.Fields + .Where(f => f.PrimaryKey) + .Select(f => f.Name) + .ToList(); + } + + // Fallback to key-fields from config + if (pkFields.Count == 0 && entity.Source.KeyFields is not null) + { + pkFields = entity.Source.KeyFields.ToList(); + } + + // If still empty, fallback to DB schema PKs + if (pkFields.Count == 0) + { + DataTable dataTable = await GetTableWithSchemaFromDataSetAsync( + entityName, + GetSchemaName(entityName), + GetDatabaseObjectName(entityName)); + + pkFields = dataTable.PrimaryKey.Select(pk => pk.ColumnName).ToList(); + } + ViewDefinition viewDefinition = (ViewDefinition)GetSourceDefinition(entityName); await PopulateSourceDefinitionAsync( entityName, GetSchemaName(entityName), GetDatabaseObjectName(entityName), viewDefinition, - entity.Source.KeyFields); + pkFields); } } catch (Exception e) @@ -1151,30 +1265,24 @@ private async Task PopulateResultSetDefinitionsForStoredProcedureAsync( Type resultFieldType = SqlToCLRType(element.GetProperty(BaseSqlQueryBuilder.STOREDPROC_COLUMN_SYSTEMTYPENAME).ToString()); bool isResultFieldNullable = element.GetProperty(BaseSqlQueryBuilder.STOREDPROC_COLUMN_ISNULLABLE).GetBoolean(); + // Validate that the stored procedure returns columns with proper names + // This commonly occurs when using aggregate functions or expressions without aliases + if (string.IsNullOrWhiteSpace(resultFieldName)) + { + throw new DataApiBuilderException( + message: $"The stored procedure '{dbStoredProcedureName}' returns a column without a name. 
" + + "This typically happens when using aggregate functions (like MAX, MIN, COUNT) or expressions " + + "without providing an alias. Please add column aliases to your SELECT statement. " + + "For example: 'SELECT MAX(id) AS MaxId' instead of 'SELECT MAX(id)'.", + statusCode: HttpStatusCode.ServiceUnavailable, + subStatusCode: DataApiBuilderException.SubStatusCodes.ErrorInInitialization); + } + // Store the dictionary containing result set field with its type as Columns storedProcedureDefinition.Columns.TryAdd(resultFieldName, new(resultFieldType) { IsNullable = isResultFieldNullable }); } } - /// - /// Helper method to create params for the query. - /// - /// Common prefix of param names. - /// Values of the param. - /// - private static Dictionary GetQueryParams( - string paramName, - object[] paramValues) - { - Dictionary parameters = new(); - for (int paramNumber = 0; paramNumber < paramValues.Length; paramNumber++) - { - parameters.Add($"{paramName}{paramNumber}", paramValues[paramNumber]); - } - - return parameters; - } - /// /// Generate the mappings of exposed names to /// backing columns, and of backing columns to @@ -1216,19 +1324,66 @@ private void GenerateExposedToBackingColumnMapUtil(string entityName) { try { - // For StoredProcedures, result set definitions become the column definition. - Dictionary? mapping = GetMappingForEntity(entityName); - EntityBackingColumnsToExposedNames[entityName] = mapping is not null ? mapping : new(); - EntityExposedNamesToBackingColumnNames[entityName] = EntityBackingColumnsToExposedNames[entityName].ToDictionary(x => x.Value, x => x.Key); + // Build case-insensitive maps per entity. + Dictionary backToExposed = new(StringComparer.OrdinalIgnoreCase); + Dictionary exposedToBack = new(StringComparer.OrdinalIgnoreCase); + + // Pull definitions. + _entities.TryGetValue(entityName, out Entity? 
entity); SourceDefinition sourceDefinition = GetSourceDefinition(entityName); - foreach (string columnName in sourceDefinition.Columns.Keys) + + // 1) Prefer new-style fields (backing = f.Name, exposed = f.Alias ?? f.Name) + if (entity?.Fields is not null) + { + foreach (FieldMetadata f in entity.Fields) + { + string backing = f.Name; + string exposed = string.IsNullOrWhiteSpace(f.Alias) ? backing : f.Alias!; + backToExposed[backing] = exposed; + exposedToBack[exposed] = backing; + } + } + + // 2) Overlay legacy mappings (backing -> alias) only where we don't already have an alias from fields. + if (entity?.Mappings is not null) + { + foreach (KeyValuePair kvp in entity.Mappings) + { + string backing = kvp.Key; + string exposed = kvp.Value; + + // If fields already provided an alias for this backing column, keep fields precedence. + if (!backToExposed.ContainsKey(backing)) + { + backToExposed[backing] = exposed; + } + + // Always ensure reverse map is coherent (fields still take precedence if the same exposed already exists). + if (!exposedToBack.ContainsKey(exposed)) + { + exposedToBack[exposed] = backing; + } + } + } + + // 3) Ensure all physical columns are mapped (identity default). 
+ foreach (string backing in sourceDefinition.Columns.Keys) { - if (!EntityExposedNamesToBackingColumnNames[entityName].ContainsKey(columnName) && !EntityBackingColumnsToExposedNames[entityName].ContainsKey(columnName)) + if (!backToExposed.ContainsKey(backing)) + { + backToExposed[backing] = backing; + } + + string exposed = backToExposed[backing]; + if (!exposedToBack.ContainsKey(exposed)) { - EntityBackingColumnsToExposedNames[entityName].Add(columnName, columnName); - EntityExposedNamesToBackingColumnNames[entityName].Add(columnName, columnName); + exposedToBack[exposed] = backing; } } + + // 4) Store maps for runtime + EntityBackingColumnsToExposedNames[entityName] = backToExposed; + EntityExposedNamesToBackingColumnNames[entityName] = exposedToBack; } catch (Exception e) { @@ -1236,18 +1391,6 @@ private void GenerateExposedToBackingColumnMapUtil(string entityName) } } - /// - /// Obtains the underlying mapping that belongs - /// to a given entity. - /// - /// entity whose map we get. - /// mapping belonging to entity. - private Dictionary? GetMappingForEntity(string entityName) - { - _entities.TryGetValue(entityName, out Entity? entity); - return entity?.Mappings; - } - /// /// Initialize OData parser by building OData model. /// The parser will be used for parsing filter clause and order by clause. @@ -1270,19 +1413,9 @@ private async Task PopulateSourceDefinitionAsync( string schemaName, string tableName, SourceDefinition sourceDefinition, - string[]? runtimeConfigKeyFields) + List pkFields) { - DataTable dataTable = await GetTableWithSchemaFromDataSetAsync(entityName, schemaName, tableName); - - List primaryKeys = new(dataTable.PrimaryKey); - if (runtimeConfigKeyFields is null || runtimeConfigKeyFields.Length == 0) - { - sourceDefinition.PrimaryKey = new(primaryKeys.Select(primaryKey => primaryKey.ColumnName)); - } - else - { - sourceDefinition.PrimaryKey = new(runtimeConfigKeyFields); - } + sourceDefinition.PrimaryKey = [.. 
pkFields]; if (sourceDefinition.PrimaryKey.Count == 0) { @@ -1298,6 +1431,7 @@ private async Task PopulateSourceDefinitionAsync( await PopulateTriggerMetadataForTable(entityName, schemaName, tableName, sourceDefinition); } + DataTable dataTable = await GetTableWithSchemaFromDataSetAsync(entityName, schemaName, tableName); using DataTableReader reader = new(dataTable); DataTable schemaTable = reader.GetSchemaTable(); RuntimeConfig runtimeConfig = _runtimeConfigProvider.GetConfig(); @@ -1405,12 +1539,21 @@ public static bool IsGraphQLReservedName(Entity entity, string databaseColumnNam if (entity.GraphQL is null || (entity.GraphQL.Enabled)) { if (entity.Mappings is not null - && entity.Mappings.TryGetValue(databaseColumnName, out string? fieldAlias) - && !string.IsNullOrWhiteSpace(fieldAlias)) + && entity.Mappings.TryGetValue(databaseColumnName, out string? fieldAlias) + && !string.IsNullOrWhiteSpace(fieldAlias)) { databaseColumnName = fieldAlias; } + if (entity.Fields is not null) + { + FieldMetadata? fieldMeta = entity.Fields.FirstOrDefault(f => f.Name == databaseColumnName); + if (fieldMeta != null && !string.IsNullOrWhiteSpace(fieldMeta.Alias)) + { + databaseColumnName = fieldMeta.Alias; + } + } + return IsIntrospectionField(databaseColumnName); } } diff --git a/src/Core/Services/MultipleMutationInputValidator.cs b/src/Core/Services/MultipleMutationInputValidator.cs index 5723795a30..946ae49951 100644 --- a/src/Core/Services/MultipleMutationInputValidator.cs +++ b/src/Core/Services/MultipleMutationInputValidator.cs @@ -70,7 +70,7 @@ public MultipleMutationInputValidator(IMetadataProviderFactory sqlMetadataProvid /// } /// } public void ValidateGraphQLValueNode( - IInputField schema, + IInputValueDefinition schema, IMiddlewareContext context, object? 
parameters, int nestingLevel, diff --git a/src/Core/Services/OpenAPI/OpenApiDocumentor.cs b/src/Core/Services/OpenAPI/OpenApiDocumentor.cs index 003d7ddd13..87fb96bc32 100644 --- a/src/Core/Services/OpenAPI/OpenApiDocumentor.cs +++ b/src/Core/Services/OpenAPI/OpenApiDocumentor.cs @@ -16,6 +16,7 @@ using Azure.DataApiBuilder.Core.Services.OpenAPI; using Azure.DataApiBuilder.Product; using Azure.DataApiBuilder.Service.Exceptions; +using Microsoft.OpenApi.Any; using Microsoft.OpenApi.Models; using Microsoft.OpenApi.Writers; using static Azure.DataApiBuilder.Config.DabConfigEvents; @@ -137,6 +138,19 @@ public void CreateDocument(bool doOverrideExistingDocument = false) Schemas = CreateComponentSchemas(runtimeConfig.Entities, runtimeConfig.DefaultDataSourceName) }; + // Collect all entity tags and their descriptions for the top-level tags array + List globalTags = new(); + foreach (KeyValuePair kvp in runtimeConfig.Entities) + { + Entity entity = kvp.Value; + string restPath = entity.Rest?.Path ?? kvp.Key; + globalTags.Add(new OpenApiTag + { + Name = restPath, + Description = string.IsNullOrWhiteSpace(entity.Description) ? null : entity.Description + }); + } + OpenApiDocument doc = new() { Info = new OpenApiInfo @@ -149,7 +163,8 @@ public void CreateDocument(bool doOverrideExistingDocument = false) new() { Url = url } }, Paths = BuildPaths(runtimeConfig.Entities, runtimeConfig.DefaultDataSourceName), - Components = components + Components = components, + Tags = globalTags }; _openApiDocument = doc; } @@ -212,10 +227,11 @@ private OpenApiPaths BuildPaths(RuntimeEntities entities, string defaultDataSour continue; } - // Explicitly exclude setting the tag's Description property since the Name property is self-explanatory. + // Set the tag's Description property to the entity's semantic description if present. OpenApiTag openApiTag = new() { - Name = entityRestPath + Name = entityRestPath, + Description = string.IsNullOrWhiteSpace(entity.Description) ? 
null : entity.Description }; // The OpenApiTag will categorize all paths created using the entity's name or overridden REST path value. @@ -995,13 +1011,13 @@ private Dictionary CreateComponentSchemas(RuntimeEntities // Response body schema whose properties map to the stored procedure's first result set columns // as described by sys.dm_exec_describe_first_result_set. - schemas.Add(entityName + SP_RESPONSE_SUFFIX, CreateComponentSchema(entityName, fields: exposedColumnNames, metadataProvider)); + schemas.Add(entityName + SP_RESPONSE_SUFFIX, CreateComponentSchema(entityName, fields: exposedColumnNames, metadataProvider, entities)); } else { // Create component schema for FULL entity with all primary key columns (included auto-generated) // which will typically represent the response body of a request or a stored procedure's request body. - schemas.Add(entityName, CreateComponentSchema(entityName, fields: exposedColumnNames, metadataProvider)); + schemas.Add(entityName, CreateComponentSchema(entityName, fields: exposedColumnNames, metadataProvider, entities)); // Create an entity's request body component schema excluding autogenerated primary keys. // A POST request requires any non-autogenerated primary key references to be in the request body. 
@@ -1021,7 +1037,7 @@ private Dictionary CreateComponentSchemas(RuntimeEntities } } - schemas.Add($"{entityName}_NoAutoPK", CreateComponentSchema(entityName, fields: exposedColumnNames, metadataProvider)); + schemas.Add($"{entityName}_NoAutoPK", CreateComponentSchema(entityName, fields: exposedColumnNames, metadataProvider, entities)); // Create an entity's request body component schema excluding all primary keys // by removing the tracked non-autogenerated primary key column names and removing them from @@ -1037,7 +1053,7 @@ private Dictionary CreateComponentSchemas(RuntimeEntities } } - schemas.Add($"{entityName}_NoPK", CreateComponentSchema(entityName, fields: exposedColumnNames, metadataProvider)); + schemas.Add($"{entityName}_NoPK", CreateComponentSchema(entityName, fields: exposedColumnNames, metadataProvider, entities)); } } @@ -1056,21 +1072,32 @@ private Dictionary CreateComponentSchemas(RuntimeEntities private static OpenApiSchema CreateSpRequestComponentSchema(Dictionary fields) { Dictionary properties = new(); + HashSet required = new(); - foreach (string parameter in fields.Keys) + foreach (KeyValuePair kvp in fields) { - string typeMetadata = TypeHelper.GetJsonDataTypeFromSystemType(fields[parameter].SystemType).ToString().ToLower(); + string parameter = kvp.Key; + ParameterDefinition def = kvp.Value; + string typeMetadata = TypeHelper.GetJsonDataTypeFromSystemType(def.SystemType).ToString().ToLower(); properties.Add(parameter, new OpenApiSchema() { - Type = typeMetadata + Type = typeMetadata, + Description = def.Description, + Default = def.Default is not null ? new OpenApiString(def.Default) : null }); + + if (def.Required == true) + { + required.Add(parameter); + } } OpenApiSchema schema = new() { Type = SCHEMA_OBJECT_TYPE, - Properties = properties + Properties = properties, + Required = required }; return schema; @@ -1086,10 +1113,12 @@ private static OpenApiSchema CreateSpRequestComponentSchema(Dictionary /// Name of the entity. 
/// List of mapped (alias) field names. + /// Metadata provider for database objects. + /// Runtime entities from configuration. /// Raised when an entity's database metadata can't be found, /// indicating a failure due to the provided entityName. /// Entity's OpenApiSchema representation. - private static OpenApiSchema CreateComponentSchema(string entityName, HashSet fields, ISqlMetadataProvider metadataProvider) + private static OpenApiSchema CreateComponentSchema(string entityName, HashSet fields, ISqlMetadataProvider metadataProvider, RuntimeEntities entities) { if (!metadataProvider.EntityToDatabaseObject.TryGetValue(entityName, out DatabaseObject? dbObject) || dbObject is null) { @@ -1101,6 +1130,8 @@ private static OpenApiSchema CreateComponentSchema(string entityName, HashSet properties = new(); + Entity? entityConfig = entities.TryGetValue(entityName, out Entity? ent) ? ent : null; + // Get backing column metadata to resolve the correct system type which is then // used to resolve the correct Json data type. 
foreach (string field in fields) @@ -1109,15 +1140,24 @@ private static OpenApiSchema CreateComponentSchema(string entityName, HashSet f.Alias == field || f.Name == field); + fieldDescription = fieldMetadata?.Description; + } + properties.Add(field, new OpenApiSchema() { Type = typeMetadata, - Format = formatMetadata + Format = formatMetadata, + Description = fieldDescription }); } } @@ -1125,7 +1165,8 @@ private static OpenApiSchema CreateComponentSchema(string entityName, HashSet async context => { await executionHelper.ExecuteQueryAsync(context).ConfigureAwait(false); @@ -30,21 +30,21 @@ public ResolverTypeInterceptor(ExecutionHelper executionHelper) }); _mutationMiddleware = - new FieldMiddlewareDefinition( + new FieldMiddlewareConfiguration( next => async context => { await executionHelper.ExecuteMutateAsync(context).ConfigureAwait(false); await next(context).ConfigureAwait(false); }); - _leafFieldResolver = ctx => ExecutionHelper.ExecuteLeafField(ctx); - _objectFieldResolver = ctx => executionHelper.ExecuteObjectField(ctx); - _listFieldResolver = ctx => executionHelper.ExecuteListField(ctx); + _leafFieldResolver = ExecutionHelper.ExecuteLeafField; + _objectFieldResolver = executionHelper.ExecuteObjectField; + _listFieldResolver = executionHelper.ExecuteListField; } public override void OnAfterResolveRootType( ITypeCompletionContext completionContext, - ObjectTypeDefinition definition, + ObjectTypeConfiguration definition, OperationType operationType) { switch (operationType) @@ -69,26 +69,26 @@ public override void OnAfterResolveRootType( public override void OnBeforeCompleteType( ITypeCompletionContext completionContext, - DefinitionBase? definition) + TypeSystemConfiguration? definition) { // We are only interested in object types here as only object types can have resolvers. 
- if (definition is not ObjectTypeDefinition objectTypeDef) + if (definition is not ObjectTypeConfiguration objectTypeConfig) { return; } if (ReferenceEquals(completionContext.Type, _queryType)) { - foreach (ObjectFieldDefinition field in objectTypeDef.Fields) + foreach (ObjectFieldConfiguration field in objectTypeConfig.Fields) { - field.MiddlewareDefinitions.Add(_queryMiddleware); + field.MiddlewareConfigurations.Add(_queryMiddleware); } } else if (ReferenceEquals(completionContext.Type, _mutationType)) { - foreach (ObjectFieldDefinition field in objectTypeDef.Fields) + foreach (ObjectFieldConfiguration field in objectTypeConfig.Fields) { - field.MiddlewareDefinitions.Add(_mutationMiddleware); + field.MiddlewareConfigurations.Add(_mutationMiddleware); } } else if (ReferenceEquals(completionContext.Type, _subscriptionType)) @@ -97,7 +97,7 @@ public override void OnBeforeCompleteType( } else { - foreach (ObjectFieldDefinition field in objectTypeDef.Fields) + foreach (ObjectFieldConfiguration field in objectTypeConfig.Fields) { if (field.Type is not null && completionContext.TryGetType(field.Type, out IType? type)) diff --git a/src/Core/Services/RestService.cs b/src/Core/Services/RestService.cs index 0cf9f8a374..6a2308dd83 100644 --- a/src/Core/Services/RestService.cs +++ b/src/Core/Services/RestService.cs @@ -391,6 +391,14 @@ public string GetRouteAfterPathBase(string route) // forward slash '/'. 
configuredRestPathBase = configuredRestPathBase.Substring(1); + if (route.Equals(_runtimeConfigProvider.GetConfig().McpPath.Substring(1))) + { + throw new DataApiBuilderException( + message: $"Route {route} was not found.", + statusCode: HttpStatusCode.NotFound, + subStatusCode: DataApiBuilderException.SubStatusCodes.GlobalMcpEndpointDisabled); + } + if (!route.StartsWith(configuredRestPathBase)) { throw new DataApiBuilderException( diff --git a/src/Directory.Build.props b/src/Directory.Build.props index 7427cfb1c2..26ad392ae8 100644 --- a/src/Directory.Build.props +++ b/src/Directory.Build.props @@ -2,7 +2,7 @@ enable ..\out - 1.6 + 1.7 diff --git a/src/Directory.Packages.props b/src/Directory.Packages.props index ee79b16b00..14f097915c 100644 --- a/src/Directory.Packages.props +++ b/src/Directory.Packages.props @@ -1,64 +1,70 @@ - - true - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/Service.GraphQLBuilder/Directives/RelationshipDirective.cs b/src/Service.GraphQLBuilder/Directives/RelationshipDirective.cs index 049327dffb..07d96fa824 100644 --- a/src/Service.GraphQLBuilder/Directives/RelationshipDirective.cs +++ b/src/Service.GraphQLBuilder/Directives/RelationshipDirective.cs @@ -50,11 +50,11 @@ public static string Target(FieldDefinitionNode field) /// /// Gets the target object type name for an input infield with a relationship directive. /// - /// The input field that is expected to have a relationship directive defined on it. + /// The input field that is expected to have a relationship directive defined on it. /// The name of the target object if the relationship is found, null otherwise. - public static string? GetTarget(IInputField infield) + public static string? GetTarget(IInputValueDefinition inputField) { - Directive? directive = (Directive?)infield.Directives.FirstOrDefault(DirectiveName); + Directive? 
directive = (Directive?)inputField.Directives.FirstOrDefault(DirectiveName); return directive?.GetArgumentValue("target"); } diff --git a/src/Service.GraphQLBuilder/GraphQLStoredProcedureBuilder.cs b/src/Service.GraphQLBuilder/GraphQLStoredProcedureBuilder.cs index ed72a575e7..8aca57421c 100644 --- a/src/Service.GraphQLBuilder/GraphQLStoredProcedureBuilder.cs +++ b/src/Service.GraphQLBuilder/GraphQLStoredProcedureBuilder.cs @@ -55,17 +55,25 @@ public static FieldDefinitionNode GenerateStoredProcedureSchema( // Without database metadata, there is no way to know to cast 1 to a decimal versus an integer. IValueNode? defaultValueNode = null; - if (entity.Source.Parameters is not null && entity.Source.Parameters.TryGetValue(param, out object? value)) + if (entity.Source.Parameters is not null) { - Tuple defaultGraphQLValue = ConvertValueToGraphQLType(value.ToString()!, parameterDefinition: spdef.Parameters[param]); - defaultValueNode = defaultGraphQLValue.Item2; + ParameterMetadata? paramMetadata = entity.Source.Parameters + .FirstOrDefault(p => p.Name == param); + + if (paramMetadata is not null && paramMetadata.Default is not null) + { + Tuple defaultGraphQLValue = ConvertValueToGraphQLType(paramMetadata.Default.ToString()!, parameterDefinition: spdef.Parameters[param]); + defaultValueNode = defaultGraphQLValue.Item2; + } } inputValues.Add( new( location: null, name: new(param), - description: new StringValueNode($"parameters for {name.Value} stored-procedure"), + description: definition.Description != null + ? 
new StringValueNode(definition.Description) + : new StringValueNode($"parameters for {name.Value} stored-procedure"), type: new NamedTypeNode(SchemaConverter.GetGraphQLTypeFromSystemType(type: definition.SystemType)), defaultValue: defaultValueNode, directives: new List()) @@ -157,7 +165,15 @@ private static Tuple ConvertValueToGraphQLType(string defaul FLOAT_TYPE => new(FLOAT_TYPE, new FloatValueNode(double.Parse(defaultValueFromConfig))), DECIMAL_TYPE => new(DECIMAL_TYPE, new FloatValueNode(decimal.Parse(defaultValueFromConfig))), STRING_TYPE => new(STRING_TYPE, new StringValueNode(defaultValueFromConfig)), - BOOLEAN_TYPE => new(BOOLEAN_TYPE, new BooleanValueNode(bool.Parse(defaultValueFromConfig))), + BOOLEAN_TYPE => new(BOOLEAN_TYPE, new BooleanValueNode( + defaultValueFromConfig switch + { + "1" => true, + "0" => false, + var s when s.Equals("true", StringComparison.OrdinalIgnoreCase) => true, + var s when s.Equals("false", StringComparison.OrdinalIgnoreCase) => false, + _ => throw new FormatException($"String '{defaultValueFromConfig}' was not recognized as a valid Boolean.") + })), DATETIME_TYPE => new(DATETIME_TYPE, new DateTimeType().ParseResult( DateTime.Parse(defaultValueFromConfig, DateTimeFormatInfo.InvariantInfo, DateTimeStyles.AssumeUniversal))), BYTEARRAY_TYPE => new(BYTEARRAY_TYPE, new ByteArrayType().ParseValue(Convert.FromBase64String(defaultValueFromConfig))), diff --git a/src/Service.GraphQLBuilder/GraphQLUtils.cs b/src/Service.GraphQLBuilder/GraphQLUtils.cs index ff43ae68fc..44c296c12c 100644 --- a/src/Service.GraphQLBuilder/GraphQLUtils.cs +++ b/src/Service.GraphQLBuilder/GraphQLUtils.cs @@ -202,10 +202,11 @@ public static bool CreateAuthorizationDirectiveIfNecessary( /// Collection of directives on GraphQL field. /// Value of @model directive, if present. /// True when name resolution succeeded, false otherwise. 
- public static bool TryExtractGraphQLFieldModelName(IDirectiveCollection fieldDirectives, + public static bool TryExtractGraphQLFieldModelName( + DirectiveCollection fieldDirectives, [NotNullWhen(true)] out string? modelName) { - modelName = fieldDirectives.FirstOrDefault()?.AsValue().Name; + modelName = fieldDirectives.FirstOrDefault()?.ToValue().Name; return !string.IsNullOrEmpty(modelName); } @@ -276,7 +277,7 @@ public static string GetEntityNameFromContext(IResolverContext context) // Example: CustomersConnectionObject - for get all scenarios. if (QueryBuilder.IsPaginationType(underlyingFieldType)) { - IObjectField subField = context.Selection.Type.NamedType() + ObjectField subField = context.Selection.Type.NamedType() .Fields[QueryBuilder.PAGINATION_FIELD_NAME]; type = subField.Type; underlyingFieldType = type.NamedType(); @@ -332,7 +333,7 @@ fieldSyntaxKind is SyntaxKind.StringValue || fieldSyntaxKind is SyntaxKind.Boole if (value.Kind == SyntaxKind.Variable) { string variableName = ((VariableNode)value).Name.Value; - IValueNode? variableValue = variables.GetVariable(variableName); + IValueNode? variableValue = variables.GetValue(variableName); return GetFieldDetails(variableValue, variables); } diff --git a/src/Service.GraphQLBuilder/Mutations/CreateMutationBuilder.cs b/src/Service.GraphQLBuilder/Mutations/CreateMutationBuilder.cs index c36ec96511..c2a9b0a9ac 100644 --- a/src/Service.GraphQLBuilder/Mutations/CreateMutationBuilder.cs +++ b/src/Service.GraphQLBuilder/Mutations/CreateMutationBuilder.cs @@ -31,8 +31,8 @@ public static class CreateMutationBuilder /// Database type of the relational database to generate input type for. /// Runtime config information. /// Indicates whether multiple create operation is enabled - /// A GraphQL input type with all expected fields mapped as GraphQL inputs. 
- private static InputObjectTypeDefinitionNode GenerateCreateInputTypeForRelationalDb( + /// An optional GraphQL input type with all expected fields mapped as GraphQL inputs. + private static InputObjectTypeDefinitionNode? GenerateCreateInputTypeForRelationalDb( Dictionary inputs, ObjectTypeDefinitionNode objectTypeDefinitionNode, string entityName, @@ -44,6 +44,7 @@ private static InputObjectTypeDefinitionNode GenerateCreateInputTypeForRelationa bool IsMultipleCreateOperationEnabled) { NameNode inputName = GenerateInputTypeName(name.Value); + InputObjectTypeDefinitionNode? input = null; if (inputs.TryGetValue(inputName, out InputObjectTypeDefinitionNode? db)) { @@ -54,7 +55,7 @@ private static InputObjectTypeDefinitionNode GenerateCreateInputTypeForRelationa // 1. Scalar input fields corresponding to columns which belong to the table. // 2. Complex input fields corresponding to related (target) entities (table backed entities, for now) // which are defined in the runtime config. - List inputFields = new(); + List inputFields = new(); // 1. Scalar input fields. IEnumerable scalarInputFields = objectTypeDefinitionNode.Fields @@ -62,24 +63,26 @@ private static InputObjectTypeDefinitionNode GenerateCreateInputTypeForRelationa .Select(field => GenerateScalarInputType(name, field, IsMultipleCreateOperationEnabled)); // Add scalar input fields to list of input fields for current input type. - inputFields.AddRange(scalarInputFields); - - // Create input object for this entity. - InputObjectTypeDefinitionNode input = - new( - location: null, - inputName, - new StringValueNode($"Input type for creating {name}"), - new List(), - inputFields - ); - - // Add input object to the dictionary of entities for which input object has already been created. - // This input object currently holds only scalar fields. - // The complex fields (for related entities) would be added later when we return from recursion. 
- // Adding the input object to the dictionary ensures that we don't go into infinite recursion and return whenever - // we find that the input object has already been created for the entity. - inputs.Add(input.Name, input); + // Generate the create input type only if there are any scalar fields that are not auto-generated fields. + if (scalarInputFields.Any()) + { + inputFields.AddRange(scalarInputFields); + + // Create input object for this entity. + input = + new( + location: null, + inputName, + new StringValueNode($"Input type for creating {name}"), + new List(), + inputFields!); + // Add input object to the dictionary of entities for which input object has already been created. + // This input object currently holds only scalar fields. + // The complex fields (for related entities) would be added later when we return from recursion. + // Adding the input object to the dictionary ensures that we don't go into infinite recursion and return whenever + // we find that the input object has already been created for the entity. + inputs.Add(input.Name, input); + } // Generate fields for related entities when // 1. Multiple mutation operations are supported for the database type. @@ -88,7 +91,7 @@ private static InputObjectTypeDefinitionNode GenerateCreateInputTypeForRelationa { // 2. Complex input fields. // Evaluate input objects for related entities. - IEnumerable complexInputFields = + IEnumerable complexInputFields = objectTypeDefinitionNode.Fields .Where(field => !IsBuiltInType(field.Type) && IsComplexFieldAllowedForCreateInputInRelationalDb(field, definitions)) .Select(field => @@ -148,7 +151,7 @@ private static InputObjectTypeDefinitionNode GenerateCreateInputTypeForRelationa databaseType: databaseType, entities: entities, IsMultipleCreateOperationEnabled: IsMultipleCreateOperationEnabled); - }); + }).Where(complexInputType => complexInputType != null); // Append relationship fields to the input fields. 
inputFields.AddRange(complexInputFields); } @@ -307,8 +310,8 @@ private static InputValueDefinitionNode GenerateScalarInputType(NameNode name, F /// The GraphQL object type to create the input type for. /// Database type to generate the input type for. /// Runtime configuration information for entities. - /// A GraphQL input type value. - private static InputValueDefinitionNode GenerateComplexInputTypeForRelationalDb( + /// An Optional GraphQL input type value. + private static InputValueDefinitionNode? GenerateComplexInputTypeForRelationalDb( string entityName, Dictionary inputs, IEnumerable definitions, @@ -320,7 +323,7 @@ private static InputValueDefinitionNode GenerateComplexInputTypeForRelationalDb( RuntimeEntities entities, bool IsMultipleCreateOperationEnabled) { - InputObjectTypeDefinitionNode node; + InputObjectTypeDefinitionNode? node; NameNode inputTypeName = GenerateInputTypeName(typeName); if (!inputs.ContainsKey(inputTypeName)) { @@ -340,7 +343,7 @@ private static InputValueDefinitionNode GenerateComplexInputTypeForRelationalDb( node = inputs[inputTypeName]; } - return GetComplexInputType(field, node, inputTypeName, IsMultipleCreateOperationEnabled); + return node == null ? null : GetComplexInputType(field, node, inputTypeName, IsMultipleCreateOperationEnabled); } /// @@ -487,7 +490,7 @@ public static IEnumerable Build( { List createMutationNodes = new(); Entity entity = entities[dbEntityName]; - InputObjectTypeDefinitionNode input; + InputObjectTypeDefinitionNode? input; if (!IsRelationalDb(databaseType)) { input = GenerateCreateInputTypeForNonRelationalDb( @@ -528,12 +531,14 @@ public static IEnumerable Build( string singularName = GetDefinedSingularName(name.Value, entity); - // Create one node. 
- FieldDefinitionNode createOneNode = new( - location: null, - name: new NameNode(GetPointCreateMutationNodeName(name.Value, entity)), - description: new StringValueNode($"Creates a new {singularName}"), - arguments: new List { + if (input != null) + { + // Create one node. + FieldDefinitionNode createOneNode = new( + location: null, + name: new NameNode(GetPointCreateMutationNodeName(name.Value, entity)), + description: new StringValueNode($"Creates a new {singularName}"), + arguments: new List { new( location : null, new NameNode(MutationBuilder.ITEM_INPUT_ARGUMENT_NAME), @@ -541,15 +546,16 @@ public static IEnumerable Build( new NonNullTypeNode(new NamedTypeNode(input.Name)), defaultValue: null, new List()) - }, - type: new NamedTypeNode(returnEntityName), - directives: fieldDefinitionNodeDirectives - ); + }, + type: new NamedTypeNode(returnEntityName), + directives: fieldDefinitionNodeDirectives + ); - createMutationNodes.Add(createOneNode); + createMutationNodes.Add(createOneNode); + } // Multiple create node is created in the schema only when multiple create operation is enabled. - if (IsMultipleCreateOperationEnabled) + if (IsMultipleCreateOperationEnabled && input != null) { // Create multiple node. FieldDefinitionNode createMultipleNode = new( diff --git a/src/Service.GraphQLBuilder/Mutations/MutationBuilder.cs b/src/Service.GraphQLBuilder/Mutations/MutationBuilder.cs index 35c6e5e3a8..6ceb4445d3 100644 --- a/src/Service.GraphQLBuilder/Mutations/MutationBuilder.cs +++ b/src/Service.GraphQLBuilder/Mutations/MutationBuilder.cs @@ -152,31 +152,42 @@ private static void AddMutations( break; case EntityActionOperation.Update: // Generate Mutation operation for Patch and Update both for CosmosDB - mutationFields.Add(UpdateAndPatchMutationBuilder.Build( - name, - inputs, - objectTypeDefinitionNode, - root, - entities, - dbEntityName, - databaseType, - returnEntityName, - rolesAllowedForMutation)); + FieldDefinitionNode? 
mutationField = UpdateAndPatchMutationBuilder.Build( + name, + inputs, + objectTypeDefinitionNode, + root, + entities, + dbEntityName, + databaseType, + returnEntityName, + rolesAllowedForMutation); + + if (mutationField != null) + { + mutationFields.Add(mutationField); + } if (databaseType is DatabaseType.CosmosDB_NoSQL) { - mutationFields.Add(UpdateAndPatchMutationBuilder.Build( - name, - inputs, - objectTypeDefinitionNode, - root, - entities, - dbEntityName, - databaseType, - returnEntityName, - rolesAllowedForMutation, - EntityActionOperation.Patch, - operationNamePrefix: "patch")); + FieldDefinitionNode? cosmosMutationField = UpdateAndPatchMutationBuilder.Build( + name, + inputs, + objectTypeDefinitionNode, + root, + entities, + dbEntityName, + databaseType, + returnEntityName, + rolesAllowedForMutation, + EntityActionOperation.Patch, + operationNamePrefix: "patch"); + + if (cosmosMutationField != null) + { + mutationFields.Add(cosmosMutationField); + } + } break; diff --git a/src/Service.GraphQLBuilder/Mutations/UpdateAndPatchMutationBuilder.cs b/src/Service.GraphQLBuilder/Mutations/UpdateAndPatchMutationBuilder.cs index 7755e015d8..8916864a37 100644 --- a/src/Service.GraphQLBuilder/Mutations/UpdateAndPatchMutationBuilder.cs +++ b/src/Service.GraphQLBuilder/Mutations/UpdateAndPatchMutationBuilder.cs @@ -55,7 +55,7 @@ private static bool FieldAllowedOnUpdateInput(FieldDefinitionNode field, return true; } - private static InputObjectTypeDefinitionNode GenerateUpdateInputType( + private static InputObjectTypeDefinitionNode? GenerateUpdateInputType( Dictionary inputs, ObjectTypeDefinitionNode objectTypeDefinitionNode, NameNode name, @@ -65,13 +65,14 @@ private static InputObjectTypeDefinitionNode GenerateUpdateInputType( EntityActionOperation operation) { NameNode inputName = GenerateInputTypeName(operation, name.Value); + InputObjectTypeDefinitionNode? 
input; if (inputs.ContainsKey(inputName)) { return inputs[inputName]; } - IEnumerable inputFields = + IEnumerable inputFields = objectTypeDefinitionNode.Fields .Where(f => FieldAllowedOnUpdateInput(f, databaseType, definitions, operation, objectTypeDefinitionNode)) .Select(f => @@ -89,17 +90,26 @@ private static InputObjectTypeDefinitionNode GenerateUpdateInputType( return GenerateSimpleInputType(name, f, databaseType, operation); }); - InputObjectTypeDefinitionNode input = + if (inputFields.Any()) + { + List inputFieldsList = inputFields + .Where(i => i != null) + .Select(i => i!) + .ToList(); + input = new( location: null, inputName, new StringValueNode($"Input type for updating {name}"), new List(), - inputFields.ToList() + inputFieldsList ); - inputs.Add(input.Name, input); - return input; + inputs.Add(input.Name, input); + return input; + } + + return null; } private static InputValueDefinitionNode GenerateSimpleInputType(NameNode name, FieldDefinitionNode f, DatabaseType databaseType, EntityActionOperation operation) @@ -117,7 +127,7 @@ private static InputValueDefinitionNode GenerateSimpleInputType(NameNode name, F ); } - private static InputValueDefinitionNode GetComplexInputType( + private static InputValueDefinitionNode? GetComplexInputType( Dictionary inputs, IEnumerable definitions, FieldDefinitionNode f, @@ -127,7 +137,7 @@ private static InputValueDefinitionNode GetComplexInputType( DatabaseType databaseType, EntityActionOperation operation) { - InputObjectTypeDefinitionNode node; + InputObjectTypeDefinitionNode? node; NameNode inputTypeName = GenerateInputTypeName(operation, typeName); if (!inputs.ContainsKey(inputTypeName)) @@ -139,35 +149,40 @@ private static InputValueDefinitionNode GetComplexInputType( node = inputs[inputTypeName]; } - ITypeNode type = new NamedTypeNode(node.Name); - - // For a type like [Bar!]! 
we have to first unpack the outer non-null - if (f.Type.IsNonNullType()) + if ((node != null)) { - // The innerType is the raw List, scalar or object type without null settings - ITypeNode innerType = f.Type.InnerType(); + ITypeNode type = new NamedTypeNode(node.Name); + + // For a type like [Bar!]! we have to first unpack the outer non-null + if (f.Type.IsNonNullType()) + { + // The innerType is the raw List, scalar or object type without null settings + ITypeNode innerType = f.Type.InnerType(); - if (innerType.IsListType()) + if (innerType.IsListType()) + { + type = GenerateListType(type, innerType); + } + + // Wrap the input with non-null to match the field definition + type = new NonNullTypeNode((INullableTypeNode)type); + } + else if (f.Type.IsListType()) { - type = GenerateListType(type, innerType); + type = GenerateListType(type, f.Type); } - // Wrap the input with non-null to match the field definition - type = new NonNullTypeNode((INullableTypeNode)type); - } - else if (f.Type.IsListType()) - { - type = GenerateListType(type, f.Type); + return new( + location: null, + f.Name, + new StringValueNode($"Input for field {f.Name} on type {inputTypeName}"), + type, + defaultValue: null, + f.Directives + ); } - return new( - location: null, - f.Name, - new StringValueNode($"Input for field {f.Name} on type {inputTypeName}"), - type, - defaultValue: null, - f.Directives - ); + return null; } private static ITypeNode GenerateListType(ITypeNode type, ITypeNode fieldType) @@ -201,7 +216,7 @@ private static NameNode GenerateInputTypeName(EntityActionOperation operation, s /// Runtime config information for the object type. /// Collection of role names allowed for action, to be added to authorize directive. /// A update*ObjectName* field to be added to the Mutation type. - public static FieldDefinitionNode Build( + public static FieldDefinitionNode? 
Build( NameNode name, Dictionary inputs, ObjectTypeDefinitionNode objectTypeDefinitionNode, @@ -214,7 +229,7 @@ public static FieldDefinitionNode Build( EntityActionOperation operation = EntityActionOperation.Update, string operationNamePrefix = UPDATE_MUTATION_PREFIX) { - InputObjectTypeDefinitionNode input = GenerateUpdateInputType( + InputObjectTypeDefinitionNode? input = GenerateUpdateInputType( inputs, objectTypeDefinitionNode, name, @@ -234,19 +249,21 @@ public static FieldDefinitionNode Build( description = "The ID of the item being updated."; } - List inputValues = new(); - foreach (FieldDefinitionNode idField in idFields) + if (input != null) { - inputValues.Add(new InputValueDefinitionNode( - location: null, - idField.Name, - new StringValueNode(description), - new NonNullTypeNode(idField.Type.NamedType()), - defaultValue: null, - new List())); - } + List inputValues = new(); + foreach (FieldDefinitionNode idField in idFields) + { + inputValues.Add(new InputValueDefinitionNode( + location: null, + idField.Name, + new StringValueNode(description), + new NonNullTypeNode(idField.Type.NamedType()), + defaultValue: null, + new List())); + } - inputValues.Add(new InputValueDefinitionNode( + inputValues.Add(new InputValueDefinitionNode( location: null, new NameNode(INPUT_ARGUMENT_NAME), new StringValueNode($"Input representing all the fields for updating {name}"), @@ -254,30 +271,33 @@ public static FieldDefinitionNode Build( defaultValue: null, new List())); - // Create authorize directive denoting allowed roles - List fieldDefinitionNodeDirectives = new() - { - new DirectiveNode( - ModelDirective.Names.MODEL, - new ArgumentNode(ModelDirective.Names.NAME_ARGUMENT, dbEntityName)) - }; - - if (CreateAuthorizationDirectiveIfNecessary( - rolesAllowedForMutation, - out DirectiveNode? 
authorizeDirective)) - { - fieldDefinitionNodeDirectives.Add(authorizeDirective!); + // Create authorize directive denoting allowed roles + List fieldDefinitionNodeDirectives = new() + { + new DirectiveNode( + ModelDirective.Names.MODEL, + new ArgumentNode(ModelDirective.Names.NAME_ARGUMENT, dbEntityName)) + }; + + if (CreateAuthorizationDirectiveIfNecessary( + rolesAllowedForMutation, + out DirectiveNode? authorizeDirective)) + { + fieldDefinitionNodeDirectives.Add(authorizeDirective!); + } + + string singularName = GetDefinedSingularName(name.Value, entities[dbEntityName]); + return new( + location: null, + name: new NameNode($"{operationNamePrefix}{singularName}"), + description: new StringValueNode($"Updates a {singularName}"), + arguments: inputValues, + type: new NamedTypeNode(returnEntityName), + directives: fieldDefinitionNodeDirectives + ); } - string singularName = GetDefinedSingularName(name.Value, entities[dbEntityName]); - return new( - location: null, - name: new NameNode($"{operationNamePrefix}{singularName}"), - description: new StringValueNode($"Updates a {singularName}"), - arguments: inputValues, - type: new NamedTypeNode(returnEntityName), - directives: fieldDefinitionNodeDirectives - ); + return null; } } } diff --git a/src/Service.GraphQLBuilder/Queries/InputTypeBuilder.cs b/src/Service.GraphQLBuilder/Queries/InputTypeBuilder.cs index 58ff41c504..ba57bde446 100644 --- a/src/Service.GraphQLBuilder/Queries/InputTypeBuilder.cs +++ b/src/Service.GraphQLBuilder/Queries/InputTypeBuilder.cs @@ -24,7 +24,7 @@ IDictionary inputTypes { List inputFields = GenerateFilterInputFieldsForBuiltInFields(node, inputTypes); string filterInputName = GenerateObjectInputFilterName(node); - GenerateFilterInputTypeFromInputFields(inputTypes, inputFields, filterInputName, $"Filter input for {node.Name} GraphQL type"); + GenerateInputTypeFromInputFields(inputTypes, inputFields, filterInputName, $"Filter input for {node.Name} GraphQL type"); } internal static void 
GenerateOrderByInputTypeForObjectType(ObjectTypeDefinitionNode node, IDictionary inputTypes) @@ -32,7 +32,10 @@ internal static void GenerateOrderByInputTypeForObjectType(ObjectTypeDefinitionN List inputFields = GenerateOrderByInputFieldsForBuiltInFields(node); string orderByInputName = GenerateObjectInputOrderByName(node); + GenerateInputTypeFromInputFields(inputTypes, inputFields, orderByInputName, $"Order by input for {node.Name} GraphQL type"); + // OrderBy does not include "and" and "or" input types so we add only the orderByInputName here. + /* inputTypes.Add( orderByInputName, new( @@ -43,6 +46,7 @@ internal static void GenerateOrderByInputTypeForObjectType(ObjectTypeDefinitionN inputFields ) ); + */ } private static List GenerateOrderByInputFieldsForBuiltInFields(ObjectTypeDefinitionNode node) @@ -62,12 +66,26 @@ private static List GenerateOrderByInputFieldsForBuilt new List()) ); } + else if (RelationshipDirectiveType.Cardinality(field) == Config.ObjectModel.Cardinality.One) + { + string targetEntityName = RelationshipDirectiveType.Target(field); + + inputFields.Add( + new( + location: null, + field.Name, + new StringValueNode($"Order by options for {field.Name}"), + new NamedTypeNode(GenerateObjectInputOrderByName(targetEntityName)), + defaultValue: null, + new List()) + ); + } } return inputFields; } - private static void GenerateFilterInputTypeFromInputFields( + private static void GenerateInputTypeFromInputFields( IDictionary inputTypes, List inputFields, string inputTypeName, diff --git a/src/Service.GraphQLBuilder/Queries/QueryBuilder.cs b/src/Service.GraphQLBuilder/Queries/QueryBuilder.cs index 81842f9c60..299835c3f3 100644 --- a/src/Service.GraphQLBuilder/Queries/QueryBuilder.cs +++ b/src/Service.GraphQLBuilder/Queries/QueryBuilder.cs @@ -24,6 +24,8 @@ public static class QueryBuilder public const string ORDER_BY_FIELD_NAME = "orderBy"; public const string PARTITION_KEY_FIELD_NAME = "_partitionKeyValue"; public const string ID_FIELD_NAME = "id"; + 
public const string TOTAL_COUNT_FIELD_NAME = "totalCount"; + public const string OFFSET_FIELD_NAME = "offset"; public const string GROUP_BY_FIELD_NAME = "groupBy"; public const string GROUP_BY_FIELDS_FIELD_NAME = "fields"; public const string GROUP_BY_AGGREGATE_FIELD_NAME = "aggregations"; @@ -205,6 +207,8 @@ public static List QueryArgumentsForField(string filte new(location: null, new NameNode(PAGINATION_TOKEN_ARGUMENT_NAME), new StringValueNode("A pagination token from a previous query to continue through a paginated list"), new StringType().ToTypeNode(), defaultValue: null, new List()), new(location: null, new NameNode(FILTER_FIELD_NAME), new StringValueNode("Filter options for query"), new NamedTypeNode(filterInputName), defaultValue: null, new List()), new(location: null, new NameNode(ORDER_BY_FIELD_NAME), new StringValueNode("Ordering options for query"), new NamedTypeNode(orderByInputName), defaultValue: null, new List()), + new(location: null, new NameNode(OFFSET_FIELD_NAME), new StringValueNode("Partition key value for the query"), new IntType().ToTypeNode(), defaultValue: null, new List()), + }; } @@ -236,7 +240,7 @@ public static ObjectTypeDefinitionNode AddQueryArgumentsForRelationships(ObjectT return node; } - public static ObjectType PaginationTypeToModelType(ObjectType underlyingFieldType, IReadOnlyCollection types) + public static ObjectType PaginationTypeToModelType(ObjectType underlyingFieldType, IReadOnlyCollection types) { IEnumerable modelTypes = types.Where(t => t is ObjectType) .Cast() @@ -280,6 +284,13 @@ public static ObjectTypeDefinitionNode GenerateReturnType(NameNode name, bool is new StringValueNode("Indicates if there are more pages of items to return"), new List(), new NonNullType(new BooleanType()).ToTypeNode(), + new List()), + new( + location: null, + new NameNode(TOTAL_COUNT_FIELD_NAME), + new StringValueNode("The total number of items that matched the filter"), + new List(), + new NonNullType(new IntType()).ToTypeNode(), new 
List()) }; diff --git a/src/Service.GraphQLBuilder/Queries/StandardQueryInputs.cs b/src/Service.GraphQLBuilder/Queries/StandardQueryInputs.cs index 5ae516831d..aa6423d55d 100644 --- a/src/Service.GraphQLBuilder/Queries/StandardQueryInputs.cs +++ b/src/Service.GraphQLBuilder/Queries/StandardQueryInputs.cs @@ -45,8 +45,6 @@ public sealed class StandardQueryInputs private static readonly StringValueNode _startsWithDescription = new("Starts With"); private static readonly NameNode _endsWith = new("endsWith"); private static readonly StringValueNode _endsWithDescription = new("Ends With"); - private static readonly NameNode _caseInsensitive = new("caseInsensitive"); - private static readonly StringValueNode _caseInsensitiveDescription = new("Case Insensitive"); private static readonly NameNode _in = new("in"); private static readonly StringValueNode _inDescription = new("In"); @@ -154,7 +152,6 @@ private static InputObjectTypeDefinitionNode CreateStringFilter( new(null, _startsWith, _startsWithDescription, type, null, []), new(null, _endsWith, _endsWithDescription, type, null, []), new(null, _neq, _neqDescription, type, null, []), - new(null, _caseInsensitive, _caseInsensitiveDescription, type, null, []), new(null, _isNull, _isNullDescription, _boolean, null, []), new(null, _in, _inDescription, new ListTypeNode(type), null, []) ] diff --git a/src/Service.GraphQLBuilder/Sql/SchemaConverter.cs b/src/Service.GraphQLBuilder/Sql/SchemaConverter.cs index 206faceeaf..76057a76dc 100644 --- a/src/Service.GraphQLBuilder/Sql/SchemaConverter.cs +++ b/src/Service.GraphQLBuilder/Sql/SchemaConverter.cs @@ -78,6 +78,18 @@ public static ObjectTypeDefinitionNode GenerateObjectTypeDefinitionForDatabaseOb subStatusCode: DataApiBuilderException.SubStatusCodes.NotSupported); } + StringValueNode? 
descriptionNode = null; + if (!string.IsNullOrWhiteSpace(configEntity.Description)) + { + descriptionNode = new StringValueNode(configEntity.Description); + } + + // Set the description node if available + if (descriptionNode != null) + { + objectDefinitionNode = objectDefinitionNode.WithDescription(descriptionNode); + } + return objectDefinitionNode; } @@ -122,6 +134,12 @@ private static ObjectTypeDefinitionNode CreateObjectTypeDefinitionForStoredProce } } + StringValueNode? descriptionNode = null; + if (!string.IsNullOrWhiteSpace(configEntity.Description)) + { + descriptionNode = new StringValueNode(configEntity.Description); + } + // Top-level object type definition name should be singular. // The singularPlural.Singular value is used, and if not configured, // the top-level entity name value is used. No singularization occurs @@ -129,7 +147,7 @@ private static ObjectTypeDefinitionNode CreateObjectTypeDefinitionForStoredProce return new ObjectTypeDefinitionNode( location: null, name: new(value: GetDefinedSingularName(entityName, configEntity)), - description: null, + description: descriptionNode, directives: GenerateObjectTypeDirectivesForEntity(entityName, configEntity, rolesAllowedForEntity), new List(), fields.Values.ToImmutableList()); @@ -213,6 +231,12 @@ private static ObjectTypeDefinitionNode CreateObjectTypeDefinitionForTableOrView } } + StringValueNode? descriptionNode = null; + if (!string.IsNullOrWhiteSpace(configEntity.Description)) + { + descriptionNode = new StringValueNode(configEntity.Description); + } + // Top-level object type definition name should be singular. // The singularPlural.Singular value is used, and if not configured, // the top-level entity name value is used. 
No singularization occurs @@ -220,7 +244,7 @@ private static ObjectTypeDefinitionNode CreateObjectTypeDefinitionForTableOrView return new ObjectTypeDefinitionNode( location: null, name: new(value: GetDefinedSingularName(entityName, configEntity)), - description: null, + description: descriptionNode, directives: GenerateObjectTypeDirectivesForEntity(entityName, configEntity, rolesAllowedForEntity), new List(), fieldDefinitionNodes.Values.ToImmutableList()); @@ -399,17 +423,29 @@ private static FieldDefinitionNode GenerateFieldForColumn(Entity configEntity, s directives.Add(authZDirective!); } + // Determine the exposed column name considering mappings and aliases string exposedColumnName = columnName; if (configEntity.Mappings is not null && configEntity.Mappings.TryGetValue(key: columnName, out string? columnAlias)) { exposedColumnName = columnAlias; } + // Apply alias if present (alias overrides mapping) + FieldMetadata? fieldMetadata = null; + if (configEntity.Fields is not null) + { + fieldMetadata = configEntity.Fields.FirstOrDefault(f => f.Name == columnName); + if (fieldMetadata != null && !string.IsNullOrEmpty(fieldMetadata.Alias)) + { + exposedColumnName = fieldMetadata.Alias; + } + } + NamedTypeNode fieldType = new(GetGraphQLTypeFromSystemType(column.SystemType)); FieldDefinitionNode field = new( location: null, new(exposedColumnName), - description: null, + description: fieldMetadata?.Description is null ? null : new StringValueNode(fieldMetadata.Description), new List(), column.IsNullable ? fieldType : new NonNullTypeNode(fieldType), directives); @@ -580,8 +616,7 @@ private static bool FindNullabilityOfRelationship( bool isNullableRelationship = false; SourceDefinition sourceDefinition = databaseObject.SourceDefinition; if (// Retrieve all the relationship information for the source entity which is backed by this table definition - sourceDefinition.SourceEntityRelationshipMap.TryGetValue(entityName, out RelationshipMetadata? 
relationshipInfo) - && + sourceDefinition.SourceEntityRelationshipMap.TryGetValue(entityName, out RelationshipMetadata? relationshipInfo) && // From the relationship information, obtain the foreign key definition for the given target entity relationshipInfo.TargetEntityToFkDefinitionMap.TryGetValue(targetEntityName, out List? listOfForeignKeys)) diff --git a/src/Service.Tests/Authentication/Helpers/RuntimeConfigAuthHelper.cs b/src/Service.Tests/Authentication/Helpers/RuntimeConfigAuthHelper.cs index 12c7db4fce..07a8a565ec 100644 --- a/src/Service.Tests/Authentication/Helpers/RuntimeConfigAuthHelper.cs +++ b/src/Service.Tests/Authentication/Helpers/RuntimeConfigAuthHelper.cs @@ -20,6 +20,7 @@ internal static RuntimeConfig CreateTestConfigWithAuthNProvider(AuthenticationOp Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: hostOptions ), Entities: new(new Dictionary()) diff --git a/src/Service.Tests/Authorization/AuthorizationHelpers.cs b/src/Service.Tests/Authorization/AuthorizationHelpers.cs index 7c6948b484..bdd5630a50 100644 --- a/src/Service.Tests/Authorization/AuthorizationHelpers.cs +++ b/src/Service.Tests/Authorization/AuthorizationHelpers.cs @@ -112,6 +112,7 @@ public static RuntimeConfig InitRuntimeConfig( Entity sampleEntity = new( Source: entitySource, + Fields: null, Rest: new(Array.Empty()), GraphQL: new(entityName.Singularize(), entityName.Pluralize()), Permissions: new EntityPermission[] { permissionForEntity }, @@ -126,6 +127,7 @@ public static RuntimeConfig InitRuntimeConfig( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new( Cors: null, Authentication: new(authProvider, null) diff --git a/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs b/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs index 3c7c31a8ca..0dff3ac016 100644 --- a/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs +++ b/src/Service.Tests/Authorization/AuthorizationResolverUnitTests.cs @@ -1293,7 +1293,8 @@ 
public void UniqueClaimsResolvedForDbPolicy_SessionCtx_Usage() new("sub", "Aa_0RISCzzZ-abC1De2fGHIjKLMNo123pQ4rStUVWXY"), new("oid", "55296aad-ea7f-4c44-9a4c-bb1e8d43a005"), new(AuthenticationOptions.ROLE_CLAIM_TYPE, TEST_ROLE), - new(AuthenticationOptions.ROLE_CLAIM_TYPE, "ROLE2") + new(AuthenticationOptions.ROLE_CLAIM_TYPE, "ROLE2"), + new(AuthenticationOptions.ROLE_CLAIM_TYPE, "ROLE3") }; //Add identity object to the Mock context object. @@ -1315,6 +1316,7 @@ public void UniqueClaimsResolvedForDbPolicy_SessionCtx_Usage() Assert.AreEqual(expected: "Aa_0RISCzzZ-abC1De2fGHIjKLMNo123pQ4rStUVWXY", actual: claimsInRequestContext["sub"], message: "Expected the sub claim to be present."); Assert.AreEqual(expected: "55296aad-ea7f-4c44-9a4c-bb1e8d43a005", actual: claimsInRequestContext["oid"], message: "Expected the oid claim to be present."); Assert.AreEqual(claimsInRequestContext[AuthenticationOptions.ROLE_CLAIM_TYPE], actual: TEST_ROLE, message: "The roles claim should have the value:" + TEST_ROLE); + Assert.AreEqual(expected: "[\"" + TEST_ROLE + "\",\"ROLE2\",\"ROLE3\"]", actual: claimsInRequestContext[AuthenticationOptions.ORIGINAL_ROLE_CLAIM_TYPE], message: "Original roles should be preserved in a new context"); } /// @@ -1365,7 +1367,7 @@ public void ValidateUnauthenticatedUserClaimsAreNotResolvedWhenProcessingUserCla Dictionary resolvedClaims = AuthorizationResolver.GetProcessedUserClaims(context.Object); // Assert - Assert.AreEqual(expected: authenticatedUserclaims.Count, actual: resolvedClaims.Count, message: "Only two claims should be present."); + Assert.AreEqual(expected: authenticatedUserclaims.Count + 1, actual: resolvedClaims.Count, message: "Only " + (authenticatedUserclaims.Count + 1) + " claims should be present."); Assert.AreEqual(expected: "openid", actual: resolvedClaims["scp"], message: "Unexpected scp claim returned."); bool didResolveUnauthenticatedRoleClaim = resolvedClaims[AuthenticationOptions.ROLE_CLAIM_TYPE] == "Don't_Parse_This_Role"; @@ 
-1422,6 +1424,7 @@ private static RuntimeConfig BuildTestRuntimeConfig(EntityPermission[] permissio { Entity sampleEntity = new( Source: new(entityName, EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new("", ""), Permissions: permissions, @@ -1439,6 +1442,7 @@ private static RuntimeConfig BuildTestRuntimeConfig(EntityPermission[] permissio Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap) diff --git a/src/Service.Tests/Authorization/GraphQL/GraphQLAuthorizationHandlerTests.cs b/src/Service.Tests/Authorization/GraphQL/GraphQLAuthorizationHandlerTests.cs index 92813ab105..faefdebb4b 100644 --- a/src/Service.Tests/Authorization/GraphQL/GraphQLAuthorizationHandlerTests.cs +++ b/src/Service.Tests/Authorization/GraphQL/GraphQLAuthorizationHandlerTests.cs @@ -71,5 +71,131 @@ public async Task FieldAuthorizationProcessing(bool isAuthenticated, string clie SqlTestHelper.PerformTestEqualJsonStrings(expectedResult, actual.ToString()); } } + + /// + /// Tests that a GraphQL query with a groupBy operation on fields not allowed for aggregation results in an + /// appropriate error message. + /// + /// + [TestMethod] + public async Task Query_GroupBy_FieldNotAllowed() + { + string graphQLQueryName = "booksNF"; + string graphQLQuery = @"{ + booksNF { + groupBy (fields: [id, publisher_id]) { + fields { + id + publisher_id + } + } + } + } + "; + + JsonElement actual = await ExecuteGraphQLRequestAsync( + graphQLQuery, + graphQLQueryName, + isAuthenticated: true, + clientRoleHeader: "TestFieldExcludedForAggregation"); + + SqlTestHelper.TestForErrorInGraphQLResponse( + actual.ToString(), + message: "Access forbidden to field 'publisher_id' referenced in the groupBy argument.", + path: @"[""booksNF""]" + ); + } + + /// + /// Tests that a GraphQL query with a group by aggregation on a field not allowed for aggregation results in an + /// appropriate error message. 
+ /// + /// + [TestMethod] + public async Task Query_GroupBy_Aggregation_FieldNotAllowed() + { + string graphQLQueryName = "booksNF"; + string graphQLQuery = @"{ + booksNF { + groupBy { + aggregations { + max (field: id) + min (field: publisher_id) + } + } + } + } + "; + + JsonElement actual = await ExecuteGraphQLRequestAsync( + graphQLQuery, + graphQLQueryName, + isAuthenticated: true, + clientRoleHeader: "TestFieldExcludedForAggregation"); + + SqlTestHelper.TestForErrorInGraphQLResponse( + actual.ToString(), + message: "Access forbidden to field 'publisher_id' referenced in the aggregation function 'min'.", + path: @"[""booksNF""]" + ); + } + + /// + /// Tests that a GraphQL query backed by stored procedure with a client role is allowed access and returns results. + /// + /// + [TestMethod] + public async Task Query_StoredProc_Allowed() + { + string graphQLQueryName = "executeGetBooksAuth"; + string graphQLQuery = @"{ + executeGetBooksAuth { + id + title + publisher_id + } + }"; + + JsonElement actual = await ExecuteGraphQLRequestAsync( + graphQLQuery, + graphQLQueryName, + isAuthenticated: true, + clientRoleHeader: "teststoredprocauth"); + + string dbQuery = $"EXEC dbo.get_books"; + string expected = await GetDatabaseResultAsync(dbQuery, expectJson: false); + + SqlTestHelper.PerformTestEqualJsonStrings(expected, actual.ToString()); + } + + /// + /// Tests that a GraphQL query backed by stored procedure with a client role is not allowed access and results in an + /// appropriate error message. 
+ /// + /// + [TestMethod] + public async Task Query_StoredProc_NotAllowed() + { + string graphQLQueryName = "executeGetBooksAuth"; + string graphQLQuery = @"{ + executeGetBooksAuth { + id + title + publisher_id + } + }"; + + JsonElement actual = await ExecuteGraphQLRequestAsync( + graphQLQuery, + graphQLQueryName, + isAuthenticated: true, + clientRoleHeader: "roledoesnotexist"); + + SqlTestHelper.TestForErrorInGraphQLResponse( + actual.ToString(), + message: "The current user is not authorized to access this resource.", + path: @"[""executeGetBooksAuth""]" + ); + } } } diff --git a/src/Service.Tests/Caching/DabCacheServiceIntegrationTests.cs b/src/Service.Tests/Caching/DabCacheServiceIntegrationTests.cs index 74f455c822..02b7ca6492 100644 --- a/src/Service.Tests/Caching/DabCacheServiceIntegrationTests.cs +++ b/src/Service.Tests/Caching/DabCacheServiceIntegrationTests.cs @@ -740,6 +740,7 @@ private static Mock CreateMockRuntimeConfigProvider(strin { Entity entity = new( Source: new EntitySource(string.Empty, null, null, null), + Fields: null, GraphQL: new EntityGraphQLOptions(string.Empty, string.Empty), Rest: new EntityRestOptions(), Permissions: Array.Empty(), @@ -763,6 +764,7 @@ private static Mock CreateMockRuntimeConfigProvider(strin dataSource, entities, null, + null, null ); mockRuntimeConfig diff --git a/src/Service.Tests/Caching/HealthEndpointCachingTests.cs b/src/Service.Tests/Caching/HealthEndpointCachingTests.cs index 2dbff7cbb2..fcc3e097e5 100644 --- a/src/Service.Tests/Caching/HealthEndpointCachingTests.cs +++ b/src/Service.Tests/Caching/HealthEndpointCachingTests.cs @@ -119,6 +119,7 @@ private static void SetupCachingTest(int? 
cacheTtlSeconds) { Entity requiredEntity = new( Health: new(enabled: true), + Fields: null, Source: new("books", EntitySourceType.Table, null, null), Rest: new(Enabled: true), GraphQL: new("book", "books", true), @@ -156,6 +157,7 @@ private static void CreateCustomConfigFile(Dictionary entityMap, Health: new(enabled: true, cacheTtlSeconds: cacheTtlSeconds), Rest: new(Enabled: true), GraphQL: new(Enabled: true), + Mcp: new(Enabled: true), Host: hostOptions ), Entities: new(entityMap)); diff --git a/src/Service.Tests/Configuration/AuthenticationConfigValidatorUnitTests.cs b/src/Service.Tests/Configuration/AuthenticationConfigValidatorUnitTests.cs index 8b01d29961..963211ae40 100644 --- a/src/Service.Tests/Configuration/AuthenticationConfigValidatorUnitTests.cs +++ b/src/Service.Tests/Configuration/AuthenticationConfigValidatorUnitTests.cs @@ -194,6 +194,7 @@ private static RuntimeConfig CreateRuntimeConfigWithOptionalAuthN(Authentication Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: hostOptions ), Entities: new(new Dictionary()) diff --git a/src/Service.Tests/Configuration/ConfigurationTests.cs b/src/Service.Tests/Configuration/ConfigurationTests.cs index 73be078259..65f6e6643b 100644 --- a/src/Service.Tests/Configuration/ConfigurationTests.cs +++ b/src/Service.Tests/Configuration/ConfigurationTests.cs @@ -47,6 +47,7 @@ using Microsoft.VisualStudio.TestTools.UnitTesting; using Moq; using Moq.Protected; +using Serilog; using VerifyMSTest; using static Azure.DataApiBuilder.Config.FileSystemRuntimeConfigLoader; using static Azure.DataApiBuilder.Service.Tests.Configuration.ConfigurationEndpoints; @@ -1607,11 +1608,12 @@ public async Task TestSqlMetadataForInvalidConfigEntities() GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, new(), new()); + RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, new(), new(), new()); // creating an 
entity with invalid table name Entity entityWithInvalidSourceName = new( Source: new("bokos", EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "book", Plural: "books"), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -1621,6 +1623,7 @@ public async Task TestSqlMetadataForInvalidConfigEntities() Entity entityWithInvalidSourceType = new( Source: new("publishers", EntitySourceType.StoredProcedure, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "publisher", Plural: "publishers"), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_AUTHENTICATED) }, @@ -1678,11 +1681,12 @@ public async Task TestSqlMetadataValidationForEntitiesWithInvalidSource() GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, new(), new()); + RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, new(), new(), new()); // creating an entity with invalid table name Entity entityWithInvalidSource = new( Source: new(null, EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "book", Plural: "books"), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -1693,6 +1697,7 @@ public async Task TestSqlMetadataValidationForEntitiesWithInvalidSource() // creating an entity with invalid source object and adding relationship with an entity with invalid source Entity entityWithInvalidSourceAndRelationship = new( Source: new(null, EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "publisher", Plural: "publishers"), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -2213,7 +2218,7 @@ public async Task TestPathRewriteMiddlewareForGraphQL( GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); 
- RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, new()); + RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, new(), new()); const string CUSTOM_CONFIG = "custom-config.json"; File.WriteAllText(CUSTOM_CONFIG, configuration.ToJson()); @@ -2542,7 +2547,7 @@ public async Task TestGlobalFlagToEnableRestAndGraphQLForHostedAndNonHostedEnvir DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions); + RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, null); const string CUSTOM_CONFIG = "custom-config.json"; File.WriteAllText(CUSTOM_CONFIG, configuration.ToJson()); @@ -2617,6 +2622,7 @@ public async Task ValidateErrorMessageForMutationWithoutReadPermission() { GraphQLRuntimeOptions graphqlOptions = new(Enabled: true); RestRuntimeOptions restRuntimeOptions = new(Enabled: false); + McpRuntimeOptions mcpRuntimeOptions = new(Enabled: false); DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); @@ -2640,6 +2646,7 @@ public async Task ValidateErrorMessageForMutationWithoutReadPermission() new EntityPermission( Role: AuthorizationResolver.ROLE_AUTHENTICATED , Actions: new[] { readAction, createAction, deleteAction })}; Entity entity = new(Source: new("stocks", EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "Stock", Plural: "Stocks"), Permissions: permissions, @@ -2647,7 +2654,7 @@ public async Task ValidateErrorMessageForMutationWithoutReadPermission() Mappings: null); string entityName = "Stock"; - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, entity, entityName); + RuntimeConfig configuration = 
InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions, entity, entityName); const string CUSTOM_CONFIG = "custom-config.json"; File.WriteAllText(CUSTOM_CONFIG, configuration.ToJson()); @@ -2918,6 +2925,7 @@ public async Task ValidateInheritanceOfReadPermissionFromAnonymous() { GraphQLRuntimeOptions graphqlOptions = new(Enabled: true); RestRuntimeOptions restRuntimeOptions = new(Enabled: false); + McpRuntimeOptions mcpRuntimeOptions = new(Enabled: false); DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); @@ -2941,6 +2949,7 @@ public async Task ValidateInheritanceOfReadPermissionFromAnonymous() new EntityPermission( Role: AuthorizationResolver.ROLE_AUTHENTICATED , Actions: new[] { createAction })}; Entity entity = new(Source: new("stocks", EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "Stock", Plural: "Stocks"), Permissions: permissions, @@ -2948,7 +2957,7 @@ public async Task ValidateInheritanceOfReadPermissionFromAnonymous() Mappings: null); string entityName = "Stock"; - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, entity, entityName); + RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions, entity, entityName); const string CUSTOM_CONFIG = "custom-config.json"; File.WriteAllText(CUSTOM_CONFIG, configuration.ToJson()); @@ -3059,6 +3068,7 @@ public async Task ValidateLocationHeaderFieldForPostRequests(EntitySourceType en GraphQLRuntimeOptions graphqlOptions = new(Enabled: false); RestRuntimeOptions restRuntimeOptions = new(Enabled: true); + McpRuntimeOptions mcpRuntimeOptions = new(Enabled: false); DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); @@ -3068,6 +3078,7 @@ public async Task 
ValidateLocationHeaderFieldForPostRequests(EntitySourceType en if (entityType is EntitySourceType.StoredProcedure) { Entity entity = new(Source: new("get_books", EntitySourceType.StoredProcedure, null, null), + Fields: null, Rest: new(new SupportedHttpVerb[] { SupportedHttpVerb.Get, SupportedHttpVerb.Post }), GraphQL: null, Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -3076,11 +3087,11 @@ public async Task ValidateLocationHeaderFieldForPostRequests(EntitySourceType en ); string entityName = "GetBooks"; - configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, entity, entityName); + configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions, entity, entityName); } else { - configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions); + configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions); } const string CUSTOM_CONFIG = "custom-config.json"; @@ -3157,6 +3168,7 @@ public async Task ValidateLocationHeaderWhenBaseRouteIsConfigured( { GraphQLRuntimeOptions graphqlOptions = new(Enabled: false); RestRuntimeOptions restRuntimeOptions = new(Enabled: true); + McpRuntimeOptions mcpRuntimeOptions = new(Enabled: false); DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); @@ -3166,6 +3178,7 @@ public async Task ValidateLocationHeaderWhenBaseRouteIsConfigured( if (entityType is EntitySourceType.StoredProcedure) { Entity entity = new(Source: new("get_books", EntitySourceType.StoredProcedure, null, null), + Fields: null, Rest: new(new SupportedHttpVerb[] { SupportedHttpVerb.Get, SupportedHttpVerb.Post }), GraphQL: null, Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -3174,11 +3187,11 @@ public async Task 
ValidateLocationHeaderWhenBaseRouteIsConfigured( ); string entityName = "GetBooks"; - configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, entity, entityName); + configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions, entity, entityName); } else { - configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions); + configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions); } const string CUSTOM_CONFIG = "custom-config.json"; @@ -3187,7 +3200,7 @@ public async Task ValidateLocationHeaderWhenBaseRouteIsConfigured( HostOptions staticWebAppsHostOptions = new(null, authenticationOptions); RuntimeOptions runtimeOptions = configuration.Runtime; - RuntimeOptions baseRouteEnabledRuntimeOptions = new(runtimeOptions?.Rest, runtimeOptions?.GraphQL, staticWebAppsHostOptions, "/data-api"); + RuntimeOptions baseRouteEnabledRuntimeOptions = new(runtimeOptions?.Rest, runtimeOptions?.GraphQL, runtimeOptions?.Mcp, staticWebAppsHostOptions, "/data-api"); RuntimeConfig baseRouteEnabledConfig = configuration with { Runtime = baseRouteEnabledRuntimeOptions }; File.WriteAllText(CUSTOM_CONFIG, baseRouteEnabledConfig.ToJson()); @@ -3339,6 +3352,7 @@ public async Task TestEngineSupportViewsWithoutKeyFieldsInConfigForMsSQL() GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); Entity viewEntity = new( Source: new("books_view_all", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new("", ""), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -3346,7 +3360,7 @@ public async Task TestEngineSupportViewsWithoutKeyFieldsInConfigForMsSQL() Mappings: null ); - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, new(), new(), viewEntity, "books_view_all"); + RuntimeConfig configuration = 
InitMinimalRuntimeConfig(dataSource, new(), new(), new(), viewEntity, "books_view_all"); const string CUSTOM_CONFIG = "custom-config.json"; @@ -3567,6 +3581,7 @@ public void TestProductionModeAppServiceEnvironmentCheck(HostMode hostMode, Easy RuntimeOptions runtimeOptions = new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, authenticationOptions, hostMode) ); RuntimeConfig configWithCustomHostMode = config with { Runtime = runtimeOptions }; @@ -3607,10 +3622,11 @@ public async Task TestSchemaIntrospectionQuery(bool enableIntrospection, bool ex { GraphQLRuntimeOptions graphqlOptions = new(AllowIntrospection: enableIntrospection); RestRuntimeOptions restRuntimeOptions = new(); + McpRuntimeOptions mcpRuntimeOptions = new(); DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions); + RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions); const string CUSTOM_CONFIG = "custom-config.json"; File.WriteAllText(CUSTOM_CONFIG, configuration.ToJson()); @@ -3659,6 +3675,7 @@ public void TestInvalidDatabaseColumnNameHandling( { GraphQLRuntimeOptions graphqlOptions = new(Enabled: globalGraphQLEnabled); RestRuntimeOptions restRuntimeOptions = new(Enabled: true); + McpRuntimeOptions mcpOptions = new(Enabled: true); DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); @@ -3675,6 +3692,7 @@ public void TestInvalidDatabaseColumnNameHandling( Entity entity = new( Source: new("graphql_incompatible", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: false), GraphQL: new("graphql_incompatible", "graphql_incompatibles", entityGraphQLEnabled), Permissions: new[] { 
GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -3682,7 +3700,7 @@ public void TestInvalidDatabaseColumnNameHandling( Mappings: mappings ); - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, entity, "graphqlNameCompat"); + RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpOptions, entity, "graphqlNameCompat"); const string CUSTOM_CONFIG = "custom-config.json"; File.WriteAllText(CUSTOM_CONFIG, configuration.ToJson()); @@ -3738,7 +3756,8 @@ public async Task OpenApi_InteractiveSwaggerUI( RuntimeConfig configuration = InitMinimalRuntimeConfig( dataSource: dataSource, graphqlOptions: new(), - restOptions: new(Path: customRestPath)); + restOptions: new(Path: customRestPath), + mcpOptions: new()); configuration = configuration with @@ -4056,6 +4075,7 @@ private static RuntimeConfig InitializeRuntimeWithLogLevel(Dictionary + /// Tests different Azure Log Analytics values to see if they are serialized and deserialized correctly to the Json config + /// + [DataTestMethod] + [TestCategory(TestCategory.MSSQL)] + [DataRow(true, "CustomTableName", "DcrImmutableId", "DceEndpoint", "TestDabLog", 1, true, "TestDabLog", 1)] + [DataRow(false, "", null, "", "", 10, false, "", 10)] + [DataRow(null, null, null, null, null, null, false, "DabLogs", 5)] + public void AzureLogAnalyticsSerialization( + bool? enabled, + string? customTableName, + string? dcrImmutableId, + string? dceEndpoint, + string? dabIdentifier, + int? 
flushIntSec, + bool expectedEnabled, + string expectedDabIdentifier, + int expectedFlushIntSec) + { + // Check if auth property and its values are expected to exist + bool expectedExistEnabled = enabled is not null; + bool expectedExistDabIdentifier = dabIdentifier is not null; + bool expectedExistFlushIntSec = flushIntSec is not null; + bool expectedExistCustomTableName = customTableName is not null; + bool expectedExistDcrImmutableId = dcrImmutableId is not null; + bool expectedExistDceEndpoint = dceEndpoint is not null; + + AzureLogAnalyticsAuthOptions authOptions = new(customTableName, dcrImmutableId, dceEndpoint); + AzureLogAnalyticsOptions azureLogAnalyticsOptions = new(enabled, authOptions, dabIdentifier, flushIntSec); + TelemetryOptions telemetryOptions = new(AzureLogAnalytics: azureLogAnalyticsOptions); + RuntimeConfig configWithCustomLogLevel = InitializeRuntimeWithTelemetry(telemetryOptions); + string configWithCustomLogLevelJson = configWithCustomLogLevel.ToJson(); + Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(configWithCustomLogLevelJson, out RuntimeConfig? 
deserializedRuntimeConfig)); + + string serializedConfig = deserializedRuntimeConfig.ToJson(); + + using (JsonDocument parsedDocument = JsonDocument.Parse(serializedConfig)) + { + JsonElement root = parsedDocument.RootElement; + JsonElement runtimeElement = root.GetProperty("runtime"); + + //Validate azure-log-analytics property exists in runtime + JsonElement telemetryElement = runtimeElement.GetProperty("telemetry"); + bool azureLogAnalyticsPropertyExists = telemetryElement.TryGetProperty("azure-log-analytics", out JsonElement azureLogAnalyticsElement); + Assert.AreEqual(expected: true, actual: azureLogAnalyticsPropertyExists); + + //Validate the values inside the azure-log-analytics properties are of expected value + bool enabledExists = azureLogAnalyticsElement.TryGetProperty("enabled", out JsonElement enabledElement); + Assert.AreEqual(expected: expectedExistEnabled, actual: enabledExists); + if (enabledExists) + { + Assert.AreEqual(expectedEnabled, enabledElement.GetBoolean()); + } + + bool dabIdentifierExists = azureLogAnalyticsElement.TryGetProperty("dab-identifier", out JsonElement dabIdentifierElement); + Assert.AreEqual(expected: expectedExistDabIdentifier, actual: dabIdentifierExists); + if (dabIdentifierExists) + { + Assert.AreEqual(expectedDabIdentifier, dabIdentifierElement.GetString()); + } + + bool flushIntSecExists = azureLogAnalyticsElement.TryGetProperty("flush-interval-seconds", out JsonElement flushIntSecElement); + Assert.AreEqual(expected: expectedExistFlushIntSec, actual: flushIntSecExists); + if (flushIntSecExists) + { + Assert.AreEqual(expectedFlushIntSec, flushIntSecElement.GetInt32()); + } + + // Validate auth property exists inside of azure-log-analytics + bool authExists = azureLogAnalyticsElement.TryGetProperty("auth", out JsonElement authElement); + + // Validate the values inside the auth properties are of expected value + if (authExists) + { + bool customTableNameExists = authElement.TryGetProperty("custom-table-name", out 
JsonElement customTableNameElement); + Assert.AreEqual(expectedExistCustomTableName, customTableNameExists); + if (customTableNameExists) + { + Assert.AreEqual(expected: customTableName, customTableNameElement.GetString()); + } + + bool dcrImmutableIdExists = authElement.TryGetProperty("dcr-immutable-id", out JsonElement dcrImmutableIdElement); + Assert.AreEqual(expectedExistDcrImmutableId, dcrImmutableIdExists); + if (dcrImmutableIdExists) + { + Assert.AreEqual(expected: dcrImmutableId, dcrImmutableIdElement.GetString()); + } + + bool dceEndpointExists = authElement.TryGetProperty("dce-endpoint", out JsonElement dceEndpointElement); + Assert.AreEqual(expectedExistDceEndpoint, dceEndpointExists); + if (dceEndpointExists) + { + Assert.AreEqual(expected: dceEndpoint, dceEndpointElement.GetString()); + } + } + } + } + + /// + /// Tests different File Sink values to see if they are serialized and deserialized correctly to the Json config + /// + [DataTestMethod] + [TestCategory(TestCategory.MSSQL)] + [DataRow(true, "/file/path/exists.txt", RollingInterval.Minute, 27, 256, true, "/file/path/exists.txt", RollingInterval.Minute, 27, 256)] + [DataRow(true, "/test/path.csv", RollingInterval.Hour, 10, 3000, true, "/test/path.csv", RollingInterval.Hour, 10, 3000)] + [DataRow(false, "C://absolute/file/path.log", RollingInterval.Month, 2147483647, 2048, false, "C://absolute/file/path.log", RollingInterval.Month, 2147483647, 2048)] + [DataRow(false, "D://absolute/test/path.txt", RollingInterval.Year, 10, 2147483647, false, "D://absolute/test/path.txt", RollingInterval.Year, 10, 2147483647)] + [DataRow(false, "", RollingInterval.Infinite, 5, 512, false, "", RollingInterval.Infinite, 5, 512)] + [DataRow(null, null, null, null, null, false, "/logs/dab-log.txt", RollingInterval.Day, 1, 1048576)] + public void FileSinkSerialization( + bool? enabled, + string? path, + RollingInterval? rollingInterval, + int? retainedFileCountLimit, + int? 
fileSizeLimitBytes, + bool expectedEnabled, + string expectedPath, + RollingInterval expectedRollingInterval, + int expectedRetainedFileCountLimit, + int expectedFileSizeLimitBytes) + { + // Check if file values are expected to exist + bool isEnabledNull = enabled is null; + bool isPathNull = path is null; + bool isRollingIntervalNull = rollingInterval is null; + bool isRetainedFileCountLimitNull = retainedFileCountLimit is null; + bool isFileSizeLimitBytesNull = fileSizeLimitBytes is null; + + FileSinkOptions fileOptions = new(enabled, path, rollingInterval, retainedFileCountLimit, fileSizeLimitBytes); + TelemetryOptions telemetryOptions = new(File: fileOptions); + RuntimeConfig configWithCustomLogLevel = InitializeRuntimeWithTelemetry(telemetryOptions); + string configWithCustomLogLevelJson = configWithCustomLogLevel.ToJson(); + Assert.IsTrue(RuntimeConfigLoader.TryParseConfig(configWithCustomLogLevelJson, out RuntimeConfig? deserializedRuntimeConfig)); + + string serializedConfig = deserializedRuntimeConfig.ToJson(); + + using (JsonDocument parsedDocument = JsonDocument.Parse(serializedConfig)) + { + JsonElement root = parsedDocument.RootElement; + JsonElement runtimeElement = root.GetProperty("runtime"); + + // Validate file property exists in runtime + JsonElement telemetryElement = runtimeElement.GetProperty("telemetry"); + bool filePropertyExists = telemetryElement.TryGetProperty("file", out JsonElement fileElement); + Assert.AreEqual(expected: true, actual: filePropertyExists); + + // Validate the values inside the file properties are of expected value + bool enabledExists = fileElement.TryGetProperty("enabled", out JsonElement enabledElement); + Assert.AreEqual(expected: !isEnabledNull, actual: enabledExists); + if (enabledExists) + { + Assert.AreEqual(expectedEnabled, enabledElement.GetBoolean()); + } + + bool pathExists = fileElement.TryGetProperty("path", out JsonElement pathElement); + Assert.AreEqual(expected: !isPathNull, actual: pathExists); + if 
(pathExists) + { + Assert.AreEqual(expectedPath, pathElement.GetString()); + } + + bool rollingIntervalExists = fileElement.TryGetProperty("rolling-interval", out JsonElement rollingIntervalElement); + Assert.AreEqual(expected: !isRollingIntervalNull, actual: rollingIntervalExists); + if (rollingIntervalExists) + { + Assert.AreEqual(expectedRollingInterval.ToString(), rollingIntervalElement.GetString()); + } + + bool retainedFileCountLimitExists = fileElement.TryGetProperty("retained-file-count-limit", out JsonElement retainedFileCountLimitElement); + Assert.AreEqual(expected: !isRetainedFileCountLimitNull, actual: retainedFileCountLimitExists); + if (retainedFileCountLimitExists) + { + Assert.AreEqual(expectedRetainedFileCountLimit, retainedFileCountLimitElement.GetInt32()); + } + + bool fileSizeLimitBytesExists = fileElement.TryGetProperty("file-size-limit-bytes", out JsonElement fileSizeLimitBytesElement); + Assert.AreEqual(expected: !isFileSizeLimitBytesNull, actual: fileSizeLimitBytesExists); + if (fileSizeLimitBytesExists) + { + Assert.AreEqual(expectedFileSizeLimitBytes, fileSizeLimitBytesElement.GetInt32()); + } + } + } + +#nullable disable + + /// + /// Helper method to create RuntimeConfig with specified Telemetry options + /// + private static RuntimeConfig InitializeRuntimeWithTelemetry(TelemetryOptions telemetryOptions) + { + TestHelper.SetupDatabaseEnvironment(MSSQL_ENVIRONMENT); + + FileSystemRuntimeConfigLoader baseLoader = TestHelper.GetRuntimeConfigLoader(); + baseLoader.TryLoadKnownConfig(out RuntimeConfig baseConfig); + + RuntimeConfig config = new( + Schema: baseConfig.Schema, + DataSource: baseConfig.DataSource, + Runtime: new( + Rest: new(), + GraphQL: new(), + Mcp: new(), + Host: new(null, null), + Telemetry: telemetryOptions + ), + Entities: baseConfig.Entities + ); + + return config; + } + /// /// Validates the OpenAPI documentor behavior when enabling and disabling the global REST endpoint /// for the DAB engine. 
@@ -4084,6 +4318,7 @@ public async Task OpenApi_GlobalEntityRestPath(bool globalRestEnabled, bool expe // file creation function. Entity requiredEntity = new( Source: new("books", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: false), GraphQL: new("book", "books"), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -4145,6 +4380,7 @@ public async Task HealthEndpoint_ValidateContents() // config file creation. Entity requiredEntity = new( Source: new("books", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: false), GraphQL: new("book", "books"), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -4194,6 +4430,7 @@ public async Task OpenApi_EntityLevelRestEndpoint() // Create the entities under test. Entity restEnabledEntity = new( Source: new("books", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new("", "", false), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -4202,6 +4439,7 @@ public async Task OpenApi_EntityLevelRestEndpoint() Entity restDisabledEntity = new( Source: new("publishers", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: false), GraphQL: new("publisher", "publishers", true), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -4267,9 +4505,11 @@ public async Task OpenApi_EntityLevelRestEndpoint() /// did not come across two $after query parameters. This addresses a customer raised issue where two $after /// query parameters were returned by DAB. 
/// - [TestMethod] + [DataTestMethod] + [DataRow(false, DisplayName = "NextLinkRelative is false")] + [DataRow(true, DisplayName = "NextLinkRelative is true")] [TestCategory(TestCategory.MSSQL)] - public async Task ValidateNextLinkUsage() + public async Task ValidateNextLinkUsage(bool isNextLinkRelative) { // Arrange - Setup test server with entity that has >1 record so that results can be paged. // A short cut to using an entity with >100 records is to just include the $first=1 filter @@ -4282,6 +4522,7 @@ public async Task ValidateNextLinkUsage() // file creation function. Entity requiredEntity = new( Source: new("bookmarks", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new(Singular: "", Plural: "", Enabled: false), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -4293,7 +4534,21 @@ public async Task ValidateNextLinkUsage() { ENTITY_NAME, requiredEntity } }; - CreateCustomConfigFile(entityMap, enableGlobalRest: true); + PaginationOptions paginationOptions = null; + + if (isNextLinkRelative) + { + paginationOptions = new PaginationOptions + { + DefaultPageSize = 1, + MaxPageSize = 1, + UserProvidedDefaultPageSize = true, + UserProvidedMaxPageSize = true, + NextLinkRelative = true + }; + } + + CreateCustomConfigFile(entityMap, enableGlobalRest: true, paginationOptions: paginationOptions); string[] args = new[] { @@ -4327,7 +4582,23 @@ public async Task ValidateNextLinkUsage() Dictionary followNextLinkResponseProperties = JsonSerializer.Deserialize>(followNextLinkResponseBody); string followUpResponseNextLink = followNextLinkResponseProperties["nextLink"].ToString(); - Uri nextLink = new(uriString: followUpResponseNextLink); + + // Build the Uri from nextLink string for query parsing. + // If relative, combine with base; if absolute, use as is. 
+ Uri nextLink = null; + if (Uri.IsWellFormedUriString(followUpResponseNextLink, UriKind.Absolute)) + { + nextLink = new(followUpResponseNextLink, UriKind.Absolute); + } + else if (Uri.IsWellFormedUriString(followUpResponseNextLink, UriKind.Relative)) + { + nextLink = new(new("http://localhost:5000"), followUpResponseNextLink); + } + else + { + Assert.Fail($"Invalid nextLink URI format: {followUpResponseNextLink}"); + } + NameValueCollection parsedQueryParameters = HttpUtility.ParseQueryString(query: nextLink.Query); Assert.AreEqual(expected: false, actual: parsedQueryParameters["$after"].Contains(','), message: "nextLink erroneously contained two $after query parameters that were joined by HttpUtility.ParseQueryString(queryString)."); Assert.AreNotEqual(notExpected: nextLinkUri, actual: followUpResponseNextLink, message: "The follow up request erroneously returned the same nextLink value."); @@ -4341,6 +4612,115 @@ public async Task ValidateNextLinkUsage() { Assert.Fail(message: "$after query parameter was not a valid base64 encoded value."); } + + // Validate nextLink is relative if nextLinkRelative is true or false otherwise. + // The assertion is now done directly on the original string, not on the parsed Uri object. 
+ if (isNextLinkRelative) + { + // The server returned a relative URL, so it should NOT start with http/https + Assert.IsFalse(Uri.IsWellFormedUriString(followUpResponseNextLink, UriKind.Absolute), + $"nextLink was expected to be relative but was absolute: {followUpResponseNextLink}"); + Assert.IsTrue(followUpResponseNextLink.StartsWith("/"), + $"nextLink was expected to start with '/' (relative), got: {followUpResponseNextLink}"); + } + else + { + Assert.IsTrue(Uri.IsWellFormedUriString(followUpResponseNextLink, UriKind.Absolute), + $"nextLink was expected to be absolute but was relative: {followUpResponseNextLink}"); + Assert.IsTrue(followUpResponseNextLink.StartsWith("http"), + $"nextLink was expected to start with http/https, got: {followUpResponseNextLink}"); + } + } + + /// + /// Validates X-Forwarded headers for nextLink in Pagination + /// + /// The X-Forwarded-Host value + /// The X-Forwarded-Proto value + [DataTestMethod] + [DataRow("localhost:5000", "http", DisplayName = "Forwarded Host and HTTP Protocol")] + [DataRow("myhost.com", "https", DisplayName = "Forwarded Host and HTTPS Protocol")] + [TestCategory(TestCategory.MSSQL)] + public async Task ValidateNextLinkRespectsXForwardedHostAndProto(string forwardedHost, string forwardedProto) + { + // Arrange - Setup test server with entity that has >1 record so that results can be paged. 
+ const string ENTITY_NAME = "Bookmark"; + + Entity requiredEntity = new( + Source: new("bookmarks", EntitySourceType.Table, null, null), + Fields: null, + Rest: new(Enabled: true), + GraphQL: new(Singular: "", Plural: "", Enabled: false), + Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, + Relationships: null, + Mappings: null); + + Dictionary entityMap = new() + { + { ENTITY_NAME, requiredEntity } + }; + + PaginationOptions paginationOptions = new() + { + DefaultPageSize = 1, + MaxPageSize = 1, + UserProvidedDefaultPageSize = true, + UserProvidedMaxPageSize = true, + NextLinkRelative = false // Absolute nextLink required for this test + }; + + CreateCustomConfigFile(entityMap, enableGlobalRest: true, paginationOptions: paginationOptions); + + string[] args = new[] + { + $"--ConfigFileName={CUSTOM_CONFIG_FILENAME}" + }; + + using TestServer server = new(Program.CreateWebHostBuilder(args)); + using HttpClient client = server.CreateClient(); + + // Setup and send GET request with X-Forwarded-* headers + HttpRequestMessage initialPaginationRequest = new(HttpMethod.Get, $"{RestRuntimeOptions.DEFAULT_PATH}/{ENTITY_NAME}?$first=1"); + initialPaginationRequest.Headers.Add("X-Forwarded-Host", forwardedHost); + initialPaginationRequest.Headers.Add("X-Forwarded-Proto", forwardedProto); + + HttpResponseMessage initialPaginationResponse = await client.SendAsync(initialPaginationRequest); + + // Assert + Assert.AreEqual(HttpStatusCode.OK, initialPaginationResponse.StatusCode, message: "Expected request to succeed."); + + // Process response body and get nextLink + string responseBody = await initialPaginationResponse.Content.ReadAsStringAsync(); + Dictionary responseProperties = JsonSerializer.Deserialize>(responseBody); + string nextLinkUri = responseProperties.ContainsKey("nextLink") ? 
responseProperties["nextLink"].ToString() : null; + + Assert.IsNotNull(nextLinkUri, "nextLink missing in initial response."); + + // Assert that nextLink uses the forwarded host and proto + Uri nextLink = new(nextLinkUri, UriKind.Absolute); + + // Split host/port if present + string expectedHost; + int expectedPort = -1; + string[] hostParts = forwardedHost.Split(':'); + + if (hostParts.Length == 2 && int.TryParse(hostParts[1], out int port)) + { + expectedHost = hostParts[0]; + expectedPort = port; + } + else + { + expectedHost = forwardedHost; + } + + Assert.AreEqual(forwardedProto, nextLink.Scheme, $"nextLink scheme should be '{forwardedProto}' but was '{nextLink.Scheme}'"); + Assert.AreEqual(expectedHost, nextLink.Host, $"nextLink host should be '{expectedHost}' but was '{nextLink.Host}'"); + + if (expectedPort != -1) + { + Assert.AreEqual(expectedPort, nextLink.Port, $"nextLink port should be '{expectedPort}' but was '{nextLink.Port}'"); + } } /// @@ -4385,7 +4765,7 @@ public async Task TestDepthLimitRestrictionOnGraphQLInNonHostedMode( DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restOptions: new()); + RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restOptions: new(), mcpOptions: new()); const string CUSTOM_CONFIG = "custom-config.json"; File.WriteAllText(CUSTOM_CONFIG, configuration.ToJson()); @@ -4484,7 +4864,7 @@ public async Task TestGraphQLIntrospectionQueriesAreNotImpactedByDepthLimit() DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restOptions: new()); + RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restOptions: new(), mcpOptions: 
new()); const string CUSTOM_CONFIG = "custom-config.json"; File.WriteAllText(CUSTOM_CONFIG, configuration.ToJson()); @@ -4567,7 +4947,7 @@ public async Task TestNoDepthLimitOnGrahQLInNonHostedMode(int? depthLimit) DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); - RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restOptions: new()); + RuntimeConfig configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions, restOptions: new(), mcpOptions: new()); const string CUSTOM_CONFIG = "custom-config.json"; File.WriteAllText(CUSTOM_CONFIG, configuration.ToJson()); @@ -4616,22 +4996,33 @@ public async Task TestNoDepthLimitOnGrahQLInNonHostedMode(int? depthLimit) /// /// Collection of entityName -> Entity object. /// flag to enable or disabled REST globally. - private static void CreateCustomConfigFile(Dictionary entityMap, bool enableGlobalRest = true) + /// Optional pagination options to use in the runtime config. + private static void CreateCustomConfigFile(Dictionary entityMap, bool enableGlobalRest = true, PaginationOptions paginationOptions = null) { DataSource dataSource = new( DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); + HostOptions hostOptions = new(Cors: null, Authentication: new() { Provider = nameof(EasyAuthType.StaticWebApps) }); + RuntimeOptions runtime = paginationOptions != null + ? 
new( + Rest: new(Enabled: enableGlobalRest), + GraphQL: new(Enabled: true), + Mcp: new(Enabled: true), + Host: hostOptions, + Pagination: paginationOptions) + : new( + Rest: new(Enabled: enableGlobalRest), + GraphQL: new(Enabled: true), + Mcp: new(Enabled: true), + Host: hostOptions); + RuntimeConfig runtimeConfig = new( Schema: string.Empty, DataSource: dataSource, - Runtime: new( - Rest: new(Enabled: enableGlobalRest), - GraphQL: new(Enabled: true), - Host: hostOptions - ), + Runtime: runtime, Entities: new(entityMap)); File.WriteAllText( @@ -4949,6 +5340,8 @@ public static RuntimeConfig InitialzieRuntimeConfigForMultipleCreateTests(bool i RestRuntimeOptions restRuntimeOptions = new(Enabled: false); + McpRuntimeOptions mcpRuntimeOptions = new(Enabled: false); + DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); EntityAction createAction = new( @@ -4972,6 +5365,7 @@ public static RuntimeConfig InitialzieRuntimeConfigForMultipleCreateTests(bool i LinkingTargetFields: null); Entity bookEntity = new(Source: new("books", EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "book", Plural: "books"), Permissions: permissions, @@ -4995,6 +5389,7 @@ public static RuntimeConfig InitialzieRuntimeConfigForMultipleCreateTests(bool i Entity publisherEntity = new( Source: new("publishers", EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "publisher", Plural: "publishers"), Permissions: permissions, @@ -5007,7 +5402,7 @@ public static RuntimeConfig InitialzieRuntimeConfigForMultipleCreateTests(bool i RuntimeConfig runtimeConfig = new(Schema: "IntegrationTestMinimalSchema", DataSource: dataSource, - Runtime: new(restRuntimeOptions, graphqlOptions, Host: new(Cors: null, Authentication: authenticationOptions, Mode: HostMode.Development), Cache: null), + Runtime: new(restRuntimeOptions, graphqlOptions, mcpRuntimeOptions, 
Host: new(Cors: null, Authentication: authenticationOptions, Mode: HostMode.Development), Cache: null), Entities: new(entityMap)); return runtimeConfig; } @@ -5020,6 +5415,7 @@ public static RuntimeConfig InitMinimalRuntimeConfig( DataSource dataSource, GraphQLRuntimeOptions graphqlOptions, RestRuntimeOptions restOptions, + McpRuntimeOptions mcpOptions, Entity entity = null, string entityName = null, RuntimeCacheOptions cacheOptions = null @@ -5027,6 +5423,7 @@ public static RuntimeConfig InitMinimalRuntimeConfig( { entity ??= new( Source: new("books", EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "book", Plural: "books"), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -5044,6 +5441,7 @@ public static RuntimeConfig InitMinimalRuntimeConfig( // Adding an entity with only Authorized Access Entity anotherEntity = new( Source: new("publishers", EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "publisher", Plural: "publishers"), Permissions: new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_AUTHENTICATED) }, @@ -5057,7 +5455,7 @@ public static RuntimeConfig InitMinimalRuntimeConfig( return new( Schema: "IntegrationTestMinimalSchema", DataSource: dataSource, - Runtime: new(restOptions, graphqlOptions, + Runtime: new(restOptions, graphqlOptions, mcpOptions, Host: new(Cors: null, Authentication: authenticationOptions, Mode: HostMode.Development), Cache: cacheOptions ), @@ -5133,6 +5531,7 @@ private static RuntimeConfig CreateBasicRuntimeConfigWithNoEntity( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) @@ -5150,6 +5549,7 @@ private static RuntimeConfig CreateBasicRuntimeConfigWithSingleEntityAndAuthOpti { Entity entity = new( Source: new("books", EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "book", Plural: "books"), Permissions: 
new[] { GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -5170,6 +5570,7 @@ private static RuntimeConfig CreateBasicRuntimeConfigWithSingleEntityAndAuthOpti Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, Authentication: authenticationOptions) ), Entities: new(entityMap) diff --git a/src/Service.Tests/Configuration/HealthEndpointRolesTests.cs b/src/Service.Tests/Configuration/HealthEndpointRolesTests.cs index a492f2c167..9ad36bfa15 100644 --- a/src/Service.Tests/Configuration/HealthEndpointRolesTests.cs +++ b/src/Service.Tests/Configuration/HealthEndpointRolesTests.cs @@ -49,6 +49,7 @@ public async Task ComprehensiveHealthEndpoint_RolesTests(string role, HostMode h // config file creation. Entity requiredEntity = new( Health: new(enabled: true), + Fields: null, Source: new("books", EntitySourceType.Table, null, null), Rest: new(Enabled: true), GraphQL: new("book", "books", true), @@ -127,6 +128,7 @@ private static void CreateCustomConfigFile(Dictionary entityMap, Health: new(enabled: true, roles: role != null ? 
new HashSet { role } : null), Rest: new(Enabled: true), GraphQL: new(Enabled: true), + Mcp: new(Enabled: true), Host: hostOptions ), Entities: new(entityMap)); diff --git a/src/Service.Tests/Configuration/HealthEndpointTests.cs b/src/Service.Tests/Configuration/HealthEndpointTests.cs index c765a837b7..1eac7416e3 100644 --- a/src/Service.Tests/Configuration/HealthEndpointTests.cs +++ b/src/Service.Tests/Configuration/HealthEndpointTests.cs @@ -53,19 +53,36 @@ public void CleanupAfterEachTest() /// [TestMethod] [TestCategory(TestCategory.MSSQL)] - [DataRow(true, true, true, true, true, true, true, DisplayName = "Validate Health Report all enabled.")] - [DataRow(false, true, true, true, true, true, true, DisplayName = "Validate when Comprehensive Health Report is disabled")] - [DataRow(true, true, true, false, true, true, true, DisplayName = "Validate Health Report when data-source health is disabled")] - [DataRow(true, true, true, true, false, true, true, DisplayName = "Validate Health Report when entity health is disabled")] - [DataRow(true, false, true, true, true, true, true, DisplayName = "Validate Health Report when global rest health is disabled")] - [DataRow(true, true, true, true, true, false, true, DisplayName = "Validate Health Report when entity rest health is disabled")] - [DataRow(true, true, false, true, true, true, true, DisplayName = "Validate Health Report when global graphql health is disabled")] - [DataRow(true, true, true, true, true, true, false, DisplayName = "Validate Health Report when entity graphql health is disabled")] - public async Task ComprehensiveHealthEndpoint_ValidateContents(bool enableGlobalHealth, bool enableGlobalRest, bool enableGlobalGraphql, bool enableDatasourceHealth, bool enableEntityHealth, bool enableEntityRest, bool enableEntityGraphQL) + [DataRow(true, true, true, true, true, true, true, true, DisplayName = "Validate Health Report all enabled.")] + [DataRow(false, true, true, true, true, true, true, true, DisplayName = 
"Validate when Comprehensive Health Report is disabled")] + [DataRow(true, true, true, false, true, true, true, true, DisplayName = "Validate Health Report when global MCP health is disabled")] + [DataRow(true, true, true, true, false, true, true, true, DisplayName = "Validate Health Report when data-source health is disabled")] + [DataRow(true, true, true, true, true, false, true, true, DisplayName = "Validate Health Report when entity health is disabled")] + [DataRow(true, false, true, true, true, true, true, true, DisplayName = "Validate Health Report when global REST health is disabled")] + [DataRow(true, true, false, true, true, true, true, true, DisplayName = "Validate Health Report when global GraphQL health is disabled")] + [DataRow(true, true, true, true, true, true, false, true, DisplayName = "Validate Health Report when entity REST health is disabled")] + [DataRow(true, true, true, true, true, true, true, false, DisplayName = "Validate Health Report when entity GraphQL health is disabled")] + public async Task ComprehensiveHealthEndpoint_ValidateContents( + bool enableGlobalHealth, + bool enableGlobalRest, + bool enableGlobalGraphql, + bool enableGlobalMcp, + bool enableDatasourceHealth, + bool enableEntityHealth, + bool enableEntityRest, + bool enableEntityGraphQL) { - // Arrange - // Create a mock entity map with a single entity for testing - RuntimeConfig runtimeConfig = SetupCustomConfigFile(enableGlobalHealth, enableGlobalRest, enableGlobalGraphql, enableDatasourceHealth, enableEntityHealth, enableEntityRest, enableEntityGraphQL); + // The body remains exactly the same except passing enableGlobalMcp + RuntimeConfig runtimeConfig = SetupCustomConfigFile( + enableGlobalHealth, + enableGlobalRest, + enableGlobalGraphql, + enableGlobalMcp, + enableDatasourceHealth, + enableEntityHealth, + enableEntityRest, + enableEntityGraphQL); + WriteToCustomConfigFile(runtimeConfig); string[] args = new[] @@ -90,7 +107,7 @@ public async Task 
ComprehensiveHealthEndpoint_ValidateContents(bool enableGlobal Assert.AreEqual(expected: HttpStatusCode.OK, actual: response.StatusCode, message: "Received unexpected HTTP code from health check endpoint."); ValidateBasicDetailsHealthCheckResponse(responseProperties); - ValidateConfigurationDetailsHealthCheckResponse(responseProperties, enableGlobalRest, enableGlobalGraphql); + ValidateConfigurationDetailsHealthCheckResponse(responseProperties, enableGlobalRest, enableGlobalGraphql, enableGlobalMcp); ValidateIfAttributePresentInResponse(responseProperties, enableDatasourceHealth, HealthCheckConstants.DATASOURCE); ValidateIfAttributePresentInResponse(responseProperties, enableEntityHealth, HealthCheckConstants.ENDPOINT); if (enableEntityHealth) @@ -110,7 +127,7 @@ public async Task ComprehensiveHealthEndpoint_ValidateContents(bool enableGlobal public async Task TestHealthCheckRestResponseAsync() { // Arrange - RuntimeConfig runtimeConfig = SetupCustomConfigFile(true, true, true, true, true, true, true); + RuntimeConfig runtimeConfig = SetupCustomConfigFile(true, true, true, true, true, true, true, true); HttpUtilities httpUtilities = SetupRestTest(runtimeConfig); // Act @@ -139,7 +156,7 @@ public async Task TestHealthCheckRestResponseAsync() public async Task TestFailureHealthCheckRestResponseAsync() { // Arrange - RuntimeConfig runtimeConfig = SetupCustomConfigFile(true, true, true, true, true, true, true); + RuntimeConfig runtimeConfig = SetupCustomConfigFile(true, true, true, true, true, true, true, true); HttpUtilities httpUtilities = SetupGraphQLTest(runtimeConfig, HttpStatusCode.BadRequest); // Act @@ -167,7 +184,7 @@ public async Task TestFailureHealthCheckRestResponseAsync() public async Task TestHealthCheckGraphQLResponseAsync() { // Arrange - RuntimeConfig runtimeConfig = SetupCustomConfigFile(true, true, true, true, true, true, true); + RuntimeConfig runtimeConfig = SetupCustomConfigFile(true, true, true, true, true, true, true, true); HttpUtilities 
httpUtilities = SetupGraphQLTest(runtimeConfig); // Act @@ -191,7 +208,7 @@ public async Task TestHealthCheckGraphQLResponseAsync() public async Task TestFailureHealthCheckGraphQLResponseAsync() { // Arrange - RuntimeConfig runtimeConfig = SetupCustomConfigFile(true, true, true, true, true, true, true); + RuntimeConfig runtimeConfig = SetupCustomConfigFile(true, true, true, true, true, true, true, true); HttpUtilities httpUtilities = SetupGraphQLTest(runtimeConfig, HttpStatusCode.InternalServerError); // Act @@ -206,6 +223,46 @@ public async Task TestFailureHealthCheckGraphQLResponseAsync() Assert.IsNotNull(errorMessageFromGraphQL); } + /// + /// Tests the serialization behavior of for the property." + /// + /// This test ensures that the JSON serialization behavior of adheres to the expected behavior where default values are omitted from + /// the output. + [TestMethod] + public void MaxQueryParallelismSerializationDependsOnUserInput() + { + // Case 1: default value NOT explicitly provided => should NOT serialize + RuntimeHealthCheckConfig configWithDefault = new( + enabled: true, + roles: null, + cacheTtlSeconds: null, + maxQueryParallelism: null // implicit default + ); + + Assert.IsFalse(configWithDefault.UserProvidedMaxQueryParallelism, "UserProvidedMaxQueryParallelism should be false for default value."); + + // Case 2: default value EXPLICITLY provided => should serialize + RuntimeHealthCheckConfig configWithExplicitDefault = new( + enabled: true, + roles: null, + cacheTtlSeconds: null, + maxQueryParallelism: RuntimeHealthCheckConfig.DEFAULT_MAX_QUERY_PARALLELISM + ); + + Assert.IsTrue(configWithExplicitDefault.UserProvidedMaxQueryParallelism, "UserProvidedMaxQueryParallelism should be true for explicit default value."); + + // Case 3: non-default value => should serialize + RuntimeHealthCheckConfig configWithCustomValue = new( + enabled: true, + roles: null, + cacheTtlSeconds: null, + maxQueryParallelism: 
RuntimeHealthCheckConfig.DEFAULT_MAX_QUERY_PARALLELISM + 1 + ); + + Assert.IsTrue(configWithCustomValue.UserProvidedMaxQueryParallelism, "UserProvidedMaxQueryParallelism should be true for custom value."); + } + #region Helper Methods private static HttpUtilities SetupRestTest(RuntimeConfig runtimeConfig, HttpStatusCode httpStatusCode = HttpStatusCode.OK) { @@ -387,7 +444,7 @@ private static void ValidateConfigurationIsCorrectFlag(Dictionary responseProperties, bool enableGlobalRest, bool enableGlobalGraphQL) + private static void ValidateConfigurationDetailsHealthCheckResponse(Dictionary responseProperties, bool enableGlobalRest, bool enableGlobalGraphQL, bool enableGlobalMcp) { if (responseProperties.TryGetValue("configuration", out JsonElement configElement) && configElement.ValueKind == JsonValueKind.Object) { @@ -403,6 +460,8 @@ private static void ValidateConfigurationDetailsHealthCheckResponse(Dictionary @@ -480,7 +540,7 @@ private static RuntimeConfig SetupCustomConfigFile(bool enableGlobalHealth, bool /// /// Collection of entityName -> Entity object. /// flag to enable or disabled REST globally. 
- private static RuntimeConfig CreateRuntimeConfig(Dictionary entityMap, bool enableGlobalRest = true, bool enableGlobalGraphql = true, bool enableGlobalHealth = true, bool enableDatasourceHealth = true, HostMode hostMode = HostMode.Production) + private static RuntimeConfig CreateRuntimeConfig(Dictionary entityMap, bool enableGlobalRest = true, bool enableGlobalGraphql = true, bool enabledGlobalMcp = true, bool enableGlobalHealth = true, bool enableDatasourceHealth = true, HostMode hostMode = HostMode.Production) { DataSource dataSource = new( DatabaseType.MSSQL, @@ -496,6 +556,7 @@ private static RuntimeConfig CreateRuntimeConfig(Dictionary enti Health: new(enabled: enableGlobalHealth), Rest: new(Enabled: enableGlobalRest), GraphQL: new(Enabled: enableGlobalGraphql), + Mcp: new(Enabled: enabledGlobalMcp), Host: hostOptions ), Entities: new(entityMap)); diff --git a/src/Service.Tests/Configuration/HotReload/AuthorizationResolverHotReloadTests.cs b/src/Service.Tests/Configuration/HotReload/AuthorizationResolverHotReloadTests.cs index cc397e0ca0..2175c8ac83 100644 --- a/src/Service.Tests/Configuration/HotReload/AuthorizationResolverHotReloadTests.cs +++ b/src/Service.Tests/Configuration/HotReload/AuthorizationResolverHotReloadTests.cs @@ -65,6 +65,7 @@ public async Task ValidateAuthorizationResolver_HotReload() Entity requiredEntityHR = new( Source: new("publishers", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new(Singular: "", Plural: "", Enabled: false), Permissions: new[] { permissionsHR }, @@ -131,6 +132,7 @@ private static void CreateCustomConfigFile(string fileName, Dictionary + [Ignore] [TestCategory(MSSQL_ENVIRONMENT)] [TestMethod] public void HotReloadValidationFail() diff --git a/src/Service.Tests/Configuration/Telemetry/AzureLogAnalyticsTests.cs b/src/Service.Tests/Configuration/Telemetry/AzureLogAnalyticsTests.cs new file mode 100644 index 0000000000..db6b58681b --- /dev/null +++ 
b/src/Service.Tests/Configuration/Telemetry/AzureLogAnalyticsTests.cs @@ -0,0 +1,173 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.DataApiBuilder.Service.Telemetry; +using Azure.Identity; +using Azure.Monitor.Ingestion; +using Microsoft.AspNetCore.TestHost; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; +using static Azure.DataApiBuilder.Service.Tests.Configuration.ConfigurationTests; + +namespace Azure.DataApiBuilder.Service.Tests.Configuration.Telemetry; + +/// +/// Contains tests for Azure Log Analytics functionality. +/// +[TestClass, TestCategory(TestCategory.MSSQL)] +public class AzureLogAnalyticsTests +{ + public TestContext TestContext { get; set; } + + private const string CONFIG_WITH_TELEMETRY = "dab-azure-log-analytics-test-config.json"; + private const string CONFIG_WITHOUT_TELEMETRY = "dab-no-azure-log-analytics-test-config.json"; + private static RuntimeConfig _configuration; + + /// + /// This is a helper function that creates runtime config file with specified telemetry options. + /// + /// Name of the config file to be created. + /// Whether telemetry is enabled or not. + /// Telemetry connection string. 
+ public static void SetUpTelemetryInConfig(string configFileName, bool isLogAnalyticsEnabled, string logAnalyticsCustomTable, string logAnalyticsDcrImmutableId, string logAnalyticsDceEndpoint) + { + DataSource dataSource = new(DatabaseType.MSSQL, + GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); + + _configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions: new(), restOptions: new(), mcpOptions: new()); + + TelemetryOptions _testTelemetryOptions = new(AzureLogAnalytics: new AzureLogAnalyticsOptions(isLogAnalyticsEnabled, new AzureLogAnalyticsAuthOptions(logAnalyticsCustomTable, logAnalyticsDcrImmutableId, logAnalyticsDceEndpoint))); + _configuration = _configuration with { Runtime = _configuration.Runtime with { Telemetry = _testTelemetryOptions } }; + + File.WriteAllText(configFileName, _configuration.ToJson()); + } + + /// + /// Cleans up the test environment by deleting the runtime config with telemetry options. + /// + [TestCleanup] + public void CleanUpTelemetryConfig() + { + if (File.Exists(CONFIG_WITH_TELEMETRY)) + { + File.Delete(CONFIG_WITH_TELEMETRY); + } + + if (File.Exists(CONFIG_WITHOUT_TELEMETRY)) + { + File.Delete(CONFIG_WITHOUT_TELEMETRY); + } + } + + /// + /// Tests if the services are correctly enabled for Azure Log Analytics. 
+ /// + [TestMethod] + public void TestAzureLogAnalyticsServicesEnabled() + { + // Arrange + SetUpTelemetryInConfig(CONFIG_WITH_TELEMETRY, true, "Custom-Table-Name-Test", "DCR-Immutable-ID-Test", "https://fake.dce.endpoint"); + + string[] args = new[] + { + $"--ConfigFileName={CONFIG_WITH_TELEMETRY}" + }; + using TestServer server = new(Program.CreateWebHostBuilder(args)); + + // Additional assertions to check if AzureLogAnalytics is enabled correctly in services + IServiceProvider serviceProvider = server.Services; + AzureLogAnalyticsCustomLogCollector customLogCollector = (AzureLogAnalyticsCustomLogCollector)serviceProvider.GetService(); + AzureLogAnalyticsFlusherService flusherService = serviceProvider.GetService(); + IEnumerable loggerProvidersServices = serviceProvider.GetServices(); + AzureLogAnalyticsLoggerProvider loggerProvider = loggerProvidersServices.OfType().FirstOrDefault(); + + // If customLogCollector, flusherService, and loggerProvider are not null when AzureLogAnalytics is enabled + Assert.IsNotNull(customLogCollector, "AzureLogAnalyticsCustomLogCollector should be registered."); + Assert.IsNotNull(flusherService, "AzureLogAnalyticsFlusherService should be registered."); + Assert.IsNotNull(loggerProvider, "AzureLogAnalyticsLoggerProvider should be registered."); + } + + /// + /// Tests if the logs are flushed correctly when Azure Log Analytics is enabled. 
+ /// + [DataTestMethod] + [DataRow("Information Test Message", LogLevel.Information)] + [DataRow("Trace Test Message", LogLevel.Trace)] + [DataRow("Warning Test Message", LogLevel.Warning)] + public async Task TestAzureLogAnalyticsFlushServiceSucceed(string message, LogLevel logLevel) + { + // Arrange + CancellationTokenSource tokenSource = new(); + AzureLogAnalyticsOptions azureLogAnalyticsOptions = new(true, new AzureLogAnalyticsAuthOptions("custom-table-name-test", "dcr-immutable-id-test", "https://fake.dce.endpoint"), "DABLogs", 1); + CustomLogsIngestionClient customClient = new(azureLogAnalyticsOptions.Auth.DceEndpoint); + AzureLogAnalyticsCustomLogCollector customLogCollector = new(); + + ILoggerFactory loggerFactory = new LoggerFactory(); + ILogger logger = loggerFactory.CreateLogger(); + AzureLogAnalyticsFlusherService flusherService = new(azureLogAnalyticsOptions, customLogCollector, customClient, logger); + + // Act + await customLogCollector.LogAsync(message, logLevel); + + _ = Task.Run(() => flusherService.StartAsync(tokenSource.Token)); + + await Task.Delay(2000); + + // Assert + AzureLogAnalyticsLogs actualLog = customClient.LogAnalyticsLogs[0]; + Assert.AreEqual(logLevel.ToString(), actualLog.LogLevel); + Assert.AreEqual(message, actualLog.Message); + } + + /// + /// Tests if the services are correctly disabled for Azure Log Analytics. 
+ /// + [TestMethod] + public void TestAzureLogAnalyticsServicesDisabled() + { + // Arrange + SetUpTelemetryInConfig(CONFIG_WITHOUT_TELEMETRY, false, null, null, null); + + string[] args = new[] + { + $"--ConfigFileName={CONFIG_WITHOUT_TELEMETRY}" + }; + using TestServer server = new(Program.CreateWebHostBuilder(args)); + + // Additional assertions to check if Azure Log Analytics is disabled correctly in services + IServiceProvider serviceProvider = server.Services; + AzureLogAnalyticsFlusherService flusherService = serviceProvider.GetService(); + AzureLogAnalyticsLoggerProvider loggerProvider = serviceProvider.GetService(); + + // If flusherService and loggerProvider are null, Azure Log Analytics is disabled + Assert.IsNull(flusherService, "AzureLogAnalyticsFlusherService should not be registered."); + Assert.IsNull(loggerProvider, "AzureLogAnalyticsLoggerProvider should not be registered."); + } + + /// + /// Custom logs ingestion to test that all the logs are being sent correctly to Azure Log Analytics + /// + private class CustomLogsIngestionClient : LogsIngestionClient + { + public List LogAnalyticsLogs { get; } = new(); + + public CustomLogsIngestionClient(string dceEndpoint) : base(new Uri(dceEndpoint), new DefaultAzureCredential()) { } // CodeQL [SM05137] DefaultAzureCredential will use Managed Identity if available or fallback to default. 
+ + public async override Task UploadAsync(string ruleId, string streamName, IEnumerable logs, LogsUploadOptions options = null, CancellationToken cancellationToken = default) + { + LogAnalyticsLogs.AddRange(logs.Cast()); + + Response mockResponse = Response.FromValue(Mock.Of(), Mock.Of()); + return await Task.FromResult(mockResponse); + } + } +} diff --git a/src/Service.Tests/Configuration/Telemetry/FileSinkTests.cs b/src/Service.Tests/Configuration/Telemetry/FileSinkTests.cs new file mode 100644 index 0000000000..7759ced58d --- /dev/null +++ b/src/Service.Tests/Configuration/Telemetry/FileSinkTests.cs @@ -0,0 +1,163 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using System.IO; +using System.Net.Http; +using System.Threading.Tasks; +using Azure.DataApiBuilder.Config.ObjectModel; +using Microsoft.AspNetCore.TestHost; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Serilog; +using Serilog.Core; +using static Azure.DataApiBuilder.Service.Tests.Configuration.ConfigurationTests; + +namespace Azure.DataApiBuilder.Service.Tests.Configuration.Telemetry; + +/// +/// Contains tests for File Sink functionality. +/// +[TestClass, TestCategory(TestCategory.MSSQL)] +public class FileSinkTests +{ + public TestContext TestContext { get; set; } + + private const string CONFIG_WITH_TELEMETRY = "dab-file-sink-test-config.json"; + private const string CONFIG_WITHOUT_TELEMETRY = "dab-no-file-sink-test-config.json"; + private static RuntimeConfig _configuration; + + /// + /// This is a helper function that creates runtime config file with specified telemetry options. + /// + /// Name of the config file to be created. + /// Whether File Sink is enabled or not. + /// Path where logs will be sent to. + /// Time it takes for logs to roll over to next file. 
+ private static void SetUpTelemetryInConfig(string configFileName, bool isFileSinkEnabled, string fileSinkPath, RollingInterval? rollingInterval = null) + { + DataSource dataSource = new(DatabaseType.MSSQL, + GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); + + _configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions: new(), restOptions: new(), mcpOptions: new()); + + TelemetryOptions _testTelemetryOptions = new(File: new FileSinkOptions(isFileSinkEnabled, fileSinkPath, rollingInterval)); + _configuration = _configuration with { Runtime = _configuration.Runtime with { Telemetry = _testTelemetryOptions } }; + + File.WriteAllText(configFileName, _configuration.ToJson()); + } + + /// + /// Cleans up the test environment by deleting the runtime config with telemetry options. + /// + [TestCleanup] + public void CleanUpTelemetryConfig() + { + if (File.Exists(CONFIG_WITH_TELEMETRY)) + { + File.Delete(CONFIG_WITH_TELEMETRY); + } + + if (File.Exists(CONFIG_WITHOUT_TELEMETRY)) + { + File.Delete(CONFIG_WITHOUT_TELEMETRY); + } + } + + /// + /// Tests if the services are correctly enabled for File Sink. 
+ /// + [TestMethod] + public void TestFileSinkServicesEnabled() + { + // Arrange + SetUpTelemetryInConfig(CONFIG_WITH_TELEMETRY, true, "/dab-log-test/file-sink-file.txt"); + + string[] args = new[] + { + $"--ConfigFileName={CONFIG_WITH_TELEMETRY}" + }; + using TestServer server = new(Program.CreateWebHostBuilder(args)); + + // Additional assertions to check if File Sink is enabled correctly in services + IServiceProvider serviceProvider = server.Services; + LoggerConfiguration serilogLoggerConfiguration = serviceProvider.GetService(); + Logger serilogLogger = serviceProvider.GetService(); + + // If serilogLoggerConfiguration and serilogLogger are not null, File Sink is enabled + Assert.IsNotNull(serilogLoggerConfiguration, "LoggerConfiguration for Serilog should be registered."); + Assert.IsNotNull(serilogLogger, "Logger for Serilog should be registered."); + } + + /// + /// Tests if the logs are flushed to the proper path when File Sink is enabled. + /// + /// + /// Tests if the logs are flushed to the proper path when File Sink is enabled. 
+ /// + [DataTestMethod] + [DataRow("file-sink-test-file.txt")] + [DataRow("file-sink-test-file.log")] + [DataRow("file-sink-test-file.csv")] + public async Task TestFileSinkSucceed(string fileName) + { + // Arrange + SetUpTelemetryInConfig(CONFIG_WITH_TELEMETRY, true, fileName, RollingInterval.Infinite); + + string[] args = new[] + { + $"--ConfigFileName={CONFIG_WITH_TELEMETRY}" + }; + using TestServer server = new(Program.CreateWebHostBuilder(args)); + + // Act + using (HttpClient client = server.CreateClient()) + { + HttpRequestMessage restRequest = new(HttpMethod.Get, "/api/Book"); + await client.SendAsync(restRequest); + } + + server.Dispose(); + + // Assert + Assert.IsTrue(File.Exists(fileName)); + + bool containsInfo = false; + string[] allLines = File.ReadAllLines(fileName); + foreach (string line in allLines) + { + containsInfo = line.Contains("INF"); + if (containsInfo) + { + break; + } + } + + Assert.IsTrue(containsInfo); + } + + /// + /// Tests if the services are correctly disabled for File Sink. 
+ /// + [TestMethod] + public void TestFileSinkServicesDisabled() + { + // Arrange + SetUpTelemetryInConfig(CONFIG_WITHOUT_TELEMETRY, false, null); + + string[] args = new[] + { + $"--ConfigFileName={CONFIG_WITHOUT_TELEMETRY}" + }; + using TestServer server = new(Program.CreateWebHostBuilder(args)); + + // Additional assertions to check if File Sink is enabled correctly in services + IServiceProvider serviceProvider = server.Services; + LoggerConfiguration serilogLoggerConfiguration = serviceProvider.GetService(); + Logger serilogLogger = serviceProvider.GetService(); + + // If serilogLoggerConfiguration and serilogLogger are null, File Sink is disabled + Assert.IsNull(serilogLoggerConfiguration, "LoggerConfiguration for Serilog should not be registered."); + Assert.IsNull(serilogLogger, "Logger for Serilog should not be registered."); + } +} diff --git a/src/Service.Tests/Configuration/OpenTelemetryTests.cs b/src/Service.Tests/Configuration/Telemetry/OpenTelemetryTests.cs similarity index 97% rename from src/Service.Tests/Configuration/OpenTelemetryTests.cs rename to src/Service.Tests/Configuration/Telemetry/OpenTelemetryTests.cs index 166dc7b001..df213c6f48 100644 --- a/src/Service.Tests/Configuration/OpenTelemetryTests.cs +++ b/src/Service.Tests/Configuration/Telemetry/OpenTelemetryTests.cs @@ -12,7 +12,7 @@ using OpenTelemetry.Trace; using static Azure.DataApiBuilder.Service.Tests.Configuration.ConfigurationTests; -namespace Azure.DataApiBuilder.Service.Tests.Configuration; +namespace Azure.DataApiBuilder.Service.Tests.Configuration.Telemetry; /// /// Contains tests for OpenTelemetry functionality. 
@@ -37,7 +37,7 @@ public static void SetUpTelemetryInConfig(string configFileName, bool isOtelEnab DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); - _configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions: new(), restOptions: new()); + _configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions: new(), restOptions: new(), mcpOptions: new()); TelemetryOptions _testTelemetryOptions = new(OpenTelemetry: new OpenTelemetryOptions(isOtelEnabled, otelEndpoint, otelHeaders, otlpExportProtocol, "TestServiceName")); _configuration = _configuration with { Runtime = _configuration.Runtime with { Telemetry = _testTelemetryOptions } }; diff --git a/src/Service.Tests/Configuration/TelemetryTests.cs b/src/Service.Tests/Configuration/Telemetry/TelemetryTests.cs similarity index 98% rename from src/Service.Tests/Configuration/TelemetryTests.cs rename to src/Service.Tests/Configuration/Telemetry/TelemetryTests.cs index 3b49ddae5d..996e79b805 100644 --- a/src/Service.Tests/Configuration/TelemetryTests.cs +++ b/src/Service.Tests/Configuration/Telemetry/TelemetryTests.cs @@ -17,7 +17,7 @@ using Microsoft.VisualStudio.TestTools.UnitTesting; using static Azure.DataApiBuilder.Service.Tests.Configuration.ConfigurationTests; -namespace Azure.DataApiBuilder.Service.Tests.Configuration; +namespace Azure.DataApiBuilder.Service.Tests.Configuration.Telemetry; /// /// Contains tests for telemetry functionality. 
@@ -43,7 +43,7 @@ public static void SetUpTelemetryInConfig(string configFileName, bool isTelemetr DataSource dataSource = new(DatabaseType.MSSQL, GetConnectionStringFromEnvironmentConfig(environment: TestCategory.MSSQL), Options: null); - _configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions: new(), restOptions: new()); + _configuration = InitMinimalRuntimeConfig(dataSource, graphqlOptions: new(), restOptions: new(), mcpOptions: new()); TelemetryOptions _testTelemetryOptions = new(new ApplicationInsightsOptions(isTelemetryEnabled, telemetryConnectionString)); _configuration = _configuration with { Runtime = _configuration.Runtime with { Telemetry = _testTelemetryOptions } }; @@ -168,7 +168,7 @@ public async Task TestNoTelemetryItemsSentWhenDisabled_NonHostedScenario(bool is List telemetryItems = ((CustomTelemetryChannel)telemetryChannel).GetTelemetryItems(); // Assert that we are not sending any Traces/Requests/Exceptions to Telemetry - Assert.IsTrue(EnumerableUtilities.IsNullOrEmpty(telemetryItems)); + Assert.IsTrue(telemetryItems.IsNullOrEmpty()); } /// diff --git a/src/Service.Tests/CosmosTests/MutationTests.cs b/src/Service.Tests/CosmosTests/MutationTests.cs index 23e23a9ec7..de931dcf22 100644 --- a/src/Service.Tests/CosmosTests/MutationTests.cs +++ b/src/Service.Tests/CosmosTests/MutationTests.cs @@ -513,6 +513,7 @@ type Planet @model(name:""Planet"") { }"; GraphQLRuntimeOptions graphqlOptions = new(Enabled: true); RestRuntimeOptions restRuntimeOptions = new(Enabled: false); + McpRuntimeOptions mcpRuntimeOptions = new(Enabled: false); Dictionary dbOptions = new(); HyphenatedNamingPolicy namingPolicy = new(); @@ -541,6 +542,7 @@ type Planet @model(name:""Planet"") { new EntityPermission( Role: AuthorizationResolver.ROLE_AUTHENTICATED , Actions: new[] { readAction, createAction, deleteAction })}; Entity entity = new(Source: new($"graphqldb.{_containerName}", null, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "Planet", Plural: 
"Planets"), Permissions: permissions, @@ -548,7 +550,7 @@ type Planet @model(name:""Planet"") { Mappings: null); string entityName = "Planet"; - RuntimeConfig configuration = ConfigurationTests.InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, entity, entityName); + RuntimeConfig configuration = ConfigurationTests.InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions, entity, entityName); const string CUSTOM_CONFIG = "custom-config.json"; const string CUSTOM_SCHEMA = "custom-schema.gql"; @@ -642,6 +644,7 @@ type Planet @model(name:""Planet"") { }"; GraphQLRuntimeOptions graphqlOptions = new(Enabled: true); RestRuntimeOptions restRuntimeOptions = new(Enabled: false); + McpRuntimeOptions mcpRuntimeOptions = new(Enabled: false); Dictionary dbOptions = new(); HyphenatedNamingPolicy namingPolicy = new(); @@ -670,6 +673,7 @@ type Planet @model(name:""Planet"") { new EntityPermission( Role: AuthorizationResolver.ROLE_AUTHENTICATED , Actions: new[] { createAction })}; Entity entity = new(Source: new($"graphqldb.{_containerName}", null, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "Planet", Plural: "Planets"), Permissions: permissions, @@ -677,7 +681,7 @@ type Planet @model(name:""Planet"") { Mappings: null); string entityName = "Planet"; - RuntimeConfig configuration = ConfigurationTests.InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, entity, entityName); + RuntimeConfig configuration = ConfigurationTests.InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions, entity, entityName); const string CUSTOM_CONFIG = "custom-config.json"; const string CUSTOM_SCHEMA = "custom-schema.gql"; diff --git a/src/Service.Tests/CosmosTests/QueryTests.cs b/src/Service.Tests/CosmosTests/QueryTests.cs index 97cffa3c98..52afa8e788 100644 --- a/src/Service.Tests/CosmosTests/QueryTests.cs +++ b/src/Service.Tests/CosmosTests/QueryTests.cs @@ -682,6 +682,7 @@ 
type Planet @model(name:""Planet"") { GraphQLRuntimeOptions graphqlOptions = new(Enabled: true); RestRuntimeOptions restRuntimeOptions = new(Enabled: false); + McpRuntimeOptions mcpRuntimeOptions = new(Enabled: false); Dictionary dbOptions = new(); HyphenatedNamingPolicy namingPolicy = new(); @@ -709,6 +710,7 @@ type Planet @model(name:""Planet"") { EntityPermission[] permissions = new[] { new EntityPermission(Role: AuthorizationResolver.ROLE_ANONYMOUS, Actions: new[] { createAction, readAction, deleteAction }) }; Entity entity = new(Source: new($"graphqldb.{_containerName}", null, null, null), + Fields: null, Rest: null, GraphQL: new(Singular: "Planet", Plural: "Planets"), Permissions: permissions, @@ -724,7 +726,7 @@ type Planet @model(name:""Planet"") { string entityName = "Planet"; // cache configuration - RuntimeConfig configuration = ConfigurationTests.InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, entity, entityName, new RuntimeCacheOptions() { Enabled = true, TtlSeconds = 5 }); + RuntimeConfig configuration = ConfigurationTests.InitMinimalRuntimeConfig(dataSource, graphqlOptions, restRuntimeOptions, mcpRuntimeOptions, entity, entityName, new RuntimeCacheOptions() { Enabled = true, TtlSeconds = 5 }); const string CUSTOM_CONFIG = "custom-config.json"; const string CUSTOM_SCHEMA = "custom-schema.gql"; diff --git a/src/Service.Tests/CosmosTests/SchemaGeneratorFactoryTests.cs b/src/Service.Tests/CosmosTests/SchemaGeneratorFactoryTests.cs index 20f415e3dc..f10ac17354 100644 --- a/src/Service.Tests/CosmosTests/SchemaGeneratorFactoryTests.cs +++ b/src/Service.Tests/CosmosTests/SchemaGeneratorFactoryTests.cs @@ -78,11 +78,12 @@ public async Task ExportGraphQLFromCosmosDB_GeneratesSchemaSuccessfully(string g {"database", globalDatabase}, {"container", globalContainer} }), - Runtime: new(Rest: null, GraphQL: new(), Host: new(null, null)), + Runtime: new(Rest: null, GraphQL: new(), Mcp: new(), Host: new(null, null)), Entities: new(new 
Dictionary() { {"Container1", new Entity( Source: new(entitySource, EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: false), GraphQL: new("Container1", "Container1s"), Permissions: new EntityPermission[] {}, @@ -90,6 +91,7 @@ public async Task ExportGraphQLFromCosmosDB_GeneratesSchemaSuccessfully(string g Mappings: null) }, {"Container2", new Entity( Source: new("mydb2.container2", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: false), GraphQL: new("Container2", "Container2s"), Permissions: new EntityPermission[] {}, @@ -97,6 +99,7 @@ public async Task ExportGraphQLFromCosmosDB_GeneratesSchemaSuccessfully(string g Mappings: null) }, {"Container0", new Entity( Source: new(null, EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: false), GraphQL: new("Container0", "Container0s"), Permissions: new EntityPermission[] {}, diff --git a/src/Service.Tests/DatabaseSchema-DwSql.sql b/src/Service.Tests/DatabaseSchema-DwSql.sql index 300ef7ff32..daed665949 100644 --- a/src/Service.Tests/DatabaseSchema-DwSql.sql +++ b/src/Service.Tests/DatabaseSchema-DwSql.sql @@ -336,7 +336,8 @@ VALUES (1, 'Awesome book', 1234), (17, 'CONN%_CONN', 1234), (18, '[Special Book]', 1234), (19, 'ME\YOU', 1234), -(20, 'C:\\LIFE', 1234); +(20, 'C:\\LIFE', 1234), +(21, '', 1234); INSERT INTO book_website_placements(id, book_id, price) VALUES (1, 1, 100), (2, 2, 50), (3, 3, 23), (4, 5, 33); diff --git a/src/Service.Tests/DatabaseSchema-MsSql.sql b/src/Service.Tests/DatabaseSchema-MsSql.sql index 3605b2628a..4e87394aee 100644 --- a/src/Service.Tests/DatabaseSchema-MsSql.sql +++ b/src/Service.Tests/DatabaseSchema-MsSql.sql @@ -531,7 +531,8 @@ VALUES (1, 'Awesome book', 1234), (17, 'CONN%_CONN', 1234), (18, '[Special Book]', 1234), (19, 'ME\YOU', 1234), -(20, 'C:\\LIFE', 1234); +(20, 'C:\\LIFE', 1234), +(21, '', 1234); SET IDENTITY_INSERT books OFF SET IDENTITY_INSERT books_mm ON diff --git a/src/Service.Tests/DatabaseSchema-MySql.sql 
b/src/Service.Tests/DatabaseSchema-MySql.sql index f746bc063a..dda93d86d1 100644 --- a/src/Service.Tests/DatabaseSchema-MySql.sql +++ b/src/Service.Tests/DatabaseSchema-MySql.sql @@ -388,7 +388,8 @@ INSERT INTO books(id, title, publisher_id) (17, 'CONN%_CONN', 1234), (18, '[Special Book]', 1234), (19, 'ME\\YOU', 1234), - (20, 'C:\\\\LIFE', 1234); + (20, 'C:\\\\LIFE', 1234), + (21, '', 1234); INSERT INTO book_website_placements(book_id, price) VALUES (1, 100), (2, 50), (3, 23), (5, 33); INSERT INTO website_users(id, username) VALUES (1, 'George'), (2, NULL), (3, ''), (4, 'book_lover_95'), (5, 'null'); INSERT INTO book_author_link(book_id, author_id) VALUES (1, 123), (2, 124), (3, 123), (3, 124), (4, 123), (4, 124), (5, 126); diff --git a/src/Service.Tests/DatabaseSchema-PostgreSql.sql b/src/Service.Tests/DatabaseSchema-PostgreSql.sql index 14615707b1..523e96c22f 100644 --- a/src/Service.Tests/DatabaseSchema-PostgreSql.sql +++ b/src/Service.Tests/DatabaseSchema-PostgreSql.sql @@ -391,7 +391,8 @@ INSERT INTO books(id, title, publisher_id) (17, 'CONN%_CONN', 1234), (18, '[Special Book]', 1234), (19, 'ME\YOU', 1234), - (20, 'C:\\LIFE', 1234); + (20, 'C:\\LIFE', 1234), + (21, '', 1234); INSERT INTO book_website_placements(book_id, price) VALUES (1, 100), (2, 50), (3, 23), (5, 33); INSERT INTO website_users(id, username) VALUES (1, 'George'), (2, NULL), (3, ''), (4, 'book_lover_95'), (5, 'null'); INSERT INTO book_author_link(book_id, author_id) VALUES (1, 123), (2, 124), (3, 123), (3, 124), (4, 123), (4, 124), (5, 126);; diff --git a/src/Service.Tests/GraphQLBuilder/Helpers/GraphQLTestHelpers.cs b/src/Service.Tests/GraphQLBuilder/Helpers/GraphQLTestHelpers.cs index a00885fcf3..737e29f48f 100644 --- a/src/Service.Tests/GraphQLBuilder/Helpers/GraphQLTestHelpers.cs +++ b/src/Service.Tests/GraphQLBuilder/Helpers/GraphQLTestHelpers.cs @@ -80,6 +80,7 @@ public static Dictionary CreateStubEntityPermissionsMap( public static Entity GenerateEmptyEntity(EntitySourceType sourceType 
= EntitySourceType.Table) { return new Entity(Source: new EntitySource(Type: sourceType, Object: "foo", Parameters: null, KeyFields: null), + Fields: null, Rest: new(Array.Empty()), GraphQL: new("", ""), Permissions: Array.Empty(), @@ -101,11 +102,12 @@ public static Entity GenerateStoredProcedureEntity( GraphQLOperation? graphQLOperation, string[] permissionOperations = null, string dbObjectName = "foo", - Dictionary parameters = null + List parameters = null ) { IEnumerable actions = (permissionOperations ?? new string[] { }).Select(a => new EntityAction(EnumExtensions.Deserialize(a), null, new(null, null))); Entity entity = new(Source: new EntitySource(Type: EntitySourceType.StoredProcedure, Object: "foo", Parameters: parameters, KeyFields: null), + Fields: null, Rest: new(Array.Empty()), GraphQL: new(Singular: graphQLTypeName, Plural: "", Enabled: true, Operation: graphQLOperation), Permissions: new[] { new EntityPermission(Role: "anonymous", Actions: actions.ToArray()) }, @@ -123,6 +125,7 @@ public static Entity GenerateStoredProcedureEntity( public static Entity GenerateEntityWithSingularPlural(string singularNameForEntity, string pluralNameForEntity, EntitySourceType sourceType = EntitySourceType.Table) { return new Entity(Source: new EntitySource(Type: sourceType, Object: "foo", Parameters: null, KeyFields: null), + Fields: null, Rest: new(Array.Empty()), GraphQL: new(singularNameForEntity, pluralNameForEntity), Permissions: Array.Empty(), @@ -139,6 +142,7 @@ public static Entity GenerateEntityWithSingularPlural(string singularNameForEnti public static Entity GenerateEntityWithStringType(string singularGraphQLName, EntitySourceType sourceType = EntitySourceType.Table) { return new Entity(Source: new EntitySource(Type: sourceType, Object: "foo", Parameters: null, KeyFields: null), + Fields: null, Rest: new(Array.Empty()), GraphQL: new(singularGraphQLName, ""), Permissions: Array.Empty(), diff --git 
a/src/Service.Tests/GraphQLBuilder/MultipleMutationBuilderTests.cs b/src/Service.Tests/GraphQLBuilder/MultipleMutationBuilderTests.cs index 0ed64ca6ee..94665d7c18 100644 --- a/src/Service.Tests/GraphQLBuilder/MultipleMutationBuilderTests.cs +++ b/src/Service.Tests/GraphQLBuilder/MultipleMutationBuilderTests.cs @@ -360,6 +360,7 @@ private static RuntimeConfigProvider GetRuntimeConfigProvider() { Runtime = new RuntimeOptions(Rest: runtimeConfig.Runtime.Rest, GraphQL: new GraphQLRuntimeOptions(MultipleMutationOptions: new MultipleMutationOptions(new MultipleCreateOptions(enabled: true))), + Mcp: runtimeConfig.Runtime.Mcp, Host: runtimeConfig.Runtime.Host, BaseRoute: runtimeConfig.Runtime.BaseRoute, Telemetry: runtimeConfig.Runtime.Telemetry, diff --git a/src/Service.Tests/GraphQLBuilder/MutationBuilderTests.cs b/src/Service.Tests/GraphQLBuilder/MutationBuilderTests.cs index 8a75724b62..a1478093dd 100644 --- a/src/Service.Tests/GraphQLBuilder/MutationBuilderTests.cs +++ b/src/Service.Tests/GraphQLBuilder/MutationBuilderTests.cs @@ -45,6 +45,7 @@ private static Entity GenerateEmptyEntity() { return new Entity( Source: new("dbo.entity", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: false), GraphQL: new("Foo", "Foos", Enabled: true), Permissions: Array.Empty(), @@ -194,6 +195,41 @@ type Foo @model(name:""Foo"") { Assert.AreEqual("bar", argType.Fields[0].Name.Value); } + [TestMethod] + [TestCategory("Mutation Builder - Create")] + [TestCategory("Mutation Builder - Update")] + [TestCategory("Mutation Builder - Delete")] + public void MutationExcludedForAllAutogeneratedFields() + { + string gql = + @" +type Foo @model(name:""Foo"") { + id: ID! 
@autoGenerated +} + "; + + DocumentNode root = Utf8GraphQLParser.Parse(gql); + + Dictionary entityNameToDatabasetype = new() + { + { "Foo", DatabaseType.MSSQL } + }; + + DocumentNode mutationRoot = MutationBuilder.Build( + root, + entityNameToDatabasetype, + new(new Dictionary { { "Foo", GenerateEmptyEntity() } }), + entityPermissionsMap: _entityPermissions); + + ObjectTypeDefinitionNode query = GetMutationNode(mutationRoot); + List fieldNames = query.Fields.Select(f => f.Name.Value).ToList(); + + // Assert that "createFoo" and "updateFoo" are not present + Assert.IsFalse(fieldNames.Contains("createFoo"), "createFoo should not be present"); + Assert.IsFalse(fieldNames.Contains("updateFoo"), "updateFoo should not be present"); + Assert.IsTrue(fieldNames.Contains("deleteFoo"), "deleteFoo should be present"); + } + [TestMethod] [TestCategory("Mutation Builder - Create")] [TestCategory("Schema Builder - Simple Type")] diff --git a/src/Service.Tests/GraphQLBuilder/Sql/SchemaConverterTests.cs b/src/Service.Tests/GraphQLBuilder/Sql/SchemaConverterTests.cs index e472097fad..84806adc78 100644 --- a/src/Service.Tests/GraphQLBuilder/Sql/SchemaConverterTests.cs +++ b/src/Service.Tests/GraphQLBuilder/Sql/SchemaConverterTests.cs @@ -743,6 +743,7 @@ public static Entity GenerateEmptyEntity(string entityName) { return new Entity( Source: new($"{SCHEMA_NAME}.{TABLE_NAME}", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new(entityName, ""), Permissions: Array.Empty(), diff --git a/src/Service.Tests/GraphQLBuilder/Sql/StoredProcedureBuilderTests.cs b/src/Service.Tests/GraphQLBuilder/Sql/StoredProcedureBuilderTests.cs index 06cad3e972..b07ebcb083 100644 --- a/src/Service.Tests/GraphQLBuilder/Sql/StoredProcedureBuilderTests.cs +++ b/src/Service.Tests/GraphQLBuilder/Sql/StoredProcedureBuilderTests.cs @@ -87,17 +87,28 @@ public void StoredProcedure_ParameterValueTypeResolution( // Parameter collection used to create DatabaseObjectSource which is 
used to create a new entity object. Dictionary configSourcedParameters = new() { { parameterName, JsonSerializer.SerializeToElement(configParamValue) } }; + // Convert configSourcedParameters to List + List parameterMetadataList = configSourcedParameters + .Select(kvp => new ParameterMetadata + { + Name = kvp.Key, + Default = kvp.Value is JsonElement je + ? je.ValueKind == JsonValueKind.String ? je.GetString() : je.ToString() + : kvp.Value?.ToString() + }) + .ToList(); + // Create a new entity where the GraphQL type is explicitly defined as Mutation in the runtime config. Entity spMutationEntity = GraphQLTestHelpers.GenerateStoredProcedureEntity( graphQLTypeName: spMutationTypeName, graphQLOperation: GraphQLOperation.Mutation, - parameters: configSourcedParameters); + parameters: parameterMetadataList); // Create a new entity where the GraphQL type is explicitly defined as Query in the runtime config. Entity spQueryEntity = GraphQLTestHelpers.GenerateStoredProcedureEntity( graphQLTypeName: spQueryTypeName, graphQLOperation: GraphQLOperation.Query, - parameters: configSourcedParameters); + parameters: parameterMetadataList); // Create the GraphQL type for the stored procedure entity. string spQueryEntityName = "spquery"; diff --git a/src/Service.Tests/ModuleInitializer.cs b/src/Service.Tests/ModuleInitializer.cs index b099508604..ba0407ecd5 100644 --- a/src/Service.Tests/ModuleInitializer.cs +++ b/src/Service.Tests/ModuleInitializer.cs @@ -51,6 +51,10 @@ public static void Init() VerifierSettings.IgnoreMember(options => options.IsGraphQLEnabled); // Ignore the entity IsGraphQLEnabled as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(entity => entity.IsGraphQLEnabled); + // Ignore the global IsMcpEnabled as that's unimportant from a test standpoint. + VerifierSettings.IgnoreMember(config => config.IsMcpEnabled); + // Ignore the global RuntimeOptions.IsMcpEnabled as that's unimportant from a test standpoint. 
+ VerifierSettings.IgnoreMember(options => options.IsMcpEnabled); // Ignore the global IsHealthEnabled as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.IsHealthEnabled); // Ignore the global RuntimeOptions.IsHealthCheckEnabled as that's unimportant from a test standpoint. @@ -69,16 +73,16 @@ public static void Init() VerifierSettings.IgnoreMember(config => config.CosmosDataSourceUsed); // Ignore the IsRequestBodyStrict as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.IsRequestBodyStrict); - // Ignore the IsGraphQLEnabled as that's unimportant from a test standpoint. - VerifierSettings.IgnoreMember(config => config.IsGraphQLEnabled); - // Ignore the IsRestEnabled as that's unimportant from a test standpoint. - VerifierSettings.IgnoreMember(config => config.IsRestEnabled); + // Ignore the McpDmlTools as that's unimportant from a test standpoint. + VerifierSettings.IgnoreMember(config => config.McpDmlTools); // Ignore the IsStaticWebAppsIdentityProvider as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.IsStaticWebAppsIdentityProvider); // Ignore the RestPath as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.RestPath); // Ignore the GraphQLPath as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.GraphQLPath); + // Ignore the McpPath as that's unimportant from a test standpoint. + VerifierSettings.IgnoreMember(config => config.McpPath); // Ignore the AllowIntrospection as that's unimportant from a test standpoint. VerifierSettings.IgnoreMember(config => config.AllowIntrospection); // Ignore the EnableAggregation as that's unimportant from a test standpoint. @@ -105,6 +109,8 @@ public static void Init() VerifierSettings.IgnoreMember(options => options.UserProvidedDepthLimit); // Ignore EnableLegacyDateTimeScalar as that's not serialized in our config file. 
VerifierSettings.IgnoreMember(options => options.EnableLegacyDateTimeScalar); + // Ignore UserProvidedPath as that's not serialized in our config file. + VerifierSettings.IgnoreMember(options => options.UserProvidedPath); // Customise the path where we store snapshots, so they are easier to locate in a PR review. VerifyBase.DerivePathInfo( (sourceFile, projectDirectory, type, method) => new( diff --git a/src/Service.Tests/OpenApiDocumentor/DocumentVerbosityTests.cs b/src/Service.Tests/OpenApiDocumentor/DocumentVerbosityTests.cs index de8a35212b..fa43617f4f 100644 --- a/src/Service.Tests/OpenApiDocumentor/DocumentVerbosityTests.cs +++ b/src/Service.Tests/OpenApiDocumentor/DocumentVerbosityTests.cs @@ -42,6 +42,7 @@ public async Task ResponseObjectSchemaIncludesTypeProperty() // Arrange Entity entity = new( Source: new(Object: "books", EntitySourceType.Table, null, null), + Fields: null, GraphQL: new(Singular: null, Plural: null, Enabled: false), Rest: new(Methods: EntityRestOptions.DEFAULT_SUPPORTED_VERBS), Permissions: OpenApiTestBootstrap.CreateBasicPermissions(), diff --git a/src/Service.Tests/OpenApiDocumentor/ParameterValidationTests.cs b/src/Service.Tests/OpenApiDocumentor/ParameterValidationTests.cs index 7ef4e015c0..7c0e0225ae 100644 --- a/src/Service.Tests/OpenApiDocumentor/ParameterValidationTests.cs +++ b/src/Service.Tests/OpenApiDocumentor/ParameterValidationTests.cs @@ -144,10 +144,23 @@ public async Task TestInputParametersForStoredProcedures() string entityName = "UpdateBookTitle"; string objectName = "update_book_title"; - // Adding a parameter default value. - Dictionary parameterDefaults = new() { { "title", "Test Title" } }; + // Adding parameter metadata with a default value. 
+ List parameterMetadata = new() + { + new ParameterMetadata + { + Name = "id", + Required = false + }, + new ParameterMetadata + { + Name = "title", + Required = false, + Default = "Test Title" + } + }; - EntitySource entitySource = new(Object: objectName, EntitySourceType.StoredProcedure, parameterDefaults, null); + EntitySource entitySource = new(Object: objectName, Type: EntitySourceType.StoredProcedure, Parameters: parameterMetadata, KeyFields: null); OpenApiDocument openApiDocument = await GenerateOpenApiDocumentForGivenEntityAsync( entityName, entitySource, @@ -221,6 +234,7 @@ private async static Task GenerateOpenApiDocumentForGivenEntity { Entity entity = new( Source: entitySource, + Fields: null, GraphQL: new(Singular: null, Plural: null, Enabled: false), Rest: new(Methods: supportedHttpMethods ?? EntityRestOptions.DEFAULT_SUPPORTED_VERBS), Permissions: OpenApiTestBootstrap.CreateBasicPermissions(), diff --git a/src/Service.Tests/OpenApiDocumentor/PathValidationTests.cs b/src/Service.Tests/OpenApiDocumentor/PathValidationTests.cs index bff1333497..5f478b3b80 100644 --- a/src/Service.Tests/OpenApiDocumentor/PathValidationTests.cs +++ b/src/Service.Tests/OpenApiDocumentor/PathValidationTests.cs @@ -45,6 +45,7 @@ public async Task ValidateEntityRestPath(string entityName, string configuredRes { Entity entity = new( Source: new(Object: "books", EntitySourceType.Table, null, null), + Fields: null, GraphQL: new(Singular: null, Plural: null, Enabled: false), Rest: new(Methods: EntityRestOptions.DEFAULT_SUPPORTED_VERBS, Path: configuredRestPath), Permissions: OpenApiTestBootstrap.CreateBasicPermissions(), diff --git a/src/Service.Tests/OpenApiDocumentor/StoredProcedureGeneration.cs b/src/Service.Tests/OpenApiDocumentor/StoredProcedureGeneration.cs index 01ee1ac0a9..ffd5aaadde 100644 --- a/src/Service.Tests/OpenApiDocumentor/StoredProcedureGeneration.cs +++ b/src/Service.Tests/OpenApiDocumentor/StoredProcedureGeneration.cs @@ -55,11 +55,13 @@ public static void 
CreateEntities() { Entity entity1 = new( Source: new(Object: "insert_and_display_all_books_for_given_publisher", EntitySourceType.StoredProcedure, null, null), + Fields: null, GraphQL: new(Singular: null, Plural: null, Enabled: false), Rest: new(Methods: EntityRestOptions.DEFAULT_SUPPORTED_VERBS), Permissions: OpenApiTestBootstrap.CreateBasicPermissions(), Mappings: null, - Relationships: null); + Relationships: null, + Description: "Represents a stored procedure for books"); Dictionary entities = new() { @@ -129,6 +131,24 @@ public void ValidateResponseBodyContents(string entityName, string[] expectedCol ValidateOpenApiReferenceContents(schemaComponentReference, expectedSchemaReferenceId, expectedColumns, expectedColumnJsonTypes); } + /// + /// Integration tests validating that entity descriptions are included in the OpenAPI document. + /// + [TestMethod] + public void OpenApiDocumentor_TagsIncludeEntityDescription() + { + // Arrange: The entity name and expected description + string entityName = "sp1"; + string expectedDescription = "Represents a stored procedure for books"; // Set this to your actual description + + // Act: Get the tags from the OpenAPI document + IList tags = _openApiDocument.Tags; + + // Assert: There is a tag for the entity and it includes the description + Assert.IsTrue(tags.Any(t => t.Name == entityName && t.Description == expectedDescription), + $"Expected tag for '{entityName}' with description '{expectedDescription}' not found."); + } + /// /// Validates that the provided OpenApiReference object has the expected schema reference id /// and that that id is present in the list of component schema in the OpenApi document. 
diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt index 51d8543ed5..420977ed26 100644 --- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt +++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForCosmos.verified.txt @@ -17,6 +17,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt index fa9a9cbcd7..b622552ef5 100644 --- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt +++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMsSql.verified.txt @@ -21,6 +21,10 @@ } } }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ @@ -505,6 +509,18 @@ Object: books, Type: Table }, + Fields: [ + { + Name: id, + Alias: id, + PrimaryKey: false + }, + { + Name: title, + Alias: title, + PrimaryKey: false + } + ], GraphQL: { Singular: book, Plural: books, @@ -889,10 +905,6 @@ ] } ], - Mappings: { - id: id, - title: title - }, Relationships: { authors: { Cardinality: Many, @@ -1620,6 +1632,13 @@ Object: type_table, Type: Table }, + Fields: [ + { + Name: id, + Alias: typeid, + PrimaryKey: false + } + ], GraphQL: { Singular: SupportedType, Plural: SupportedTypes, @@ -1663,10 +1682,7 @@ } ] } - ], - Mappings: { - id: typeid - } + ] } }, { @@ -1761,6 +1777,18 @@ Object: trees, Type: Table }, + Fields: [ + { + Name: species, + Alias: Scientific Name, + PrimaryKey: false + }, + { + Name: region, + Alias: United State's Region, + PrimaryKey: false + } + ], GraphQL: { Singular: Tree, Plural: Trees, @@ -1804,11 +1832,7 @@ } 
] } - ], - Mappings: { - region: United State's Region, - species: Scientific Name - } + ] } }, { @@ -1817,6 +1841,13 @@ Object: trees, Type: Table }, + Fields: [ + { + Name: species, + Alias: fancyName, + PrimaryKey: false + } + ], GraphQL: { Singular: Shrub, Plural: Shrubs, @@ -1862,9 +1893,6 @@ ] } ], - Mappings: { - species: fancyName - }, Relationships: { fungus: { TargetEntity: Fungus, @@ -1884,6 +1912,13 @@ Object: fungi, Type: Table }, + Fields: [ + { + Name: spores, + Alias: hazards, + PrimaryKey: false + } + ], GraphQL: { Singular: fungus, Plural: fungi, @@ -1944,9 +1979,6 @@ ] } ], - Mappings: { - spores: hazards - }, Relationships: { Shrub: { TargetEntity: Shrub, @@ -1964,11 +1996,14 @@ books_view_all: { Source: { Object: books_view_all, - Type: View, - KeyFields: [ - id - ] + Type: View }, + Fields: [ + { + Name: id, + PrimaryKey: true + } + ], GraphQL: { Singular: books_view_all, Plural: books_view_alls, @@ -2010,11 +2045,15 @@ books_view_with_mapping: { Source: { Object: books_view_with_mapping, - Type: View, - KeyFields: [ - id - ] + Type: View }, + Fields: [ + { + Name: id, + Alias: book_id, + PrimaryKey: true + } + ], GraphQL: { Singular: books_view_with_mapping, Plural: books_view_with_mappings, @@ -2032,22 +2071,25 @@ } ] } - ], - Mappings: { - id: book_id - } + ] } }, { stocks_view_selected: { Source: { Object: stocks_view_selected, - Type: View, - KeyFields: [ - categoryid, - pieceid - ] + Type: View }, + Fields: [ + { + Name: categoryid, + PrimaryKey: true + }, + { + Name: pieceid, + PrimaryKey: true + } + ], GraphQL: { Singular: stocks_view_selected, Plural: stocks_view_selecteds, @@ -2089,12 +2131,18 @@ books_publishers_view_composite: { Source: { Object: books_publishers_view_composite, - Type: View, - KeyFields: [ - id, - pub_id - ] + Type: View }, + Fields: [ + { + Name: id, + PrimaryKey: true + }, + { + Name: pub_id, + PrimaryKey: true + } + ], GraphQL: { Singular: books_publishers_view_composite, Plural: 
books_publishers_view_composites, @@ -2348,6 +2396,28 @@ Object: aow, Type: Table }, + Fields: [ + { + Name: DetailAssessmentAndPlanning, + Alias: 始計, + PrimaryKey: false + }, + { + Name: WagingWar, + Alias: 作戰, + PrimaryKey: false + }, + { + Name: StrategicAttack, + Alias: 謀攻, + PrimaryKey: false + }, + { + Name: NoteNum, + Alias: ┬─┬ノ( º _ ºノ), + PrimaryKey: false + } + ], GraphQL: { Singular: ArtOfWar, Plural: ArtOfWars, @@ -2373,13 +2443,7 @@ } ] } - ], - Mappings: { - DetailAssessmentAndPlanning: 始計, - NoteNum: ┬─┬ノ( º _ ºノ), - StrategicAttack: 謀攻, - WagingWar: 作戰 - } + ] } }, { @@ -2834,10 +2898,18 @@ Source: { Object: insert_book, Type: stored-procedure, - Parameters: { - publisher_id: 1234, - title: randomX - } + Parameters: [ + { + Name: title, + Required: false, + Default: randomX + }, + { + Name: publisher_id, + Required: false, + Default: 1234 + } + ] }, GraphQL: { Singular: InsertBook, @@ -2952,10 +3024,18 @@ Source: { Object: update_book_title, Type: stored-procedure, - Parameters: { - id: 1, - title: Testing Tonight - } + Parameters: [ + { + Name: id, + Required: false, + Default: 1 + }, + { + Name: title, + Required: false, + Default: Testing Tonight + } + ] }, GraphQL: { Singular: UpdateBookTitle, @@ -2994,9 +3074,13 @@ Source: { Object: get_authors_history_by_first_name, Type: stored-procedure, - Parameters: { - firstName: Aaron - } + Parameters: [ + { + Name: firstName, + Required: false, + Default: Aaron + } + ] }, GraphQL: { Singular: SearchAuthorByFirstName, @@ -3035,10 +3119,18 @@ Source: { Object: insert_and_display_all_books_for_given_publisher, Type: stored-procedure, - Parameters: { - publisher_name: MyPublisher, - title: MyTitle - } + Parameters: [ + { + Name: title, + Required: false, + Default: MyTitle + }, + { + Name: publisher_name, + Required: false, + Default: MyPublisher + } + ] }, GraphQL: { Singular: InsertAndDisplayAllBooksUnderGivenPublisher, @@ -3078,6 +3170,18 @@ Object: GQLmappings, Type: Table }, + Fields: [ + { + Name: 
__column1, + Alias: column1, + PrimaryKey: false + }, + { + Name: __column2, + Alias: column2, + PrimaryKey: false + } + ], GraphQL: { Singular: GQLmappings, Plural: GQLmappings, @@ -3103,11 +3207,7 @@ } ] } - ], - Mappings: { - __column1: column1, - __column2: column2 - } + ] } }, { @@ -3150,6 +3250,18 @@ Object: mappedbookmarks, Type: Table }, + Fields: [ + { + Name: id, + Alias: bkid, + PrimaryKey: false + }, + { + Name: bkname, + Alias: name, + PrimaryKey: false + } + ], GraphQL: { Singular: MappedBookmarks, Plural: MappedBookmarks, @@ -3175,11 +3287,7 @@ } ] } - ], - Mappings: { - bkname: name, - id: bkid - } + ] } }, { @@ -3385,6 +3493,18 @@ Object: books, Type: Table }, + Fields: [ + { + Name: id, + Alias: id, + PrimaryKey: false + }, + { + Name: title, + Alias: title, + PrimaryKey: false + } + ], GraphQL: { Singular: bookNF, Plural: booksNF, @@ -3442,12 +3562,21 @@ Action: Read } ] + }, + { + Role: TestFieldExcludedForAggregation, + Actions: [ + { + Action: Read, + Fields: { + Exclude: [ + publisher_id + ] + } + } + ] } ], - Mappings: { - id: id, - title: title - }, Relationships: { authors: { Cardinality: Many, @@ -3638,6 +3767,36 @@ } ] } + }, + { + GetBooksAuth: { + Source: { + Object: get_books, + Type: stored-procedure + }, + GraphQL: { + Singular: GetBooksAuth, + Plural: GetBooksAuths, + Enabled: true, + Operation: Query + }, + Rest: { + Methods: [ + Get + ], + Enabled: true + }, + Permissions: [ + { + Role: teststoredprocauth, + Actions: [ + { + Action: Execute + } + ] + } + ] + } } ] } \ No newline at end of file diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt index 23f67259d4..6c81c138ce 100644 --- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt +++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForMySql.verified.txt @@ -13,6 
+13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ @@ -370,6 +374,18 @@ Object: books, Type: Table }, + Fields: [ + { + Name: id, + Alias: id, + PrimaryKey: false + }, + { + Name: title, + Alias: title, + PrimaryKey: false + } + ], GraphQL: { Singular: book, Plural: books, @@ -731,10 +747,6 @@ ] } ], - Mappings: { - id: id, - title: title - }, Relationships: { authors: { Cardinality: Many, @@ -1139,6 +1151,13 @@ Object: type_table, Type: Table }, + Fields: [ + { + Name: id, + Alias: typeid, + PrimaryKey: false + } + ], GraphQL: { Singular: SupportedType, Plural: SupportedTypes, @@ -1182,10 +1201,7 @@ } ] } - ], - Mappings: { - id: typeid - } + ] } }, { @@ -1237,6 +1253,18 @@ Object: trees, Type: Table }, + Fields: [ + { + Name: species, + Alias: Scientific Name, + PrimaryKey: false + }, + { + Name: region, + Alias: United State's Region, + PrimaryKey: false + } + ], GraphQL: { Singular: Tree, Plural: Trees, @@ -1280,11 +1308,7 @@ } ] } - ], - Mappings: { - region: United State's Region, - species: Scientific Name - } + ] } }, { @@ -1293,6 +1317,13 @@ Object: trees, Type: Table }, + Fields: [ + { + Name: species, + Alias: fancyName, + PrimaryKey: false + } + ], GraphQL: { Singular: Shrub, Plural: Shrubs, @@ -1338,9 +1369,6 @@ ] } ], - Mappings: { - species: fancyName - }, Relationships: { fungus: { TargetEntity: Fungus, @@ -1360,6 +1388,13 @@ Object: fungi, Type: Table }, + Fields: [ + { + Name: spores, + Alias: hazards, + PrimaryKey: false + } + ], GraphQL: { Singular: fungus, Plural: fungi, @@ -1420,9 +1455,6 @@ ] } ], - Mappings: { - spores: hazards - }, Relationships: { Shrub: { TargetEntity: Shrub, @@ -1440,11 +1472,14 @@ books_view_all: { Source: { Object: books_view_all, - Type: View, - KeyFields: [ - id - ] + Type: View }, + Fields: [ + { + Name: id, + PrimaryKey: true + } + ], GraphQL: { Singular: books_view_all, Plural: books_view_alls, @@ -1486,11 +1521,15 @@ 
books_view_with_mapping: { Source: { Object: books_view_with_mapping, - Type: View, - KeyFields: [ - id - ] + Type: View }, + Fields: [ + { + Name: id, + Alias: book_id, + PrimaryKey: true + } + ], GraphQL: { Singular: books_view_with_mapping, Plural: books_view_with_mappings, @@ -1508,22 +1547,25 @@ } ] } - ], - Mappings: { - id: book_id - } + ] } }, { stocks_view_selected: { Source: { Object: stocks_view_selected, - Type: View, - KeyFields: [ - categoryid, - pieceid - ] + Type: View }, + Fields: [ + { + Name: categoryid, + PrimaryKey: true + }, + { + Name: pieceid, + PrimaryKey: true + } + ], GraphQL: { Singular: stocks_view_selected, Plural: stocks_view_selecteds, @@ -1565,12 +1607,18 @@ books_publishers_view_composite: { Source: { Object: books_publishers_view_composite, - Type: View, - KeyFields: [ - id, - pub_id - ] + Type: View }, + Fields: [ + { + Name: id, + PrimaryKey: true + }, + { + Name: pub_id, + PrimaryKey: true + } + ], GraphQL: { Singular: books_publishers_view_composite, Plural: books_publishers_view_composites, @@ -1824,6 +1872,28 @@ Object: aow, Type: Table }, + Fields: [ + { + Name: DetailAssessmentAndPlanning, + Alias: 始計, + PrimaryKey: false + }, + { + Name: WagingWar, + Alias: 作戰, + PrimaryKey: false + }, + { + Name: StrategicAttack, + Alias: 謀攻, + PrimaryKey: false + }, + { + Name: NoteNum, + Alias: ┬─┬ノ( º _ ºノ), + PrimaryKey: false + } + ], GraphQL: { Singular: ArtOfWar, Plural: ArtOfWars, @@ -1849,13 +1919,7 @@ } ] } - ], - Mappings: { - DetailAssessmentAndPlanning: 始計, - NoteNum: ┬─┬ノ( º _ ºノ), - StrategicAttack: 謀攻, - WagingWar: 作戰 - } + ] } }, { @@ -2069,6 +2133,18 @@ Object: GQLmappings, Type: Table }, + Fields: [ + { + Name: __column1, + Alias: column1, + PrimaryKey: false + }, + { + Name: __column2, + Alias: column2, + PrimaryKey: false + } + ], GraphQL: { Singular: GQLmappings, Plural: GQLmappings, @@ -2094,11 +2170,7 @@ } ] } - ], - Mappings: { - __column1: column1, - __column2: column2 - } + ] } }, { @@ -2141,6 +2213,18 @@ 
Object: mappedbookmarks, Type: Table }, + Fields: [ + { + Name: id, + Alias: bkid, + PrimaryKey: false + }, + { + Name: bkname, + Alias: name, + PrimaryKey: false + } + ], GraphQL: { Singular: MappedBookmarks, Plural: MappedBookmarks, @@ -2166,11 +2250,7 @@ } ] } - ], - Mappings: { - bkname: name, - id: bkid - } + ] } }, { diff --git a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt index a534867fee..5e8631d46f 100644 --- a/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt +++ b/src/Service.Tests/Snapshots/ConfigurationTests.TestReadingRuntimeConfigForPostgreSql.verified.txt @@ -13,6 +13,10 @@ Path: /graphql, AllowIntrospection: true }, + Mcp: { + Enabled: true, + Path: /mcp + }, Host: { Cors: { Origins: [ @@ -403,6 +407,18 @@ Object: books, Type: Table }, + Fields: [ + { + Name: id, + Alias: id, + PrimaryKey: false + }, + { + Name: title, + Alias: title, + PrimaryKey: false + } + ], GraphQL: { Singular: book, Plural: books, @@ -764,10 +780,6 @@ ] } ], - Mappings: { - id: id, - title: title - }, Relationships: { authors: { Cardinality: Many, @@ -1189,6 +1201,13 @@ Object: type_table, Type: Table }, + Fields: [ + { + Name: id, + Alias: typeid, + PrimaryKey: false + } + ], GraphQL: { Singular: SupportedType, Plural: SupportedTypes, @@ -1232,10 +1251,7 @@ } ] } - ], - Mappings: { - id: typeid - } + ] } }, { @@ -1308,6 +1324,18 @@ Object: trees, Type: Table }, + Fields: [ + { + Name: species, + Alias: Scientific Name, + PrimaryKey: false + }, + { + Name: region, + Alias: United State's Region, + PrimaryKey: false + } + ], GraphQL: { Singular: Tree, Plural: Trees, @@ -1351,11 +1379,7 @@ } ] } - ], - Mappings: { - region: United State's Region, - species: Scientific Name - } + ] } }, { @@ -1364,6 +1388,13 @@ Object: trees, Type: Table }, + Fields: [ + { + Name: species, + 
Alias: fancyName, + PrimaryKey: false + } + ], GraphQL: { Singular: Shrub, Plural: Shrubs, @@ -1409,9 +1440,6 @@ ] } ], - Mappings: { - species: fancyName - }, Relationships: { fungus: { TargetEntity: Fungus, @@ -1431,6 +1459,13 @@ Object: fungi, Type: Table }, + Fields: [ + { + Name: spores, + Alias: hazards, + PrimaryKey: false + } + ], GraphQL: { Singular: fungus, Plural: fungi, @@ -1491,9 +1526,6 @@ ] } ], - Mappings: { - spores: hazards - }, Relationships: { Shrub: { TargetEntity: Shrub, @@ -1679,11 +1711,15 @@ books_view_with_mapping: { Source: { Object: books_view_with_mapping, - Type: View, - KeyFields: [ - id - ] + Type: View }, + Fields: [ + { + Name: id, + Alias: book_id, + PrimaryKey: true + } + ], GraphQL: { Singular: books_view_with_mapping, Plural: books_view_with_mappings, @@ -1701,10 +1737,7 @@ } ] } - ], - Mappings: { - id: book_id - } + ] } }, { @@ -1982,6 +2015,28 @@ Object: aow, Type: Table }, + Fields: [ + { + Name: DetailAssessmentAndPlanning, + Alias: 始計, + PrimaryKey: false + }, + { + Name: WagingWar, + Alias: 作戰, + PrimaryKey: false + }, + { + Name: StrategicAttack, + Alias: 謀攻, + PrimaryKey: false + }, + { + Name: NoteNum, + Alias: ┬─┬ノ( º _ ºノ), + PrimaryKey: false + } + ], GraphQL: { Singular: ArtOfWar, Plural: ArtOfWars, @@ -2007,13 +2062,7 @@ } ] } - ], - Mappings: { - DetailAssessmentAndPlanning: 始計, - NoteNum: ┬─┬ノ( º _ ºノ), - StrategicAttack: 謀攻, - WagingWar: 作戰 - } + ] } }, { @@ -2131,6 +2180,18 @@ Object: gqlmappings, Type: Table }, + Fields: [ + { + Name: __column1, + Alias: column1, + PrimaryKey: false + }, + { + Name: __column2, + Alias: column2, + PrimaryKey: false + } + ], GraphQL: { Singular: GQLmappings, Plural: GQLmappings, @@ -2156,11 +2217,7 @@ } ] } - ], - Mappings: { - __column1: column1, - __column2: column2 - } + ] } }, { @@ -2203,6 +2260,18 @@ Object: mappedbookmarks, Type: Table }, + Fields: [ + { + Name: id, + Alias: bkid, + PrimaryKey: false + }, + { + Name: bkname, + Alias: name, + PrimaryKey: false + } + ], 
GraphQL: { Singular: MappedBookmarks, Plural: MappedBookmarks, @@ -2228,11 +2297,7 @@ } ] } - ], - Mappings: { - bkname: name, - id: bkid - } + ] } }, { @@ -2386,6 +2451,18 @@ Object: books, Type: Table }, + Fields: [ + { + Name: id, + Alias: id, + PrimaryKey: false + }, + { + Name: title, + Alias: title, + PrimaryKey: false + } + ], GraphQL: { Singular: bookNF, Plural: booksNF, @@ -2445,10 +2522,6 @@ ] } ], - Mappings: { - id: id, - title: title - }, Relationships: { authors: { Cardinality: Many, @@ -2570,6 +2643,18 @@ Object: dimaccount, Type: Table }, + Fields: [ + { + Name: parentaccountkey, + Alias: ParentAccountKey, + PrimaryKey: false + }, + { + Name: accountkey, + Alias: AccountKey, + PrimaryKey: false + } + ], GraphQL: { Singular: dbo_DimAccount, Plural: dbo_DimAccounts, @@ -2588,10 +2673,6 @@ ] } ], - Mappings: { - accountkey: AccountKey, - parentaccountkey: ParentAccountKey - }, Relationships: { child_accounts: { Cardinality: Many, diff --git a/src/Service.Tests/SqlTests/GraphQLMutationTests/DwSqlGraphQLMutationTests.cs b/src/Service.Tests/SqlTests/GraphQLMutationTests/DwSqlGraphQLMutationTests.cs index 5d063c31fd..18339b2448 100644 --- a/src/Service.Tests/SqlTests/GraphQLMutationTests/DwSqlGraphQLMutationTests.cs +++ b/src/Service.Tests/SqlTests/GraphQLMutationTests/DwSqlGraphQLMutationTests.cs @@ -103,6 +103,72 @@ public override async Task InsertMutationWithOnlyTypenameInSelectionSet() SqlTestHelper.PerformTestEqualJsonStrings(expected, actual.ToString()); } + /// + /// Do: Inserts new Publisher with name = 'New publisher' + /// Check: Mutation fails because the database policy (@item.name ne 'New publisher') prohibits insertion of records with name = 'New publisher'. 
+ /// + [TestMethod] + public async Task InsertMutationFailingDatabasePolicy() + { + string errorMessage = "Could not insert row with given values."; + string msSqlQuery = @" + SELECT count(*) as count + FROM [publishers] + WHERE [name] = 'New publisher' + FOR JSON PATH, + INCLUDE_NULL_VALUES, + WITHOUT_ARRAY_WRAPPER + "; + + string graphQLMutationName = "createPublisher"; + string graphQLMutationPayload = @" + mutation { + createPublisher(item: { id: 1 name: ""New publisher"" }) { + result + } + } + "; + + await InsertMutationFailingDatabasePolicy( + dbQuery: msSqlQuery, + errorMessage: errorMessage, + roleName: "database_policy_tester", + graphQLMutationName: graphQLMutationName, + graphQLMutationPayload: graphQLMutationPayload); + } + + /// + /// Do: Inserts new Publisher with name = 'Not New publisher' + /// Check: Mutation succeeds because the database policy (@item.name ne 'New publisher') is passed + /// + [TestMethod] + public async Task InsertMutationWithDatabasePolicy() + { + string msSqlQuery = @" + SELECT COUNT(*) AS [count] + FROM [publishers] + WHERE [name] = 'Not New publisher' + FOR JSON PATH, + INCLUDE_NULL_VALUES, + WITHOUT_ARRAY_WRAPPER + "; + + string graphqlMutationName = "createPublisher"; + string graphQLMutationPayload = @" + mutation { + createPublisher(item: { id: 1 name: ""Not New publisher"" }) { + result + } + } + "; + + await InsertMutationWithDatabasePolicy( + dbQuery: msSqlQuery, + roleName: "database_policy_tester", + graphQLMutationName: graphqlMutationName, + graphQLMutationPayload: graphQLMutationPayload); + } + /// /// Do: Update book in database and return its updated fields /// Check: Result value of success is verified in the response. 
diff --git a/src/Service.Tests/SqlTests/GraphQLMutationTests/GraphQLMutationTestBase.cs b/src/Service.Tests/SqlTests/GraphQLMutationTests/GraphQLMutationTestBase.cs index d8cd8d7a7e..745d5eade3 100644 --- a/src/Service.Tests/SqlTests/GraphQLMutationTests/GraphQLMutationTestBase.cs +++ b/src/Service.Tests/SqlTests/GraphQLMutationTests/GraphQLMutationTestBase.cs @@ -105,19 +105,10 @@ public virtual async Task InsertMutationWithDefaultBuiltInFunctions(string dbQue /// SELECT query to validate expected result. /// Expected error message. /// Custom client role in whose context this authenticated request will be executed - public async Task InsertMutationFailingDatabasePolicy(string dbQuery, string errorMessage, string roleName) + /// graphql request payload + public async Task InsertMutationFailingDatabasePolicy(string dbQuery, string errorMessage, string roleName, string graphQLMutationName, string graphQLMutationPayload) { - string graphQLMutationName = "createPublisher"; - string graphQLMutation = @" - mutation { - createPublisher(item: { name: ""New publisher"" }) { - id - name - } - } - "; - - JsonElement result = await ExecuteGraphQLRequestAsync(graphQLMutation, graphQLMutationName, clientRoleHeader: roleName, isAuthenticated: true); + JsonElement result = await ExecuteGraphQLRequestAsync(graphQLMutationPayload, graphQLMutationName, clientRoleHeader: roleName, isAuthenticated: true); SqlTestHelper.TestForErrorInGraphQLResponse( result.ToString(), @@ -132,6 +123,20 @@ public async Task InsertMutationFailingDatabasePolicy(string dbQuery, string err Assert.AreEqual(dbResponseJson.RootElement.GetProperty("count").GetInt64(), 0); } + /// + /// Do: Attempt to insert a new publisher with name allowed by database policy (@item.name ne 'New publisher') + /// Check: Mutation succeeds. 
+ /// + public async Task InsertMutationWithDatabasePolicy(string dbQuery, string roleName, string graphQLMutationName, string graphQLMutationPayload) + { + await ExecuteGraphQLRequestAsync(graphQLMutationPayload, graphQLMutationName, clientRoleHeader: roleName, isAuthenticated: true); + + string currentDbResponse = await GetDatabaseResultAsync(dbQuery); + + JsonDocument currentResult = JsonDocument.Parse(currentDbResponse); + Assert.AreEqual(1, currentResult.RootElement.GetProperty("count").GetInt64()); + } + /// /// Do: Inserts new book using variables to set its title and publisher_id /// Check: If book with the expected values of the new book is present in the database and @@ -252,7 +257,7 @@ public async Task TestStoredProcedureMutationForDeletion(string dbQueryToVerifyD string currentDbResponse = await GetDatabaseResultAsync(dbQueryToVerifyDeletion); JsonDocument currentResult = JsonDocument.Parse(currentDbResponse); - Assert.AreEqual(currentResult.RootElement.GetProperty("maxId").GetInt64(), 20); + Assert.AreEqual(currentResult.RootElement.GetProperty("maxId").GetInt64(), 21); JsonElement graphQLResponse = await ExecuteGraphQLRequestAsync(graphQLMutation, graphQLMutationName, isAuthenticated: true); // Stored Procedure didn't return anything @@ -261,7 +266,7 @@ public async Task TestStoredProcedureMutationForDeletion(string dbQueryToVerifyD // check to verify new element is inserted string updatedDbResponse = await GetDatabaseResultAsync(dbQueryToVerifyDeletion); JsonDocument updatedResult = JsonDocument.Parse(updatedDbResponse); - Assert.AreEqual(updatedResult.RootElement.GetProperty("maxId").GetInt64(), 19); + Assert.AreEqual(updatedResult.RootElement.GetProperty("maxId").GetInt64(), 20); } public async Task InsertMutationOnTableWithTriggerWithNonAutoGenPK(string dbQuery) diff --git a/src/Service.Tests/SqlTests/GraphQLMutationTests/MsSqlGraphQLMutationTests.cs b/src/Service.Tests/SqlTests/GraphQLMutationTests/MsSqlGraphQLMutationTests.cs index 
e9a2f07c66..78337d601f 100644 --- a/src/Service.Tests/SqlTests/GraphQLMutationTests/MsSqlGraphQLMutationTests.cs +++ b/src/Service.Tests/SqlTests/GraphQLMutationTests/MsSqlGraphQLMutationTests.cs @@ -96,10 +96,55 @@ FROM [publishers] WITHOUT_ARRAY_WRAPPER "; + string graphqlMutationName = "createPublisher"; + string graphQLMutationPayload = @" + mutation { + createPublisher(item: { name: ""New publisher"" }) { + id + name + } + } + "; + await InsertMutationFailingDatabasePolicy( dbQuery: msSqlQuery, errorMessage: errorMessage, - roleName: "database_policy_tester"); + roleName: "database_policy_tester", + graphQLMutationName: graphqlMutationName, + graphQLMutationPayload: graphQLMutationPayload); + } + + /// + /// Do: Inserts new Publisher with name = 'Not New publisher' + /// Check: Mutation succeeds because the database policy (@item.name ne 'New publisher') is passed + /// + [TestMethod] + public async Task InsertMutationWithDatabasePolicy() + { + string msSqlQuery = @" + SELECT COUNT(*) AS [count] + FROM [publishers] + WHERE [name] = 'Not New publisher' + FOR JSON PATH, + INCLUDE_NULL_VALUES, + WITHOUT_ARRAY_WRAPPER + "; + + string graphqlMutationName = "createPublisher"; + string graphQLMutationPayload = @" + mutation { + createPublisher(item: { name: ""Not New publisher"" }) { + id + name + } + } + "; + + await InsertMutationWithDatabasePolicy( + dbQuery: msSqlQuery, + roleName: "database_policy_tester", + graphQLMutationName: graphqlMutationName, + graphQLMutationPayload: graphQLMutationPayload); } /// diff --git a/src/Service.Tests/SqlTests/GraphQLPaginationTests/GraphQLPaginationTestBase.cs b/src/Service.Tests/SqlTests/GraphQLPaginationTests/GraphQLPaginationTestBase.cs index e7e18d6090..33db8f8b49 100644 --- a/src/Service.Tests/SqlTests/GraphQLPaginationTests/GraphQLPaginationTestBase.cs +++ b/src/Service.Tests/SqlTests/GraphQLPaginationTests/GraphQLPaginationTestBase.cs @@ -84,95 +84,101 @@ public async Task RequestMaxUsingNegativeOne() } }"; - // this 
resultset represents all books in the db. - JsonElement actual = await ExecuteGraphQLRequestAsync(graphQLQuery, graphQLQueryName, isAuthenticated: false); string expected = @"{ - ""items"": [ + ""items"": [ { - ""id"": 1, - ""title"": ""Awesome book"" + ""id"": 1, + ""title"": ""Awesome book"" }, { - ""id"": 2, - ""title"": ""Also Awesome book"" + ""id"": 2, + ""title"": ""Also Awesome book"" }, { - ""id"": 3, - ""title"": ""Great wall of china explained"" + ""id"": 3, + ""title"": ""Great wall of china explained"" }, { - ""id"": 4, - ""title"": ""US history in a nutshell"" + ""id"": 4, + ""title"": ""US history in a nutshell"" }, { - ""id"": 5, - ""title"": ""Chernobyl Diaries"" + ""id"": 5, + ""title"": ""Chernobyl Diaries"" }, { - ""id"": 6, - ""title"": ""The Palace Door"" + ""id"": 6, + ""title"": ""The Palace Door"" }, { - ""id"": 7, - ""title"": ""The Groovy Bar"" + ""id"": 7, + ""title"": ""The Groovy Bar"" }, { - ""id"": 8, - ""title"": ""Time to Eat"" + ""id"": 8, + ""title"": ""Time to Eat"" }, { - ""id"": 9, - ""title"": ""Policy-Test-01"" + ""id"": 9, + ""title"": ""Policy-Test-01"" }, { - ""id"": 10, - ""title"": ""Policy-Test-02"" + ""id"": 10, + ""title"": ""Policy-Test-02"" }, { - ""id"": 11, - ""title"": ""Policy-Test-04"" + ""id"": 11, + ""title"": ""Policy-Test-04"" }, { - ""id"": 12, - ""title"": ""Time to Eat 2"" + ""id"": 12, + ""title"": ""Time to Eat 2"" + }, + { + ""id"": 13, + ""title"": ""Before Sunrise"" }, { - ""id"": 13, - ""title"": ""Before Sunrise"" + ""id"": 14, + ""title"": ""Before Sunset"" }, { - ""id"": 14, - ""title"": ""Before Sunset"" + ""id"": 15, + ""title"": ""SQL_CONN"" }, { - ""id"": 15, - ""title"": ""SQL_CONN"" + ""id"": 16, + ""title"": ""SOME%CONN"" }, { - ""id"": 16, - ""title"": ""SOME%CONN"" + ""id"": 17, + ""title"": ""CONN%_CONN"" }, { - ""id"": 17, - ""title"": ""CONN%_CONN"" + ""id"": 18, + ""title"": ""[Special Book]"" }, { - ""id"": 18, - ""title"": ""[Special Book]"" + ""id"": 19, + ""title"": ""ME\\YOU"" 
}, { - ""id"": 19, - ""title"": ""ME\\YOU"" + ""id"": 20, + ""title"": ""C:\\\\LIFE"" }, { - ""id"": 20, - ""title"": ""C:\\\\LIFE"" + ""id"": 21, + ""title"": """" } - ], - ""endCursor"": null, - ""hasNextPage"": false + ], + ""endCursor"": null, + ""hasNextPage"": false }"; + // Note: The max page size is 21 for MsSql and 20 for all other data sources, so when using -1 + // this resultset represents all books in the db. + JsonElement actual = await ExecuteGraphQLRequestAsync(graphQLQuery, graphQLQueryName, isAuthenticated: false); + SqlTestHelper.PerformTestEqualJsonStrings(expected, actual.ToString()); } @@ -196,91 +202,96 @@ public async Task RequestNoParamFullConnection() }"; JsonElement actual = await ExecuteGraphQLRequestAsync(graphQLQuery, graphQLQueryName, isAuthenticated: false); + string expected = @"{ - ""items"": [ + ""items"": [ { - ""id"": 1, - ""title"": ""Awesome book"" + ""id"": 1, + ""title"": ""Awesome book"" }, { - ""id"": 2, - ""title"": ""Also Awesome book"" + ""id"": 2, + ""title"": ""Also Awesome book"" }, { - ""id"": 3, - ""title"": ""Great wall of china explained"" + ""id"": 3, + ""title"": ""Great wall of china explained"" }, { - ""id"": 4, - ""title"": ""US history in a nutshell"" + ""id"": 4, + ""title"": ""US history in a nutshell"" }, { - ""id"": 5, - ""title"": ""Chernobyl Diaries"" + ""id"": 5, + ""title"": ""Chernobyl Diaries"" }, { - ""id"": 6, - ""title"": ""The Palace Door"" + ""id"": 6, + ""title"": ""The Palace Door"" }, { - ""id"": 7, - ""title"": ""The Groovy Bar"" + ""id"": 7, + ""title"": ""The Groovy Bar"" }, { - ""id"": 8, - ""title"": ""Time to Eat"" + ""id"": 8, + ""title"": ""Time to Eat"" }, { - ""id"": 9, - ""title"": ""Policy-Test-01"" + ""id"": 9, + ""title"": ""Policy-Test-01"" }, { - ""id"": 10, - ""title"": ""Policy-Test-02"" + ""id"": 10, + ""title"": ""Policy-Test-02"" }, { - ""id"": 11, - ""title"": ""Policy-Test-04"" + ""id"": 11, + ""title"": ""Policy-Test-04"" }, { - ""id"": 12, - ""title"": ""Time to 
Eat 2"" + ""id"": 12, + ""title"": ""Time to Eat 2"" }, { - ""id"": 13, - ""title"": ""Before Sunrise"" + ""id"": 13, + ""title"": ""Before Sunrise"" }, { - ""id"": 14, - ""title"": ""Before Sunset"" + ""id"": 14, + ""title"": ""Before Sunset"" }, { - ""id"": 15, - ""title"": ""SQL_CONN"" + ""id"": 15, + ""title"": ""SQL_CONN"" }, { - ""id"": 16, - ""title"": ""SOME%CONN"" + ""id"": 16, + ""title"": ""SOME%CONN"" }, { - ""id"": 17, - ""title"": ""CONN%_CONN"" + ""id"": 17, + ""title"": ""CONN%_CONN"" }, { - ""id"": 18, - ""title"": ""[Special Book]"" + ""id"": 18, + ""title"": ""[Special Book]"" }, { - ""id"": 19, - ""title"": ""ME\\YOU"" + ""id"": 19, + ""title"": ""ME\\YOU"" }, { - ""id"": 20, - ""title"": ""C:\\\\LIFE"" + ""id"": 20, + ""title"": ""C:\\\\LIFE"" + }, + { + ""id"": 21, + ""title"": """" } - ], - ""endCursor"": null, - ""hasNextPage"": false + ], + ""endCursor"": null, + ""hasNextPage"": false }"; SqlTestHelper.PerformTestEqualJsonStrings(expected, actual.ToString()); diff --git a/src/Service.Tests/SqlTests/GraphQLQueryTests/DwSqlGraphQLQueryTests.cs b/src/Service.Tests/SqlTests/GraphQLQueryTests/DwSqlGraphQLQueryTests.cs index fa977e48d5..8cf55c247d 100644 --- a/src/Service.Tests/SqlTests/GraphQLQueryTests/DwSqlGraphQLQueryTests.cs +++ b/src/Service.Tests/SqlTests/GraphQLQueryTests/DwSqlGraphQLQueryTests.cs @@ -1239,6 +1239,7 @@ public void TestEnableDwNto1JoinQueryFeatureFlagLoadedFromRuntime() { EnableDwNto1JoinQueryOptimization = true }), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) @@ -1261,6 +1262,7 @@ public void TestEnableDwNto1JoinQueryFeatureFlagDefaultValueLoaded() Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) diff --git a/src/Service.Tests/SqlTests/GraphQLQueryTests/GraphQLQueryTestBase.cs b/src/Service.Tests/SqlTests/GraphQLQueryTests/GraphQLQueryTestBase.cs index 55a1becb4a..16c1a878fa 100644 --- 
a/src/Service.Tests/SqlTests/GraphQLQueryTests/GraphQLQueryTestBase.cs +++ b/src/Service.Tests/SqlTests/GraphQLQueryTests/GraphQLQueryTestBase.cs @@ -2295,6 +2295,7 @@ public virtual async Task TestConfigTakesPrecedenceForRelationshipFieldsOverDB( Entity clubEntity = new( Source: new("clubs", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new("club", "clubs"), Permissions: new[] { ConfigurationTests.GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -2304,6 +2305,7 @@ public virtual async Task TestConfigTakesPrecedenceForRelationshipFieldsOverDB( Entity playerEntity = new( Source: new("players", EntitySourceType.Table, null, null), + Fields: null, Rest: new(Enabled: true), GraphQL: new("player", "players"), Permissions: new[] { ConfigurationTests.GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, diff --git a/src/Service.Tests/SqlTests/GraphQLQueryTests/MsSqlGraphQLQueryTests.cs b/src/Service.Tests/SqlTests/GraphQLQueryTests/MsSqlGraphQLQueryTests.cs index bec185e4b4..1d90a4c6f1 100644 --- a/src/Service.Tests/SqlTests/GraphQLQueryTests/MsSqlGraphQLQueryTests.cs +++ b/src/Service.Tests/SqlTests/GraphQLQueryTests/MsSqlGraphQLQueryTests.cs @@ -268,6 +268,22 @@ SELECT title FROM books await QueryWithSingleColumnPrimaryKey(msSqlQuery); } + [TestMethod] + public virtual async Task QueryWithEmptyStringResult() + { + string graphQLQueryName = "book_by_pk"; + string graphQLQuery = @"{ + book_by_pk(id: 21) { + title + } + }"; + + JsonElement actual = await ExecuteGraphQLRequestAsync(graphQLQuery, graphQLQueryName, isAuthenticated: false); + + string title = actual.GetProperty("title").GetString(); + Assert.AreEqual("", title); + } + [TestMethod] public async Task QueryWithSingleColumnPrimaryKeyAndMappings() { @@ -779,6 +795,106 @@ public override async Task TestNoAggregationOptionsForTableWithoutNumericFields( await base.TestNoAggregationOptionsForTableWithoutNumericFields(); } + /// + /// Tests 
that the entity description is present as a GraphQL comment in the generated schema for MSSQL. + /// + [TestMethod] + public void TestEntityDescriptionInGraphQLSchema() + { + Entity entity = CreateEntityWithDescription("This is a test entity description for MSSQL."); + RuntimeConfig config = CreateRuntimeConfig(entity); + List jsonArray = [ + JsonDocument.Parse("{ \"id\": 1, \"name\": \"Test\" }") + ]; + + string actualSchema = Core.Generator.SchemaGenerator.Generate(jsonArray, "TestEntity", config); + string expectedComment = "\"\"\"This is a test entity description for MSSQL.\"\"\""; + Assert.IsTrue(actualSchema.Contains(expectedComment, StringComparison.Ordinal), "Entity description should be present as a GraphQL comment for MSSQL."); + } + + /// + /// Description = null should not emit GraphQL description block. + /// + [TestMethod] + public void TestEntityDescription_Null_NotInGraphQLSchema() + { + Entity entity = CreateEntityWithDescription(null); + RuntimeConfig config = CreateRuntimeConfig(entity); + string schema = Core.Generator.SchemaGenerator.Generate( + [JsonDocument.Parse("{\"id\":1}")], + "TestEntity", + config); + + Assert.IsFalse(schema.Contains("Test entity description null", StringComparison.Ordinal), "Null description must not appear in schema."); + Assert.IsTrue(schema.Contains("type TestEntity", StringComparison.Ordinal), "Type definition should still exist."); + } + + /// + /// Description = "" (empty) should not emit GraphQL description block. 
+ /// + [TestMethod] + public void TestEntityDescription_Empty_NotInGraphQLSchema() + { + Entity entity = CreateEntityWithDescription(string.Empty); + RuntimeConfig config = CreateRuntimeConfig(entity); + string schema = Core.Generator.SchemaGenerator.Generate( + [JsonDocument.Parse("{\"id\":1}")], + "TestEntity", + config); + + Assert.IsFalse(schema.Contains("\"\"\"\"\"\"", StringComparison.Ordinal), "Empty description triple quotes should not be emitted."); + Assert.IsTrue(schema.Contains("type TestEntity", StringComparison.Ordinal), "Type definition should still exist."); + } + + /// + /// Description = whitespace should not emit GraphQL description block. + /// + [TestMethod] + public void TestEntityDescription_Whitespace_NotInGraphQLSchema() + { + Entity entity = CreateEntityWithDescription(" \t "); + RuntimeConfig config = CreateRuntimeConfig(entity); + string schema = Core.Generator.SchemaGenerator.Generate( + [JsonDocument.Parse("{\"id\":1}")], + "TestEntity", + config); + + Assert.IsFalse(schema.Contains("\"\"\"", StringComparison.Ordinal), "Whitespace-only description should not produce a GraphQL description block."); + Assert.IsTrue(schema.Contains("type TestEntity", StringComparison.Ordinal), "Type definition should still exist."); + } + + private static Entity CreateEntityWithDescription(string description) + { + EntitySource source = new("TestTable", EntitySourceType.Table, null, null); + EntityGraphQLOptions gqlOptions = new("TestEntity", "TestEntities", true); + EntityRestOptions restOptions = new(null, "/test", true); + return new( + source, + gqlOptions, + null, + restOptions, + [], + null, + null, + null, + false, + null, + Description: description + ); + } + + private static RuntimeConfig CreateRuntimeConfig(Entity entity) + { + Dictionary entityDict = new() { { "TestEntity", entity } }; + RuntimeEntities entities = new(entityDict); + return new( + "", + new DataSource(DatabaseType.MSSQL, "", null), + entities, + null + ); + } + #endregion } } 
diff --git a/src/Service.Tests/SqlTests/RestApiTests/Delete/DwSqlDeleteApiTest.cs b/src/Service.Tests/SqlTests/RestApiTests/Delete/DwSqlDeleteApiTest.cs index c574db540f..984b252727 100644 --- a/src/Service.Tests/SqlTests/RestApiTests/Delete/DwSqlDeleteApiTest.cs +++ b/src/Service.Tests/SqlTests/RestApiTests/Delete/DwSqlDeleteApiTest.cs @@ -20,7 +20,7 @@ public class DwSqlDeleteApiTests : DeleteApiTestBase { "DeleteOneWithStoredProcedureTest", $"SELECT [id] FROM { _integrationTableName } " + - $"WHERE id = 20" + $"WHERE id = 21" } }; #region Test Fixture Setup diff --git a/src/Service.Tests/SqlTests/RestApiTests/Delete/MsSqlDeleteApiTest.cs b/src/Service.Tests/SqlTests/RestApiTests/Delete/MsSqlDeleteApiTest.cs index 13f4d31cf2..cf8a1f6fc5 100644 --- a/src/Service.Tests/SqlTests/RestApiTests/Delete/MsSqlDeleteApiTest.cs +++ b/src/Service.Tests/SqlTests/RestApiTests/Delete/MsSqlDeleteApiTest.cs @@ -29,7 +29,7 @@ public class MsSqlDeleteApiTests : DeleteApiTestBase // This query is used to confirm that the item no longer exists, not the // actual delete query. 
$"SELECT [id] FROM { _integrationTableName } " + - $"WHERE id = 20 FOR JSON PATH, INCLUDE_NULL_VALUES, WITHOUT_ARRAY_WRAPPER" + $"WHERE id = 21 FOR JSON PATH, INCLUDE_NULL_VALUES, WITHOUT_ARRAY_WRAPPER" } }; #region Test Fixture Setup diff --git a/src/Service.Tests/SqlTests/SqlTestHelper.cs b/src/Service.Tests/SqlTests/SqlTestHelper.cs index e6dbfaa8d1..e739f6cc8c 100644 --- a/src/Service.Tests/SqlTests/SqlTestHelper.cs +++ b/src/Service.Tests/SqlTests/SqlTestHelper.cs @@ -191,18 +191,18 @@ public static void TestForErrorInGraphQLResponse(string response, string message if (message is not null) { Console.WriteLine(response); - Assert.IsTrue(response.Contains(message), $"Message \"{message}\" not found in error"); + Assert.IsTrue(response.Contains(message), $"Message \"{message}\" not found in error {response}"); } if (statusCode != null) { - Assert.IsTrue(response.Contains($"\"code\":\"{statusCode}\""), $"Status code \"{statusCode}\" not found in error"); + Assert.IsTrue(response.Contains($"\"code\":\"{statusCode}\""), $"Status code \"{statusCode}\" not found in error {response}"); } if (path is not null) { Console.WriteLine(response); - Assert.IsTrue(response.Contains(path), $"Path \"{path}\" not found in error"); + Assert.IsTrue(response.Contains(path), $"Path \"{path}\" not found in error {response}"); } } @@ -389,6 +389,7 @@ public static RuntimeConfig InitBasicRuntimeConfigWithNoEntity( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, Authentication: authenticationOptions) ), Entities: new(new Dictionary()) diff --git a/src/Service.Tests/TestHelper.cs b/src/Service.Tests/TestHelper.cs index f1ccd7f13e..b94470b96b 100644 --- a/src/Service.Tests/TestHelper.cs +++ b/src/Service.Tests/TestHelper.cs @@ -78,8 +78,21 @@ public static RuntimeConfigProvider GetRuntimeConfigProvider(FileSystemRuntimeCo /// The source name of the entity. 
public static RuntimeConfig AddMissingEntitiesToConfig(RuntimeConfig config, string entityKey, string entityName, string[] keyfields = null) { + List fields = []; + if (keyfields != null) + { + foreach (string key in keyfields) + { + if (!string.IsNullOrWhiteSpace(key)) + { + fields.Add(new FieldMetadata { Name = key, PrimaryKey = true }); + } + } + } + Entity entity = new( Source: new(entityName, EntitySourceType.Table, null, keyfields), + Fields: fields, GraphQL: new(entityKey, entityKey.Pluralize()), Rest: new(Enabled: true), Permissions: new[] diff --git a/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs b/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs index 3c49f8344e..119e6637c6 100644 --- a/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs +++ b/src/Service.Tests/UnitTests/ConfigValidationUnitTests.cs @@ -116,6 +116,7 @@ public void InvalidCRUDForStoredProcedure( Entity testEntity = new( Source: entitySource, + Fields: null, Rest: new(EntityRestOptions.DEFAULT_HTTP_VERBS_ENABLED_FOR_SP), GraphQL: new(AuthorizationHelpers.TEST_ENTITY, AuthorizationHelpers.TEST_ENTITY + "s"), Permissions: permissionSettings.ToArray(), @@ -192,6 +193,7 @@ public void InvalidActionSpecifiedForARole(string dbPolicy, EntityActionOperatio [DataRow(DatabaseType.MySQL, "", false, DisplayName = "Database Policy left empty for Create passes for MySQL")] [DataRow(DatabaseType.MySQL, " ", false, DisplayName = "Database Policy only whitespace for Create passes for MySQL")] [DataRow(DatabaseType.MSSQL, "2 eq @item.col3", false, DisplayName = "Database Policy defined for Create passes for MSSQL")] + [DataRow(DatabaseType.DWSQL, "2 eq @item.col3", false, DisplayName = "Database Policy defined for Create passes for DWSQL")] public void AddDatabasePolicyToCreateOperation(DatabaseType dbType, string dbPolicy, bool errorExpected) { EntityActionOperation action = EntityActionOperation.Create; @@ -256,6 +258,7 @@ public void TestAddingRelationshipWithInvalidTargetEntity() 
Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap) @@ -316,6 +319,7 @@ public void TestAddingRelationshipWithDisabledGraphQL() Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap) @@ -372,6 +376,7 @@ string relationshipEntity Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap) @@ -460,6 +465,7 @@ public void TestRelationshipWithNoLinkingObjectAndEitherSourceOrTargetFieldIsNul Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap)); @@ -552,6 +558,7 @@ public void TestRelationshipWithoutSourceAndTargetFieldsMatching( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap)); @@ -625,6 +632,7 @@ public void TestRelationshipWithoutSourceAndTargetFieldsAsValidBackingColumns( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap)); @@ -754,6 +762,7 @@ public void TestRelationshipWithoutLinkingSourceAndTargetFieldsMatching( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap)); @@ -992,6 +1001,7 @@ public void TestOperationValidityAndCasing(string operationName, bool exceptionE Entity sampleEntity = new( Source: new(AuthorizationHelpers.TEST_ENTITY, EntitySourceType.Table, null, null), + Fields: null, Rest: null, GraphQL: null, Permissions: new[] { permissionForEntity }, @@ -1010,6 +1020,7 @@ public void TestOperationValidityAndCasing(string operationName, bool exceptionE Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap)); @@ -1082,6 +1093,7 @@ public void ValidateGraphQLTypeNamesFromConfig(string entityNameFromConfig, bool Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap) @@ -1439,21 +1451,27 
@@ public void ValidateValidEntityDefinitionsDoesNotGenerateDuplicateQueries(Databa /// /// GraphQL global path /// REST global path + /// MCP global path /// Exception expected [DataTestMethod] - [DataRow("/graphql", "/graphql", true)] - [DataRow("/api", "/api", true)] - [DataRow("/graphql", "/api", false)] - public void TestGlobalRouteValidation(string graphQLConfiguredPath, string restConfiguredPath, bool expectError) + [DataRow("/graphql", "/graphql", "/mcp", true, DisplayName = "GraphQL and REST conflict (same path).")] + [DataRow("/api", "/api", "/mcp", true, DisplayName = "REST and GraphQL conflict (same path).")] + [DataRow("/graphql", "/api", "/mcp", false, DisplayName = "GraphQL, REST, and MCP distinct.")] + // Extra case: conflict with MCP + [DataRow("/mcp", "/api", "/mcp", true, DisplayName = "MCP and GraphQL conflict (same path).")] + [DataRow("/graphql", "/mcp", "/mcp", true, DisplayName = "MCP and REST conflict (same path).")] + public void TestGlobalRouteValidation(string graphQLConfiguredPath, string restConfiguredPath, string mcpConfiguredPath, bool expectError) { GraphQLRuntimeOptions graphQL = new(Path: graphQLConfiguredPath); RestRuntimeOptions rest = new(Path: restConfiguredPath); + McpRuntimeOptions mcp = new(Path: mcpConfiguredPath); RuntimeConfig configuration = ConfigurationTests.InitMinimalRuntimeConfig( new(DatabaseType.MSSQL, "", Options: null), graphQL, - rest); - string expectedErrorMessage = "Conflicting GraphQL and REST path configuration."; + rest, + mcp); + string expectedErrorMessage = "Conflicting path configuration between GraphQL, REST, and MCP."; try { @@ -1521,6 +1539,7 @@ private static Entity GetSampleEntityUsingSourceAndRelationshipMap( Entity sampleEntity = new( Source: new(source, EntitySourceType.Table, null, null), + Fields: null, Rest: restDetails ?? 
new(Enabled: false), GraphQL: graphQLDetails, Permissions: new[] { permissionForEntity }, @@ -1670,11 +1689,16 @@ public void ValidateApiURIsAreWellFormed( { string graphQLPathPrefix = GraphQLRuntimeOptions.DEFAULT_PATH; string restPathPrefix = RestRuntimeOptions.DEFAULT_PATH; + string mcpPathPrefix = McpRuntimeOptions.DEFAULT_PATH; if (apiType is ApiType.REST) { restPathPrefix = apiPathPrefix; } + else if (apiType is ApiType.MCP) + { + mcpPathPrefix = apiPathPrefix; + } else { graphQLPathPrefix = apiPathPrefix; @@ -1682,11 +1706,13 @@ public void ValidateApiURIsAreWellFormed( GraphQLRuntimeOptions graphQL = new(Path: graphQLPathPrefix); RestRuntimeOptions rest = new(Path: restPathPrefix); + McpRuntimeOptions mcp = new(Enabled: false); RuntimeConfig configuration = ConfigurationTests.InitMinimalRuntimeConfig( new(DatabaseType.MSSQL, "", Options: null), graphQL, - rest); + rest, + mcp); RuntimeConfigValidator configValidator = InitializeRuntimeConfigValidator(); @@ -1709,25 +1735,33 @@ public void ValidateApiURIsAreWellFormed( /// /// Boolean flag to indicate if REST endpoints are enabled globally. /// Boolean flag to indicate if GraphQL endpoints are enabled globally. + /// Boolean flag to indicate if MCP endpoints are enabled globally. /// Boolean flag to indicate if exception is expected. 
- [DataRow(true, true, false, DisplayName = "Both REST and GraphQL enabled.")] - [DataRow(true, false, false, DisplayName = "REST enabled, and GraphQL disabled.")] - [DataRow(false, true, false, DisplayName = "REST disabled, and GraphQL enabled.")] - [DataRow(false, false, true, DisplayName = "Both REST and GraphQL are disabled.")] + [DataRow(true, true, true, false, DisplayName = "REST, GraphQL, and MCP enabled.")] + [DataRow(true, true, false, false, DisplayName = "REST and GraphQL enabled, MCP disabled.")] + [DataRow(true, false, true, false, DisplayName = "REST enabled, GraphQL disabled, and MCP enabled.")] + [DataRow(true, false, false, false, DisplayName = "REST enabled, GraphQL and MCP disabled.")] + [DataRow(false, true, true, false, DisplayName = "REST disabled, GraphQL and MCP enabled.")] + [DataRow(false, true, false, false, DisplayName = "REST disabled, GraphQL enabled, and MCP disabled.")] + [DataRow(false, false, true, false, DisplayName = "REST and GraphQL disabled, MCP enabled.")] + [DataRow(false, false, false, true, DisplayName = "REST, GraphQL, and MCP disabled.")] [DataTestMethod] - public void EnsureFailureWhenBothRestAndGraphQLAreDisabled( + public void EnsureFailureWhenRestAndGraphQLAndMcpAreDisabled( bool restEnabled, bool graphqlEnabled, + bool mcpEnabled, bool expectError) { GraphQLRuntimeOptions graphQL = new(Enabled: graphqlEnabled); RestRuntimeOptions rest = new(Enabled: restEnabled); + McpRuntimeOptions mcp = new(Enabled: mcpEnabled); RuntimeConfig configuration = ConfigurationTests.InitMinimalRuntimeConfig( new(DatabaseType.MSSQL, "", Options: null), graphQL, - rest); - string expectedErrorMessage = "Both GraphQL and REST endpoints are disabled."; + rest, + mcp); + string expectedErrorMessage = "GraphQL, REST, and MCP endpoints are disabled."; try { @@ -1981,6 +2015,7 @@ public void ValidateRestMethodsForEntityInConfig( string entityName = "EntityA"; // Sets REST method for the entity Entity entity = new(Source: new("TEST_SOURCE", 
sourceType, null, null), + Fields: null, Rest: new(Methods: methods), GraphQL: new(entityName, ""), Permissions: Array.Empty(), @@ -1994,6 +2029,7 @@ public void ValidateRestMethodsForEntityInConfig( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null)), Entities: new(entityMap)); @@ -2067,6 +2103,7 @@ public void ValidateRestPathForEntityInConfig( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap) @@ -2137,6 +2174,7 @@ public void ValidateUniqueRestPathsForEntitiesInConfig( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(entityMap) @@ -2197,6 +2235,7 @@ public void ValidateRuntimeBaseRouteSettings( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, Authentication: new(Provider: authenticationProvider, Jwt: null)), BaseRoute: runtimeBaseRoute ), @@ -2302,6 +2341,7 @@ public void TestRuntimeConfigSetupWithNonJsonConstructor() Entity sampleEntity1 = new( Source: entitySource, + Fields: null, GraphQL: null, Rest: null, Permissions: null, @@ -2333,6 +2373,7 @@ public void TestRuntimeConfigSetupWithNonJsonConstructor() Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new RuntimeEntities(entityMap), @@ -2379,13 +2420,22 @@ public void TestRuntimeConfigSetupWithNonJsonConstructor() DisplayName = "DefaultPageSize cannot be 0")] [DataRow(true, 101, 100, "Pagination options invalid. 
The default page size cannot be greater than max page size", DisplayName = "DefaultPageSize cannot be greater than MaxPageSize")] + [DataRow(false, null, null, "", (int)PaginationOptions.DEFAULT_PAGE_SIZE, (int)PaginationOptions.MAX_PAGE_SIZE, null, + DisplayName = "NextLinkRelative should be false when no value provided in config")] + [DataRow(false, null, null, "", (int)PaginationOptions.DEFAULT_PAGE_SIZE, (int)PaginationOptions.MAX_PAGE_SIZE, true, + DisplayName = "NextLinkRelative should be true when explicitly set to true in config")] + [DataRow(false, null, null, "", (int)PaginationOptions.DEFAULT_PAGE_SIZE, (int)PaginationOptions.MAX_PAGE_SIZE, false, + DisplayName = "NextLinkRelative should be false when explicitly set to false in config")] + [DataRow(false, 1000, 10000, "", 1000, 10000, true, + DisplayName = "NextLinkRelative with custom page sizes")] public void ValidatePaginationOptionsInConfig( bool exceptionExpected, int? defaultPageSize, int? maxPageSize, string expectedExceptionMessage, int? expectedDefaultPageSize = null, - int? expectedMaxPageSize = null) + int? expectedMaxPageSize = null, + bool? nextLinkRelative = null) { try { @@ -2395,13 +2445,15 @@ public void ValidatePaginationOptionsInConfig( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, Authentication: null), - Pagination: new PaginationOptions(defaultPageSize, maxPageSize) + Pagination: new PaginationOptions(defaultPageSize, maxPageSize, nextLinkRelative) ), Entities: new(new Dictionary())); Assert.AreEqual((uint)expectedDefaultPageSize, runtimeConfig.DefaultPageSize()); Assert.AreEqual((uint)expectedMaxPageSize, runtimeConfig.MaxPageSize()); + Assert.AreEqual(expected: nextLinkRelative ?? 
false, actual: runtimeConfig.NextLinkRelative()); } catch (DataApiBuilderException dabException) { @@ -2445,6 +2497,7 @@ public void ValidateMaxResponseSizeInConfig( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, Authentication: null, MaxResponseSizeMB: providedMaxResponseSizeMB) ), Entities: new(new Dictionary())); diff --git a/src/Service.Tests/UnitTests/DbExceptionParserUnitTests.cs b/src/Service.Tests/UnitTests/DbExceptionParserUnitTests.cs index ba7f05251a..02801de3e2 100644 --- a/src/Service.Tests/UnitTests/DbExceptionParserUnitTests.cs +++ b/src/Service.Tests/UnitTests/DbExceptionParserUnitTests.cs @@ -38,6 +38,7 @@ public void VerifyCorrectErrorMessage(bool isDeveloperMode, string expected) Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null, isDeveloperMode ? HostMode.Development : HostMode.Production) ), Entities: new(new Dictionary()) @@ -80,6 +81,7 @@ public void TestIsTransientExceptionMethod(bool expected, int number) Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null, HostMode.Development) ), Entities: new(new Dictionary()) diff --git a/src/Service.Tests/UnitTests/MultiSourceQueryExecutionUnitTests.cs b/src/Service.Tests/UnitTests/MultiSourceQueryExecutionUnitTests.cs index 986419f228..e06e140328 100644 --- a/src/Service.Tests/UnitTests/MultiSourceQueryExecutionUnitTests.cs +++ b/src/Service.Tests/UnitTests/MultiSourceQueryExecutionUnitTests.cs @@ -109,7 +109,7 @@ public async Task TestMultiSourceQuery() .AddType() .AddType() .TryAddTypeInterceptor(new ResolverTypeInterceptor(new ExecutionHelper(queryEngineFactory.Object, mutationEngineFactory.Object, provider))); - ISchema schema = schemaBuilder.Create(); + Schema schema = schemaBuilder.Create(); IExecutionResult result = await schema.MakeExecutable().ExecuteAsync(_query); // client is mapped as belonging to the sql data source. 
@@ -251,6 +251,7 @@ public async Task TestMultiSourceTokenSet() Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, Authentication: null) ), DefaultDataSourceName: DATA_SOURCE_NAME_1, @@ -312,6 +313,7 @@ private static RuntimeConfig GenerateMockRuntimeConfigForMultiDbScenario() Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), // use prod mode to avoid having to mock config file watcher Host: new(Cors: null, Authentication: null, HostMode.Production) ), diff --git a/src/Service.Tests/UnitTests/MySqlQueryExecutorUnitTests.cs b/src/Service.Tests/UnitTests/MySqlQueryExecutorUnitTests.cs index 423234aa73..cbfef36664 100644 --- a/src/Service.Tests/UnitTests/MySqlQueryExecutorUnitTests.cs +++ b/src/Service.Tests/UnitTests/MySqlQueryExecutorUnitTests.cs @@ -46,6 +46,7 @@ public async Task TestHandleManagedIdentityAccess( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) diff --git a/src/Service.Tests/UnitTests/PortResolutionHelperTests.cs b/src/Service.Tests/UnitTests/PortResolutionHelperTests.cs new file mode 100644 index 0000000000..8a2d38d3be --- /dev/null +++ b/src/Service.Tests/UnitTests/PortResolutionHelperTests.cs @@ -0,0 +1,133 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using Azure.DataApiBuilder.Service.Utilities; +using Microsoft.VisualStudio.TestTools.UnitTesting; + +namespace Azure.DataApiBuilder.Service.Tests.UnitTests +{ + /// + /// Tests for the class, which resolves the internal port used by the application + /// + [TestClass] + public class PortResolutionHelperTests + { + /// + /// Tests the method to ensure it resolves the correct + /// port. + /// + /// This test method sets the "ASPNETCORE_URLS" environment variable to various test + /// cases and verifies that the method returns the + /// expected port. It handles different URL formats and edge cases, including null or invalid inputs. 
+ /// A string representing the ASP.NET Core URLs to be tested. + /// The expected port number that should be resolved. + [DataTestMethod] + [DataRow("http://localhost:5000", 5000)] + [DataRow("https://localhost:443", 443)] + [DataRow("http://+:1234", 1234)] + [DataRow("https://*:8443", 8443)] + [DataRow("http://localhost:5000;https://localhost:443", 5000)] + [DataRow("https://localhost:443;http://localhost:5000", 5000)] + [DataRow("http://localhost:5000,https://localhost:443", 5000)] + [DataRow(null, 5000)] + [DataRow("", 5000)] + [DataRow("http://localhost", 80)] + [DataRow("https://localhost", 443)] + [DataRow("http://[::1]:5000", 5000)] + [DataRow("http://localhost;https://localhost:8443", 80)] + [DataRow("https://localhost:8443;https://localhost:9443", 8443)] + [DataRow("invalid;http://localhost:5000", 5000)] + [DataRow("http://localhost:5000;invalid", 5000)] + [DataRow("http://+:", 5000)] + [DataRow("https://localhost:5001;http://localhost:5000", 5000)] + [DataRow("https://localhost:5001;https://localhost:5002", 5001)] + public void ResolveInternalPortResolvesCorrectPortPositiveTest(string aspnetcoreUrls, int expectedPort) + { + TestPortResolution(aspnetcoreUrls, null, expectedPort); + } + + /// + /// Tests that the method uses the "DEFAULT_PORT" + /// environment variable when the "ASPNETCORE_URLS" environment variable is not set. + /// + /// This test sets the "DEFAULT_PORT" environment variable to "4321" and verifies that + /// returns this value. It ensures that the method + /// correctly defaults to using "DEFAULT_PORT" when "ASPNETCORE_URLS" is null. + [TestMethod] + public void ResolveInternalPortUsesDefaultPortEnvVarTest() + { + TestPortResolution(null, "4321", 4321); + } + + /// + /// Tests that the method uses the default port when the + /// environment variable ASPNETCORE_URLS is set to invalid values. 
+ /// + /// This test sets the ASPNETCORE_URLS environment variable to invalid URLs and + /// the DEFAULT_PORT environment variable to a valid port number. It verifies that correctly falls back to using the default port specified + /// by DEFAULT_PORT. + [TestMethod] + public void ResolveInternalPortUsesDefaultPortWhenUrlsAreInvalidTest() + { + TestPortResolution("invalid-url;another-invalid", "4321", 4321); + } + + /// + /// Tests that the method falls back to the default port + /// when the DEFAULT_PORT environment variable is set to a non-numeric value. + /// + /// This test sets the DEFAULT_PORT environment variable to an invalid value and + /// verifies that correctly falls back to using + /// the default port of 5000 when the DEFAULT_PORT cannot be parsed as a valid integer. + [TestMethod] + public void ResolveInternalPortFallsBackToDefaultWhenDefaultPortIsInvalidTest() + { + TestPortResolution(null, "abc", 5000); + } + + /// + /// Negative tests for the method. + /// + /// A string representing the ASP.NET Core URLs to be tested. + /// The expected port number that should be resolved. 
+ [DataTestMethod] + [DataRow("http://localhost:5000 https://localhost:443", 5000)] // space invalid, falls back to default + [DataRow("http://localhost:5000|https://localhost:443", 5000)] // invalid delimiter, falls back to default + [DataRow("localhost:5000", 5000)] // missing scheme: fallback to default + [DataRow("http://:", 5000)] // incomplete URL: fallback to default + [DataRow("ftp://localhost:21", 5000)] // unsupported scheme: fallback to default + [DataRow("http://unix:/var/run/app.sock", 80)] // unix socket: defaults to 80 (no port specified) + [DataRow("http://unix:var/run/app.sock", 5000)] // malformed unix socket: fallback to default + [DataRow("http://unix:", 80)] // incomplete unix socket: defaults to 80 + public void ResolveInternalPortResolvesCorrectPortNegativeTest(string aspnetcoreUrls, int expectedPort) + { + TestPortResolution(aspnetcoreUrls, null, expectedPort); + } + + /// + /// Helper method to test port resolution with environment variables. + /// + /// The ASPNETCORE_URLS environment variable value to set. + /// The DEFAULT_PORT environment variable value to set. + /// The expected port number that should be resolved. 
+ private static void TestPortResolution(string aspnetcoreUrls, string defaultPort, int expectedPort) + { + string originalUrls = Environment.GetEnvironmentVariable("ASPNETCORE_URLS"); + string originalDefaultPort = Environment.GetEnvironmentVariable("DEFAULT_PORT"); + Environment.SetEnvironmentVariable("ASPNETCORE_URLS", aspnetcoreUrls); + Environment.SetEnvironmentVariable("DEFAULT_PORT", defaultPort); + try + { + int port = PortResolutionHelper.ResolveInternalPort(); + Assert.AreEqual(expectedPort, port); + } + finally + { + Environment.SetEnvironmentVariable("ASPNETCORE_URLS", originalUrls); + Environment.SetEnvironmentVariable("DEFAULT_PORT", originalDefaultPort); + } + } + } +} diff --git a/src/Service.Tests/UnitTests/PostgreSqlQueryExecutorUnitTests.cs b/src/Service.Tests/UnitTests/PostgreSqlQueryExecutorUnitTests.cs index f0db8b4742..ccaa90b353 100644 --- a/src/Service.Tests/UnitTests/PostgreSqlQueryExecutorUnitTests.cs +++ b/src/Service.Tests/UnitTests/PostgreSqlQueryExecutorUnitTests.cs @@ -57,6 +57,7 @@ public async Task TestHandleManagedIdentityAccess( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) diff --git a/src/Service.Tests/UnitTests/RequestValidatorUnitTests.cs b/src/Service.Tests/UnitTests/RequestValidatorUnitTests.cs index a19823df18..5be1375c0f 100644 --- a/src/Service.Tests/UnitTests/RequestValidatorUnitTests.cs +++ b/src/Service.Tests/UnitTests/RequestValidatorUnitTests.cs @@ -356,11 +356,12 @@ public static void PerformTest( Runtime: new( Rest: new(Path: "/api"), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, Authentication: null) ), Entities: new(new Dictionary() { - { DEFAULT_NAME, new Entity(entitySource, new EntityGraphQLOptions(findRequestContext.EntityName, findRequestContext.EntityName), new EntityRestOptions(new SupportedHttpVerb[0]), null, null, null) } + { DEFAULT_NAME, new Entity(entitySource, new EntityGraphQLOptions(findRequestContext.EntityName, 
findRequestContext.EntityName), null, new EntityRestOptions(new SupportedHttpVerb[0]), null, null, null) } }) ); MockFileSystem fileSystem = new(); diff --git a/src/Service.Tests/UnitTests/RestServiceUnitTests.cs b/src/Service.Tests/UnitTests/RestServiceUnitTests.cs index 9d483bf1d2..1fa1a276ad 100644 --- a/src/Service.Tests/UnitTests/RestServiceUnitTests.cs +++ b/src/Service.Tests/UnitTests/RestServiceUnitTests.cs @@ -115,6 +115,7 @@ public static void InitializeTest(string restRoutePrefix, string entityName) Runtime: new( Rest: new(Path: restRoutePrefix), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) diff --git a/src/Service.Tests/UnitTests/RuntimeConfigLoaderJsonDeserializerTests.cs b/src/Service.Tests/UnitTests/RuntimeConfigLoaderJsonDeserializerTests.cs index d933fa827d..b98de993e2 100644 --- a/src/Service.Tests/UnitTests/RuntimeConfigLoaderJsonDeserializerTests.cs +++ b/src/Service.Tests/UnitTests/RuntimeConfigLoaderJsonDeserializerTests.cs @@ -259,7 +259,7 @@ public void TestNullableOptionalProps() TryParseAndAssertOnDefaults("{" + emptyRuntime, out _); // Test with empty sub properties of runtime - minJson.Append(@"{ ""rest"": { }, ""graphql"": { }, + minJson.Append(@"{ ""rest"": { }, ""graphql"": { }, ""mcp"": { }, ""base-route"" : """","); StringBuilder minJsonWithHostSubProps = new(minJson + @"""telemetry"" : { }, ""host"" : "); StringBuilder minJsonWithTelemetrySubProps = new(minJson + @"""host"" : { }, ""telemetry"" : "); @@ -273,7 +273,7 @@ public void TestNullableOptionalProps() TryParseAndAssertOnDefaults("{" + emptyHostSubProps, out _); // Test with empty telemetry sub-properties - minJsonWithTelemetrySubProps.Append(@"{ ""application-insights"": { }, ""log-level"": { } } }"); + minJsonWithTelemetrySubProps.Append(@"{ ""application-insights"": { }, ""log-level"": { }, ""open-telemetry"": { }, ""azure-log-analytics"": { }, ""file"": { } } }"); string emptyTelemetrySubProps = minJsonWithTelemetrySubProps + 
"}"; TryParseAndAssertOnDefaults("{" + emptyTelemetrySubProps, out _); @@ -423,6 +423,10 @@ public static string GetModifiedJsonString(string[] reps, string enumString) } } }, + ""mcp"": { + ""enabled"": true, + ""path"": """ + reps[++index % reps.Length] + @""" + }, ""host"": { ""mode"": ""development"", ""cors"": { @@ -506,6 +510,10 @@ public static string GetModifiedJsonString(string[] reps, string enumString) ""enabled"": true, ""path"": ""/graphql"" }, + ""mcp"": { + ""enabled"": true, + ""path"": ""/mcp"" + }, ""host"": { ""mode"": ""development"", ""cors"": { @@ -641,6 +649,8 @@ private static bool TryParseAndAssertOnDefaults(string json, out RuntimeConfig p Assert.AreEqual(RestRuntimeOptions.DEFAULT_PATH, parsedConfig.RestPath); Assert.IsTrue(parsedConfig.IsGraphQLEnabled); Assert.AreEqual(GraphQLRuntimeOptions.DEFAULT_PATH, parsedConfig.GraphQLPath); + Assert.IsTrue(parsedConfig.IsMcpEnabled); + Assert.AreEqual(McpRuntimeOptions.DEFAULT_PATH, parsedConfig.McpPath); Assert.IsTrue(parsedConfig.AllowIntrospection); Assert.IsFalse(parsedConfig.IsDevelopmentMode()); Assert.IsTrue(parsedConfig.IsStaticWebAppsIdentityProvider); @@ -648,6 +658,12 @@ private static bool TryParseAndAssertOnDefaults(string json, out RuntimeConfig p Assert.IsTrue(parsedConfig.IsLogLevelNull()); Assert.IsTrue(parsedConfig.Runtime?.Telemetry?.ApplicationInsights is null || !parsedConfig.Runtime.Telemetry.ApplicationInsights.Enabled); + Assert.IsTrue(parsedConfig.Runtime?.Telemetry?.OpenTelemetry is null + || !parsedConfig.Runtime.Telemetry.OpenTelemetry.Enabled); + Assert.IsTrue(parsedConfig.Runtime?.Telemetry?.AzureLogAnalytics is null + || !parsedConfig.Runtime.Telemetry.AzureLogAnalytics.Enabled); + Assert.IsTrue(parsedConfig.Runtime?.Telemetry?.File is null + || !parsedConfig.Runtime.Telemetry.File.Enabled); return true; } diff --git a/src/Service.Tests/UnitTests/SerializationDeserializationTests.cs b/src/Service.Tests/UnitTests/SerializationDeserializationTests.cs index 
2b5e5bf3ba..44978cd6aa 100644 --- a/src/Service.Tests/UnitTests/SerializationDeserializationTests.cs +++ b/src/Service.Tests/UnitTests/SerializationDeserializationTests.cs @@ -428,7 +428,7 @@ private static void VerifyParameterDefinitionSerializationDeserialization(Parame { // test number of properties/fields defined in Column Definition int fields = typeof(ParameterDefinition).GetFields(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance).Length; - Assert.AreEqual(fields, 5); + Assert.AreEqual(fields, 9); // test values expectedParameterDefinition.Equals(deserializedParameterDefinition); } diff --git a/src/Service.Tests/UnitTests/SqlMetadataProviderUnitTests.cs b/src/Service.Tests/UnitTests/SqlMetadataProviderUnitTests.cs index 3c7427971d..8b4ed68f60 100644 --- a/src/Service.Tests/UnitTests/SqlMetadataProviderUnitTests.cs +++ b/src/Service.Tests/UnitTests/SqlMetadataProviderUnitTests.cs @@ -3,19 +3,23 @@ using System; using System.Collections.Generic; +using System.Data.Common; using System.IO; using System.Net; +using System.Text.Json.Nodes; using System.Threading.Tasks; using Azure.DataApiBuilder.Config.DatabasePrimitives; using Azure.DataApiBuilder.Config.ObjectModel; using Azure.DataApiBuilder.Core.Authorization; using Azure.DataApiBuilder.Core.Configurations; +using Azure.DataApiBuilder.Core.Models; using Azure.DataApiBuilder.Core.Resolvers; using Azure.DataApiBuilder.Core.Resolvers.Factories; using Azure.DataApiBuilder.Core.Services; using Azure.DataApiBuilder.Service.Exceptions; using Azure.DataApiBuilder.Service.Tests.Configuration; using Azure.DataApiBuilder.Service.Tests.SqlTests; +using Microsoft.AspNetCore.Http; using Microsoft.Data.SqlClient; using Microsoft.Extensions.Logging; using Microsoft.VisualStudio.TestTools.UnitTesting; @@ -343,6 +347,7 @@ public void ValidateGraphQLReservedNaming_DatabaseColumns(string dbColumnName, s Entity sampleEntity = new( Source: new("sampleElement", EntitySourceType.Table, null, null), + Fields: null, 
Rest: new(Enabled: false), GraphQL: new("", ""), Permissions: new EntityPermission[] { ConfigurationTests.GetMinimalPermissionConfig(AuthorizationResolver.ROLE_ANONYMOUS) }, @@ -399,6 +404,101 @@ public async Task ValidateInferredRelationshipInfoForPgSql() ValidateInferredRelationshipInfoForTables(); } + /// + /// Data-driven test to validate that DataApiBuilderException is thrown for various invalid resultFieldName values + /// during stored procedure result set definition population. + /// + [DataTestMethod, TestCategory(TestCategory.MSSQL)] + [DataRow(null, DisplayName = "Null result field name")] + [DataRow("", DisplayName = "Empty result field name")] + [DataRow(" ", DisplayName = "Multiple spaces result field name")] + public async Task ValidateExceptionForInvalidResultFieldNames(string invalidFieldName) + { + DatabaseEngine = TestCategory.MSSQL; + TestHelper.SetupDatabaseEnvironment(DatabaseEngine); + RuntimeConfig baseConfigFromDisk = SqlTestHelper.SetupRuntimeConfig(); + + // Create a RuntimeEntities with ONLY our test stored procedure entity + Dictionary entitiesDictionary = new() + { + { + "get_book_by_id", new Entity( + Source: new("dbo.get_book_by_id", EntitySourceType.StoredProcedure, null, null), + Fields: null, + Rest: new(Enabled: true), + GraphQL: new("get_book_by_id", "get_book_by_ids", Enabled: true), + Permissions: new EntityPermission[] { + new( + Role: "anonymous", + Actions: new EntityAction[] { + new(Action: EntityActionOperation.Execute, Fields: null, Policy: null) + }) + }, + Relationships: null, + Mappings: null + ) + } + }; + + RuntimeEntities entities = new(entitiesDictionary); + RuntimeConfig runtimeConfig = baseConfigFromDisk with { Entities = entities }; + RuntimeConfigProvider runtimeConfigProvider = TestHelper.GenerateInMemoryRuntimeConfigProvider(runtimeConfig); + ILogger sqlMetadataLogger = new Mock>().Object; + + // Setup query builder + _queryBuilder = new MsSqlQueryBuilder(); + + try + { + string dataSourceName = 
runtimeConfigProvider.GetConfig().DefaultDataSourceName; + + // Create mock query executor that always returns JsonArray with invalid field name + Mock mockQueryExecutor = new(); + + // Create a JsonArray that simulates the stored procedure result with invalid field name + JsonArray invalidFieldJsonArray = new(); + JsonObject jsonObject = new() + { + [BaseSqlQueryBuilder.STOREDPROC_COLUMN_NAME] = invalidFieldName, // This will be null, empty, or whitespace + [BaseSqlQueryBuilder.STOREDPROC_COLUMN_SYSTEMTYPENAME] = "varchar", + [BaseSqlQueryBuilder.STOREDPROC_COLUMN_ISNULLABLE] = false + }; + invalidFieldJsonArray.Add(jsonObject); + + // Setup the mock to return our malformed JsonArray for all ExecuteQueryAsync calls + mockQueryExecutor.Setup(x => x.ExecuteQueryAsync( + It.IsAny(), + It.IsAny>(), + It.IsAny, Task>>(), + It.IsAny(), + It.IsAny(), + It.IsAny>())) + .ReturnsAsync(invalidFieldJsonArray); + + // Setup Mock query manager Factory + Mock queryManagerFactory = new(); + queryManagerFactory.Setup(x => x.GetQueryBuilder(It.IsAny())).Returns(_queryBuilder); + queryManagerFactory.Setup(x => x.GetQueryExecutor(It.IsAny())).Returns(mockQueryExecutor.Object); + + ISqlMetadataProvider sqlMetadataProvider = new MsSqlMetadataProvider( + runtimeConfigProvider, + queryManagerFactory.Object, + sqlMetadataLogger, + dataSourceName); + + await sqlMetadataProvider.InitializeAsync(); + Assert.Fail($"Expected DataApiBuilderException was not thrown for invalid resultFieldName: '{invalidFieldName}'."); + } + catch (DataApiBuilderException ex) + { + Assert.AreEqual(HttpStatusCode.ServiceUnavailable, ex.StatusCode); + Assert.AreEqual(DataApiBuilderException.SubStatusCodes.ErrorInInitialization, ex.SubStatusCode); + Assert.IsTrue(ex.Message.Contains("returns a column without a name")); + } + + TestHelper.UnsetAllDABEnvironmentVariables(); + } + /// /// Helper method for test methods ValidateInferredRelationshipInfoFor{MsSql, MySql, and PgSql}. 
/// This helper validates that an entity's relationship data is correctly inferred based on config and database supplied relationship metadata. diff --git a/src/Service.Tests/UnitTests/SqlQueryExecutorUnitTests.cs b/src/Service.Tests/UnitTests/SqlQueryExecutorUnitTests.cs index 92b076107a..2b62c6b444 100644 --- a/src/Service.Tests/UnitTests/SqlQueryExecutorUnitTests.cs +++ b/src/Service.Tests/UnitTests/SqlQueryExecutorUnitTests.cs @@ -80,6 +80,7 @@ public async Task TestHandleManagedIdentityAccess( Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) @@ -154,6 +155,7 @@ public async Task TestRetryPolicyExhaustingMaxAttempts() Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) @@ -229,6 +231,7 @@ public void Test_DbCommandParameter_PopulatedWithCorrectDbTypes() Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) @@ -344,6 +347,7 @@ public async Task TestHttpContextIsPopulatedWithDbExecutionTime() Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) @@ -446,6 +450,7 @@ public void TestToValidateLockingOfHttpContextObjectDuringCalcuationOfDbExecutio Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(null, null) ), Entities: new(new Dictionary()) @@ -512,6 +517,7 @@ public void ValidateStreamingLogicAsync(int readDataLoops, bool exceptionExpecte Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, Authentication: null, MaxResponseSizeMB: 5) ), Entities: new(new Dictionary())); @@ -573,6 +579,7 @@ public void ValidateStreamingLogicForStoredProcedures(int readDataLoops, bool ex Runtime: new( Rest: new(), GraphQL: new(), + Mcp: new(), Host: new(Cors: null, Authentication: null, MaxResponseSizeMB: 4) ), Entities: new(new Dictionary())); @@ -616,6 +623,51 @@ public void 
ValidateStreamingLogicForStoredProcedures(int readDataLoops, bool ex } } + /// + /// Makes sure the stream logic handles cells with empty strings correctly. + /// + [DataTestMethod, TestCategory(TestCategory.MSSQL)] + public void ValidateStreamingLogicForEmptyCellsAsync() + { + TestHelper.SetupDatabaseEnvironment(TestCategory.MSSQL); + FileSystem fileSystem = new(); + FileSystemRuntimeConfigLoader loader = new(fileSystem); + RuntimeConfig runtimeConfig = new( + Schema: "UnitTestSchema", + DataSource: new DataSource(DatabaseType: DatabaseType.MSSQL, "", Options: null), + Runtime: new( + Rest: new(), + GraphQL: new(), + Mcp: new(), + Host: new(Cors: null, Authentication: null, MaxResponseSizeMB: 5) + ), + Entities: new(new Dictionary())); + + RuntimeConfigProvider runtimeConfigProvider = TestHelper.GenerateInMemoryRuntimeConfigProvider(runtimeConfig); + + Mock>> queryExecutorLogger = new(); + Mock httpContextAccessor = new(); + DbExceptionParser dbExceptionParser = new MsSqlDbExceptionParser(runtimeConfigProvider); + + // Instantiate the MsSqlQueryExecutor and Setup parameters for the query + MsSqlQueryExecutor msSqlQueryExecutor = new(runtimeConfigProvider, dbExceptionParser, queryExecutorLogger.Object, httpContextAccessor.Object); + + Mock dbDataReader = new(); + dbDataReader.Setup(d => d.HasRows).Returns(true); + + // Make sure GetChars returns 0 when buffer is null + dbDataReader.Setup(x => x.GetChars(It.IsAny(), It.IsAny(), null, It.IsAny(), It.IsAny())).Returns(0); + + // Make sure available size is set to > 0 + int availableSize = (int)runtimeConfig.MaxResponseSizeMB() * 1024 * 1024; + + // Stream char data should not return an exception + availableSize -= msSqlQueryExecutor.StreamCharData( + dbDataReader: dbDataReader.Object, availableSize: availableSize, resultJsonString: new(), ordinal: 0); + + Assert.AreEqual(availableSize, (int)runtimeConfig.MaxResponseSizeMB() * 1024 * 1024); + } + [TestCleanup] public void CleanupAfterEachTest() { diff --git 
a/src/Service.Tests/dab-config.DwSql.json b/src/Service.Tests/dab-config.DwSql.json index c4a9e6b09d..78f9e91480 100644 --- a/src/Service.Tests/dab-config.DwSql.json +++ b/src/Service.Tests/dab-config.DwSql.json @@ -2,7 +2,7 @@ "$schema": "https://github.com/Azure/data-api-builder/releases/download/vmajor.minor.patch/dab.draft.schema.json", "data-source": { "database-type": "dwsql", - "connection-string": "Server=tcp:{your_server}.database.windows.net,1433;Database={your_database};User ID={your_user_name};Password={your_password_here};Encrypt=True;TrustServerCertificate=False;Connection Timeout=30;", + "connection-string": "Server=tcp:127.0.0.1,1433;Persist Security Info=False;User ID=sa;Password=REPLACEME;MultipleActiveResultSets=False;Connection Timeout=5;", "options": { "set-session-context": true } @@ -76,23 +76,6 @@ } ] }, - { - "role": "database_policy_tester", - "actions": [ - { - "action": "update", - "policy": { - "database": "@item.id ne 1234" - } - }, - { - "action": "read", - "policy": { - "database": "@item.id ne 1234 or @item.id gt 1940" - } - } - ] - }, { "role": "policy_tester_01", "actions": [ @@ -252,14 +235,41 @@ "action": "delete" } ] + }, + { + "role": "database_policy_tester", + "actions": [ + { + "action": "create", + "policy": { + "database": "@item.name ne 'New publisher'" + } + }, + { + "action": "update", + "policy": { + "database": "@item.id ne 1234" + } + }, + { + "action": "read", + "policy": { + "database": "@item.id ne 1234 or @item.id gt 1940" + } + } + ] } ], "relationships": { "books": { "cardinality": "many", "target.entity": "Book", - "source.fields": [ "id" ], - "target.fields": [ "publisher_id" ], + "source.fields": [ + "id" + ], + "target.fields": [ + "publisher_id" + ], "linking.source.fields": [], "linking.target.fields": [] } @@ -336,6 +346,20 @@ } ] }, + { + "role": "database_policy_tester", + "actions": [ + { + "action": "read" + }, + { + "action": "update", + "policy": { + "database": "@item.pieceid ne 1" + } + } + ] + 
}, { "role": "test_role_with_noread", "actions": [ @@ -420,14 +444,6 @@ "enabled": true }, "permissions": [ - { - "role": "anonymous", - "actions": [ - { - "action": "read" - } - ] - }, { "role": "authenticated", "actions": [ @@ -445,6 +461,14 @@ } ] }, + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + }, { "role": "TestNestedFilterFieldIsNull_ColumnForbidden", "actions": [ @@ -857,27 +881,22 @@ ] } ], + "mappings": { + "id": "id", + "title": "title" + }, "relationships": { "websiteplacement": { "cardinality": "one", "target.entity": "BookWebsitePlacement", - "source.fields": [ "id" ], - "target.fields": [ "book_id" ], - "linking.source.fields": [], - "linking.target.fields": [] - }, - "authors": { - "cardinality": "many", - "target.entity": "Author", - "source.fields": [ "id" ], - "target.fields": [ "id" ], - "linking.object": "book_author_link", - "linking.source.fields": [ + "source.fields": [ + "id" + ], + "target.fields": [ "book_id" ], - "linking.target.fields": [ - "author_id" - ] + "linking.source.fields": [], + "linking.target.fields": [] }, "publishers": { "cardinality": "one", @@ -887,20 +906,39 @@ ], "target.fields": [ "id" - ] + ], + "linking.source.fields": [], + "linking.target.fields": [] }, "reviews": { "cardinality": "many", "target.entity": "Review", - "source.fields": [ "id" ], - "target.fields": [ "book_id" ], + "source.fields": [ + "id" + ], + "target.fields": [ + "book_id" + ], "linking.source.fields": [], "linking.target.fields": [] + }, + "authors": { + "cardinality": "many", + "target.entity": "Author", + "source.fields": [ + "id" + ], + "target.fields": [ + "id" + ], + "linking.object": "book_author_link", + "linking.source.fields": [ + "book_id" + ], + "linking.target.fields": [ + "author_id" + ] } - }, - "mappings": { - "id": "id", - "title": "title" } }, "BookWebsitePlacement": { @@ -958,8 +996,12 @@ "books": { "cardinality": "one", "target.entity": "Book", - "source.fields": [ "book_id" ], - "target.fields": [ 
"id" ], + "source.fields": [ + "book_id" + ], + "target.fields": [ + "id" + ], "linking.source.fields": [], "linking.target.fields": [] } @@ -1014,11 +1056,19 @@ "books": { "cardinality": "many", "target.entity": "Book", - "source.fields": [ "id" ], - "target.fields": [ "id" ], + "source.fields": [ + "id" + ], + "target.fields": [ + "id" + ], "linking.object": "book_author_link", - "linking.source.fields": [ "author_id" ], - "linking.target.fields": [ "book_id" ] + "linking.source.fields": [ + "author_id" + ], + "linking.target.fields": [ + "book_id" + ] } } }, @@ -1078,8 +1128,12 @@ "books": { "cardinality": "one", "target.entity": "Book", - "source.fields": [ "book_id" ], - "target.fields": [ "id" ], + "source.fields": [ + "book_id" + ], + "target.fields": [ + "id" + ], "linking.source.fields": [], "linking.target.fields": [] } @@ -1183,8 +1237,12 @@ "myseries": { "cardinality": "one", "target.entity": "series", - "source.fields": [ "series_id" ], - "target.fields": [ "id" ], + "source.fields": [ + "series_id" + ], + "target.fields": [ + "id" + ], "linking.source.fields": [], "linking.target.fields": [] } @@ -1470,8 +1528,12 @@ "fungus": { "cardinality": "one", "target.entity": "Fungus", - "source.fields": [ "species" ], - "target.fields": [ "habitat" ], + "source.fields": [ + "species" + ], + "target.fields": [ + "habitat" + ], "linking.source.fields": [], "linking.target.fields": [] } @@ -1552,11 +1614,15 @@ "spores": "hazards" }, "relationships": { - "shrub": { + "Shrub": { "cardinality": "one", "target.entity": "Shrub", - "source.fields": [ "habitat" ], - "target.fields": [ "species" ], + "source.fields": [ + "habitat" + ], + "target.fields": [ + "species" + ], "linking.source.fields": [], "linking.target.fields": [] } @@ -1667,19 +1733,6 @@ } ] }, - { - "role": "TestNestedFilterManyOne_ColumnForbidden", - "actions": [ - { - "action": "read", - "fields": { - "exclude": [ - "name" - ] - } - } - ] - }, { "role": "TestNestedFilterManyOne_EntityReadForbidden", 
"actions": [ @@ -1694,6 +1747,19 @@ } ] }, + { + "role": "TestNestedFilterManyOne_ColumnForbidden", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [ + "name" + ] + } + } + ] + }, { "role": "TestNestedFilterOneMany_ColumnForbidden", "actions": [ @@ -1715,8 +1781,12 @@ "comics": { "cardinality": "many", "target.entity": "Comic", - "source.fields": [ "id" ], - "target.fields": [ "series_id" ], + "source.fields": [ + "id" + ], + "target.fields": [ + "series_id" + ], "linking.source.fields": [], "linking.target.fields": [] } @@ -2209,16 +2279,16 @@ } ] }, - "GetBooks": { + "GetBook": { "source": { - "object": "get_books", + "object": "get_book_by_id", "type": "stored-procedure" }, "graphql": { - "enabled": true, - "operation": "query", + "enabled": false, + "operation": "mutation", "type": { - "singular": "GetBooks", + "singular": "GetBook", "plural": "GetBooks" } }, @@ -2247,16 +2317,16 @@ } ] }, - "GetBook": { + "GetBooks": { "source": { - "object": "get_book_by_id", + "object": "get_books", "type": "stored-procedure" }, "graphql": { - "enabled": false, - "operation": "mutation", + "enabled": true, + "operation": "query", "type": { - "singular": "GetBook", + "singular": "GetBooks", "plural": "GetBooks" } }, @@ -2301,7 +2371,7 @@ "rest": { "enabled": true, "methods": [ - "get" + "post" ] }, "permissions": [ @@ -2342,7 +2412,7 @@ "rest": { "enabled": true, "methods": [ - "get" + "post" ] }, "permissions": [ @@ -2380,7 +2450,7 @@ "rest": { "enabled": true, "methods": [ - "get" + "post" ] }, "permissions": [ @@ -2444,21 +2514,17 @@ } ] }, - "InsertAndDisplayAllBooksUnderGivenPublisher": { + "DeleteLastInsertedBook": { "source": { - "object": "insert_and_display_all_books_for_given_publisher", - "type": "stored-procedure", - "parameters": { - "title": "MyTitle", - "publisher_name": "MyPublisher" - } + "object": "delete_last_inserted_book", + "type": "stored-procedure" }, "graphql": { "enabled": true, "operation": "mutation", "type": { - "singular": 
"InsertAndDisplayAllBooksUnderGivenPublisher", - "plural": "InsertAndDisplayAllBooksUnderGivenPublishers" + "singular": "DeleteLastInsertedBook", + "plural": "DeleteLastInsertedBooks" } }, "rest": { @@ -2528,17 +2594,21 @@ } ] }, - "DeleteLastInsertedBook": { + "InsertAndDisplayAllBooksUnderGivenPublisher": { "source": { - "object": "delete_last_inserted_book", - "type": "stored-procedure" + "object": "insert_and_display_all_books_for_given_publisher", + "type": "stored-procedure", + "parameters": { + "title": "MyTitle", + "publisher_name": "MyPublisher" + } }, "graphql": { "enabled": true, "operation": "mutation", "type": { - "singular": "DeleteLastInsertedBook", - "plural": "DeleteLastInsertedBooks" + "singular": "InsertAndDisplayAllBooksUnderGivenPublisher", + "plural": "InsertAndDisplayAllBooksUnderGivenPublishers" } }, "rest": { @@ -2566,43 +2636,6 @@ } ] }, - "dbo_DimAccount": { - "source": "dbo.DimAccount", - "permissions": [ - { - "role": "anonymous", - "actions": [ - "read", - "create", - "update", - "delete" - ] - } - ], - "relationships": { - - "parent_account": { - "cardinality": "one", - "target.entity": "dbo_DimAccount", - "source.fields": [ - "ParentAccountKey" - ], - "target.fields": [ - "AccountKey" - ] - }, - "child_accounts": { - "cardinality": "many", - "target.entity": "dbo_DimAccount", - "source.fields": [ - "AccountKey" - ], - "target.fields": [ - "ParentAccountKey" - ] - } - } - }, "DateOnlyTable": { "source": { "object": "date_only_table", @@ -2627,10 +2660,62 @@ "actions": [ { "action": "*" + } + ] + } + ] + }, + "dbo_DimAccount": { + "source": { + "object": "DimAccount", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "dbo_DimAccount", + "plural": "dbo_DimAccounts" } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "*" + } ] + } + ], + "relationships": { + "parent_account": { + "cardinality": "one", + "target.entity": "dbo_DimAccount", + 
"source.fields": [ + "ParentAccountKey" + ], + "target.fields": [ + "AccountKey" + ], + "linking.source.fields": [], + "linking.target.fields": [] + }, + "child_accounts": { + "cardinality": "many", + "target.entity": "dbo_DimAccount", + "source.fields": [ + "AccountKey" + ], + "target.fields": [ + "ParentAccountKey" + ], + "linking.source.fields": [], + "linking.target.fields": [] + } } - ] } } -} +} \ No newline at end of file diff --git a/src/Service.Tests/dab-config.MsSql.json b/src/Service.Tests/dab-config.MsSql.json index c1eb906572..d5e903d4f3 100644 --- a/src/Service.Tests/dab-config.MsSql.json +++ b/src/Service.Tests/dab-config.MsSql.json @@ -23,6 +23,11 @@ } } }, + "mcp": { + "enabled": true, + "path": "/mcp", + "dml-tools": true + }, "host": { "cors": { "origins": [ @@ -1969,8 +1974,12 @@ "fungus": { "cardinality": "one", "target.entity": "Fungus", - "source.fields": [ "species" ], - "target.fields": [ "habitat" ], + "source.fields": [ + "species" + ], + "target.fields": [ + "habitat" + ], "linking.source.fields": [], "linking.target.fields": [] } @@ -2048,11 +2057,15 @@ "spores": "hazards" }, "relationships": { - "shrub": { + "Shrub": { "cardinality": "one", "target.entity": "Shrub", - "source.fields": [ "habitat" ], - "target.fields": [ "species" ], + "source.fields": [ + "habitat" + ], + "target.fields": [ + "species" + ], "linking.source.fields": [], "linking.target.fields": [] } @@ -2306,7 +2319,8 @@ "Notebook": { "source": { "object": "notebooks", - "type": "table" + "type": "table", + "object-description": "Table containing notebook information" }, "graphql": { "enabled": true, @@ -3572,6 +3586,19 @@ "action": "read" } ] + }, + { + "role": "TestFieldExcludedForAggregation", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [ + "publisher_id" + ] + } + } + ] } ], "mappings": { @@ -3790,6 +3817,36 @@ ] } ] + }, + "GetBooksAuth": { + "source": { + "object": "get_books", + "type": "stored-procedure" + }, + "graphql": { + "enabled": 
true, + "operation": "query", + "type": { + "singular": "GetBooksAuth", + "plural": "GetBooksAuths" + } + }, + "rest": { + "enabled": true, + "methods": [ + "get" + ] + }, + "permissions": [ + { + "role": "teststoredprocauth", + "actions": [ + { + "action": "execute" + } + ] + } + ] } } } diff --git a/src/Service/Azure.DataApiBuilder.Service.csproj b/src/Service/Azure.DataApiBuilder.Service.csproj index e757ca4ee8..275bd03def 100644 --- a/src/Service/Azure.DataApiBuilder.Service.csproj +++ b/src/Service/Azure.DataApiBuilder.Service.csproj @@ -1,4 +1,4 @@ - + net8.0 @@ -45,7 +45,6 @@ - @@ -54,6 +53,7 @@ + @@ -74,6 +74,8 @@ + + @@ -99,9 +101,13 @@ + + + + diff --git a/src/Service/HealthCheck/HealthCheckHelper.cs b/src/Service/HealthCheck/HealthCheckHelper.cs index 5361a55da5..452cb803a9 100644 --- a/src/Service/HealthCheck/HealthCheckHelper.cs +++ b/src/Service/HealthCheck/HealthCheckHelper.cs @@ -2,6 +2,7 @@ // Licensed under the MIT License. using System; +using System.Collections.Concurrent; using System.Collections.Generic; using System.Diagnostics; using System.Linq; @@ -43,7 +44,7 @@ public HealthCheckHelper(ILogger logger, HttpUtilities httpUt /// /// GetHealthCheckResponse is the main function which fetches the HttpContext and then creates the comprehensive health check report. - /// Serializes the report to JSON and returns the response. + /// Serializes the report to JSON and returns the response. /// /// RuntimeConfig /// This function returns the comprehensive health report after calculating the response time of each datasource, rest and graphql health queries. @@ -53,13 +54,13 @@ public async Task GetHealthCheckResponseAsync(Ru // If the response has already been created, it will be reused. 
_logger.LogTrace("Comprehensive Health check is enabled in the runtime configuration."); - ComprehensiveHealthCheckReport ComprehensiveHealthCheckReport = new(); - UpdateVersionAndAppName(ref ComprehensiveHealthCheckReport); - UpdateTimestampOfResponse(ref ComprehensiveHealthCheckReport); - UpdateDabConfigurationDetails(ref ComprehensiveHealthCheckReport, runtimeConfig); - await UpdateHealthCheckDetailsAsync(ComprehensiveHealthCheckReport, runtimeConfig); - UpdateOverallHealthStatus(ref ComprehensiveHealthCheckReport); - return ComprehensiveHealthCheckReport; + ComprehensiveHealthCheckReport comprehensiveHealthCheckReport = new(); + UpdateVersionAndAppName(ref comprehensiveHealthCheckReport); + UpdateTimestampOfResponse(ref comprehensiveHealthCheckReport); + UpdateDabConfigurationDetails(ref comprehensiveHealthCheckReport, runtimeConfig); + await UpdateHealthCheckDetailsAsync(comprehensiveHealthCheckReport, runtimeConfig); + UpdateOverallHealthStatus(ref comprehensiveHealthCheckReport); + return comprehensiveHealthCheckReport; } // Updates the incoming role header with the appropriate value from the request headers. @@ -133,12 +134,13 @@ private static void UpdateTimestampOfResponse(ref ComprehensiveHealthCheckReport } // Updates the DAB configuration details coming from RuntimeConfig for the Health report. 
- private static void UpdateDabConfigurationDetails(ref ComprehensiveHealthCheckReport ComprehensiveHealthCheckReport, RuntimeConfig runtimeConfig) + private static void UpdateDabConfigurationDetails(ref ComprehensiveHealthCheckReport comprehensiveHealthCheckReport, RuntimeConfig runtimeConfig) { - ComprehensiveHealthCheckReport.ConfigurationDetails = new ConfigurationDetails + comprehensiveHealthCheckReport.ConfigurationDetails = new ConfigurationDetails { Rest = runtimeConfig.IsRestEnabled, GraphQL = runtimeConfig.IsGraphQLEnabled, + Mcp = runtimeConfig.IsMcpEnabled, Caching = runtimeConfig.IsCachingEnabled, Telemetry = runtimeConfig?.Runtime?.Telemetry != null, Mode = runtimeConfig?.Runtime?.Host?.Mode ?? HostMode.Production, // Modify to runtimeConfig.HostMode in Roles PR @@ -146,30 +148,30 @@ private static void UpdateDabConfigurationDetails(ref ComprehensiveHealthCheckRe } // Main function to internally call for data source and entities health check. - private async Task UpdateHealthCheckDetailsAsync(ComprehensiveHealthCheckReport ComprehensiveHealthCheckReport, RuntimeConfig runtimeConfig) + private async Task UpdateHealthCheckDetailsAsync(ComprehensiveHealthCheckReport comprehensiveHealthCheckReport, RuntimeConfig runtimeConfig) { - ComprehensiveHealthCheckReport.Checks = new List(); - await UpdateDataSourceHealthCheckResultsAsync(ComprehensiveHealthCheckReport, runtimeConfig); - await UpdateEntityHealthCheckResultsAsync(ComprehensiveHealthCheckReport, runtimeConfig); + comprehensiveHealthCheckReport.Checks = new List(); + await UpdateDataSourceHealthCheckResultsAsync(comprehensiveHealthCheckReport, runtimeConfig); + await UpdateEntityHealthCheckResultsAsync(comprehensiveHealthCheckReport, runtimeConfig); } // Updates the DataSource Health Check Results in the response. 
- private async Task UpdateDataSourceHealthCheckResultsAsync(ComprehensiveHealthCheckReport ComprehensiveHealthCheckReport, RuntimeConfig runtimeConfig) + private async Task UpdateDataSourceHealthCheckResultsAsync(ComprehensiveHealthCheckReport comprehensiveHealthCheckReport, RuntimeConfig runtimeConfig) { - if (ComprehensiveHealthCheckReport.Checks != null && runtimeConfig.DataSource.IsDatasourceHealthEnabled) + if (comprehensiveHealthCheckReport.Checks != null && runtimeConfig.DataSource.IsDatasourceHealthEnabled) { string query = Utilities.GetDatSourceQuery(runtimeConfig.DataSource.DatabaseType); (int, string?) response = await ExecuteDatasourceQueryCheckAsync(query, runtimeConfig.DataSource.ConnectionString); bool isResponseTimeWithinThreshold = response.Item1 >= 0 && response.Item1 < runtimeConfig.DataSource.DatasourceThresholdMs; // Add DataSource Health Check Results - ComprehensiveHealthCheckReport.Checks.Add(new HealthCheckResultEntry + comprehensiveHealthCheckReport.Checks.Add(new HealthCheckResultEntry { Name = runtimeConfig?.DataSource?.Health?.Name ?? runtimeConfig?.DataSource?.DatabaseType.ToString(), ResponseTimeData = new ResponseTimeData { ResponseTimeMs = response.Item1, - ThresholdMs = runtimeConfig?.DataSource.DatasourceThresholdMs + ThresholdMs = runtimeConfig?.DataSource?.DatasourceThresholdMs }, Exception = !isResponseTimeWithinThreshold ? TIME_EXCEEDED_ERROR_MESSAGE : response.Item2, Tags = [HealthCheckConstants.DATASOURCE], @@ -194,26 +196,64 @@ private async Task UpdateDataSourceHealthCheckResultsAsync(ComprehensiveHealthCh return (HealthCheckConstants.ERROR_RESPONSE_TIME_MS, errorMessage); } - // Updates the Entity Health Check Results in the response. + // Updates the Entity Health Check Results in the response. // Goes through the entities one by one and executes the rest and graphql checks (if enabled). 
- private async Task UpdateEntityHealthCheckResultsAsync(ComprehensiveHealthCheckReport ComprehensiveHealthCheckReport, RuntimeConfig runtimeConfig) + private async Task UpdateEntityHealthCheckResultsAsync(ComprehensiveHealthCheckReport report, RuntimeConfig runtimeConfig) { - if (runtimeConfig?.Entities != null && runtimeConfig.Entities.Entities.Any()) + List> enabledEntities = runtimeConfig.Entities.Entities + .Where(e => e.Value.IsEntityHealthEnabled) + .ToList(); + + if (enabledEntities.Count == 0) + { + _logger.LogInformation("No enabled entities found for health checks. Skipping entity health checks."); + return; + } + + ConcurrentBag concurrentChecks = new(); + + // Use MaxQueryParallelism from RuntimeConfig or default to RuntimeHealthCheckConfig.DEFAULT_MAX_QUERY_PARALLELISM + int maxParallelism = runtimeConfig.Runtime?.Health?.MaxQueryParallelism ?? RuntimeHealthCheckConfig.DEFAULT_MAX_QUERY_PARALLELISM; + + _logger.LogInformation("Executing health checks for {Count} enabled entities with parallelism of {MaxParallelism}.", enabledEntities.Count, maxParallelism); + + // Executes health checks for all enabled entities in parallel, with a maximum degree of parallelism + // determined by configuration (or a default). Each entity's health check runs as an independent task. + // Results are collected in a thread-safe ConcurrentBag. This approach significantly improves performance + // for large numbers of entities by utilizing available CPU and I/O resources efficiently. 
+ await Parallel.ForEachAsync(enabledEntities, new ParallelOptions { MaxDegreeOfParallelism = maxParallelism }, async (entity, _) => { - foreach (KeyValuePair Entity in runtimeConfig.Entities.Entities) + try { - if (Entity.Value.IsEntityHealthEnabled) + ComprehensiveHealthCheckReport localReport = new() { - await PopulateEntityHealthAsync(ComprehensiveHealthCheckReport, Entity, runtimeConfig); + Checks = new List() + }; + + await PopulateEntityHealthAsync(localReport, entity, runtimeConfig); + + if (localReport.Checks != null) + { + foreach (HealthCheckResultEntry check in localReport.Checks) + { + concurrentChecks.Add(check); + } } } - } + catch (Exception ex) + { + _logger.LogError(ex, "Error processing entity '{EntityKey}'", entity.Key); + } + }); + + report.Checks ??= new List(); + report.Checks.AddRange(concurrentChecks); } // Populates the Entity Health Check Results in the response for a particular entity. // Checks for Rest enabled and executes the rest query. // Checks for GraphQL enabled and executes the graphql query. - private async Task PopulateEntityHealthAsync(ComprehensiveHealthCheckReport ComprehensiveHealthCheckReport, KeyValuePair entity, RuntimeConfig runtimeConfig) + private async Task PopulateEntityHealthAsync(ComprehensiveHealthCheckReport comprehensiveHealthCheckReport, KeyValuePair entity, RuntimeConfig runtimeConfig) { // Global Rest and GraphQL Runtime Options RuntimeOptions? runtimeOptions = runtimeConfig.Runtime; @@ -226,7 +266,7 @@ private async Task PopulateEntityHealthAsync(ComprehensiveHealthCheckReport Comp { if (runtimeOptions.IsRestEnabled && entityValue.IsRestEnabled) { - ComprehensiveHealthCheckReport.Checks ??= new List(); + comprehensiveHealthCheckReport.Checks ??= new List(); // In case of REST API, use the path specified in [entity.path] (if present). // The path is trimmed to remove the leading '/' character. 
@@ -236,7 +276,7 @@ private async Task PopulateEntityHealthAsync(ComprehensiveHealthCheckReport Comp bool isResponseTimeWithinThreshold = response.Item1 >= 0 && response.Item1 < entityValue.EntityThresholdMs; // Add Entity Health Check Results - ComprehensiveHealthCheckReport.Checks.Add(new HealthCheckResultEntry + comprehensiveHealthCheckReport.Checks.Add(new HealthCheckResultEntry { Name = entityKeyName, ResponseTimeData = new ResponseTimeData @@ -252,12 +292,12 @@ private async Task PopulateEntityHealthAsync(ComprehensiveHealthCheckReport Comp if (runtimeOptions.IsGraphQLEnabled && entityValue.IsGraphQLEnabled) { - ComprehensiveHealthCheckReport.Checks ??= new List(); + comprehensiveHealthCheckReport.Checks ??= new List(); - (int, string?) response = await ExecuteGraphQLEntityQueryAsync(runtimeConfig.GraphQLPath, entityValue, entityKeyName); + (int, string?) response = await ExecuteGraphQlEntityQueryAsync(runtimeConfig.GraphQLPath, entityValue, entityKeyName); bool isResponseTimeWithinThreshold = response.Item1 >= 0 && response.Item1 < entityValue.EntityThresholdMs; - ComprehensiveHealthCheckReport.Checks.Add(new HealthCheckResultEntry + comprehensiveHealthCheckReport.Checks.Add(new HealthCheckResultEntry { Name = entityKeyName, ResponseTimeData = new ResponseTimeData @@ -290,7 +330,7 @@ private async Task PopulateEntityHealthAsync(ComprehensiveHealthCheckReport Comp } // Executes the GraphQL Entity Query and keeps track of the response time and error message. - private async Task<(int, string?)> ExecuteGraphQLEntityQueryAsync(string graphqlUriSuffix, Entity entity, string entityName) + private async Task<(int, string?)> ExecuteGraphQlEntityQueryAsync(string graphqlUriSuffix, Entity entity, string entityName) { string? 
errorMessage = null; if (entity != null) diff --git a/src/Service/HealthCheck/Model/ConfigurationDetails.cs b/src/Service/HealthCheck/Model/ConfigurationDetails.cs index c3989e0167..9ff007754e 100644 --- a/src/Service/HealthCheck/Model/ConfigurationDetails.cs +++ b/src/Service/HealthCheck/Model/ConfigurationDetails.cs @@ -18,6 +18,9 @@ public record ConfigurationDetails [JsonPropertyName("graphql")] public bool GraphQL { get; init; } + [JsonPropertyName("mcp")] + public bool Mcp { get; init; } + [JsonPropertyName("caching")] public bool Caching { get; init; } diff --git a/src/Service/Program.cs b/src/Service/Program.cs index 6535069d3c..1059fd52ff 100644 --- a/src/Service/Program.cs +++ b/src/Service/Program.cs @@ -12,6 +12,7 @@ using Azure.DataApiBuilder.Service.Telemetry; using Microsoft.ApplicationInsights; using Microsoft.AspNetCore; +using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.Hosting; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.Hosting; @@ -20,6 +21,9 @@ using OpenTelemetry.Exporter; using OpenTelemetry.Logs; using OpenTelemetry.Resources; +using Serilog; +using Serilog.Core; +using Serilog.Extensions.Logging; namespace Azure.DataApiBuilder.Service { @@ -132,9 +136,11 @@ private static ParseResult GetParseResult(Command cmd, string[] args) /// /// Creates a LoggerFactory and add filter with the given LogLevel. /// - /// minimum log level. + /// Minimum log level. /// Telemetry client - public static ILoggerFactory GetLoggerFactoryForLogLevel(LogLevel logLevel, TelemetryClient? appTelemetryClient = null, LogLevelInitializer? logLevelInitializer = null) + /// Hot-reloadable log level + /// Core Serilog logging pipeline + public static ILoggerFactory GetLoggerFactoryForLogLevel(LogLevel logLevel, TelemetryClient? appTelemetryClient = null, LogLevelInitializer? logLevelInitializer = null, Logger? 
serilogLogger = null) { return LoggerFactory .Create(builder => @@ -195,6 +201,34 @@ public static ILoggerFactory GetLoggerFactoryForLogLevel(LogLevel logLevel, Tele }); } + if (Startup.IsAzureLogAnalyticsAvailable(Startup.AzureLogAnalyticsOptions)) + { + builder.AddProvider(new AzureLogAnalyticsLoggerProvider(Startup.CustomLogCollector)); + + if (logLevelInitializer is null) + { + builder.AddFilter(category: string.Empty, logLevel); + } + else + { + builder.AddFilter(category: string.Empty, level => level >= logLevelInitializer.MinLogLevel); + } + } + + if (Startup.FileSinkOptions.Enabled && serilogLogger is not null) + { + builder.AddSerilog(serilogLogger); + + if (logLevelInitializer is null) + { + builder.AddFilter(category: string.Empty, logLevel); + } + else + { + builder.AddFilter(category: string.Empty, level => level >= logLevelInitializer.MinLogLevel); + } + } + builder.AddConsole(); }); } diff --git a/src/Service/Startup.cs b/src/Service/Startup.cs index a696148332..92726367a8 100644 --- a/src/Service/Startup.cs +++ b/src/Service/Startup.cs @@ -24,10 +24,14 @@ using Azure.DataApiBuilder.Core.Services.MetadataProviders; using Azure.DataApiBuilder.Core.Services.OpenAPI; using Azure.DataApiBuilder.Core.Telemetry; +using Azure.DataApiBuilder.Mcp.Core; using Azure.DataApiBuilder.Service.Controllers; using Azure.DataApiBuilder.Service.Exceptions; using Azure.DataApiBuilder.Service.HealthCheck; using Azure.DataApiBuilder.Service.Telemetry; +using Azure.DataApiBuilder.Service.Utilities; +using Azure.Identity; +using Azure.Monitor.Ingestion; using HotChocolate; using HotChocolate.AspNetCore; using HotChocolate.Execution; @@ -49,13 +53,14 @@ using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; -using Microsoft.Extensions.Primitives; using NodaTime; using OpenTelemetry.Exporter; using OpenTelemetry.Logs; using OpenTelemetry.Metrics; using OpenTelemetry.Resources; using OpenTelemetry.Trace; +using Serilog; 
+using Serilog.Core; using StackExchange.Redis; using ZiggyCreatures.Caching.Fusion; using ZiggyCreatures.Caching.Fusion.Backplane.StackExchangeRedis; @@ -70,8 +75,11 @@ public class Startup(IConfiguration configuration, ILogger logger) public static bool IsLogLevelOverriddenByCli; + public static AzureLogAnalyticsCustomLogCollector CustomLogCollector = new(); public static ApplicationInsightsOptions AppInsightsOptions = new(); public static OpenTelemetryOptions OpenTelemetryOptions = new(); + public static AzureLogAnalyticsOptions AzureLogAnalyticsOptions = new(); + public static FileSinkOptions FileSinkOptions = new(); public const string NO_HTTPS_REDIRECT_FLAG = "--no-https-redirect"; private readonly HotReloadEventHandler _hotReloadEventHandler = new(); private RuntimeConfigProvider? _configProvider; @@ -172,6 +180,39 @@ public void ConfigureServices(IServiceCollection services) }); } + if (runtimeConfigAvailable + && runtimeConfig?.Runtime?.Telemetry?.AzureLogAnalytics is not null + && IsAzureLogAnalyticsAvailable(runtimeConfig.Runtime.Telemetry.AzureLogAnalytics)) + { + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(sp => + { + AzureLogAnalyticsOptions options = runtimeConfig.Runtime.Telemetry.AzureLogAnalytics; + ManagedIdentityCredential credential = new(); + LogsIngestionClient logsIngestionClient = new(new Uri(options.Auth!.DceEndpoint!), credential); + return new AzureLogAnalyticsFlusherService(options, CustomLogCollector, logsIngestionClient, _logger); + }); + services.AddHostedService(sp => sp.GetRequiredService()); + } + + if (runtimeConfigAvailable + && runtimeConfig?.Runtime?.Telemetry?.File is not null + && runtimeConfig.Runtime.Telemetry.File.Enabled) + { + services.AddSingleton(sp => + { + FileSinkOptions options = runtimeConfig.Runtime.Telemetry.File; + return new LoggerConfiguration().WriteTo.File( + path: options.Path, + rollingInterval: (RollingInterval)Enum.Parse(typeof(RollingInterval), options.RollingInterval), 
+ retainedFileCountLimit: options.RetainedFileCountLimit, + fileSizeLimitBytes: options.FileSizeLimitBytes, + rollOnFileSizeLimit: true); + }); + services.AddSingleton(sp => sp.GetRequiredService().MinimumLevel.Verbose().CreateLogger()); + } + services.AddSingleton(implementationFactory: serviceProvider => { LogLevelInitializer logLevelInit = new(MinimumLogLevel, typeof(RuntimeConfigValidator).FullName, _configProvider, _hotReloadEventHandler); @@ -283,25 +324,9 @@ public void ConfigureServices(IServiceCollection services) services.AddHttpClient("ContextConfiguredHealthCheckClient") .ConfigureHttpClient((serviceProvider, client) => { - IHttpContextAccessor httpCtxAccessor = serviceProvider.GetRequiredService(); - HttpContext? httpContext = httpCtxAccessor.HttpContext; - string baseUri = string.Empty; - - if (httpContext is not null) - { - string scheme = httpContext.Request.Scheme; // "http" or "https" - string host = httpContext.Request.Host.Host ?? "localhost"; // e.g. "localhost" - int port = ResolveInternalPort(httpContext); - baseUri = $"{scheme}://{host}:{port}"; - client.BaseAddress = new Uri(baseUri); - } - else - { - // Optional fallback if ever needed in non-request scenarios - baseUri = $"http://localhost:{ResolveInternalPort()}"; - client.BaseAddress = new Uri(baseUri); - } - + int port = PortResolutionHelper.ResolveInternalPort(); + string baseUri = $"http://localhost:{port}"; + client.BaseAddress = new Uri(baseUri); _logger.LogInformation($"Configured HealthCheck HttpClient BaseAddress as: {baseUri}"); client.DefaultRequestHeaders.Accept.Clear(); client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json")); @@ -353,6 +378,12 @@ public void ConfigureServices(IServiceCollection services) services.AddSingleton(); services.AddSingleton(); + services.AddHttpLogging(logging => + { + logging.LoggingFields = Microsoft.AspNetCore.HttpLogging.HttpLoggingFields.RequestBody; + logging.ResponseBodyLogLimit = 9999999; + }); + 
AddGraphQLService(services, runtimeConfig?.Runtime?.GraphQL); // Subscribe the GraphQL schema refresh method to the specific hot-reload event @@ -428,6 +459,9 @@ public void ConfigureServices(IServiceCollection services) } services.AddSingleton(); + + services.AddDabMcpServer(configProvider); + services.AddControllers(); } @@ -448,7 +482,10 @@ private void AddGraphQLService(IServiceCollection services, GraphQLRuntimeOption .AddHttpRequestInterceptor() .ConfigureSchema((serviceProvider, schemaBuilder) => { - GraphQLSchemaCreator graphQLService = serviceProvider.GetRequiredService(); + // The GraphQLSchemaCreator is an application service that is not available on + // the schema specific service provider, this means we have to get it with + // the GetRootServiceProvider helper. + GraphQLSchemaCreator graphQLService = serviceProvider.GetRootServiceProvider().GetRequiredService(); graphQLService.InitializeSchemaAndResolvers(schemaBuilder); }) .AddHttpRequestInterceptor() @@ -458,7 +495,14 @@ private void AddGraphQLService(IServiceCollection services, GraphQLRuntimeOption .AddTypeConverter( from => new TimeOnly(from.Hour, from.Minute, from.Second, from.Millisecond)) .AddTypeConverter( - from => new LocalTime(from.Hour, from.Minute, from.Second, from.Millisecond)); + from => new LocalTime(from.Hour, from.Minute, from.Second, from.Millisecond)) + .ModifyCostOptions(options => + { + options.MaxFieldCost = 10000; + options.MaxTypeCost = 10000; + options.EnforceCostLimits = false; + options.ApplyCostDefaults = false; + }); // Conditionally adds a maximum depth rule to the GraphQL queries/mutation selection set. 
// This rule is only added if a positive depth limit is specified, ensuring that the server @@ -471,21 +515,21 @@ private void AddGraphQLService(IServiceCollection services, GraphQLRuntimeOption } server.AddErrorFilter(error => + { + if (error.Exception is not null) { - if (error.Exception is not null) - { - _logger.LogError(exception: error.Exception, message: "A GraphQL request execution error occurred."); - return error.WithMessage(error.Exception.Message); - } + _logger.LogError(exception: error.Exception, message: "A GraphQL request execution error occurred."); + return error.WithMessage(error.Exception.Message); + } - if (error.Code is not null) - { - _logger.LogError(message: "Error code: {errorCode}\nError message: {errorMessage}", error.Code, error.Message); - return error.WithMessage(error.Message); - } + if (error.Code is not null) + { + _logger.LogError(message: "Error code: {errorCode}\nError message: {errorMessage}", error.Code, error.Message); + return error.WithMessage(error.Message); + } - return error; - }) + return error; + }) .AddErrorFilter(error => { if (error.Exception is DataApiBuilderException thrownException) @@ -533,6 +577,8 @@ public void Configure(IApplicationBuilder app, IWebHostEnvironment env, RuntimeC // Configure Application Insights Telemetry ConfigureApplicationInsightsTelemetry(app, runtimeConfig); ConfigureOpenTelemetry(runtimeConfig); + ConfigureAzureLogAnalytics(runtimeConfig); + ConfigureFileSink(app, runtimeConfig); // Config provided before starting the engine. isRuntimeReady = PerformOnConfigChangeAsync(app).Result; @@ -563,20 +609,26 @@ public void Configure(IApplicationBuilder app, IWebHostEnvironment env, RuntimeC if (!Program.IsHttpsRedirectionDisabled) { - app.UseHttpsRedirection(); + // Use HTTPS redirection for all endpoints except /health and /graphql. 
+ // This is necessary because ContextConfiguredHealthCheckClient base URI is http://localhost:{port} for internal API calls + app.UseWhen( + context => !(context.Request.Path.StartsWithSegments("/health") || context.Request.Path.StartsWithSegments("/graphql")), + appBuilder => appBuilder.UseHttpsRedirection() + ); } // URL Rewrite middleware MUST be called prior to UseRouting(). // https://andrewlock.net/understanding-pathbase-in-aspnetcore/#placing-usepathbase-in-the-correct-location app.UseCorrelationIdMiddleware(); app.UsePathRewriteMiddleware(); + app.UseHttpLogging(); // SwaggerUI visualization of the OpenAPI description document is only available // in developer mode in alignment with the restriction placed on ChilliCream's BananaCakePop IDE. // Consequently, SwaggerUI is not presented in a StaticWebApps (late-bound config) environment. if (IsUIEnabled(runtimeConfig, env)) { - app.UseSwaggerUI(c => + app.UseSwaggerUI(c => // CodeQL [SM04686] SwaggerUI is only enabled for Development environment. { c.ConfigObject.Urls = new SwaggerEndpointMapper(app.ApplicationServices.GetService()); }); @@ -635,15 +687,18 @@ public void Configure(IApplicationBuilder app, IWebHostEnvironment env, RuntimeC // without proper authorization headers. 
app.UseClientRoleHeaderAuthorizationMiddleware(); - IRequestExecutorResolver requestExecutorResolver = app.ApplicationServices.GetRequiredService(); + IRequestExecutorManager requestExecutorManager = app.ApplicationServices.GetRequiredService(); _hotReloadEventHandler.Subscribe( "GRAPHQL_SCHEMA_EVICTION_ON_CONFIG_CHANGED", - (_, _) => EvictGraphQLSchema(requestExecutorResolver)); + (_, _) => EvictGraphQLSchema(requestExecutorManager)); app.UseEndpoints(endpoints => { endpoints.MapControllers(); + // Special for MCP + endpoints.MapDabMcp(runtimeConfigProvider); + endpoints .MapGraphQL() .WithOptions(new GraphQLServerOptions @@ -675,10 +730,10 @@ public void Configure(IApplicationBuilder app, IWebHostEnvironment env, RuntimeC /// /// Evicts the GraphQL schema from the request executor resolver. /// - private static void EvictGraphQLSchema(IRequestExecutorResolver requestExecutorResolver) + private static void EvictGraphQLSchema(IRequestExecutorManager requestExecutorResolver) { Console.WriteLine("Evicting old GraphQL schema."); - requestExecutorResolver.EvictRequestExecutor(); + requestExecutorResolver.EvictExecutor(); } /// @@ -699,8 +754,9 @@ public static ILoggerFactory CreateLoggerFactoryForHostedAndNonHostedScenario(IS } TelemetryClient? appTelemetryClient = serviceProvider.GetService(); + Logger? serilogLogger = serviceProvider.GetService(); - return Program.GetLoggerFactoryForLogLevel(logLevelInitializer.MinLogLevel, appTelemetryClient, logLevelInitializer); + return Program.GetLoggerFactoryForLogLevel(logLevelInitializer.MinLogLevel, appTelemetryClient, logLevelInitializer, serilogLogger); } /// @@ -858,7 +914,6 @@ private void ConfigureApplicationInsightsTelemetry(IApplicationBuilder app, Runt /// is enabled, we can track different events and metrics. /// /// The provider used to load runtime configuration. 
- /// private void ConfigureOpenTelemetry(RuntimeConfig runtimeConfig) { if (runtimeConfig?.Runtime?.Telemetry is not null @@ -868,7 +923,7 @@ private void ConfigureOpenTelemetry(RuntimeConfig runtimeConfig) if (!OpenTelemetryOptions.Enabled) { - _logger.LogInformation("Open Telemetry are disabled."); + _logger.LogInformation("Open Telemetry is disabled."); return; } @@ -884,6 +939,92 @@ private void ConfigureOpenTelemetry(RuntimeConfig runtimeConfig) } } + /// + /// Configure Azure Log Analytics based on the loaded runtime configuration. If Azure Log Analytics + /// is enabled, we can track different events and metrics. + /// + /// The provider used to load runtime configuration. + private void ConfigureAzureLogAnalytics(RuntimeConfig runtimeConfig) + { + if (runtimeConfig?.Runtime?.Telemetry is not null + && runtimeConfig.Runtime.Telemetry.AzureLogAnalytics is not null) + { + AzureLogAnalyticsOptions = runtimeConfig.Runtime.Telemetry.AzureLogAnalytics; + + if (!AzureLogAnalyticsOptions.Enabled) + { + _logger.LogInformation("Azure Log Analytics is disabled."); + return; + } + + bool isAuthIncomplete = false; + if (string.IsNullOrEmpty(AzureLogAnalyticsOptions.Auth?.CustomTableName)) + { + _logger.LogError("Logs won't be sent to Azure Log Analytics because the Custom Table Name is not available in the config file."); + isAuthIncomplete = true; + } + + if (string.IsNullOrEmpty(AzureLogAnalyticsOptions.Auth?.DcrImmutableId)) + { + _logger.LogError("Logs won't be sent to Azure Log Analytics because the DCR Immutable Id is not available in the config file."); + isAuthIncomplete = true; + } + + if (string.IsNullOrEmpty(AzureLogAnalyticsOptions.Auth?.DceEndpoint)) + { + _logger.LogError("Logs won't be sent to Azure Log Analytics because the DCE Endpoint is not available in the config file."); + isAuthIncomplete = true; + } + + if (isAuthIncomplete) + { + return; + } + + // Updating Startup Logger to Log from Startup Class. + ILoggerFactory? 
loggerFactory = Program.GetLoggerFactoryForLogLevel(MinimumLogLevel); + _logger = loggerFactory.CreateLogger(); + } + } + + /// + /// Configure File Sink based on the loaded runtime configuration. If File Sink + /// is enabled, we can track different events and metrics. + /// + /// The application builder. + /// The provider used to load runtime configuration. + private void ConfigureFileSink(IApplicationBuilder app, RuntimeConfig runtimeConfig) + { + if (runtimeConfig?.Runtime?.Telemetry is not null + && runtimeConfig.Runtime.Telemetry.File is not null) + { + FileSinkOptions = runtimeConfig.Runtime.Telemetry.File; + + if (!FileSinkOptions.Enabled) + { + _logger.LogInformation("File is disabled."); + return; + } + + if (string.IsNullOrWhiteSpace(FileSinkOptions.Path)) + { + _logger.LogError("Logs won't be sent to File because the Path is not available in the config file."); + return; + } + + Logger? serilogLogger = app.ApplicationServices.GetService(); + if (serilogLogger is null) + { + _logger.LogError("Serilog Logger Configuration is not set."); + return; + } + + // Updating Startup Logger to Log from Startup Class. + ILoggerFactory? loggerFactory = Program.GetLoggerFactoryForLogLevel(logLevel: MinimumLogLevel, serilogLogger: serilogLogger); + _logger = loggerFactory.CreateLogger(); + } + } + /// /// Sets Static Web Apps EasyAuth as the authentication scheme for the engine. /// @@ -1032,57 +1173,15 @@ public static void AddValidFilters() } /// - /// Get the internal port of the container. + /// Helper function that returns if AzureLogAnalytics feature is enabled and properly configured. /// - /// The HttpContext - /// The internal container port - private static int ResolveInternalPort(HttpContext? 
httpContext = null) + public static bool IsAzureLogAnalyticsAvailable(AzureLogAnalyticsOptions azureLogAnalyticsOptions) { - // Try X-Forwarded-Port if context is present - if (httpContext is not null && - httpContext.Request.Headers.TryGetValue("X-Forwarded-Port", out StringValues fwdPortVal) && - int.TryParse(fwdPortVal.ToString(), out int fwdPort) && - fwdPort > 0) - { - return fwdPort; - } - - // Infer scheme from context if available, else default to "http" - string scheme = httpContext?.Request.Scheme ?? "http"; - - // Check ASPNETCORE_URLS env var - string? aspnetcoreUrls = Environment.GetEnvironmentVariable("ASPNETCORE_URLS"); - - if (!string.IsNullOrWhiteSpace(aspnetcoreUrls)) - { - foreach (string part in aspnetcoreUrls.Split(new[] { ';', ',' }, StringSplitOptions.RemoveEmptyEntries)) - { - string trimmed = part.Trim(); - - // Handle wildcard format (e.g. http://+:5002) - if (trimmed.StartsWith($"{scheme}://+:", StringComparison.OrdinalIgnoreCase)) - { - int colonIndex = trimmed.LastIndexOf(':'); - if (colonIndex != -1 && - int.TryParse(trimmed.Substring(colonIndex + 1), out int wildcardPort) && - wildcardPort > 0) - { - return wildcardPort; - } - } - - // Handle standard URI format - if (trimmed.StartsWith($"{scheme}://", StringComparison.OrdinalIgnoreCase) && - Uri.TryCreate(trimmed, UriKind.Absolute, out Uri? uri)) - { - return uri.Port; - } - } - } - - // Fallback - return scheme.Equals("https", StringComparison.OrdinalIgnoreCase) ? 
443 : 5000; + return azureLogAnalyticsOptions.Auth is not null + && azureLogAnalyticsOptions.Enabled + && !string.IsNullOrWhiteSpace(azureLogAnalyticsOptions.Auth.CustomTableName) + && !string.IsNullOrWhiteSpace(azureLogAnalyticsOptions.Auth.DcrImmutableId) + && !string.IsNullOrWhiteSpace(azureLogAnalyticsOptions.Auth.DceEndpoint); } - } } diff --git a/src/Service/Telemetry/AzureLogAnalyticsCustomLogCollector.cs b/src/Service/Telemetry/AzureLogAnalyticsCustomLogCollector.cs new file mode 100644 index 0000000000..130b872fb4 --- /dev/null +++ b/src/Service/Telemetry/AzureLogAnalyticsCustomLogCollector.cs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Threading.Channels; +using System.Threading.Tasks; +using Azure.DataApiBuilder.Config.ObjectModel; +using Microsoft.Extensions.Logging; + +namespace Azure.DataApiBuilder.Service.Telemetry; + +/// +/// Interface for customized log collector. +/// +public interface ICustomLogCollector +{ + Task LogAsync(string message, LogLevel loggingLevel, string? source = null); + Task> DequeueAllAsync(string dabIdentifier, int flushIntervalSeconds); +} + +/// +/// Log collector customized to retrieve and send all of the logs created by DAB. +/// +public class AzureLogAnalyticsCustomLogCollector : ICustomLogCollector +{ + private readonly Channel _logs = Channel.CreateUnbounded(); + + /// + /// Adds one log to the channel asynchronously, and saves the time at which it was created. + /// + /// Structured log message. + /// Severity of log event. + /// Class from which log event originated. + public async Task LogAsync(string message, LogLevel logLevel, string? 
source = null) + { + DateTime dateTime = DateTime.UtcNow; + await _logs.Writer.WriteAsync( + new AzureLogAnalyticsLogs( + dateTime.ToString("o"), + logLevel.ToString(), + message, + source)); + } + + /// + /// Creates a list periodically from the logs that are currently saved. + /// + /// Custom name to distinguish the logs sent from DAB to Azure Log Analytics. + /// Period of time between each list of logs is sent. + /// List of logs structured to be sent to Azure Log Analytics. + public async Task> DequeueAllAsync(string dabIdentifier, int flushIntervalSeconds) + { + List list = new(); + + if (await _logs.Reader.WaitToReadAsync()) + { + Stopwatch time = Stopwatch.StartNew(); + + while (true) + { + if (_logs.Reader.TryRead(out AzureLogAnalyticsLogs? item)) + { + item.Identifier = dabIdentifier; + list.Add(item); + } + + if (time.Elapsed >= TimeSpan.FromSeconds(flushIntervalSeconds)) + { + break; + } + } + } + + return list; + } +} diff --git a/src/Service/Telemetry/AzureLogAnalyticsFlusherService.cs b/src/Service/Telemetry/AzureLogAnalyticsFlusherService.cs new file mode 100644 index 0000000000..3c157211e8 --- /dev/null +++ b/src/Service/Telemetry/AzureLogAnalyticsFlusherService.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Azure.DataApiBuilder.Config.ObjectModel; +using Azure.Monitor.Ingestion; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; + +namespace Azure.DataApiBuilder.Service.Telemetry; + +/// +/// Service used to periodically flush logs to Azure Log Analytics +/// +public class AzureLogAnalyticsFlusherService : BackgroundService +{ + private readonly AzureLogAnalyticsOptions _options; + private readonly ICustomLogCollector _customLogCollector; + private readonly LogsIngestionClient _logsIngestionClient; + private readonly ILogger _logger; + + public AzureLogAnalyticsFlusherService(AzureLogAnalyticsOptions options, ICustomLogCollector customLogCollector, LogsIngestionClient logsIngestionClient, ILogger logger) + { + _options = options; + _customLogCollector = customLogCollector; + _logsIngestionClient = logsIngestionClient; + _logger = logger; + } + + /// + /// Function that will keep periodically flushing data logs as long as Azure Log Analytics is enabled. + /// + /// Token used to stop running service when program is shut down. + protected async override Task ExecuteAsync(CancellationToken stoppingToken) + { + while (true) + { + try + { + List logs = await _customLogCollector.DequeueAllAsync(_options.DabIdentifier!, (int)_options.FlushIntervalSeconds!); + + if (logs.Count > 0) + { + await _logsIngestionClient.UploadAsync(_options.Auth!.DcrImmutableId!, _options.Auth!.CustomTableName!, logs); + } + } + catch (Exception ex) + { + _logger.LogError($"Error uploading logs to Azure Log Analytics: {ex}"); + } + } + } +} diff --git a/src/Service/Telemetry/AzureLogAnalyticsLogger.cs b/src/Service/Telemetry/AzureLogAnalyticsLogger.cs new file mode 100644 index 0000000000..52579c3963 --- /dev/null +++ b/src/Service/Telemetry/AzureLogAnalyticsLogger.cs @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +using System; +using Microsoft.Extensions.Logging; + +namespace Azure.DataApiBuilder.Service.Telemetry; + +/// +/// Logger used to receive all the logs that will be sent to Azure Log Analytics +/// and are created by Data API builder while it is running. +/// +public class AzureLogAnalyticsLogger : ILogger +{ + private readonly string _className; + private readonly ICustomLogCollector _customLogCollector; + + public AzureLogAnalyticsLogger(string className, ICustomLogCollector customLogCollector) + { + _className = className; + _customLogCollector = customLogCollector; + } + + public IDisposable? BeginScope(TState state) where TState : notnull => default!; + + public bool IsEnabled(LogLevel logLevel) => true; + + public async void Log(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func formatter) + { + string message = formatter(state, exception); + await _customLogCollector.LogAsync(message, logLevel, _className); + } +} diff --git a/src/Service/Telemetry/AzureLogAnalyticsLoggerProvider.cs b/src/Service/Telemetry/AzureLogAnalyticsLoggerProvider.cs new file mode 100644 index 0000000000..71e17d548d --- /dev/null +++ b/src/Service/Telemetry/AzureLogAnalyticsLoggerProvider.cs @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using Microsoft.Extensions.Logging; + +namespace Azure.DataApiBuilder.Service.Telemetry; + +/// +/// Adds an Azure Log Analytics logger named 'AzureLogAnalyticsLogger' to the . 
+/// +public class AzureLogAnalyticsLoggerProvider : ILoggerProvider +{ + private readonly ICustomLogCollector _customLogCollector; + + public AzureLogAnalyticsLoggerProvider(ICustomLogCollector customLogCollector) + { + _customLogCollector = customLogCollector; + } + + public ILogger CreateLogger(string className) + { + return new AzureLogAnalyticsLogger(className, _customLogCollector); + } + + public void Dispose() { } +} diff --git a/src/Service/Utilities/PortResolutionHelper.cs b/src/Service/Utilities/PortResolutionHelper.cs new file mode 100644 index 0000000000..c7e7f3befd --- /dev/null +++ b/src/Service/Utilities/PortResolutionHelper.cs @@ -0,0 +1,117 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; + +namespace Azure.DataApiBuilder.Service.Utilities +{ + /// + /// Provides methods to resolve the internal port for the application based on environment variables. + /// + public static class PortResolutionHelper + { + /// + /// Resolves the internal port used by the application based on environment variables and URL bindings. + /// + /// This method determines the port by checking the ASPNETCORE_URLS environment + /// variable for URL bindings. If a valid port is found in the URLs, it is returned. If no port is specified, + /// the method checks the DEFAULT_PORT environment variable for a fallback port. If neither is set, the + /// default port of 5000 is returned. + /// The resolved port number. Returns the port specified in ASPNETCORE_URLS, or the fallback port from + /// DEFAULT_PORT, or 5000 if no port is configured. + public static int ResolveInternalPort() + { + string? urls = Environment.GetEnvironmentVariable("ASPNETCORE_URLS"); + int? 
httpsPort = null; + + if (!string.IsNullOrWhiteSpace(urls)) + { + string[] parts = urls.Split(new[] { ';', ',' }, StringSplitOptions.RemoveEmptyEntries); + + foreach (string part in parts) + { + string trimmedPart = part.Trim(); + + // Try to parse as a valid URI first + if (Uri.TryCreate(trimmedPart, UriKind.Absolute, out Uri? uri) && + (uri.Scheme == Uri.UriSchemeHttp || uri.Scheme == Uri.UriSchemeHttps)) + { + if (uri.Scheme == Uri.UriSchemeHttp) + { + return uri.Port; + } + else if (uri.Scheme == Uri.UriSchemeHttps) + { + httpsPort ??= uri.Port; + } + + continue; + } + + // Handle known wildcard patterns (http/https with + or * as host) + // Example: http://+:1234 or http://*:1234 or https://+:1234 or https://*:1234 + if (trimmedPart.StartsWith("http://+:", StringComparison.OrdinalIgnoreCase) || + trimmedPart.StartsWith("http://*:", StringComparison.OrdinalIgnoreCase)) + { + string portString = trimmedPart.Substring(trimmedPart.LastIndexOf(':') + 1); + + if (int.TryParse(portString, out int port) && port > 0) + { + return port; + } + + continue; + } + + if (trimmedPart.StartsWith("https://+:", StringComparison.OrdinalIgnoreCase) || + trimmedPart.StartsWith("https://*:", StringComparison.OrdinalIgnoreCase)) + { + string portString = trimmedPart.Substring(trimmedPart.LastIndexOf(':') + 1); + + if (int.TryParse(portString, out int port) && port > 0) + { + httpsPort ??= port; + } + + continue; + } + } + } + + // If no HTTP, fallback to HTTPS port if present + if (httpsPort.HasValue) + { + return httpsPort.Value; + } + + // Check ASPNETCORE_HTTP_PORTS if ASPNETCORE_URLS is not set + string? 
httpPorts = Environment.GetEnvironmentVariable("ASPNETCORE_HTTP_PORTS"); + + if (!string.IsNullOrWhiteSpace(httpPorts)) + { + string[] portParts = httpPorts.Split(new[] { ';', ',' }, StringSplitOptions.RemoveEmptyEntries); + + foreach (string portPart in portParts) + { + string trimmedPort = portPart.Trim(); + + if (int.TryParse(trimmedPort, out int port) && port > 0) + { + return port; + } + } + } + + // Configurable fallback port + string? defaultPortEnv = Environment.GetEnvironmentVariable("DEFAULT_PORT"); + + if (int.TryParse(defaultPortEnv, out int defaultPort) && defaultPort > 0) + { + return defaultPort; + } + + // Default Kestrel port if not specified. + return 5000; + } + } +} diff --git a/src/Service/appsettings.json b/src/Service/appsettings.json index c4dc8f6326..1ec900abf9 100644 --- a/src/Service/appsettings.json +++ b/src/Service/appsettings.json @@ -3,7 +3,8 @@ "LogLevel": { "Default": "Error", "Microsoft": "Warning", - "Microsoft.Hosting.Lifetime": "Information" + "Microsoft.Hosting.Lifetime": "Information", + "Microsoft.AspNetCore.HttpLogging.HttpLoggingMiddleware": "Information" } }, "AllowedHosts": "*" diff --git a/src/Service/dab-config.json b/src/Service/dab-config.json new file mode 100644 index 0000000000..6e20350b74 --- /dev/null +++ b/src/Service/dab-config.json @@ -0,0 +1,3362 @@ +{ + "$schema": "https://github.com/Azure/data-api-builder/releases/latest/download/dab.draft.schema.json", + "data-source": { + "database-type": "mssql", + "connection-string": "Data Source=nqf4kgvoqm4ufazdzriupb2pay-hmnbvxar2mgu7e3ng27fsqy3we.database.fabric.microsoft.com,1433;User ID=8bfaf0d6-fa20-4ed5-a450-0005ceb77729;Password=z5y8Q~hLcfdAflVrfnYoVxdavIJXZb5tlH~tAbRn;Pooling=True;Min Pool Size=0;Max Pool Size=100;Multiple Active Result Sets=False;Connect Timeout=30;Encrypt=False;Trust Server Certificate=True;Authentication=ActiveDirectoryServicePrincipal;Initial Catalog=apiLayer-345587c4-1232-457e-9761-b6bca3d72e2e; Command Timeout=60", + "options": { + 
"set-session-context": true + } + }, + "runtime": { + "telemetry": { + "application-insights": { + "enabled": false, + "connection-string": "InstrumentationKey=d303d229-1055-4f48-a811-4dc0a3d4aa1e;IngestionEndpoint=https://westeurope-5.in.applicationinsights.azure.com/;LiveEndpoint=https://westeurope.livediagnostics.monitor.azure.com/;ApplicationId=c98a3731-8125-4aa0-867a-3361c0e536db" + } + }, + "pagination": { + "max-page-size": 100000 + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "rest": { + "enabled": true, + "path": "/api", + "request-body-strict": true + }, + "graphql": { + "enabled": true, + "path": "/graphql", + "allow-introspection": true + }, + "host": { + "cors": { + "origins": [], + "allow-credentials": false + }, + "authentication": { + "provider": "StaticWebApps" + }, + "mode": "development" + } + }, + "entities": { + "BillOfLading": { + "source": { + "object": "silver_ops.v_BillOfLading", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "BillOfLading", + "plural": "BillsOfLading" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "BillOfLadingPerShipmentEquipment": { + "cardinality": "many", + "target.entity": "BillOfLadingPerShipmentEquipment", + "source.fields": [ + "systemId" + ], + "target.fields": [ + "BOLId" + ] + }, + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "EnvOrderNo" + ], + "target.fields": [ + "shipmentID" + ] + }, + "Party": { + "cardinality": "many", + "target.entity": "BillOfLadingParty", + "source.fields": [ + "DocumentNo" + ], + "target.fields": [ + "DocumentNo" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "BillOfLadingParty": { + "source": { + "object": "silver_ops.v_BillOfLadingParty", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + 
"graphql": { + "enabled": true, + "type": { + "singular": "BillOfLadingParty", + "plural": "BillOfLadingParties" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "BillOfLading": { + "cardinality": "one", + "target.entity": "BillOfLading", + "source.fields": [ + "DocumentNo" + ], + "target.fields": [ + "DocumentNo" + ] + }, + "Partner": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "ContactNo" + ], + "target.fields": [ + "partnerID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "BillOfLadingPerShipmentEquipment": { + "source": { + "object": "silver_ops.BillOfLadingPerShipmentEquipment", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "BillOfLadingPerShipmentEquipment", + "plural": "BillOfLadingsPerShipmentEquipment" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "ShipmentEquipment": { + "cardinality": "one", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentEquipmentId" + ], + "target.fields": [ + "id" + ] + }, + "BillOfLading": { + "cardinality": "one", + "target.entity": "BillOfLading", + "source.fields": [ + "BOLId" + ], + "target.fields": [ + "systemId" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "CargoItem": { + "source": { + "object": "silver_ops.CargoItem", + "type": "table", + "key-fields": [ + "cargoItemID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "CargoItem", + "plural": "CargoItems" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentId" + ], + "target.fields": [ + "shipmentID" + ] + 
}, + "ShipmentEquipment": { + "cardinality": "one", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentEquipmentId" + ], + "target.fields": [ + "id" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "CargoMovementType": { + "source": { + "object": "silver_ops.CargoMovementType", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "CargoMovementType", + "plural": "CargoMovementTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "CarrierPartner": { + "source": { + "object": "silver_ops.Partner", + "type": "table", + "key-fields": [ + "partnerID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "CarrierPartner", + "plural": "CarrierPartners" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Shipments": { + "cardinality": "many", + "target.entity": "Shipment", + "source.fields": [ + "partnerID" + ], + "target.fields": [ + "carrierID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "CharacteristicGroup": { + "source": { + "object": "silver_ops.v_Characteristics", + "type": "table", + "key-fields": [ + "code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "CharacteristicGroup", + "plural": "CharacteristicGroups" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "CustomerPartner": { + "source": { + "object": "silver_ops.v_Partner", + "type": "table", + "key-fields": [ + "partnerID" + ] + }, + 
"cache": { + "enabled": true, + "ttl-seconds": 15 + }, + "graphql": { + "enabled": true, + "type": { + "singular": "CustomerPartner", + "plural": "CustomerPartners" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Organization": { + "cardinality": "one", + "target.entity": "Organization", + "source.fields": [ + "mdmOrganizationEntityID" + ], + "target.fields": [ + "mdmEntityID" + ] + }, + "Shipment": { + "cardinality": "many", + "target.entity": "Shipment", + "source.fields": [ + "partnerID" + ], + "target.fields": [ + "customerID" + ] + }, + "InboundStockOrder": { + "cardinality": "many", + "target.entity": "InboundOrder", + "source.fields": [ + "partnerID" + ], + "target.fields": [ + "customerCode" + ] + }, + "OutboundStockOrder": { + "cardinality": "many", + "target.entity": "OutboundOrder", + "source.fields": [ + "partnerID" + ], + "target.fields": [ + "customerCode" + ] + }, + "Company": { + "cardinality": "many", + "target.entity": "PartnerPerCompany", + "source.fields": [ + "partnerID" + ], + "target.fields": [ + "sourceEntityID" + ] + }, + "TariffContactGroup": { + "cardinality": "many", + "target.entity": "TariffContactGroupMember", + "source.fields": [ + "partnerID" + ], + "target.fields": [ + "PartnerId" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "Equipment": { + "source": { + "object": "silver_ops.Equipment", + "type": "table", + "key-fields": [ + "equipmentID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Equipment", + "plural": "Equipment" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "relationships": { + "ShipmentEquipments": { + "cardinality": "many", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "equipmentID" + ], + "target.fields": [ + "equipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } 
+ ] + }, + "EquipmentEventType": { + "source": { + "object": "silver_ops.EquipmentEventType", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "EquipmentEventType", + "plural": "EquipmentEventTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "EquipmentMap": { + "source": { + "object": "silver_ops.usp_MapSource", + "type": "stored-procedure", + "parameters": { + "customerScope": "string" + } + }, + "cache": { + "enabled": true, + "ttl-seconds": 60 + }, + "graphql": { + "enabled": true, + "type": { + "singular": "EquipmentMap", + "plural": "EquipmentMaps" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "execute" + } + ] + } + ] + }, + "FlowtypeType": { + "source": { + "object": "silver_ops.v_FlowTypeType", + "type": "table", + "key-fields": [ + "flowType" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "FlowtypeType", + "plural": "FlowtypeTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + + "InvoiceSearch": { + "source": { + "object": "silver_ops.usp_invoiceSearch", + "type": "stored-procedure", + "parameters": { + "searchString": "string", + "customerScope": "string" + } + }, + "graphql": { + "enabled": true, + "type": { + "singular": "InvoiceSearch", + "plural": "InvoiceSearches" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "execute" + } + ] + } + ] + }, + + "GlobalSearch": { + "source": { + "object": "silver_ops.usp_globalSearch", + "type": "stored-procedure", + "parameters": { + "searchString": "string", + "customerScope": "string" + } + 
}, + "graphql": { + "enabled": true, + "type": { + "singular": "GlobalSearch", + "plural": "GlobalSearches" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "execute" + } + ] + } + ] + }, + "GoodsSummary": { + "source": { + "object": "silver_ops.v_GoodsSummmary", + "type": "table", + "key-fields": [ + "shipmentID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "GoodsSummary", + "plural": "GoodsSummaries" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "InboundOrder": { + "source": { + "object": "silver_ops.InboundOrder", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "InboundOrder", + "plural": "InboundOrders" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "customer": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "customerCode" + ], + "target.fields": [ + "partnerID" + ] + }, + "inboundOrderLines": { + "cardinality": "many", + "target.entity": "InboundOrderLine", + "source.fields": [ + "no" + ], + "target.fields": [ + "documentNo" + ] + }, + "shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "opsFactOrderId" + ], + "target.fields": [ + "shipmentID" + ] + }, + "characteristics": { + "cardinality": "one", + "target.entity": "CharacteristicGroup", + "source.fields": [ + "characteristicGroupCode" + ], + "target.fields": [ + "code" + ] + }, + "terminalLocation": { + "cardinality": "one", + "target.entity": "TerminalLocation", + "source.fields": [ + 
"unloadingTerminalCode" + ], + "target.fields": [ + "terminalCode" + ] + }, + "pol": { + "cardinality": "one", + "target.entity": "polPort", + "source.fields": [ + "polCode" + ], + "target.fields": [ + "Code" + ] + }, + "pod": { + "cardinality": "one", + "target.entity": "podPort", + "source.fields": [ + "podCode" + ], + "target.fields": [ + "Code" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [] + } + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "InboundOrderLine": { + "source": { + "object": "silver_ops.v_InboundOrderLine", + "type": "table", + "key-fields": [ + "documentNo", + "lineNo" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "InboundOrderLine", + "plural": "InboundOrderLines" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "inboundOrder": { + "cardinality": "one", + "target.entity": "InboundOrder", + "source.fields": [ + "documentNo" + ], + "target.fields": [ + "no" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [] + } + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "LoadReport": { + "source": { + "object": "silver_ops.v_loadReport", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Loadreport", + "plural": "Loadreports" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "ops_fact_order_id" + ], + "target.fields": [ + "shipmentID" + ] + }, + "POL": { + "cardinality": "one", + "target.entity": "polPort", + "source.fields": [ + "polCode" + ], + "target.fields": [ + "Code" + ] + }, + "POD": { + "cardinality": "one", + "target.entity": "podPort", + "source.fields": [ + "podCode" + ], + "target.fields": [ + "Code" + ] + } + 
}, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "LoadtypeType": { + "source": { + "object": "silver_ops.v_LoadTypeType", + "type": "table", + "key-fields": [ + "loadType" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LoadtypeType", + "plural": "LoadtypeTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "LocationPDY": { + "source": { + "object": "silver_ops.Location", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LocationPDY", + "plural": "LocationsPDY" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "LocationPOD": { + "source": { + "object": "silver_ops.Location", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LocationPOD", + "plural": "LocationsPOD" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "LocationPOL": { + "source": { + "object": "silver_ops.Location", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LocationPOL", + "plural": "LocationsPOL" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "LocationPRC": { + "source": { + "object": "silver_ops.Location", + "type": "table", + 
"key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LocationPRC", + "plural": "LocationsPRC" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "LocationTS1": { + "source": { + "object": "silver_ops.Location", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LocationTS1", + "plural": "LocationsTS1" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "LocationTS2": { + "source": { + "object": "silver_ops.Location", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "LocationPS2", + "plural": "LocationsTS2" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "ModeOfTransportType": { + "source": { + "object": "silver_ops.ModeOfTransportType", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ModeOfTransportType", + "plural": "ModeOfTransportTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "OrganisationLocation": { + "source": { + "object": "silver_mdm.v_Location", + "type": "table", + "key-fields": [ + "mdmEntityID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "OrganisationLocation", + "plural": "OrganisationLocations" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 
+ }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "Organization": { + "source": { + "object": "silver_mdm.Organization", + "type": "table", + "key-fields": [ + "mdmEntityID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Organization", + "plural": "Organizations" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "PartnerPerCompany": { + "cardinality": "many", + "target.entity": "PartnerPerCompany", + "source.fields": [ + "mdmEntityID" + ], + "target.fields": [ + "mdmOrganizationEntityID" + ] + }, + "Partner": { + "cardinality": "many", + "target.entity": "CustomerPartner", + "source.fields": [ + "mdmEntityID" + ], + "target.fields": [ + "mdmOrganizationEntityID" + ] + }, + "OrganizationLocation": { + "cardinality": "many", + "target.entity": "OrganizationLocationRelation", + "source.fields": [ + "mdmEntityID" + ], + "target.fields": [ + "mdmEntityOrganizationID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "OrganizationLocationRelation": { + "source": { + "object": "silver_mdm.OrganizationAddressRelation", + "type": "table", + "key-fields": [ + "mdmRelationshipID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "OrganizationLocationRelation", + "plural": "OrganizationLocationRelations" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Organization": { + "cardinality": "one", + "target.entity": "Organization", + "source.fields": [ + "mdmEntityOrganizationID" + ], + "target.fields": [ + "mdmEntityID" + ] + }, + "Location": { + "cardinality": "one", + "target.entity": "OrganisationLocation", + "source.fields": [ + "mdmEntityLocationID" + ], + "target.fields": [ + "mdmEntityID" + ] + } + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + 
"actions": [ + { + "action": "read" + } + ] + } + ] + }, + "OutboundOrder": { + "source": { + "object": "silver_ops.v_OutboundOrder", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "OutboundOrder", + "plural": "OutboundOrders" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "customer": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "customerCode" + ], + "target.fields": [ + "partnerID" + ] + }, + "outboundOrderLines": { + "cardinality": "many", + "target.entity": "OutboundOrderLine", + "source.fields": [ + "no" + ], + "target.fields": [ + "documentNo" + ] + }, + "shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "opsFactOrderId" + ], + "target.fields": [ + "shipmentID" + ] + }, + "pol": { + "cardinality": "one", + "target.entity": "polPort", + "source.fields": [ + "polCode" + ], + "target.fields": [ + "Code" + ] + }, + "pod": { + "cardinality": "one", + "target.entity": "podPort", + "source.fields": [ + "podCode" + ], + "target.fields": [ + "Code" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [] + } + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "OutboundOrderLine": { + "source": { + "object": "silver_ops.v_OutboundOrderLine", + "type": "table", + "key-fields": [ + "documentNo", + "lineNo" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "OutboundOrderLine", + "plural": "OutboundOrderLines" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "outboundOrder": { + "cardinality": "one", + "target.entity": "OutboundOrder", + "source.fields": [ + "documentNo" + ], + "target.fields": [ + "no" + ] + }, + "charecteristics": { + "cardinality": "one", + "target.entity": "CharacteristicGroup", + "source.fields": [ + "characteristicGroupCode" + ], + 
"target.fields": [ + "code" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [] + } + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "PartnerPerCompany": { + "source": { + "object": "silver_ops.v_PartnerPerCompany", + "type": "table", + "key-fields": [ + "PartnerPerCompanyId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "PartnerPerCompany", + "plural": "PartnerPerCompanies" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Customer": { + "cardinality": "many", + "target.entity": "CustomerPartner", + "source.fields": [ + "sourceEntityID" + ], + "target.fields": [ + "partnerID" + ] + }, + "organization": { + "cardinality": "one", + "target.entity": "Organization", + "source.fields": [ + "mdmOrganizationEntityID" + ], + "target.fields": [ + "mdmEntityID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "pdyPort": { + "source": { + "object": "silver_ops.Port", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "pdyPort", + "plural": "pdyPorts" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "podPort": { + "source": { + "object": "silver_ops.Port", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "podPort", + "plural": "podPorts" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "polPort": { + "source": { + "object": "silver_ops.Port", + "type": "table", 
+ "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "polPort", + "plural": "polPorts" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "prcPort": { + "source": { + "object": "silver_ops.Port", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "prcPort", + "plural": "prcPorts" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "geoPort": { + "source": { + "object": "silver_ops.Port", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "geoPort", + "plural": "geoPorts" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "PurchaseDocument": { + "source": { + "object": "silver_ops.PurchaseDocument", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "PurchaseDocument", + "plural": "PurchaseDocuments" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "OperationalOrderNo" + ], + "target.fields": [ + "mplBookingReference" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [] + } + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "SalesDocument": { + "source": { + "object": "silver_ops.v_SalesDocument", + "type": "table", + "key-fields": [ + "salesDocumentID" + ] 
+ }, + "graphql": { + "enabled": true, + "type": { + "singular": "SalesDocument", + "plural": "SalesDocuments" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "invoicee": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "invoiceeID" + ], + "target.fields": [ + "partnerID" + ] + }, + "SalesDocumentLine": { + "cardinality": "many", + "target.entity": "SalesDocumentLine", + "source.fields": [ + "salesDocumentID" + ], + "target.fields": [ + "salesDocumentID" + ] + }, + "Shipments": { + "cardinality": "many", + "target.entity": "ShipmentsPerSalesDocument", + "source.fields": [ + "salesDocumentID" + ], + "target.fields": [ + "salesDocumentID" + ] + }, + "Document": { + "cardinality": "one", + "target.entity": "ShipmentDocument", + "source.fields": [ + "documentNumber" + ], + "target.fields": [ + "invoiceCrMemoNo" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [ + "D365FSynchErrorText", + "D365FSynchStatus", + "Env", + "OrderNo", + "SelltoCustomerNo", + "SourceOrderNos" + ] + } + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "SalesDocumentLine": { + "source": { + "object": "silver_ops.SalesDocumentLine", + "type": "table", + "key-fields": [ + "salesDocumentID", + "lineNo" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "SalesDocumentLine", + "plural": "SalesDocumentLines" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "SalesDocument": { + "cardinality": "one", + "target.entity": "SalesDocument", + "source.fields": [ + "salesDocumentID" + ], + "target.fields": [ + "salesDocumentID" + ] + }, + "shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read", + "fields": { + "exclude": [ + 
"D365FSynchErrorText", + "D365FSynchStatus", + "Env", + "OrderNo", + "SelltoCustomerNo", + "sourceOrderNo" + ] + } + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "Shipment": { + "source": { + "object": "silver_ops.v_Shipment", + "type": "table", + "key-fields": [ + "shipmentID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Shipment", + "plural": "Shipments" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "BillOfLading": { + "cardinality": "many", + "target.entity": "BillOfLading", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "EnvOrderNo" + ] + }, + "ShipmentEquipments": { + "cardinality": "many", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + }, + "CargoItems": { + "cardinality": "many", + "target.entity": "CargoItem", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentId" + ] + }, + "carrier": { + "cardinality": "one", + "target.entity": "CarrierPartner", + "source.fields": [ + "carrierID" + ], + "target.fields": [ + "partnerID" + ] + }, + "event": { + "cardinality": "many", + "target.entity": "ShipmentEvent", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "ShipmentId" + ] + }, + "customer": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "customerID" + ], + "target.fields": [ + "partnerID" + ] + }, + "shipmentPol": { + "cardinality": "one", + "target.entity": "polPort", + "source.fields": [ + "pol" + ], + "target.fields": [ + "Code" + ] + }, + "shipmentPod": { + "cardinality": "one", + "target.entity": "podPort", + "source.fields": [ + "pod" + ], + "target.fields": [ + "Code" + ] + }, + "shipmentPrc": { + "cardinality": "one", + "target.entity": "prcPort", + "source.fields": [ + "prc" + ], + "target.fields": [ + "Code" + ] + }, + "shipmentPdy": { + "cardinality": "one", + "target.entity": "pdyPort", + "source.fields": 
[ + "pdy" + ], + "target.fields": [ + "Code" + ] + }, + "goodsSummary": { + "cardinality": "one", + "target.entity": "GoodsSummary", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "ShipmentDocument": { + "source": { + "object": "silver_ops.v_ShipmentDocument", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentDocument", + "plural": "ShipmentDocuments" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "ops_fact_order_id" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "ShipmentEquipment": { + "source": { + "object": "silver_ops.v_ShipmentEquipment", + "type": "table", + "key-fields": [ + "id" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentEquipment", + "plural": "ShipmentEquipment" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Equipments": { + "cardinality": "one", + "target.entity": "Equipment", + "source.fields": [ + "equipmentID" + ], + "target.fields": [ + "equipmentID" + ] + }, + "TrackingEvents": { + "cardinality": "many", + "target.entity": "TrackingEvent", + "source.fields": [ + "id" + ], + "target.fields": [ + "shipmentEquipmentID" + ] + }, + "ShipmentEquipmentTransport": { + "cardinality": "many", + "target.entity": "ShipmentEquipmentTransport", + "source.fields": [ + "id" + ], + "target.fields": [ + "shipmentEquipmentID" + ] + }, + "CargoItem": { + "cardinality": "many", + "target.entity": "CargoItem", + "source.fields": [ + "id" + ], + 
"target.fields": [ + "shipmentEquipmentId" + ] + }, + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "ShipmentEquipmentTransport": { + "source": { + "object": "silver_ops.ShipmentEquipmentTransport", + "type": "table", + "key-fields": [ + "id" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentEquipmentTransport", + "plural": "ShipmentEquipmentTransports" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "pol": { + "cardinality": "one", + "target.entity": "polPort", + "source.fields": [ + "polID" + ], + "target.fields": [ + "Code" + ] + }, + "pod": { + "cardinality": "one", + "target.entity": "podPort", + "source.fields": [ + "podID" + ], + "target.fields": [ + "Code" + ] + }, + "ShipmentEquipment": { + "cardinality": "one", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentEquipmentID" + ], + "target.fields": [ + "id" + ] + }, + "Transport": { + "cardinality": "one", + "target.entity": "Transport", + "source.fields": [ + "transportID" + ], + "target.fields": [ + "id" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "ShipmentEvent": { + "source": { + "object": "silver_ops.v_ShipmentEvent", + "type": "table", + "key-fields": [ + "ShipmentId", + "systemCreatedAt" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentEvent", + "plural": "ShipmentEvents" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "ShipmentId" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + 
"permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "ShipmentLocationType": { + "source": { + "object": "silver_ops.ShipmentLocationType", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentLocationType", + "plural": "ShipmentLocationTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "ShipmentsPerSalesDocument": { + "source": { + "object": "silver_ops.v_ShipmentsPerSalesDocument", + "type": "table", + "key-fields": [ + "salesDocumentID", + "shipmentID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentsPerSalesDocument", + "plural": "ShipmentsPerSalesDocuments" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "SalesDocument": { + "cardinality": "one", + "target.entity": "SalesDocument", + "source.fields": [ + "salesDocumentID" + ], + "target.fields": [ + "salesDocumentID" + ] + }, + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentID" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "ShipmentStatusType": { + "source": { + "object": "silver_ops.ShipmentStatusType", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ShipmentStatusType", + "plural": "ShipmentStatusTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "TariffCarrier": { + "source": { + "object": "silver_ops.v_TariffVendors", + "type": 
"table", + "key-fields": [ + "SpecificCustVendNo" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TariffCarrier", + "plural": "TariffCarriers" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Tariff": { + "cardinality": "many", + "target.entity": "Tariff", + "source.fields": [ + "SpecificCustVendNo" + ], + "target.fields": [ + "SpecificCustVendNo" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + + "Tariff": { + "source": { + "object": "silver_ops.v_Tariff", + "type": "table", + "key-fields": [ + "TariffId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Tariff", + "plural": "Tariffs" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "TariffVendor": { + "cardinality": "one", + "target.entity": "TariffCarrier", + "source.fields": [ + "SpecificCustVendNo" + ], + "target.fields": [ + "SpecificCustVendNo" + ] + }, + "TariffSurcharge": { + "cardinality": "many", + "target.entity": "TariffSurcharge", + "source.fields": [ + "TariffSurchargeKey" + ], + "target.fields": [ + "TariffId" + ] + }, + "TariffContactGroupMember": { + "cardinality": "many", + "target.entity": "TariffContactGroupMember", + "source.fields": [ + "SourceNo" + ], + "target.fields": [ + "ContactGroupCode" + ] + }, + "Pol": { + "cardinality": "one", + "target.entity": "LocationPOL", + "source.fields": [ + "FromZoneCode" + ], + "target.fields": [ + "Code" + ] + }, + "Pod": { + "cardinality": "one", + "target.entity": "LocationPOD", + "source.fields": [ + "ToZoneCode" + ], + "target.fields": [ + "Code" + ] + }, + "Prc": { + "cardinality": "one", + "target.entity": "LocationPRC", + "source.fields": [ + "PRCCode" + ], + "target.fields": [ + "Code" + ] + }, + "Pdy": { + "cardinality": "one", + "target.entity": "LocationPDY", + "source.fields": [ + "PDYCode" + ], + "target.fields": [ + "Code" + ] + }, + 
"Transshipment1": { + "cardinality": "one", + "target.entity": "LocationTS1", + "source.fields": [ + "TransshipmentPort" + ], + "target.fields": [ + "Code" + ] + }, + "Transshipment2": { + "cardinality": "one", + "target.entity": "LocationTS2", + "source.fields": [ + "TransshipmentPort2" + ], + "target.fields": [ + "Code" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "TariffAll": { + "source": { + "object": "silver_ops.v_TariffAll", + "type": "table", + "key-fields": [ + "TariffId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TariffAll", + "plural": "TariffsAll" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "TariffSurcharge": { + "cardinality": "many", + "target.entity": "TariffSurcharge", + "source.fields": [ + "TariffSurchargeKey" + ], + "target.fields": [ + "TariffId" + ] + }, + "TariffContactGroupMember": { + "cardinality": "many", + "target.entity": "TariffContactGroupMember", + "source.fields": [ + "SourceNo" + ], + "target.fields": [ + "ContactGroupCode" + ] + }, + "Pol": { + "cardinality": "one", + "target.entity": "LocationPOL", + "source.fields": [ + "FromZoneCode" + ], + "target.fields": [ + "Code" + ] + }, + "Pod": { + "cardinality": "one", + "target.entity": "LocationPOD", + "source.fields": [ + "ToZoneCode" + ], + "target.fields": [ + "Code" + ] + }, + "Prc": { + "cardinality": "one", + "target.entity": "LocationPRC", + "source.fields": [ + "PRCCode" + ], + "target.fields": [ + "Code" + ] + }, + "Pdy": { + "cardinality": "one", + "target.entity": "LocationPDY", + "source.fields": [ + "PDYCode" + ], + "target.fields": [ + "Code" + ] + }, + "Transshipment1": { + "cardinality": "one", + "target.entity": "LocationTS1", + "source.fields": [ + "TransshipmentPort" + ], + "target.fields": [ + "Code" + ] + }, + "Transshipment2": { + "cardinality": "one", + "target.entity": 
"LocationTS2", + "source.fields": [ + "TransshipmentPort2" + ], + "target.fields": [ + "Code" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "TariffContactGroupMember": { + "source": { + "object": "silver_ops.TariffContactGroupMembers", + "type": "table", + "key-fields": [ + "ContactGroupCode" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TariffContactGroupMember", + "plural": "TariffContactGroupMembers" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Tariff": { + "cardinality": "many", + "target.entity": "Tariff", + "source.fields": [ + "ContactGroupCode" + ], + "target.fields": [ + "SourceNo" + ] + }, + "Customer": { + "cardinality": "one", + "target.entity": "CustomerPartner", + "source.fields": [ + "PartnerId" + ], + "target.fields": [ + "partnerID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "TariffContainerGroup": { + "source": { + "object": "silver_ops.TariffContainerGroup", + "type": "table", + "key-fields": [ + "ContainerCode" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TariffContainerGroup", + "plural": "TariffContainerGroups" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Tariff": { + "cardinality": "many", + "target.entity": "Tariff", + "source.fields": [ + "ContainerCode" + ], + "target.fields": [ + "ContainerType" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "TariffContainerSize": { + "source": { + "object": "silver_ops.v_TariffContainerSize", + "type": "table", + "key-fields": [ + "containerSize" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": 
"TariffContainerSize", + "plural": "TariffContainerSizes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "TariffSurcharge": { + "source": { + "object": "silver_ops.TariffSurcharge", + "type": "table", + "key-fields": [ + "systemId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TariffSurcharge", + "plural": "TariffSurcharges" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Tariff": { + "cardinality": "one", + "target.entity": "Tariff", + "source.fields": [ + "TariffId" + ], + "target.fields": [ + "TariffSurchargeKey" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "TerminalLocation": { + "source": { + "object": "silver_ops.v_TerminalLocation", + "type": "table", + "key-fields": [ + "terminalCode" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TerminalLocation", + "plural": "TerminalLocations" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "DelayEvent": { + "source": { + "object": "silver_trk.ww_DelayEvent", + "type": "table", + "key-fields": [ + "delayEventId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "DelayEvent", + "plural": "DelayEvents" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "ShipmentEquipment": { + "cardinality": "one", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentEquipmentId" + ], + "target.fields": [ + "id" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 
120 + } + }, + "TrackingEvent": { + "source": { + "object": "silver_trk.v_TrackingEvent", + "type": "table", + "key-fields": [ + "trackingEventID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TrackingEvent", + "plural": "TrackingEvents" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "ShipmentEquipment": { + "cardinality": "one", + "target.entity": "ShipmentEquipment", + "source.fields": [ + "shipmentEquipmentID" + ], + "target.fields": [ + "id" + ] + }, + "TrackingPort": { + "cardinality": "one", + "target.entity": "TrackingPort", + "source.fields": [ + "portID" + ], + "target.fields": [ + "portId" + ] + }, + "TrackingVessel": { + "cardinality": "one", + "target.entity": "TrackingVessel", + "source.fields": [ + "vesselId" + ], + "target.fields": [ + "vesselId" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "TrackingPort": { + "source": { + "object": "silver_trk.ww_ports", + "type": "table", + "key-fields": [ + "portId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TrackingPort", + "plural": "TrackingPorts" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "relationships": { + "geoPort": { + "cardinality": "one", + "target.entity": "geoPort", + "source.fields": [ "locode" ], + "target.fields": [ "Code" ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "TrackingVessel": { + "source": { + "object": "silver_trk.ww_vessels", + "type": "table", + "key-fields": [ + "vesselId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TrackingVessel", + "plural": "TrackingVessels" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + 
"action": "read" + } + ] + } + ] + }, + "Transport": { + "source": { + "object": "silver_ops.Transport", + "type": "table", + "key-fields": [ + "id" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Transport", + "plural": "Transports" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "ShipmentEquipmentTransport": { + "cardinality": "many", + "target.entity": "ShipmentEquipmentTransport", + "source.fields": [ + "id" + ], + "target.fields": [ + "transportID" + ] + }, + "Vessel": { + "cardinality": "one", + "target.entity": "Vessel", + "source.fields": [ + "vesselID" + ], + "target.fields": [ + "vesselID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "TransportPlanStageType": { + "source": { + "object": "silver_ops.TransportPlanStageType", + "type": "table" + }, + "graphql": { + "enabled": true, + "type": { + "singular": "TransportPlanStageType", + "plural": "TransportPlanStageTypes" + } + }, + "rest": { + "enabled": true + }, + "cache": { + "enabled": true, + "ttl-seconds": 120 + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ] + }, + "Vessel": { + "source": { + "object": "silver_ops.Vessel", + "type": "table", + "key-fields": [ + "vesselID" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "Vessel", + "plural": "Vessels" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Transport": { + "cardinality": "many", + "target.entity": "Transport", + "source.fields": [ + "vesselID" + ], + "target.fields": [ + "vesselID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "WindwardRegistrationFeedback": { + "source": { + "object": "silver_trk.ww_TrackingFeedback", + "type": "table", + 
"key-fields": [ + "shipmentId" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "WindwardRegistrationFeedback", + "plural": "WindwardRegistrationFeedbacks" + } + }, + "rest": { + "enabled": true + }, + "relationships": { + "Shipment": { + "cardinality": "one", + "target.entity": "Shipment", + "source.fields": [ + "shipmentId" + ], + "target.fields": [ + "shipmentID" + ] + } + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 120 + } + }, + "OperationalUnitOfMeasure": { + "source": { + "object": "silver_ops.OperationalUnitOfMeasure", + "type": "table", + "key-fields": [ + "Code" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "OperationalUnitOfMeasure", + "plural": "OperationalUnitsOfMeasure" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { + "action": "read" + } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 600 + } + }, + "ContainerType": { + "source": { + "object": "silver_ops.ContainerType", + "type": "table", + "key-fields": [ + "ISOCode" + ] + }, + "graphql": { + "enabled": true, + "type": { + "singular": "ContainerType", + "plural": "ContainerTypes" + } + }, + "rest": { + "enabled": true + }, + "permissions": [ + { + "role": "anonymous", + "actions": [ + { "action": "read" } + ] + } + ], + "cache": { + "enabled": true, + "ttl-seconds": 600 + } + } + } +}