diff --git a/.changeset/auth-meet-scope.md b/.changeset/auth-meet-scope.md new file mode 100644 index 00000000..0899aee3 --- /dev/null +++ b/.changeset/auth-meet-scope.md @@ -0,0 +1,5 @@ +--- +"@googleworkspace/cli": patch +--- + +Fix `gws auth login -s meet` not granting any Meet scopes. The Meet API exposes scopes under the `meetings.*` prefix (e.g. `meetings.space.readonly`), but the CLI service alias is `meet`, so the scope-picker filter matched nothing and silently dropped every Meet scope. Map the `meet` service to the `meetings` scope prefix so the existing dynamic-augmentation path pulls all Meet scopes from the Discovery document when `-s meet` is supplied. diff --git a/.changeset/auth-tui-no-cloud-platform-inject.md b/.changeset/auth-tui-no-cloud-platform-inject.md new file mode 100644 index 00000000..2507f50f --- /dev/null +++ b/.changeset/auth-tui-no-cloud-platform-inject.md @@ -0,0 +1,7 @@ +--- +"@googleworkspace/cli": patch +--- + +fix(auth): stop auto-injecting cloud-platform scope after TUI scope picker selection. This scope is restricted by Google and blocked by some Workspace admin policies, which caused `admin_policy_enforced` login failures for users who picked narrower, permitted scopes (upstream #562). Users who need cloud-platform (e.g. for the modelarmor helper) can tick it in the picker or pass `--full` / `--scopes https://www.googleapis.com/auth/cloud-platform`. + +Behavior change: existing users who relied on the silent cloud-platform injection to run the modelarmor helper must re-authenticate with one of the explicit paths above on their next `gws auth login`. 
diff --git a/.changeset/clippy-script-collapsible-match.md b/.changeset/clippy-script-collapsible-match.md new file mode 100644 index 00000000..3ae4c3f1 --- /dev/null +++ b/.changeset/clippy-script-collapsible-match.md @@ -0,0 +1,5 @@ +--- +"@googleworkspace/cli": patch +--- + +chore(clippy): collapse nested `if` inside the `"json"` arm of the Apps Script file classifier into a match guard. No behavior change — only satisfies `clippy::collapsible_match` which is now enforced in CI. diff --git a/.changeset/fix-asset-naming.md b/.changeset/fix-asset-naming.md new file mode 100644 index 00000000..081acf72 --- /dev/null +++ b/.changeset/fix-asset-naming.md @@ -0,0 +1,5 @@ +--- +"@googleworkspace/cli": patch +--- + +ci(release): use friendly asset names (linux-amd64, macos-arm64, etc.) instead of raw Rust target triples. Add asset_name field to build matrix. Update Homebrew formula URLs and FORK.md install commands accordingly. diff --git a/.changeset/fix-rpm-package-path.md b/.changeset/fix-rpm-package-path.md new file mode 100644 index 00000000..de3f3e33 --- /dev/null +++ b/.changeset/fix-rpm-package-path.md @@ -0,0 +1,5 @@ +--- +"@googleworkspace/cli": patch +--- + +fix(release): use -p crates/google-workspace-cli for cargo generate-rpm. The --manifest-path flag is not supported in cargo-generate-rpm 0.21.0; -p treats its argument as a directory path, so the correct form is -p crates/google-workspace-cli (which resolves to crates/google-workspace-cli/Cargo.toml). diff --git a/.changeset/fix-test-race-fork-install-docs.md b/.changeset/fix-test-race-fork-install-docs.md new file mode 100644 index 00000000..3f0aca79 --- /dev/null +++ b/.changeset/fix-test-race-fork-install-docs.md @@ -0,0 +1,5 @@ +--- +"@googleworkspace/cli": patch +--- + +fix(test): fix encryption key race in parallel tests; docs: add deb/rpm/Windows install sections to FORK.md. 
Replace raw set_var with EnvVarGuard in test_load_credentials_encrypted_file to prevent GOOGLE_WORKSPACE_CLI_CONFIG_DIR leaking into concurrent tests. Add serial attribute and per-test config-dir isolation to test_load_credentials_encrypted_takes_priority_over_default. diff --git a/.changeset/gmail-profile-userinfo-endpoint.md b/.changeset/gmail-profile-userinfo-endpoint.md new file mode 100644 index 00000000..936edf1e --- /dev/null +++ b/.changeset/gmail-profile-userinfo-endpoint.md @@ -0,0 +1,5 @@ +--- +"@googleworkspace/cli": patch +--- + +fix(gmail): switch display-name lookup in `gmail +send` from People API to the OIDC userinfo endpoint (`https://openidconnect.googleapis.com/v1/userinfo`). The People API path returned 403 on some personal Gmail accounts even when `userinfo.profile` was granted, which made the "grant the profile scope" tip fire for users who already had the scope, and sent messages with a null From display name (upstream #644). The userinfo endpoint accepts the same `userinfo.profile` scope and behaves uniformly across Workspace and personal accounts. The 401/403 fallback message was also reworded so it no longer misdiagnoses transient permission denials as a missing scope. diff --git a/.changeset/gmail-unpadded-base64url.md b/.changeset/gmail-unpadded-base64url.md new file mode 100644 index 00000000..ad6c4eb7 --- /dev/null +++ b/.changeset/gmail-unpadded-base64url.md @@ -0,0 +1,5 @@ +--- +"@googleworkspace/cli": patch +--- + +fix(gmail): accept unpadded base64url from Gmail API. Gmail API returns base64url data without `=` padding in attachment and message body responses (per spec). The previous decoder (`URL_SAFE`) required canonical padding and would silently return an error for real Gmail data with missing padding. Replaced with a custom `URL_SAFE_LENIENT` engine using `DecodePaddingMode::Indifferent`, which accepts both padded and unpadded input without any preprocessing (upstream #774). 
diff --git a/.changeset/mcp-helper-tools.md b/.changeset/mcp-helper-tools.md new file mode 100644 index 00000000..712cde78 --- /dev/null +++ b/.changeset/mcp-helper-tools.md @@ -0,0 +1,5 @@ +--- +"@googleworkspace/cli": minor +--- + +Add MCP helper tools: gmail_send convenience wrapper that reuses CLI helper logic for RFC 2822 formatting and base64 encoding diff --git a/.changeset/mcp-http-phase2-oauth.md b/.changeset/mcp-http-phase2-oauth.md new file mode 100644 index 00000000..3cbaa8a7 --- /dev/null +++ b/.changeset/mcp-http-phase2-oauth.md @@ -0,0 +1,5 @@ +--- +"@googleworkspace/cli": minor +--- + +feat(mcp): add OAuth2 PKCE Authorization Server for HTTP transport (Phase 2/3). Enable with `gws mcp --transport http --auth`. Implements MCP Authorization spec 2025-11-25: RFC 9728 protected-resource metadata, RFC 8414 AS metadata, DCR stub, PKCE S256 authorization/token flow via Google OAuth2. All `/mcp` requests require `Authorization: Bearer` when `--auth` is active; unauthenticated requests receive 401 with `WWW-Authenticate` pointing to the AS metadata URL. Sessions expire after 8 hours. Requires `client_secret.json` from `gws auth setup`. diff --git a/.changeset/mcp-http-transport-phase1.md b/.changeset/mcp-http-transport-phase1.md new file mode 100644 index 00000000..0e62ceee --- /dev/null +++ b/.changeset/mcp-http-transport-phase1.md @@ -0,0 +1,5 @@ +--- +"@googleworkspace/cli": minor +--- + +feat(mcp): add Streamable HTTP transport (Phase 1, no auth). Start with `gws mcp -s gmail --transport http --port 3000`; Claude Desktop config changes to `{"url": "http://localhost:3000/mcp"}`. Uses rmcp 1.x `transport-streamable-http-server` feature via axum. Stdio transport unchanged. 
diff --git a/.changeset/release-windows-deb-rpm.md b/.changeset/release-windows-deb-rpm.md new file mode 100644 index 00000000..7ff5ba93 --- /dev/null +++ b/.changeset/release-windows-deb-rpm.md @@ -0,0 +1,5 @@ +--- +"@googleworkspace/cli": patch +--- + +ci(release): add Windows (.zip), Debian (.deb), and RPM (.rpm) packages to release artifacts. Windows binary is packaged as a zip archive. Linux targets additionally generate .deb (via cargo-deb) and .rpm (via cargo-generate-rpm) packages alongside the existing .tar.gz archives. diff --git a/.github/workflows/auto-tag.yml b/.github/workflows/auto-tag.yml new file mode 100644 index 00000000..9d0a8383 --- /dev/null +++ b/.github/workflows/auto-tag.yml @@ -0,0 +1,45 @@ +name: Auto Tag + +on: + push: + branches: + - main + paths: + - 'crates/**/*.rs' + - 'crates/**/Cargo.toml' + - 'Cargo.lock' + workflow_dispatch: + +permissions: + contents: write + +jobs: + tag: + name: Create fork tag + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + token: ${{ secrets.GH_PAT }} + + - name: Compute and push next tag + run: | + VERSION=$(grep '^version' crates/google-workspace-cli/Cargo.toml \ + | head -1 | grep -oP '[\d]+\.[\d]+\.[\d]+') + + LAST=$(git tag -l "fork/v${VERSION}-mcp.*" | sort -V | tail -1) + if [ -z "$LAST" ]; then + N=1 + else + N=$(echo "$LAST" | grep -oP '\d+$') + N=$((N + 1)) + fi + + NEW_TAG="fork/v${VERSION}-mcp.${N}" + echo "Creating tag: $NEW_TAG" + + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + git tag "$NEW_TAG" + git push origin "$NEW_TAG" diff --git a/.github/workflows/automation.yml b/.github/workflows/automation.yml deleted file mode 100644 index 5eb3de95..00000000 --- a/.github/workflows/automation.yml +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright 2026 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use 
this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: Automation - -on: - push: - branches: [main] - pull_request_target: - types: [opened, synchronize, reopened] - pull_request_review: - types: [submitted] - -permissions: - contents: write - issues: write - pull-requests: write - -jobs: - format: - name: Format - if: github.event_name == 'push' - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 - with: - token: ${{ secrets.GOOGLEWORKSPACE_BOT_TOKEN }} - - - name: Install Rust - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable - with: - components: rustfmt - - - name: Run cargo fmt - run: cargo fmt --all - - - name: Commit and push - run: | - git config user.name "github-actions[bot]" - git config user.email "41898282+github-actions[bot]@users.noreply.github.com" - git add -A - git diff --cached --quiet || git commit -m "style: cargo fmt" && git push - - file-labeler: - name: File Labeler - if: github.event_name == 'pull_request_target' - runs-on: ubuntu-latest - steps: - - uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5 - with: - repo-token: ${{ secrets.GOOGLEWORKSPACE_BOT_TOKEN }} - sync-labels: true - - gemini-review: - name: Gemini Review - if: >- - github.event_name == 'pull_request_target' && - github.event.action == 'synchronize' - runs-on: ubuntu-latest - concurrency: - group: gemini-review-${{ github.event.pull_request.number }} - cancel-in-progress: true - steps: - - name: Remove reviewed label - uses: 
actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - script: | - try { - await github.rest.issues.removeLabel({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.payload.pull_request.number, - name: 'gemini: reviewed', - }); - } catch (e) { - // Label not present — ignore - } - - - name: Debounce - run: sleep 60 - - - name: Trigger Gemini Code Assist review - uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7 - with: - github-token: ${{ secrets.GOOGLEWORKSPACE_BOT_TOKEN }} - script: | - await github.rest.issues.createComment({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.payload.pull_request.number, - body: '/gemini review', - }); - - gemini-reviewed: - name: Gemini Reviewed - if: >- - github.event_name == 'pull_request_review' && - github.event.review.user.login == 'gemini-code-assist[bot]' - runs-on: ubuntu-latest - steps: - - name: Add reviewed label if review matches HEAD - uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - script: | - const pr = await github.rest.pulls.get({ - owner: context.repo.owner, - repo: context.repo.repo, - pull_number: context.payload.pull_request.number, - }); - - if (context.payload.review.commit_id !== pr.data.head.sha) { - console.log(`Review is for ${context.payload.review.commit_id} but HEAD is ${pr.data.head.sha} — skipping label`); - return; - } - - try { - await github.rest.issues.addLabels({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.payload.pull_request.number, - labels: ['gemini: reviewed'], - }); - } catch (e) { - if (e.status === 403) { - console.log(`Token cannot add labels for this review event (${e.message}) — skipping`); - return; - } - throw e; - } diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1eedc997..3127189d 100644 --- 
a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -276,7 +276,7 @@ jobs: build: name: Build - needs: [smoketest, changes] + needs: [build-linux, changes] if: | always() && !cancelled() && !failure() && (needs.changes.outputs.rust == 'true' || github.event_name == 'push') @@ -339,91 +339,3 @@ jobs: env: CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc - smoketest: - name: API Smoketest - needs: build-linux - if: github.event.pull_request.head.repo.full_name == github.repository || github.event_name != 'pull_request' - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 - - - name: Download binary - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4 - with: - name: gws-linux-x86_64 - path: ./bin - - - name: Make binary executable - run: chmod +x ./bin/gws - - - name: Decode credentials - env: - GOOGLE_CREDENTIALS_JSON: ${{ secrets.GOOGLE_CREDENTIALS_JSON }} - run: | - if [ -z "$GOOGLE_CREDENTIALS_JSON" ]; then - echo "::error::GOOGLE_CREDENTIALS_JSON secret is not set" - exit 1 - fi - echo "$GOOGLE_CREDENTIALS_JSON" | base64 -d > /tmp/credentials.json - - - name: Smoketest — help - run: ./bin/gws --help - - - name: Smoketest — schema introspection - run: ./bin/gws schema drive.files.list | jq -e '.httpMethod' - - - name: Smoketest — Drive files list - env: - GOOGLE_WORKSPACE_CLI_CREDENTIALS_FILE: /tmp/credentials.json - run: | - ./bin/gws drive files list \ - --params '{"pageSize": 1, "fields": "files(id,mimeType)"}' \ - | jq -e '.files' - - - name: Smoketest — Gmail messages - env: - GOOGLE_WORKSPACE_CLI_CREDENTIALS_FILE: /tmp/credentials.json - run: | - ./bin/gws gmail users messages list \ - --params '{"userId": "me", "maxResults": 1, "fields": "messages(id)"}' \ - | jq -e '.messages' - - - name: Smoketest — Calendar events - env: - GOOGLE_WORKSPACE_CLI_CREDENTIALS_FILE: /tmp/credentials.json - run: | - ./bin/gws calendar events list \ - --params 
'{"calendarId": "primary", "maxResults": 1, "fields": "kind,items(id,status)"}' \ - | jq -e '.kind' - - - name: Smoketest — Slides presentation - env: - GOOGLE_WORKSPACE_CLI_CREDENTIALS_FILE: /tmp/credentials.json - run: | - ./bin/gws slides presentations get \ - --params '{"presentationId": "1knOKD_87JWE4qsEbO4r5O91IxTER5ybBBhOJgZ1yLFI", "fields": "presentationId,slides(objectId)"}' \ - | jq -e '.presentationId' - - - name: Smoketest — pagination - env: - GOOGLE_WORKSPACE_CLI_CREDENTIALS_FILE: /tmp/credentials.json - run: | - LINES=$(./bin/gws drive files list \ - --params '{"pageSize": 1, "fields": "nextPageToken,files(id)"}' \ - --page-all --page-limit 2 \ - | wc -l) - if [ "$LINES" -lt 2 ]; then - echo "::error::Expected at least 2 NDJSON lines from pagination, got $LINES" - exit 1 - fi - - - name: Smoketest — error handling - run: | - if ./bin/gws fakeservice list 2>&1; then - echo "::error::Expected exit code 1 for unknown service" - exit 1 - fi - - - name: Cleanup credentials - if: always() - run: rm -f /tmp/credentials.json diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml deleted file mode 100644 index c4a4809c..00000000 --- a/.github/workflows/cla.yml +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2026 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -name: CLA - -on: - check_run: - types: [completed] - -permissions: - pull-requests: write - -jobs: - cla-label: - name: CLA Label - if: github.event.check_run.name == 'cla/google' - runs-on: ubuntu-latest - steps: - - name: Update CLA label - uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7 - with: - github-token: ${{ secrets.GOOGLEWORKSPACE_BOT_TOKEN }} - script: | - const cr = context.payload.check_run; - const passed = cr.conclusion === 'success'; - - for (const pr of cr.pull_requests) { - const labels = passed - ? { add: 'cla: yes', remove: 'cla: no' } - : { add: 'cla: no', remove: 'cla: yes' }; - - await github.rest.issues.addLabels({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: pr.number, - labels: [labels.add], - }); - - try { - await github.rest.issues.removeLabel({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: pr.number, - name: labels.remove, - }); - } catch (e) { - // Label not present — ignore - } - } diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml deleted file mode 100644 index 4e1b323c..00000000 --- a/.github/workflows/coverage.yml +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2026 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -name: Coverage - -on: - push: - branches: [main] - paths: ['**/*.rs', 'Cargo.toml', 'Cargo.lock'] - pull_request: - branches: [main] - paths: ['**/*.rs', 'Cargo.toml', 'Cargo.lock'] - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} - -jobs: - coverage: - name: Coverage - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 - - - name: Install Rust - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable - with: - components: llvm-tools-preview - - - name: Install cargo-llvm-cov - uses: taiki-e/install-action@a37010ded18ff788be4440302bd6830b1ae50d8b # cargo-llvm-cov - with: - tool: cargo-llvm-cov - - - name: Generate code coverage - run: cargo llvm-cov --all-features --workspace --lcov --output-path lcov.info - - - name: Upload coverage to Codecov - uses: codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238 # v4 - with: - files: lcov.info - fail_ci_if_error: false - token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/generate-skills.yml b/.github/workflows/generate-skills.yml deleted file mode 100644 index a30f1bc3..00000000 --- a/.github/workflows/generate-skills.yml +++ /dev/null @@ -1,119 +0,0 @@ -# Copyright 2026 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -name: Generate Skills - -on: - schedule: - - cron: "0 * * * *" # Hourly — keeps skills in sync with Discovery API changes - workflow_dispatch: # Manual trigger - push: - branches-ignore: - - main # main is kept up to date by PR merges - -concurrency: - group: generate-skills-${{ github.ref_name }} - cancel-in-progress: true - -env: - CARGO_TERM_COLOR: always - -jobs: - generate: - name: Generate and commit skills - runs-on: ubuntu-latest - permissions: - contents: write - pull-requests: write - - steps: - - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 - with: - # For cron/dispatch: check out main. For push: check out the branch. - ref: ${{ github.head_ref || github.ref_name }} - token: ${{ secrets.GOOGLEWORKSPACE_BOT_TOKEN }} - - - name: Install Rust - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable - - - name: Setup sccache - uses: mozilla-actions/sccache-action@2df7dbab909c49ab7d3382d05da469f3f975c2d6 # v0.0.7 - - - name: Cache cargo - uses: Swatinem/rust-cache@ad397744b0d591a723ab90405b7247fac0e6b8db # v2 - with: - key: generate-skills-ubuntu - - - name: Generate skills - run: cargo run -- generate-skills - - - name: Check for changes - id: diff - run: | - if git diff --quiet skills/ docs/skills.md; then - echo "changed=false" >> "$GITHUB_OUTPUT" - else - echo "changed=true" >> "$GITHUB_OUTPUT" - fi - - # --- Cron / workflow_dispatch: open a PR against main --- - - name: Create changeset for sync PR - if: >- - steps.diff.outputs.changed == 'true' && - (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') - run: | - mkdir -p .changeset - cat > .changeset/sync-skills.md << 'EOF' - --- - "@googleworkspace/cli": patch - --- - - Sync generated skills with latest Google Discovery API specs - EOF - - - name: Create or update sync PR - if: >- - steps.diff.outputs.changed == 'true' && - (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') - uses: 
peter-evans/create-pull-request@22a9089034f40e5a961c8808d113e2c98fb63676 # v7 - with: - token: ${{ secrets.GOOGLEWORKSPACE_BOT_TOKEN }} - branch: chore/sync-skills - title: "chore: sync skills with Discovery API" - body: | - Automated PR — the Google Discovery API specs have changed and the - generated skill files are out of date. - - Created by the **Generate Skills** workflow (`generate-skills.yml`). - commit-message: "chore: regenerate skills from Discovery API" - add-paths: | - skills/ - docs/skills.md - .changeset/sync-skills.md - delete-branch: true - - # --- Push events (non-main branches): commit directly --- - - name: Commit and push if changed - if: >- - steps.diff.outputs.changed == 'true' && - github.event_name == 'push' - env: - GITHUB_TOKEN: ${{ secrets.GOOGLEWORKSPACE_BOT_TOKEN }} - run: | - git config user.name "googleworkspace-bot" - git config user.email "googleworkspace-bot@users.noreply.github.com" - - git add skills/ docs/skills.md - git commit -m "chore: regenerate skills [skip ci]" - git push diff --git a/.github/workflows/publish-skills.yml b/.github/workflows/publish-skills.yml deleted file mode 100644 index 35214380..00000000 --- a/.github/workflows/publish-skills.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: Publish OpenClaw Skills - -on: - push: - branches: [main] - paths: - - "skills/**" - - ".github/workflows/publish-skills.yml" - pull_request: - branches: [main] - paths: - - "skills/**" - - ".github/workflows/publish-skills.yml" - schedule: - - cron: "0 * * * *" # Hourly, to drip-publish past rate limits - workflow_dispatch: - -concurrency: - group: ${{ github.workflow }} - cancel-in-progress: false - -jobs: - publish: - # Skip fork PRs — secrets (CLAWHUB_TOKEN) are not available - if: github.event.pull_request.head.repo.full_name == github.repository || github.event_name != 'pull_request' - runs-on: ubuntu-latest - permissions: - contents: read - steps: - - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 - - - uses: 
actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4 - with: - node-version: "20" - - - name: Install ClawHub CLI - run: npm i -g clawhub@0.7.0 - - - name: Authenticate ClawHub - env: - CLAWHUB_TOKEN: ${{ secrets.CLAWHUB_TOKEN }} - run: | - if [ -z "$CLAWHUB_TOKEN" ]; then - echo "::error::CLAWHUB_TOKEN secret is not set" - exit 1 - fi - clawhub login --token "$CLAWHUB_TOKEN" - - - name: Publish skills - run: | - if [ "${{ github.event_name }}" = "pull_request" ]; then - clawhub sync --root skills --all --dry-run - else - clawhub sync --root skills --all - fi diff --git a/.github/workflows/release-changesets.yml b/.github/workflows/release-changesets.yml deleted file mode 100644 index 25caa4f4..00000000 --- a/.github/workflows/release-changesets.yml +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright 2026 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -name: Release (Changeset) - -on: - workflow_dispatch: - push: - branches: - - main - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: false - -jobs: - release: - name: Release - runs-on: ubuntu-latest - permissions: - contents: write - pull-requests: write - steps: - - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 - with: - token: ${{ secrets.GOOGLEWORKSPACE_BOT_TOKEN }} - - - name: Install Rust - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable - - - name: Install Nix - uses: cachix/install-nix-action@08dcb3a5e62fa31e2da3d490afc4176ef55ecd72 # v30 - with: - github_access_token: ${{ secrets.GOOGLEWORKSPACE_BOT_TOKEN }} - - - uses: pnpm/action-setup@c5ba7f7862a0f64c1b1a05fbac13e0b8e86ba08c # v4 - - - name: Setup Node.js - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4 - with: - node-version: '22' - cache: 'pnpm' - - - name: Install Dependencies - run: pnpm install - - - run: | - git config --global user.name "googleworkspace-bot" - git config --global user.email "googleworkspace-bot@google.com" - - - name: Create Release Pull Request or Tag - id: changesets - uses: changesets/action@6a0a831ff30acef54f2c6aa1cbbc1096b066edaf # v1 - with: - version: pnpm run version-sync - publish: pnpm run tag-release - commit: 'chore: release versions' - title: 'chore: release versions' - setupGitUser: false - env: - GITHUB_TOKEN: ${{ secrets.GOOGLEWORKSPACE_BOT_TOKEN }} diff --git a/.github/workflows/release-fork.yml b/.github/workflows/release-fork.yml new file mode 100644 index 00000000..43e454b9 --- /dev/null +++ b/.github/workflows/release-fork.yml @@ -0,0 +1,272 @@ +name: Release + +on: + push: + tags: + - 'fork/v*' + workflow_dispatch: + inputs: + tag_name: + description: 'Tag to release (e.g. 
fork/v0.22.5-mcp.1)' + required: true + type: string + +permissions: + contents: write + +env: + CARGO_TERM_COLOR: always + SCCACHE_GHA_ENABLED: "true" + SCCACHE_IGNORE_SERVER_IO_ERROR: "true" + +jobs: + build: + name: Build (${{ matrix.target }}) + runs-on: ${{ matrix.os }} + strategy: + matrix: + include: + - os: macos-latest + target: aarch64-apple-darwin + asset_name: macos-arm64 + - os: macos-latest + target: x86_64-apple-darwin + asset_name: macos-amd64 + - os: ubuntu-latest + target: x86_64-unknown-linux-gnu + asset_name: linux-amd64 + - os: ubuntu-latest + target: aarch64-unknown-linux-gnu + asset_name: linux-arm64 + - os: windows-latest + target: x86_64-pc-windows-msvc + asset_name: windows-amd64 + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Set asset filename + shell: bash + run: | + TAG="${{ github.event.inputs.tag_name || github.ref_name }}" + VERSION="${TAG#fork/v}" + echo "ASSET=gws-mcp-${VERSION}-${{ matrix.asset_name }}" >> "$GITHUB_ENV" + + - name: Install Rust + uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + targets: ${{ matrix.target }} + + - name: Setup sccache + id: sccache + uses: mozilla-actions/sccache-action@9e7fa8a12102821edf02ca5dbea1acd0f89a2696 # v0.0.10 + continue-on-error: true + + - name: Enable sccache + if: steps.sccache.outcome == 'success' + shell: bash + run: | + if sccache --start-server 2>/dev/null; then + echo "RUSTC_WRAPPER=sccache" >> "$GITHUB_ENV" + else + echo "::warning::sccache server failed to start, building without cache" + fi + + - name: Cache cargo + uses: Swatinem/rust-cache@c19371144df3bb44fab255c43d04cbc2ab54d1c4 # v2.9.1 + with: + key: release-${{ matrix.target }} + cache-targets: "false" + + - name: Install cross-compilation tools (aarch64 Linux) + if: matrix.target == 'aarch64-unknown-linux-gnu' + run: | + sudo apt-get update + sudo apt-get install -y gcc-aarch64-linux-gnu + + - name: Disable Windows Defender 
scanning for cargo + if: runner.os == 'Windows' + shell: pwsh + run: | + Add-MpPreference -ExclusionPath "$env:USERPROFILE\.cargo" + Add-MpPreference -ExclusionPath "$env:USERPROFILE\.rustup" + Add-MpPreference -ExclusionPath "${{ github.workspace }}\target" + + - name: Build + run: cargo build --release --target ${{ matrix.target }} + env: + CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc + + - name: Package (Unix) + if: runner.os != 'Windows' + run: | + tar -czf "${{ env.ASSET }}.tar.gz" \ + -C "target/${{ matrix.target }}/release" gws + if command -v sha256sum >/dev/null 2>&1; then + sha256sum "${{ env.ASSET }}.tar.gz" > "${{ env.ASSET }}.tar.gz.sha256" + else + shasum -a 256 "${{ env.ASSET }}.tar.gz" > "${{ env.ASSET }}.tar.gz.sha256" + fi + + - name: Package (Windows) + if: runner.os == 'Windows' + shell: pwsh + run: | + Compress-Archive -Path "target/${{ matrix.target }}/release/gws.exe" ` + -DestinationPath "${{ env.ASSET }}.zip" + $hash = (Get-FileHash "${{ env.ASSET }}.zip" -Algorithm SHA256).Hash.ToLower() + "$hash ${{ env.ASSET }}.zip" | Out-File -Encoding ASCII "${{ env.ASSET }}.zip.sha256" + + - name: Generate .deb + if: runner.os == 'Linux' + run: | + cargo install cargo-deb@3.7.0 + cargo deb --no-build --no-strip --target ${{ matrix.target }} \ + -p google-workspace-cli + DEB=$(ls target/${{ matrix.target }}/debian/*.deb) + cp "$DEB" "${{ env.ASSET }}.deb" + sha256sum "${{ env.ASSET }}.deb" > "${{ env.ASSET }}.deb.sha256" + + - name: Generate .rpm + if: runner.os == 'Linux' + run: | + cargo install cargo-generate-rpm@0.21.0 + cargo generate-rpm --target ${{ matrix.target }} \ + -p crates/google-workspace-cli + RPM=$(ls target/${{ matrix.target }}/generate-rpm/*.rpm) + cp "$RPM" "${{ env.ASSET }}.rpm" + sha256sum "${{ env.ASSET }}.rpm" > "${{ env.ASSET }}.rpm.sha256" + + - name: Upload artifact + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: ${{ env.ASSET }} + path: ${{ env.ASSET 
}}.* + retention-days: 1 + + release: + name: Create Release and Update Formula + needs: build + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Download all artifacts + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 + with: + merge-multiple: true + + - name: Create GitHub Release + env: + GH_TOKEN: ${{ github.token }} + run: | + TAG="${{ github.event.inputs.tag_name || github.ref_name }}" + VERSION="${TAG#fork/v}" + cat > /tmp/release-notes.md << 'RELNOTES' + ## Changes + + See [FORK.md](https://github.com/shigechika/gws-mcp/blob/main/FORK.md) for fork-specific changes. + + ## Install + + ### Homebrew (macOS / Linux) + ```bash + brew install shigechika/tap/gws-mcp + ``` + + ### Debian / Ubuntu (.deb) + ```bash + sudo dpkg -i gws-mcp-VERSION-linux-amd64.deb + # or for arm64: + sudo dpkg -i gws-mcp-VERSION-linux-arm64.deb + ``` + + ### RHEL / Fedora / Amazon Linux (.rpm) + ```bash + sudo rpm -i gws-mcp-VERSION-linux-amd64.rpm + # or for aarch64: + sudo rpm -i gws-mcp-VERSION-linux-arm64.rpm + ``` + + ### Windows + Download `gws-mcp-VERSION-windows-amd64.zip`, extract `gws.exe`, and place it in a directory on your PATH. + + ### Cargo + ```bash + cargo install --git https://github.com/shigechika/gws-mcp --locked + ``` + + ### Direct download (macOS / Linux) + Download the `.tar.gz` archive for your platform below and place `gws` in your PATH. 
+ RELNOTES + # Replace VERSION placeholder with actual version + sed -i "s/VERSION/${VERSION}/g" /tmp/release-notes.md + gh release create "$TAG" \ + --title "fork/v${VERSION}" \ + --notes-file /tmp/release-notes.md \ + gws-mcp-* + + - name: Update Homebrew formula + env: + GH_TOKEN: ${{ secrets.HOMEBREW_TAP_TOKEN }} + TAG: ${{ github.event.inputs.tag_name || github.ref_name }} + run: | + [[ -z "$GH_TOKEN" ]] && echo "HOMEBREW_TAP_TOKEN not set, skipping formula update" && exit 0 + VERSION="${TAG#fork/v}" + TAG_ENCODED="${TAG/\//%2F}" + BASE_URL="https://github.com/shigechika/gws-mcp/releases/download/${TAG_ENCODED}" + + SHA_ARM64_DARWIN=$(awk '{print $1}' "gws-mcp-${VERSION}-macos-arm64.tar.gz.sha256") + SHA_X86_64_DARWIN=$(awk '{print $1}' "gws-mcp-${VERSION}-macos-amd64.tar.gz.sha256") + SHA_X86_64_LINUX=$(awk '{print $1}' "gws-mcp-${VERSION}-linux-amd64.tar.gz.sha256") + SHA_ARM64_LINUX=$(awk '{print $1}' "gws-mcp-${VERSION}-linux-arm64.tar.gz.sha256") + + cat > /tmp/gws-mcp.rb << FORMULA + class GwsMcp < Formula + desc "Google Workspace CLI with MCP server support" + homepage "https://github.com/shigechika/gws-mcp" + version "${VERSION}" + license "Apache-2.0" + + on_macos do + on_arm do + url "${BASE_URL}/gws-mcp-${VERSION}-macos-arm64.tar.gz" + sha256 "${SHA_ARM64_DARWIN}" + end + on_intel do + url "${BASE_URL}/gws-mcp-${VERSION}-macos-amd64.tar.gz" + sha256 "${SHA_X86_64_DARWIN}" + end + end + + on_linux do + on_arm do + url "${BASE_URL}/gws-mcp-${VERSION}-linux-arm64.tar.gz" + sha256 "${SHA_ARM64_LINUX}" + end + on_intel do + url "${BASE_URL}/gws-mcp-${VERSION}-linux-amd64.tar.gz" + sha256 "${SHA_X86_64_LINUX}" + end + end + + def install + bin.install "gws" + end + + test do + assert_match version.to_s, shell_output("#{bin}/gws --version") + end + end + FORMULA + + # Commit to homebrew-tap + gh api repos/shigechika/homebrew-tap/contents/Formula/gws-mcp.rb \ + --method PUT \ + --field message="update: gws-mcp formula to ${VERSION}" \ + --field 
content="$(base64 -w0 /tmp/gws-mcp.rb)" \ + --field sha="$(gh api repos/shigechika/homebrew-tap/contents/Formula/gws-mcp.rb --jq '.sha' 2>/dev/null || echo '')" \ + 2>/dev/null || \ + gh api repos/shigechika/homebrew-tap/contents/Formula/gws-mcp.rb \ + --method PUT \ + --field message="add: gws-mcp formula ${VERSION}" \ + --field content="$(base64 -w0 /tmp/gws-mcp.rb)" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml deleted file mode 100644 index 6de7cbc5..00000000 --- a/.github/workflows/release.yml +++ /dev/null @@ -1,227 +0,0 @@ -# Release workflow for @googleworkspace/cli -# -# Triggered by pushing a semver tag (e.g. v0.22.3). -# Builds platform binaries, creates a GitHub Release, publishes to npm and crates.io. - -name: Release -permissions: - contents: write - attestations: write - id-token: write - -on: - pull_request: - push: - tags: - - 'v[0-9]+.[0-9]+.[0-9]+*' - -env: - CARGO_TERM_COLOR: always - -jobs: - plan: - runs-on: ubuntu-latest - outputs: - version: ${{ steps.meta.outputs.version }} - prerelease: ${{ steps.meta.outputs.prerelease }} - publishing: ${{ github.ref_type == 'tag' }} - steps: - - id: meta - run: | - TAG="${GITHUB_REF_NAME}" - VERSION="${TAG#v}" - echo "version=${VERSION}" >> "$GITHUB_OUTPUT" - if [[ "$VERSION" == *-* ]]; then - echo "prerelease=true" >> "$GITHUB_OUTPUT" - else - echo "prerelease=false" >> "$GITHUB_OUTPUT" - fi - - build: - needs: plan - strategy: - fail-fast: false - matrix: - include: - - target: aarch64-apple-darwin - runner: macos-latest - archive: tar.gz - - target: x86_64-apple-darwin - runner: macos-latest - archive: tar.gz - - target: aarch64-unknown-linux-gnu - runner: ubuntu-latest - archive: tar.gz - cross: true - - target: aarch64-unknown-linux-musl - runner: ubuntu-latest - archive: tar.gz - cross: true - - target: x86_64-unknown-linux-gnu - runner: ubuntu-latest - archive: tar.gz - - target: x86_64-unknown-linux-musl - runner: ubuntu-latest - archive: tar.gz - cross: true - - 
target: x86_64-pc-windows-msvc - runner: windows-latest - archive: zip - runs-on: ${{ matrix.runner }} - steps: - - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 - - - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable - with: - targets: ${{ matrix.target }} - - - name: Install cross - if: matrix.cross - run: cargo install cross --git https://github.com/cross-rs/cross --tag v0.2.5 - - - name: Build - run: | - if [ "${{ matrix.cross }}" = "true" ]; then - cross build --release --target ${{ matrix.target }} --locked - else - cargo build --release --target ${{ matrix.target }} --locked - fi - shell: bash - - - name: Package (unix) - if: matrix.archive == 'tar.gz' - run: | - ARTIFACT="google-workspace-cli-${{ matrix.target }}" - mkdir -p staging - cp "target/${{ matrix.target }}/release/gws" staging/ - cp LICENSE README.md CHANGELOG.md staging/ - tar czf "${ARTIFACT}.tar.gz" -C staging . - shasum -a 256 "${ARTIFACT}.tar.gz" > "${ARTIFACT}.tar.gz.sha256" - shell: bash - - - name: Package (windows) - if: matrix.archive == 'zip' - run: | - $ARTIFACT = "google-workspace-cli-${{ matrix.target }}" - New-Item -ItemType Directory -Path $ARTIFACT - Copy-Item "target/${{ matrix.target }}/release/gws.exe" "$ARTIFACT/" - Copy-Item LICENSE, README.md, CHANGELOG.md "$ARTIFACT/" - Compress-Archive -Path "$ARTIFACT/*" -DestinationPath "$ARTIFACT.zip" - (Get-FileHash "$ARTIFACT.zip" -Algorithm SHA256).Hash.ToLower() + " $ARTIFACT.zip" | Out-File -Encoding ascii "$ARTIFACT.zip.sha256" - shell: pwsh - - - name: Attest - if: github.ref_type == 'tag' - uses: actions/attest-build-provenance@43d14bc2b83dec42d39ecae14e916627a18bb661 # v3 - with: - subject-path: google-workspace-cli-${{ matrix.target }}.${{ matrix.archive }} - - - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 - with: - name: binary-${{ matrix.target }} - path: | - google-workspace-cli-${{ matrix.target }}.${{ matrix.archive }} - 
google-workspace-cli-${{ matrix.target }}.${{ matrix.archive }}.sha256 - - release: - needs: [plan, build] - if: github.ref_type == 'tag' - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 - - - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4 - with: - pattern: binary-* - path: artifacts/ - merge-multiple: true - - - name: Create GitHub Release - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - PRERELEASE_FLAG="" - if [ "${{ needs.plan.outputs.prerelease }}" = "true" ]; then - PRERELEASE_FLAG="--prerelease" - fi - - NOTES="## Installation - - Download the archive for your OS and architecture from the assets below. - - ### Linux / macOS - - Replace \`\` with your platform (e.g., \`aarch64-apple-darwin\` or \`x86_64-unknown-linux-gnu\`). - - \`\`\`bash - # 1. Download the archive and its checksum - curl -sLO https://github.com/googleworkspace/cli/releases/download/${{ github.ref_name }}/google-workspace-cli-.tar.gz - curl -sLO https://github.com/googleworkspace/cli/releases/download/${{ github.ref_name }}/google-workspace-cli-.tar.gz.sha256 - - # 2. Verify the checksum - shasum -a 256 -c google-workspace-cli-.tar.gz.sha256 - - # 3. Extract and install - tar -xzf google-workspace-cli-.tar.gz - chmod +x gws - sudo mv gws /usr/local/bin/ - \`\`\` - - ### Windows - - 1. Download \`google-workspace-cli-x86_64-pc-windows-msvc.zip\` and its \`.sha256\` file - 2. Verify the checksum (e.g., using PowerShell \`Get-FileHash\`) - 3. Extract the archive and move \`gws.exe\` to a directory included in your system \`PATH\`. 
- - --- - " - - gh release create "${{ github.ref_name }}" \ - --target "${{ github.sha }}" \ - --title "${{ github.ref_name }}" \ - --notes "$NOTES" \ - --generate-notes \ - $PRERELEASE_FLAG \ - artifacts/* - - publish-npm: - needs: [plan, release] - runs-on: ubuntu-latest - if: ${{ needs.plan.outputs.publishing == 'true' && needs.plan.outputs.prerelease == 'false' }} - steps: - - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 - - - uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6 - with: - node-version: '20.x' - registry-url: 'https://wombat-dressing-room.appspot.com' - - - name: Publish to npm - working-directory: npm - run: npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} - - publish-cargo: - needs: [plan, release] - runs-on: ubuntu-latest - if: ${{ needs.plan.outputs.publishing == 'true' && needs.plan.outputs.prerelease == 'false' }} - steps: - - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 - - - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable - - # Publish library crate first (CLI depends on it) - - name: Publish google-workspace - run: cargo publish --package google-workspace - env: - CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} - - # Wait for crates.io to index the library crate - - name: Wait for crates.io index - run: sleep 30 - - - name: Publish google-workspace-cli - run: cargo publish --package google-workspace-cli - env: - CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml deleted file mode 100644 index 18c0b9fe..00000000 --- a/.github/workflows/stale.yml +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2026 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: 'Close Stale PRs' -on: - schedule: - - cron: '30 1 * * *' - workflow_dispatch: - -permissions: - pull-requests: write - -jobs: - stale: - runs-on: ubuntu-latest - steps: - - uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9 - with: - days-before-issue-stale: -1 - days-before-issue-close: -1 - days-before-pr-stale: 3 - days-before-pr-close: 0 - stale-pr-message: 'This PR has been inactive for 72 hours. Closing to keep the queue clean.' - close-pr-message: 'This PR was closed because it has been stalled for 72 hours. Feel free to magically reopen it if you want to continue working on it!' 
- exempt-pr-labels: 'keep-alive' diff --git a/.github/workflows/sync-upstream.yml b/.github/workflows/sync-upstream.yml new file mode 100644 index 00000000..892bd5d2 --- /dev/null +++ b/.github/workflows/sync-upstream.yml @@ -0,0 +1,139 @@ +name: Sync upstream + +on: + schedule: + - cron: '0 0 * * 1' # 毎週月曜 UTC 0:00 + workflow_dispatch: + +permissions: + contents: write + pull-requests: write + +jobs: + sync: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Configure git + run: | + git config user.name "github-actions[bot]" + git config user.email "41898282+github-actions[bot]@users.noreply.github.com" + + - name: Fetch upstream + run: | + git remote add upstream https://github.com/googleworkspace/cli.git + git fetch upstream + + - name: Check for new commits + id: check + run: | + COUNT=$(git rev-list HEAD..upstream/main --count) + echo "count=$COUNT" >> "$GITHUB_OUTPUT" + echo "Upstream has $COUNT new commit(s)" + + - name: Attempt merge + if: steps.check.outputs.count != '0' + id: merge + run: | + BRANCH="sync-upstream/$(date +%Y%m%d)" + git checkout -b "$BRANCH" + + # Files the fork has deleted and wants to keep deleted. + # If upstream modifies any of these, auto-resolve by removing them. + FORK_DELETED_FILES=( + ".github/workflows/release.yml" + ".github/workflows/automation.yml" + ".github/workflows/cla.yml" + ".github/workflows/coverage.yml" + ".github/workflows/generate-skills.yml" + ".github/workflows/publish-skills.yml" + ".github/workflows/release-changesets.yml" + ".github/workflows/stale.yml" + "dist-workspace.toml" + ) + + if git merge upstream/main --no-edit; then + CONFLICT=false + else + # Auto-resolve modify/delete conflicts for files the fork deleted. + RESOLVED=false + for f in "${FORK_DELETED_FILES[@]}"; do + if git status --porcelain "$f" 2>/dev/null | grep -q "^DU\|^UD"; then + git rm -f "$f" || true + RESOLVED=true + fi + done + + # Check if any conflicts remain after auto-resolution. 
+ if git diff --name-only --diff-filter=U | grep -q .; then + # Real conflicts remain — commit as-is for manual resolution. + git add -A + git commit -m "sync: merge upstream/main (conflicts need manual resolution)" --no-verify || true + CONFLICT=true + elif [ "$RESOLVED" = "true" ]; then + # Only fork-deleted-file conflicts existed, and we resolved them. + git commit --no-edit + CONFLICT=false + else + # Should not happen, but safe fallback. + git add -A + git commit -m "sync: merge upstream/main" --no-verify || true + CONFLICT=true + fi + fi + + if [ "$CONFLICT" = "false" ]; then + # Strip issue/PR number references to prevent cross-references + MSG=$(git log -1 --format=%B) + CLEANED=$(echo "$MSG" | sed 's/ (#[0-9]\+)//g; s/#[0-9]\+//g') + if [ "$MSG" != "$CLEANED" ]; then + git commit --amend -m "$CLEANED" + fi + fi + echo "conflict=$CONFLICT" >> "$GITHUB_OUTPUT" + + echo "branch=$BRANCH" >> "$GITHUB_OUTPUT" + git push -u origin "$BRANCH" + + - name: Create PR (clean merge) + if: steps.check.outputs.count != '0' && steps.merge.outputs.conflict == 'false' + run: | + UPSTREAM_VER=$(git describe --tags upstream/main 2>/dev/null || git rev-parse --short upstream/main) + gh pr create \ + --title "sync: merge upstream/main ($UPSTREAM_VER)" \ + --body "$(cat <<'EOF' + ## Summary + - upstream/main を自動マージしました(コンフリクトなし) + - レビュー後にマージしてください + + ### チェックリスト + - [ ] CI が通ること + - [ ] MCP 機能に影響がないこと(`mcp_server.rs`, `helpers/gmail/mod.rs` の `pub(crate)` 可視性) + EOF + )" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Create PR (conflict) + if: steps.check.outputs.count != '0' && steps.merge.outputs.conflict == 'true' + run: | + UPSTREAM_VER=$(git describe --tags upstream/main 2>/dev/null || git rev-parse --short upstream/main) + gh pr create \ + --title "sync: merge upstream/main ($UPSTREAM_VER) — 要手動解決" \ + --body "$(cat <<'EOF' + ## Summary + - upstream/main のマージでコンフリクトが発生しました + - 手動で解決してください + + ### MCP 温存チェックリスト + - [ ] 
`crates/google-workspace-cli/src/mcp_server.rs` が残っていること + - [ ] `crates/google-workspace-cli/src/main.rs` の `mod mcp_server` と MCP エントリが残っていること + - [ ] `crates/google-workspace-cli/src/helpers/gmail/mod.rs` の `pub(crate)` 可視性が維持されていること + - [ ] `cargo clippy -- -D warnings && cargo test` が通ること + EOF + )" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/AGENTS.md b/AGENTS.md index 72211226..8627a833 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -191,6 +191,55 @@ Helpers are handwritten commands prefixed with `+` that provide value the schema See [`src/helpers/README.md`](crates/google-workspace-cli/src/helpers/README.md) for full guidelines, anti-patterns, and a checklist for new helpers. +## MCP Helper Tools (fork-only) + +The MCP server (`gws mcp --helpers`) exposes high-level helper tools that automate complex API interactions for AI agents. These are fork-only additions. + +### Façade architecture + +To minimize upstream merge conflicts, MCP helper tools use a **façade pattern**: + +``` +helpers/gmail/mod.rs (upstream code) + └─ Fork-only MCP bridge block (end of file) + ├─ mcp_compose_reply() ← pub(crate) façade + └─ mcp_build_send_metadata() + +mcp_server.rs (fork-only file) + ├─ send_raw_gmail() ← shared Gmail send logic + ├─ handle_gmail_send() ← uses pub(crate) helpers directly + └─ handle_gmail_reply() ← calls mcp_compose_reply() façade +``` + +**Key rules:** +- **Never change `pub(super)` to `pub(crate)` on upstream functions.** Instead, add a `pub(crate)` façade function at the end of the file inside a clearly marked `// Fork-only: MCP bridge functions` block. +- Façade functions are always appended at the **end of the file** to minimize merge conflicts. +- `mcp_server.rs` itself is fork-only (upstream deleted it), so it has no merge conflicts. + +### Adding a new MCP helper tool + +1. **If the helper needs internal functions** — add a `pub(crate)` façade in the relevant `helpers/*.rs` file (end of file, in the MCP bridge block) +2. 
**Register the tool** in `append_helper_tools()` in `mcp_server.rs` (service-gated) +3. **Implement the handler** as `handle_(arguments)` in `mcp_server.rs` +4. **Add routing** in the `match` block inside `handle_tools_call()` +5. **Add tests** — parameter validation, schema, and `--helpers` flag gating +6. **Update FORK.md** tool table + +### Current MCP helper tools + +| Tool | Handler | Façade | Description | +|---|---|---|---| +| `gmail_send` | `handle_gmail_send` | None (uses `pub(crate)` helpers directly) | Send a new email | +| `gmail_reply` | `handle_gmail_reply` | `mcp_compose_reply` | Reply within a thread | + +### Upstream merge checklist for MCP + +After merging upstream, verify: +1. `mcp_server.rs` still compiles (check `executor::execute_method` signature changes) +2. `pub(crate)` on `Mailbox`, `to_mb_address_list`, `apply_optional_headers`, `finalize_message`, `resolve_mail_method` has not been reverted to `pub(super)` +3. MCP bridge façade block at end of `helpers/gmail/mod.rs` is still present +4. Run `cargo clippy -- -D warnings && cargo test -p google-workspace-cli -- mcp_server` + ## Environment Variables ### Authentication diff --git a/CLAUDE.md b/CLAUDE.md index 2b3af2d1..c1b7bc29 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1 +1,27 @@ When contributing to this repository, you must strictly follow all guidelines outlined in the AGENTS.md file. + +## Fork-specific Notes + +This is a fork of `googleworkspace/cli` that maintains MCP server support. See `FORK.md` for details. + +### Upstream Merge Checklist + +After merging upstream/main, fix MCP compilation errors: +1. `crates/google-workspace-cli/src/mcp_server.rs` — match new arguments in `executor::execute_method()` calls +2. `crates/google-workspace-cli/src/mcp_server.rs` — match new fields in Gmail helper structs +3. `crates/google-workspace-cli/src/helpers/gmail/mod.rs` — ensure `pub(crate)` visibility is not reverted to `pub(super)` +4. 
`pub(crate)` targets: `Mailbox`, `to_mb_address_list`, `apply_optional_headers`, `finalize_message`, `resolve_mail_method`, `Attachment` +5. Run `cargo clippy -- -D warnings && cargo test` to verify +6. If conflicts exceed ~20 files, consider rebasing: checkout upstream/main as new branch, re-apply MCP changes, reset main + +### Project Structure + +- Cargo workspace: `crates/google-workspace-cli/` (binary) + `crates/google-workspace/` (library) +- MCP server: `crates/google-workspace-cli/src/mcp_server.rs` +- Local install: `cargo install --path crates/google-workspace-cli` + +### GitHub Actions + +- Only 3 workflows exist: `ci.yml`, `policy.yml`, `sync-upstream.yml` +- `gh workflow list` may show upstream workflows — use `gh api repos/{owner}/{repo}/actions/workflows` to check actual fork workflows +- `gh run list` may show upstream runs — filter with `--branch=main` for fork-specific results diff --git a/Cargo.lock b/Cargo.lock index b7e7baec..e45ce908 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -20,7 +20,7 @@ checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" dependencies = [ "cfg-if", "cipher", - "cpufeatures", + "cpufeatures 0.2.17", ] [[package]] @@ -149,6 +149,58 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" +[[package]] +name = "axum" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31b698c5f9a010f6573133b09e0de5408834d0c82f8d7475a89fc1867a71cd90" +dependencies = [ + "axum-core", + "bytes", + "form_urlencoded", + "futures-util", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "serde_core", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = 
"axum-core" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "sync_wrapper", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "base64" version = "0.22.1" @@ -178,9 +230,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.11.0" +version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" +checksum = "c4512299f36f043ab09a583e57bceb5a5aab7a73db1805848e8fef3c9e8c78b3" [[package]] name = "block-buffer" @@ -226,9 +278,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.58" +version = "1.2.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1e928d4b69e3077709075a938a05ffbedfa53a84c8f766efbf8220bb1ff60e1" +checksum = "d16d90359e986641506914ba71350897565610e87ce0ad9e6f28569db3dd5c6d" dependencies = [ "find-msvc-tools", "shlex", @@ -246,6 +298,17 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" +[[package]] +name = "chacha20" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f8d983286843e49675a4b7a2d174efe136dc93a18d69130dd18198a6c167601" +dependencies = [ + "cfg-if", + "cpufeatures 0.3.0", + "rand_core 0.10.1", +] + [[package]] name = "chrono" version = "0.4.44" @@ -255,6 +318,7 @@ dependencies = [ "iana-time-zone", "js-sys", "num-traits", + "serde", "wasm-bindgen", "windows-link", ] @@ -281,9 +345,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.6.0" +version = "4.6.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b193af5b67834b676abd72466a96c1024e6a6ad978a1f484bd90b85c94041351" +checksum = "1ddb117e43bbf7dacf0a4190fef4d345b9bad68dfc649cb349e7d17d28428e51" dependencies = [ "clap_builder", "clap_derive", @@ -303,9 +367,9 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.6.0" +version = "4.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1110bd8a634a1ab8cb04345d8d878267d57c3cf1b38d91b71af6686408bbca6a" +checksum = "f2ce8604710f6733aa641a2b3731eaa1e8b3d9973d5e3565da11800813f997a9" dependencies = [ "heck", "proc-macro2", @@ -383,6 +447,15 @@ dependencies = [ "libc", ] +[[package]] +name = "cpufeatures" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b2a41393f66f16b0823bb79094d54ac5fbd34ab292ddafb9a0456ac9f87d201" +dependencies = [ + "libc", +] + [[package]] name = "crossbeam-channel" version = "0.5.15" @@ -404,7 +477,7 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d8b9f2e4c67f833b660cdb0a3523065869fb35570177239812ed4c905aeff87b" dependencies = [ - "bitflags 2.11.0", + "bitflags 2.11.1", "crossterm_winapi", "derive_more", "document-features", @@ -650,6 +723,12 @@ version = "0.15.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" +[[package]] +name = "dyn-clone" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" + [[package]] name = "either" version = "1.15.0" @@ -693,9 +772,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.3.0" +version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" +checksum = 
"9f1f227452a390804cdb637b74a86990f2a7d7ba4b7d5693aac9b4dd6defd8d6" [[package]] name = "filedescriptor" @@ -753,6 +832,21 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "futures" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + [[package]] name = "futures-channel" version = "0.3.32" @@ -760,6 +854,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" dependencies = [ "futures-core", + "futures-sink", ] [[package]] @@ -814,6 +909,7 @@ version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" dependencies = [ + "futures-channel", "futures-core", "futures-io", "futures-macro", @@ -880,6 +976,7 @@ dependencies = [ "cfg-if", "libc", "r-efi 6.0.0", + "rand_core 0.10.1", "wasip2", "wasip3", ] @@ -917,6 +1014,7 @@ dependencies = [ "aes-gcm", "anyhow", "async-trait", + "axum", "base64", "bytes", "chrono", @@ -934,9 +1032,10 @@ dependencies = [ "mail-builder", "mime_guess2", "percent-encoding", - "rand 0.8.5", + "rand 0.8.6", "ratatui", "reqwest", + "rmcp", "serde", "serde_json", "serial_test", @@ -993,6 +1092,12 @@ dependencies = [ "foldhash 0.2.0", ] +[[package]] +name = "hashbrown" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f467dd6dccf739c208452f8014c75c18bb8301b050ad1cfb27153803edb0f51" + [[package]] name = "heck" version = "0.5.0" @@ -1085,16 +1190,15 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.27.7" +version = "0.27.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" +checksum = "33ca68d021ef39cf6463ab54c1d0f5daf03377b70561305bb89a8f83aab66e0f" dependencies = [ "http", "hyper", "hyper-util", "rustls", "rustls-native-certs", - "rustls-pki-types", "tokio", "tokio-rustls", "tower-service", @@ -1149,12 +1253,13 @@ dependencies = [ [[package]] name = "icu_collections" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +checksum = "2984d1cd16c883d7935b9e07e44071dca8d917fd52ecc02c04d5fa0b5a3f191c" dependencies = [ "displaydoc", "potential_utf", + "utf8_iter", "yoke", "zerofrom", "zerovec", @@ -1162,9 +1267,9 @@ dependencies = [ [[package]] name = "icu_locale_core" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +checksum = "92219b62b3e2b4d88ac5119f8904c10f8f61bf7e95b640d25ba3075e6cac2c29" dependencies = [ "displaydoc", "litemap", @@ -1175,9 +1280,9 @@ dependencies = [ [[package]] name = "icu_normalizer" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +checksum = "c56e5ee99d6e3d33bd91c5d85458b6005a22140021cc324cea84dd0e72cff3b4" dependencies = [ "icu_collections", "icu_normalizer_data", @@ -1189,15 +1294,15 @@ dependencies = [ [[package]] name = "icu_normalizer_data" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" +checksum = "da3be0ae77ea334f4da67c12f149704f19f81d1adf7c51cf482943e84a2bad38" [[package]] name = "icu_properties" -version = "2.1.2" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" +checksum = "bee3b67d0ea5c2cca5003417989af8996f8604e34fb9ddf96208a033901e70de" dependencies = [ "icu_collections", "icu_locale_core", @@ -1209,15 +1314,15 @@ dependencies = [ [[package]] name = "icu_properties_data" -version = "2.1.2" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" +checksum = "8e2bbb201e0c04f7b4b3e14382af113e17ba4f63e2c9d2ee626b720cbce54a14" [[package]] name = "icu_provider" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +checksum = "139c4cf31c8b5f33d7e199446eff9c1e02decfc2f0eec2c8d71f65befa45b421" dependencies = [ "displaydoc", "icu_locale_core", @@ -1253,9 +1358,9 @@ dependencies = [ [[package]] name = "idna_adapter" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +checksum = "cb68373c0d6620ef8105e855e7745e18b0d00d3bdb07fb532e434244cdb9a714" dependencies = [ "icu_normalizer", "icu_properties", @@ -1263,12 +1368,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.13.0" +version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +checksum = "d466e9454f08e4a911e14806c24e16fba1b4c121d1ea474396f396069cf949d9" dependencies = [ "equivalent", - "hashbrown 0.16.1", + "hashbrown 0.17.0", "serde", "serde_core", ] @@ -1343,9 +1448,9 @@ checksum = "8f42a60cbdf9a97f5d2305f08a87dc4e09308d1276d28c869c684d7777685682" [[package]] name = "js-sys" -version = "0.3.93" +version = "0.3.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"797146bb2677299a1eb6b7b50a890f4c361b29ef967addf5b2fa45dae1bb6d7d" +checksum = "a1840c94c045fbcf8ba2812c95db44499f7c64910a912551aaaa541decebcacf" dependencies = [ "cfg-if", "futures-util", @@ -1398,15 +1503,15 @@ checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" [[package]] name = "libc" -version = "0.2.183" +version = "0.2.186" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d" +checksum = "68ab91017fe16c622486840e4c83c9a37afeff978bd239b5293d61ece587de66" [[package]] name = "libredox" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ddbf48fd451246b1f8c2610bd3b4ac0cc6e149d89832867093ab69a17194f08" +checksum = "e02f3bb43d335493c96bf3fd3a321600bf6bd07ed34bc64118e9293bdffea46c" dependencies = [ "libc", ] @@ -1417,7 +1522,7 @@ version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f50e8f47623268b5407192d26876c4d7f89d686ca130fdc53bced4814cd29f8" dependencies = [ - "bitflags 2.11.0", + "bitflags 2.11.1", ] [[package]] @@ -1428,9 +1533,9 @@ checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" [[package]] name = "litemap" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" +checksum = "92daf443525c4cce67b150400bc2316076100ce0b3686209eb8cf3c31612e6f0" [[package]] name = "litrs" @@ -1455,9 +1560,9 @@ checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" [[package]] name = "lru" -version = "0.16.3" +version = "0.16.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1dc47f592c06f33f8e3aea9591776ec7c9f9e4124778ff8a3c3b87159f7e593" +checksum = "7f66e8d5d03f609abc3a39e6f08e4164ebf1447a732906d39eb9b99b7919ef39" dependencies = [ "hashbrown 0.16.1", 
] @@ -1496,6 +1601,12 @@ dependencies = [ "regex-automata", ] +[[package]] +name = "matchit" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" + [[package]] name = "memchr" version = "2.8.0" @@ -1559,7 +1670,7 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" dependencies = [ - "bitflags 2.11.0", + "bitflags 2.11.1", "cfg-if", "cfg_aliases", "libc", @@ -1682,6 +1793,12 @@ dependencies = [ "windows-link", ] +[[package]] +name = "pastey" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5a797f0e07bdf071d15742978fc3128ec6c22891c31a3a931513263904c982a" + [[package]] name = "percent-encoding" version = "2.3.2" @@ -1767,7 +1884,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" dependencies = [ "phf_shared 0.11.3", - "rand 0.8.5", + "rand 0.8.6", ] [[package]] @@ -1816,7 +1933,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" dependencies = [ "cfg-if", - "cpufeatures", + "cpufeatures 0.2.17", "opaque-debug", "universal-hash", ] @@ -1829,9 +1946,9 @@ checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49" [[package]] name = "potential_utf" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +checksum = "0103b1cef7ec0cf76490e969665504990193874ea05c85ff9bab8b911d0a0564" dependencies = [ "zerovec", ] @@ -1899,7 +2016,7 @@ dependencies = [ "bytes", "getrandom 0.3.4", "lru-slab", - "rand 0.9.2", + "rand 0.9.4", "ring", "rustc-hash", "rustls", @@ -1948,9 +2065,9 @@ 
checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf" [[package]] name = "rand" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +checksum = "5ca0ecfa931c29007047d1bc58e623ab12e5590e8c7cc53200d5202b69266d8a" dependencies = [ "libc", "rand_chacha 0.3.1", @@ -1959,14 +2076,25 @@ dependencies = [ [[package]] name = "rand" -version = "0.9.2" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +checksum = "44c5af06bb1b7d3216d91932aed5265164bf384dc89cd6ba05cf59a35f5f76ea" dependencies = [ "rand_chacha 0.9.0", "rand_core 0.9.5", ] +[[package]] +name = "rand" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2e8e8bcc7961af1fdac401278c6a831614941f6164ee3bf4ce61b7edb162207" +dependencies = [ + "chacha20", + "getrandom 0.4.2", + "rand_core 0.10.1", +] + [[package]] name = "rand_chacha" version = "0.3.1" @@ -2005,6 +2133,12 @@ dependencies = [ "getrandom 0.3.4", ] +[[package]] +name = "rand_core" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63b8176103e19a2643978565ca18b50549f6101881c443590420e4dc998a3c69" + [[package]] name = "ratatui" version = "0.30.0" @@ -2025,7 +2159,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ef8dea09a92caaf73bff7adb70b76162e5937524058a7e5bff37869cbbec293" dependencies = [ - "bitflags 2.11.0", + "bitflags 2.11.1", "compact_str", "hashbrown 0.16.1", "indoc", @@ -2077,7 +2211,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7dbfa023cd4e604c2553483820c5fe8aa9d71a42eea5aa77c6e7f35756612db" dependencies = [ - "bitflags 2.11.0", + "bitflags 2.11.1", "hashbrown 0.16.1", "indoc", 
"instability", @@ -2096,7 +2230,7 @@ version = "0.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" dependencies = [ - "bitflags 2.11.0", + "bitflags 2.11.1", ] [[package]] @@ -2110,6 +2244,26 @@ dependencies = [ "thiserror 1.0.69", ] +[[package]] +name = "ref-cast" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "regex" version = "1.12.3" @@ -2194,6 +2348,50 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "rmcp" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e12ca9067b5ebfbd5b3fcdc4acfceb81aa7d5ab2a879dff7cb75d22434276aad" +dependencies = [ + "async-trait", + "base64", + "bytes", + "chrono", + "futures", + "http", + "http-body", + "http-body-util", + "pastey", + "pin-project-lite", + "rand 0.10.1", + "rmcp-macros", + "schemars", + "serde", + "serde_json", + "sse-stream", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tokio-util", + "tower-service", + "tracing", + "uuid", +] + +[[package]] +name = "rmcp-macros" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7caa6743cc0888e433105fe1bc551a7f607940b126a37bc97b478e86064627eb" +dependencies = [ + "darling 0.23.0", + "proc-macro2", + "quote", + "serde_json", + "syn 2.0.117", +] + [[package]] name = "rustc-hash" version = "2.1.2" @@ -2215,7 +2413,7 @@ version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" dependencies = [ - "bitflags 2.11.0", + "bitflags 2.11.1", "errno", "libc", "linux-raw-sys", @@ -2224,9 +2422,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.37" +version = "0.23.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "758025cb5fccfd3bc2fd74708fd4682be41d99e5dff73c377c0646c6012c73a4" +checksum = "7c2c118cb077cca2822033836dfb1b975355dfb784b5e8da48f7b6c5db74e60e" dependencies = [ "once_cell", "ring", @@ -2250,9 +2448,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.14.0" +version = "1.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd" +checksum = "30a7197ae7eb376e574fe940d068c30fe0462554a3ddbe4eca7838e049c937a9" dependencies = [ "web-time", "zeroize", @@ -2260,9 +2458,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.103.10" +version = "0.103.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df33b2b81ac578cabaf06b89b0631153a3f416b0a886e8a7a1707fb51abbd1ef" +checksum = "61c429a8649f110dddef65e2a5ad240f747e85f7758a6bccc7e5777bd33f756e" dependencies = [ "ring", "rustls-pki-types", @@ -2299,6 +2497,32 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "schemars" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2b42f36aa1cd011945615b92222f6bf73c599a102a300334cd7f8dbeec726cc" +dependencies = [ + "chrono", + "dyn-clone", + "ref-cast", + "schemars_derive", + "serde", + "serde_json", +] + +[[package]] +name = "schemars_derive" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d115b50f4aaeea07e79c1912f645c7513d81715d0420f8bc77a18c6260b307f" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn 2.0.117", +] + [[package]] name = "scopeguard" 
version = "1.2.0" @@ -2323,7 +2547,7 @@ version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 2.11.0", + "bitflags 2.11.1", "core-foundation 0.9.4", "core-foundation-sys", "libc", @@ -2336,7 +2560,7 @@ version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7f4bc775c73d9a02cde8bf7b2ec4c9d12743edf609006c7facc23998404cd1d" dependencies = [ - "bitflags 2.11.0", + "bitflags 2.11.1", "core-foundation 0.10.1", "core-foundation-sys", "libc", @@ -2355,9 +2579,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.27" +version = "1.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" +checksum = "8a7852d02fc848982e0c167ef163aaff9cd91dc640ba85e263cb1ce46fae51cd" [[package]] name = "serde" @@ -2389,6 +2613,17 @@ dependencies = [ "syn 2.0.117", ] +[[package]] +name = "serde_derive_internals" +version = "0.29.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "serde_json" version = "1.0.149" @@ -2402,6 +2637,17 @@ dependencies = [ "zmij", ] +[[package]] +name = "serde_path_to_error" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457" +dependencies = [ + "itoa", + "serde", + "serde_core", +] + [[package]] name = "serde_spanned" version = "0.6.9" @@ -2462,7 +2708,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" dependencies = [ "cfg-if", - "cpufeatures", + "cpufeatures 0.2.17", "digest", ] @@ -2540,6 +2786,19 @@ 
dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "sse-stream" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3962b63f038885f15bce2c6e02c0e7925c072f1ac86bb60fd44c5c6b762fb72" +dependencies = [ + "bytes", + "futures-util", + "http-body", + "http-body-util", + "pin-project-lite", +] + [[package]] name = "stable_deref_trait" version = "1.2.1" @@ -2585,6 +2844,12 @@ version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" +[[package]] +name = "symlink" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7973cce6668464ea31f176d85b13c7ab3bba2cb3b77a2ed26abd7801688010a" + [[package]] name = "syn" version = "1.0.109" @@ -2669,7 +2934,7 @@ checksum = "4676b37242ccbd1aabf56edb093a4827dc49086c0ffd764a5705899e0f35f8f7" dependencies = [ "anyhow", "base64", - "bitflags 2.11.0", + "bitflags 2.11.1", "fancy-regex", "filedescriptor", "finl_unicode", @@ -2787,9 +3052,9 @@ dependencies = [ [[package]] name = "tinystr" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +checksum = "c8323304221c2a851516f22236c5722a72eaa19749016521d6dff0824447d96d" dependencies = [ "displaydoc", "zerovec", @@ -2812,9 +3077,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.50.0" +version = "1.52.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d" +checksum = "b67dee974fe86fd92cc45b7a95fdd2f99a36a6d7b0d431a231178d3d670bbcc6" dependencies = [ "bytes", "libc", @@ -2829,9 +3094,9 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.6.1" +version = "2.7.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c55a2eff8b69ce66c84f85e1da1c233edc36ceb85a2058d11b0d6a3c7e7569c" +checksum = "385a6cb71ab9ab790c5fe8d67f1645e6c450a7ce006a33de03daa956cf70a496" dependencies = [ "proc-macro2", "quote", @@ -2848,6 +3113,17 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-stream" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + [[package]] name = "tokio-util" version = "0.7.18" @@ -2915,6 +3191,7 @@ dependencies = [ "tokio", "tower-layer", "tower-service", + "tracing", ] [[package]] @@ -2923,7 +3200,7 @@ version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" dependencies = [ - "bitflags 2.11.0", + "bitflags 2.11.1", "bytes", "futures-util", "http", @@ -2953,6 +3230,7 @@ version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" dependencies = [ + "log", "pin-project-lite", "tracing-attributes", "tracing-core", @@ -2960,11 +3238,12 @@ dependencies = [ [[package]] name = "tracing-appender" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "786d480bce6247ab75f005b14ae1624ad978d3029d9113f0a22fa1ac773faeaf" +checksum = "050686193eb999b4bb3bc2acfa891a13da00f79734704c4b8b4ef1a10b368a3c" dependencies = [ "crossbeam-channel", + "symlink", "thiserror 2.0.18", "time", "tracing-subscriber", @@ -3041,9 +3320,9 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "typenum" -version = "1.19.0" +version = "1.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" +checksum = "40ce102ab67701b8526c123c1bab5cbe42d7040ccfd0f64af1a385808d2f43de" [[package]] name = "ucd-trie" @@ -3134,9 +3413,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.23.0" +version = "1.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ac8b6f42ead25368cf5b098aeb3dc8a1a2c05a3eee8a9a1a68c640edbfc79d9" +checksum = "ddd74a9687298c6858e9b88ec8935ec45d22e8fd5e6394fa1bd4e99a87789c76" dependencies = [ "atomic", "getrandom 0.4.2", @@ -3183,11 +3462,11 @@ checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] name = "wasip2" -version = "1.0.2+wasi-0.2.9" +version = "1.0.3+wasi-0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" +checksum = "20064672db26d7cdc89c7798c48a0fdfac8213434a1186e5ef29fd560ae223d6" dependencies = [ - "wit-bindgen", + "wit-bindgen 0.57.1", ] [[package]] @@ -3196,14 +3475,14 @@ version = "0.4.0+wasi-0.3.0-rc-2026-01-06" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" dependencies = [ - "wit-bindgen", + "wit-bindgen 0.51.0", ] [[package]] name = "wasm-bindgen" -version = "0.2.116" +version = "0.2.120" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dc0882f7b5bb01ae8c5215a1230832694481c1a4be062fd410e12ea3da5b631" +checksum = "df52b6d9b87e0c74c9edfa1eb2d9bf85e5d63515474513aa50fa181b3c4f5db1" dependencies = [ "cfg-if", "once_cell", @@ -3214,9 +3493,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.66" +version = "0.4.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19280959e2844181895ef62f065c63e0ca07ece4771b53d89bfdb967d97cbf05" +checksum = 
"af934872acec734c2d80e6617bbb5ff4f12b052dd8e6332b0817bce889516084" dependencies = [ "js-sys", "wasm-bindgen", @@ -3224,9 +3503,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.116" +version = "0.2.120" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75973d3066e01d035dbedaad2864c398df42f8dd7b1ea057c35b8407c015b537" +checksum = "78b1041f495fb322e64aca85f5756b2172e35cd459376e67f2a6c9dffcedb103" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -3234,9 +3513,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.116" +version = "0.2.120" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91af5e4be765819e0bcfee7322c14374dc821e35e72fa663a830bbc7dc199eac" +checksum = "9dcd0ff20416988a18ac686d4d4d0f6aae9ebf08a389ff5d29012b05af2a1b41" dependencies = [ "bumpalo", "proc-macro2", @@ -3247,9 +3526,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.116" +version = "0.2.120" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9bf0406a78f02f336bf1e451799cca198e8acde4ffa278f0fb20487b150a633" +checksum = "49757b3c82ebf16c57d69365a142940b384176c24df52a087fb748e2085359ea" dependencies = [ "unicode-ident", ] @@ -3295,7 +3574,7 @@ version = "0.244.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" dependencies = [ - "bitflags 2.11.0", + "bitflags 2.11.1", "hashbrown 0.15.5", "indexmap", "semver", @@ -3303,9 +3582,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.93" +version = "0.3.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "749466a37ee189057f54748b200186b59a03417a117267baf3fd89cecc9fb837" +checksum = "2eadbac71025cd7b0834f20d1fe8472e8495821b4e9801eb0a60bd1f19827602" dependencies = [ "js-sys", "wasm-bindgen", @@ -3714,6 +3993,12 @@ dependencies = [ 
"wit-bindgen-rust-macro", ] +[[package]] +name = "wit-bindgen" +version = "0.57.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ebf944e87a7c253233ad6766e082e3cd714b5d03812acc24c318f549614536e" + [[package]] name = "wit-bindgen-core" version = "0.51.0" @@ -3763,7 +4048,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" dependencies = [ "anyhow", - "bitflags 2.11.0", + "bitflags 2.11.1", "indexmap", "log", "serde", @@ -3795,15 +4080,15 @@ dependencies = [ [[package]] name = "writeable" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" +checksum = "1ffae5123b2d3fc086436f8834ae3ab053a283cfac8fe0a0b8eaae044768a4c4" [[package]] name = "yoke" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +checksum = "abe8c5fda708d9ca3df187cae8bfb9ceda00dd96231bed36e445a1a48e66f9ca" dependencies = [ "stable_deref_trait", "yoke-derive", @@ -3812,9 +4097,9 @@ dependencies = [ [[package]] name = "yoke-derive" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +checksum = "de844c262c8848816172cef550288e7dc6c7b7814b4ee56b3e1553f275f1858e" dependencies = [ "proc-macro2", "quote", @@ -3869,18 +4154,18 @@ dependencies = [ [[package]] name = "zerofrom" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +checksum = "69faa1f2a1ea75661980b013019ed6687ed0e83d069bc1114e2cc74c6c04c4df" dependencies = [ "zerofrom-derive", ] [[package]] name = 
"zerofrom-derive" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +checksum = "11532158c46691caf0f2593ea8358fed6bbf68a0315e80aae9bd41fbade684a1" dependencies = [ "proc-macro2", "quote", @@ -3910,9 +4195,9 @@ dependencies = [ [[package]] name = "zerotrie" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +checksum = "0f9152d31db0792fa83f70fb2f83148effb5c1f5b8c7686c3459e361d9bc20bf" dependencies = [ "displaydoc", "yoke", @@ -3921,9 +4206,9 @@ dependencies = [ [[package]] name = "zerovec" -version = "0.11.5" +version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +checksum = "90f911cbc359ab6af17377d242225f4d75119aec87ea711a880987b18cd7b239" dependencies = [ "yoke", "zerofrom", @@ -3932,9 +4217,9 @@ dependencies = [ [[package]] name = "zerovec-derive" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +checksum = "625dc425cab0dca6dc3c3319506e6593dcb08a9f387ea3b284dbd52a92c40555" dependencies = [ "proc-macro2", "quote", diff --git a/FORK.ja.md b/FORK.ja.md new file mode 100644 index 00000000..b187e259 --- /dev/null +++ b/FORK.ja.md @@ -0,0 +1,127 @@ +# Fork: MCP サーバー機能を温存した gws + +このリポジトリは [googleworkspace/cli](https://github.com/googleworkspace/cli) のフォークです。 + +[English version](FORK.md) + +upstream が削除した **MCP(Model Context Protocol)サーバー機能** を独自にメンテナンスし、AI エージェントから Google Workspace API を直接呼び出せる状態を維持しています。 + +## upstream との差分 + +| 項目 | upstream | このフォーク | +|---|---|---| +| MCP サーバー (`gws mcp`) | 削除済み | 維持・メンテナンス中 | +| MCP helper tools (`--helpers`) | なし | `gmail_send` 
等を独自実装 | +| CI/CD ワークフロー | upstream 環境依存 | 最小構成(CI + Policy + Sync + Release) | + +### MCP サーバー + +Discovery Document から動的にツールを生成し、stdio 経由で MCP プロトコルを提供します。 + +```bash +# Gmail の MCP サーバーを起動(helper tool 付き) +gws mcp -s gmail --helpers + +# 複数サービスを同時に提供 +gws mcp -s gmail -s drive -s calendar --helpers + +# compact モード(サービスごとに1ツール) +gws mcp -s gmail --tool-mode compact +``` + +### MCP helper tools + +`--helpers` フラグで有効化される便利ツールです。Discovery API の raw tool に加え、RFC 2822 構築や base64 エンコード等の面倒な処理を自動化します。 + +| ツール名 | 説明 | +|---|---| +| `gmail_send` | メール送信。to/subject/body を渡すだけで RFC 2822 フォーマット・base64url エンコードを自動処理 | +| `gmail_reply` | スレッド内返信。message_id/body を渡すだけで In-Reply-To, References, Re: 件名, threadId を自動設定 | + +## インストール + +### Homebrew(macOS / Linux)— 推奨 + +```bash +brew install shigechika/tap/gws-mcp +``` + +Rust ツールチェーン不要。macOS(Apple Silicon / Intel)と Linux(x86\_64 / arm64)向けのバイナリを事前ビルドして配布しています。 + +### Cargo(ソースからビルド) + +```bash +# GitHub から直接インストール +cargo install --git https://github.com/shigechika/gws-mcp --locked +``` + +ローカルに clone 済みの場合は、ワーキングツリーからインストール: + +```bash +cd gws-mcp +cargo install --path crates/google-workspace-cli +``` + +`~/.cargo/bin/gws` にバイナリがインストールされます。`cargo build --release` は `target/release/gws` にビルドするだけで `~/.cargo/bin/` は**更新されない**点に注意してください。 + +## Claude での使い方 + +**Claude Code** — `~/.claude.json` に追加: + +```json +{ + "mcpServers": { + "gws": { + "command": "gws", + "args": ["mcp", "-s", "gmail", "-s", "drive", "-s", "calendar", "--helpers"] + } + } +} +``` + +**Claude Desktop** — `~/Library/Application Support/Claude/claude_desktop_config.json`(macOS)に追加: + +```json +{ + "mcpServers": { + "gws": { + "command": "gws", + "args": ["mcp", "-s", "gmail", "-s", "drive", "-s", "calendar", "--helpers"] + } + } +} +``` + +## このフォークで対応した upstream の MCP issue + +upstream の MCP サーバーに対するバグ報告・機能要望(MCP 削除に伴い close されたもの)を、このフォークで移植・対応しています。 + +| upstream issue | 状態 | 内容 | +|---|---|---| +| [#162](https://github.com/googleworkspace/cli/issues/162) — 
`tools/list` が呼び出せないツール名を返す(alias と doc.name の不一致) | 対応済 | `walk_resources` がツール名プレフィックスに Discovery doc 名ではなく設定された alias を使うよう変更。`tools/list` と `tools/call` の名前空間を統一 | +| [#170](https://github.com/googleworkspace/cli/issues/170) — 複数単語のリソース名(`admin_role_assignments_list` 等)でパースが壊れる | 対応済 | `split('_')` を Discovery ツリーに対する貪欲リゾルバ(`resolve_tool_path`)に置換。アンダースコアを含むリソース名・任意の入れ子に対応 | +| [#212](https://github.com/googleworkspace/cli/issues/212) — Full mode の schema が GET メソッドにも `body`/`upload` を含む | 対応済 | `method.request.is_some()` の時のみ `body` を、`supports_media_upload == true` の時のみ `upload` を付与 | +| [#251](https://github.com/googleworkspace/cli/issues/251) — `--upload` が絶対パス・トラバーサルパスを受理する | 対応済 | MCP の `upload` 引数で絶対パス・`..` 要素を拒否 | +| [#260](https://github.com/googleworkspace/cli/issues/260) — tool annotations(`readOnlyHint` / `destructiveHint` / `idempotentHint`) | 部分対応 | HTTP method から導出した annotations を全ツールに付与。`tool_search` メタツールとページネーションは未移植 | +| [#642](https://github.com/googleworkspace/cli/issues/642) — `parse_message_headers` の case-sensitive マッチが `CC` 等の非正規ケースのヘッダを落とす | 対応済 | ヘッダ名を小文字化してからマッチするよう変更。Exchange/Outlook 由来の `"CC"` 等、RFC 5322 §1.2.2 に沿った任意ケーシングを認識 | +| [#573](https://github.com/googleworkspace/cli/issues/573) — `gmail.users.messages.get` で `metadataHeaders` 配列がクエリパラメータに展開されない | 対応済 | Discovery パーサが `repeated: true` を保持(`discovery.rs`)し、JSON 配列値を複数クエリに展開する実装が入っている(`executor.rs`)。Discovery 駆動の MCP ツールも同じ挙動を継承 | +| [#625](https://github.com/googleworkspace/cli/issues/625) — `script` service が `services.rs` に未登録で helper が到達不能 | 対応済 | `ServiceEntry { aliases: &["script"], api_name: "script", version: "v1", ... 
}` として登録済み。`gws script ...` と MCP `script_*` ツールが正常に解決する | +| [#717](https://github.com/googleworkspace/cli/issues/717) — `gws auth status` が非 JSON を stdout に出力し `jq` パイプラインを破壊 | 対応済 | `Using keyring backend: ` は `credential_store.rs` で `eprintln!`(stderr)に出力される。`gws auth status \| jq .` は正常に動作 | +| [#562](https://github.com/googleworkspace/cli/issues/562) — 対話 TUI が `cloud-platform` スコープを無条件に注入し、Workspace の admin policy で制限される組織では login が失敗する | 対応済 | `run_discovery_scope_picker` の選択後 auto-inject を削除(`auth_commands.rs`)。`cloud-platform` が必要な用途(modelarmor 等)は picker で明示選択するか `--full` / `--scopes` で指定する | +| [#644](https://github.com/googleworkspace/cli/issues/644) — `gmail +send` が `userinfo.profile` スコープを付与済みでも「grant profile scope」ヒントを出し、From の表示名が null になる | 対応済 | `helpers/gmail/mod.rs` の表示名取得を People API (`/people/me?personFields=names`) から OIDC userinfo endpoint (`openidconnect.googleapis.com/v1/userinfo`) に変更。同じスコープで Workspace / 個人 Gmail どちらでも一貫したレスポンスが得られる。401/403 時のフォールバックメッセージも、一時的な拒否をスコープ欠落と誤診断しない表現に改訂 | + +## upstream MCP 定点観測 + +| 時期 | 出来事 | +|---|---| +| 2026-03-04 | `feat: add gws mcp server` — upstream に MCP サーバーが追加 | +| 2026-03-05 | ブランチ `fix/mcp-hyphen-tool-names` が upstream に出現 — ツール名の区切り文字をアンダースコアからハイフンに変更 | +| 2026-03-06 | `fix!: Remove MCP server mode` — 追加からわずか2日で upstream が breaking change として MCP サーバーを削除 | +| 2026-03-06 | 同ブランチがマージされずに削除 — upstream での MCP 復活は見送り | + +## upstream 同期方針 + +- 毎週月曜に GitHub Actions で upstream/main を自動マージ +- コンフリクト発生時は PR を作成して手動解決 +- MCP 関連コード(`src/mcp_server.rs`、`pub(crate)` 可視性)の温存を最優先 +- upstream のコミットメッセージから `#番号` 参照を除去(クロスリファレンス防止) diff --git a/FORK.md b/FORK.md new file mode 100644 index 00000000..dbbe2265 --- /dev/null +++ b/FORK.md @@ -0,0 +1,213 @@ +# Fork: gws with MCP server support + +This repository is a fork of [googleworkspace/cli](https://github.com/googleworkspace/cli). 
+ +It maintains the **MCP (Model Context Protocol) server** that upstream removed, allowing AI agents to call Google Workspace APIs directly. + +[日本語版はこちら](FORK.ja.md) + +## Differences from upstream + +| Feature | upstream | This fork | +|---|---|---| +| MCP server (`gws mcp`) | Removed | Maintained | +| MCP helper tools (`--helpers`) | N/A | `gmail_send` and more | +| HTTP transport (`--transport http`) | N/A | Streamable HTTP (Phase 1: no auth) | +| OAuth2 PKCE auth (`--auth`) | N/A | MCP spec 2025-11-25 compliant AS (RFC 9728 + RFC 8414 + PKCE S256) | +| CI/CD workflows | Upstream-specific | Minimal (CI + Policy + Sync + Release) | + +### MCP server + +Dynamically generates tools from Discovery Documents and serves them via the MCP protocol over stdio. + +```bash +# Start MCP server for Gmail with helper tools +gws mcp -s gmail --helpers + +# Serve multiple services +gws mcp -s gmail -s drive -s calendar --helpers + +# Compact mode (one tool per service) +gws mcp -s gmail --tool-mode compact +``` + +### MCP helper tools + +Enabled with the `--helpers` flag. These provide high-level operations on top of the raw Discovery API tools, automating tedious tasks like RFC 2822 formatting and base64url encoding. + +| Tool | Description | +|---|---| +| `gmail_send` | Send email. Just pass to/subject/body — RFC 2822 formatting and base64url encoding are handled automatically | +| `gmail_reply` | Reply within a thread. Pass message_id/body — In-Reply-To, References, Re: subject, and threadId are set automatically | + +## Installation + +### Homebrew (macOS / Linux) — recommended + +```bash +brew install shigechika/tap/gws-mcp +``` + +No Rust toolchain required. Binaries are pre-built for macOS (Apple Silicon / Intel) and Linux (x86\_64 / arm64). 
+ +### Debian / Ubuntu (.deb) + +```bash +sudo dpkg -i gws-mcp--linux-amd64.deb +# or for arm64: +sudo dpkg -i gws-mcp--linux-arm64.deb +``` + +Download the `.deb` file from the [latest release](https://github.com/shigechika/gws-mcp/releases/latest). + +### RHEL / Fedora / Amazon Linux (.rpm) + +```bash +sudo rpm -i gws-mcp--linux-amd64.rpm +# or for aarch64: +sudo rpm -i gws-mcp--linux-arm64.rpm +``` + +Download the `.rpm` file from the [latest release](https://github.com/shigechika/gws-mcp/releases/latest). + +### Windows + +Download `gws-mcp--windows-amd64.zip` from the [latest release](https://github.com/shigechika/gws-mcp/releases/latest), extract `gws.exe`, and place it in a directory on your `PATH`. + +### Direct download (macOS / Linux) + +Download the `.tar.gz` archive for your platform from the [latest release](https://github.com/shigechika/gws-mcp/releases/latest) and place `gws` in your `PATH`. + +| Platform | Archive | +|---|---| +| macOS (Apple Silicon) | `gws-mcp--macos-arm64.tar.gz` | +| macOS (Intel) | `gws-mcp--macos-amd64.tar.gz` | +| Linux x86\_64 | `gws-mcp--linux-amd64.tar.gz` | +| Linux arm64 | `gws-mcp--linux-arm64.tar.gz` | + +### Cargo (from source) + +```bash +# Install directly from GitHub +cargo install --git https://github.com/shigechika/gws-mcp --locked +``` + +If you cloned the repository locally: + +```bash +cd gws-mcp +cargo install --path crates/google-workspace-cli +``` + +This installs the binary to `~/.cargo/bin/gws`. Note that `cargo build --release` only builds to `target/release/gws` and does **not** update `~/.cargo/bin/`. 
+ +## Usage with Claude + +**Claude Code** — add to `~/.claude.json`: + +```json +{ + "mcpServers": { + "gws": { + "command": "gws", + "args": ["mcp", "-s", "gmail", "-s", "drive", "-s", "calendar", "--helpers"] + } + } +} +``` + +**Claude Desktop** — add to `~/Library/Application Support/Claude/claude_desktop_config.json` (macOS): + +```json +{ + "mcpServers": { + "gws": { + "command": "gws", + "args": ["mcp", "-s", "gmail", "-s", "drive", "-s", "calendar", "--helpers"] + } + } +} +``` + +### HTTP transport (Streamable HTTP) + +Start the server first: + +```bash +gws mcp -s gmail -s drive -s calendar --helpers --transport http --port 3000 +``` + +Then point Claude at it — no `command`/`args` needed, just a URL: + +```json +{ + "mcpServers": { + "gws": { + "url": "http://localhost:3000/mcp" + } + } +} +``` + +The server binds to `127.0.0.1` by default (loopback only). Use `--bind 0.0.0.0` to allow external access (not recommended without additional auth). + +### OAuth2 PKCE authentication (`--auth`) + +Enables a full OAuth2 Authorization Server on the HTTP transport, compliant with the [MCP Authorization spec 2025-11-25](https://modelcontextprotocol.io/specification/2025-11-25/basic/authorization/). + +**Prerequisites:** +1. Run `gws auth setup` to create `client_secret.json` with a Google OAuth2 web app credential +2. 
Add `http://localhost:<port>/oauth/callback` as an **Authorized redirect URI** in [Google Cloud Console](https://console.cloud.google.com/apis/credentials)
+
+```bash
+gws mcp -s gmail -s drive -s calendar --helpers --transport http --port 3000 --auth
+```
+
+The server exposes these OAuth2 endpoints:
+
+| Endpoint | RFC | Purpose |
+|---|---|---|
+| `/.well-known/oauth-protected-resource` | RFC 9728 | Protected Resource Metadata |
+| `/.well-known/oauth-authorization-server` | RFC 8414 | Authorization Server Metadata |
+| `/oauth/register` | RFC 7591 (stub) | Dynamic Client Registration |
+| `/oauth/authorize` | RFC 6749 | Authorization endpoint — redirects to Google |
+| `/oauth/callback` | — | Google OAuth2 callback |
+| `/oauth/token` | RFC 6749 | Token endpoint — exchanges code + PKCE verifier for bearer token |
+
+All requests to `/mcp` require a valid `Authorization: Bearer <token>` header. Sessions expire after 8 hours.
+
+> **Note:** In Phase 2/3 (current), GWS API calls still use the shared `gws auth login` token. Bearer tokens identify the user (email) but do not yet carry per-user GWS credentials. Per-user token isolation is planned for Phase 4.
+
+## Upstream MCP issues addressed in this fork
+
+Bug reports and feature requests that targeted upstream's MCP server (closed when MCP was removed). This fork ports the fixes so they remain useful:
+
+| Upstream issue | Status | Notes |
+|---|---|---|
+| [#162](https://github.com/googleworkspace/cli/issues/162) — `tools/list` returns uncallable tool names for aliased services | Fixed | `walk_resources` now uses the configured service alias as tool-name prefix (instead of Discovery doc name), so `tools/list` and `tools/call` share one namespace |
+| [#170](https://github.com/googleworkspace/cli/issues/170) — Tool name parsing breaks on multi-word resources (`admin_role_assignments_list` etc.) | Fixed | Replaced `split('_')` with a greedy Discovery-tree resolver (`resolve_tool_path`).
Handles arbitrarily nested resources whose names contain underscores | +| [#212](https://github.com/googleworkspace/cli/issues/212) — Full-mode schemas expose `body`/`upload` on GET-only methods | Fixed | `body` is added only when `method.request.is_some()`; `upload` only when `supports_media_upload` is true | +| [#251](https://github.com/googleworkspace/cli/issues/251) — Dynamic `--upload` accepts unsafe absolute/traversal paths | Fixed | MCP `upload` argument rejects absolute paths and `..` components | +| [#260](https://github.com/googleworkspace/cli/issues/260) — Tool annotations (`readOnlyHint`, `destructiveHint`, `idempotentHint`) | Partial | Annotations derived from HTTP method are now attached to every tool. `tool_search` meta-tool and pagination from the original proposal are not yet ported | +| [#642](https://github.com/googleworkspace/cli/issues/642) — `parse_message_headers` case-sensitive match drops CC/headers with non-canonical casing | Fixed | Normalized header names to lowercase before matching, so `"CC"` from Exchange/Outlook, `"from"` lowercase, etc. are all recognized per RFC 5322 §1.2.2 | +| [#573](https://github.com/googleworkspace/cli/issues/573) — `metadataHeaders` array not expanded as repeated query params in `gmail.users.messages.get` | Fixed | Discovery parser preserves `repeated: true` (`discovery.rs`), and the executor expands JSON array values into multiple query entries (`executor.rs`). Discovery-driven MCP tools inherit the same behavior | +| [#625](https://github.com/googleworkspace/cli/issues/625) — `script` service not registered in `services.rs` (helper unreachable) | Fixed | `ServiceEntry { aliases: &["script"], api_name: "script", version: "v1", ... 
}` is registered, so `gws script ...` and MCP `script_*` tools resolve correctly |
+| [#717](https://github.com/googleworkspace/cli/issues/717) — `gws auth status` prints non-JSON to stdout, breaking `jq` pipelines | Fixed | `Using keyring backend: <backend>` is emitted via `eprintln!` to stderr (`credential_store.rs`), so `gws auth status \| jq .` parses cleanly |
+| [#562](https://github.com/googleworkspace/cli/issues/562) — Interactive TUI unconditionally injects `cloud-platform` scope, breaking org-restricted accounts | Fixed | Removed the post-selection auto-inject in `run_discovery_scope_picker` (`auth_commands.rs`). Users who need `cloud-platform` (e.g. for modelarmor) can tick it in the picker or pass `--full` / explicit `--scopes` |
+| [#644](https://github.com/googleworkspace/cli/issues/644) — `gmail +send` prints "grant profile scope" tip and sends with null From name even when `userinfo.profile` is granted | Fixed | Switched display-name lookup in `helpers/gmail/mod.rs` from People API (`/people/me?personFields=names`) to the OIDC userinfo endpoint (`openidconnect.googleapis.com/v1/userinfo`), which accepts the same scope and responds consistently across Workspace and personal Gmail accounts.
Reworded the 401/403 fallback so it doesn't misdiagnose a transient permission denial as a missing scope | + +## Upstream MCP timeline + +| Date | Event | +|---|---| +| 2026-03-04 | `feat: add gws mcp server` — MCP server added to upstream | +| 2026-03-05 | Branch `fix/mcp-hyphen-tool-names` appeared in upstream — tool name separator change from underscore to hyphen | +| 2026-03-06 | `fix!: Remove MCP server mode` — MCP server removed from upstream as a breaking change, just 2 days after introduction | +| 2026-03-06 | Branch `fix/mcp-hyphen-tool-names` deleted without being merged — MCP remains absent from upstream | + +## Upstream sync policy + +- Weekly auto-merge from upstream/main via GitHub Actions (every Monday) +- Conflicts trigger a PR for manual resolution +- MCP-related code (`src/mcp_server.rs`, `pub(crate)` visibility, MCP bridge façade functions) is preserved as top priority +- Issue/PR number references (`#123`) are stripped from upstream commit messages to prevent cross-references diff --git a/crates/google-workspace-cli/Cargo.toml b/crates/google-workspace-cli/Cargo.toml index 058b109e..f80bddf6 100644 --- a/crates/google-workspace-cli/Cargo.toml +++ b/crates/google-workspace-cli/Cargo.toml @@ -66,6 +66,8 @@ tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] } tracing-appender = "0.2" uuid = { version = "1.22.0", features = ["v4", "v5"] } mime_guess2 = "2.3.1" +axum = "0.8" +rmcp = { version = "1", features = ["transport-streamable-http-server", "server"] } [target.'cfg(target_os = "macos")'.dependencies] keyring = { version = "3.6.3", features = ["apple-native"] } @@ -78,3 +80,21 @@ keyring = "3.6.3" [dev-dependencies] serial_test = "3.4.0" + +[package.metadata.deb] +name = "gws-mcp" +maintainer = "AIKAWA Shigechika " +copyright = "2026, Google LLC" +license-file = ["../../LICENSE", "0"] +extended-description = "Google Workspace CLI with MCP server support. 
Provides a stdio MCP server for Gmail, Drive, Calendar, and other Google Workspace APIs." +depends = "ca-certificates" +section = "utils" +priority = "optional" +assets = [["target/release/gws", "usr/bin/gws", "755"]] + +[package.metadata.generate-rpm] +name = "gws-mcp" +license = "Apache-2.0" +description = "Google Workspace CLI with MCP server support" +requires = { ca-certificates = "*" } +assets = [{ source = "target/release/gws", dest = "/usr/bin/gws", mode = "755" }] diff --git a/crates/google-workspace-cli/build.rs b/crates/google-workspace-cli/build.rs new file mode 100644 index 00000000..287a5dc3 --- /dev/null +++ b/crates/google-workspace-cli/build.rs @@ -0,0 +1,21 @@ +// Fork-only: embed git describe output for fork version identification. +// This file does not exist in upstream and will not cause merge conflicts. + +use std::process::Command; + +fn main() { + // Re-run if git HEAD changes (new commit or checkout) + println!("cargo:rerun-if-changed=../../.git/HEAD"); + println!("cargo:rerun-if-changed=../../.git/refs/"); + + let describe = Command::new("git") + .args(["describe", "--tags", "--always", "--dirty"]) + .output() + .ok() + .filter(|o| o.status.success()) + .and_then(|o| String::from_utf8(o.stdout).ok()) + .map(|s| s.trim().to_string()) + .unwrap_or_else(|| "unknown".to_string()); + + println!("cargo:rustc-env=GWS_FORK_DESCRIBE={describe}"); +} diff --git a/crates/google-workspace-cli/src/auth.rs b/crates/google-workspace-cli/src/auth.rs index 9d8847e4..7566e2da 100644 --- a/crates/google-workspace-cli/src/auth.rs +++ b/crates/google-workspace-cli/src/auth.rs @@ -763,8 +763,8 @@ mod tests { let dir = tempfile::tempdir().unwrap(); let enc_path = dir.path().join("credentials.enc"); - // Isolate global config dir to prevent races with other tests - std::env::set_var("GOOGLE_WORKSPACE_CLI_CONFIG_DIR", dir.path()); + // Isolate global config dir; guard restores the env var on drop. 
+ let _config_guard = EnvVarGuard::set("GOOGLE_WORKSPACE_CLI_CONFIG_DIR", dir.path()); // Encrypt and write let encrypted = crate::credential_store::encrypt(json.as_bytes()).unwrap(); @@ -785,6 +785,7 @@ mod tests { } #[tokio::test] + #[serial_test::serial] async fn test_load_credentials_encrypted_takes_priority_over_default() { // Encrypted credentials should be loaded before the default plaintext path let enc_json = r#"{ @@ -804,6 +805,9 @@ mod tests { let enc_path = dir.path().join("credentials.enc"); let plain_path = dir.path().join("credentials.json"); + // Isolate global config dir; guard restores the env var on drop. + let _config_guard = EnvVarGuard::set("GOOGLE_WORKSPACE_CLI_CONFIG_DIR", dir.path()); + let encrypted = crate::credential_store::encrypt(enc_json.as_bytes()).unwrap(); std::fs::write(&enc_path, &encrypted).unwrap(); std::fs::write(&plain_path, plain_json).unwrap(); diff --git a/crates/google-workspace-cli/src/auth_commands.rs b/crates/google-workspace-cli/src/auth_commands.rs index d7571e74..4b4988b0 100644 --- a/crates/google-workspace-cli/src/auth_commands.rs +++ b/crates/google-workspace-cli/src/auth_commands.rs @@ -841,6 +841,7 @@ fn map_service_to_scope_prefixes(service: &str) -> Vec<&str> { "slides" => vec!["presentations"], "docs" => vec!["documents"], "people" => vec!["contacts", "directory"], + "meet" => vec!["meetings"], s => vec![s], } } @@ -930,7 +931,7 @@ fn run_discovery_scope_picker( relevant_scopes: &[crate::setup::DiscoveredScope], services_filter: Option<&HashSet>, ) -> Option> { - use crate::setup::{ScopeClassification, PLATFORM_SCOPE}; + use crate::setup::ScopeClassification; use crate::setup_tui::{PickerResult, SelectItem}; let mut recommended_scopes = vec![]; @@ -1102,10 +1103,12 @@ fn run_discovery_scope_picker( } } - // Always include cloud-platform scope - if !selected.contains(&PLATFORM_SCOPE.to_string()) { - selected.push(PLATFORM_SCOPE.to_string()); - } + // Do not auto-inject cloud-platform. 
It is a restricted scope that + // some Workspace orgs block via admin policy, which would cause + // `admin_policy_enforced` login failures for users who picked + // narrower scopes (upstream #562). Users who need cloud-platform + // (e.g. for the modelarmor helper) can tick it in the picker or + // pass `--full` / `--scopes https://www.googleapis.com/auth/cloud-platform`. // Hierarchical dedup: if we have both a broad scope (e.g. `.../auth/drive`) // and a narrower scope (e.g. `.../auth/drive.metadata`, `.../auth/drive.readonly`), @@ -1791,6 +1794,34 @@ mod tests { assert_eq!(scopes.len(), FULL_SCOPES.len()); } + /// `DEFAULT_SCOPES` must not include cloud-platform: it is a restricted + /// scope that some Workspace admin policies block, and the login flow + /// relies on this invariant to avoid `admin_policy_enforced` errors + /// for users who did not opt in (upstream #562). + #[test] + fn default_scopes_does_not_include_cloud_platform() { + for scope in DEFAULT_SCOPES { + assert!( + !scope.contains("cloud-platform"), + "DEFAULT_SCOPES must not include cloud-platform (upstream #562): {}", + scope + ); + } + } + + /// `FULL_SCOPES` is the opt-in path for users who do want cloud-platform + /// (e.g. for the modelarmor helper). Keep this invariant so we don't + /// accidentally remove the only non-custom entry point that includes it. + #[test] + fn full_scopes_includes_cloud_platform() { + assert!( + FULL_SCOPES + .iter() + .any(|s| *s == "https://www.googleapis.com/auth/cloud-platform"), + "FULL_SCOPES must include cloud-platform as the opt-in entry point" + ); + } + #[test] #[serial_test::serial] fn resolve_client_credentials_from_env_vars() { @@ -1833,6 +1864,9 @@ mod tests { #[test] #[serial_test::serial] fn config_dir_returns_gws_subdir() { + // Other tests (e.g. in auth.rs) may set GOOGLE_WORKSPACE_CLI_CONFIG_DIR + // to a tempdir path that does not end in "gws", so clear it first. 
+ std::env::remove_var("GOOGLE_WORKSPACE_CLI_CONFIG_DIR"); let path = config_dir(); assert!(path.ends_with("gws")); } @@ -2236,6 +2270,23 @@ mod tests { )); } + #[test] + fn scope_matches_service_meet() { + let services: HashSet = ["meet"].iter().map(|s| s.to_string()).collect(); + assert!(scope_matches_service( + "https://www.googleapis.com/auth/meetings.space.created", + &services + )); + assert!(scope_matches_service( + "https://www.googleapis.com/auth/meetings.space.readonly", + &services + )); + assert!(scope_matches_service( + "https://www.googleapis.com/auth/meetings.space.settings", + &services + )); + } + // ── services filter integration tests ──────────────────────────────── #[test] diff --git a/crates/google-workspace-cli/src/helpers/gmail/mod.rs b/crates/google-workspace-cli/src/helpers/gmail/mod.rs index caeb8b6b..63eefe13 100644 --- a/crates/google-workspace-cli/src/helpers/gmail/mod.rs +++ b/crates/google-workspace-cli/src/helpers/gmail/mod.rs @@ -32,7 +32,16 @@ pub(super) use crate::error::GwsError; pub(super) use crate::executor; use crate::output::sanitize_for_terminal; pub(super) use anyhow::Context; -pub(super) use base64::{engine::general_purpose::URL_SAFE, Engine as _}; +pub(super) use base64::Engine as _; +/// URL-safe base64 decoder accepting both padded and unpadded input (Gmail API omits padding). 
+pub(super) const URL_SAFE_LENIENT: base64::engine::GeneralPurpose = + base64::engine::GeneralPurpose::new( + &base64::alphabet::URL_SAFE, + base64::engine::general_purpose::GeneralPurposeConfig::new() + .with_decode_padding_mode(base64::engine::DecodePaddingMode::Indifferent), + ); +#[cfg(test)] +pub(super) use base64::engine::general_purpose::{URL_SAFE, URL_SAFE_NO_PAD}; pub(super) use clap::{Arg, ArgAction, ArgMatches, Command}; pub(super) use mail_builder::headers::address::Address as MbAddress; pub(super) use serde::Serialize; @@ -62,7 +71,7 @@ fn sanitize_control_chars(s: &str) -> String { /// A parsed RFC 5322 mailbox: optional display name + email address. #[derive(Clone, Debug, Default, PartialEq, Eq, Serialize)] -pub(super) struct Mailbox { +pub(crate) struct Mailbox { pub name: Option, pub email: String, } @@ -133,7 +142,7 @@ pub(super) fn to_mb_address(mailbox: &Mailbox) -> MbAddress<'_> { } /// Convert a slice of `Mailbox` to a `mail_builder::Address` (list). -pub(super) fn to_mb_address_list(mailboxes: &[Mailbox]) -> MbAddress<'_> { +pub(crate) fn to_mb_address_list(mailboxes: &[Mailbox]) -> MbAddress<'_> { MbAddress::new_list(mailboxes.iter().map(to_mb_address).collect()) } @@ -258,15 +267,19 @@ fn parse_message_headers(headers: &[Value]) -> ParsedMessageHeaders { let name = header.get("name").and_then(|v| v.as_str()).unwrap_or(""); let value = header.get("value").and_then(|v| v.as_str()).unwrap_or(""); - match name { - "From" => parsed.from = value.to_string(), - "Reply-To" => append_address_list_header_value(&mut parsed.reply_to, value), - "To" => append_address_list_header_value(&mut parsed.to, value), - "Cc" => append_address_list_header_value(&mut parsed.cc, value), - "Subject" => parsed.subject = value.to_string(), - "Date" => parsed.date = value.to_string(), - "Message-ID" | "Message-Id" => parsed.message_id = value.to_string(), - "References" => append_header_value(&mut parsed.references, value), + // RFC 5322 §1.2.2: header field names are 
case-insensitive. Gmail + // preserves the sender's original casing, so `"CC"` from + // Exchange/Outlook would fall through a case-sensitive match and + // silently drop CC recipients in +reply-all (upstream #642). + match name.to_ascii_lowercase().as_str() { + "from" => parsed.from = value.to_string(), + "reply-to" => append_address_list_header_value(&mut parsed.reply_to, value), + "to" => append_address_list_header_value(&mut parsed.to, value), + "cc" => append_address_list_header_value(&mut parsed.cc, value), + "subject" => parsed.subject = value.to_string(), + "date" => parsed.date = value.to_string(), + "message-id" => parsed.message_id = value.to_string(), + "references" => append_header_value(&mut parsed.references, value), _ => {} } } @@ -618,16 +631,30 @@ pub(super) async fn resolve_sender( result = Some(vec![Mailbox::parse(&raw)]); } Ok(None) => {} - Err(e) if matches!(&e, GwsError::Api { code: 403, .. }) => { - // Token exists but doesn't carry the scope. + Err(e) + if matches!( + &e, + GwsError::Api { + code: 401 | 403, + .. + } + ) => + { + // Access token was minted without the profile scope, or + // the scope was revoked. If `gws auth status` shows + // `userinfo.profile` in `scopes`, re-running + // `gws auth login` refreshes the cached token and clears + // the mismatch; otherwise add the scope explicitly. eprintln!( - "Tip: run `gws auth login` and grant the \"profile\" scope \ - to include your display name in the From header" + "Note: userinfo endpoint denied the profile request. \ + Re-run `gws auth login` to refresh credentials; if the \ + \"profile\" scope is missing from `gws auth status`, \ + grant it there. Sending From header without display name." 
); } Err(e) => { eprintln!( - "Note: could not fetch display name from People API ({})", + "Note: could not fetch display name from userinfo endpoint ({})", sanitize_for_terminal(&e.to_string()) ); } @@ -639,20 +666,26 @@ pub(super) async fn resolve_sender( Ok(result) } -/// Fetch the authenticated user's display name from the People API. -/// Requires a token with the `profile` scope. +/// Fetch the authenticated user's display name from the OIDC userinfo endpoint. +/// Requires a token carrying the `userinfo.profile` (or standard `profile`) scope. +/// +/// Upstream #644: the previous implementation used People API +/// (`/people/me?personFields=names`), which returns 403 for some personal +/// Gmail accounts even when `userinfo.profile` is granted — producing a +/// misleading "grant the profile scope" tip for users who already had it. +/// The OIDC userinfo endpoint accepts the same `userinfo.profile` scope +/// and responds uniformly across Workspace and personal accounts. async fn fetch_profile_display_name( client: &reqwest::Client, token: &str, ) -> Result, GwsError> { let resp = crate::client::send_with_retry(|| { client - .get("https://people.googleapis.com/v1/people/me") - .query(&[("personFields", "names")]) + .get("https://openidconnect.googleapis.com/v1/userinfo") .bearer_auth(token) }) .await - .map_err(|e| GwsError::Other(anyhow::anyhow!("People API request failed: {e}")))?; + .map_err(|e| GwsError::Other(anyhow::anyhow!("userinfo request failed: {e}")))?; if !resp.status().is_success() { let status = resp.status().as_u16(); @@ -660,22 +693,21 @@ async fn fetch_profile_display_name( .text() .await .unwrap_or_else(|_| "(error body unreadable)".to_string()); - return Err(build_api_error(status, &body, "People API request failed")); + return Err(build_api_error(status, &body, "userinfo request failed")); } - let body: Value = resp.json().await.map_err(|e| { - GwsError::Other(anyhow::anyhow!("Failed to parse People API response: {e}")) - })?; + let 
body: Value = resp + .json() + .await + .map_err(|e| GwsError::Other(anyhow::anyhow!("Failed to parse userinfo response: {e}")))?; Ok(parse_profile_display_name(&body)) } -/// Extract the display name from a People API `people.get` response. +/// Extract the display name from an OIDC userinfo response. +/// The response is a flat object with a top-level `name` string. fn parse_profile_display_name(body: &Value) -> Option { - body.get("names") - .and_then(|v| v.as_array()) - .and_then(|names| names.first()) - .and_then(|n| n.get("displayName")) + body.get("name") .and_then(|v| v.as_str()) .filter(|s| !s.is_empty()) .map(sanitize_control_chars) @@ -725,7 +757,7 @@ async fn fetch_attachment_data( )) })?; - URL_SAFE + URL_SAFE_LENIENT .decode(data_str) .map_err(|e| GwsError::Other(anyhow::anyhow!("Failed to decode attachment data: {e}"))) } @@ -817,7 +849,7 @@ struct PayloadContents { /// Decode a base64url-encoded text body part, returning the string on success. fn decode_text_body(data: &str, mime_label: &str) -> Option { - match URL_SAFE.decode(data) { + match URL_SAFE_LENIENT.decode(data) { Ok(decoded) => match String::from_utf8(decoded) { Ok(s) => Some(s), Err(e) => { @@ -1165,7 +1197,7 @@ pub(super) fn set_threading_headers<'x>( } /// Apply optional From, CC, and BCC headers to a `MessageBuilder`. -pub(super) fn apply_optional_headers<'x>( +pub(crate) fn apply_optional_headers<'x>( mut mb: mail_builder::MessageBuilder<'x>, from: Option<&'x [Mailbox]>, cc: Option<&'x [Mailbox]>, @@ -1189,7 +1221,7 @@ pub(super) fn apply_optional_headers<'x>( /// `multipart/related` container so `cid:` references render correctly. Gmail's API /// rewrites `Content-Disposition: inline` to `attachment` when parts sit in /// `multipart/mixed`, so the explicit `multipart/related` structure is required. 
-pub(super) fn finalize_message( +pub(crate) fn finalize_message( mb: mail_builder::MessageBuilder<'_>, body: impl Into, html: bool, @@ -1282,7 +1314,7 @@ const MAX_TOTAL_ATTACHMENT_BYTES: u64 = 25 * 1024 * 1024; /// from the Gmail API). mail-builder handles RFC 2231 encoding for non-ASCII /// filenames in the Content-Disposition header. #[derive(Debug)] -pub(super) struct Attachment { +pub(crate) struct Attachment { pub filename: String, pub content_type: String, pub data: Vec, @@ -1402,7 +1434,7 @@ fn resolve_draft_method( } /// Resolve either `users.drafts.create` or `users.messages.send` based on the draft flag. -pub(super) fn resolve_mail_method( +pub(crate) fn resolve_mail_method( doc: &crate::discovery::RestDescription, draft: bool, ) -> Result<&crate::discovery::RestMethod, GwsError> { @@ -2427,6 +2459,41 @@ mod tests { assert_eq!(original.body_html.as_deref(), Some("

HTML only

")); }

+    #[test]
+    fn test_parse_original_message_handles_non_canonical_header_casing() {
+        // Regression for upstream #642: Exchange/Outlook emit "CC" in uppercase,
+        // other MTAs use lowercase. All variants must be recognized.
+        let msg = json!({
+            "threadId": "thread-1",
+            "snippet": "s",
+            "payload": {
+                "mimeType": "text/plain",
+                "headers": [
+                    { "name": "FROM", "value": "alice@example.com" },
+                    { "name": "to", "value": "bob@example.com" },
+                    { "name": "CC", "value": "carol@example.com" },
+                    { "name": "subject", "value": "Re: test" },
+                    { "name": "MESSAGE-ID", "value": "<msg@example.com>" },
+                    { "name": "references", "value": "<ref@example.com>" }
+                ],
+                "body": { "data": URL_SAFE.encode("body") }
+            }
+        });
+
+        let original = parse_original_message(&msg).unwrap();
+        assert_eq!(original.from.email, "alice@example.com");
+        assert_eq!(original.to.len(), 1);
+        assert_eq!(original.to[0].email, "bob@example.com");
+        let cc = original
+            .cc
+            .expect("CC header should be recognized regardless of casing");
+        assert_eq!(cc.len(), 1);
+        assert_eq!(cc[0].email, "carol@example.com");
+        assert_eq!(original.subject, "Re: test");
+        assert_eq!(original.message_id, "msg@example.com");
+        assert_eq!(original.references, vec!["ref@example.com"]);
+    }
+
 #[test] fn test_parse_original_message_multipart_alternative() { let msg = json!({ @@ -3393,23 +3460,20 @@ mod tests { assert!(addrs[0].name.is_none()); } - // --- parse_profile_display_name tests --- + // --- parse_profile_display_name tests (OIDC userinfo responses) --- #[test] fn test_parse_profile_display_name() { + // OIDC userinfo response shape — flat object with top-level `name`.
let body = serde_json::json!({ - "resourceName": "people/112118466613566642951", - "etag": "%EgUBAi43PRoEAQIFByIMR0xCc0FMcVBJQmc9", - "names": [{ - "metadata": { - "primary": true, - "source": { "type": "DOMAIN_PROFILE", "id": "112118466613566642951" } - }, - "displayName": "Malo Bourgon", - "familyName": "Bourgon", - "givenName": "Malo", - "displayNameLastFirst": "Bourgon, Malo" - }] + "sub": "112118466613566642951", + "name": "Malo Bourgon", + "given_name": "Malo", + "family_name": "Bourgon", + "picture": "https://lh3.googleusercontent.com/a/example", + "email": "malo@example.com", + "email_verified": true, + "locale": "en" }); assert_eq!( parse_profile_display_name(&body).as_deref(), @@ -3423,6 +3487,28 @@ mod tests { URL_SAFE.encode(s) } + fn base64url_no_pad(s: &str) -> String { + URL_SAFE_NO_PAD.encode(s) + } + + #[test] + fn test_extract_payload_contents_unpadded_base64() { + // Gmail API returns base64url without padding; ensure we decode it correctly. + let text_data = base64url_no_pad("Hello plain text"); + let html_data = base64url_no_pad("

Hello HTML

"); + assert!(!text_data.ends_with('='), "test data must be unpadded"); + let payload = json!({ + "mimeType": "multipart/alternative", + "parts": [ + { "mimeType": "text/plain", "body": { "data": text_data, "size": 16 } }, + { "mimeType": "text/html", "body": { "data": html_data, "size": 18 } }, + ] + }); + let contents = extract_payload_contents(&payload); + assert_eq!(contents.body_text.as_deref(), Some("Hello plain text")); + assert_eq!(contents.body_html.as_deref(), Some("

Hello HTML

")); + } + #[test] fn test_extract_payload_contents_simple() { let text_data = base64url("Hello plain text"); @@ -3590,15 +3676,13 @@ mod tests { #[test] fn test_parse_profile_display_name_empty_name() { - let body = serde_json::json!({ - "names": [{ "displayName": "" }] - }); + let body = serde_json::json!({ "sub": "x", "name": "" }); assert!(parse_profile_display_name(&body).is_none()); } #[test] - fn test_parse_profile_display_name_no_names_array() { - let body = serde_json::json!({ "names": "not-an-array" }); + fn test_parse_profile_display_name_name_not_string() { + let body = serde_json::json!({ "sub": "x", "name": 42 }); assert!(parse_profile_display_name(&body).is_none()); } @@ -3980,3 +4064,119 @@ mod tests { ); } } + +// --------------------------------------------------------------------------- +// Fork-only: MCP bridge functions +// +// These pub(crate) façade functions expose internal helpers to the MCP server +// (src/mcp_server.rs) WITHOUT changing the visibility of upstream functions. +// When merging upstream, re-apply this block at the end of the file. +// --------------------------------------------------------------------------- + +/// Compose a reply message for the MCP `gmail_reply` helper tool. +/// +/// Fetches the original message metadata, resolves the reply recipient, +/// builds threading headers, and returns the RFC 2822 raw message together +/// with the thread ID so the caller can send it via the Gmail API. +pub(crate) async fn mcp_compose_reply( + message_id: &str, + body_text: &str, + reply_all: bool, + cc: Option<&[Mailbox]>, + bcc: Option<&[Mailbox]>, +) -> Result<(String, Option), GwsError> { + // 1. Auth + fetch original message + let token = crate::auth::get_token(&[GMAIL_SCOPE]) + .await + .map_err(|e| GwsError::Auth(format!("Gmail auth failed: {e}")))?; + let client = crate::client::build_client()?; + let original = fetch_message_metadata(&client, &token, message_id).await?; + + // 2. 
Determine reply recipients
+    let to_mailboxes = if reply_all {
+        // Reply-To (or From) + original To + CC, excluding self
+        let mut recipients = match &original.reply_to {
+            Some(rt) => rt.clone(),
+            None => vec![original.from.clone()],
+        };
+        recipients.extend(original.to.iter().cloned());
+        if let Some(orig_cc) = &original.cc {
+            recipients.extend(orig_cc.iter().cloned());
+        }
+        // Best-effort self-dedup: fetch profile and exclude own address
+        if let Ok(profile_email) = fetch_self_email(&client, &token).await {
+            let lower = profile_email.to_lowercase();
+            recipients.retain(|m| m.email_lowercase() != lower);
+        }
+        recipients
+    } else {
+        match &original.reply_to {
+            Some(rt) => rt.clone(),
+            None => vec![original.from.clone()],
+        }
+    };
+
+    if to_mailboxes.is_empty() {
+        return Err(GwsError::Validation(
+            "No recipient could be determined from the original message".to_string(),
+        ));
+    }
+
+    // 3. Subject
+    let subject = if original.subject.to_lowercase().starts_with("re:") {
+        original.subject.clone()
+    } else {
+        format!("Re: {}", original.subject)
+    };
+
+    // 4. Threading headers
+    let refs = build_references_chain(&original);
+    let threading = ThreadingHeaders {
+        in_reply_to: &original.message_id,
+        references: &refs,
+    };
+
+    // 5. Build RFC 2822 message
+    let mb = mail_builder::MessageBuilder::new()
+        .to(to_mb_address_list(&to_mailboxes))
+        .subject(&subject);
+    let mb = apply_optional_headers(mb, None, cc, bcc);
+    let mb = set_threading_headers(mb, &threading);
+    let raw_message = finalize_message(mb, body_text, false, &[])?;
+
+    Ok((raw_message, original.thread_id.clone()))
+}
+
+/// Wrapper around the internal `build_send_metadata` for MCP use.
+pub(crate) fn mcp_build_send_metadata(thread_id: Option<&str>, draft: bool) -> Option<Value> {
+    build_send_metadata(thread_id, draft)
+}
+
+/// Fetch the authenticated user's primary email (for self-dedup in reply-all).
+async fn fetch_self_email(client: &reqwest::Client, token: &str) -> Result<String, GwsError> {
+    let resp = crate::client::send_with_retry(|| {
+        client
+            .get("https://gmail.googleapis.com/gmail/v1/users/me/profile")
+            .bearer_auth(token)
+    })
+    .await
+    .map_err(|e| GwsError::Other(anyhow::anyhow!("Failed to fetch user profile: {e}")))?;
+
+    if !resp.status().is_success() {
+        return Err(GwsError::Other(anyhow::anyhow!(
+            "Profile fetch failed with status {}",
+            resp.status()
+        )));
+    }
+
+    let profile: serde_json::Value = resp
+        .json()
+        .await
+        .map_err(|e| GwsError::Other(anyhow::anyhow!("Failed to parse profile: {e}")))?;
+
+    profile
+        .get("emailAddress")
+        .and_then(|v| v.as_str())
+        .map(|s| s.to_string())
+        .ok_or_else(|| GwsError::Other(anyhow::anyhow!("Profile missing emailAddress")))
+} diff --git a/crates/google-workspace-cli/src/helpers/script.rs b/crates/google-workspace-cli/src/helpers/script.rs index 11bcdebe..c97e22a7 100644 --- a/crates/google-workspace-cli/src/helpers/script.rs +++ b/crates/google-workspace-cli/src/helpers/script.rs @@ -169,13 +169,8 @@ fn process_file(path: &Path) -> Result, GwsError> { filename.trim_end_matches(".js").trim_end_matches(".gs"), ), "html" => ("HTML", filename.trim_end_matches(".html")), - "json" => { - if filename == "appsscript.json" { - ("JSON", "appsscript") - } else { - return Ok(None); - } - } + // Only `appsscript.json` is uploaded as JSON; other .json files fall through to `_` below.
+ "json" if filename == "appsscript.json" => ("JSON", "appsscript"), _ => return Ok(None), }; diff --git a/crates/google-workspace-cli/src/main.rs b/crates/google-workspace-cli/src/main.rs index 41dcc1e1..6722546b 100644 --- a/crates/google-workspace-cli/src/main.rs +++ b/crates/google-workspace-cli/src/main.rs @@ -32,6 +32,8 @@ mod fs_util; mod generate_skills; mod helpers; mod logging; +mod mcp_http_server; +mod mcp_server; mod oauth_config; mod output; mod schema; @@ -106,7 +108,11 @@ async fn run() -> Result<(), GwsError> { } if is_version_flag(&first_arg) { - println!("gws {}", env!("CARGO_PKG_VERSION")); + println!( + "gws {} ({})", + env!("CARGO_PKG_VERSION"), + env!("GWS_FORK_DESCRIBE") + ); println!("This is not an officially supported Google product."); return Ok(()); } @@ -138,6 +144,11 @@ async fn run() -> Result<(), GwsError> { return auth_commands::handle_auth_command(&auth_args).await; } + // Handle the `mcp` command + if first_arg == "mcp" { + return mcp_server::start(&args[1..]).await; + } + // Parse service name and optional version override let (api_name, version) = parse_service_and_version(&args, &first_arg)?; diff --git a/crates/google-workspace-cli/src/mcp_http_server.rs b/crates/google-workspace-cli/src/mcp_http_server.rs new file mode 100644 index 00000000..ecd448d8 --- /dev/null +++ b/crates/google-workspace-cli/src/mcp_http_server.rs @@ -0,0 +1,684 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +//! MCP Streamable HTTP transport with optional OAuth2 PKCE authentication. +//! +//! Phase 1 (no auth): `gws mcp -s gmail --transport http --port 3000` +//! Phase 2 (OAuth2): `gws mcp -s gmail --transport http --port 3000 --auth` +//! +//! When `--auth` is enabled the server implements the MCP Authorization spec +//! (2025-11-25, RFC 9728 / RFC 8414 / PKCE-S256) and acts as both OAuth2 +//! Authorization Server and Resource Server. Google is used as the identity +//! provider; a `client_secret.json` created via `gws auth setup` is required. +//! +//! Phase 2+3 limitation: Google API calls still use the shared credential from +//! `gws auth login`. Per-user token isolation is Phase 4. + +use std::{ + collections::HashMap, + sync::Arc, + time::{Duration, SystemTime}, +}; + +use axum::{ + extract::{Query, State}, + http::{Request, StatusCode}, + middleware::{self, Next}, + response::{IntoResponse, Redirect, Response}, + routing::{get, post}, + Form, Json, Router, +}; +use rmcp::transport::{ + streamable_http_server::session::local::LocalSessionManager, StreamableHttpServerConfig, + StreamableHttpService, +}; +use rmcp::{ + model::{ + CallToolRequestParams, CallToolResult, Implementation, ListToolsResult, + PaginatedRequestParams, ServerInfo, Tool, + }, + service::RequestContext, + ErrorData as McpError, RoleServer, ServerHandler, +}; +use serde::{Deserialize, Serialize}; +use serde_json::{json, Value}; +use sha2::{Digest, Sha256}; +use tokio::sync::Mutex; +use uuid::Uuid; + +use crate::{ + error::GwsError, + mcp_server::{build_tools_list, handle_tools_call, ServerConfig}, + oauth_config::{load_client_config, InstalledConfig}, +}; + +// ─── Constants ─────────────────────────────────────────────────────────────── + +const SESSION_TTL: Duration = Duration::from_secs(8 * 3600); // 8 h +const PENDING_TTL: Duration = Duration::from_secs(600); // 10 min + +// ─── Phase 1: MCP handler ──────────────────────────────────────────────────── + +struct GwsMcpHandler { + 
config: Arc, + tools_cache: Arc>>>, +} + +impl ServerHandler for GwsMcpHandler { + fn get_info(&self) -> ServerInfo { + let mut info = ServerInfo::default(); + info.server_info = Implementation::new("gws-mcp", env!("CARGO_PKG_VERSION")); + info.capabilities.tools = Some(Default::default()); + info + } + + async fn list_tools( + &self, + _request: Option, + _ctx: RequestContext, + ) -> Result { + let mut cache = self.tools_cache.lock().await; + if cache.is_none() { + let values = build_tools_list(&self.config) + .await + .map_err(|e| McpError::internal_error(e.to_string(), None))?; + let tools: Vec = values + .into_iter() + .filter_map(|v| { + serde_json::from_value(v.clone()) + .map_err(|e| { + let name = v.get("name").and_then(|n| n.as_str()).unwrap_or("?"); + eprintln!("[gws mcp] Warning: skipping tool '{name}': {e}"); + }) + .ok() + }) + .collect(); + *cache = Some(tools); + } + Ok(ListToolsResult { + tools: cache.as_ref().unwrap().clone(), + ..Default::default() + }) + } + + async fn call_tool( + &self, + request: CallToolRequestParams, + _ctx: RequestContext, + ) -> Result { + let params = json!({ + "name": request.name.as_ref(), + "arguments": request.arguments.unwrap_or_default() + }); + let result = handle_tools_call(¶ms, &self.config) + .await + .map_err(|e| McpError::internal_error(e.to_string(), None))?; + serde_json::from_value(result).map_err(|e| McpError::internal_error(e.to_string(), None)) + } +} + +// ─── Phase 2: Auth types ───────────────────────────────────────────────────── + +/// State stored between `/oauth/authorize` and `/oauth/callback`. +struct PendingAuth { + code_challenge: String, + #[allow(dead_code)] // kept for future client_id validation + client_id: String, + redirect_uri: String, + created_at: SystemTime, +} + +/// State stored between `/oauth/callback` and `/oauth/token`. 
+struct PendingCode {
+    code_challenge: String,
+    client_redirect_uri: String,
+    email: String,
+    created_at: SystemTime,
+}
+
+struct UserSession {
+    #[allow(dead_code)] // used in Phase 4 for per-user token lookup
+    email: String,
+    #[allow(dead_code)] // NOTE(review): created_at IS read by bearer_auth_middleware via is_expired(); this allow looks redundant — confirm and drop
+    created_at: SystemTime,
+}
+
+#[derive(Default)]
+struct AuthStore {
+    /// OAuth `state` value → PendingAuth (before Google callback)
+    pending: Mutex>,
+    /// Short-lived auth code → PendingCode (after Google callback, before token exchange)
+    codes: Mutex>,
+    /// Bearer UUID → UserSession (live sessions)
+    sessions: Mutex>,
+}
+
+#[derive(Clone)]
+struct AppState {
+    auth_store: Arc,
+    oauth_cfg: Arc,
+    port: u16,
+    bind: String,
+}
+
+// ─── Helpers ─────────────────────────────────────────────────────────────────
+
+fn is_expired(created_at: SystemTime, ttl: Duration) -> bool {
+    SystemTime::now()
+        .duration_since(created_at)
+        .map(|d| d >= ttl)
+        .unwrap_or(true)
+}
+
+fn base64url(bytes: &[u8]) -> String {
+    use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine};
+    URL_SAFE_NO_PAD.encode(bytes)
+}
+
+fn verify_pkce_s256(verifier: &str, challenge: &str) -> bool {
+    base64url(&Sha256::digest(verifier.as_bytes())) == challenge
+}
+
+fn urlencode(s: &str) -> String {
+    percent_encoding::utf8_percent_encode(s, percent_encoding::NON_ALPHANUMERIC).to_string()
+}
+
+/// Returns the public base URL of the server (uses `localhost` even when bound to `0.0.0.0`). NOTE(review): the `"::"` arm maps to `::1` unbracketed, so the result is `http://::1:{port}` — an IPv6 literal host must be bracketed (`http://[::1]:{port}`, RFC 3986 §3.2.2); confirm and fix.
+fn server_base(port: u16, bind: &str) -> String { + let host = match bind { + "0.0.0.0" => "localhost", + "::" => "::1", + other => other, + }; + format!("http://{host}:{port}") +} + +fn build_google_auth_url( + client_id: &str, + redirect_uri: &str, + scopes: &str, + state: &str, +) -> String { + format!( + "https://accounts.google.com/o/oauth2/auth?\ + scope={}&access_type=offline&redirect_uri={}&\ + response_type=code&client_id={}&state={}&\ + prompt=select_account+consent", + urlencode(scopes), + urlencode(redirect_uri), + urlencode(client_id), + urlencode(state), + ) +} + +// ─── Phase 3: Bearer middleware ─────────────────────────────────────────────── + +async fn bearer_auth_middleware( + State(state): State, + request: Request, + next: Next, +) -> Response { + let path = request.uri().path(); + // OAuth and well-known endpoints are public + if path.starts_with("/oauth/") || path.starts_with("/.well-known/") { + return next.run(request).await; + } + + let Some(token) = request + .headers() + .get("authorization") + .and_then(|v| v.to_str().ok()) + .and_then(|s| s.strip_prefix("Bearer ")) + .map(|s| s.to_string()) + else { + return ( + StatusCode::UNAUTHORIZED, + [( + "WWW-Authenticate", + "Bearer realm=\"gws-mcp\", resource_metadata=\"/.well-known/oauth-protected-resource\"", + )], + ) + .into_response(); + }; + + let sessions = state.auth_store.sessions.lock().await; + match sessions.get(&token) { + Some(s) if !is_expired(s.created_at, SESSION_TTL) => {} + _ => { + return ( + StatusCode::UNAUTHORIZED, + [( + "WWW-Authenticate", + "Bearer realm=\"gws-mcp\", error=\"invalid_token\", resource_metadata=\"/.well-known/oauth-protected-resource\"", + )], + ) + .into_response(); + } + } + + next.run(request).await +} + +// ─── OAuth metadata endpoints ──────────────────────────────────────────────── + +/// RFC 9728 — OAuth 2.0 Protected Resource Metadata +async fn protected_resource_metadata(State(state): State) -> Json { + let base = server_base(state.port, 
&state.bind); + Json(json!({ + "resource": base, + "authorization_servers": [base], + "bearer_methods_supported": ["header"], + "resource_documentation": "https://github.com/shigechika/gws-mcp" + })) +} + +/// RFC 8414 — OAuth 2.0 Authorization Server Metadata +async fn authorization_server_metadata(State(state): State) -> Json { + let base = server_base(state.port, &state.bind); + Json(json!({ + "issuer": base, + "authorization_endpoint": format!("{base}/oauth/authorize"), + "token_endpoint": format!("{base}/oauth/token"), + "registration_endpoint": format!("{base}/oauth/register"), + "response_types_supported": ["code"], + "grant_types_supported": ["authorization_code"], + "code_challenge_methods_supported": ["S256"], + "token_endpoint_auth_methods_supported": ["none"], + "scopes_supported": ["openid", "email", "profile"] + })) +} + +// ─── Dynamic Client Registration stub (RFC 7591) ───────────────────────────── + +#[derive(Deserialize)] +struct RegisterRequest { + client_name: Option, + redirect_uris: Option>, +} + +async fn oauth_register( + State(_): State, + Json(req): Json, +) -> impl IntoResponse { + let client_id = req + .client_name + .filter(|n| !n.is_empty()) + .unwrap_or_else(|| "gws-mcp-client".to_string()); + ( + StatusCode::CREATED, + Json(json!({ + "client_id": client_id, + "redirect_uris": req.redirect_uris.unwrap_or_default(), + "grant_types": ["authorization_code"], + "response_types": ["code"], + "token_endpoint_auth_method": "none" + })), + ) +} + +// ─── /oauth/authorize ──────────────────────────────────────────────────────── + +#[derive(Deserialize)] +struct AuthorizeParams { + response_type: String, + client_id: String, + redirect_uri: String, + state: String, + code_challenge: String, + code_challenge_method: String, + scope: Option, +} + +async fn oauth_authorize( + State(state): State, + Query(p): Query, +) -> Result { + if p.response_type != "code" { + return Err(( + StatusCode::BAD_REQUEST, + "unsupported_response_type".to_string(), 
+ )); + } + if p.code_challenge_method != "S256" { + return Err(( + StatusCode::BAD_REQUEST, + "only S256 code_challenge_method is supported".to_string(), + )); + } + + state.auth_store.pending.lock().await.insert( + p.state.clone(), + PendingAuth { + code_challenge: p.code_challenge, + client_id: p.client_id, + redirect_uri: p.redirect_uri, + created_at: SystemTime::now(), + }, + ); + + let callback = format!("{}/oauth/callback", server_base(state.port, &state.bind)); + // Request openid/email/profile from Google to identify the user. + // GWS API scopes are not needed here because Phase 2+3 still uses the + // shared credential from `gws auth login`. Per-user GWS tokens are Phase 4. + let scopes = p.scope.as_deref().unwrap_or("openid email profile"); + let google_url = + build_google_auth_url(&state.oauth_cfg.client_id, &callback, scopes, &p.state); + + Ok(Redirect::to(&google_url)) +} + +// ─── /oauth/callback ───────────────────────────────────────────────────────── + +#[derive(Deserialize)] +struct CallbackParams { + code: Option, + state: String, + error: Option, +} + +async fn oauth_callback( + State(state): State, + Query(p): Query, +) -> Result { + if let Some(err) = p.error { + return Err(( + StatusCode::BAD_REQUEST, + format!("Google OAuth error: {err}"), + )); + } + let google_code = p + .code + .ok_or_else(|| (StatusCode::BAD_REQUEST, "missing code".to_string()))?; + + let pending = { + let mut lock = state.auth_store.pending.lock().await; + lock.remove(&p.state) + .filter(|pa| !is_expired(pa.created_at, PENDING_TTL)) + .ok_or_else(|| (StatusCode::BAD_REQUEST, "invalid or expired state".to_string()))? 
+ }; + + let callback = format!("{}/oauth/callback", server_base(state.port, &state.bind)); + let token_resp = + exchange_google_code(&state.oauth_cfg, &google_code, &callback) + .await + .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?; + + let email = get_google_email(&token_resp.access_token) + .await + .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?; + + let auth_code = Uuid::new_v4().to_string(); + state.auth_store.codes.lock().await.insert( + auth_code.clone(), + PendingCode { + code_challenge: pending.code_challenge, + client_redirect_uri: pending.redirect_uri.clone(), + email, + created_at: SystemTime::now(), + }, + ); + + let location = format!( + "{}?code={}&state={}", + pending.redirect_uri, + urlencode(&auth_code), + urlencode(&p.state), + ); + Ok(Redirect::to(&location)) +} + +// ─── /oauth/token ──────────────────────────────────────────────────────────── + +#[derive(Deserialize)] +struct TokenRequest { + grant_type: String, + code: String, + code_verifier: String, + redirect_uri: Option, +} + +#[derive(Serialize)] +struct TokenResponse { + access_token: String, + token_type: &'static str, + expires_in: u64, +} + +async fn oauth_token( + State(state): State, + Form(req): Form, +) -> Result, (StatusCode, Json)> { + if req.grant_type != "authorization_code" { + return Err(( + StatusCode::BAD_REQUEST, + Json(json!({"error": "unsupported_grant_type"})), + )); + } + + let pending_code = { + let mut lock = state.auth_store.codes.lock().await; + lock.remove(&req.code) + .filter(|pc| !is_expired(pc.created_at, PENDING_TTL)) + .ok_or_else(|| { + ( + StatusCode::BAD_REQUEST, + Json(json!({"error": "invalid_grant"})), + ) + })? 
+ }; + + // Validate redirect_uri if provided + if let Some(uri) = &req.redirect_uri { + if *uri != pending_code.client_redirect_uri { + return Err(( + StatusCode::BAD_REQUEST, + Json(json!({"error": "invalid_grant", "error_description": "redirect_uri mismatch"})), + )); + } + } + + // Verify PKCE S256 + if !verify_pkce_s256(&req.code_verifier, &pending_code.code_challenge) { + return Err(( + StatusCode::BAD_REQUEST, + Json(json!({"error": "invalid_grant", "error_description": "PKCE verification failed"})), + )); + } + + let bearer = Uuid::new_v4().to_string(); + state.auth_store.sessions.lock().await.insert( + bearer.clone(), + UserSession { + email: pending_code.email.clone(), + created_at: SystemTime::now(), + }, + ); + + eprintln!("[gws mcp] Session created for {}", pending_code.email); + + Ok(Json(TokenResponse { + access_token: bearer, + token_type: "bearer", + expires_in: SESSION_TTL.as_secs(), + })) +} + +// ─── Google helpers ─────────────────────────────────────────────────────────── + +#[derive(Deserialize)] +struct GoogleTokenResp { + access_token: String, +} + +async fn exchange_google_code( + cfg: &InstalledConfig, + code: &str, + redirect_uri: &str, +) -> anyhow::Result { + let client = crate::client::shared_client()?; + let params = [ + ("client_id", cfg.client_id.as_str()), + ("client_secret", cfg.client_secret.as_str()), + ("code", code), + ("redirect_uri", redirect_uri), + ("grant_type", "authorization_code"), + ]; + let resp = client + .post("https://oauth2.googleapis.com/token") + .form(¶ms) + .send() + .await?; + if !resp.status().is_success() { + let status = resp.status(); + let body = crate::auth::response_text_or_placeholder(resp.text().await); + anyhow::bail!("Google token exchange failed ({status}): {body}"); + } + resp.json().await.map_err(Into::into) +} + +async fn get_google_email(access_token: &str) -> anyhow::Result { + #[derive(Deserialize)] + struct UserinfoResp { + email: String, + } + let client = 
crate::client::shared_client()?; + let resp: UserinfoResp = client + .get("https://openidconnect.googleapis.com/v1/userinfo") + .bearer_auth(access_token) + .send() + .await? + .json() + .await?; + Ok(resp.email) +} + +// ─── Router builder ─────────────────────────────────────────────────────────── + +fn build_auth_router( + mcp_service: StreamableHttpService, + app_state: AppState, +) -> Router { + Router::new() + .route( + "/.well-known/oauth-protected-resource", + get(protected_resource_metadata), + ) + .route( + "/.well-known/oauth-authorization-server", + get(authorization_server_metadata), + ) + .route("/oauth/register", post(oauth_register)) + .route("/oauth/authorize", get(oauth_authorize)) + .route("/oauth/callback", get(oauth_callback)) + .route("/oauth/token", post(oauth_token)) + .nest_service("/mcp", mcp_service) + .layer(middleware::from_fn_with_state( + app_state.clone(), + bearer_auth_middleware, + )) + .with_state(app_state) +} + +// ─── Public entry point ─────────────────────────────────────────────────────── + +pub(crate) async fn start_http( + config: ServerConfig, + port: u16, + bind: String, + enable_auth: bool, +) -> Result<(), GwsError> { + let config = Arc::new(config); + let tools_cache: Arc>>> = Arc::new(Mutex::new(None)); + + let loopback = matches!(bind.as_str(), "127.0.0.1" | "::1" | "localhost"); + let http_config = if loopback { + StreamableHttpServerConfig::default() + } else { + StreamableHttpServerConfig::default().disable_allowed_hosts() + }; + + let mcp_service: StreamableHttpService = { + let config = config.clone(); + let tools_cache = tools_cache.clone(); + StreamableHttpService::new( + move || { + Ok(GwsMcpHandler { + config: config.clone(), + tools_cache: tools_cache.clone(), + }) + }, + Default::default(), + http_config, + ) + }; + + let app = if enable_auth { + let oauth_cfg = load_client_config().map_err(|e| { + GwsError::Other(anyhow::anyhow!( + "--auth requires client_secret.json (run `gws auth setup` first): {e}" + 
)) + })?; + let app_state = AppState { + auth_store: Arc::new(AuthStore::default()), + oauth_cfg: Arc::new(oauth_cfg), + port, + bind: bind.clone(), + }; + build_auth_router(mcp_service, app_state) + } else { + Router::new().nest_service("/mcp", mcp_service) + }; + + let addr = format!("{bind}:{port}"); + let base = server_base(port, &bind); + eprintln!("[gws mcp] HTTP server listening on {base}/mcp"); + if enable_auth { + eprintln!("[gws mcp] OAuth2 auth enabled — callback URL: {base}/oauth/callback"); + eprintln!( + "[gws mcp] Ensure {base}/oauth/callback is registered as a redirect URI in Google Cloud Console" + ); + } + if !loopback { + eprintln!("[gws mcp] Warning: server accessible from external hosts (--bind {bind})"); + } + + let listener = tokio::net::TcpListener::bind(&addr) + .await + .map_err(|e| GwsError::Other(anyhow::anyhow!("Failed to bind to {addr}: {e}")))?; + + axum::serve(listener, app) + .await + .map_err(|e| GwsError::Other(anyhow::anyhow!("HTTP server error: {e}")))?; + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn pkce_s256_rfc7636_example() { + // RFC 7636 §4.6 worked example + let verifier = "dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk"; + let challenge = "E9Melhoa2OwvFrEMTJguCHaoeK1t8URWbuGJSstw-cM"; + assert!(verify_pkce_s256(verifier, challenge)); + } + + #[test] + fn pkce_s256_rejects_wrong_verifier() { + let challenge = "E9Melhoa2OwvFrEMTJguCHaoeK1t8URWbuGJSstw-cM"; + assert!(!verify_pkce_s256("wrong-verifier", challenge)); + } +} diff --git a/crates/google-workspace-cli/src/mcp_server.rs b/crates/google-workspace-cli/src/mcp_server.rs new file mode 100644 index 00000000..be1aa281 --- /dev/null +++ b/crates/google-workspace-cli/src/mcp_server.rs @@ -0,0 +1,1697 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Model Context Protocol (MCP) server implementation. +//! Provides a stdio JSON-RPC server exposing Google Workspace APIs as MCP tools. + +use crate::discovery::RestResource; +use crate::error::GwsError; +use crate::services; +use clap::{Arg, Command}; +use serde_json::{json, Value}; +use std::collections::HashMap; +use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader}; + +#[derive(Debug, Clone, Copy, PartialEq)] +pub(crate) enum ToolMode { + Full, + Compact, +} + +#[derive(Debug, Clone)] +pub(crate) struct ServerConfig { + services: Vec, + workflows: bool, + helpers: bool, + tool_mode: ToolMode, +} + +fn build_mcp_cli() -> Command { + Command::new("mcp") + .about("Starts the MCP server (stdio by default, or HTTP with --transport http)") + .arg( + Arg::new("transport") + .long("transport") + .value_parser(["stdio", "http"]) + .default_value("stdio") + .help("Transport mode: 'stdio' (default) or 'http' (Streamable HTTP)"), + ) + .arg( + Arg::new("port") + .long("port") + .short('p') + .value_parser(clap::value_parser!(u16)) + .default_value("3000") + .help("Port to listen on (HTTP transport only)"), + ) + .arg( + Arg::new("bind") + .long("bind") + .default_value("127.0.0.1") + .help("Address to bind (HTTP transport only). Use 0.0.0.0 to allow external access"), + ) + .arg( + Arg::new("auth") + .long("auth") + .action(clap::ArgAction::SetTrue) + .help("Enable OAuth2 PKCE authentication (HTTP transport only). 
Requires client_secret.json from `gws auth setup`"), + ) + .arg( + Arg::new("services") + .long("services") + .short('s') + .help("Comma separated list of services to expose (e.g., drive,gmail,all)") + .default_value(""), + ) + .arg( + Arg::new("workflows") + .long("workflows") + .short('w') + .action(clap::ArgAction::SetTrue) + .help("Expose workflows as tools"), + ) + .arg( + Arg::new("helpers") + .long("helpers") + .short('e') + .action(clap::ArgAction::SetTrue) + .help("Expose service-specific helpers as tools"), + ) + .arg( + Arg::new("tool-mode") + .long("tool-mode") + .value_parser(["compact", "full"]) + .default_value("full") + .help("Tool granularity: 'compact' (1 tool/service + discover) or 'full' (1 tool/method)"), + ) +} + +pub async fn start(args: &[String]) -> Result<(), GwsError> { + let matches = build_mcp_cli().get_matches_from(args); + let tool_mode = match matches.get_one::("tool-mode").map(|s| s.as_str()) { + Some("compact") => ToolMode::Compact, + _ => ToolMode::Full, + }; + let mut config = ServerConfig { + services: Vec::new(), + workflows: matches.get_flag("workflows"), + helpers: matches.get_flag("helpers"), + tool_mode, + }; + + let svc_str = matches.get_one::("services").unwrap(); + if !svc_str.is_empty() { + if svc_str == "all" { + config.services = services::SERVICES + .iter() + .map(|s| s.aliases[0].to_string()) + .collect(); + } else { + config.services = svc_str.split(',').map(|s| s.trim().to_string()).collect(); + } + } + + if config.services.is_empty() { + eprintln!("[gws mcp] Warning: No services configured. 
Zero tools will be exposed."); + eprintln!("[gws mcp] Re-run with: gws mcp -s (e.g., -s drive,gmail,calendar)"); + eprintln!("[gws mcp] Use -s all to expose all available services."); + } else { + eprintln!( + "[gws mcp] Starting with services: {}", + config.services.join(", ") + ); + eprintln!("[gws mcp] Tool mode: {:?}", config.tool_mode); + } + + let transport = matches + .get_one::("transport") + .map(|s| s.as_str()) + .unwrap_or("stdio"); + + if transport == "http" { + let port = *matches.get_one::("port").unwrap_or(&3000); + let bind = matches + .get_one::("bind") + .map(|s| s.as_str()) + .unwrap_or("127.0.0.1") + .to_string(); + let enable_auth = matches.get_flag("auth"); + return crate::mcp_http_server::start_http(config, port, bind, enable_auth).await; + } + + let mut stdin = BufReader::new(tokio::io::stdin()).lines(); + let mut stdout = tokio::io::stdout(); + + let mut tools_cache = None; + + while let Ok(Some(line)) = stdin.next_line().await { + if line.trim().is_empty() { + continue; + } + + match serde_json::from_str::(&line) { + Ok(req) => { + let is_notification = req.get("id").is_none(); + let method = req.get("method").and_then(|m| m.as_str()).unwrap_or(""); + let params = req.get("params").cloned().unwrap_or_else(|| json!({})); + + let result = handle_request(method, ¶ms, &config, &mut tools_cache).await; + + if !is_notification { + let id = req.get("id").unwrap(); + let response = match result { + Ok(res) => json!({ + "jsonrpc": "2.0", + "id": id, + "result": res + }), + Err(e) => json!({ + "jsonrpc": "2.0", + "id": id, + "error": { + "code": -32603, + "message": e.to_string() + } + }), + }; + + let mut out = match serde_json::to_string(&response) { + Ok(s) => s, + Err(e) => { + eprintln!("[gws mcp] Failed to serialize response: {e}"); + continue; + } + }; + out.push('\n'); + let _ = stdout.write_all(out.as_bytes()).await; + let _ = stdout.flush().await; + } + } + Err(_) => { + let response = json!({ + "jsonrpc": "2.0", + "id": Value::Null, + 
"error": { + "code": -32700, + "message": "Parse error" + } + }); + let mut out = match serde_json::to_string(&response) { + Ok(s) => s, + Err(e) => { + eprintln!("[gws mcp] Failed to serialize error response: {e}"); + continue; + } + }; + out.push('\n'); + let _ = stdout.write_all(out.as_bytes()).await; + let _ = stdout.flush().await; + } + } + } + + Ok(()) +} + +async fn handle_request( + method: &str, + params: &Value, + config: &ServerConfig, + tools_cache: &mut Option>, +) -> Result { + match method { + "initialize" => Ok(json!({ + "protocolVersion": "2024-11-05", + "serverInfo": { + "name": "gws-mcp", + "version": env!("CARGO_PKG_VERSION") + }, + "capabilities": { + "tools": {} + } + })), + "notifications/initialized" => Ok(json!({})), + "tools/list" => { + if tools_cache.is_none() { + *tools_cache = Some(build_tools_list(config).await?); + } + Ok(json!({ + "tools": tools_cache.as_ref().unwrap() + })) + } + "tools/call" => match handle_tools_call(params, config).await { + Ok(val) => Ok(val), + Err(e) => Ok(json!({ + "content": [{ "type": "text", "text": e.to_string() }], + "isError": true + })), + }, + _ => Err(GwsError::Validation(format!( + "Method not supported: {}", + method + ))), + } +} + +pub(crate) async fn build_tools_list(config: &ServerConfig) -> Result, GwsError> { + if config.tool_mode == ToolMode::Compact { + return build_compact_tools_list(config).await; + } + + let mut tools = Vec::new(); + + for svc_name in &config.services { + let (api_name, version) = + crate::parse_service_and_version(&[svc_name.to_string()], svc_name)?; + if let Ok(doc) = crate::discovery::fetch_discovery_document(&api_name, &version).await { + walk_resources(svc_name, &doc.resources, &mut tools); + } else { + eprintln!("[gws mcp] Warning: Failed to load discovery document for service '{}'. 
It will not be available as a tool.", svc_name); + } + } + + if config.helpers { + append_helper_tools(&config.services, &mut tools); + } + + if config.workflows { + append_workflow_tools(&mut tools); + } + + Ok(tools) +} + +async fn build_compact_tools_list(config: &ServerConfig) -> Result, GwsError> { + let mut tools = Vec::new(); + + for svc_name in &config.services { + let (api_name, version) = + crate::parse_service_and_version(&[svc_name.to_string()], svc_name)?; + + let description = if let Ok(doc) = + crate::discovery::fetch_discovery_document(&api_name, &version).await + { + let mut resource_names = Vec::new(); + collect_resource_paths(&doc.resources, "", &mut resource_names); + resource_names.sort(); + let svc_entry = services::SERVICES + .iter() + .find(|e| e.aliases.contains(&svc_name.as_str())); + let desc = svc_entry.map(|e| e.description).unwrap_or("Google API"); + if resource_names.is_empty() { + desc.to_string() + } else { + let names_str: Vec<&str> = resource_names.iter().map(|s| s.as_str()).collect(); + format!("{}. Resources: {}", desc, names_str.join(", ")) + } + } else { + eprintln!( + "[gws mcp] Warning: Failed to load discovery document for '{}'. 
Tool will have minimal description.", + svc_name + ); + format!("Google Workspace API: {}", svc_name) + }; + + tools.push(json!({ + "name": svc_name, + "description": description, + "inputSchema": { + "type": "object", + "properties": { + "resource": { + "type": "string", + "description": "Resource name (e.g., files, permissions)" + }, + "method": { + "type": "string", + "description": "Method name (e.g., list, get, create)" + }, + "params": { + "type": "object", + "description": "Query or path parameters" + }, + "body": { + "type": "object", + "description": "Request body" + }, + "upload": { + "type": "string", + "description": "Local file path to upload" + }, + "page_all": { + "type": "boolean", + "description": "Auto-paginate, returning all pages" + } + }, + "required": ["resource", "method"] + } + })); + } + + tools.push(json!({ + "name": "gws_discover", + "description": "Query available resources, methods, and parameter schemas for any enabled service. Call with service only to list resources; add resource to list methods; add method to get full parameter schema.", + "inputSchema": { + "type": "object", + "properties": { + "service": { + "type": "string", + "description": "Service name (e.g., drive, gmail)" + }, + "resource": { + "type": "string", + "description": "Resource name to list methods for" + }, + "method": { + "type": "string", + "description": "Method name to get full parameter schema" + } + }, + "required": ["service"] + } + })); + + if config.helpers { + append_helper_tools(&config.services, &mut tools); + } + + if config.workflows { + append_workflow_tools(&mut tools); + } + + Ok(tools) +} + +fn append_workflow_tools(tools: &mut Vec) { + tools.push(json!({ + "name": "workflow_standup_report", + "description": "Today's meetings + open tasks as a standup summary", + "inputSchema": { + "type": "object", + "properties": { + "format": { "type": "string", "description": "Output format: json, table, yaml, csv" } + } + } + })); + tools.push(json!({ + 
"name": "workflow_meeting_prep", + "description": "Prepare for your next meeting: agenda, attendees, and linked docs", + "inputSchema": { + "type": "object", + "properties": { + "calendar": { "type": "string", "description": "Calendar ID (default: primary)" } + } + } + })); + tools.push(json!({ + "name": "workflow_email_to_task", + "description": "Convert a Gmail message into a Google Tasks entry", + "inputSchema": { + "type": "object", + "properties": { + "message_id": { "type": "string", "description": "Gmail message ID" }, + "tasklist": { "type": "string", "description": "Task list ID" } + }, + "required": ["message_id"] + } + })); + tools.push(json!({ + "name": "workflow_weekly_digest", + "description": "Weekly summary: this week's meetings + unread email count", + "inputSchema": { + "type": "object", + "properties": { + "format": { "type": "string", "description": "Output format" } + } + } + })); + tools.push(json!({ + "name": "workflow_file_announce", + "description": "Announce a Drive file in a Chat space", + "inputSchema": { + "type": "object", + "properties": { + "file_id": { "type": "string", "description": "Drive file ID" }, + "space": { "type": "string", "description": "Chat space name" }, + "message": { "type": "string", "description": "Custom message" } + }, + "required": ["file_id", "space"] + } + })); +} + +fn append_helper_tools(services: &[String], tools: &mut Vec) { + if services.iter().any(|s| s == "gmail") { + tools.push(json!({ + "name": "gmail_send", + "description": "Send an email with plain text body. 
Handles RFC 2822 formatting and base64 encoding automatically — no need to construct raw messages manually.", + "inputSchema": { + "type": "object", + "properties": { + "to": { + "type": "string", + "description": "Recipient email address(es), comma-separated" + }, + "subject": { + "type": "string", + "description": "Email subject" + }, + "body": { + "type": "string", + "description": "Email body (plain text)" + }, + "cc": { + "type": "string", + "description": "CC email address(es), comma-separated" + }, + "bcc": { + "type": "string", + "description": "BCC email address(es), comma-separated" + } + }, + "required": ["to", "subject", "body"] + }, + "annotations": { + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + } + })); + tools.push(json!({ + "name": "gmail_reply", + "description": "Reply to an email within its existing thread. Automatically sets threading headers (In-Reply-To, References) and Re: subject prefix. Use gmail_users_messages_list or gmail_users_messages_get to find the message_id.", + "inputSchema": { + "type": "object", + "properties": { + "message_id": { + "type": "string", + "description": "Gmail message ID to reply to" + }, + "body": { + "type": "string", + "description": "Reply body (plain text)" + }, + "reply_all": { + "type": "boolean", + "description": "Reply to all recipients instead of just the sender (default: false)" + }, + "cc": { + "type": "string", + "description": "Additional CC email address(es), comma-separated" + }, + "bcc": { + "type": "string", + "description": "BCC email address(es), comma-separated" + } + }, + "required": ["message_id", "body"] + }, + "annotations": { + "readOnlyHint": false, + "destructiveHint": false, + "idempotentHint": false, + } + })); + } +} + +// --------------------------------------------------------------------------- +// Common MCP helper utilities +// --------------------------------------------------------------------------- + +/// Extract a required string parameter 
from MCP tool arguments. +fn get_required_str<'a>(args: &'a Value, name: &str) -> Result<&'a str, GwsError> { + args.get(name) + .and_then(|v| v.as_str()) + .filter(|s| !s.trim().is_empty()) + .ok_or_else(|| GwsError::Validation(format!("Missing '{name}' parameter"))) +} + +/// Extract an optional string parameter from MCP tool arguments. +fn get_optional_str<'a>(args: &'a Value, name: &str) -> Option<&'a str> { + args.get(name) + .and_then(|v| v.as_str()) + .filter(|s| !s.trim().is_empty()) +} + +/// Send a raw RFC 2822 message via Gmail API. +/// +/// Shared by `handle_gmail_send` and `handle_gmail_reply`. +async fn send_raw_gmail( + raw_message: &str, + thread_id: Option<&str>, + draft: bool, +) -> Result<Value, GwsError> { + let (api_name, version) = crate::parse_service_and_version(&["gmail".to_string()], "gmail")?; + let doc = crate::discovery::fetch_discovery_document(&api_name, &version).await?; + let method = crate::helpers::gmail::resolve_mail_method(&doc, draft)?; + let metadata = crate::helpers::gmail::mcp_build_send_metadata(thread_id, draft); + + let params = json!({ "userId": "me" }); + let params_str = params.to_string(); + + let scopes: Vec<&str> = crate::select_scope(&method.scopes).into_iter().collect(); + let (token, auth_method) = match crate::auth::get_token(&scopes).await { + Ok(t) => (Some(t), crate::executor::AuthMethod::OAuth), + Err(e) => return Err(GwsError::Auth(format!("Gmail auth failed: {e}"))), + }; + + let pagination = crate::executor::PaginationConfig { + page_all: false, + page_limit: 10, + page_delay_ms: 100, + }; + + let result = crate::executor::execute_method( + &doc, + method, + Some(&params_str), + metadata.as_deref(), + token.as_deref(), + auth_method, + None, + Some(crate::executor::UploadSource::Bytes { + data: raw_message.as_bytes(), + content_type: "message/rfc822", + }), + false, + &pagination, + None, + &crate::helpers::modelarmor::SanitizeMode::Warn, + &crate::formatter::OutputFormat::default(), + true, + ) + .await?; + + let
text_content = match result { + Some(val) => serde_json::to_string_pretty(&val).unwrap_or_else(|_| "[]".to_string()), + None => "Operation completed successfully.".to_string(), + }; + + Ok(json!({ + "content": [{ "type": "text", "text": text_content }], + "isError": false + })) +} + +// --------------------------------------------------------------------------- +// Gmail helper handlers +// --------------------------------------------------------------------------- + +async fn handle_gmail_send(arguments: &Value) -> Result<Value, GwsError> { + let to = get_required_str(arguments, "to")?; + let subject = get_required_str(arguments, "subject")?; + let body_text = get_required_str(arguments, "body")?; + let cc_str = get_optional_str(arguments, "cc"); + let bcc_str = get_optional_str(arguments, "bcc"); + + let to_mailboxes = crate::helpers::gmail::Mailbox::parse_list(to); + if to_mailboxes.is_empty() { + return Err(GwsError::Validation( + "'to' must specify at least one recipient".to_string(), + )); + } + let cc_mailboxes = cc_str.map(crate::helpers::gmail::Mailbox::parse_list); + let bcc_mailboxes = bcc_str.map(crate::helpers::gmail::Mailbox::parse_list); + + let mb = mail_builder::MessageBuilder::new() + .to(crate::helpers::gmail::to_mb_address_list(&to_mailboxes)) + .subject(subject); + + let mb = crate::helpers::gmail::apply_optional_headers( + mb, + None, + cc_mailboxes.as_deref(), + bcc_mailboxes.as_deref(), + ); + + let raw_message = crate::helpers::gmail::finalize_message(mb, body_text, false, &[])?; + + send_raw_gmail(&raw_message, None, false).await +} + +async fn handle_gmail_reply(arguments: &Value) -> Result<Value, GwsError> { + let message_id = get_required_str(arguments, "message_id")?; + let body_text = get_required_str(arguments, "body")?; + let reply_all = arguments + .get("reply_all") + .and_then(|v| v.as_bool()) + .unwrap_or(false); + let cc_str = get_optional_str(arguments, "cc"); + let bcc_str = get_optional_str(arguments, "bcc"); + + let cc_mailboxes =
cc_str.map(crate::helpers::gmail::Mailbox::parse_list); + let bcc_mailboxes = bcc_str.map(crate::helpers::gmail::Mailbox::parse_list); + + let (raw_message, thread_id) = crate::helpers::gmail::mcp_compose_reply( + message_id, + body_text, + reply_all, + cc_mailboxes.as_deref(), + bcc_mailboxes.as_deref(), + ) + .await?; + + send_raw_gmail(&raw_message, thread_id.as_deref(), false).await +} + +fn walk_resources(prefix: &str, resources: &HashMap<String, RestResource>, tools: &mut Vec<Value>) { + for (res_name, res) in resources { + let new_prefix = format!("{}_{}", prefix, res_name); + + for (method_name, method) in &res.methods { + let tool_name = format!("{}_{}", new_prefix, method_name); + let mut description = method.description.clone().unwrap_or_default(); + if description.is_empty() { + description = format!("Execute the {} Google API method", tool_name); + } + + let mut properties = serde_json::Map::new(); + properties.insert( + "params".to_string(), + json!({ + "type": "object", + "description": "Query or path parameters (e.g.
fileId, q, pageSize)" + }), + ); + if method.request.is_some() { + properties.insert( + "body".to_string(), + json!({ + "type": "object", + "description": "Request body API object" + }), + ); + } + if method.supports_media_upload { + properties.insert( + "upload".to_string(), + json!({ + "type": "string", + "description": "Local file path to upload as media content" + }), + ); + } + if method.parameters.contains_key("pageToken") { + properties.insert( + "page_all".to_string(), + json!({ + "type": "boolean", + "description": "Auto-paginate, returning all pages" + }), + ); + } + let input_schema = json!({ + "type": "object", + "properties": properties + }); + + tools.push(json!({ + "name": tool_name, + "description": description, + "inputSchema": input_schema, + "annotations": http_method_annotations(&method.http_method), + })); + } + + if !res.resources.is_empty() { + walk_resources(&new_prefix, &res.resources, tools); + } + } +} + +/// MCP tool annotations derived from HTTP method (upstream #260). +fn http_method_annotations(http_method: &str) -> Value { + let m = http_method.to_ascii_uppercase(); + json!({ + "readOnlyHint": m == "GET", + "destructiveHint": m == "DELETE", + "idempotentHint": matches!(m.as_str(), "GET" | "PUT" | "DELETE" | "HEAD"), + }) +} + +/// Greedy resolver for underscore-joined tool names (upstream #170). +/// +/// Resource names may themselves contain underscores (e.g. `role_assignments`), +/// so `split('_')` is ambiguous. This walks the Discovery tree and consumes +/// `resource_name_` prefixes greedily, supporting arbitrarily nested resources. 
+fn resolve_tool_path( + remaining: &str, + resources: &HashMap<String, RestResource>, +) -> Option<(Vec<String>, String)> { + for (res_name, res) in resources { + let prefix = format!("{}_", res_name); + if let Some(after) = remaining.strip_prefix(&prefix) { + if res.methods.contains_key(after) { + return Some((vec![res_name.clone()], after.to_string())); + } + if let Some((mut sub_path, method)) = resolve_tool_path(after, &res.resources) { + sub_path.insert(0, res_name.clone()); + return Some((sub_path, method)); + } + } + } + None +} + +async fn handle_discover(arguments: &Value, config: &ServerConfig) -> Result<Value, GwsError> { + let service = arguments + .get("service") + .and_then(|v| v.as_str()) + .ok_or_else(|| GwsError::Validation("Missing 'service' in gws_discover".to_string()))?; + + if !config.services.contains(&service.to_string()) { + return Err(GwsError::Validation(format!( + "Service '{}' is not enabled. Enabled: {}", + service, + config.services.join(", ") + ))); + } + + let (api_name, version) = crate::parse_service_and_version(&[service.to_string()], service)?; + let doc = crate::discovery::fetch_discovery_document(&api_name, &version).await?; + + let resource_name = arguments.get("resource").and_then(|v| v.as_str()); + let method_name = arguments.get("method").and_then(|v| v.as_str()); + + let result = match (resource_name, method_name) { + (None, _) => { + let mut resource_entries = Vec::new(); + collect_resource_entries(&doc.resources, "", &mut resource_entries); + json!({ "service": service, "resources": resource_entries }) + } + (Some(res), None) => { + let mut all_paths = Vec::new(); + collect_resource_paths(&doc.resources, "", &mut all_paths); + let resource = find_resource(&doc.resources, res).ok_or_else(|| { + GwsError::Validation(format!( + "Resource '{}' not found in {}.
Available: {}", + res, + service, + all_paths.join(", ") + )) + })?; + let methods: Vec<Value> = resource + .methods + .iter() + .map(|(name, m)| { + json!({ + "name": name, + "httpMethod": m.http_method, + "description": m.description.as_deref().unwrap_or("") + }) + }) + .collect(); + let sub_resources: Vec<&str> = resource.resources.keys().map(|s| s.as_str()).collect(); + let mut result = json!({ "service": service, "resource": res, "methods": methods }); + if !sub_resources.is_empty() { + result["subResources"] = json!(sub_resources); + } + result + } + (Some(res), Some(meth)) => { + let resource = find_resource(&doc.resources, res).ok_or_else(|| { + let mut all_paths = Vec::new(); + collect_resource_paths(&doc.resources, "", &mut all_paths); + GwsError::Validation(format!( + "Resource '{}' not found in {}. Available: {}", + res, + service, + all_paths.join(", ") + )) + })?; + let method = resource.methods.get(meth).ok_or_else(|| { + GwsError::Validation(format!( + "Method '{}' not found in {}.{}.
Available: {}", + meth, + service, + res, + resource + .methods + .keys() + .cloned() + .collect::<Vec<_>>() + .join(", ") + )) + })?; + let params: Vec<Value> = method + .parameters + .iter() + .map(|(name, p)| { + json!({ + "name": name, + "type": p.param_type.as_deref().unwrap_or("string"), + "required": p.required, + "location": p.location.as_deref().unwrap_or("query"), + "description": p.description.as_deref().unwrap_or("") + }) + }) + .collect(); + json!({ + "service": service, + "resource": res, + "method": meth, + "httpMethod": method.http_method, + "description": method.description.as_deref().unwrap_or(""), + "parameters": params, + "supportsMediaUpload": method.supports_media_upload, + "supportsMediaDownload": method.supports_media_download + }) + } + }; + + Ok(json!({ + "content": [{ "type": "text", "text": serde_json::to_string_pretty(&result).unwrap_or_default() }], + "isError": false + })) +} + +fn collect_resource_paths( + resources: &HashMap<String, RestResource>, + prefix: &str, + out: &mut Vec<String>, +) { + for (name, res) in resources { + let path = if prefix.is_empty() { + name.clone() + } else { + format!("{}.{}", prefix, name) + }; + out.push(path.clone()); + if !res.resources.is_empty() { + collect_resource_paths(&res.resources, &path, out); + } + } +} + +fn collect_resource_entries( + resources: &HashMap<String, RestResource>, + prefix: &str, + out: &mut Vec<Value>, +) { + for (name, res) in resources { + let path = if prefix.is_empty() { + name.clone() + } else { + format!("{}.{}", prefix, name) + }; + let methods: Vec<&str> = res.methods.keys().map(|s| s.as_str()).collect(); + if !methods.is_empty() { + out.push(json!({ + "name": path.clone(), + "methods": methods + })); + } + if !res.resources.is_empty() { + collect_resource_entries(&res.resources, &path, out); + } + } +} + +fn find_resource<'a>( + resources: &'a HashMap<String, RestResource>, + path: &str, +) -> Option<&'a RestResource> { + let mut segments = path.split('.'); + let first_segment = segments.next()?; + let mut current_res = resources.get(first_segment)?; + for
segment in segments { + current_res = current_res.resources.get(segment)?; + } + Some(current_res) +} + +pub(crate) async fn handle_tools_call( + params: &Value, + config: &ServerConfig, +) -> Result<Value, GwsError> { + let tool_name = params + .get("name") + .and_then(|n| n.as_str()) + .ok_or_else(|| GwsError::Validation("Missing 'name' in tools/call".to_string()))?; + + let default_args = json!({}); + let arguments = params.get("arguments").unwrap_or(&default_args); + + if tool_name.starts_with("workflow_") { + return Err(GwsError::Other(anyhow::anyhow!( + "Workflows are not yet fully implemented via MCP" + ))); + } + + if tool_name == "gws_discover" { + return handle_discover(arguments, config).await; + } + + // Helper tool dispatch. Check the `--helpers` gate BEFORE invoking a handler: + // the handlers have side effects (they send mail via the Gmail API), so they + // must never run when helper tools are disabled. + if matches!(tool_name, "gmail_send" | "gmail_reply") { + if !config.helpers { + return Err(GwsError::Validation( + "Helper tools are not enabled. Re-run with --helpers flag.".to_string(), + )); + } + return match tool_name { + "gmail_send" => handle_gmail_send(arguments).await, + _ => handle_gmail_reply(arguments).await, + }; + } + + // Compact mode + if config.tool_mode == ToolMode::Compact { + let resource_path = arguments + .get("resource") + .and_then(|v| v.as_str()) + .ok_or_else(|| GwsError::Validation("Missing 'resource' argument".to_string()))?; + let method_name = arguments + .get("method") + .and_then(|v| v.as_str()) + .ok_or_else(|| GwsError::Validation("Missing 'method' argument".to_string()))?; + + let svc_alias = tool_name; + if !config.services.contains(&svc_alias.to_string()) { + return Err(GwsError::Validation(format!( + "Service '{}' is not enabled in this MCP session", + svc_alias + ))); + } + + let (api_name, version) = + crate::parse_service_and_version(&[svc_alias.to_string()], svc_alias)?; + let doc = crate::discovery::fetch_discovery_document(&api_name, &version).await?; + + let resource = find_resource(&doc.resources, resource_path).ok_or_else(|| { + GwsError::Validation(format!( + "Resource '{}' not found in {}", + resource_path, svc_alias + )) + })?; + + let method = resource.methods.get(method_name).ok_or_else(|| { + GwsError::Validation(format!( + "Method '{}' not found in {}.{}", + method_name, svc_alias, resource_path + )) + })?; + + return execute_mcp_method(&doc, method, arguments).await; + } + + // Full mode — greedy parse that handles resource names containing underscores + // (upstream #170, e.g. `admin_role_assignments_list`).
+ let (svc_alias, remaining) = tool_name + .split_once('_') + .ok_or_else(|| GwsError::Validation(format!("Invalid API tool name: {}", tool_name)))?; + + if !config.services.contains(&svc_alias.to_string()) { + return Err(GwsError::Validation(format!( + "Service '{}' is not enabled in this MCP session", + svc_alias + ))); + } + + let (api_name, version) = + crate::parse_service_and_version(&[svc_alias.to_string()], svc_alias)?; + let doc = crate::discovery::fetch_discovery_document(&api_name, &version).await?; + + let (resource_path, method_name) = + resolve_tool_path(remaining, &doc.resources).ok_or_else(|| { + GwsError::Validation(format!( + "Tool '{}' not found in Discovery Document", + tool_name + )) + })?; + + let mut current_resources = &doc.resources; + let mut current_res = None; + for res_name in &resource_path { + let res = current_resources + .get(res_name) + .ok_or_else(|| GwsError::Validation(format!("Resource '{}' not found", res_name)))?; + current_res = Some(res); + current_resources = &res.resources; + } + let method = current_res + .and_then(|r| r.methods.get(&method_name)) + .ok_or_else(|| GwsError::Validation(format!("Method '{}' not found", method_name)))?; + + execute_mcp_method(&doc, method, arguments).await +} + +async fn execute_mcp_method( + doc: &crate::discovery::RestDescription, + method: &crate::discovery::RestMethod, + arguments: &Value, +) -> Result<Value, GwsError> { + let params_json_val = arguments.get("params"); + let params_str = params_json_val + .map(serde_json::to_string) + .transpose() + .map_err(|e| GwsError::Validation(format!("Failed to serialize params: {e}")))?; + + let body_json_val = arguments + .get("body") + .filter(|v| !v.as_object().is_some_and(|m| m.is_empty())); + let body_str = body_json_val + .map(serde_json::to_string) + .transpose() + .map_err(|e| GwsError::Validation(format!("Failed to serialize body: {e}")))?; + + let upload_source = if let Some(raw) = arguments + .get("upload") + .and_then(|v| v.as_str()) + .filter(|s|
!s.is_empty()) + { + let p = std::path::Path::new(raw); + if p.is_absolute() || p.components().any(|c| c == std::path::Component::ParentDir) { + return Err(GwsError::Validation(format!( + "Upload path '{}' is not allowed. Paths must be relative and within the current directory.", + raw + ))); + } + Some(crate::executor::UploadSource::File { + path: raw, + content_type: None, + }) + } else { + None + }; + + let page_all = arguments + .get("page_all") + .and_then(|v| v.as_bool()) + .unwrap_or(false); + + let pagination = crate::executor::PaginationConfig { + page_all, + page_limit: 100, + page_delay_ms: 100, + }; + + let scopes: Vec<&str> = crate::select_scope(&method.scopes).into_iter().collect(); + let (token, auth_method) = match crate::auth::get_token(&scopes).await { + Ok(t) => (Some(t), crate::executor::AuthMethod::OAuth), + Err(e) => { + eprintln!( + "[gws mcp] Warning: Authentication failed, proceeding without credentials: {e}" + ); + (None, crate::executor::AuthMethod::None) + } + }; + + let result = crate::executor::execute_method( + doc, + method, + params_str.as_deref(), + body_str.as_deref(), + token.as_deref(), + auth_method, + None, + upload_source, + false, + &pagination, + None, + &crate::helpers::modelarmor::SanitizeMode::Warn, + &crate::formatter::OutputFormat::default(), + true, + ) + .await?; + + let text_content = match result { + Some(val) => serde_json::to_string_pretty(&val).unwrap_or_else(|_| "[]".to_string()), + None => "Execution completed with no output.".to_string(), + }; + + Ok(json!({ + "content": [ + { + "type": "text", + "text": text_content + } + ], + "isError": false + })) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::discovery::{MethodParameter, RestDescription, RestMethod, RestResource}; + use std::collections::HashMap; + + #[test] + fn test_http_method_annotations_get() { + let a = http_method_annotations("GET"); + assert_eq!(a["readOnlyHint"], true); + assert_eq!(a["destructiveHint"], false); + 
assert_eq!(a["idempotentHint"], true); + } + + #[test] + fn test_http_method_annotations_delete() { + let a = http_method_annotations("DELETE"); + assert_eq!(a["readOnlyHint"], false); + assert_eq!(a["destructiveHint"], true); + assert_eq!(a["idempotentHint"], true); + } + + #[test] + fn test_http_method_annotations_post() { + let a = http_method_annotations("POST"); + assert_eq!(a["readOnlyHint"], false); + assert_eq!(a["destructiveHint"], false); + assert_eq!(a["idempotentHint"], false); + } + + #[test] + fn test_resolve_tool_path_simple() { + let mut methods = HashMap::new(); + methods.insert( + "list".to_string(), + RestMethod { + http_method: "GET".to_string(), + ..Default::default() + }, + ); + let mut resources = HashMap::new(); + resources.insert( + "files".to_string(), + RestResource { + methods, + resources: HashMap::new(), + }, + ); + + let (path, method) = resolve_tool_path("files_list", &resources).unwrap(); + assert_eq!(path, vec!["files".to_string()]); + assert_eq!(method, "list"); + } + + #[test] + fn test_resolve_tool_path_multi_word_resource() { + // Regression for upstream #170: resource name contains underscore. 
+ let mut methods = HashMap::new(); + methods.insert( + "list".to_string(), + RestMethod { + http_method: "GET".to_string(), + ..Default::default() + }, + ); + let mut resources = HashMap::new(); + resources.insert( + "role_assignments".to_string(), + RestResource { + methods, + resources: HashMap::new(), + }, + ); + + let (path, method) = resolve_tool_path("role_assignments_list", &resources).unwrap(); + assert_eq!(path, vec!["role_assignments".to_string()]); + assert_eq!(method, "list"); + } + + #[test] + fn test_resolve_tool_path_nested() { + let mut inner_methods = HashMap::new(); + inner_methods.insert( + "list".to_string(), + RestMethod { + http_method: "GET".to_string(), + ..Default::default() + }, + ); + let mut inner_resources = HashMap::new(); + inner_resources.insert( + "space_events".to_string(), + RestResource { + methods: inner_methods, + resources: HashMap::new(), + }, + ); + let mut outer_resources = HashMap::new(); + outer_resources.insert( + "spaces".to_string(), + RestResource { + methods: HashMap::new(), + resources: inner_resources, + }, + ); + + let (path, method) = + resolve_tool_path("spaces_space_events_list", &outer_resources).unwrap(); + assert_eq!(path, vec!["spaces".to_string(), "space_events".to_string()]); + assert_eq!(method, "list"); + } + + #[test] + fn test_resolve_tool_path_not_found() { + let resources = HashMap::new(); + assert!(resolve_tool_path("nonexistent_method", &resources).is_none()); + } + + fn mock_config_compact(services: Vec<&str>) -> ServerConfig { + ServerConfig { + services: services.into_iter().map(String::from).collect(), + workflows: false, + helpers: false, + tool_mode: ToolMode::Compact, + } + } + + fn mock_doc() -> RestDescription { + let mut params = HashMap::new(); + params.insert( + "fileId".to_string(), + MethodParameter { + param_type: Some("string".to_string()), + required: true, + location: Some("path".to_string()), + description: Some("The ID of the file".to_string()), + ..Default::default() + }, + 
); + params.insert( + "fields".to_string(), + MethodParameter { + param_type: Some("string".to_string()), + required: false, + location: Some("query".to_string()), + description: Some("Selector specifying fields".to_string()), + ..Default::default() + }, + ); + + let mut methods = HashMap::new(); + methods.insert( + "list".to_string(), + RestMethod { + http_method: "GET".to_string(), + path: "files".to_string(), + description: Some("Lists files".to_string()), + ..Default::default() + }, + ); + methods.insert( + "get".to_string(), + RestMethod { + http_method: "GET".to_string(), + path: "files/{fileId}".to_string(), + description: Some("Gets a file".to_string()), + parameters: params, + ..Default::default() + }, + ); + + let mut resources = HashMap::new(); + resources.insert( + "files".to_string(), + RestResource { + methods, + ..Default::default() + }, + ); + + RestDescription { + name: "drive".to_string(), + resources, + ..Default::default() + } + } + + fn mock_nested_doc() -> RestDescription { + let mut msg_methods = HashMap::new(); + msg_methods.insert( + "list".to_string(), + RestMethod { + http_method: "GET".to_string(), + path: "messages".to_string(), + description: Some("Lists messages".to_string()), + ..Default::default() + }, + ); + msg_methods.insert( + "get".to_string(), + RestMethod { + http_method: "GET".to_string(), + path: "messages/{id}".to_string(), + description: Some("Gets a message".to_string()), + ..Default::default() + }, + ); + let messages = RestResource { + methods: msg_methods, + ..Default::default() + }; + + let mut thread_methods = HashMap::new(); + thread_methods.insert( + "list".to_string(), + RestMethod { + http_method: "GET".to_string(), + path: "threads".to_string(), + ..Default::default() + }, + ); + let threads = RestResource { + methods: thread_methods, + ..Default::default() + }; + + let mut user_methods = HashMap::new(); + user_methods.insert( + "getProfile".to_string(), + RestMethod { + http_method: "GET".to_string(), + path: 
"users/{userId}/profile".to_string(), + ..Default::default() + }, + ); + + let mut sub_resources = HashMap::new(); + sub_resources.insert("messages".to_string(), messages); + sub_resources.insert("threads".to_string(), threads); + + let users = RestResource { + methods: user_methods, + resources: sub_resources, + }; + + let mut resources = HashMap::new(); + resources.insert("users".to_string(), users); + + RestDescription { + name: "gmail".to_string(), + resources, + ..Default::default() + } + } + + #[test] + fn test_find_resource_top_level() { + let doc = mock_doc(); + let res = find_resource(&doc.resources, "files"); + assert!(res.is_some()); + assert!(res.unwrap().methods.contains_key("list")); + } + + #[test] + fn test_find_resource_not_found() { + let doc = mock_doc(); + assert!(find_resource(&doc.resources, "missing").is_none()); + } + + #[test] + fn test_find_resource_nested_dot_path() { + let mut inner_methods = HashMap::new(); + inner_methods.insert( + "create".to_string(), + RestMethod { + http_method: "POST".to_string(), + path: "permissions".to_string(), + ..Default::default() + }, + ); + let inner = RestResource { + methods: inner_methods, + ..Default::default() + }; + let mut sub_resources = HashMap::new(); + sub_resources.insert("permissions".to_string(), inner); + + let outer = RestResource { + resources: sub_resources, + ..Default::default() + }; + let mut top = HashMap::new(); + top.insert("files".to_string(), outer); + + let res = find_resource(&top, "files.permissions"); + assert!(res.is_some()); + assert!(res.unwrap().methods.contains_key("create")); + } + + #[test] + fn test_collect_resource_paths_flat() { + let doc = mock_doc(); + let mut paths = Vec::new(); + collect_resource_paths(&doc.resources, "", &mut paths); + paths.sort(); + assert_eq!(paths, vec!["files"]); + } + + #[test] + fn test_collect_resource_paths_nested() { + let doc = mock_nested_doc(); + let mut paths = Vec::new(); + collect_resource_paths(&doc.resources, "", &mut paths); 
+ paths.sort(); + assert!(paths.contains(&"users".to_string())); + assert!(paths.contains(&"users.messages".to_string())); + } + + #[test] + fn test_collect_resource_entries_includes_nested() { + let doc = mock_nested_doc(); + let mut entries = Vec::new(); + collect_resource_entries(&doc.resources, "", &mut entries); + let names: Vec<&str> = entries.iter().filter_map(|e| e["name"].as_str()).collect(); + assert!(names.contains(&"users")); + assert!(names.contains(&"users.messages")); + } + + #[tokio::test] + async fn test_discover_service_not_enabled() { + let config = mock_config_compact(vec!["gmail"]); + let args = json!({"service": "drive"}); + + let result = handle_discover(&args, &config).await; + assert!(result.is_err()); + let err_msg = result.unwrap_err().to_string(); + assert!(err_msg.contains("not enabled")); + } + + #[tokio::test] + async fn test_discover_missing_service_arg() { + let config = mock_config_compact(vec!["drive"]); + let args = json!({}); + + let result = handle_discover(&args, &config).await; + assert!(result.is_err()); + let err_msg = result.unwrap_err().to_string(); + assert!(err_msg.contains("Missing 'service'")); + } + + #[test] + fn test_tool_mode_enum_equality() { + assert_eq!(ToolMode::Compact, ToolMode::Compact); + assert_ne!(ToolMode::Compact, ToolMode::Full); + } + + #[test] + fn test_cli_tool_mode_default_is_full() { + let cli = build_mcp_cli(); + let matches = cli.get_matches_from(vec!["mcp"]); + let mode = matches.get_one::<String>("tool-mode").unwrap(); + assert_eq!(mode, "full"); + } + + #[test] + fn test_cli_tool_mode_compact() { + let cli = build_mcp_cli(); + let matches = cli.get_matches_from(vec!["mcp", "--tool-mode", "compact"]); + let mode = matches.get_one::<String>("tool-mode").unwrap(); + assert_eq!(mode, "compact"); + } + + #[test] + fn test_cli_tool_mode_invalid_rejected() { + let cli = build_mcp_cli(); + let result = cli.try_get_matches_from(vec!["mcp", "--tool-mode", "invalid"]); + assert!(result.is_err()); + } + + #[test] + fn
test_append_workflow_tools_adds_five() { + let mut tools = Vec::new(); + append_workflow_tools(&mut tools); + assert_eq!(tools.len(), 5); + assert_eq!(tools[0]["name"], "workflow_standup_report"); + assert_eq!(tools[4]["name"], "workflow_file_announce"); + } + + #[test] + fn test_append_helper_tools_gmail_adds_send_and_reply() { + let services = vec!["gmail".to_string()]; + let mut tools = Vec::new(); + append_helper_tools(&services, &mut tools); + assert_eq!(tools.len(), 2); + assert_eq!(tools[0]["name"], "gmail_send"); + assert_eq!(tools[1]["name"], "gmail_reply"); + + let schema = &tools[0]["inputSchema"]; + let required = schema["required"].as_array().unwrap(); + let required_strs: Vec<&str> = required.iter().filter_map(|v| v.as_str()).collect(); + assert!(required_strs.contains(&"to")); + assert!(required_strs.contains(&"subject")); + assert!(required_strs.contains(&"body")); + + let props = schema["properties"].as_object().unwrap(); + assert!(props.contains_key("cc")); + assert!(props.contains_key("bcc")); + } + + #[test] + fn test_append_helper_tools_no_gmail_adds_nothing() { + let services = vec!["drive".to_string(), "calendar".to_string()]; + let mut tools = Vec::new(); + append_helper_tools(&services, &mut tools); + assert!(tools.is_empty()); + } + + #[tokio::test] + async fn test_handle_gmail_send_missing_to() { + let args = json!({"subject": "Hi", "body": "Hello"}); + let result = handle_gmail_send(&args).await; + assert!(result.is_err()); + assert!(result.unwrap_err().to_string().contains("'to'")); + } + + #[tokio::test] + async fn test_handle_gmail_send_missing_subject() { + let args = json!({"to": "a@b.com", "body": "Hello"}); + let result = handle_gmail_send(&args).await; + assert!(result.is_err()); + assert!(result.unwrap_err().to_string().contains("'subject'")); + } + + #[tokio::test] + async fn test_handle_gmail_send_missing_body() { + let args = json!({"to": "a@b.com", "subject": "Hi"}); + let result = handle_gmail_send(&args).await; + 
assert!(result.is_err()); + assert!(result.unwrap_err().to_string().contains("'body'")); + } + + #[tokio::test] + async fn test_gmail_send_rejected_when_helpers_disabled() { + let config = ServerConfig { + services: vec!["gmail".to_string()], + workflows: false, + helpers: false, + tool_mode: ToolMode::Full, + }; + let params = json!({ + "name": "gmail_send", + "arguments": {"to": "a@b.com", "subject": "Hi", "body": "Hello"} + }); + let result = handle_tools_call(&params, &config).await; + assert!(result.is_err()); + assert!(result.unwrap_err().to_string().contains("--helpers")); + } + + // --- gmail_reply tests --- + + #[test] + fn test_append_helper_tools_gmail_reply_schema() { + let services = vec!["gmail".to_string()]; + let mut tools = Vec::new(); + append_helper_tools(&services, &mut tools); + + let reply_tool = tools.iter().find(|t| t["name"] == "gmail_reply").unwrap(); + let schema = &reply_tool["inputSchema"]; + let required = schema["required"].as_array().unwrap(); + let required_strs: Vec<&str> = required.iter().filter_map(|v| v.as_str()).collect(); + assert!(required_strs.contains(&"message_id")); + assert!(required_strs.contains(&"body")); + + let props = schema["properties"].as_object().unwrap(); + assert!(props.contains_key("reply_all")); + assert!(props.contains_key("cc")); + assert!(props.contains_key("bcc")); + } + + #[tokio::test] + async fn test_handle_gmail_reply_missing_message_id() { + let args = json!({"body": "Thanks!"}); + let result = handle_gmail_reply(&args).await; + assert!(result.is_err()); + assert!(result.unwrap_err().to_string().contains("'message_id'")); + } + + #[tokio::test] + async fn test_handle_gmail_reply_missing_body() { + let args = json!({"message_id": "abc123"}); + let result = handle_gmail_reply(&args).await; + assert!(result.is_err()); + assert!(result.unwrap_err().to_string().contains("'body'")); + } + + #[tokio::test] + async fn test_gmail_reply_rejected_when_helpers_disabled() { + let config = ServerConfig { + services:
vec!["gmail".to_string()], + workflows: false, + helpers: false, + tool_mode: ToolMode::Full, + }; + let params = json!({ + "name": "gmail_reply", + "arguments": {"message_id": "abc123", "body": "Thanks!"} + }); + let result = handle_tools_call(&params, &config).await; + assert!(result.is_err()); + assert!(result.unwrap_err().to_string().contains("--helpers")); + } + + // --- Common utility tests --- + + #[test] + fn test_get_required_str_present() { + let args = json!({"name": "test"}); + assert_eq!(get_required_str(&args, "name").unwrap(), "test"); + } + + #[test] + fn test_get_required_str_missing() { + let args = json!({}); + assert!(get_required_str(&args, "name").is_err()); + } + + #[test] + fn test_get_required_str_empty() { + let args = json!({"name": " "}); + assert!(get_required_str(&args, "name").is_err()); + } + + #[test] + fn test_get_optional_str_present() { + let args = json!({"cc": "a@b.com"}); + assert_eq!(get_optional_str(&args, "cc"), Some("a@b.com")); + } + + #[test] + fn test_get_optional_str_missing() { + let args = json!({}); + assert_eq!(get_optional_str(&args, "cc"), None); + } + + #[test] + fn test_get_optional_str_empty() { + let args = json!({"cc": ""}); + assert_eq!(get_optional_str(&args, "cc"), None); + } +} diff --git a/crates/google-workspace-cli/src/setup.rs b/crates/google-workspace-cli/src/setup.rs index 9ebd19cb..f25f7ae6 100644 --- a/crates/google-workspace-cli/src/setup.rs +++ b/crates/google-workspace-cli/src/setup.rs @@ -235,8 +235,6 @@ pub enum ScopeClassification { Restricted, } -pub const PLATFORM_SCOPE: &str = "https://www.googleapis.com/auth/cloud-platform"; - /// A scope discovered from a Discovery Document. #[derive(Clone)] pub struct DiscoveredScope {