From 19cca2cb152bed1b8a3c4f1b613370adc8f42226 Mon Sep 17 00:00:00 2001 From: "openai-code-agent[bot]" <242516109+Codex@users.noreply.github.com> Date: Wed, 11 Mar 2026 21:30:34 +0000 Subject: [PATCH 1/4] Initial plan From cedb61f5ec2383ed3f4f295a77b233f3d9f9eca8 Mon Sep 17 00:00:00 2001 From: "openai-code-agent[bot]" <242516109+Codex@users.noreply.github.com> Date: Wed, 11 Mar 2026 21:36:03 +0000 Subject: [PATCH 2/4] feat: add pagerduty plugin support --- cmd/configure_scopes.go | 111 ++++++++ cmd/configure_scopes_test.go | 49 ++++ cmd/connection_types.go | 12 + cmd/connection_types_test.go | 52 ++++ docs/configure-connection.md | 15 +- docs/configure-scope.md | 24 +- internal/devlake/types.go | 516 ++++++++++++++++++----------------- 7 files changed, 509 insertions(+), 270 deletions(-) diff --git a/cmd/configure_scopes.go b/cmd/configure_scopes.go index 4a29805..fc96799 100644 --- a/cmd/configure_scopes.go +++ b/cmd/configure_scopes.go @@ -1200,6 +1200,117 @@ func putBitbucketScopes(client *devlake.Client, connID int, repos []*devlake.Bit return client.PutScopes("bitbucket", connID, &devlake.ScopeBatchRequest{Data: data}) } +// scopePagerDutyHandler is the ScopeHandler for the pagerduty plugin. +func scopePagerDutyHandler(client *devlake.Client, connID int, org, enterprise string, opts *ScopeOpts) (*devlake.BlueprintConnection, error) { + fmt.Println("\nšŸ” Listing PagerDuty services...") + var ( + allChildren []devlake.RemoteScopeChild + pageToken string + ) + for { + resp, err := client.ListRemoteScopes("pagerduty", connID, "", pageToken) + if err != nil { + return nil, fmt.Errorf("listing PagerDuty services: %w", err) + } + allChildren = append(allChildren, resp.Children...) 
+ if resp.NextPageToken == "" { + break + } + pageToken = resp.NextPageToken + } + + var ( + serviceLabels []string + serviceByLabel = make(map[string]*devlake.RemoteScopeChild) + ) + for i := range allChildren { + child := &allChildren[i] + if child.Type != "scope" { + continue + } + label := child.Name + if label == "" { + label = child.FullName + } + if label == "" { + label = child.ID + } + if child.ID != "" && label != child.ID { + label = fmt.Sprintf("%s (ID: %s)", label, child.ID) + } + if label == "" { + continue + } + serviceLabels = append(serviceLabels, label) + serviceByLabel[label] = child + } + + if len(serviceLabels) == 0 { + return nil, fmt.Errorf("no PagerDuty services found — verify your API key has access") + } + + fmt.Println() + selected := prompt.SelectMulti("Select PagerDuty services to track", serviceLabels) + if len(selected) == 0 { + return nil, fmt.Errorf("at least one PagerDuty service must be selected") + } + + fmt.Println("\nšŸ“ Adding PagerDuty service scopes...") + var ( + scopeData []any + blueprintScopes []devlake.BlueprintScope + ) + for _, label := range selected { + child := serviceByLabel[label] + scope := pagerDutyServiceFromChild(child, connID) + if scope.ID == "" || scope.Name == "" { + continue + } + scopeData = append(scopeData, scope) + blueprintScopes = append(blueprintScopes, devlake.BlueprintScope{ + ScopeID: scope.ID, + ScopeName: scope.Name, + }) + } + + if len(scopeData) == 0 { + return nil, fmt.Errorf("no valid PagerDuty services to add") + } + + if err := client.PutScopes("pagerduty", connID, &devlake.ScopeBatchRequest{Data: scopeData}); err != nil { + return nil, fmt.Errorf("failed to add PagerDuty scopes: %w", err) + } + fmt.Printf(" āœ… Added %d service scope(s)\n", len(scopeData)) + + return &devlake.BlueprintConnection{ + PluginName: "pagerduty", + ConnectionID: connID, + Scopes: blueprintScopes, + }, nil +} + +// pagerDutyServiceFromChild builds a PagerDuty service scope from a remote-scope child. 
+func pagerDutyServiceFromChild(child *devlake.RemoteScopeChild, connID int) devlake.PagerDutyServiceScope { + scope := devlake.PagerDutyServiceScope{ConnectionID: connID} + if child != nil && len(child.Data) > 0 { + _ = json.Unmarshal(child.Data, &scope) + } + if scope.ID == "" && child != nil { + scope.ID = child.ID + } + if scope.Name == "" && child != nil { + switch { + case child.Name != "": + scope.Name = child.Name + case child.FullName != "": + scope.Name = child.FullName + default: + scope.Name = child.ID + } + } + return scope +} + // scopeSonarQubeHandler is the ScopeHandler for the sonarqube plugin. func scopeSonarQubeHandler(client *devlake.Client, connID int, org, enterprise string, opts *ScopeOpts) (*devlake.BlueprintConnection, error) { fmt.Println("\nšŸ“‹ Fetching SonarQube projects...") diff --git a/cmd/configure_scopes_test.go b/cmd/configure_scopes_test.go index 897067a..9198525 100644 --- a/cmd/configure_scopes_test.go +++ b/cmd/configure_scopes_test.go @@ -125,6 +125,55 @@ func TestAzureDevOpsScopePayload_KeepsExistingFields(t *testing.T) { } } +func TestPagerDutyServiceFromChild_UsesData(t *testing.T) { + data, _ := json.Marshal(map[string]any{ + "id": "SVC123", + "name": "Checkout", + "url": "https://api.pagerduty.com/services/SVC123", + }) + child := &devlake.RemoteScopeChild{ + ID: "fallback-id", + Name: "fallback-name", + FullName: "fallback/full", + Data: data, + } + + scope := pagerDutyServiceFromChild(child, 101) + if scope.ID != "SVC123" { + t.Fatalf("ID = %q, want SVC123", scope.ID) + } + if scope.Name != "Checkout" { + t.Fatalf("Name = %q, want Checkout", scope.Name) + } + if scope.URL != "https://api.pagerduty.com/services/SVC123" { + t.Fatalf("URL = %q, want https://api.pagerduty.com/services/SVC123", scope.URL) + } + if scope.ConnectionID != 101 { + t.Fatalf("ConnectionID = %d, want 101", scope.ConnectionID) + } +} + +func TestPagerDutyServiceFromChild_Fallbacks(t *testing.T) { + child := &devlake.RemoteScopeChild{ + ID: 
"SVC999", + FullName: "Platform/Incident", + } + + scope := pagerDutyServiceFromChild(child, 7) + if scope.ID != "SVC999" { + t.Fatalf("ID = %q, want SVC999", scope.ID) + } + if scope.Name != "Platform/Incident" { + t.Fatalf("Name = %q, want Platform/Incident", scope.Name) + } + if scope.URL != "" { + t.Fatalf("URL = %q, want empty", scope.URL) + } + if scope.ConnectionID != 7 { + t.Fatalf("ConnectionID = %d, want 7", scope.ConnectionID) + } +} + func TestRunConfigureScopes_PluginFlag(t *testing.T) { makeCmd := func() (*cobra.Command, *ScopeOpts) { opts := &ScopeOpts{} diff --git a/cmd/connection_types.go b/cmd/connection_types.go index 98d9079..45c14ee 100644 --- a/cmd/connection_types.go +++ b/cmd/connection_types.go @@ -335,6 +335,18 @@ var connectionRegistry = []*ConnectionDef{ ScopeIDField: "boardId", HasRepoScopes: false, }, + { + Plugin: "pagerduty", + DisplayName: "PagerDuty", + Available: true, + Endpoint: "https://api.pagerduty.com/", + SupportsTest: true, + TokenPrompt: "PagerDuty API key", + EnvVarNames: []string{"PAGERDUTY_TOKEN", "PAGERDUTY_API_KEY"}, + EnvFileKeys: []string{"PAGERDUTY_TOKEN", "PAGERDUTY_API_KEY"}, + ScopeFunc: scopePagerDutyHandler, + ScopeIDField: "id", + }, { Plugin: "sonarqube", DisplayName: "SonarQube", diff --git a/cmd/connection_types_test.go b/cmd/connection_types_test.go index 54c31f8..6883bec 100644 --- a/cmd/connection_types_test.go +++ b/cmd/connection_types_test.go @@ -217,6 +217,58 @@ func TestJiraConnectionDef(t *testing.T) { } } +func TestConnectionRegistry_PagerDuty(t *testing.T) { + def := FindConnectionDef("pagerduty") + if def == nil { + t.Fatal("pagerduty plugin not found in registry") + } + + tests := []struct { + name string + got any + want any + }{ + {"Plugin", def.Plugin, "pagerduty"}, + {"DisplayName", def.DisplayName, "PagerDuty"}, + {"Available", def.Available, true}, + {"Endpoint", def.Endpoint, "https://api.pagerduty.com/"}, + {"SupportsTest", def.SupportsTest, true}, + {"ScopeIDField", def.ScopeIDField, 
"id"}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if tt.got != tt.want { + t.Errorf("%s: got %v, want %v", tt.name, tt.got, tt.want) + } + }) + } + + if def.ScopeFunc == nil { + t.Fatal("ScopeFunc should not be nil for PagerDuty") + } + if def.TokenPrompt != "PagerDuty API key" { + t.Errorf("TokenPrompt = %q, want %q", def.TokenPrompt, "PagerDuty API key") + } + + expectedEnv := []string{"PAGERDUTY_TOKEN", "PAGERDUTY_API_KEY"} + if len(def.EnvVarNames) != len(expectedEnv) { + t.Fatalf("EnvVarNames length = %d, want %d", len(def.EnvVarNames), len(expectedEnv)) + } + for i, v := range expectedEnv { + if def.EnvVarNames[i] != v { + t.Errorf("EnvVarNames[%d] = %q, want %q", i, def.EnvVarNames[i], v) + } + } + if len(def.EnvFileKeys) != len(expectedEnv) { + t.Fatalf("EnvFileKeys length = %d, want %d", len(def.EnvFileKeys), len(expectedEnv)) + } + for i, v := range expectedEnv { + if def.EnvFileKeys[i] != v { + t.Errorf("EnvFileKeys[%d] = %q, want %q", i, def.EnvFileKeys[i], v) + } + } +} + // TestAzureDevOpsRegistryEntry verifies the Azure DevOps plugin registry entry. 
func TestAzureDevOpsRegistryEntry(t *testing.T) { def := FindConnectionDef("azuredevops_go") diff --git a/docs/configure-connection.md b/docs/configure-connection.md index b32459c..37e4415 100644 --- a/docs/configure-connection.md +++ b/docs/configure-connection.md @@ -22,7 +22,7 @@ Aliases: `connections` | Flag | Default | Description | |------|---------|-------------| -| `--plugin` | *(interactive)* | Plugin to configure (`github`, `gh-copilot`, `jenkins`) | +| `--plugin` | *(interactive)* | Plugin to configure (`github`, `gh-copilot`, `gitlab`, `bitbucket`, `azuredevops_go`, `jenkins`, `jira`, `sonarqube`, `pagerduty`) | | `--org` | *(required for Copilot)* | GitHub organization slug | | `--enterprise` | | GitHub enterprise slug (for enterprise-level Copilot metrics) | | `--name` | `Plugin - org` | Connection display name | @@ -35,11 +35,12 @@ Aliases: `connections` ### Required PAT Scopes -| Plugin | Required Scopes | +| Plugin | Required Scopes | |--------|----------------| | `github` | `repo`, `read:org`, `read:user` | | `gh-copilot` | `manage_billing:copilot`, `read:org` | | `gh-copilot` (enterprise metrics) | + `read:enterprise` | +| `pagerduty` | PagerDuty API key (Token `token=`) | | `jenkins` | Username + API token/password (BasicAuth) | ### Token Resolution Order @@ -155,7 +156,7 @@ gh devlake configure connection test [--plugin ] [--id ] | Flag | Default | Description | |------|---------|-------------| -| `--plugin` | *(interactive)* | Plugin to test (`github`, `gh-copilot`, `jenkins`) | +| `--plugin` | *(interactive)* | Plugin to test (`github`, `gh-copilot`, `gitlab`, `bitbucket`, `azuredevops_go`, `jenkins`, `jira`, `sonarqube`, `pagerduty`) | | `--id` | `0` | Connection ID to test | Both flags are required for non-interactive mode. If either is omitted, the CLI prompts interactively. 
@@ -187,7 +188,7 @@ gh devlake configure connection update [--plugin ] [--id ] [update f | Flag | Default | Description | |------|---------|-------------| -| `--plugin` | *(interactive)* | Plugin slug (`github`, `gh-copilot`, `jenkins`) | +| `--plugin` | *(interactive)* | Plugin slug (`github`, `gh-copilot`, `gitlab`, `bitbucket`, `azuredevops_go`, `jenkins`, `jira`, `sonarqube`, `pagerduty`) | | `--id` | *(interactive)* | Connection ID to update | | `--token` | | New PAT for token rotation | | `--org` | | New organization slug | @@ -227,9 +228,9 @@ gh devlake configure connection delete [--plugin ] [--id ] ### Flags -| Flag | Default | Description | -|------|---------|-------------| -| `--plugin` | *(interactive)* | Plugin of the connection to delete | +| Flag | Default | Description | +|------|---------|-------------| +| `--plugin` | *(interactive)* | Plugin of the connection to delete (`github`, `gh-copilot`, `gitlab`, `bitbucket`, `azuredevops_go`, `jenkins`, `jira`, `sonarqube`, `pagerduty`) | | `--id` | *(interactive)* | ID of the connection to delete | | `--force` | `false` | Skip confirmation prompt | diff --git a/docs/configure-scope.md b/docs/configure-scope.md index df6de19..9370630 100644 --- a/docs/configure-scope.md +++ b/docs/configure-scope.md @@ -32,7 +32,7 @@ gh devlake configure scope add [flags] | Flag | Default | Description | |------|---------|-------------| -| `--plugin` | *(interactive or required)* | Plugin to configure (`github`, `gh-copilot`, `jenkins`) | +| `--plugin` | *(interactive or required)* | Plugin to configure (`github`, `gh-copilot`, `gitlab`, `bitbucket`, `azuredevops_go`, `jenkins`, `jira`, `sonarqube`, `pagerduty`) | | `--connection-id` | *(auto-detected)* | Override the connection ID to scope | | `--org` | *(required)* | GitHub organization slug | | `--enterprise` | | Enterprise slug (enables enterprise-level Copilot metrics) | @@ -116,12 +116,18 @@ gh devlake configure scope add 1. 
Lists Jenkins jobs via the remote-scope API (interactive picker) 2. Uses `--jobs` when provided instead of prompting 3. Calls `PUT /plugins/jenkins/connections/{id}/scopes` with the selected jobs - ---- - -## configure scope list - -List all scopes configured on a DevLake plugin connection. + +### What It Does (PagerDuty) + +1. Lists PagerDuty services via the remote-scope API (interactive picker) +2. Prompts to select one or more services +3. Calls `PUT /plugins/pagerduty/connections/{id}/scopes` with the selected services + +--- + +## configure scope list + +List all scopes configured on a DevLake plugin connection. ### Usage @@ -133,7 +139,7 @@ gh devlake configure scope list [--plugin ] [--connection-id ] | Flag | Default | Description | |------|---------|-------------| -| `--plugin` | *(interactive)* | Plugin to query (`github`, `gh-copilot`, `jenkins`) | +| `--plugin` | *(interactive)* | Plugin to query (`github`, `gh-copilot`, `gitlab`, `bitbucket`, `azuredevops_go`, `jenkins`, `jira`, `sonarqube`, `pagerduty`) | | `--connection-id` | *(interactive)* | Connection ID to list scopes for | **Flag mode:** both `--plugin` and `--connection-id` are required. 
@@ -181,7 +187,7 @@ gh devlake configure scope delete [--plugin ] [--connection-id ] [-- | Flag | Default | Description | |------|---------|-------------| -| `--plugin` | *(interactive)* | Plugin of the connection (`github`, `gh-copilot`, `jenkins`) | +| `--plugin` | *(interactive)* | Plugin of the connection (`github`, `gh-copilot`, `gitlab`, `bitbucket`, `azuredevops_go`, `jenkins`, `jira`, `sonarqube`, `pagerduty`) | | `--connection-id` | *(interactive)* | Connection ID | | `--scope-id` | *(interactive)* | Scope ID to delete | | `--force` | `false` | Skip confirmation prompt | diff --git a/internal/devlake/types.go b/internal/devlake/types.go index 3edaa9e..55fb596 100644 --- a/internal/devlake/types.go +++ b/internal/devlake/types.go @@ -1,254 +1,262 @@ -package devlake - -import ( - "encoding/json" - "strconv" -) - -// ScopeConfig represents a DevLake scope configuration (e.g., DORA settings). -type ScopeConfig struct { - ID int `json:"id,omitempty"` - Name string `json:"name"` - ConnectionID int `json:"connectionId"` - DeploymentPattern string `json:"deploymentPattern,omitempty"` - ProductionPattern string `json:"productionPattern,omitempty"` - IssueTypeIncident string `json:"issueTypeIncident,omitempty"` - Refdiff *RefdiffConfig `json:"refdiff,omitempty"` -} - -// RefdiffConfig holds refdiff tag-matching settings. -type RefdiffConfig struct { - TagsPattern string `json:"tagsPattern"` - TagsLimit int `json:"tagsLimit"` - TagsOrder string `json:"tagsOrder"` -} - -// GitHubRepoScope represents a GitHub repository scope entry for PUT /scopes. -type GitHubRepoScope struct { - GithubID int `json:"githubId"` - ConnectionID int `json:"connectionId"` - Name string `json:"name"` - FullName string `json:"fullName"` - HTMLURL string `json:"htmlUrl"` - CloneURL string `json:"cloneUrl"` - ScopeConfigID int `json:"scopeConfigId,omitempty"` -} - -// CopilotScope represents a Copilot organization or enterprise scope entry. 
-type CopilotScope struct { - ID string `json:"id"` - ConnectionID int `json:"connectionId"` - Organization string `json:"organization"` - Enterprise string `json:"enterprise,omitempty"` - Name string `json:"name"` - FullName string `json:"fullName"` -} - -// GitLabProjectScope represents a GitLab project scope entry for PUT /scopes. -type GitLabProjectScope struct { - GitlabID int `json:"gitlabId"` - ConnectionID int `json:"connectionId"` - Name string `json:"name"` - PathWithNamespace string `json:"pathWithNamespace"` - HTTPURLToRepo string `json:"httpUrlToRepo,omitempty"` - SSHURLToRepo string `json:"sshUrlToRepo,omitempty"` - ScopeConfigID int `json:"scopeConfigId,omitempty"` -} - -// JenkinsJobScope represents a Jenkins job scope entry. -type JenkinsJobScope struct { - ConnectionID int `json:"connectionId"` - FullName string `json:"fullName"` - Name string `json:"name"` -} - -// JiraBoardScope represents a Jira board scope entry for PUT /scopes. -type JiraBoardScope struct { - BoardID uint64 `json:"boardId"` - ConnectionID int `json:"connectionId"` - Name string `json:"name"` -} - -// BitbucketRepoScope represents a Bitbucket Cloud repository scope entry for PUT /scopes. -// BitbucketID holds the repository full name (workspace/repo-slug), which is the -// canonical scope identifier used by the DevLake Bitbucket plugin. -type BitbucketRepoScope struct { - BitbucketID string `json:"bitbucketId"` - ConnectionID int `json:"connectionId"` - Name string `json:"name"` - FullName string `json:"fullName"` - CloneURL string `json:"cloneUrl,omitempty"` - HTMLURL string `json:"htmlUrl,omitempty"` -} - -// SonarQubeProjectScope represents a SonarQube project scope entry for PUT /scopes. -type SonarQubeProjectScope struct { - ConnectionID int `json:"connectionId"` - ProjectKey string `json:"projectKey"` - Name string `json:"name"` -} - -// ScopeBatchRequest is the payload for PUT /scopes (batch upsert). 
-type ScopeBatchRequest struct { - Data []any `json:"data"` -} - -// ScopeListWrapper wraps a scope object as returned by the DevLake GET scopes API. -// The API nests each scope inside a "scope" key: { "scope": { ... } }. -// RawScope preserves the full plugin-specific payload for generic ID extraction. -type ScopeListWrapper struct { - RawScope json.RawMessage `json:"scope"` - parsed map[string]json.RawMessage // lazily populated by parseScope -} - -// parseScope unmarshals RawScope into a map exactly once per wrapper instance, -// caching the result so callers that invoke both ScopeName and ScopeFullName on -// the same item do not unmarshal the same JSON twice. -func (w *ScopeListWrapper) parseScope() map[string]json.RawMessage { - if w.parsed == nil { - var m map[string]json.RawMessage - if err := json.Unmarshal(w.RawScope, &m); err != nil || m == nil { - m = make(map[string]json.RawMessage) - } - w.parsed = m - } - return w.parsed -} - -// ScopeName returns the display name from the raw scope JSON (checks "fullName" then "name"). -// Empty string values are skipped so the next candidate key is tried. -// Parsing is cached via parseScope() so calling ScopeName and ScopeFullName on the -// same instance only unmarshals the JSON once. -func (w *ScopeListWrapper) ScopeName() string { - m := w.parseScope() - for _, key := range []string{"fullName", "name"} { - if v, ok := m[key]; ok { - var s string - if err := json.Unmarshal(v, &s); err == nil && s != "" { - return s - } - } - } - return "" -} - -// ScopeFullName returns the "fullName" field from the raw scope JSON, or "". -// An empty string value is treated as absent (returns ""). -func (w *ScopeListWrapper) ScopeFullName() string { - m := w.parseScope() - if v, ok := m["fullName"]; ok { - var s string - if err := json.Unmarshal(v, &s); err == nil && s != "" { - return s - } - } - return "" -} - -// ExtractScopeID extracts the scope ID from a raw JSON scope object using the -// given field name. 
It tries to decode the value as a string first, then as -// an integer (converted to its decimal string representation). -func ExtractScopeID(raw json.RawMessage, fieldName string) string { - if fieldName == "" { - return "" - } - var m map[string]json.RawMessage - if err := json.Unmarshal(raw, &m); err != nil { - return "" - } - v, ok := m[fieldName] - if !ok { - return "" - } - var s string - if err := json.Unmarshal(v, &s); err == nil && s != "" { - return s - } - var n int64 - if err := json.Unmarshal(v, &n); err == nil && n != 0 { - return strconv.FormatInt(n, 10) - } - return "" -} - -// ScopeListResponse is the response from GET /plugins/{plugin}/connections/{id}/scopes. -type ScopeListResponse struct { - Scopes []ScopeListWrapper `json:"scopes"` - Count int `json:"count"` -} - -// RemoteScopeChild represents one item (group or scope) from the remote-scope API. -type RemoteScopeChild struct { - Type string `json:"type"` // "group" or "scope" - ID string `json:"id"` - ParentID string `json:"parentId"` - Name string `json:"name"` - FullName string `json:"fullName"` - Data json.RawMessage `json:"data"` -} - -// RemoteScopeResponse is the response from GET /plugins/{plugin}/connections/{id}/remote-scopes. -type RemoteScopeResponse struct { - Children []RemoteScopeChild `json:"children"` - NextPageToken string `json:"nextPageToken"` -} - -// Project represents a DevLake project. -type Project struct { - Name string `json:"name"` - Description string `json:"description,omitempty"` - Metrics []ProjectMetric `json:"metrics,omitempty"` - Blueprint *Blueprint `json:"blueprint,omitempty"` -} - -// ProjectListResponse is the response from GET /projects. -type ProjectListResponse struct { - Count int `json:"count"` - Projects []Project `json:"projects"` -} - -// ProjectMetric enables a metric plugin for a project. 
-type ProjectMetric struct { - PluginName string `json:"pluginName"` - Enable bool `json:"enable"` -} - -// Blueprint represents a DevLake blueprint (returned from project creation or GET). -type Blueprint struct { - ID int `json:"id"` - Name string `json:"name,omitempty"` - Enable bool `json:"enable,omitempty"` - CronConfig string `json:"cronConfig,omitempty"` - TimeAfter string `json:"timeAfter,omitempty"` - Connections []BlueprintConnection `json:"connections,omitempty"` -} - -// BlueprintPatch is the payload for PATCH /blueprints/:id. -type BlueprintPatch struct { - Enable *bool `json:"enable,omitempty"` - Mode string `json:"mode,omitempty"` - CronConfig string `json:"cronConfig,omitempty"` - TimeAfter string `json:"timeAfter,omitempty"` - Connections []BlueprintConnection `json:"connections,omitempty"` -} - -// BlueprintConnection associates a plugin connection with scopes in a blueprint. -type BlueprintConnection struct { - PluginName string `json:"pluginName"` - ConnectionID int `json:"connectionId"` - Scopes []BlueprintScope `json:"scopes"` -} - -// BlueprintScope identifies a single scope within a blueprint connection. -type BlueprintScope struct { - ScopeID string `json:"scopeId"` - ScopeName string `json:"scopeName"` -} - -// Pipeline represents a DevLake pipeline (returned from trigger or GET). -type Pipeline struct { - ID int `json:"id"` - Status string `json:"status"` - FinishedTasks int `json:"finishedTasks"` - TotalTasks int `json:"totalTasks"` -} +package devlake + +import ( + "encoding/json" + "strconv" +) + +// ScopeConfig represents a DevLake scope configuration (e.g., DORA settings). 
+type ScopeConfig struct { + ID int `json:"id,omitempty"` + Name string `json:"name"` + ConnectionID int `json:"connectionId"` + DeploymentPattern string `json:"deploymentPattern,omitempty"` + ProductionPattern string `json:"productionPattern,omitempty"` + IssueTypeIncident string `json:"issueTypeIncident,omitempty"` + Refdiff *RefdiffConfig `json:"refdiff,omitempty"` +} + +// RefdiffConfig holds refdiff tag-matching settings. +type RefdiffConfig struct { + TagsPattern string `json:"tagsPattern"` + TagsLimit int `json:"tagsLimit"` + TagsOrder string `json:"tagsOrder"` +} + +// GitHubRepoScope represents a GitHub repository scope entry for PUT /scopes. +type GitHubRepoScope struct { + GithubID int `json:"githubId"` + ConnectionID int `json:"connectionId"` + Name string `json:"name"` + FullName string `json:"fullName"` + HTMLURL string `json:"htmlUrl"` + CloneURL string `json:"cloneUrl"` + ScopeConfigID int `json:"scopeConfigId,omitempty"` +} + +// CopilotScope represents a Copilot organization or enterprise scope entry. +type CopilotScope struct { + ID string `json:"id"` + ConnectionID int `json:"connectionId"` + Organization string `json:"organization"` + Enterprise string `json:"enterprise,omitempty"` + Name string `json:"name"` + FullName string `json:"fullName"` +} + +// GitLabProjectScope represents a GitLab project scope entry for PUT /scopes. +type GitLabProjectScope struct { + GitlabID int `json:"gitlabId"` + ConnectionID int `json:"connectionId"` + Name string `json:"name"` + PathWithNamespace string `json:"pathWithNamespace"` + HTTPURLToRepo string `json:"httpUrlToRepo,omitempty"` + SSHURLToRepo string `json:"sshUrlToRepo,omitempty"` + ScopeConfigID int `json:"scopeConfigId,omitempty"` +} + +// JenkinsJobScope represents a Jenkins job scope entry. +type JenkinsJobScope struct { + ConnectionID int `json:"connectionId"` + FullName string `json:"fullName"` + Name string `json:"name"` +} + +// JiraBoardScope represents a Jira board scope entry for PUT /scopes. 
+type JiraBoardScope struct { + BoardID uint64 `json:"boardId"` + ConnectionID int `json:"connectionId"` + Name string `json:"name"` +} + +// BitbucketRepoScope represents a Bitbucket Cloud repository scope entry for PUT /scopes. +// BitbucketID holds the repository full name (workspace/repo-slug), which is the +// canonical scope identifier used by the DevLake Bitbucket plugin. +type BitbucketRepoScope struct { + BitbucketID string `json:"bitbucketId"` + ConnectionID int `json:"connectionId"` + Name string `json:"name"` + FullName string `json:"fullName"` + CloneURL string `json:"cloneUrl,omitempty"` + HTMLURL string `json:"htmlUrl,omitempty"` +} + +// SonarQubeProjectScope represents a SonarQube project scope entry for PUT /scopes. +type SonarQubeProjectScope struct { + ConnectionID int `json:"connectionId"` + ProjectKey string `json:"projectKey"` + Name string `json:"name"` +} + +// PagerDutyServiceScope represents a PagerDuty service scope entry for PUT /scopes. +type PagerDutyServiceScope struct { + ConnectionID int `json:"connectionId"` + ID string `json:"id"` + Name string `json:"name"` + URL string `json:"url,omitempty"` +} + +// ScopeBatchRequest is the payload for PUT /scopes (batch upsert). +type ScopeBatchRequest struct { + Data []any `json:"data"` +} + +// ScopeListWrapper wraps a scope object as returned by the DevLake GET scopes API. +// The API nests each scope inside a "scope" key: { "scope": { ... } }. +// RawScope preserves the full plugin-specific payload for generic ID extraction. +type ScopeListWrapper struct { + RawScope json.RawMessage `json:"scope"` + parsed map[string]json.RawMessage // lazily populated by parseScope +} + +// parseScope unmarshals RawScope into a map exactly once per wrapper instance, +// caching the result so callers that invoke both ScopeName and ScopeFullName on +// the same item do not unmarshal the same JSON twice. 
+func (w *ScopeListWrapper) parseScope() map[string]json.RawMessage { + if w.parsed == nil { + var m map[string]json.RawMessage + if err := json.Unmarshal(w.RawScope, &m); err != nil || m == nil { + m = make(map[string]json.RawMessage) + } + w.parsed = m + } + return w.parsed +} + +// ScopeName returns the display name from the raw scope JSON (checks "fullName" then "name"). +// Empty string values are skipped so the next candidate key is tried. +// Parsing is cached via parseScope() so calling ScopeName and ScopeFullName on the +// same instance only unmarshals the JSON once. +func (w *ScopeListWrapper) ScopeName() string { + m := w.parseScope() + for _, key := range []string{"fullName", "name"} { + if v, ok := m[key]; ok { + var s string + if err := json.Unmarshal(v, &s); err == nil && s != "" { + return s + } + } + } + return "" +} + +// ScopeFullName returns the "fullName" field from the raw scope JSON, or "". +// An empty string value is treated as absent (returns ""). +func (w *ScopeListWrapper) ScopeFullName() string { + m := w.parseScope() + if v, ok := m["fullName"]; ok { + var s string + if err := json.Unmarshal(v, &s); err == nil && s != "" { + return s + } + } + return "" +} + +// ExtractScopeID extracts the scope ID from a raw JSON scope object using the +// given field name. It tries to decode the value as a string first, then as +// an integer (converted to its decimal string representation). +func ExtractScopeID(raw json.RawMessage, fieldName string) string { + if fieldName == "" { + return "" + } + var m map[string]json.RawMessage + if err := json.Unmarshal(raw, &m); err != nil { + return "" + } + v, ok := m[fieldName] + if !ok { + return "" + } + var s string + if err := json.Unmarshal(v, &s); err == nil && s != "" { + return s + } + var n int64 + if err := json.Unmarshal(v, &n); err == nil && n != 0 { + return strconv.FormatInt(n, 10) + } + return "" +} + +// ScopeListResponse is the response from GET /plugins/{plugin}/connections/{id}/scopes. 
+type ScopeListResponse struct { + Scopes []ScopeListWrapper `json:"scopes"` + Count int `json:"count"` +} + +// RemoteScopeChild represents one item (group or scope) from the remote-scope API. +type RemoteScopeChild struct { + Type string `json:"type"` // "group" or "scope" + ID string `json:"id"` + ParentID string `json:"parentId"` + Name string `json:"name"` + FullName string `json:"fullName"` + Data json.RawMessage `json:"data"` +} + +// RemoteScopeResponse is the response from GET /plugins/{plugin}/connections/{id}/remote-scopes. +type RemoteScopeResponse struct { + Children []RemoteScopeChild `json:"children"` + NextPageToken string `json:"nextPageToken"` +} + +// Project represents a DevLake project. +type Project struct { + Name string `json:"name"` + Description string `json:"description,omitempty"` + Metrics []ProjectMetric `json:"metrics,omitempty"` + Blueprint *Blueprint `json:"blueprint,omitempty"` +} + +// ProjectListResponse is the response from GET /projects. +type ProjectListResponse struct { + Count int `json:"count"` + Projects []Project `json:"projects"` +} + +// ProjectMetric enables a metric plugin for a project. +type ProjectMetric struct { + PluginName string `json:"pluginName"` + Enable bool `json:"enable"` +} + +// Blueprint represents a DevLake blueprint (returned from project creation or GET). +type Blueprint struct { + ID int `json:"id"` + Name string `json:"name,omitempty"` + Enable bool `json:"enable,omitempty"` + CronConfig string `json:"cronConfig,omitempty"` + TimeAfter string `json:"timeAfter,omitempty"` + Connections []BlueprintConnection `json:"connections,omitempty"` +} + +// BlueprintPatch is the payload for PATCH /blueprints/:id. 
+type BlueprintPatch struct { + Enable *bool `json:"enable,omitempty"` + Mode string `json:"mode,omitempty"` + CronConfig string `json:"cronConfig,omitempty"` + TimeAfter string `json:"timeAfter,omitempty"` + Connections []BlueprintConnection `json:"connections,omitempty"` +} + +// BlueprintConnection associates a plugin connection with scopes in a blueprint. +type BlueprintConnection struct { + PluginName string `json:"pluginName"` + ConnectionID int `json:"connectionId"` + Scopes []BlueprintScope `json:"scopes"` +} + +// BlueprintScope identifies a single scope within a blueprint connection. +type BlueprintScope struct { + ScopeID string `json:"scopeId"` + ScopeName string `json:"scopeName"` +} + +// Pipeline represents a DevLake pipeline (returned from trigger or GET). +type Pipeline struct { + ID int `json:"id"` + Status string `json:"status"` + FinishedTasks int `json:"finishedTasks"` + TotalTasks int `json:"totalTasks"` +} From 29e7cd3cfe9b898fcc5ca6bf872989366e774f4a Mon Sep 17 00:00:00 2001 From: "openai-code-agent[bot]" <242516109+Codex@users.noreply.github.com> Date: Wed, 11 Mar 2026 22:05:47 +0000 Subject: [PATCH 3/4] fix: harden pagerduty scope selection --- cmd/configure_scopes.go | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/cmd/configure_scopes.go b/cmd/configure_scopes.go index fc96799..aee0d5c 100644 --- a/cmd/configure_scopes.go +++ b/cmd/configure_scopes.go @@ -1228,6 +1228,9 @@ func scopePagerDutyHandler(client *devlake.Client, connID int, org, enterprise s if child.Type != "scope" { continue } + if child.ID == "" { + continue + } label := child.Name if label == "" { label = child.FullName @@ -1293,8 +1296,12 @@ func scopePagerDutyHandler(client *devlake.Client, connID int, org, enterprise s func pagerDutyServiceFromChild(child *devlake.RemoteScopeChild, connID int) devlake.PagerDutyServiceScope { scope := devlake.PagerDutyServiceScope{ConnectionID: connID} if child != nil && len(child.Data) > 0 { - _ = 
json.Unmarshal(child.Data, &scope) + if err := json.Unmarshal(child.Data, &scope); err != nil { + scope = devlake.PagerDutyServiceScope{ConnectionID: connID} + } } + // Enforce caller-provided connection ID even if the payload carried one. + scope.ConnectionID = connID if scope.ID == "" && child != nil { scope.ID = child.ID } From 4657518b3ae753a6b25b1fb0ee281b5c009b300a Mon Sep 17 00:00:00 2001 From: "openai-code-agent[bot]" <242516109+Codex@users.noreply.github.com> Date: Wed, 11 Mar 2026 22:39:08 +0000 Subject: [PATCH 4/4] chore: merge main and refresh pagerduty changes --- .github/workflows/docs-sync.lock.yml | 1212 ++++++++++++++++++++++++++ .github/workflows/docs-sync.md | 88 ++ README.md | 2 + cmd/configure_scope_add.go | 5 + cmd/configure_scopes.go | 138 ++- cmd/configure_scopes_test.go | 196 +++++ cmd/connection_types.go | 21 + cmd/connection_types_test.go | 76 ++ cmd/deploy.go | 4 + cmd/start.go | 325 +++++++ cmd/start_test.go | 256 ++++++ cmd/status.go | 7 + docs/configure-scope.md | 53 +- docs/day-2.md | 19 + docs/start.md | 114 +++ internal/azure/cli.go | 5 + internal/devlake/types.go | 6 + 17 files changed, 2495 insertions(+), 32 deletions(-) create mode 100644 .github/workflows/docs-sync.lock.yml create mode 100644 .github/workflows/docs-sync.md create mode 100644 cmd/start.go create mode 100644 cmd/start_test.go create mode 100644 docs/start.md diff --git a/.github/workflows/docs-sync.lock.yml b/.github/workflows/docs-sync.lock.yml new file mode 100644 index 0000000..08d344d --- /dev/null +++ b/.github/workflows/docs-sync.lock.yml @@ -0,0 +1,1212 @@ +# +# ___ _ _ +# / _ \ | | (_) +# | |_| | __ _ ___ _ __ | |_ _ ___ +# | _ |/ _` |/ _ \ '_ \| __| |/ __| +# | | | | (_| | __/ | | | |_| | (__ +# \_| |_/\__, |\___|_| |_|\__|_|\___| +# __/ | +# _ _ |___/ +# | | | | / _| | +# | | | | ___ _ __ _ __| |_| | _____ ____ +# | |/\| |/ _ \ '__| |/ /| _| |/ _ \ \ /\ / / ___| +# \ /\ / (_) | | | | ( | | | | (_) \ V V /\__ \ +# \/ \/ \___/|_| |_|\_\|_| |_|\___/ \_/\_/ 
|___/ +# +# This file was automatically generated by gh-aw (v0.57.2). DO NOT EDIT. +# +# To update this file, edit the corresponding .md file and run: +# gh aw compile +# Not all edits will cause changes to this file. +# +# For more information: https://github.github.com/gh-aw/introduction/overview/ +# +# Identifies documentation files that are out of sync with recent code changes and opens a pull request with the necessary updates. +# +# gh-aw-metadata: {"schema_version":"v2","frontmatter_hash":"5e695fd505770a6435db5704faa5bc12679b0e6394d5ae6ebff13d756219d3f1","compiler_version":"v0.57.2","strict":true} + +name: "Documentation Sync" +"on": + schedule: + - cron: "33 5 * * 1-5" + # Friendly format: daily on weekdays (scattered) + workflow_dispatch: + +permissions: {} + +concurrency: + group: "gh-aw-${{ github.workflow }}" + +run-name: "Documentation Sync" + +jobs: + activation: + runs-on: ubuntu-slim + permissions: + contents: read + outputs: + comment_id: "" + comment_repo: "" + model: ${{ steps.generate_aw_info.outputs.model }} + secret_verification_result: ${{ steps.validate-secret.outputs.verification_result }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@v0.57.2 + with: + destination: /opt/gh-aw/actions + - name: Generate agentic run info + id: generate_aw_info + env: + GH_AW_INFO_ENGINE_ID: "copilot" + GH_AW_INFO_ENGINE_NAME: "GitHub Copilot CLI" + GH_AW_INFO_MODEL: "gpt-4.1" + GH_AW_INFO_VERSION: "" + GH_AW_INFO_AGENT_VERSION: "latest" + GH_AW_INFO_CLI_VERSION: "v0.57.2" + GH_AW_INFO_WORKFLOW_NAME: "Documentation Sync" + GH_AW_INFO_EXPERIMENTAL: "false" + GH_AW_INFO_SUPPORTS_TOOLS_ALLOWLIST: "true" + GH_AW_INFO_STAGED: "false" + GH_AW_INFO_ALLOWED_DOMAINS: '["go"]' + GH_AW_INFO_FIREWALL_ENABLED: "true" + GH_AW_INFO_AWF_VERSION: "v0.23.0" + GH_AW_INFO_AWMG_VERSION: "" + GH_AW_INFO_FIREWALL_TYPE: "squid" + GH_AW_COMPILED_STRICT: "true" + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const 
{ main } = require('/opt/gh-aw/actions/generate_aw_info.cjs'); + await main(core, context); + - name: Validate COPILOT_GITHUB_TOKEN secret + id: validate-secret + run: /opt/gh-aw/actions/validate_multi_secret.sh COPILOT_GITHUB_TOKEN 'GitHub Copilot CLI' https://github.github.com/gh-aw/reference/engines/#github-copilot-default + env: + COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + - name: Checkout .github and .agents folders + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + sparse-checkout: | + .github + .agents + sparse-checkout-cone-mode: true + fetch-depth: 1 + - name: Check workflow file timestamps + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_WORKFLOW_FILE: "docs-sync.lock.yml" + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/check_workflow_timestamp_api.cjs'); + await main(); + - name: Create prompt with built-in context + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GH_AW_GITHUB_ACTOR: ${{ github.actor }} + GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} + GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} + run: | + bash /opt/gh-aw/actions/create_prompt_first.sh + { + cat << 'GH_AW_PROMPT_EOF' + + GH_AW_PROMPT_EOF + cat "/opt/gh-aw/prompts/xpia.md" + cat "/opt/gh-aw/prompts/temp_folder_prompt.md" + cat "/opt/gh-aw/prompts/markdown.md" + cat "/opt/gh-aw/prompts/safe_outputs_prompt.md" + cat << 
'GH_AW_PROMPT_EOF' + + Tools: create_pull_request, missing_tool, missing_data, noop + GH_AW_PROMPT_EOF + cat "/opt/gh-aw/prompts/safe_outputs_create_pull_request.md" + cat << 'GH_AW_PROMPT_EOF' + + + The following GitHub context information is available for this workflow: + {{#if __GH_AW_GITHUB_ACTOR__ }} + - **actor**: __GH_AW_GITHUB_ACTOR__ + {{/if}} + {{#if __GH_AW_GITHUB_REPOSITORY__ }} + - **repository**: __GH_AW_GITHUB_REPOSITORY__ + {{/if}} + {{#if __GH_AW_GITHUB_WORKSPACE__ }} + - **workspace**: __GH_AW_GITHUB_WORKSPACE__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ }} + - **issue-number**: #__GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ }} + - **discussion-number**: #__GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ }} + - **pull-request-number**: #__GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_COMMENT_ID__ }} + - **comment-id**: __GH_AW_GITHUB_EVENT_COMMENT_ID__ + {{/if}} + {{#if __GH_AW_GITHUB_RUN_ID__ }} + - **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__ + {{/if}} + + + GH_AW_PROMPT_EOF + cat << 'GH_AW_PROMPT_EOF' + + GH_AW_PROMPT_EOF + cat << 'GH_AW_PROMPT_EOF' + {{#runtime-import .github/workflows/docs-sync.md}} + GH_AW_PROMPT_EOF + } > "$GH_AW_PROMPT" + - name: Interpolate variables and render templates + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/interpolate_prompt.cjs'); + await main(); + - name: Substitute placeholders + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_GITHUB_ACTOR: ${{ github.actor }} + GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ 
github.event.comment.id }} + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} + GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + + const substitutePlaceholders = require('/opt/gh-aw/actions/substitute_placeholders.cjs'); + + // Call the substitution function + return await substitutePlaceholders({ + file: process.env.GH_AW_PROMPT, + substitutions: { + GH_AW_GITHUB_ACTOR: process.env.GH_AW_GITHUB_ACTOR, + GH_AW_GITHUB_EVENT_COMMENT_ID: process.env.GH_AW_GITHUB_EVENT_COMMENT_ID, + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: process.env.GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER, + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: process.env.GH_AW_GITHUB_EVENT_ISSUE_NUMBER, + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: process.env.GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER, + GH_AW_GITHUB_REPOSITORY: process.env.GH_AW_GITHUB_REPOSITORY, + GH_AW_GITHUB_RUN_ID: process.env.GH_AW_GITHUB_RUN_ID, + GH_AW_GITHUB_WORKSPACE: process.env.GH_AW_GITHUB_WORKSPACE + } + }); + - name: Validate prompt placeholders + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + run: bash /opt/gh-aw/actions/validate_prompt_placeholders.sh + - name: Print prompt + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + run: bash /opt/gh-aw/actions/print_prompt_summary.sh + - name: Upload activation artifact + if: success() + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 + with: + name: activation + path: | + /tmp/gh-aw/aw_info.json + /tmp/gh-aw/aw-prompts/prompt.txt + retention-days: 1 + + agent: + needs: activation + runs-on: ubuntu-latest + permissions: + contents: read + issues: read 
+ pull-requests: read + concurrency: + group: "gh-aw-copilot-${{ github.workflow }}" + env: + DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} + GH_AW_ASSETS_ALLOWED_EXTS: "" + GH_AW_ASSETS_BRANCH: "" + GH_AW_ASSETS_MAX_SIZE_KB: 0 + GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs + GH_AW_SAFE_OUTPUTS: /opt/gh-aw/safeoutputs/outputs.jsonl + GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /opt/gh-aw/safeoutputs/config.json + GH_AW_SAFE_OUTPUTS_TOOLS_PATH: /opt/gh-aw/safeoutputs/tools.json + GH_AW_WORKFLOW_ID_SANITIZED: docssync + outputs: + checkout_pr_success: ${{ steps.checkout-pr.outputs.checkout_pr_success || 'true' }} + detection_conclusion: ${{ steps.detection_conclusion.outputs.conclusion }} + detection_success: ${{ steps.detection_conclusion.outputs.success }} + has_patch: ${{ steps.collect_output.outputs.has_patch }} + inference_access_error: ${{ steps.detect-inference-error.outputs.inference_access_error || 'false' }} + model: ${{ needs.activation.outputs.model }} + output: ${{ steps.collect_output.outputs.output }} + output_types: ${{ steps.collect_output.outputs.output_types }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@v0.57.2 + with: + destination: /opt/gh-aw/actions + - name: Checkout repository + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + - name: Create gh-aw temp directory + run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh + - name: Configure Git credentials + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + git config --global am.keepcr true + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard 
GitHub Actions identity" + - name: Checkout PR branch + id: checkout-pr + if: | + (github.event.pull_request) || (github.event.issue.pull_request) + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + with: + github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/checkout_pr_branch.cjs'); + await main(); + - name: Install GitHub Copilot CLI + run: /opt/gh-aw/actions/install_copilot_cli.sh latest + - name: Install awf binary + run: bash /opt/gh-aw/actions/install_awf_binary.sh v0.23.0 + - name: Determine automatic lockdown mode for GitHub MCP Server + id: determine-automatic-lockdown + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} + GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} + with: + script: | + const determineAutomaticLockdown = require('/opt/gh-aw/actions/determine_automatic_lockdown.cjs'); + await determineAutomaticLockdown(github, context, core); + - name: Download container images + run: bash /opt/gh-aw/actions/download_docker_images.sh ghcr.io/github/gh-aw-firewall/agent:0.23.0 ghcr.io/github/gh-aw-firewall/api-proxy:0.23.0 ghcr.io/github/gh-aw-firewall/squid:0.23.0 ghcr.io/github/gh-aw-mcpg:v0.1.8 node:lts-alpine + - name: Write Safe Outputs Config + run: | + mkdir -p /opt/gh-aw/safeoutputs + mkdir -p /tmp/gh-aw/safeoutputs + mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs + cat > /opt/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_EOF' + {"create_pull_request":{"expires":168,"max":1,"title_prefix":"[docs-sync] 
"},"missing_data":{},"missing_tool":{},"noop":{"max":1}} + GH_AW_SAFE_OUTPUTS_CONFIG_EOF + cat > /opt/gh-aw/safeoutputs/tools.json << 'GH_AW_SAFE_OUTPUTS_TOOLS_EOF' + [ + { + "description": "Create a new GitHub pull request to propose code changes. Use this after making file edits to submit them for review and merging. The PR will be created from the current branch with your committed changes. For code review comments on an existing PR, use create_pull_request_review_comment instead. CONSTRAINTS: Maximum 1 pull request(s) can be created. Title will be prefixed with \"[docs-sync] \". Labels [\"documentation\"] will be automatically added.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "body": { + "description": "Detailed PR description in Markdown. Include what changes were made, why, testing notes, and any breaking changes. Do NOT repeat the title as a heading.", + "type": "string" + }, + "branch": { + "description": "Source branch name containing the changes. If omitted, uses the current working branch.", + "type": "string" + }, + "draft": { + "description": "Whether to create the PR as a draft. Draft PRs cannot be merged until marked as ready for review. Use mark_pull_request_as_ready_for_review to convert a draft PR. Default: true.", + "type": "boolean" + }, + "integrity": { + "description": "Trustworthiness level of the message source (e.g., \"low\", \"medium\", \"high\").", + "type": "string" + }, + "labels": { + "description": "Labels to categorize the PR (e.g., 'enhancement', 'bugfix'). Labels must exist in the repository.", + "items": { + "type": "string" + }, + "type": "array" + }, + "repo": { + "description": "Target repository in 'owner/repo' format. For multi-repo workflows where the target repo differs from the workflow repo, this must match a repo in the allowed-repos list or the configured target-repo. If omitted, defaults to the configured target-repo (from safe-outputs config), NOT the workflow repository. 
In most cases, you should omit this parameter and let the system use the configured default.", + "type": "string" + }, + "secrecy": { + "description": "Confidentiality level of the message content (e.g., \"public\", \"internal\", \"private\").", + "type": "string" + }, + "title": { + "description": "Concise PR title describing the changes. Follow repository conventions (e.g., conventional commits). The title appears as the main heading.", + "type": "string" + } + }, + "required": [ + "title", + "body" + ], + "type": "object" + }, + "name": "create_pull_request" + }, + { + "description": "Report that a tool or capability needed to complete the task is not available, or share any information you deem important about missing functionality or limitations. Use this when you cannot accomplish what was requested because the required functionality is missing or access is restricted.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "alternatives": { + "description": "Any workarounds, manual steps, or alternative approaches the user could take (max 256 characters).", + "type": "string" + }, + "integrity": { + "description": "Trustworthiness level of the message source (e.g., \"low\", \"medium\", \"high\").", + "type": "string" + }, + "reason": { + "description": "Explanation of why this tool is needed or what information you want to share about the limitation (max 256 characters).", + "type": "string" + }, + "secrecy": { + "description": "Confidentiality level of the message content (e.g., \"public\", \"internal\", \"private\").", + "type": "string" + }, + "tool": { + "description": "Optional: Name or description of the missing tool or capability (max 128 characters). Be specific about what functionality is needed.", + "type": "string" + } + }, + "required": [ + "reason" + ], + "type": "object" + }, + "name": "missing_tool" + }, + { + "description": "Log a transparency message when no significant actions are needed. 
Use this to confirm workflow completion and provide visibility when analysis is complete but no changes or outputs are required (e.g., 'No issues found', 'All checks passed'). This ensures the workflow produces human-visible output even when no other actions are taken.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "integrity": { + "description": "Trustworthiness level of the message source (e.g., \"low\", \"medium\", \"high\").", + "type": "string" + }, + "message": { + "description": "Status or completion message to log. Should explain what was analyzed and the outcome (e.g., 'Code review complete - no issues found', 'Analysis complete - all tests passing').", + "type": "string" + }, + "secrecy": { + "description": "Confidentiality level of the message content (e.g., \"public\", \"internal\", \"private\").", + "type": "string" + } + }, + "required": [ + "message" + ], + "type": "object" + }, + "name": "noop" + }, + { + "description": "Report that data or information needed to complete the task is not available. Use this when you cannot accomplish what was requested because required data, context, or information is missing.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "alternatives": { + "description": "Any workarounds, manual steps, or alternative approaches the user could take (max 256 characters).", + "type": "string" + }, + "context": { + "description": "Additional context about the missing data or where it should come from (max 256 characters).", + "type": "string" + }, + "data_type": { + "description": "Type or description of the missing data or information (max 128 characters). 
Be specific about what data is needed.", + "type": "string" + }, + "integrity": { + "description": "Trustworthiness level of the message source (e.g., \"low\", \"medium\", \"high\").", + "type": "string" + }, + "reason": { + "description": "Explanation of why this data is needed to complete the task (max 256 characters).", + "type": "string" + }, + "secrecy": { + "description": "Confidentiality level of the message content (e.g., \"public\", \"internal\", \"private\").", + "type": "string" + } + }, + "required": [], + "type": "object" + }, + "name": "missing_data" + } + ] + GH_AW_SAFE_OUTPUTS_TOOLS_EOF + cat > /opt/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_EOF' + { + "create_pull_request": { + "defaultMax": 1, + "fields": { + "body": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 65000 + }, + "branch": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 256 + }, + "draft": { + "type": "boolean" + }, + "labels": { + "type": "array", + "itemType": "string", + "itemSanitize": true, + "itemMaxLength": 128 + }, + "repo": { + "type": "string", + "maxLength": 256 + }, + "title": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 128 + } + } + }, + "missing_data": { + "defaultMax": 20, + "fields": { + "alternatives": { + "type": "string", + "sanitize": true, + "maxLength": 256 + }, + "context": { + "type": "string", + "sanitize": true, + "maxLength": 256 + }, + "data_type": { + "type": "string", + "sanitize": true, + "maxLength": 128 + }, + "reason": { + "type": "string", + "sanitize": true, + "maxLength": 256 + } + } + }, + "missing_tool": { + "defaultMax": 20, + "fields": { + "alternatives": { + "type": "string", + "sanitize": true, + "maxLength": 512 + }, + "reason": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 256 + }, + "tool": { + "type": "string", + "sanitize": true, + "maxLength": 128 + } + } + }, + "noop": { + "defaultMax": 1, 
+ "fields": { + "message": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 65000 + } + } + } + } + GH_AW_SAFE_OUTPUTS_VALIDATION_EOF + - name: Generate Safe Outputs MCP Server Config + id: safe-outputs-config + run: | + # Generate a secure random API key (360 bits of entropy, 40+ chars) + # Mask immediately to prevent timing vulnerabilities + API_KEY=$(openssl rand -base64 45 | tr -d '/+=') + echo "::add-mask::${API_KEY}" + + PORT=3001 + + # Set outputs for next steps + { + echo "safe_outputs_api_key=${API_KEY}" + echo "safe_outputs_port=${PORT}" + } >> "$GITHUB_OUTPUT" + + echo "Safe Outputs MCP server will run on port ${PORT}" + + - name: Start Safe Outputs MCP HTTP Server + id: safe-outputs-start + env: + DEBUG: '*' + GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-config.outputs.safe_outputs_port }} + GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-config.outputs.safe_outputs_api_key }} + GH_AW_SAFE_OUTPUTS_TOOLS_PATH: /opt/gh-aw/safeoutputs/tools.json + GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /opt/gh-aw/safeoutputs/config.json + GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs + run: | + # Environment variables are set above to prevent template injection + export DEBUG + export GH_AW_SAFE_OUTPUTS_PORT + export GH_AW_SAFE_OUTPUTS_API_KEY + export GH_AW_SAFE_OUTPUTS_TOOLS_PATH + export GH_AW_SAFE_OUTPUTS_CONFIG_PATH + export GH_AW_MCP_LOG_DIR + + bash /opt/gh-aw/actions/start_safe_outputs_server.sh + + - name: Start MCP Gateway + id: start-mcp-gateway + env: + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-start.outputs.api_key }} + GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-start.outputs.port }} + GITHUB_MCP_LOCKDOWN: ${{ steps.determine-automatic-lockdown.outputs.lockdown == 'true' && '1' || '0' }} + GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + run: | + set -eo pipefail + mkdir -p 
/tmp/gh-aw/mcp-config + + # Export gateway environment variables for MCP config and gateway script + export MCP_GATEWAY_PORT="80" + export MCP_GATEWAY_DOMAIN="host.docker.internal" + MCP_GATEWAY_API_KEY=$(openssl rand -base64 45 | tr -d '/+=') + echo "::add-mask::${MCP_GATEWAY_API_KEY}" + export MCP_GATEWAY_API_KEY + export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" + mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD="524288" + export DEBUG="*" + + export GH_AW_ENGINE="copilot" + export GITHUB_PERSONAL_ACCESS_TOKEN="$GITHUB_MCP_SERVER_TOKEN" + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_PERSONAL_ACCESS_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.8' + + mkdir -p /home/runner/.copilot + cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh + { + "mcpServers": { + "github": { + "type": "http", + 
"url": "https://api.githubcopilot.com/mcp/", + "headers": { + "Authorization": "Bearer \${GITHUB_PERSONAL_ACCESS_TOKEN}", + "X-MCP-Lockdown": "$([ "$GITHUB_MCP_LOCKDOWN" = "1" ] && echo true || echo false)", + "X-MCP-Readonly": "true", + "X-MCP-Toolsets": "context,repos,issues,pull_requests" + }, + "env": { + "GITHUB_PERSONAL_ACCESS_TOKEN": "\${GITHUB_MCP_SERVER_TOKEN}" + } + }, + "safeoutputs": { + "type": "http", + "url": "http://host.docker.internal:$GH_AW_SAFE_OUTPUTS_PORT", + "headers": { + "Authorization": "\${GH_AW_SAFE_OUTPUTS_API_KEY}" + } + } + }, + "gateway": { + "port": $MCP_GATEWAY_PORT, + "domain": "${MCP_GATEWAY_DOMAIN}", + "apiKey": "${MCP_GATEWAY_API_KEY}", + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + } + } + GH_AW_MCP_CONFIG_EOF + - name: Download activation artifact + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8 + with: + name: activation + path: /tmp/gh-aw + - name: Clean git credentials + run: bash /opt/gh-aw/actions/clean_git_credentials.sh + - name: Execute GitHub Copilot CLI + id: agentic_execution + # Copilot CLI tool arguments (sorted): + # --allow-tool github + # --allow-tool safeoutputs + # --allow-tool shell(cat) + # --allow-tool shell(date) + # --allow-tool shell(echo) + # --allow-tool shell(find) + # --allow-tool shell(git add:*) + # --allow-tool shell(git branch:*) + # --allow-tool shell(git checkout:*) + # --allow-tool shell(git commit:*) + # --allow-tool shell(git merge:*) + # --allow-tool shell(git rm:*) + # --allow-tool shell(git status) + # --allow-tool shell(git switch:*) + # --allow-tool shell(git:*) + # --allow-tool shell(grep) + # --allow-tool shell(head) + # --allow-tool shell(ls) + # --allow-tool shell(pwd) + # --allow-tool shell(sort) + # --allow-tool shell(tail) + # --allow-tool shell(uniq) + # --allow-tool shell(wc) + # --allow-tool shell(yq) + # --allow-tool write + timeout-minutes: 20 + run: | + set -o pipefail + touch /tmp/gh-aw/agent-step-summary.md + # shellcheck disable=SC1003 + 
sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains "api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,github.com,go.dev,golang.org,goproxy.io,host.docker.internal,pkg.go.dev,proxy.golang.org,raw.githubusercontent.com,registry.npmjs.org,storage.googleapis.com,sum.golang.org,telemetry.enterprise.githubcopilot.com" --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --image-tag 0.23.0 --skip-pull --enable-api-proxy \ + -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-tool github --allow-tool safeoutputs --allow-tool '\''shell(cat)'\'' --allow-tool '\''shell(date)'\'' --allow-tool '\''shell(echo)'\'' --allow-tool '\''shell(find)'\'' --allow-tool '\''shell(git add:*)'\'' --allow-tool '\''shell(git branch:*)'\'' --allow-tool '\''shell(git checkout:*)'\'' --allow-tool '\''shell(git commit:*)'\'' --allow-tool '\''shell(git merge:*)'\'' --allow-tool '\''shell(git rm:*)'\'' --allow-tool '\''shell(git status)'\'' --allow-tool '\''shell(git switch:*)'\'' --allow-tool '\''shell(git:*)'\'' --allow-tool '\''shell(grep)'\'' --allow-tool '\''shell(head)'\'' --allow-tool '\''shell(ls)'\'' --allow-tool '\''shell(pwd)'\'' --allow-tool '\''shell(sort)'\'' --allow-tool '\''shell(tail)'\'' --allow-tool '\''shell(uniq)'\'' --allow-tool '\''shell(wc)'\'' --allow-tool '\''shell(yq)'\'' --allow-tool write --allow-all-paths --allow-paths '\''README.md,docs/**'\'' --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log + env: + COPILOT_AGENT_RUNNER_TYPE: STANDALONE + COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + COPILOT_MODEL: gpt-4.1 + GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json + GH_AW_PHASE: agent + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + 
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GH_AW_VERSION: v0.57.2 + GITHUB_API_URL: ${{ github.api_url }} + GITHUB_AW: true + GITHUB_HEAD_REF: ${{ github.head_ref }} + GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + GITHUB_REF_NAME: ${{ github.ref_name }} + GITHUB_SERVER_URL: ${{ github.server_url }} + GITHUB_STEP_SUMMARY: /tmp/gh-aw/agent-step-summary.md + GITHUB_WORKSPACE: ${{ github.workspace }} + GIT_AUTHOR_EMAIL: github-actions[bot]@users.noreply.github.com + GIT_AUTHOR_NAME: github-actions[bot] + GIT_COMMITTER_EMAIL: github-actions[bot]@users.noreply.github.com + GIT_COMMITTER_NAME: github-actions[bot] + XDG_CONFIG_HOME: /home/runner + - name: Detect inference access error + id: detect-inference-error + if: always() + continue-on-error: true + run: bash /opt/gh-aw/actions/detect_inference_access_error.sh + - name: Configure Git credentials + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + git config --global am.keepcr true + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard GitHub Actions identity" + - name: Copy Copilot session state files to logs + if: always() + continue-on-error: true + run: | + # Copy Copilot session state files to logs folder for artifact collection + # This ensures they are in /tmp/gh-aw/ where secret redaction can scan them + SESSION_STATE_DIR="$HOME/.copilot/session-state" + LOGS_DIR="/tmp/gh-aw/sandbox/agent/logs" + + if [ -d "$SESSION_STATE_DIR" ]; then + echo "Copying Copilot session state files from $SESSION_STATE_DIR to $LOGS_DIR" + mkdir -p "$LOGS_DIR" + cp -v 
"$SESSION_STATE_DIR"/*.jsonl "$LOGS_DIR/" 2>/dev/null || true + echo "Session state files copied successfully" + else + echo "No session-state directory found at $SESSION_STATE_DIR" + fi + - name: Stop MCP Gateway + if: always() + continue-on-error: true + env: + MCP_GATEWAY_PORT: ${{ steps.start-mcp-gateway.outputs.gateway-port }} + MCP_GATEWAY_API_KEY: ${{ steps.start-mcp-gateway.outputs.gateway-api-key }} + GATEWAY_PID: ${{ steps.start-mcp-gateway.outputs.gateway-pid }} + run: | + bash /opt/gh-aw/actions/stop_mcp_gateway.sh "$GATEWAY_PID" + - name: Redact secrets in logs + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/redact_secrets.cjs'); + await main(); + env: + GH_AW_SECRET_NAMES: 'COPILOT_GITHUB_TOKEN,GH_AW_GITHUB_MCP_SERVER_TOKEN,GH_AW_GITHUB_TOKEN,GITHUB_TOKEN' + SECRET_COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + SECRET_GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} + SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} + SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Append agent step summary + if: always() + run: bash /opt/gh-aw/actions/append_agent_step_summary.sh + - name: Upload Safe Outputs + if: always() + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 + with: + name: safe-output + path: ${{ env.GH_AW_SAFE_OUTPUTS }} + if-no-files-found: warn + - name: Ingest agent output + id: collect_output + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GH_AW_ALLOWED_DOMAINS: 
"api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,github.com,go.dev,golang.org,goproxy.io,host.docker.internal,pkg.go.dev,proxy.golang.org,raw.githubusercontent.com,registry.npmjs.org,storage.googleapis.com,sum.golang.org,telemetry.enterprise.githubcopilot.com" + GITHUB_SERVER_URL: ${{ github.server_url }} + GITHUB_API_URL: ${{ github.api_url }} + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/collect_ndjson_output.cjs'); + await main(); + - name: Upload sanitized agent output + if: always() && env.GH_AW_AGENT_OUTPUT + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 + with: + name: agent-output + path: ${{ env.GH_AW_AGENT_OUTPUT }} + if-no-files-found: warn + - name: Upload engine output files + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 + with: + name: agent_outputs + path: | + /tmp/gh-aw/sandbox/agent/logs/ + /tmp/gh-aw/redacted-urls.log + if-no-files-found: ignore + - name: Parse agent logs for step summary + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: /tmp/gh-aw/sandbox/agent/logs/ + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_copilot_log.cjs'); + await main(); + - name: Parse MCP Gateway logs for step summary + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_mcp_gateway_log.cjs'); + await main(); + - name: Print firewall 
logs + if: always() + continue-on-error: true + env: + AWF_LOGS_DIR: /tmp/gh-aw/sandbox/firewall/logs + run: | + # Fix permissions on firewall logs so they can be uploaded as artifacts + # AWF runs with sudo, creating files owned by root + sudo chmod -R a+r /tmp/gh-aw/sandbox/firewall/logs 2>/dev/null || true + # Only run awf logs summary if awf command exists (it may not be installed if workflow failed before install step) + if command -v awf &> /dev/null; then + awf logs summary | tee -a "$GITHUB_STEP_SUMMARY" + else + echo 'AWF binary not installed, skipping firewall log summary' + fi + - name: Upload agent artifacts + if: always() + continue-on-error: true + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 + with: + name: agent-artifacts + path: | + /tmp/gh-aw/aw-prompts/prompt.txt + /tmp/gh-aw/mcp-logs/ + /tmp/gh-aw/sandbox/firewall/logs/ + /tmp/gh-aw/agent-stdio.log + /tmp/gh-aw/agent/ + /tmp/gh-aw/aw-*.patch + if-no-files-found: ignore + # --- Threat Detection (inline) --- + - name: Check if detection needed + id: detection_guard + if: always() + env: + OUTPUT_TYPES: ${{ steps.collect_output.outputs.output_types }} + HAS_PATCH: ${{ steps.collect_output.outputs.has_patch }} + run: | + if [[ -n "$OUTPUT_TYPES" || "$HAS_PATCH" == "true" ]]; then + echo "run_detection=true" >> "$GITHUB_OUTPUT" + echo "Detection will run: output_types=$OUTPUT_TYPES, has_patch=$HAS_PATCH" + else + echo "run_detection=false" >> "$GITHUB_OUTPUT" + echo "Detection skipped: no agent outputs or patches to analyze" + fi + - name: Clear MCP configuration for detection + if: always() && steps.detection_guard.outputs.run_detection == 'true' + run: | + rm -f /tmp/gh-aw/mcp-config/mcp-servers.json + rm -f /home/runner/.copilot/mcp-config.json + rm -f "$GITHUB_WORKSPACE/.gemini/settings.json" + - name: Prepare threat detection files + if: always() && steps.detection_guard.outputs.run_detection == 'true' + run: | + mkdir -p /tmp/gh-aw/threat-detection/aw-prompts + cp 
/tmp/gh-aw/aw-prompts/prompt.txt /tmp/gh-aw/threat-detection/aw-prompts/prompt.txt 2>/dev/null || true + cp /tmp/gh-aw/agent_output.json /tmp/gh-aw/threat-detection/agent_output.json 2>/dev/null || true + for f in /tmp/gh-aw/aw-*.patch; do + [ -f "$f" ] && cp "$f" /tmp/gh-aw/threat-detection/ 2>/dev/null || true + done + echo "Prepared threat detection files:" + ls -la /tmp/gh-aw/threat-detection/ 2>/dev/null || true + - name: Setup threat detection + if: always() && steps.detection_guard.outputs.run_detection == 'true' + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + WORKFLOW_NAME: "Documentation Sync" + WORKFLOW_DESCRIPTION: "Identifies documentation files that are out of sync with recent code changes and opens a pull request with the necessary updates." + HAS_PATCH: ${{ steps.collect_output.outputs.has_patch }} + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/setup_threat_detection.cjs'); + await main(); + - name: Ensure threat-detection directory and log + if: always() && steps.detection_guard.outputs.run_detection == 'true' + run: | + mkdir -p /tmp/gh-aw/threat-detection + touch /tmp/gh-aw/threat-detection/detection.log + - name: Execute GitHub Copilot CLI + if: always() && steps.detection_guard.outputs.run_detection == 'true' + id: detection_agentic_execution + # Copilot CLI tool arguments (sorted): + # --allow-tool shell(cat) + # --allow-tool shell(grep) + # --allow-tool shell(head) + # --allow-tool shell(jq) + # --allow-tool shell(ls) + # --allow-tool shell(tail) + # --allow-tool shell(wc) + timeout-minutes: 20 + run: | + set -o pipefail + touch /tmp/gh-aw/agent-step-summary.md + # shellcheck disable=SC1003 + sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains 
"api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,github.com,host.docker.internal,raw.githubusercontent.com,registry.npmjs.org,telemetry.enterprise.githubcopilot.com" --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --image-tag 0.23.0 --skip-pull --enable-api-proxy \ + -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-tool '\''shell(cat)'\'' --allow-tool '\''shell(grep)'\'' --allow-tool '\''shell(head)'\'' --allow-tool '\''shell(jq)'\'' --allow-tool '\''shell(ls)'\'' --allow-tool '\''shell(tail)'\'' --allow-tool '\''shell(wc)'\'' --allow-paths '\''README.md,docs/**'\'' --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"' 2>&1 | tee -a /tmp/gh-aw/threat-detection/detection.log + env: + COPILOT_AGENT_RUNNER_TYPE: STANDALONE + COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + COPILOT_MODEL: gpt-4.1 + GH_AW_PHASE: detection + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_VERSION: v0.57.2 + GITHUB_API_URL: ${{ github.api_url }} + GITHUB_AW: true + GITHUB_HEAD_REF: ${{ github.head_ref }} + GITHUB_REF_NAME: ${{ github.ref_name }} + GITHUB_SERVER_URL: ${{ github.server_url }} + GITHUB_STEP_SUMMARY: /tmp/gh-aw/agent-step-summary.md + GITHUB_WORKSPACE: ${{ github.workspace }} + GIT_AUTHOR_EMAIL: github-actions[bot]@users.noreply.github.com + GIT_AUTHOR_NAME: github-actions[bot] + GIT_COMMITTER_EMAIL: github-actions[bot]@users.noreply.github.com + GIT_COMMITTER_NAME: github-actions[bot] + XDG_CONFIG_HOME: /home/runner + - name: Parse threat detection results + id: parse_detection_results + if: always() && steps.detection_guard.outputs.run_detection == 'true' + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = 
require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_threat_detection_results.cjs'); + await main(); + - name: Upload threat detection log + if: always() && steps.detection_guard.outputs.run_detection == 'true' + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 + with: + name: threat-detection.log + path: /tmp/gh-aw/threat-detection/detection.log + if-no-files-found: ignore + - name: Set detection conclusion + id: detection_conclusion + if: always() + env: + RUN_DETECTION: ${{ steps.detection_guard.outputs.run_detection }} + DETECTION_SUCCESS: ${{ steps.parse_detection_results.outputs.success }} + run: | + if [[ "$RUN_DETECTION" != "true" ]]; then + echo "conclusion=skipped" >> "$GITHUB_OUTPUT" + echo "success=true" >> "$GITHUB_OUTPUT" + echo "Detection was not needed, marking as skipped" + elif [[ "$DETECTION_SUCCESS" == "true" ]]; then + echo "conclusion=success" >> "$GITHUB_OUTPUT" + echo "success=true" >> "$GITHUB_OUTPUT" + echo "Detection passed successfully" + else + echo "conclusion=failure" >> "$GITHUB_OUTPUT" + echo "success=false" >> "$GITHUB_OUTPUT" + echo "Detection found issues" + fi + + conclusion: + needs: + - activation + - agent + - safe_outputs + if: (always()) && (needs.agent.result != 'skipped') + runs-on: ubuntu-slim + permissions: + contents: write + issues: write + pull-requests: write + concurrency: + group: "gh-aw-conclusion-docs-sync" + cancel-in-progress: false + outputs: + noop_message: ${{ steps.noop.outputs.noop_message }} + tools_reported: ${{ steps.missing_tool.outputs.tools_reported }} + total_count: ${{ steps.missing_tool.outputs.total_count }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@v0.57.2 + with: + destination: /opt/gh-aw/actions + - name: Download agent output artifact + id: download-agent-output + continue-on-error: true + uses: 
actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8 + with: + name: agent-output + path: /tmp/gh-aw/safeoutputs/ + - name: Setup agent output environment variable + if: steps.download-agent-output.outcome == 'success' + run: | + mkdir -p /tmp/gh-aw/safeoutputs/ + find "/tmp/gh-aw/safeoutputs/" -type f -print + echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" + - name: Process No-Op Messages + id: noop + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_NOOP_MAX: "1" + GH_AW_WORKFLOW_NAME: "Documentation Sync" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/noop.cjs'); + await main(); + - name: Record Missing Tool + id: missing_tool + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "Documentation Sync" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/missing_tool.cjs'); + await main(); + - name: Handle Agent Failure + id: handle_agent_failure + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "Documentation Sync" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} + GH_AW_WORKFLOW_ID: "docs-sync" + GH_AW_SECRET_VERIFICATION_RESULT: ${{ needs.activation.outputs.secret_verification_result 
}} + GH_AW_CHECKOUT_PR_SUCCESS: ${{ needs.agent.outputs.checkout_pr_success }} + GH_AW_INFERENCE_ACCESS_ERROR: ${{ needs.agent.outputs.inference_access_error }} + GH_AW_CODE_PUSH_FAILURE_ERRORS: ${{ needs.safe_outputs.outputs.code_push_failure_errors }} + GH_AW_CODE_PUSH_FAILURE_COUNT: ${{ needs.safe_outputs.outputs.code_push_failure_count }} + GH_AW_GROUP_REPORTS: "false" + GH_AW_FAILURE_REPORT_AS_ISSUE: "true" + GH_AW_TIMEOUT_MINUTES: "20" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/handle_agent_failure.cjs'); + await main(); + - name: Handle No-Op Message + id: handle_noop_message + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "Documentation Sync" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} + GH_AW_NOOP_MESSAGE: ${{ steps.noop.outputs.noop_message }} + GH_AW_NOOP_REPORT_AS_ISSUE: "true" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/handle_noop_message.cjs'); + await main(); + - name: Handle Create Pull Request Error + id: handle_create_pr_error + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "Documentation Sync" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + 
const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/handle_create_pr_error.cjs'); + await main(); + + safe_outputs: + needs: + - activation + - agent + if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.agent.outputs.detection_success == 'true') + runs-on: ubuntu-slim + permissions: + contents: write + issues: write + pull-requests: write + timeout-minutes: 15 + env: + GH_AW_CALLER_WORKFLOW_ID: "${{ github.repository }}/docs-sync" + GH_AW_ENGINE_ID: "copilot" + GH_AW_ENGINE_MODEL: "gpt-4.1" + GH_AW_WORKFLOW_ID: "docs-sync" + GH_AW_WORKFLOW_NAME: "Documentation Sync" + outputs: + code_push_failure_count: ${{ steps.process_safe_outputs.outputs.code_push_failure_count }} + code_push_failure_errors: ${{ steps.process_safe_outputs.outputs.code_push_failure_errors }} + create_discussion_error_count: ${{ steps.process_safe_outputs.outputs.create_discussion_error_count }} + create_discussion_errors: ${{ steps.process_safe_outputs.outputs.create_discussion_errors }} + created_pr_number: ${{ steps.process_safe_outputs.outputs.created_pr_number }} + created_pr_url: ${{ steps.process_safe_outputs.outputs.created_pr_url }} + process_safe_outputs_processed_count: ${{ steps.process_safe_outputs.outputs.processed_count }} + process_safe_outputs_temporary_id_map: ${{ steps.process_safe_outputs.outputs.temporary_id_map }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@v0.57.2 + with: + destination: /opt/gh-aw/actions + - name: Download agent output artifact + id: download-agent-output + continue-on-error: true + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8 + with: + name: agent-output + path: /tmp/gh-aw/safeoutputs/ + - name: Setup agent output environment variable + if: steps.download-agent-output.outcome == 'success' + run: | + mkdir -p /tmp/gh-aw/safeoutputs/ + find "/tmp/gh-aw/safeoutputs/" 
-type f -print + echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" + - name: Download patch artifact + continue-on-error: true + uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8 + with: + name: agent-artifacts + path: /tmp/gh-aw/ + - name: Checkout repository + if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'create_pull_request')) + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + ref: ${{ github.base_ref || github.event.pull_request.base.ref || github.ref_name || github.event.repository.default_branch }} + token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + persist-credentials: false + fetch-depth: 1 + - name: Configure Git credentials + if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'create_pull_request')) + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + GIT_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + git config --global am.keepcr true + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${GIT_TOKEN}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard GitHub Actions identity" + - name: Process Safe Outputs + id: process_safe_outputs + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_ALLOWED_DOMAINS: 
"api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,github.com,go.dev,golang.org,goproxy.io,host.docker.internal,pkg.go.dev,proxy.golang.org,raw.githubusercontent.com,registry.npmjs.org,storage.googleapis.com,sum.golang.org,telemetry.enterprise.githubcopilot.com" + GITHUB_SERVER_URL: ${{ github.server_url }} + GITHUB_API_URL: ${{ github.api_url }} + GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"create_pull_request\":{\"draft\":false,\"expires\":168,\"labels\":[\"documentation\"],\"max\":1,\"max_patch_size\":1024,\"protected_files\":[\"package.json\",\"bun.lockb\",\"bunfig.toml\",\"deno.json\",\"deno.jsonc\",\"deno.lock\",\"global.json\",\"NuGet.Config\",\"Directory.Packages.props\",\"mix.exs\",\"mix.lock\",\"go.mod\",\"go.sum\",\"stack.yaml\",\"stack.yaml.lock\",\"pom.xml\",\"build.gradle\",\"build.gradle.kts\",\"settings.gradle\",\"settings.gradle.kts\",\"gradle.properties\",\"package-lock.json\",\"yarn.lock\",\"pnpm-lock.yaml\",\"npm-shrinkwrap.json\",\"requirements.txt\",\"Pipfile\",\"Pipfile.lock\",\"pyproject.toml\",\"setup.py\",\"setup.cfg\",\"Gemfile\",\"Gemfile.lock\",\"uv.lock\",\"AGENTS.md\"],\"protected_path_prefixes\":[\".github/\",\".agents/\"],\"title_prefix\":\"[docs-sync] \"},\"missing_data\":{},\"missing_tool\":{}}" + GH_AW_CI_TRIGGER_TOKEN: ${{ secrets.GH_AW_CI_TRIGGER_TOKEN }} + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/safe_output_handler_manager.cjs'); + await main(); + - name: Upload safe output items manifest + if: always() + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 + with: + name: safe-output-items + path: /tmp/safe-output-items.jsonl + if-no-files-found: warn + diff --git a/.github/workflows/docs-sync.md 
b/.github/workflows/docs-sync.md new file mode 100644 index 0000000..3024427 --- /dev/null +++ b/.github/workflows/docs-sync.md @@ -0,0 +1,88 @@ +--- +name: Documentation Sync +description: > + Identifies documentation files that are out of sync with recent code changes + and opens a pull request with the necessary updates. +on: + schedule: daily on weekdays + workflow_dispatch: +permissions: + contents: read + issues: read + pull-requests: read +engine: + id: copilot + model: gpt-4.1 + args: ["--allow-paths", "README.md,docs/**"] +tools: + github: + mode: remote + toolsets: [default] + bash: ["git", "cat", "grep", "find", "ls", "head", "tail", "wc"] + edit: +network: + allowed: + - go +safe-outputs: + create-pull-request: + title-prefix: "[docs-sync] " + labels: [documentation] + draft: false + expires: 7 +--- + +# Documentation Sync + +You are a documentation maintenance agent for the `gh-devlake` repository — a GitHub CLI extension built with Go + Cobra that automates Apache DevLake deployment, configuration, and monitoring. + +## Your Task + +Identify documentation files that have drifted from the current codebase and open a **single pull request** with all necessary updates. + +## Step 1 — Gather Context + +1. Use `git log --since="7 days ago" --name-only --pretty=format:""` to list files changed in the last 7 days. +2. Filter to changes in `cmd/` and `internal/` (the Go source directories). +3. Read the repository's `README.md`, `AGENTS.md`, and every file under `docs/`. + +> **Note:** `AGENTS.md` is read-only — the safe-outputs handler protects it from PR changes. Use it as a reference only. + +## Step 2 — Identify Stale Documentation + +Compare the current code with the documentation. 
Look for: + +| Signal | Example | +|--------|---------| +| **New or renamed commands** | A `newXxxCmd()` was added or renamed but the Command Reference table in `README.md` is missing it | +| **Changed flags** | A flag was added, removed, or renamed in a Cobra command but `docs/*.md` still shows the old flag | +| **Changed default values** | A default endpoint, port, or env-var name changed in code but docs reference the old value | +| **New plugins** | A `ConnectionDef` was added to `connectionRegistry` in `cmd/connection_types.go` but the Supported Plugins table in `README.md` is missing it | +| **Removed features** | Code was deleted but docs still reference it | +| **Outdated examples** | CLI examples in docs no longer match actual command syntax | + +Do **not** rewrite prose style or reformat sections that are already accurate. + +## Step 3 — Apply Fixes + +For every stale section you find: + +1. Edit the relevant documentation file (`README.md` or the appropriate `docs/*.md`). +2. Keep edits minimal and surgical — change only what is out of date. +3. Preserve existing formatting, heading levels, and Markdown conventions. + +## Step 4 — Open a Pull Request + +If you made any edits, create a pull request with: +- **Title**: A concise summary such as "Sync docs with recent code changes" +- **Body**: A bulleted list of every documentation change and why it was needed, referencing the code change that caused the drift. + +If no documentation is stale, do **not** create a pull request. Instead, output a short summary confirming all docs are up to date. + +## Guidelines + +- The Command Reference table in `README.md` must list every user-facing command. Cross-check against `cmd/` constructors (`newXxxCmd()`). +- The Supported Plugins table in `README.md` must match `connectionRegistry` entries in `cmd/connection_types.go`. +- Flag documentation in `docs/` files must match the flags registered in each command's constructor. 
+- `AGENTS.md` architecture section must match the actual directory tree under `internal/`. If it has drifted, note it in the PR body but do not edit `AGENTS.md` directly — it is a protected file. +- Do not add new documentation files — only update existing ones. +- Do not modify Go source code — this workflow is documentation-only. diff --git a/README.md b/README.md index 003e73d..223bb97 100644 --- a/README.md +++ b/README.md @@ -198,6 +198,7 @@ For the full guide, see [Day-2 Operations](docs/day-2.md). | Bitbucket Cloud | āœ… Available | Repos, PRs, commits | Bitbucket username + app password | | SonarQube | āœ… Available | Code quality, coverage, code smells (quality gates) | API token (permissions from user account) | | Azure DevOps | āœ… Available | Repos, pipelines, deployments (DORA) | PAT with repo and pipeline access | +| ArgoCD | āœ… Available | GitOps deployments, deployment frequency (DORA) | ArgoCD auth token | See [Token Handling](docs/token-handling.md) for env key names and multi-plugin `.devlake.env` examples. 
@@ -226,6 +227,7 @@ See [Token Handling](docs/token-handling.md) for env key names and multi-plugin | `gh devlake configure project list` | List all projects | [configure-project.md](docs/configure-project.md) | | `gh devlake configure project delete` | Delete a project | [configure-project.md](docs/configure-project.md) | | `gh devlake configure full` | Connections + scopes + project in one step | [configure-full.md](docs/configure-full.md) | +| `gh devlake start` | Start stopped or exited DevLake services | [start.md](docs/start.md) | | `gh devlake cleanup` | Tear down local or Azure resources | [cleanup.md](docs/cleanup.md) | ### Global Flags diff --git a/cmd/configure_scope_add.go b/cmd/configure_scope_add.go index 25c92dd..0fd5e6a 100644 --- a/cmd/configure_scope_add.go +++ b/cmd/configure_scope_add.go @@ -36,6 +36,9 @@ GitHub-specific flags: GitHub Copilot-specific flags: --enterprise Enterprise slug (enables enterprise-level metrics) +SonarQube-specific flags: + --projects Comma-separated SonarQube project keys + Example (GitHub): gh devlake configure scope add --plugin github --connection-id 1 --org my-org --repos org/repo1,org/repo2 @@ -52,6 +55,7 @@ Example (Copilot): cmd.Flags().StringVar(&opts.Repos, "repos", "", "Comma-separated repos (owner/repo)") cmd.Flags().StringVar(&opts.ReposFile, "repos-file", "", "Path to file with repos (one per line)") cmd.Flags().StringVar(&opts.Jobs, "jobs", "", "Comma-separated Jenkins job full names") + cmd.Flags().StringVar(&opts.Projects, "projects", "", "Comma-separated SonarQube project keys") cmd.Flags().IntVar(&opts.ConnectionID, "connection-id", 0, "Connection ID (auto-detected if omitted)") cmd.Flags().StringVar(&opts.DeployPattern, "deployment-pattern", "(?i)deploy", "Regex to match deployment workflows") cmd.Flags().StringVar(&opts.ProdPattern, "production-pattern", "(?i)prod", "Regex to match production environment") @@ -82,6 +86,7 @@ func runScopeAdd(cmd *cobra.Command, args []string, opts *ScopeOpts) error { 
cmd.Flags().Changed("repos") || cmd.Flags().Changed("repos-file") || cmd.Flags().Changed("jobs") || + cmd.Flags().Changed("projects") || cmd.Flags().Changed("connection-id") if flagMode { slugs := availablePluginSlugs() diff --git a/cmd/configure_scopes.go b/cmd/configure_scopes.go index aee0d5c..8af71fa 100644 --- a/cmd/configure_scopes.go +++ b/cmd/configure_scopes.go @@ -22,6 +22,7 @@ type ScopeOpts struct { Repos string ReposFile string Jobs string + Projects string ConnectionID int ProjectName string DeployPattern string @@ -1088,7 +1089,7 @@ func browseBitbucketReposInteractively(client *devlake.Client, connID int, works for nextToken != "" { page, err := client.ListRemoteScopes("bitbucket", connID, "", nextToken) if err != nil { - break + return nil, fmt.Errorf("listing Bitbucket workspaces (page token %s): %w", nextToken, err) } allWS = append(allWS, page.Children...) nextToken = page.NextPageToken @@ -1131,7 +1132,7 @@ func browseBitbucketReposInteractively(client *devlake.Client, connID int, works for nextToken != "" { page, err := client.ListRemoteScopes("bitbucket", connID, workspaceID, nextToken) if err != nil { - break + return nil, fmt.Errorf("listing repositories in workspace %q (page token %s): %w", workspaceID, nextToken, err) } allChildren = append(allChildren, page.Children...) nextToken = page.NextPageToken @@ -1175,8 +1176,11 @@ func browseBitbucketReposInteractively(client *devlake.Client, connID int, works // parseBitbucketRepo extracts repository fields from a RemoteScopeChild's Data payload. func parseBitbucketRepo(child *devlake.RemoteScopeChild) *devlake.BitbucketRepoScope { var r devlake.BitbucketRepoScope - if err := json.Unmarshal(child.Data, &r); err != nil { - return nil + if len(child.Data) > 0 { + if err := json.Unmarshal(child.Data, &r); err != nil { + // Treat missing/invalid payload as empty to fall back to child fields. 
+ r = devlake.BitbucketRepoScope{} + } } if r.FullName == "" { r.FullName = child.FullName @@ -1340,6 +1344,7 @@ func scopeSonarQubeHandler(client *devlake.Client, connID int, org, enterprise s // Extract projects from remote-scope response var projectOptions []string projectMap := make(map[string]*devlake.RemoteScopeChild) + projectByKey := make(map[string]*devlake.RemoteScopeChild) for i := range allChildren { child := &allChildren[i] if child.Type == "scope" { @@ -1350,6 +1355,7 @@ func scopeSonarQubeHandler(client *devlake.Client, connID int, org, enterprise s label := fmt.Sprintf("%s (key: %s)", child.Name, child.ID) projectOptions = append(projectOptions, label) projectMap[label] = child + projectByKey[child.ID] = child } } @@ -1357,18 +1363,49 @@ func scopeSonarQubeHandler(client *devlake.Client, connID int, org, enterprise s return nil, fmt.Errorf("no SonarQube projects found for connection %d", connID) } - fmt.Println() - selectedLabels := prompt.SelectMulti("Select SonarQube projects to track", projectOptions) - if len(selectedLabels) == 0 { - return nil, fmt.Errorf("at least one SonarQube project must be selected") + var selectedProjects []*devlake.RemoteScopeChild + if opts != nil && opts.Projects != "" { + var keys []string + seenKeys := make(map[string]bool) + for _, key := range strings.Split(opts.Projects, ",") { + key = strings.TrimSpace(key) + if key == "" || seenKeys[key] { + continue + } + seenKeys[key] = true + keys = append(keys, key) + } + if len(keys) == 0 { + return nil, fmt.Errorf("no SonarQube projects provided via --projects") + } + for _, key := range keys { + child, ok := projectByKey[key] + if !ok { + return nil, fmt.Errorf("project key %q not found on connection %d", key, connID) + } + selectedProjects = append(selectedProjects, child) + } + if !outputJSON { + fmt.Printf(" Projects from --projects: %s\n", strings.Join(keys, ", ")) + } + } else { + fmt.Println() + selectedLabels := prompt.SelectMulti("Select SonarQube projects to 
track", projectOptions) + if len(selectedLabels) == 0 { + return nil, fmt.Errorf("at least one SonarQube project must be selected") + } + for _, label := range selectedLabels { + if child := projectMap[label]; child != nil { + selectedProjects = append(selectedProjects, child) + } + } } // Build scope data for PUT fmt.Println("\nšŸ“ Adding SonarQube project scopes...") var scopeData []any var blueprintScopes []devlake.BlueprintScope - for _, label := range selectedLabels { - child := projectMap[label] + for _, child := range selectedProjects { scopeData = append(scopeData, devlake.SonarQubeProjectScope{ ConnectionID: connID, ProjectKey: child.ID, @@ -1396,3 +1433,84 @@ func scopeSonarQubeHandler(client *devlake.Client, connID int, org, enterprise s Scopes: blueprintScopes, }, nil } + +// scopeArgoCDHandler is the ScopeHandler for the argocd plugin. +func scopeArgoCDHandler(client *devlake.Client, connID int, org, enterprise string, opts *ScopeOpts) (*devlake.BlueprintConnection, error) { + fmt.Println("\nšŸ“‹ Fetching ArgoCD applications...") + + // Aggregate all pages of remote scopes + var allChildren []devlake.RemoteScopeChild + pageToken := "" + for { + remoteScopes, err := client.ListRemoteScopes("argocd", connID, "", pageToken) + if err != nil { + return nil, fmt.Errorf("failed to list ArgoCD applications: %w", err) + } + allChildren = append(allChildren, remoteScopes.Children...) 
+ pageToken = remoteScopes.NextPageToken + if pageToken == "" { + break + } + } + + // Extract applications from remote-scope response + var appOptions []string + appMap := make(map[string]*devlake.RemoteScopeChild) + for i := range allChildren { + child := &allChildren[i] + if child.Type == "scope" { + // Skip applications without a valid name (child.ID) + if child.ID == "" { + continue + } + label := child.ID + if child.Name != "" { + label = fmt.Sprintf("%s (%s)", child.Name, child.ID) + } + appOptions = append(appOptions, label) + appMap[label] = child + } + } + + if len(appOptions) == 0 { + return nil, fmt.Errorf("no ArgoCD applications found for connection %d", connID) + } + + fmt.Println() + selectedLabels := prompt.SelectMulti("Select ArgoCD applications to track", appOptions) + if len(selectedLabels) == 0 { + return nil, fmt.Errorf("at least one ArgoCD application must be selected") + } + + // Build scope data for PUT + fmt.Println("\nšŸ“ Adding ArgoCD application scopes...") + var scopeData []any + var blueprintScopes []devlake.BlueprintScope + for _, label := range selectedLabels { + child := appMap[label] + scopeData = append(scopeData, devlake.ArgoCDAppScope{ + ConnectionID: connID, + Name: child.ID, + }) + blueprintScopes = append(blueprintScopes, devlake.BlueprintScope{ + ScopeID: child.ID, + ScopeName: child.Name, + }) + } + + if len(scopeData) == 0 { + return nil, fmt.Errorf("no valid applications to add") + } + + err := client.PutScopes("argocd", connID, &devlake.ScopeBatchRequest{Data: scopeData}) + if err != nil { + return nil, fmt.Errorf("failed to add ArgoCD application scopes: %w", err) + } + fmt.Printf(" āœ… Added %d application scope(s)\n", len(scopeData)) + + return &devlake.BlueprintConnection{ + PluginName: "argocd", + ConnectionID: connID, + Scopes: blueprintScopes, + }, nil +} diff --git a/cmd/configure_scopes_test.go b/cmd/configure_scopes_test.go index 9198525..ed0550d 100644 --- a/cmd/configure_scopes_test.go +++ 
b/cmd/configure_scopes_test.go @@ -2,8 +2,11 @@ package cmd import ( "encoding/json" + "net/http" + "net/http/httptest" "os" "path/filepath" + "strings" "testing" "github.com/spf13/cobra" @@ -125,6 +128,85 @@ func TestAzureDevOpsScopePayload_KeepsExistingFields(t *testing.T) { } } +func TestParseBitbucketRepo(t *testing.T) { + t.Run("uses payload fields when present", func(t *testing.T) { + data, _ := json.Marshal(map[string]any{ + "bitbucketId": "workspace/api", + "name": "api", + "fullName": "workspace/api", + "htmlUrl": "https://bitbucket.org/workspace/api", + "cloneUrl": "https://bitbucket.org/workspace/api.git", + }) + child := devlake.RemoteScopeChild{ + ID: "ignored", + Name: "api-child", + FullName: "workspace/api-child", + Data: data, + } + repo := parseBitbucketRepo(&child) + if repo == nil { + t.Fatal("expected repo, got nil") + } + if repo.BitbucketID != "workspace/api" { + t.Fatalf("bitbucketId = %q, want %q", repo.BitbucketID, "workspace/api") + } + if repo.Name != "api" { + t.Fatalf("name = %q, want %q", repo.Name, "api") + } + if repo.FullName != "workspace/api" { + t.Fatalf("fullName = %q, want %q", repo.FullName, "workspace/api") + } + if repo.CloneURL != "https://bitbucket.org/workspace/api.git" { + t.Fatalf("cloneUrl = %q, want https://bitbucket.org/workspace/api.git", repo.CloneURL) + } + if repo.HTMLURL != "https://bitbucket.org/workspace/api" { + t.Fatalf("htmlUrl = %q, want https://bitbucket.org/workspace/api", repo.HTMLURL) + } + }) + + t.Run("falls back to child fields when payload is sparse", func(t *testing.T) { + child := devlake.RemoteScopeChild{ + Name: "frontend", + FullName: "team/frontend", + Data: []byte(`{"bitbucketId":"","name":"","fullName":""}`), + } + repo := parseBitbucketRepo(&child) + if repo == nil { + t.Fatal("expected repo, got nil") + } + if repo.BitbucketID != "team/frontend" { + t.Fatalf("bitbucketId = %q, want %q", repo.BitbucketID, "team/frontend") + } + if repo.Name != "frontend" { + t.Fatalf("name = %q, want %q", 
repo.Name, "frontend") + } + if repo.FullName != "team/frontend" { + t.Fatalf("fullName = %q, want %q", repo.FullName, "team/frontend") + } + }) + + t.Run("handles missing data by using child fields", func(t *testing.T) { + child := devlake.RemoteScopeChild{ + Name: "ui", + FullName: "workspace/ui", + Data: nil, + } + repo := parseBitbucketRepo(&child) + if repo == nil { + t.Fatal("expected repo, got nil") + } + if repo.BitbucketID != "workspace/ui" { + t.Fatalf("bitbucketId = %q, want %q", repo.BitbucketID, "workspace/ui") + } + if repo.Name != "ui" { + t.Fatalf("name = %q, want %q", repo.Name, "ui") + } + if repo.FullName != "workspace/ui" { + t.Fatalf("fullName = %q, want %q", repo.FullName, "workspace/ui") + } + }) +} + func TestPagerDutyServiceFromChild_UsesData(t *testing.T) { data, _ := json.Marshal(map[string]any{ "id": "SVC123", @@ -383,3 +465,117 @@ func TestResolveJenkinsJobs_WithJobsFlag(t *testing.T) { }) } } + +func TestScopeSonarQubeHandler_ProjectsFlag(t *testing.T) { + origJSON := outputJSON + outputJSON = false + t.Cleanup(func() { outputJSON = origJSON }) + + t.Run("valid project keys put scopes", func(t *testing.T) { + var ( + putCalls int + captured devlake.ScopeBatchRequest + remoteResp = devlake.RemoteScopeResponse{ + Children: []devlake.RemoteScopeChild{ + {Type: "scope", ID: "proj-a", Name: "Project A"}, + {Type: "scope", ID: "proj-b", Name: "Project B"}, + }, + } + ) + + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch { + case strings.HasPrefix(r.URL.Path, "/plugins/sonarqube/connections/123/remote-scopes"): + data, _ := json.Marshal(remoteResp) + _, _ = w.Write(data) + case r.Method == http.MethodPut && strings.HasPrefix(r.URL.Path, "/plugins/sonarqube/connections/123/scopes"): + putCalls++ + if err := json.NewDecoder(r.Body).Decode(&captured); err != nil { + t.Fatalf("decoding scopes payload: %v", err) + } + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(`{}`)) + default: + 
w.WriteHeader(http.StatusNotFound) + } + })) + t.Cleanup(srv.Close) + + client := devlake.NewClient(srv.URL) + client.HTTPClient = srv.Client() + + opts := &ScopeOpts{Projects: "proj-a, proj-b"} + bp, err := scopeSonarQubeHandler(client, 123, "", "", opts) + if err != nil { + t.Fatalf("scopeSonarQubeHandler returned error: %v", err) + } + if putCalls != 1 { + t.Fatalf("expected 1 PutScopes call, got %d", putCalls) + } + if len(captured.Data) != 2 { + t.Fatalf("expected 2 scopes in payload, got %d", len(captured.Data)) + } + + assertScope := func(idx int, expectKey, expectName string) { + item, ok := captured.Data[idx].(map[string]any) + if !ok { + t.Fatalf("scope %d type = %T, want map[string]any", idx, captured.Data[idx]) + } + if got := item["projectKey"]; got != expectKey { + t.Errorf("scope %d projectKey = %v, want %s", idx, got, expectKey) + } + if got := item["name"]; got != expectName { + t.Errorf("scope %d name = %v, want %s", idx, got, expectName) + } + if got := item["connectionId"]; got != float64(123) { // JSON numbers decode as float64 + t.Errorf("scope %d connectionId = %v, want 123", idx, got) + } + } + assertScope(0, "proj-a", "Project A") + assertScope(1, "proj-b", "Project B") + + if bp == nil || bp.PluginName != "sonarqube" || bp.ConnectionID != 123 || len(bp.Scopes) != 2 { + t.Fatalf("unexpected blueprint connection: %+v", bp) + } + }) + + t.Run("invalid project key errors", func(t *testing.T) { + var ( + putCalls int + remoteResp = devlake.RemoteScopeResponse{ + Children: []devlake.RemoteScopeChild{ + {Type: "scope", ID: "proj-a", Name: "Project A"}, + }, + } + ) + + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch { + case strings.HasPrefix(r.URL.Path, "/plugins/sonarqube/connections/456/remote-scopes"): + data, _ := json.Marshal(remoteResp) + _, _ = w.Write(data) + case r.Method == http.MethodPut && strings.HasPrefix(r.URL.Path, "/plugins/sonarqube/connections/456/scopes"): + putCalls++ + 
w.WriteHeader(http.StatusOK) + default: + w.WriteHeader(http.StatusNotFound) + } + })) + t.Cleanup(srv.Close) + + client := devlake.NewClient(srv.URL) + client.HTTPClient = srv.Client() + + opts := &ScopeOpts{Projects: "missing"} + _, err := scopeSonarQubeHandler(client, 456, "", "", opts) + if err == nil { + t.Fatal("expected error for missing project key, got nil") + } + if !strings.Contains(err.Error(), `project key "missing" not found`) { + t.Fatalf("unexpected error: %v", err) + } + if putCalls != 0 { + t.Fatalf("expected no PutScopes calls on error, got %d", putCalls) + } + }) +} diff --git a/cmd/connection_types.go b/cmd/connection_types.go index 45c14ee..269463c 100644 --- a/cmd/connection_types.go +++ b/cmd/connection_types.go @@ -364,6 +364,27 @@ var connectionRegistry = []*ConnectionDef{ ScopeFunc: scopeSonarQubeHandler, ScopeIDField: "projectKey", HasRepoScopes: false, + ScopeFlags: []FlagDef{ + {Name: "projects", Description: "Comma-separated SonarQube project keys"}, + }, + }, + { + Plugin: "argocd", + DisplayName: "ArgoCD", + Available: true, + Endpoint: "", // user must provide (e.g., https://argocd.example.com) + SupportsTest: true, + AuthMethod: "AccessToken", + RateLimitPerHour: 0, // uses default 4500 + // ArgoCD uses auth tokens; permissions come from the user account. 
+ RequiredScopes: []string{}, + ScopeHint: "", + TokenPrompt: "ArgoCD auth token", + EnvVarNames: []string{"ARGOCD_TOKEN", "ARGOCD_AUTH_TOKEN"}, + EnvFileKeys: []string{"ARGOCD_TOKEN", "ARGOCD_AUTH_TOKEN"}, + ScopeFunc: scopeArgoCDHandler, + ScopeIDField: "name", + HasRepoScopes: false, }, } diff --git a/cmd/connection_types_test.go b/cmd/connection_types_test.go index 6883bec..8f681f8 100644 --- a/cmd/connection_types_test.go +++ b/cmd/connection_types_test.go @@ -814,6 +814,17 @@ func TestConnectionRegistry_SonarQube(t *testing.T) { t.Error("ScopeFunc should not be nil") } + foundProjectsFlag := false + for _, f := range def.ScopeFlags { + if f.Name == "projects" { + foundProjectsFlag = true + break + } + } + if !foundProjectsFlag { + t.Errorf("ScopeFlags should include projects flag") + } + // SonarQube uses API tokens, not OAuth/PAT scopes if len(def.RequiredScopes) != 0 { t.Errorf("RequiredScopes should be empty for SonarQube API tokens, got %v", def.RequiredScopes) @@ -844,3 +855,68 @@ func TestConnectionRegistry_SonarQube(t *testing.T) { } } } + +// TestConnectionRegistry_ArgoCD verifies the ArgoCD plugin registry entry. 
+func TestConnectionRegistry_ArgoCD(t *testing.T) { + def := FindConnectionDef("argocd") + if def == nil { + t.Fatal("argocd plugin not found in registry") + } + + tests := []struct { + name string + got interface{} + want interface{} + }{ + {"Plugin", def.Plugin, "argocd"}, + {"DisplayName", def.DisplayName, "ArgoCD"}, + {"Available", def.Available, true}, + {"Endpoint", def.Endpoint, ""}, + {"SupportsTest", def.SupportsTest, true}, + {"AuthMethod", def.AuthMethod, "AccessToken"}, + {"ScopeIDField", def.ScopeIDField, "name"}, + {"HasRepoScopes", def.HasRepoScopes, false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if tt.got != tt.want { + t.Errorf("%s: got %v, want %v", tt.name, tt.got, tt.want) + } + }) + } + + if def.ScopeFunc == nil { + t.Error("ScopeFunc should not be nil") + } + + // ArgoCD uses auth tokens, not OAuth/PAT scopes + if len(def.RequiredScopes) != 0 { + t.Errorf("RequiredScopes should be empty for ArgoCD auth tokens, got %v", def.RequiredScopes) + } + if def.ScopeHint != "" { + t.Errorf("ScopeHint should be empty for ArgoCD auth tokens, got %q", def.ScopeHint) + } + + expectedEnvVars := []string{"ARGOCD_TOKEN", "ARGOCD_AUTH_TOKEN"} + if len(def.EnvVarNames) != len(expectedEnvVars) { + t.Errorf("EnvVarNames length: got %d, want %d", len(def.EnvVarNames), len(expectedEnvVars)) + } else { + for i, v := range expectedEnvVars { + if def.EnvVarNames[i] != v { + t.Errorf("EnvVarNames[%d]: got %q, want %q", i, def.EnvVarNames[i], v) + } + } + } + + expectedEnvFileKeys := []string{"ARGOCD_TOKEN", "ARGOCD_AUTH_TOKEN"} + if len(def.EnvFileKeys) != len(expectedEnvFileKeys) { + t.Errorf("EnvFileKeys length: got %d, want %d", len(def.EnvFileKeys), len(expectedEnvFileKeys)) + } else { + for i, v := range expectedEnvFileKeys { + if def.EnvFileKeys[i] != v { + t.Errorf("EnvFileKeys[%d]: got %q, want %q", i, def.EnvFileKeys[i], v) + } + } + } +} diff --git a/cmd/deploy.go b/cmd/deploy.go index 8182593..d10c9f3 100644 --- 
a/cmd/deploy.go +++ b/cmd/deploy.go @@ -50,4 +50,8 @@ func init() { cleanupCmd := newCleanupCmd() cleanupCmd.GroupID = "operate" rootCmd.AddCommand(cleanupCmd) + + startCmd := newStartCmd() + startCmd.GroupID = "operate" + rootCmd.AddCommand(startCmd) } diff --git a/cmd/start.go b/cmd/start.go new file mode 100644 index 0000000..f1eb046 --- /dev/null +++ b/cmd/start.go @@ -0,0 +1,325 @@ +package cmd + +import ( + "encoding/json" + "fmt" + "io" + "os" + "path/filepath" + "strings" + "time" + + azurepkg "github.com/DevExpGBB/gh-devlake/internal/azure" + dockerpkg "github.com/DevExpGBB/gh-devlake/internal/docker" + "github.com/spf13/cobra" +) + +var ( + startService string + startNoWait bool + startAzure bool + startLocal bool + startState string +) + +// startHealthAttempts is the number of 10-second polling intervals used when waiting +// for DevLake to become healthy after start. 6 Ɨ 10s = 60s total — much shorter than +// the 36 Ɨ 10s = 6-minute timeout used during deploy, because databases and volumes +// already exist when starting an existing deployment. +const startHealthAttempts = 6 + +// Well-known local port pairs for DevLake services (matching discovery.go). +const ( + localBackendPort8080 = "http://localhost:8080" + localGrafanaPort8080 = "http://localhost:3002" + localConfigUIPort8080 = "http://localhost:4000" + localBackendPort8085 = "http://localhost:8085" + localGrafanaPort8085 = "http://localhost:3004" + localConfigUIPort8085 = "http://localhost:4004" +) + +func newStartCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "start", + Short: "Start stopped or exited DevLake services", + Long: `Brings up stopped or exited DevLake services for an existing deployment. + +For local deployments (Docker Compose), runs 'docker compose up -d' from the +deployment directory. This is idempotent — running containers are unaffected, +and exited or crashed containers are restarted. + +For Azure deployments, starts any stopped Container Instances and MySQL server. 
+ +Auto-detects deployment type from state files in the current directory.`, + RunE: runStart, + } + + cmd.Flags().StringVar(&startService, "service", "", "Start only a specific service (e.g., config-ui)") + cmd.Flags().BoolVar(&startNoWait, "no-wait", false, "Skip health polling after start") + cmd.Flags().BoolVar(&startAzure, "azure", false, "Force Azure start mode") + cmd.Flags().BoolVar(&startLocal, "local", false, "Force local (Docker Compose) start mode") + cmd.Flags().StringVar(&startState, "state-file", "", "Path to state file (auto-detected if omitted)") + + return cmd +} + +func runStart(cmd *cobra.Command, args []string) error { + mode := detectStartMode() + switch mode { + case "local": + return runLocalStart() + case "azure": + return runAzureStart() + default: + return fmt.Errorf("no deployment found — no state file or docker-compose.yml in current directory\nRun 'gh devlake deploy' to create a new deployment") + } +} + +// detectStartMode determines whether to start local (Docker Compose) or Azure resources. +// Priority: explicit flags → explicit state file (inspected for method) → auto-detect files. +func detectStartMode() string { + if startAzure { + return "azure" + } + if startLocal { + return "local" + } + + // Check explicit state file — inspect its content rather than guessing from the filename. + if startState != "" { + if data, err := os.ReadFile(startState); err == nil { + var meta struct { + Method string `json:"method"` + ResourceGroup string `json:"resourceGroup"` + } + if json.Unmarshal(data, &meta) == nil { + switch strings.ToLower(meta.Method) { + case "azure": + return "azure" + case "local", "docker-compose": + return "local" + } + // If method is absent but resourceGroup is set, it's an Azure state file. + if meta.ResourceGroup != "" { + return "azure" + } + } + } + // File exists but could not be parsed, or method is unknown — fall through. + return "local" + } + + // Auto-detect from well-known state file names. 
+ if _, err := os.Stat(".devlake-azure.json"); err == nil { + return "azure" + } + if _, err := os.Stat(".devlake-local.json"); err == nil { + return "local" + } + // Fall back to docker-compose.yml in cwd. + if _, err := os.Stat("docker-compose.yml"); err == nil { + return "local" + } + return "" +} + +func runLocalStart() error { + // In JSON mode, all progress goes to stderr to keep stdout clean for JSON. + var prog io.Writer = os.Stdout + if outputJSON { + prog = os.Stderr + } + + fmt.Fprintln(prog) + fmt.Fprintln(prog, "════════════════════════════════════════") + fmt.Fprintln(prog, " DevLake — Start Services") + fmt.Fprintln(prog, "════════════════════════════════════════") + + // ── Check Docker ── + fmt.Fprintln(prog, "\n🐳 Checking Docker...") + if err := dockerpkg.CheckAvailable(); err != nil { + return fmt.Errorf("Docker is not available: %w\nMake sure Docker Desktop or the Docker daemon is running", err) + } + fmt.Fprintln(prog, " āœ… Docker is running") + + // ── Find deployment directory ── + // When --state-file is provided, run docker compose from that file's directory + // so it finds the correct docker-compose.yml. 
+ cwd, _ := os.Getwd() + dir := cwd + if startState != "" { + absState, err := filepath.Abs(startState) + if err != nil { + fmt.Fprintf(prog, " āš ļø Could not resolve --state-file path: %v — using current directory\n", err) + } else { + dir = filepath.Dir(absState) + } + } + + // ── Determine services to start ── + var services []string + if startService != "" { + services = []string{startService} + } + + // ── Run docker compose up -d ── + if len(services) > 0 { + fmt.Fprintf(prog, "\n🐳 Starting service %q in %s...\n", startService, dir) + } else { + fmt.Fprintf(prog, "\n🐳 Starting containers in %s...\n", dir) + } + if err := dockerpkg.ComposeUp(dir, false, services...); err != nil { + return fmt.Errorf("failed to start containers: %w", err) + } + fmt.Fprintln(prog, " āœ… Containers starting") + + // ── Health polling ── + backendURL := "" + if !startNoWait && startService == "" { + fmt.Fprintln(prog, "\nā³ Waiting for DevLake to be ready...") + backendURLCandidates := []string{localBackendPort8080, localBackendPort8085} + var err error + backendURL, err = waitForReadyAny(backendURLCandidates, startHealthAttempts, 10*time.Second) + if err != nil { + fmt.Fprintln(prog, " āš ļø DevLake not ready after 60s — services may still be initializing") + fmt.Fprintln(prog, " Run 'gh devlake status' to check.") + } + } + + if outputJSON { + return printJSON(map[string]string{"status": "started", "mode": "local"}) + } + + fmt.Fprintln(prog) + fmt.Fprintln(prog, "════════════════════════════════════════") + fmt.Fprintln(prog, " āœ… Services Started!") + fmt.Fprintln(prog, "════════════════════════════════════════") + + // Print accurate URLs based on the healthy backend that responded. 
+ if backendURL == "" { + backendURL = localBackendPort8080 + } + grafanaURL, configUIURL := localCompanionURLs(backendURL) + fmt.Fprintf(prog, "\n Backend API: %s\n", backendURL) + if configUIURL != "" { + fmt.Fprintf(prog, " Config UI: %s\n", configUIURL) + } + if grafanaURL != "" { + fmt.Fprintf(prog, " Grafana: %s (admin/admin)\n", grafanaURL) + } + fmt.Fprintln(prog) + return nil +} + +// localCompanionURLs returns the Grafana and Config UI URLs that correspond to +// a given DevLake backend URL, matching the well-known local port pairs. +func localCompanionURLs(backendURL string) (grafanaURL, configUIURL string) { + if strings.HasPrefix(backendURL, localBackendPort8085) { + return localGrafanaPort8085, localConfigUIPort8085 + } + // Default port mapping (8080). + return localGrafanaPort8080, localConfigUIPort8080 +} + +func runAzureStart() error { + // In JSON mode, all progress goes to stderr to keep stdout clean for JSON. + var prog io.Writer = os.Stdout + if outputJSON { + prog = os.Stderr + } + + fmt.Fprintln(prog) + fmt.Fprintln(prog, "════════════════════════════════════════") + fmt.Fprintln(prog, " DevLake Azure — Start Services") + fmt.Fprintln(prog, "════════════════════════════════════════") + + stateFile := startState + if stateFile == "" { + stateFile = ".devlake-azure.json" + } + + var state azureStateData + data, err := os.ReadFile(stateFile) + if err != nil { + if os.IsNotExist(err) { + return fmt.Errorf("state file not found: %s\nUse --state-file to specify the path", stateFile) + } + return fmt.Errorf("failed to read state file %s: %w", stateFile, err) + } + if err := json.Unmarshal(data, &state); err != nil { + return fmt.Errorf("invalid state file: %w", err) + } + if state.ResourceGroup == "" { + return fmt.Errorf("state file %s has no resource group — cannot start Azure resources", stateFile) + } + + // ── Check Azure CLI login ── + fmt.Fprintln(prog, "\nšŸ”‘ Checking Azure login...") + if _, err := azurepkg.CheckLogin(); err != nil { + 
return fmt.Errorf("not logged in to Azure CLI — run 'az login' first") + } + fmt.Fprintln(prog, " āœ… Logged in") + + // ── Start MySQL ── + if state.Resources.MySQL != "" { + fmt.Fprintf(prog, "\n🐳 Starting MySQL server %q...\n", state.Resources.MySQL) + if err := azurepkg.MySQLStart(state.Resources.MySQL, state.ResourceGroup); err != nil { + fmt.Fprintf(prog, " āš ļø Could not start MySQL: %v\n", err) + } else { + fmt.Fprintln(prog, " āœ… MySQL start initiated") + } + } + + // ── Start containers ── + containers := state.Resources.Containers + if startService != "" { + var filtered []string + for _, c := range containers { + if strings.Contains(c, startService) { + filtered = append(filtered, c) + } + } + if len(filtered) == 0 { + return fmt.Errorf("no container matching %q found in state file", startService) + } + containers = filtered + } + + for _, container := range containers { + fmt.Fprintf(prog, "\nšŸ“¦ Starting container %q...\n", container) + if err := azurepkg.ContainerStart(container, state.ResourceGroup); err != nil { + fmt.Fprintf(prog, " āš ļø Could not start %s: %v\n", container, err) + } else { + fmt.Fprintln(prog, " āœ… Start initiated") + } + } + + // ── Health polling ── + if !startNoWait && state.Endpoints.Backend != "" { + fmt.Fprintln(prog, "\nā³ Waiting for DevLake to be ready...") + if err := waitForReady(state.Endpoints.Backend, startHealthAttempts, 10*time.Second); err != nil { + fmt.Fprintln(prog, " āš ļø Backend not ready after 60s — Azure containers may still be starting") + fmt.Fprintln(prog, " Run 'gh devlake status' to check.") + } + } + + if outputJSON { + return printJSON(map[string]string{"status": "started", "mode": "azure"}) + } + + fmt.Fprintln(prog) + fmt.Fprintln(prog, "════════════════════════════════════════") + fmt.Fprintln(prog, " āœ… Services Started!") + fmt.Fprintln(prog, "════════════════════════════════════════") + if state.Endpoints.Backend != "" { + fmt.Fprintf(prog, "\n Backend API: %s\n", 
state.Endpoints.Backend) + } + if state.Endpoints.ConfigUI != "" { + fmt.Fprintf(prog, " Config UI: %s\n", state.Endpoints.ConfigUI) + } + if state.Endpoints.Grafana != "" { + fmt.Fprintf(prog, " Grafana: %s\n", state.Endpoints.Grafana) + } + fmt.Fprintln(prog) + return nil +} diff --git a/cmd/start_test.go b/cmd/start_test.go new file mode 100644 index 0000000..ebedc48 --- /dev/null +++ b/cmd/start_test.go @@ -0,0 +1,256 @@ +package cmd + +import ( + "encoding/json" + "os" + "path/filepath" + "strings" + "testing" +) + +// ── detectStartMode tests ──────────────────────────────────────────────────── + +func TestDetectStartMode_ExplicitAzureFlag(t *testing.T) { + orig := startAzure + startAzure = true + t.Cleanup(func() { startAzure = orig }) + + if got := detectStartMode(); got != "azure" { + t.Errorf("expected azure, got %q", got) + } +} + +func TestDetectStartMode_ExplicitLocalFlag(t *testing.T) { + orig := startLocal + startLocal = true + t.Cleanup(func() { startLocal = orig }) + + if got := detectStartMode(); got != "local" { + t.Errorf("expected local, got %q", got) + } +} + +func TestDetectStartMode_StateFile_AzureMethod(t *testing.T) { + dir := t.TempDir() + sf := filepath.Join(dir, "mystate.json") + if err := os.WriteFile(sf, []byte(`{"method":"azure","resourceGroup":"my-rg"}`), 0644); err != nil { + t.Fatal(err) + } + + origState := startState + startState = sf + t.Cleanup(func() { startState = origState }) + + if got := detectStartMode(); got != "azure" { + t.Errorf("expected azure, got %q", got) + } +} + +func TestDetectStartMode_StateFile_LocalMethod(t *testing.T) { + dir := t.TempDir() + sf := filepath.Join(dir, "mystate.json") + if err := os.WriteFile(sf, []byte(`{"method":"local"}`), 0644); err != nil { + t.Fatal(err) + } + + origState := startState + startState = sf + t.Cleanup(func() { startState = origState }) + + if got := detectStartMode(); got != "local" { + t.Errorf("expected local, got %q", got) + } +} + +// An Azure state file with a 
non-obvious filename (no "azure" in path) should still +// be detected as azure when it contains a resourceGroup field. +func TestDetectStartMode_StateFile_ResourceGroupFallback(t *testing.T) { + dir := t.TempDir() + sf := filepath.Join(dir, "state.json") // no "azure" in name + if err := os.WriteFile(sf, []byte(`{"resourceGroup":"my-rg"}`), 0644); err != nil { + t.Fatal(err) + } + + origState := startState + startState = sf + t.Cleanup(func() { startState = origState }) + + if got := detectStartMode(); got != "azure" { + t.Errorf("expected azure from resourceGroup fallback, got %q", got) + } +} + +// A state file that can't be parsed should fall back to "local" rather than erroring. +func TestDetectStartMode_StateFile_UnparsableFallsBackToLocal(t *testing.T) { + dir := t.TempDir() + sf := filepath.Join(dir, "broken.json") + if err := os.WriteFile(sf, []byte(`not-json`), 0644); err != nil { + t.Fatal(err) + } + + origState := startState + startState = sf + t.Cleanup(func() { startState = origState }) + + if got := detectStartMode(); got != "local" { + t.Errorf("expected local fallback for unparsable state file, got %q", got) + } +} + +func TestDetectStartMode_AutoDetect_AzureJson(t *testing.T) { + dir := t.TempDir() + origWd, _ := os.Getwd() + if err := os.Chdir(dir); err != nil { + t.Fatal(err) + } + t.Cleanup(func() { os.Chdir(origWd) }) + + if err := os.WriteFile(".devlake-azure.json", []byte(`{}`), 0644); err != nil { + t.Fatal(err) + } + + if got := detectStartMode(); got != "azure" { + t.Errorf("expected azure from .devlake-azure.json, got %q", got) + } +} + +func TestDetectStartMode_AutoDetect_LocalJson(t *testing.T) { + dir := t.TempDir() + origWd, _ := os.Getwd() + if err := os.Chdir(dir); err != nil { + t.Fatal(err) + } + t.Cleanup(func() { os.Chdir(origWd) }) + + if err := os.WriteFile(".devlake-local.json", []byte(`{}`), 0644); err != nil { + t.Fatal(err) + } + + if got := detectStartMode(); got != "local" { + t.Errorf("expected local from 
.devlake-local.json, got %q", got) + } +} + +func TestDetectStartMode_AutoDetect_DockerCompose(t *testing.T) { + dir := t.TempDir() + origWd, _ := os.Getwd() + if err := os.Chdir(dir); err != nil { + t.Fatal(err) + } + t.Cleanup(func() { os.Chdir(origWd) }) + + if err := os.WriteFile("docker-compose.yml", []byte("version: '3'"), 0644); err != nil { + t.Fatal(err) + } + + if got := detectStartMode(); got != "local" { + t.Errorf("expected local from docker-compose.yml, got %q", got) + } +} + +func TestDetectStartMode_NoDeployment(t *testing.T) { + dir := t.TempDir() + origWd, _ := os.Getwd() + if err := os.Chdir(dir); err != nil { + t.Fatal(err) + } + t.Cleanup(func() { os.Chdir(origWd) }) + + if got := detectStartMode(); got != "" { + t.Errorf("expected empty string when no deployment found, got %q", got) + } +} + +// ── localCompanionURLs tests ────────────────────────────────────────────────── + +func TestLocalCompanionURLs_Port8080(t *testing.T) { + grafana, configUI := localCompanionURLs("http://localhost:8080") + if grafana != "http://localhost:3002" { + t.Errorf("unexpected grafana URL: %q", grafana) + } + if configUI != "http://localhost:4000" { + t.Errorf("unexpected configUI URL: %q", configUI) + } +} + +func TestLocalCompanionURLs_Port8085(t *testing.T) { + grafana, configUI := localCompanionURLs("http://localhost:8085") + if grafana != "http://localhost:3004" { + t.Errorf("unexpected grafana URL: %q", grafana) + } + if configUI != "http://localhost:4004" { + t.Errorf("unexpected configUI URL: %q", configUI) + } +} + +// ── JSON output tests ───────────────────────────────────────────────────────── + +// TestRunStart_JSONMode_NoDeployment verifies that when --json is set and no +// deployment exists, runStart returns an error without printing banners to stdout. 
+func TestRunStart_JSONMode_NoDeployment(t *testing.T) { + dir := t.TempDir() + origWd, _ := os.Getwd() + if err := os.Chdir(dir); err != nil { + t.Fatal(err) + } + t.Cleanup(func() { os.Chdir(origWd) }) + + origJSON := outputJSON + outputJSON = true + t.Cleanup(func() { outputJSON = origJSON }) + + origAzure := startAzure + origLocal := startLocal + origState := startState + startAzure = false + startLocal = false + startState = "" + t.Cleanup(func() { + startAzure = origAzure + startLocal = origLocal + startState = origState + }) + + var capturedErr error + stdoutOut := captureStdout(func() { + capturedErr = runStart(newStartCmd(), nil) + }) + + if capturedErr == nil { + t.Fatal("expected error when no deployment found") + } + // stdout must be empty — no banners mixed into JSON output + if strings.TrimSpace(stdoutOut) != "" { + t.Errorf("expected no stdout output in JSON mode with no deployment, got: %q", stdoutOut) + } +} + +// TestRunStart_JSONPayload_LocalShape verifies the JSON payload structure for local mode. +func TestRunStart_JSONPayload_LocalShape(t *testing.T) { + out := capturePrintJSON(t, map[string]string{"status": "started", "mode": "local"}) + var got map[string]string + if err := json.Unmarshal([]byte(strings.TrimSpace(out)), &got); err != nil { + t.Fatalf("output is not valid JSON: %v — got: %q", err, out) + } + if got["status"] != "started" { + t.Errorf("expected status=started, got %q", got["status"]) + } + if got["mode"] != "local" { + t.Errorf("expected mode=local, got %q", got["mode"]) + } +} + +// TestRunStart_JSONPayload_AzureShape verifies the JSON payload structure for azure mode. 
+func TestRunStart_JSONPayload_AzureShape(t *testing.T) { + out := capturePrintJSON(t, map[string]string{"status": "started", "mode": "azure"}) + var got map[string]string + if err := json.Unmarshal([]byte(strings.TrimSpace(out)), &got); err != nil { + t.Fatalf("output is not valid JSON: %v — got: %q", err, out) + } + if got["status"] != "started" { + t.Errorf("expected status=started, got %q", got["status"]) + } + if got["mode"] != "azure" { + t.Errorf("expected mode=azure, got %q", got["mode"]) + } +} diff --git a/cmd/status.go b/cmd/status.go index a8892ef..17204ae 100644 --- a/cmd/status.go +++ b/cmd/status.go @@ -154,12 +154,19 @@ func runStatus(cmd *cobra.Command, args []string) error { if hasServices { fmt.Println("\n Services") fmt.Println(sep) + hasUnhealthy := false for _, svc := range svcs { if svc.url == "" { continue } icon := pingEndpoint(svc.url, svc.kind) fmt.Printf(" %s %s %s\n", svc.label, icon, svc.url) + if icon == "āŒ" { + hasUnhealthy = true + } + } + if hasUnhealthy { + fmt.Println("\n šŸ’” Run 'gh devlake start' to bring services back up.") } } diff --git a/docs/configure-scope.md b/docs/configure-scope.md index 9370630..967f292 100644 --- a/docs/configure-scope.md +++ b/docs/configure-scope.md @@ -32,12 +32,12 @@ gh devlake configure scope add [flags] | Flag | Default | Description | |------|---------|-------------| -| `--plugin` | *(interactive or required)* | Plugin to configure (`github`, `gh-copilot`, `gitlab`, `bitbucket`, `azuredevops_go`, `jenkins`, `jira`, `sonarqube`, `pagerduty`) | +| `--plugin` | *(interactive or required)* | Plugin to configure (`github`, `gitlab`, `bitbucket`, `gh-copilot`, `jenkins`, `azure-devops`, `sonarqube`, `pagerduty`) | | `--connection-id` | *(auto-detected)* | Override the connection ID to scope | -| `--org` | *(required)* | GitHub organization slug | +| `--org` | *(plugin-dependent)* | Org/workspace slug (`github`, `gitlab` group path, `bitbucket` workspace, `azure-devops` org). 
Required for plugins whose connection definition needs an org (for example, Azure DevOps) or when running non-interactively; optional in interactive mode for plugins that support workspace discovery (for example, Bitbucket). | | `--enterprise` | | Enterprise slug (enables enterprise-level Copilot metrics) | -| `--repos` | | Comma-separated repos to add (`owner/repo,owner/repo2`) | -| `--repos-file` | | Path to a file with repos (one `owner/repo` per line) | +| `--repos` | | Comma-separated repos to add (`owner/repo` for GitHub, `group/project` for GitLab, `workspace/repo-slug` for Bitbucket) | +| `--repos-file` | | Path to a file with repos (one per line: `owner/repo` for GitHub, `group/project` for GitLab, `workspace/repo-slug` for Bitbucket) | | `--jobs` | | Comma-separated Jenkins job full names | | `--deployment-pattern` | `(?i)deploy` | Regex matching CI/CD workflow names for deployments | | `--production-pattern` | `(?i)prod` | Regex matching environment names for production | @@ -77,12 +77,15 @@ gh devlake configure scope add --plugin github --org my-org \ --repos my-org/api,my-org/frontend # Load repos from a file -gh devlake configure scope add --plugin github --org my-org \ - --repos-file repos.txt - -# Interactive repo selection (omit --repos) -gh devlake configure scope add --plugin github --org my-org - +gh devlake configure scope add --plugin github --org my-org \ + --repos-file repos.txt + +# Interactive repo selection (omit --repos) +gh devlake configure scope add --plugin github --org my-org + +# Bitbucket repos (interactive remote-scope picker) +gh devlake configure scope add --plugin bitbucket --org my-workspace + # Add Copilot org scope gh devlake configure scope add --plugin gh-copilot --org my-org @@ -101,11 +104,17 @@ gh devlake configure scope add ### What It Does (GitHub) -1. Resolves repos from `--repos`, `--repos-file`, or interactive selection -2. Fetches repo details via `gh api repos//` -3. 
Creates or reuses a DORA scope config (deployment/production patterns, incident label) -4. Calls `PUT /plugins/github/connections/{id}/scopes` to add repos - +1. Resolves repos from `--repos`, `--repos-file`, or interactive selection +2. Fetches repo details via `gh api repos//` +3. Creates or reuses a DORA scope config (deployment/production patterns, incident label) +4. Calls `PUT /plugins/github/connections/{id}/scopes` to add repos + +### What It Does (Bitbucket) + +1. Resolves workspaces and repos via the DevLake remote-scope API (interactive picker when `--repos` is omitted) +2. Accepts repo slugs from `--repos` / `--repos-file` (`workspace/repo-slug`) +3. Calls `PUT /plugins/bitbucket/connections/{id}/scopes` with `bitbucketId` = `workspace/repo-slug` + ### What It Does (Copilot) 1. Computes scope ID from org + enterprise: `enterprise/org`, `enterprise`, or `org` @@ -124,10 +133,10 @@ gh devlake configure scope add 3. Calls `PUT /plugins/pagerduty/connections/{id}/scopes` with the selected services --- - -## configure scope list - -List all scopes configured on a DevLake plugin connection. + +## configure scope list + +List all scopes configured on a DevLake plugin connection. ### Usage @@ -139,7 +148,7 @@ gh devlake configure scope list [--plugin ] [--connection-id ] | Flag | Default | Description | |------|---------|-------------| -| `--plugin` | *(interactive)* | Plugin to query (`github`, `gh-copilot`, `gitlab`, `bitbucket`, `azuredevops_go`, `jenkins`, `jira`, `sonarqube`, `pagerduty`) | +| `--plugin` | *(interactive)* | Plugin to query (`github`, `gitlab`, `bitbucket`, `gh-copilot`, `jenkins`, `azure-devops`, `sonarqube`, `pagerduty`) | | `--connection-id` | *(interactive)* | Connection ID to list scopes for | **Flag mode:** both `--plugin` and `--connection-id` are required. 
@@ -187,10 +196,10 @@ gh devlake configure scope delete [--plugin ] [--connection-id ] [-- | Flag | Default | Description | |------|---------|-------------| -| `--plugin` | *(interactive)* | Plugin of the connection (`github`, `gh-copilot`, `gitlab`, `bitbucket`, `azuredevops_go`, `jenkins`, `jira`, `sonarqube`, `pagerduty`) | +| `--plugin` | *(interactive)* | Plugin of the connection (`github`, `gitlab`, `bitbucket`, `gh-copilot`, `jenkins`, `azure-devops`, `sonarqube`, `pagerduty`) | | `--connection-id` | *(interactive)* | Connection ID | | `--scope-id` | *(interactive)* | Scope ID to delete | -| `--force` | `false` | Skip confirmation prompt | +| `--force` | `false` | Skip confirmation prompt | **Flag mode:** all three flags are required. diff --git a/docs/day-2.md b/docs/day-2.md index 920cf7f..78a0b7e 100644 --- a/docs/day-2.md +++ b/docs/day-2.md @@ -10,6 +10,24 @@ gh devlake status Shows deployment info, service health (Backend / Grafana / Config UI), active connections, and project configuration. See [status.md](status.md) for full output reference. +## Restarting Services + +If services are stopped, crashed, or exited (e.g. after a machine reboot): + +```bash +gh devlake start +``` + +Runs `docker compose up -d` for local deployments, or starts stopped Azure Container Instances. See [start.md](start.md) for all flags. + +```bash +# Start only a specific service +gh devlake start --service config-ui + +# Start without waiting for health check +gh devlake start --no-wait +``` + ## Managing Connections ### List connections @@ -80,6 +98,7 @@ Deletes the Azure resource group and all resources within it. 
See [cleanup.md](c ## Related - [status.md](status.md) — full output reference +- [start.md](start.md) — restart stopped services - [configure-connection.md](configure-connection.md) — connection CRUD - [configure-scope.md](configure-scope.md) — scope management - [cleanup.md](cleanup.md) — tear down diff --git a/docs/start.md b/docs/start.md new file mode 100644 index 0000000..80d3d94 --- /dev/null +++ b/docs/start.md @@ -0,0 +1,114 @@ +# start + +Brings up stopped or exited DevLake services for an existing deployment. + +## Usage + +```bash +gh devlake start [flags] +``` + +Auto-detects deployment type from state files in the current directory. + +## Flags + +| Flag | Default | Description | +|------|---------|-------------| +| `--service <name>` | *(all)* | Start only a specific service (e.g., `config-ui`) | +| `--no-wait` | `false` | Skip health polling after start | +| `--local` | `false` | Force local (Docker Compose) start mode | +| `--azure` | `false` | Force Azure start mode | +| `--state-file <path>` | *(auto-detected)* | Path to state file | + +## Auto-Detection + +Without `--local` or `--azure`, the command checks: +1. `--state-file` path (if provided) +2. `.devlake-azure.json` → Azure mode +3. `.devlake-local.json` → Local mode +4. `docker-compose.yml` in current directory → Local mode + +If no deployment is detected, an error is returned — use `gh devlake deploy` to create a new deployment. + +## Local Deployments (Docker Compose) + +Runs `docker compose up -d` from the current directory. This is idempotent: +- Running containers are unaffected +- Stopped containers are started +- Crashed or exited containers are restarted + +```bash +gh devlake start +gh devlake start --service config-ui +gh devlake start --no-wait +``` + +What it does: +1. Checks Docker availability +2. Runs `docker compose up -d` (with optional service filter) +3. Polls the backend `/ping` endpoint until healthy (up to 60s) +4. 
Prints service URLs + +> **Shorter health timeout:** `start` uses a 60-second health timeout (vs 6 minutes for `deploy`) because databases and volumes are already initialized. + +## Azure Deployments (Container Instances) + +Reads container names and resource group from `.devlake-azure.json` and starts any stopped resources. + +```bash +gh devlake start --azure +gh devlake start --azure --service backend +``` + +What it does: +1. Reads resource group and container names from `.devlake-azure.json` +2. Checks Azure CLI login +3. Starts the MySQL flexible server (if present) +4. Starts each Container Instance via `az container start` +5. Polls the backend endpoint until healthy (up to 60s) +6. Prints endpoints + +## JSON Output + +```bash +gh devlake start --json +``` + +Returns: +```json +{"status": "started", "mode": "local"} +``` + +## Examples + +```bash +# Auto-detect and start all services +gh devlake start + +# Start only config-ui (local) +gh devlake start --service config-ui + +# Start without waiting for health check +gh devlake start --no-wait + +# Force Azure mode +gh devlake start --azure + +# Use a specific state file +gh devlake start --state-file /path/to/.devlake-azure.json +``` + +## Motivating Scenario + +After a machine reboot, `gh devlake status` shows `āŒ` for one or more services. 
Instead of manually finding the docker-compose directory and running raw Docker commands, run: + +```bash +gh devlake start +``` + +## Related + +- [status.md](status.md) — check service health +- [cleanup.md](cleanup.md) — tear down all resources +- [deploy.md](deploy.md) — initial deployment +- [day-2.md](day-2.md) — day-2 operations overview diff --git a/internal/azure/cli.go b/internal/azure/cli.go index d80b21b..4370f25 100644 --- a/internal/azure/cli.go +++ b/internal/azure/cli.go @@ -126,6 +126,11 @@ func MySQLStart(name, resourceGroup string) error { return runAz("mysql", "flexible-server", "start", "--name", name, "--resource-group", resourceGroup, "--output", "none") } +// ContainerStart starts a stopped Azure Container Instance. +func ContainerStart(name, resourceGroup string) error { + return runAz("container", "start", "--name", name, "--resource-group", resourceGroup) +} + // MySQLState returns the current state of a MySQL flexible server. func MySQLState(name, resourceGroup string) (string, error) { out, err := exec.Command("az", "mysql", "flexible-server", "show", diff --git a/internal/devlake/types.go b/internal/devlake/types.go index 55fb596..3d5b19e 100644 --- a/internal/devlake/types.go +++ b/internal/devlake/types.go @@ -96,6 +96,12 @@ type PagerDutyServiceScope struct { URL string `json:"url,omitempty"` } +// ArgoCDAppScope represents an ArgoCD application scope entry for PUT /scopes. +type ArgoCDAppScope struct { + ConnectionID int `json:"connectionId"` + Name string `json:"name"` +} + // ScopeBatchRequest is the payload for PUT /scopes (batch upsert). type ScopeBatchRequest struct { Data []any `json:"data"`