diff --git a/README.md b/README.md index 7c9eac4..0746426 100644 --- a/README.md +++ b/README.md @@ -24,6 +24,12 @@ brew tap disk0Dancer/tap && brew install climate Or `go install github.com/disk0Dancer/climate/cmd/climate@latest`. +Enable local shell completion: + +```bash +climate completion install --shell zsh +``` + ## How it works One command turns an OpenAPI 3.x spec into a compiled Go binary with auth, JSON output, and structured errors. @@ -33,6 +39,18 @@ climate generate --name myapi https://api.example.com/openapi.json myapi [flags] --output=json|table|raw ``` +Generated CLIs also ship with spec-aware event commands: + +```bash +myapi events list +myapi config profiles create work +myapi config profiles use work +myapi auth login +myapi config set --secret events.signing_secret supersecret +myapi events listen payment-succeeded --port 8081 --tunnel auto --signature-mode hmac +myapi events emit payment-succeeded --target-url http://localhost:8081/webhooks/payment-succeeded --signature-mode hmac +``` + ## Agent skill An agent with climate can build its own tools. 
Point it at any OpenAPI spec — @@ -61,12 +79,36 @@ Demo: [disk0Dancer/github](https://github.com/disk0Dancer/github) — 1 100+ end | `generate` | Create CLI from OpenAPI spec | | `compose` | Merge multiple specs (with prefixes) into one facade CLI | | `mock` | Run local mock HTTP server from OpenAPI spec | +| `completion` | Print shell completions or install/uninstall them locally | | `list` | Show registered CLIs | -| `remove` | Delete a generated CLI | +| `remove` | Interactively delete a generated CLI | +| `uninstall` | Remove the climate CLI itself, optionally with full cleanup | | `upgrade` | Regenerate from updated spec | | `publish` | Push CLI to GitHub with CI/auto-fix/release | | `skill generate` | Emit agent skill prompt | +## Shell completion + +```bash +# print a completion script +climate completion zsh + +# install it into your local shell setup +climate completion install --shell zsh + +# remove climate-managed completion wiring later +climate completion uninstall --shell zsh + +# remove one generated CLI with confirmation +climate remove petstore + +# uninstall only the climate executable +climate uninstall + +# uninstall climate plus generated CLIs, manifest, and completions +climate uninstall --full +``` + ## Docs - [Site](https://disk0dancer.github.io/climate/) @@ -74,6 +116,9 @@ Demo: [disk0Dancer/github](https://github.com/disk0Dancer/github) — 1 100+ end - [Compose design](docs/design-compose.md) - [CI auto-fix design](docs/design-ci-autofix.md) - [Mock design](docs/design-mock.md) +- [Generated event listener design](docs/design-generated-events.md) +- [Shell completion design](docs/design-shell-completions.md) +- [Uninstall design](docs/design-uninstall.md) - [OpenAPI 3.0 support matrix](docs/openapi-3-support-matrix.md) ## Development diff --git a/cmd/climate/commands/completion.go b/cmd/climate/commands/completion.go new file mode 100644 index 0000000..a2574f4 --- /dev/null +++ b/cmd/climate/commands/completion.go @@ -0,0 +1,124 @@ +package 
commands + +import ( + "fmt" + "os" + "runtime" + + cliCompletion "github.com/disk0Dancer/climate/internal/completion" + "github.com/spf13/cobra" +) + +var ( + completionInstallShell string + completionUninstallShell string +) + +var completionCmd = &cobra.Command{ + Use: "completion", + Short: "Generate and manage shell completions", + Long: `Generate shell completion scripts for climate or install them into your local shell setup. + +Examples: + climate completion zsh + climate completion install --shell zsh + climate completion uninstall --shell zsh`, +} + +var completionInstallCmd = &cobra.Command{ + Use: "install", + Short: "Install shell completions into the local shell config", + RunE: func(cmd *cobra.Command, args []string) error { + home, err := os.UserHomeDir() + if err != nil { + exitError("Failed to find home directory", err) + } + + shell, err := cliCompletion.ResolveShell(completionInstallShell, os.Getenv("SHELL"), runtime.GOOS) + if err != nil { + exitError("Failed to determine shell", err) + } + + result, err := cliCompletion.Install(home, shell, runtime.GOOS, func(w cliCompletion.Writer) error { + return generateCompletionScript(cmd.Root(), shell, w) + }) + if err != nil { + exitError("Failed to install shell completions", err) + } + + writeJSON(result) + return nil + }, +} + +var completionUninstallCmd = &cobra.Command{ + Use: "uninstall", + Short: "Remove climate-managed shell completions", + RunE: func(cmd *cobra.Command, args []string) error { + home, err := os.UserHomeDir() + if err != nil { + exitError("Failed to find home directory", err) + } + + shell, err := cliCompletion.ResolveShell(completionUninstallShell, os.Getenv("SHELL"), runtime.GOOS) + if err != nil { + exitError("Failed to determine shell", err) + } + + result, err := cliCompletion.Uninstall(home, shell, runtime.GOOS) + if err != nil { + exitError("Failed to uninstall shell completions", err) + } + + writeJSON(result) + return nil + }, +} + +func newCompletionScriptCmd(shell 
cliCompletion.Shell) *cobra.Command { + return &cobra.Command{ + Use: string(shell), + Short: fmt.Sprintf("Print the %s completion script", shell), + RunE: func(cmd *cobra.Command, args []string) error { + if err := generateCompletionScript(cmd.Root(), shell, cmd.OutOrStdout()); err != nil { + exitError("Failed to generate completion script", err) + } + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "Tip: run `climate completion install --shell %s` to wire this into your local shell config.\n", shell) + return nil + }, + } +} + +func generateCompletionScript(root *cobra.Command, shell cliCompletion.Shell, out cliCompletion.Writer) error { + switch shell { + case cliCompletion.ShellBash: + return root.GenBashCompletionV2(out, true) + case cliCompletion.ShellZsh: + return root.GenZshCompletion(out) + case cliCompletion.ShellFish: + return root.GenFishCompletion(out, true) + case cliCompletion.ShellPowerShell: + return root.GenPowerShellCompletionWithDesc(out) + default: + return fmt.Errorf("unsupported shell %q", shell) + } +} + +func completeSupportedShells(_ *cobra.Command, _ []string, _ string) ([]string, cobra.ShellCompDirective) { + return cliCompletion.SupportedShellNames(), cobra.ShellCompDirectiveNoFileComp +} + +func init() { + completionInstallCmd.Flags().StringVar(&completionInstallShell, "shell", "", "Shell to install completions for") + completionUninstallCmd.Flags().StringVar(&completionUninstallShell, "shell", "", "Shell to uninstall completions for") + _ = completionInstallCmd.RegisterFlagCompletionFunc("shell", completeSupportedShells) + _ = completionUninstallCmd.RegisterFlagCompletionFunc("shell", completeSupportedShells) + + completionCmd.AddCommand(newCompletionScriptCmd(cliCompletion.ShellBash)) + completionCmd.AddCommand(newCompletionScriptCmd(cliCompletion.ShellZsh)) + completionCmd.AddCommand(newCompletionScriptCmd(cliCompletion.ShellFish)) + completionCmd.AddCommand(newCompletionScriptCmd(cliCompletion.ShellPowerShell)) + 
completionCmd.AddCommand(completionInstallCmd) + completionCmd.AddCommand(completionUninstallCmd) + rootCmd.AddCommand(completionCmd) +} diff --git a/cmd/climate/commands/completion_test.go b/cmd/climate/commands/completion_test.go new file mode 100644 index 0000000..89519f6 --- /dev/null +++ b/cmd/climate/commands/completion_test.go @@ -0,0 +1,105 @@ +package commands + +import ( + "bytes" + "encoding/json" + "os" + "path/filepath" + "strings" + "testing" +) + +func TestCompletionZshWritesScriptAndTip(t *testing.T) { + completionInstallShell = "" + completionUninstallShell = "" + + var stdout bytes.Buffer + var stderr bytes.Buffer + rootCmd.SetOut(&stdout) + rootCmd.SetErr(&stderr) + rootCmd.SetArgs([]string{"completion", "zsh"}) + + if err := rootCmd.Execute(); err != nil { + t.Fatalf("Execute() error = %v", err) + } + + if stdout.Len() == 0 { + t.Fatal("completion script should be written to stdout") + } + if !strings.Contains(stdout.String(), "climate") { + t.Fatal("completion output should mention the climate command") + } + if !strings.Contains(stderr.String(), "completion install --shell zsh") { + t.Fatal("stderr should include install tip") + } +} + +func TestCompletionInstallAndUninstall(t *testing.T) { + completionInstallShell = "" + completionUninstallShell = "" + + home := t.TempDir() + t.Setenv("HOME", home) + + var stdout bytes.Buffer + var stderr bytes.Buffer + rootCmd.SetOut(&stdout) + rootCmd.SetErr(&stderr) + rootCmd.SetArgs([]string{"completion", "install", "--shell", "zsh"}) + + rawInstall := captureStdout(t, func() { + if err := rootCmd.Execute(); err != nil { + t.Fatalf("install Execute() error = %v", err) + } + }) + + var installResp struct { + Shell string `json:"shell"` + ScriptPath string `json:"script_path"` + ConfigPath string `json:"config_path"` + } + if err := json.Unmarshal([]byte(rawInstall), &installResp); err != nil { + t.Fatalf("unmarshal install response: %v", err) + } + if installResp.Shell != "zsh" { + t.Fatalf("Shell = %q, 
want zsh", installResp.Shell) + } + if _, err := os.Stat(installResp.ScriptPath); err != nil { + t.Fatalf("installed script missing: %v", err) + } + configBytes, err := os.ReadFile(filepath.Join(home, ".zshrc")) + if err != nil { + t.Fatalf("reading .zshrc: %v", err) + } + if !strings.Contains(string(configBytes), installResp.ScriptPath) { + t.Fatal(".zshrc should source installed completion script") + } + + stdout.Reset() + stderr.Reset() + rootCmd.SetArgs([]string{"completion", "uninstall", "--shell", "zsh"}) + + rawUninstall := captureStdout(t, func() { + if err := rootCmd.Execute(); err != nil { + t.Fatalf("uninstall Execute() error = %v", err) + } + }) + + var uninstallResp struct { + ScriptRemoved bool `json:"script_removed"` + } + if err := json.Unmarshal([]byte(rawUninstall), &uninstallResp); err != nil { + t.Fatalf("unmarshal uninstall response: %v", err) + } + if !uninstallResp.ScriptRemoved { + t.Fatal("script_removed should be true after uninstall") + } + + configBytes, err = os.ReadFile(filepath.Join(home, ".zshrc")) + if err != nil { + t.Fatalf("reading .zshrc after uninstall: %v", err) + } + if strings.Contains(string(configBytes), "climate completion") { + t.Fatal(".zshrc should not contain climate-managed completion block after uninstall") + } +} diff --git a/cmd/climate/commands/remove.go b/cmd/climate/commands/remove.go index 1323be9..e03fbb6 100644 --- a/cmd/climate/commands/remove.go +++ b/cmd/climate/commands/remove.go @@ -2,18 +2,22 @@ package commands import ( "fmt" - "os" + "github.com/disk0Dancer/climate/internal/confirm" "github.com/disk0Dancer/climate/internal/manifest" + cliUninstall "github.com/disk0Dancer/climate/internal/uninstall" "github.com/spf13/cobra" ) -var removePurgeSources bool +var ( + removePurgeSources bool + removeYes bool +) var removeCmd = &cobra.Command{ Use: "remove ", Short: "Remove a generated CLI", - Long: `Remove a CLI binary and its manifest entry. 
Use --purge-sources to also delete source files.`, + Long: `Remove a CLI binary and its manifest entry. Use --purge-sources to also delete source files. The command asks for confirmation unless --yes is set.`, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { cliName := args[0] @@ -28,34 +32,52 @@ var removeCmd = &cobra.Command{ exitError(fmt.Sprintf("CLI %q not found in manifest", cliName), nil) } - // Remove binary - if entry.BinaryPath != "" { - if rmErr := os.Remove(entry.BinaryPath); rmErr != nil && !os.IsNotExist(rmErr) { - fmt.Fprintf(cmd.ErrOrStderr(), "Warning: could not remove binary %s: %v\n", entry.BinaryPath, rmErr) + if !removeYes { + confirmed, confirmErr := confirm.Ask(cmd.InOrStdin(), cmd.ErrOrStderr(), removePrompt(entry.Name, removePurgeSources)) + if confirmErr != nil { + exitError("Failed to read confirmation", confirmErr) } - } - - // Optionally remove sources - if removePurgeSources && entry.SourceDir != "" { - if rmErr := os.RemoveAll(entry.SourceDir); rmErr != nil { - fmt.Fprintf(cmd.ErrOrStderr(), "Warning: could not remove source dir %s: %v\n", entry.SourceDir, rmErr) + if !confirmed { + type cancelResp struct { + Cancelled bool `json:"cancelled"` + Target string `json:"target"` + } + writeJSON(cancelResp{Cancelled: true, Target: cliName}) + return nil } } + removed := cliUninstall.RemoveGeneratedCLI(entry, removePurgeSources) mf.Remove(cliName) if saveErr := mf.Save(); saveErr != nil { exitError("Failed to save manifest", saveErr) } type removeResp struct { - Removed string `json:"removed"` + Removed string `json:"removed"` + BinaryRemoved bool `json:"binary_removed"` + SourceRemoved bool `json:"source_removed,omitempty"` + Warnings []string `json:"warnings,omitempty"` } - writeJSON(removeResp{Removed: cliName}) + writeJSON(removeResp{ + Removed: cliName, + BinaryRemoved: removed.BinaryRemoved, + SourceRemoved: removed.SourceRemoved, + Warnings: removed.Warnings, + }) return nil }, } +func 
removePrompt(cliName string, purgeSources bool) string { + if purgeSources { + return fmt.Sprintf("Remove generated CLI %q and delete its source directory?", cliName) + } + return fmt.Sprintf("Remove generated CLI %q?", cliName) +} + func init() { removeCmd.Flags().BoolVar(&removePurgeSources, "purge-sources", false, "Also delete generated source files") + removeCmd.Flags().BoolVar(&removeYes, "yes", false, "Skip the confirmation prompt") rootCmd.AddCommand(removeCmd) } diff --git a/cmd/climate/commands/remove_test.go b/cmd/climate/commands/remove_test.go new file mode 100644 index 0000000..5e570de --- /dev/null +++ b/cmd/climate/commands/remove_test.go @@ -0,0 +1,123 @@ +package commands + +import ( + "bytes" + "encoding/json" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/disk0Dancer/climate/internal/manifest" +) + +func TestRemoveCancelsWithoutConfirmation(t *testing.T) { + removePurgeSources = false + removeYes = false + + home := t.TempDir() + t.Setenv("HOME", home) + + binaryPath := filepath.Join(home, ".climate", "bin", "petstore") + if err := os.MkdirAll(filepath.Dir(binaryPath), 0o755); err != nil { + t.Fatalf("mkdir bin dir: %v", err) + } + if err := os.WriteFile(binaryPath, []byte("bin"), 0o755); err != nil { + t.Fatalf("write binary: %v", err) + } + + mf, err := manifest.LoadFrom(filepath.Join(home, ".climate", "manifest.json")) + if err != nil { + t.Fatalf("LoadFrom() error = %v", err) + } + mf.Upsert(manifest.CLIEntry{Name: "petstore", BinaryPath: binaryPath}) + if err := mf.Save(); err != nil { + t.Fatalf("Save() error = %v", err) + } + + var stderr bytes.Buffer + rootCmd.SetErr(&stderr) + rootCmd.SetIn(strings.NewReader("n\n")) + rootCmd.SetArgs([]string{"remove", "petstore"}) + + raw := captureStdout(t, func() { + if err := rootCmd.Execute(); err != nil { + t.Fatalf("Execute() error = %v", err) + } + }) + + var resp struct { + Cancelled bool `json:"cancelled"` + Target string `json:"target"` + } + if err := 
json.Unmarshal([]byte(raw), &resp); err != nil { + t.Fatalf("unmarshal response: %v", err) + } + if !resp.Cancelled { + t.Fatal("expected cancellation response") + } + if _, err := os.Stat(binaryPath); err != nil { + t.Fatalf("binary should remain after cancellation: %v", err) + } +} + +func TestRemoveDeletesGeneratedCLIWithConfirmation(t *testing.T) { + removePurgeSources = true + removeYes = false + + home := t.TempDir() + t.Setenv("HOME", home) + + binaryPath := filepath.Join(home, ".climate", "bin", "petstore") + sourceDir := filepath.Join(home, "src", "petstore") + if err := os.MkdirAll(filepath.Dir(binaryPath), 0o755); err != nil { + t.Fatalf("mkdir bin dir: %v", err) + } + if err := os.WriteFile(binaryPath, []byte("bin"), 0o755); err != nil { + t.Fatalf("write binary: %v", err) + } + if err := os.MkdirAll(sourceDir, 0o755); err != nil { + t.Fatalf("mkdir source dir: %v", err) + } + + mf, err := manifest.LoadFrom(filepath.Join(home, ".climate", "manifest.json")) + if err != nil { + t.Fatalf("LoadFrom() error = %v", err) + } + mf.Upsert(manifest.CLIEntry{Name: "petstore", BinaryPath: binaryPath, SourceDir: sourceDir}) + if err := mf.Save(); err != nil { + t.Fatalf("Save() error = %v", err) + } + + var stderr bytes.Buffer + rootCmd.SetErr(&stderr) + rootCmd.SetIn(strings.NewReader("y\n")) + rootCmd.SetArgs([]string{"remove", "--purge-sources", "petstore"}) + + raw := captureStdout(t, func() { + if err := rootCmd.Execute(); err != nil { + t.Fatalf("Execute() error = %v", err) + } + }) + + var resp struct { + Removed string `json:"removed"` + BinaryRemoved bool `json:"binary_removed"` + SourceRemoved bool `json:"source_removed"` + } + if err := json.Unmarshal([]byte(raw), &resp); err != nil { + t.Fatalf("unmarshal response: %v", err) + } + if resp.Removed != "petstore" { + t.Fatalf("Removed = %q, want petstore", resp.Removed) + } + if !resp.BinaryRemoved || !resp.SourceRemoved { + t.Fatal("expected binary and source removal") + } + if _, err := 
os.Stat(binaryPath); !os.IsNotExist(err) { + t.Fatalf("binary should be removed, stat err = %v", err) + } + if _, err := os.Stat(sourceDir); !os.IsNotExist(err) { + t.Fatalf("source dir should be removed, stat err = %v", err) + } +} diff --git a/cmd/climate/commands/root.go b/cmd/climate/commands/root.go index b71d5a0..c643938 100644 --- a/cmd/climate/commands/root.go +++ b/cmd/climate/commands/root.go @@ -5,18 +5,20 @@ import ( "encoding/json" "fmt" "os" + "runtime/debug" "github.com/spf13/cobra" ) // version is set at build time via -ldflags "-X github.com/disk0Dancer/climate/cmd/climate/commands.version=vX.Y.Z" var version = "dev" +var readBuildInfo = debug.ReadBuildInfo var rootCmd = &cobra.Command{ Use: "climate", Short: "climate — CLI Tool Orchestrator", Long: `climate generates production-ready Go CLIs from OpenAPI specifications.`, - Version: version, + Version: resolvedVersion(), } // Execute runs the root command. @@ -24,6 +26,43 @@ func Execute() error { return rootCmd.Execute() } +func resolvedVersion() string { + if version != "" && version != "dev" { + return version + } + + info, ok := readBuildInfo() + if !ok { + return "dev" + } + + if info.Main.Version != "" && info.Main.Version != "(devel)" { + return info.Main.Version + } + + var revision string + var dirty bool + for _, setting := range info.Settings { + switch setting.Key { + case "vcs.revision": + revision = setting.Value + case "vcs.modified": + dirty = setting.Value == "true" + } + } + + if revision == "" { + return "dev" + } + if len(revision) > 12 { + revision = revision[:12] + } + if dirty { + return "dev+" + revision + "-dirty" + } + return "dev+" + revision +} + // writeJSON prints v as indented JSON to stdout. 
func writeJSON(v interface{}) { enc := json.NewEncoder(os.Stdout) diff --git a/cmd/climate/commands/root_test.go b/cmd/climate/commands/root_test.go new file mode 100644 index 0000000..68609ec --- /dev/null +++ b/cmd/climate/commands/root_test.go @@ -0,0 +1,87 @@ +package commands + +import ( + "runtime/debug" + "testing" +) + +func TestResolvedVersionPrefersLdflagsValue(t *testing.T) { + originalVersion := version + originalReadBuildInfo := readBuildInfo + defer func() { + version = originalVersion + readBuildInfo = originalReadBuildInfo + }() + + version = "v1.2.3" + readBuildInfo = func() (*debug.BuildInfo, bool) { + t.Fatal("readBuildInfo should not be called when explicit version is set") + return nil, false + } + + if got := resolvedVersion(); got != "v1.2.3" { + t.Fatalf("resolvedVersion() = %q, want %q", got, "v1.2.3") + } +} + +func TestResolvedVersionUsesModuleVersionWhenAvailable(t *testing.T) { + originalVersion := version + originalReadBuildInfo := readBuildInfo + defer func() { + version = originalVersion + readBuildInfo = originalReadBuildInfo + }() + + version = "dev" + readBuildInfo = func() (*debug.BuildInfo, bool) { + return &debug.BuildInfo{ + Main: debug.Module{Version: "v9.9.9"}, + }, true + } + + if got := resolvedVersion(); got != "v9.9.9" { + t.Fatalf("resolvedVersion() = %q, want %q", got, "v9.9.9") + } +} + +func TestResolvedVersionFallsBackToVCSRevision(t *testing.T) { + originalVersion := version + originalReadBuildInfo := readBuildInfo + defer func() { + version = originalVersion + readBuildInfo = originalReadBuildInfo + }() + + version = "dev" + readBuildInfo = func() (*debug.BuildInfo, bool) { + return &debug.BuildInfo{ + Main: debug.Module{Version: "(devel)"}, + Settings: []debug.BuildSetting{ + {Key: "vcs.revision", Value: "1234567890abcdef"}, + {Key: "vcs.modified", Value: "true"}, + }, + }, true + } + + if got := resolvedVersion(); got != "dev+1234567890ab-dirty" { + t.Fatalf("resolvedVersion() = %q, want %q", got, 
"dev+1234567890ab-dirty") + } +} + +func TestResolvedVersionFallsBackToDevWhenNoBuildInfo(t *testing.T) { + originalVersion := version + originalReadBuildInfo := readBuildInfo + defer func() { + version = originalVersion + readBuildInfo = originalReadBuildInfo + }() + + version = "dev" + readBuildInfo = func() (*debug.BuildInfo, bool) { + return nil, false + } + + if got := resolvedVersion(); got != "dev" { + t.Fatalf("resolvedVersion() = %q, want %q", got, "dev") + } +} diff --git a/cmd/climate/commands/test_helpers_test.go b/cmd/climate/commands/test_helpers_test.go new file mode 100644 index 0000000..ec03116 --- /dev/null +++ b/cmd/climate/commands/test_helpers_test.go @@ -0,0 +1,33 @@ +package commands + +import ( + "io" + "os" + "testing" +) + +func captureStdout(t *testing.T, fn func()) string { + t.Helper() + + original := os.Stdout + reader, writer, err := os.Pipe() + if err != nil { + t.Fatalf("os.Pipe() error = %v", err) + } + os.Stdout = writer + defer func() { + os.Stdout = original + }() + + fn() + + if err := writer.Close(); err != nil { + t.Fatalf("writer.Close() error = %v", err) + } + + data, err := io.ReadAll(reader) + if err != nil { + t.Fatalf("io.ReadAll() error = %v", err) + } + return string(data) +} diff --git a/cmd/climate/commands/uninstall.go b/cmd/climate/commands/uninstall.go new file mode 100644 index 0000000..e3442ec --- /dev/null +++ b/cmd/climate/commands/uninstall.go @@ -0,0 +1,118 @@ +package commands + +import ( + "fmt" + "io" + "os" + "os/exec" + "path/filepath" + "runtime" + + "github.com/disk0Dancer/climate/internal/confirm" + "github.com/disk0Dancer/climate/internal/manifest" + cliUninstall "github.com/disk0Dancer/climate/internal/uninstall" + "github.com/spf13/cobra" +) + +var ( + uninstallFull bool + uninstallYes bool +) + +var ( + uninstallExecutablePath = os.Executable + uninstallEvalSymlinks = filepath.EvalSymlinks + uninstallCommandRunner = runExternalCommand +) + +var uninstallCmd = &cobra.Command{ + Use: "uninstall", 
+ Short: "Uninstall the climate CLI", + Long: `Uninstall the climate executable itself. + +By default only the climate CLI is removed. Use --full to also remove +generated CLIs, the manifest, and climate-managed shell completions. + +The command asks for confirmation unless --yes is set.`, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + home, err := os.UserHomeDir() + if err != nil { + exitError("Failed to find home directory", err) + } + + executablePath, err := uninstallExecutablePath() + if err != nil { + exitError("Failed to resolve climate executable path", err) + } + + method, _, err := cliUninstall.DetectInstallMethod(home, executablePath, uninstallEvalSymlinks) + if err != nil { + exitError("Failed to detect installation method", err) + } + + if !uninstallYes { + confirmed, confirmErr := confirm.Ask(cmd.InOrStdin(), cmd.ErrOrStderr(), uninstallPrompt(home, method, uninstallFull)) + if confirmErr != nil { + exitError("Failed to read confirmation", confirmErr) + } + if !confirmed { + type cancelResp struct { + Cancelled bool `json:"cancelled"` + Target string `json:"target"` + Mode string `json:"mode"` + } + mode := "cli" + if uninstallFull { + mode = "full" + } + writeJSON(cancelResp{Cancelled: true, Target: "climate", Mode: mode}) + return nil + } + } + + result, err := cliUninstall.Self(cliUninstall.Options{ + Home: home, + GOOS: runtime.GOOS, + ExecutablePath: executablePath, + Full: uninstallFull, + EvalSymlinks: uninstallEvalSymlinks, + RunCommand: func(name string, args ...string) error { + return uninstallCommandRunner(cmd.ErrOrStderr(), cmd.ErrOrStderr(), name, args...) + }, + }) + if err != nil { + exitError("Failed to uninstall climate", err) + } + + writeJSON(result) + return nil + }, +} + +func uninstallPrompt(home string, method cliUninstall.InstallMethod, full bool) string { + if !full { + return fmt.Sprintf("Uninstall climate (%s)? 
This removes only the climate executable.", method) + } + + count := 0 + mf, err := manifest.LoadFrom(filepath.Join(home, ".climate", "manifest.json")) + if err == nil { + count = len(mf.List()) + } + + return fmt.Sprintf("Fully uninstall climate (%s)? This removes the climate executable, %d generated CLI(s), their source directories, the manifest, and climate-managed shell completions.", method, count) +} + +func runExternalCommand(stdout, stderr io.Writer, name string, args ...string) error { + command := exec.Command(name, args...) + command.Stdout = stdout + command.Stderr = stderr + return command.Run() +} + +func init() { + uninstallCmd.Flags().BoolVar(&uninstallFull, "full", false, "Also remove generated CLIs, manifest, and climate-managed local state") + uninstallCmd.Flags().BoolVar(&uninstallYes, "yes", false, "Skip the confirmation prompt") + rootCmd.AddCommand(uninstallCmd) +} diff --git a/cmd/climate/commands/uninstall_test.go b/cmd/climate/commands/uninstall_test.go new file mode 100644 index 0000000..1c76436 --- /dev/null +++ b/cmd/climate/commands/uninstall_test.go @@ -0,0 +1,109 @@ +package commands + +import ( + "bytes" + "encoding/json" + "io" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/disk0Dancer/climate/internal/completion" + "github.com/disk0Dancer/climate/internal/manifest" +) + +func TestUninstallFullStandalone(t *testing.T) { + uninstallFull = false + uninstallYes = false + + originalExecutablePath := uninstallExecutablePath + originalEvalSymlinks := uninstallEvalSymlinks + originalRunner := uninstallCommandRunner + defer func() { + uninstallExecutablePath = originalExecutablePath + uninstallEvalSymlinks = originalEvalSymlinks + uninstallCommandRunner = originalRunner + }() + + home := t.TempDir() + t.Setenv("HOME", home) + + executable := filepath.Join(home, "bin", "climate") + if err := os.MkdirAll(filepath.Dir(executable), 0o755); err != nil { + t.Fatalf("mkdir executable dir: %v", err) + } + if err := 
os.WriteFile(executable, []byte("binary"), 0o755); err != nil { + t.Fatalf("write executable: %v", err) + } + + generatedBinary := filepath.Join(home, ".climate", "bin", "petstore") + generatedSource := filepath.Join(home, "src", "petstore") + if err := os.MkdirAll(filepath.Dir(generatedBinary), 0o755); err != nil { + t.Fatalf("mkdir generated binary dir: %v", err) + } + if err := os.WriteFile(generatedBinary, []byte("generated"), 0o755); err != nil { + t.Fatalf("write generated binary: %v", err) + } + if err := os.MkdirAll(generatedSource, 0o755); err != nil { + t.Fatalf("mkdir source dir: %v", err) + } + + mf, err := manifest.LoadFrom(filepath.Join(home, ".climate", "manifest.json")) + if err != nil { + t.Fatalf("LoadFrom() error = %v", err) + } + mf.Upsert(manifest.CLIEntry{Name: "petstore", BinaryPath: generatedBinary, SourceDir: generatedSource}) + if err := mf.Save(); err != nil { + t.Fatalf("Save() error = %v", err) + } + + if _, err := completion.Install(home, completion.ShellZsh, "darwin", func(w completion.Writer) error { + _, writeErr := w.Write([]byte("# completion script\n")) + return writeErr + }); err != nil { + t.Fatalf("completion.Install() error = %v", err) + } + + uninstallExecutablePath = func() (string, error) { return executable, nil } + uninstallEvalSymlinks = func(path string) (string, error) { return path, nil } + uninstallCommandRunner = func(io.Writer, io.Writer, string, ...string) error { return nil } + + var stderr bytes.Buffer + rootCmd.SetErr(&stderr) + rootCmd.SetIn(strings.NewReader("y\n")) + rootCmd.SetArgs([]string{"uninstall", "--full"}) + + raw := captureStdout(t, func() { + if err := rootCmd.Execute(); err != nil { + t.Fatalf("Execute() error = %v", err) + } + }) + + var resp struct { + Mode string `json:"mode"` + InstallMethod string `json:"install_method"` + ExecutableRemoved bool `json:"executable_removed"` + } + if err := json.Unmarshal([]byte(raw), &resp); err != nil { + t.Fatalf("unmarshal response: %v", err) + } + if 
resp.Mode != "full" { + t.Fatalf("Mode = %q, want full", resp.Mode) + } + if resp.InstallMethod != "standalone" { + t.Fatalf("InstallMethod = %q, want standalone", resp.InstallMethod) + } + if !resp.ExecutableRemoved { + t.Fatal("executable_removed should be true") + } + if _, err := os.Stat(executable); !os.IsNotExist(err) { + t.Fatalf("executable should be removed, stat err = %v", err) + } + if _, err := os.Stat(generatedBinary); !os.IsNotExist(err) { + t.Fatalf("generated binary should be removed, stat err = %v", err) + } + if _, err := os.Stat(generatedSource); !os.IsNotExist(err) { + t.Fatalf("generated source should be removed, stat err = %v", err) + } +} diff --git a/docs/design-generated-events.md b/docs/design-generated-events.md new file mode 100644 index 0000000..1fd0446 --- /dev/null +++ b/docs/design-generated-events.md @@ -0,0 +1,152 @@ +# Design: Generated CLI Event Commands (` events ...`) + +## Problem + +Generated CLIs can call request/response APIs, but they cannot currently act as +local webhook receivers on their own. That makes event-driven APIs awkward: +users still need an external ad hoc HTTP listener plus a tunneling tool if the +provider must reach a public URL. + +## Goals + +- Give every generated CLI built-in event commands plus a local config store. +- Let generated CLIs optionally expose the local listener through `cloudflared`. +- Avoid new dependencies in generated projects. +- Keep the runtime generic and configurable, using HMAC instead of + provider-specific webhook logic. + +## Non-goals + +- Native provider-specific webhook compatibility layers. +- Managing tunnel accounts, auth tokens, or self-hosted tunnel backends. 
+ +## CLI UX + +Every generated CLI gets: + +```bash +<cli> config list +<cli> config set <key> <value> +<cli> config set --secret events.signing_secret <value> + +<cli> events list + +<cli> events listen [event-name] \ + [--host 127.0.0.1] \ + [--port 8081] \ + [--path /] \ + [--response-status 202] \ + [--response-body '{"ok":true}'] \ + [--tunnel none|auto|cloudflared] \ + [--signature-mode none|hmac] \ + [--signature-header X-Signature] \ + [--signature-algorithm sha256|sha1|sha512] \ + [--include-timestamp] \ + [--timestamp-header X-Signature-Timestamp] + +<cli> events emit <event-name> \ + --target-url <url> \ + [--data-json '{"id":"evt_123"}' | --data-file payload.json] \ + [--signature-mode none|hmac] +``` + +Behavior: + +1. `config ...` manages local CLI defaults and secrets using named + configurations with one active profile at a time. +2. `events list` prints named event definitions extracted from OpenAPI + `callbacks` and top-level `webhooks`. +3. `events listen [event-name]` starts a local HTTP server on the requested + host/port/path and defaults to the selected event's path/methods. +4. Print one JSON startup record with the listen URL. +5. For every received request, print one JSON event record to stdout. +6. If HMAC signing is enabled, verify either `body` or `timestamp + "." + body` + using the configured header name and algorithm. +7. Reply with the configured status/body. +8. If `--tunnel` is enabled, start a `cloudflared` process and + print a JSON tunnel record when a public URL is detected. +9. `events emit <event-name>` sends the generated sample payload (or an + override payload) to a target URL. + +## Config keys + +The generated CLI stores local defaults in its config file. Useful keys: + +- `events.tunnel` +- `events.signature_mode` +- `events.signature_header` +- `events.signature_algorithm` +- `events.include_timestamp` +- `events.timestamp_header` +- `events.signing_secret` + +Secret values are masked in `config list`. 
+ +Configuration UX is intentionally gcloud-like: + +- `config profiles list` +- `config profiles create ` +- `config profiles use ` + +All `config set/get/unset` operations target the active configuration. + +## Interactive auth + +When the generated CLI includes supported auth schemes, it also exposes: + +- `auth login` +- `auth status` +- `auth logout` + +`auth login` stores credentials into the active configuration. For supported +OAuth2 flows it can fetch and store an access token interactively; otherwise it +falls back to prompting for the required token/credentials and storing them. + +## Tunnel + +This version supports only `cloudflared`. + +- `--tunnel auto` resolves to `cloudflared` +- command: `cloudflared tunnel --url http://127.0.0.1:` + +The listener does not install `cloudflared`. If it is missing from `PATH`, the +command returns an error. + +## Event extraction + +The generated CLI builds event definitions from: + +- top-level OpenAPI `webhooks` +- operation-level `callbacks` + +Webhook names become event names directly after normalization unless overridden +by `x-climate-event-name`. Callback names are namespaced with the parent +operation when needed to keep them stable and unique. + +Supported generic event metadata extensions: + +- `x-climate-event-name` +- `x-climate-event-path` +- `x-climate-signature-mode` +- `x-climate-signature-header` +- `x-climate-signature-algorithm` +- `x-climate-signature-include-timestamp` +- `x-climate-signature-timestamp-header` + +## Output shape + +The long-running command streams JSON records: + +- startup: + - `type: "listener.started"` +- tunnel discovery: + - `type: "listener.tunnel"` +- received event: + - `type: "listener.event"` + +This keeps the command scriptable even though it is long-running. + +## Follow-up path + +After this lands, the next step is richer replay tooling and optional event +delivery fixtures/examples on top of the config-driven HMAC contract. 
diff --git a/docs/design-shell-completions.md b/docs/design-shell-completions.md new file mode 100644 index 0000000..b125e86 --- /dev/null +++ b/docs/design-shell-completions.md @@ -0,0 +1,89 @@ +# Design: Shell Completions (`climate completion`) + +## Problem + +`climate` already exposes a growing command surface through Cobra, but it does +not ship an ergonomic way to enable shell completion locally. Users can still +wire completions manually by writing shell-specific boilerplate themselves, but +that is friction for both humans and agents. + +## Goals + +- Generate completion scripts for supported shells directly from the CLI. +- Provide a one-command local install flow that writes the completion script and + wires it into the user's shell startup config when needed. +- Provide a matching uninstall flow that only removes climate-managed wiring. +- Keep the behavior deterministic and idempotent. + +## Non-goals + +- Managing completions for generated CLIs built by `climate generate`. +- Detecting or modifying every possible shell startup file variant. +- Prompting interactively before file edits; the install and uninstall commands + are the explicit opt-in surface. + +## CLI UX + +```bash +climate completion bash +climate completion zsh +climate completion fish +climate completion powershell + +climate completion install [--shell bash|zsh|fish|powershell] +climate completion uninstall [--shell bash|zsh|fish|powershell] +``` + +Generation commands print the raw completion script to stdout. 
+ +Install and uninstall commands: + +- accept `--shell`; when omitted, detect the current shell from `$SHELL` +- write a managed completion script under a climate-owned location +- add or remove a climate-managed block in the relevant shell config +- print structured JSON describing which files were touched + +## File layout + +Managed script locations: + +- Bash: `~/.climate/completions/climate.bash` +- Zsh: `~/.climate/completions/climate.zsh` +- Fish: `~/.config/fish/completions/climate.fish` +- PowerShell: `~/.climate/completions/climate.ps1` + +Managed config targets: + +- Bash: prefer `~/.bashrc`; on macOS use `~/.bash_profile` when `~/.bashrc` + does not exist +- Zsh: `~/.zshrc` +- Fish: no config edit required because Fish autoloads files from its + completions directory +- PowerShell: + - macOS/Linux: `~/.config/powershell/Microsoft.PowerShell_profile.ps1` + - Windows: `~/Documents/PowerShell/Microsoft.PowerShell_profile.ps1` + +## Managed block contract + +For shells that need config wiring, `climate completion install` appends or +updates a marker-bounded block: + +```text +# >>> climate completion >>> +... +# <<< climate completion <<< +``` + +`uninstall` removes only that managed block and the managed script file. It +does not rewrite unrelated user configuration. + +## Edge cases + +- If shell auto-detection fails, the command returns an error asking for + `--shell`. +- Re-running install rewrites the managed script and keeps a single managed + config block. +- Re-running uninstall is safe: missing files are treated as already removed. +- `climate completion <shell>` prints a tip on stderr suggesting + `climate completion install --shell <shell>` so users discover the managed + install path without corrupting stdout. 
diff --git a/docs/design-uninstall.md b/docs/design-uninstall.md new file mode 100644 index 0000000..7e2475d --- /dev/null +++ b/docs/design-uninstall.md @@ -0,0 +1,111 @@ +# Design: Lifecycle Uninstall (`climate uninstall`) + +## Problem + +`climate` can remove generated CLIs via `climate remove`, and it can remove its +own shell completion wiring via `climate completion uninstall`, but it has no +first-class workflow for uninstalling the `climate` binary itself. It also +deletes generated CLIs without any interactive confirmation. + +That leaves two UX gaps: + +- deleting a generated CLI is too easy to do by accident +- removing `climate` itself depends on how it was installed and should not be a + manual memory exercise for the user + +## Goals + +- Make generated-CLI removal interactive by default. +- Add a root-level `climate uninstall` command for removing the `climate` + executable itself. +- Detect the installation method and use the correct removal strategy. +- Support a stricter full uninstall mode that also removes climate-managed local + artifacts. +- Keep automation possible with `--yes`. + +## Non-goals + +- Removing generated GitHub repositories created by `climate publish`. +- Cleaning package-manager caches such as Homebrew downloads or the Go module + cache. +- Deleting arbitrary user files outside climate-managed paths or manifest-owned + generated CLI paths. + +## CLI UX + +```bash +climate remove <name> [--purge-sources] [--yes] + +climate uninstall [--full] [--yes] +``` + +### `climate remove` + +- prompts before deleting a generated CLI +- `--purge-sources` keeps the existing meaning +- `--yes` skips the prompt for automation + +### `climate uninstall` + +Removes the `climate` executable itself. 
+ +- default mode removes only the `climate` CLI +- `--full` additionally removes climate-managed local artifacts: + - generated CLIs recorded in the manifest + - generated source directories recorded in the manifest + - the manifest file + - climate-managed completion scripts and config blocks +- `--yes` skips the prompt + +## Installation-method detection + +The uninstall flow derives the installation method from the resolved executable +path: + +- **Homebrew**: resolved path contains `/Cellar/climate/` +- **Go install**: executable lives in `GOBIN`, `GOPATH/bin`, or `~/go/bin` +- **Standalone**: any other path, including manually moved release binaries + +## Removal strategy + +### Homebrew + +Run: + +```bash +brew uninstall climate +``` + +This keeps removal aligned with the package manager that owns the binary. + +### Go install + +Delete the installed executable directly from the Go bin directory. + +### Standalone binary + +Delete the resolved executable path directly. + +## Confirmation model + +Both destructive flows prompt with a `y/N` confirmation unless `--yes` is +present. + +- `remove` confirms the target generated CLI and whether sources will be purged +- `uninstall` confirms the detected installation method and whether full cleanup + will also remove generated CLIs and climate-managed local state + +Rejected approach: + +- two-step "plan then confirm with exact typed phrase" flow + - rejected because the commands are already explicit destructive entry points, + and a normal `y/N` prompt is enough friction without making CLI use clumsy + +## Safety rules + +- Full uninstall removes only assets owned by the manifest and known climate + completion paths. +- Shell config cleanup removes only climate-managed marker blocks. +- Empty climate-owned directories may be pruned after managed files are removed. +- If the user cancels at the prompt, the command exits 0 and reports a + cancellation payload instead of deleting anything. 
diff --git a/docs/index.html b/docs/index.html index af51d14..91f83be 100644 --- a/docs/index.html +++ b/docs/index.html @@ -174,6 +174,8 @@

Homebrew

brew tap disk0Dancer/tap && brew install climate

Go

go install github.com/disk0Dancer/climate/cmd/climate@latest
+

Shell completion

+
climate completion install --shell zsh

Binary

curl -L https://github.com/disk0Dancer/climate/releases/latest/download/climate-darwin-arm64.tar.gz | tar xz
 sudo mv climate-darwin-arm64 /usr/local/bin/climate
@@ -183,6 +185,14 @@

Binary

How it works

Every generated CLI has the same shape:

<cli> <group> <operation> [flags] --output=json|table|raw
+

Generated CLIs also include spec-aware event commands:

+
<cli> events list
+<cli> config profiles create work
+<cli> config profiles use work
+<cli> auth login
+<cli> config set --secret events.signing_secret supersecret
+<cli> events listen payment-succeeded --port 8081 --tunnel auto --signature-mode hmac
+<cli> events emit payment-succeeded --target-url http://localhost:8081/webhooks/payment-succeeded --signature-mode hmac

Groups & operations

@@ -198,7 +208,11 @@

Request body

Auth

-

API key, bearer, basic, and OAuth2 client credentials. Set via env vars.

+

API key, bearer, basic, and OAuth2 client credentials. Use env vars or the generated auth login and local config commands.

+
+
+

Webhook listener

+

List, receive, and emit named callbacks/webhooks, with cloudflared exposure, local profiles, and configurable HMAC signatures.

@@ -225,8 +239,10 @@

Demo

Commands

generate

Create CLI from OpenAPI spec

+

completion

Print shell completions or install/uninstall them locally

list

Show registered CLIs

-

remove

Delete a generated CLI

+

remove

Interactively delete a generated CLI

+

uninstall

Remove the climate CLI itself, optionally with full cleanup

upgrade

Regenerate from updated spec

publish

Push CLI to GitHub with CI/release

skill generate

Emit agent skill prompt for a CLI

diff --git a/docs/index.md b/docs/index.md index 47b3293..cf151bb 100644 --- a/docs/index.md +++ b/docs/index.md @@ -14,6 +14,12 @@ brew tap disk0Dancer/tap && brew install climate Or `go install github.com/disk0Dancer/climate/cmd/climate@latest`. +Optional local shell completion: + +```bash +climate completion install --shell zsh +``` + ## Quick start ```bash @@ -32,6 +38,17 @@ petstore pet get --pet-id 1 - Path/query/header params → flags - Body → `--data-json` / `--data-file` - Auth via env vars (API key, bearer, basic, OAuth2) +- Config + auth + event commands → ` config profiles ...`, ` config set/get`, ` auth ...`, ` events ...` + +Example generated-CLI workflow: + +```bash +myapi config profiles create work +myapi config profiles use work +myapi auth login +myapi config set --secret events.signing_secret supersecret +myapi events listen payment-succeeded --port 8081 --tunnel auto --signature-mode hmac +``` ## Demo @@ -44,12 +61,25 @@ petstore pet get --pet-id 1 | `generate` | Create CLI from OpenAPI spec | | `compose` | Merge multiple specs (with prefixes) into one facade CLI | | `mock` | Run local mock HTTP server from OpenAPI spec | +| `completion` | Print shell completions or install/uninstall them locally | | `list` | Show registered CLIs | -| `remove` | Delete a generated CLI | +| `remove` | Interactively delete a generated CLI | +| `uninstall` | Remove the climate CLI itself, optionally with full cleanup | | `upgrade` | Regenerate from updated spec | | `publish` | Push CLI to GitHub with CI/auto-fix/release | | `skill generate` | Emit agent skill prompt | +## Shell completion + +```bash +climate completion zsh +climate completion install --shell zsh +climate completion uninstall --shell zsh +climate remove petstore +climate uninstall +climate uninstall --full +``` + ## Agent skills ```bash @@ -68,6 +98,9 @@ npx skills add https://github.com/disk0Dancer/climate --skill climate-generator - [Compose design](./design-compose.md) - [CI auto-fix 
design](./design-ci-autofix.md) - [Mock design](./design-mock.md) +- [Generated event listener design](./design-generated-events.md) +- [Shell completion design](./design-shell-completions.md) +- [Uninstall design](./design-uninstall.md) - [OpenAPI 3.0 support matrix](./openapi-3-support-matrix.md) ## License diff --git a/docs/llms.txt b/docs/llms.txt index 3de34f0..15d79e5 100644 --- a/docs/llms.txt +++ b/docs/llms.txt @@ -17,6 +17,7 @@ License: Apache-2.0 - Go: go install github.com/disk0Dancer/climate/cmd/climate@latest - Releases: https://github.com/disk0Dancer/climate/releases - Homebrew: brew tap disk0Dancer/tap && brew install climate +- Shell completion install: climate completion install --shell zsh ## Skills @@ -27,16 +28,39 @@ License: Apache-2.0 ## Key commands - climate generate [--name ] [--out-dir ] [--no-build] [--force] +- climate completion bash|zsh|fish|powershell +- climate completion install [--shell bash|zsh|fish|powershell] +- climate completion uninstall [--shell bash|zsh|fish|powershell] - climate list -- climate remove [--purge-sources] +- climate remove [--purge-sources] [--yes] +- climate uninstall [--full] [--yes] - climate upgrade [--openapi ] - climate publish [--owner ] [--repo ] [--visibility public|private] - climate skill generate [--mode=full|compact] - climate skill generator +## Generated CLI capabilities + +- [flags] --output=json|table|raw +- config list +- config set +- config get +- config unset +- config profiles list|create|use +- config set --secret events.signing_secret +- auth login [--scheme ] +- auth status +- auth logout [--scheme ] +- events list +- events listen [event-name] [--host 127.0.0.1] [--port 8081] [--path /] [--tunnel none|auto|cloudflared] [--signature-mode none|hmac] +- events emit --target-url [--data-json ] [--data-file ] [--signature-mode none|hmac] + ## Ops - GitHub Pages publishes from docs/ - Tagged releases sync the Homebrew formula into disk0Dancer/homebrew-tap when HOMEBREW_TAP_TOKEN is 
configured - robots policy: https://disk0dancer.github.io/climate/robots.txt - Google Analytics is optional and controlled by docs/site-config.js +- Generated event listener design: https://github.com/disk0Dancer/climate/blob/main/docs/design-generated-events.md +- Shell completion design: https://github.com/disk0Dancer/climate/blob/main/docs/design-shell-completions.md +- Uninstall design: https://github.com/disk0Dancer/climate/blob/main/docs/design-uninstall.md diff --git a/docs/openapi-3-support-matrix.md b/docs/openapi-3-support-matrix.md index 448a72f..ff187f7 100644 --- a/docs/openapi-3-support-matrix.md +++ b/docs/openapi-3-support-matrix.md @@ -23,9 +23,9 @@ what is partially supported, and what should be designed/implemented next. | Local mock simulator (`mock`) | ✅ Implemented | Auto responses from spec schema + latency | Add optional examples-first mode | | `enum` | ✅ Implemented (mock) / ⚠️ partial (CLI) | Mock prefers first enum value | Add flag-level enum validation/help text | | `allOf`, `oneOf`, `anyOf`, `not` | ⚠️ Partial | Core flow works for simple schemas; advanced combiners not fully synthesized | Add schema normalizer for combiners | -| `servers` and server variables | ✅ Implemented | Generated CLIs use primary server URL and support server-template interpolation via `--server-var-` and `_SERVER_VAR_` env vars | Keep stable | -| `callbacks` | ⚠️ Partial | Not mapped to generated CLI surface; `climate mock` can generate and emit synthetic event payloads to target endpoints via flags | Add event command model (`events subscribe`/`events trigger`) | -| `webhooks` (3.1) | ⚠️ Partial | Top-level webhook declarations are not yet parsed as first-class objects; mock has event emission mode for local webhook testing | Add webhook simulation and event ingestion model | +| `servers` and server variables | ✅ Implemented | Generated CLIs use the primary server URL and support server-template interpolation via `--server-var-` and `_SERVER_VAR_` env vars | 
Keep stable | +| `callbacks` | ✅ Implemented / ⚠️ Partial | Generated CLIs expose callback-derived named event commands via `events list`, `events listen `, and `events emit ` | Improve callback expression/path inference and richer config-driven defaults | +| `webhooks` (3.1) | ✅ Implemented / ⚠️ Partial | Top-level `webhooks` become named generated event commands with local listener + emit flow | Add richer schema-aware event metadata and replay tooling | | Links | ❌ Planned | Ignored | Add optional “follow-up command hint” output | | Examples (`example` / `examples`) | ⚠️ Partial | Not consistently preferred in generation | Use examples as first-class sample payload/response source | @@ -34,19 +34,33 @@ what is partially supported, and what should be designed/implemented next. Some APIs are event-driven and include webhooks/callbacks instead of (or in addition to) plain request/response endpoints. -Proposed direction for generated CLIs: +Current baseline for generated CLIs: -1. **Expose webhook declarations as event commands** +1. **Named event surface** - `myapi events list` - - `myapi events emit --data-json ...` (test mode) -2. **Support local receiver** - - `myapi events listen --port 8081` to receive and inspect payloads -3. **Support production replay/import** + - `myapi events listen ` + - `myapi events emit --target-url ...` +2. **Local config store** + - `myapi config list` + - `myapi config set` + - `myapi config set --secret events.signing_secret ...` +3. **Optional tunnel exposure** + - `--tunnel auto|cloudflared` +4. **Generic HMAC signatures** + - configurable header, algorithm, and optional timestamp signing +5. **Structured event stream** + - startup, tunnel, and received-event records are streamed as JSON + +Proposed next direction for generated CLIs: + +1. **Add richer event metadata to OpenAPI extensions** + - signature defaults, path overrides, and replay hints +2. 
**Support production replay/import** - `myapi events import --file payload.json --event ` - `myapi events replay --source prod-export.ndjson` -4. **Compose awareness** +3. **Compose awareness** - In `compose`, namespace event names with prefix (same as path/components) -5. **Mock integration** +4. **Mock integration** - `climate mock` can emit synthetic webhook payloads at intervals or on demand for integration tests @@ -77,7 +91,7 @@ Safety defaults: ## Prioritized implementation roadmap 1. Pagination abstraction + generated paging flags (`--all`, `--max-items`) -2. `callbacks` support in generator command tree -3. `webhooks` support + local listener/emitter helpers +2. config-driven secret and signature UX hardening +3. production replay/import workflow for named events 4. examples-first generation mode (payloads + mock responses) 5. advanced schema combiner normalization (`allOf`/`oneOf`/`anyOf`) diff --git a/internal/completion/completion.go b/internal/completion/completion.go new file mode 100644 index 0000000..8ecf2fb --- /dev/null +++ b/internal/completion/completion.go @@ -0,0 +1,317 @@ +package completion + +import ( + "errors" + "fmt" + "io" + "os" + "path/filepath" + "strings" +) + +// Writer is the output surface used when generating completion scripts. +type Writer = io.Writer + +// Generator renders a completion script into the provided writer. +type Generator func(Writer) error + +// Shell identifies a supported completion shell. +type Shell string + +const ( + ShellBash Shell = "bash" + ShellZsh Shell = "zsh" + ShellFish Shell = "fish" + ShellPowerShell Shell = "powershell" +) + +const ( + managedStart = "# >>> climate completion >>>" + managedEnd = "# <<< climate completion <<<" +) + +// Paths describes where climate stores completion assets for a shell. 
+type Paths struct { + Shell string `json:"shell"` + ScriptPath string `json:"script_path"` + ConfigPath string `json:"config_path,omitempty"` +} + +// InstallResult reports what climate wrote during completion install. +type InstallResult struct { + Paths + ConfigUpdated bool `json:"config_updated"` +} + +// UninstallResult reports what climate removed during completion uninstall. +type UninstallResult struct { + Paths + ScriptRemoved bool `json:"script_removed"` + ConfigUpdated bool `json:"config_updated"` +} + +// SupportedShellNames returns the shells supported by climate completion. +func SupportedShellNames() []string { + return []string{ + string(ShellBash), + string(ShellZsh), + string(ShellFish), + string(ShellPowerShell), + } +} + +// ParseShell validates a shell name. +func ParseShell(name string) (Shell, error) { + switch strings.ToLower(strings.TrimSpace(name)) { + case string(ShellBash): + return ShellBash, nil + case string(ShellZsh): + return ShellZsh, nil + case string(ShellFish): + return ShellFish, nil + case "pwsh", "powershell": + return ShellPowerShell, nil + default: + return "", fmt.Errorf("unsupported shell %q (supported: %s)", name, strings.Join(SupportedShellNames(), ", ")) + } +} + +// DetectShell derives the active shell from a SHELL-style environment value. +func DetectShell(shellEnv string) (Shell, error) { + if strings.TrimSpace(shellEnv) == "" { + return "", errors.New("shell could not be detected automatically; pass --shell") + } + return ParseShell(filepath.Base(shellEnv)) +} + +// ResolveShell returns the explicitly requested shell or auto-detects it. +func ResolveShell(explicit, shellEnv, goos string) (Shell, error) { + if strings.TrimSpace(explicit) != "" { + return ParseShell(explicit) + } + if goos == "windows" && strings.TrimSpace(shellEnv) == "" { + return ShellPowerShell, nil + } + return DetectShell(shellEnv) +} + +// ResolvePaths returns the script and config targets for a shell. 
+func ResolvePaths(home string, shell Shell, goos string) (Paths, error) { + base := filepath.Join(home, ".climate", "completions") + switch shell { + case ShellBash: + return Paths{ + Shell: string(shell), + ScriptPath: filepath.Join(base, "climate.bash"), + ConfigPath: resolveBashConfigPath(home, goos), + }, nil + case ShellZsh: + return Paths{ + Shell: string(shell), + ScriptPath: filepath.Join(base, "climate.zsh"), + ConfigPath: filepath.Join(home, ".zshrc"), + }, nil + case ShellFish: + return Paths{ + Shell: string(shell), + ScriptPath: filepath.Join(home, ".config", "fish", "completions", "climate.fish"), + }, nil + case ShellPowerShell: + return Paths{ + Shell: string(shell), + ScriptPath: filepath.Join(base, "climate.ps1"), + ConfigPath: resolvePowerShellProfilePath(home, goos), + }, nil + default: + return Paths{}, fmt.Errorf("unsupported shell %q", shell) + } +} + +// Install writes the completion script and managed config block. +func Install(home string, shell Shell, goos string, generate Generator) (InstallResult, error) { + paths, err := ResolvePaths(home, shell, goos) + if err != nil { + return InstallResult{}, err + } + + if err := os.MkdirAll(filepath.Dir(paths.ScriptPath), 0o755); err != nil { + return InstallResult{}, fmt.Errorf("creating completion directory: %w", err) + } + + file, err := os.Create(paths.ScriptPath) + if err != nil { + return InstallResult{}, fmt.Errorf("creating completion script: %w", err) + } + defer file.Close() + + if err := generate(file); err != nil { + return InstallResult{}, fmt.Errorf("generating completion script: %w", err) + } + + configUpdated := false + if paths.ConfigPath != "" { + configUpdated, err = ensureManagedBlock(paths.ConfigPath, managedBlock(paths)) + if err != nil { + return InstallResult{}, err + } + } + + return InstallResult{ + Paths: paths, + ConfigUpdated: configUpdated, + }, nil +} + +// Uninstall removes the completion script and managed config block. 
+func Uninstall(home string, shell Shell, goos string) (UninstallResult, error) { + paths, err := ResolvePaths(home, shell, goos) + if err != nil { + return UninstallResult{}, err + } + + scriptRemoved := false + if err := os.Remove(paths.ScriptPath); err == nil { + scriptRemoved = true + } else if !errors.Is(err, os.ErrNotExist) { + return UninstallResult{}, fmt.Errorf("removing completion script: %w", err) + } + + configUpdated := false + if paths.ConfigPath != "" { + configUpdated, err = removeManagedBlock(paths.ConfigPath) + if err != nil { + return UninstallResult{}, err + } + } + + return UninstallResult{ + Paths: paths, + ScriptRemoved: scriptRemoved, + ConfigUpdated: configUpdated, + }, nil +} + +func resolveBashConfigPath(home, goos string) string { + bashrc := filepath.Join(home, ".bashrc") + if goos != "darwin" || fileExists(bashrc) { + return bashrc + } + return filepath.Join(home, ".bash_profile") +} + +func resolvePowerShellProfilePath(home, goos string) string { + if goos == "windows" { + return filepath.Join(home, "Documents", "PowerShell", "Microsoft.PowerShell_profile.ps1") + } + return filepath.Join(home, ".config", "powershell", "Microsoft.PowerShell_profile.ps1") +} + +func managedBlock(paths Paths) string { + switch paths.Shell { + case string(ShellPowerShell): + return strings.Join([]string{ + managedStart, + fmt.Sprintf("if (Test-Path \"%s\") { . \"%s\" }", paths.ScriptPath, paths.ScriptPath), + managedEnd, + }, "\n") + default: + return strings.Join([]string{ + managedStart, + fmt.Sprintf("[ -f \"%s\" ] && . 
\"%s\"", paths.ScriptPath, paths.ScriptPath), + managedEnd, + }, "\n") + } +} + +func ensureManagedBlock(path, block string) (bool, error) { + existing, err := os.ReadFile(path) + if err != nil && !errors.Is(err, os.ErrNotExist) { + return false, fmt.Errorf("reading shell config %s: %w", path, err) + } + + updated := upsertManagedBlock(string(existing), block) + if string(existing) == updated { + return false, nil + } + + if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil { + return false, fmt.Errorf("creating shell config directory: %w", err) + } + if err := os.WriteFile(path, []byte(updated), 0o644); err != nil { + return false, fmt.Errorf("writing shell config %s: %w", path, err) + } + return true, nil +} + +func removeManagedBlock(path string) (bool, error) { + existing, err := os.ReadFile(path) + if errors.Is(err, os.ErrNotExist) { + return false, nil + } + if err != nil { + return false, fmt.Errorf("reading shell config %s: %w", path, err) + } + + updated, changed := stripManagedBlock(string(existing)) + if !changed { + return false, nil + } + + if err := os.WriteFile(path, []byte(updated), 0o644); err != nil { + return false, fmt.Errorf("writing shell config %s: %w", path, err) + } + return true, nil +} + +func upsertManagedBlock(existing, block string) string { + stripped, _ := stripManagedBlock(existing) + block = strings.TrimRight(block, "\n") + base := strings.TrimRight(stripped, "\n") + if base == "" { + return block + "\n" + } + return base + "\n\n" + block + "\n" +} + +func stripManagedBlock(existing string) (string, bool) { + start := strings.Index(existing, managedStart) + if start == -1 { + return normalizeContent(existing), false + } + + endOffset := strings.Index(existing[start:], managedEnd) + if endOffset == -1 { + return normalizeContent(existing), false + } + + end := start + endOffset + len(managedEnd) + for end < len(existing) && (existing[end] == '\n' || existing[end] == '\r') { + end++ + } + + before := 
strings.TrimRight(existing[:start], "\r\n") + after := strings.TrimLeft(existing[end:], "\r\n") + switch { + case before == "" && after == "": + return "", true + case before == "": + return normalizeContent(after), true + case after == "": + return normalizeContent(before), true + default: + return normalizeContent(before + "\n\n" + after), true + } +} + +func normalizeContent(content string) string { + content = strings.TrimRight(content, "\r\n") + if content == "" { + return "" + } + return content + "\n" +} + +func fileExists(path string) bool { + _, err := os.Stat(path) + return err == nil +} diff --git a/internal/completion/completion_test.go b/internal/completion/completion_test.go new file mode 100644 index 0000000..6dd5ce5 --- /dev/null +++ b/internal/completion/completion_test.go @@ -0,0 +1,249 @@ +package completion_test + +import ( + "os" + "path/filepath" + "strings" + "testing" + + "github.com/disk0Dancer/climate/internal/completion" +) + +func TestDetectShell(t *testing.T) { + t.Parallel() + + cases := []struct { + name string + value string + want completion.Shell + wantErr bool + }{ + {name: "bash", value: "/bin/bash", want: completion.ShellBash}, + {name: "zsh", value: "/bin/zsh", want: completion.ShellZsh}, + {name: "fish", value: "/opt/homebrew/bin/fish", want: completion.ShellFish}, + {name: "pwsh", value: "/usr/local/bin/pwsh", want: completion.ShellPowerShell}, + {name: "empty", value: "", wantErr: true}, + {name: "unsupported", value: "/bin/tcsh", wantErr: true}, + } + + for _, tc := range cases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + got, err := completion.DetectShell(tc.value) + if tc.wantErr { + if err == nil { + t.Fatal("DetectShell() error = nil, want non-nil") + } + return + } + if err != nil { + t.Fatalf("DetectShell() error = %v", err) + } + if got != tc.want { + t.Fatalf("DetectShell() = %q, want %q", got, tc.want) + } + }) + } +} + +func TestResolvePaths(t *testing.T) { + t.Parallel() + + home := 
t.TempDir() + bashrc := filepath.Join(home, ".bashrc") + if err := os.WriteFile(bashrc, []byte("# bash\n"), 0o644); err != nil { + t.Fatalf("writing .bashrc: %v", err) + } + + cases := []struct { + name string + shell completion.Shell + goos string + wantScriptPath string + wantConfigPath string + }{ + { + name: "bash", + shell: completion.ShellBash, + goos: "linux", + wantScriptPath: filepath.Join(home, ".climate", "completions", "climate.bash"), + wantConfigPath: bashrc, + }, + { + name: "zsh", + shell: completion.ShellZsh, + goos: "darwin", + wantScriptPath: filepath.Join(home, ".climate", "completions", "climate.zsh"), + wantConfigPath: filepath.Join(home, ".zshrc"), + }, + { + name: "fish", + shell: completion.ShellFish, + goos: "darwin", + wantScriptPath: filepath.Join(home, ".config", "fish", "completions", "climate.fish"), + wantConfigPath: "", + }, + { + name: "powershell", + shell: completion.ShellPowerShell, + goos: "linux", + wantScriptPath: filepath.Join(home, ".climate", "completions", "climate.ps1"), + wantConfigPath: filepath.Join(home, ".config", "powershell", "Microsoft.PowerShell_profile.ps1"), + }, + } + + for _, tc := range cases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + got, err := completion.ResolvePaths(home, tc.shell, tc.goos) + if err != nil { + t.Fatalf("ResolvePaths() error = %v", err) + } + if got.ScriptPath != tc.wantScriptPath { + t.Fatalf("ScriptPath = %q, want %q", got.ScriptPath, tc.wantScriptPath) + } + if got.ConfigPath != tc.wantConfigPath { + t.Fatalf("ConfigPath = %q, want %q", got.ConfigPath, tc.wantConfigPath) + } + }) + } +} + +func TestResolvePathsUsesBashProfileOnDarwinWithoutBashrc(t *testing.T) { + t.Parallel() + + home := t.TempDir() + + got, err := completion.ResolvePaths(home, completion.ShellBash, "darwin") + if err != nil { + t.Fatalf("ResolvePaths() error = %v", err) + } + want := filepath.Join(home, ".bash_profile") + if got.ConfigPath != want { + t.Fatalf("ConfigPath = %q, want %q", 
got.ConfigPath, want) + } +} + +func TestInstallAddsManagedBlock(t *testing.T) { + t.Parallel() + + home := t.TempDir() + configPath := filepath.Join(home, ".zshrc") + if err := os.WriteFile(configPath, []byte("export PATH=\"$HOME/bin:$PATH\"\n"), 0o644); err != nil { + t.Fatalf("writing .zshrc: %v", err) + } + + result, err := completion.Install(home, completion.ShellZsh, "darwin", func(w completion.Writer) error { + _, writeErr := w.Write([]byte("# completion script\n")) + return writeErr + }) + if err != nil { + t.Fatalf("Install() error = %v", err) + } + if result.ScriptPath == "" { + t.Fatal("ScriptPath should not be empty") + } + + scriptBytes, err := os.ReadFile(result.ScriptPath) + if err != nil { + t.Fatalf("reading script: %v", err) + } + if string(scriptBytes) != "# completion script\n" { + t.Fatalf("script content = %q", string(scriptBytes)) + } + + configBytes, err := os.ReadFile(configPath) + if err != nil { + t.Fatalf("reading config: %v", err) + } + config := string(configBytes) + if !strings.Contains(config, "# >>> climate completion >>>") { + t.Fatal("managed block start marker missing") + } + if strings.Count(config, "# >>> climate completion >>>") != 1 { + t.Fatal("managed block should be added exactly once") + } + if !strings.Contains(config, result.ScriptPath) { + t.Fatal("config should source the generated script path") + } +} + +func TestInstallIsIdempotent(t *testing.T) { + t.Parallel() + + home := t.TempDir() + + for i := 0; i < 2; i++ { + _, err := completion.Install(home, completion.ShellZsh, "darwin", func(w completion.Writer) error { + _, writeErr := w.Write([]byte("# completion script\n")) + return writeErr + }) + if err != nil { + t.Fatalf("Install() run %d error = %v", i+1, err) + } + } + + configBytes, err := os.ReadFile(filepath.Join(home, ".zshrc")) + if err != nil { + t.Fatalf("reading .zshrc: %v", err) + } + if strings.Count(string(configBytes), "# >>> climate completion >>>") != 1 { + t.Fatal("managed block should remain 
singular after repeated install") + } +} + +func TestUninstallRemovesManagedAssets(t *testing.T) { + t.Parallel() + + home := t.TempDir() + + installResult, err := completion.Install(home, completion.ShellZsh, "darwin", func(w completion.Writer) error { + _, writeErr := w.Write([]byte("# completion script\n")) + return writeErr + }) + if err != nil { + t.Fatalf("Install() error = %v", err) + } + + uninstallResult, err := completion.Uninstall(home, completion.ShellZsh, "darwin") + if err != nil { + t.Fatalf("Uninstall() error = %v", err) + } + if !uninstallResult.ScriptRemoved { + t.Fatal("ScriptRemoved should be true after uninstall") + } + + if _, err := os.Stat(installResult.ScriptPath); !os.IsNotExist(err) { + t.Fatalf("script should be removed, stat err = %v", err) + } + + configBytes, err := os.ReadFile(filepath.Join(home, ".zshrc")) + if err != nil { + t.Fatalf("reading .zshrc: %v", err) + } + config := string(configBytes) + if strings.Contains(config, "# >>> climate completion >>>") { + t.Fatal("managed block should be removed from config") + } +} + +func TestUninstallIsSafeWhenNothingExists(t *testing.T) { + t.Parallel() + + home := t.TempDir() + + result, err := completion.Uninstall(home, completion.ShellFish, "darwin") + if err != nil { + t.Fatalf("Uninstall() error = %v", err) + } + if result.ScriptRemoved { + t.Fatal("ScriptRemoved should be false when nothing was installed") + } + if result.ConfigUpdated { + t.Fatal("ConfigUpdated should be false for fish with no config file") + } +} diff --git a/internal/confirm/confirm.go b/internal/confirm/confirm.go new file mode 100644 index 0000000..8d938dc --- /dev/null +++ b/internal/confirm/confirm.go @@ -0,0 +1,40 @@ +package confirm + +import ( + "bufio" + "fmt" + "io" + "strings" +) + +// Ask prompts the user for a y/N confirmation. 
+func Ask(in io.Reader, out io.Writer, prompt string) (bool, error) { + reader := bufio.NewReader(in) + + for { + if _, err := fmt.Fprintf(out, "%s [y/N]: ", prompt); err != nil { + return false, err + } + + line, err := reader.ReadString('\n') + if err != nil && err != io.EOF { + return false, err + } + + answer := strings.ToLower(strings.TrimSpace(line)) + switch answer { + case "y", "yes": + return true, nil + case "", "n", "no": + return false, nil + } + + if _, writeErr := fmt.Fprintln(out, "Please answer yes or no."); writeErr != nil { + return false, writeErr + } + + if err == io.EOF { + return false, nil + } + } +} diff --git a/internal/confirm/confirm_test.go b/internal/confirm/confirm_test.go new file mode 100644 index 0000000..5f38f46 --- /dev/null +++ b/internal/confirm/confirm_test.go @@ -0,0 +1,54 @@ +package confirm_test + +import ( + "bytes" + "strings" + "testing" + + "github.com/disk0Dancer/climate/internal/confirm" +) + +func TestAskYes(t *testing.T) { + t.Parallel() + + var out bytes.Buffer + ok, err := confirm.Ask(strings.NewReader("yes\n"), &out, "Delete it?") + if err != nil { + t.Fatalf("Ask() error = %v", err) + } + if !ok { + t.Fatal("Ask() should return true for yes") + } + if !strings.Contains(out.String(), "Delete it? 
[y/N]:") { + t.Fatal("prompt should be written") + } +} + +func TestAskNoByDefault(t *testing.T) { + t.Parallel() + + var out bytes.Buffer + ok, err := confirm.Ask(strings.NewReader("\n"), &out, "Delete it?") + if err != nil { + t.Fatalf("Ask() error = %v", err) + } + if ok { + t.Fatal("Ask() should return false on empty input") + } +} + +func TestAskRetriesOnInvalidInput(t *testing.T) { + t.Parallel() + + var out bytes.Buffer + ok, err := confirm.Ask(strings.NewReader("maybe\ny\n"), &out, "Delete it?") + if err != nil { + t.Fatalf("Ask() error = %v", err) + } + if !ok { + t.Fatal("Ask() should return true after retrying with yes") + } + if !strings.Contains(out.String(), "Please answer yes or no.") { + t.Fatal("Ask() should explain invalid input") + } +} diff --git a/internal/generator/generator.go b/internal/generator/generator.go index 16fc856..393d37a 100644 --- a/internal/generator/generator.go +++ b/internal/generator/generator.go @@ -2,6 +2,8 @@ package generator import ( + "bytes" + "embed" "encoding/json" "fmt" "os" @@ -9,16 +11,21 @@ import ( "path/filepath" "sort" "strings" + "text/template" "time" "unicode" "github.com/disk0Dancer/climate/internal/auth" + "github.com/disk0Dancer/climate/internal/mock" "github.com/disk0Dancer/climate/internal/spec" ) // Version is the current climate version. const Version = "0.1.0" +//go:embed templates/* +var templateFS embed.FS + // Options configures the code generator. 
type Options struct { CLIName string @@ -46,6 +53,49 @@ type Meta struct { SpecSource string `json:"spec_source,omitempty"` } +type eventDefinition struct { + Name string + DisplayName string + Source string + Expression string + DefaultPath string + Methods []string + DefaultMethod string + Summary string + Description string + SampleJSON string + SignatureMode string + SignatureHeader string + SignatureAlgorithm string + SignatureIncludeTimestamp bool + SignatureTimestampHeader string +} + +type authSchemeDefinition struct { + Name string + ConfigKey string + Type string + AuthorizationURL string + TokenURL string + HasClientCredentials bool + HasPasswordFlow bool + HasAuthorizationCode bool + HasImplicitFlow bool +} + +func renderTemplate(name string, data interface{}) (string, error) { + tmpl, err := template.ParseFS(templateFS, "templates/"+name) + if err != nil { + return "", fmt.Errorf("parse template %s: %w", name, err) + } + + var buf bytes.Buffer + if err := tmpl.Execute(&buf, data); err != nil { + return "", fmt.Errorf("execute template %s: %w", name, err) + } + return buf.String(), nil +} + // Generate generates a Go CLI project from an OpenAPI spec and optionally builds it. func Generate(openAPI *spec.OpenAPI, rawSpec []byte, opts Options) (*Result, error) { if opts.CLIName == "" { @@ -110,6 +160,11 @@ func Generate(openAPI *spec.OpenAPI, rawSpec []byte, opts Options) (*Result, err // generateFiles writes all Go source files for the CLI project. 
func generateFiles(openAPI *spec.OpenAPI, cliName, outDir, hash, specSource string) error { schemes := auth.ParseSchemes(openAPI) + eventDefs, err := extractEventDefinitions(openAPI) + if err != nil { + return err + } + authDefs := extractAuthSchemeDefinitions(schemes) // Write go.mod if err := writeFile(filepath.Join(outDir, "go.mod"), goModContent(cliName)); err != nil { @@ -117,7 +172,11 @@ func generateFiles(openAPI *spec.OpenAPI, cliName, outDir, hash, specSource stri } // Write main.go - if err := writeFile(filepath.Join(outDir, "main.go"), mainGoContent(cliName)); err != nil { + mainContent, err := mainGoContent(cliName) + if err != nil { + return err + } + if err := writeFile(filepath.Join(outDir, "main.go"), mainContent); err != nil { return err } @@ -126,7 +185,11 @@ func generateFiles(openAPI *spec.OpenAPI, cliName, outDir, hash, specSource stri if err := os.MkdirAll(cmdDir, 0o755); err != nil { return err } - if err := writeFile(filepath.Join(cmdDir, "root.go"), rootGoContent(openAPI, cliName, schemes)); err != nil { + rootContent, err := rootGoContent(openAPI, cliName, schemes) + if err != nil { + return err + } + if err := writeFile(filepath.Join(cmdDir, "root.go"), rootContent); err != nil { return err } @@ -139,12 +202,71 @@ func generateFiles(openAPI *spec.OpenAPI, cliName, outDir, hash, specSource stri return err } + // Write cmd/events.go + eventsContent, err := eventsGoContent(cliName, eventDefs) + if err != nil { + return err + } + if err := writeFile(filepath.Join(cmdDir, "events.go"), eventsContent); err != nil { + return err + } + + // Write cmd/config.go + configCmdContent, err := configGoContent(cliName) + if err != nil { + return err + } + if err := writeFile(filepath.Join(cmdDir, "config.go"), configCmdContent); err != nil { + return err + } + + // Write cmd/auth.go when auth schemes exist. 
+ if len(authDefs) > 0 { + authCmdContent, err := authGoContent(cliName, authDefs) + if err != nil { + return err + } + if err := writeFile(filepath.Join(cmdDir, "auth.go"), authCmdContent); err != nil { + return err + } + } + // Write internal/client/client.go clientDir := filepath.Join(outDir, "internal", "client") if err := os.MkdirAll(clientDir, 0o755); err != nil { return err } - if err := writeFile(filepath.Join(clientDir, "client.go"), clientGoContent(openAPI)); err != nil { + clientContent, err := clientGoContent(openAPI) + if err != nil { + return err + } + if err := writeFile(filepath.Join(clientDir, "client.go"), clientContent); err != nil { + return err + } + + // Write internal/config/config.go + configDir := filepath.Join(outDir, "internal", "config") + if err := os.MkdirAll(configDir, 0o755); err != nil { + return err + } + configContent, err := internalConfigGoContent(cliName) + if err != nil { + return err + } + if err := writeFile(filepath.Join(configDir, "config.go"), configContent); err != nil { + return err + } + + // Write internal/events/events.go + eventsDir := filepath.Join(outDir, "internal", "events") + if err := os.MkdirAll(eventsDir, 0o755); err != nil { + return err + } + internalEventsContent, err := internalEventsGoContent() + if err != nil { + return err + } + if err := writeFile(filepath.Join(eventsDir, "events.go"), internalEventsContent); err != nil { return err } @@ -163,6 +285,300 @@ func generateFiles(openAPI *spec.OpenAPI, cliName, outDir, hash, specSource stri return writeFile(filepath.Join(outDir, "climate_meta.json"), string(metaJSON)) } +func extractEventDefinitions(openAPI *spec.OpenAPI) ([]eventDefinition, error) { + if openAPI == nil { + return nil, fmt.Errorf("openapi spec is nil") + } + + defs := []eventDefinition{} + usedNames := map[string]int{} + + webhookNames := make([]string, 0, len(openAPI.Webhooks)) + for name := range openAPI.Webhooks { + webhookNames = append(webhookNames, name) + } + 
sort.Strings(webhookNames) + + for _, name := range webhookNames { + def, err := buildEventDefinition(openAPI, spec.NormalizeName(name), name, "webhook", "", "/webhooks/"+spec.NormalizeName(name), openAPI.Webhooks[name], openAPI, openAPI.Webhooks[name], openAPI.Webhooks[name].Post) + if err != nil { + return nil, err + } + def.Name = dedupeEventName(def.Name, usedNames) + defs = append(defs, def) + } + + pathKeys := make([]string, 0, len(openAPI.Paths)) + for path := range openAPI.Paths { + pathKeys = append(pathKeys, path) + } + sort.Strings(pathKeys) + + for _, path := range pathKeys { + item := openAPI.Paths[path] + methods := make([]string, 0, len(item.Operations())) + for method := range item.Operations() { + methods = append(methods, method) + } + sort.Strings(methods) + + for _, method := range methods { + op := item.Operations()[method] + if op == nil || len(op.Callbacks) == 0 { + continue + } + + callbackNames := make([]string, 0, len(op.Callbacks)) + for callbackName := range op.Callbacks { + callbackNames = append(callbackNames, callbackName) + } + sort.Strings(callbackNames) + + for _, callbackName := range callbackNames { + callback := op.Callbacks[callbackName] + expressions := make([]string, 0, len(callback)) + for expression := range callback { + expressions = append(expressions, expression) + } + sort.Strings(expressions) + for _, expression := range expressions { + prefix := callbackEventPrefix(op, method, path) + baseName := spec.NormalizeName(prefix + "-" + callbackName) + defaultPath := callbackDefaultPath(callbackName, expression) + callbackItem := callback[expression] + def, err := buildEventDefinition(openAPI, baseName, callbackName, "callback", expression, defaultPath, callbackItem, openAPI, callbackItem, callbackItem.Post) + if err != nil { + return nil, err + } + def.Name = dedupeEventName(def.Name, usedNames) + defs = append(defs, def) + } + } + } + } + + return defs, nil +} + +func extractAuthSchemeDefinitions(schemes []auth.Scheme) 
[]authSchemeDefinition { + defs := make([]authSchemeDefinition, 0, len(schemes)) + for _, scheme := range schemes { + def := authSchemeDefinition{ + Name: scheme.Name, + ConfigKey: spec.NormalizeName(scheme.Name), + Type: string(scheme.Type), + } + if scheme.Spec.Flows != nil { + if flow := scheme.Spec.Flows.ClientCredentials; flow != nil { + def.HasClientCredentials = true + if def.TokenURL == "" { + def.TokenURL = flow.TokenURL + } + } + if flow := scheme.Spec.Flows.Password; flow != nil { + def.HasPasswordFlow = true + if def.TokenURL == "" { + def.TokenURL = flow.TokenURL + } + } + if flow := scheme.Spec.Flows.AuthorizationCode; flow != nil { + def.HasAuthorizationCode = true + if def.AuthorizationURL == "" { + def.AuthorizationURL = flow.AuthorizationURL + } + if def.TokenURL == "" { + def.TokenURL = flow.TokenURL + } + } + if flow := scheme.Spec.Flows.Implicit; flow != nil { + def.HasImplicitFlow = true + if def.AuthorizationURL == "" { + def.AuthorizationURL = flow.AuthorizationURL + } + } + } + defs = append(defs, def) + } + sort.Slice(defs, func(i, j int) bool { + return defs[i].Name < defs[j].Name + }) + return defs +} + +func buildEventDefinition(openAPI *spec.OpenAPI, name, displayName, source, expression, defaultPath string, item spec.PathItem, root *spec.OpenAPI, pathItem spec.PathItem, op *spec.Operation) (eventDefinition, error) { + methods := make([]string, 0, len(item.Operations())) + for method := range item.Operations() { + methods = append(methods, method) + } + sort.Strings(methods) + if len(methods) == 0 { + return eventDefinition{}, fmt.Errorf("%s %q has no operations", source, displayName) + } + + defaultMethod := methods[0] + op = item.Operations()[defaultMethod] + summary := displayName + if op != nil && op.Summary != "" { + summary = op.Summary + } + description := "" + if op != nil { + description = op.Description + } + + sampleJSON := "{}" + if op != nil { + payload, err := mock.GeneratePayloadForOperation(openAPI, op) + if err == nil 
{ + if data, marshalErr := json.Marshal(payload); marshalErr == nil { + sampleJSON = string(data) + } + } + } + + metadata := resolveEventMetadata(root, pathItem, op) + + return eventDefinition{ + Name: firstNonEmpty(metadata.EventName, name), + DisplayName: displayName, + Source: source, + Expression: expression, + DefaultPath: firstNonEmpty(metadata.EventPath, defaultPath), + Methods: methods, + DefaultMethod: defaultMethod, + Summary: summary, + Description: description, + SampleJSON: sampleJSON, + SignatureMode: metadata.SignatureMode, + SignatureHeader: metadata.SignatureHeader, + SignatureAlgorithm: metadata.SignatureAlgorithm, + SignatureIncludeTimestamp: metadata.SignatureIncludeTimestamp, + SignatureTimestampHeader: metadata.SignatureTimestampHeader, + }, nil +} + +type eventMetadata struct { + EventName string + EventPath string + SignatureMode string + SignatureHeader string + SignatureAlgorithm string + SignatureIncludeTimestamp bool + SignatureTimestampHeader string +} + +func resolveEventMetadata(root *spec.OpenAPI, pathItem spec.PathItem, op *spec.Operation) eventMetadata { + meta := eventMetadata{} + if root != nil { + meta.EventName = root.XClimateEventName + meta.EventPath = root.XClimateEventPath + meta.SignatureMode = root.XClimateSignatureMode + meta.SignatureHeader = root.XClimateSignatureHeader + meta.SignatureAlgorithm = root.XClimateSignatureAlgorithm + meta.SignatureIncludeTimestamp = root.XClimateSignatureIncludeTimestamp + meta.SignatureTimestampHeader = root.XClimateSignatureTimestampHeader + } + if pathItem.XClimateEventName != "" { + meta.EventName = pathItem.XClimateEventName + } + if pathItem.XClimateEventPath != "" { + meta.EventPath = pathItem.XClimateEventPath + } + if pathItem.XClimateSignatureMode != "" { + meta.SignatureMode = pathItem.XClimateSignatureMode + } + if pathItem.XClimateSignatureHeader != "" { + meta.SignatureHeader = pathItem.XClimateSignatureHeader + } + if pathItem.XClimateSignatureAlgorithm != "" { + 
// firstNonEmpty returns the first argument containing a non-whitespace
// character, preserved exactly as passed (untrimmed), or "" when every
// argument is blank.
func firstNonEmpty(values ...string) string {
	for _, candidate := range values {
		if strings.TrimSpace(candidate) == "" {
			continue
		}
		return candidate
	}
	return ""
}
// dedupeEventName returns a unique event name, appending a numeric suffix
// ("name-2", "name-3", ...) when the base name was already handed out.
// Every name it returns is recorded in used, so a generated suffixed name
// can never collide with a later literal duplicate of that same name
// (previously "x-2" derived from a second "x" was not registered, and a
// subsequent spec-level "x-2" would silently produce two identical names).
func dedupeEventName(name string, used map[string]int) string {
	if used[name] == 0 {
		used[name] = 1
		return name
	}
	// Probe successive suffixes until one is free, registering the winner.
	for {
		used[name]++
		candidate := fmt.Sprintf("%s-%d", name, used[name])
		if used[candidate] == 0 {
			used[candidate] = 1
			return candidate
		}
	}
}
+ spec.NormalizeName(scheme.Name) if !seenVars[varName] { seenVars[varName] = true flagName := kebabCase(scheme.Name) + "-key" @@ -251,9 +657,12 @@ func rootGoContent(openAPI *spec.OpenAPI, cliName string, schemes []auth.Scheme) varName, flagName, "API key for "+scheme.Name, ) keyExpr := fmt.Sprintf(` + if %s == "" { + %s = getConfigValue(%q) + } if %s == "" { %s = os.Getenv(%q) - }`, varName, varName, envVar) + }`, varName, varName, configKey, varName, varName, envVar) switch scheme.Spec.In { case "header": authHeadersBody.WriteString(keyExpr) @@ -292,6 +701,9 @@ func rootGoContent(openAPI *spec.OpenAPI, cliName string, schemes []auth.Scheme) _, _ = fmt.Fprintf(&authHeadersBody, ` { tok := bearerToken + if tok == "" { + tok = getConfigValue("auth.bearer_token") + } if tok == "" { tok = os.Getenv(%q) } @@ -313,10 +725,16 @@ func rootGoContent(openAPI *spec.OpenAPI, cliName string, schemes []auth.Scheme) _, _ = fmt.Fprintf(&authHeadersBody, ` { u := username + if u == "" { + u = getConfigValue("auth.basic_username") + } if u == "" { u = os.Getenv(%q) } p := password + if p == "" { + p = getConfigValue("auth.basic_password") + } if p == "" { p = os.Getenv(%q) } @@ -352,15 +770,24 @@ func rootGoContent(openAPI *spec.OpenAPI, cliName string, schemes []auth.Scheme) _, _ = fmt.Fprintf(&authHeadersBody, ` { tok := oauth2Token + if tok == "" { + tok = getConfigValue("auth.oauth2_token") + } if tok == "" { tok = os.Getenv(%q) } if tok == "" { cid := clientID + if cid == "" { + cid = getConfigValue("auth.oauth2_client_id") + } if cid == "" { cid = os.Getenv(%q) } csec := clientSecret + if csec == "" { + csec = getConfigValue("auth.oauth2_client_secret") + } if csec == "" { csec = os.Getenv(%q) } @@ -501,109 +928,37 @@ func fetchOAuth2Token(tokenURL, clientID, clientSecret string) (string, error) { } _ = base64Import - return fmt.Sprintf(`package cmd - -import ( -%s) - -var ( - outputFormat string - baseURL string -%s%s) - -const defaultBaseURLTemplate = %q - -var version = 
%q - -var rootCmd = &cobra.Command{ - Use: %q, - Short: %q, - Version: version, -} - -// Execute runs the root command. -func Execute() error { - return rootCmd.Execute() -} - -func init() { - rootCmd.PersistentFlags().StringVar(&outputFormat, "output", "json", "Output format: json|table|raw") - rootCmd.PersistentFlags().StringVar(&baseURL, "base-url", "", "Override API base URL") -%s%s} - -func getBaseURL() string { - if baseURL != "" { - return baseURL - } - if v := os.Getenv(%q); v != "" { - return v - } - return resolveDefaultBaseURL() -} - -func resolveDefaultBaseURL() string { -%s -} - -// getAuthHeaders returns HTTP headers required for authentication. -// Priority: CLI flag → environment variable → empty. -func getAuthHeaders() map[string]string { - headers := map[string]string{} -%s - return headers -} - -// getAuthQueryParams returns query parameters required for authentication -// (used when an API key scheme has in: query). -func getAuthQueryParams() map[string]string { - params := map[string]string{} -%s - return params -} - -// writeOutput prints v as indented JSON to stdout. -func writeOutput(v interface{}) { - enc := json.NewEncoder(os.Stdout) - enc.SetIndent("", " ") - if err := enc.Encode(v); err != nil { - fmt.Fprintln(os.Stderr, "error encoding output:", err) - os.Exit(1) - } -} - -// exitWithError prints an error as JSON to stderr and exits non-zero. 
-func exitWithError(statusCode int, code, message string, raw interface{}) { - type errObj struct { - Status int `+"`json:\"status\"`"+` - Code string `+"`json:\"code\"`"+` - Message string `+"`json:\"message\"`"+` - Raw interface{} `+"`json:\"raw,omitempty\"`"+` - } - type errorWrapper struct { - Error errObj `+"`json:\"error\"`"+` - } - obj := errorWrapper{Error: errObj{Status: statusCode, Code: code, Message: message, Raw: raw}} - enc := json.NewEncoder(os.Stderr) - enc.SetIndent("", " ") - _ = enc.Encode(obj) - os.Exit(1) - } -%s`, - imports.String(), - authVarDecls.String(), - serverVarDecls.String(), - baseURL, - openAPI.Info.Version, - cliName, - description, - authFlagInits.String(), - serverVarFlagInits.String(), - cliUpper+"_BASE_URL", - defaultBaseURLResolver, - authHeadersBody.String(), - authQueryBody.String(), - oauth2Helper, - ) + return renderTemplate("root.go.tmpl", struct { + Imports string + AuthVarDecls string + ServerVarDecls string + DefaultBaseURLTemplate string + Version string + CLIName string + Description string + AuthFlagInits string + ServerVarFlagInits string + BaseURLEnv string + ResolveDefaultBaseURL string + AuthHeaders string + AuthQuery string + OAuth2Helper string + }{ + Imports: imports.String(), + AuthVarDecls: authVarDecls.String(), + ServerVarDecls: serverVarDecls.String(), + DefaultBaseURLTemplate: baseURL, + Version: openAPI.Info.Version, + CLIName: cliName, + Description: description, + AuthFlagInits: authFlagInits.String(), + ServerVarFlagInits: serverVarFlagInits.String(), + BaseURLEnv: cliUpper + "_BASE_URL", + ResolveDefaultBaseURL: defaultBaseURLResolver, + AuthHeaders: authHeadersBody.String(), + AuthQuery: authQueryBody.String(), + OAuth2Helper: oauth2Helper, + }) } // commandsGoContent generates the cobra subcommands for all operations. 
@@ -856,110 +1211,108 @@ func commandsGoContent(openAPI *spec.OpenAPI, cliName string) (string, error) { return sb.String(), nil } -// clientGoContent generates the internal/client/client.go content. -func clientGoContent(openAPI *spec.OpenAPI) string { - baseURL := "" - if len(openAPI.Servers) > 0 { - baseURL = openAPI.Servers[0].URL - } - - return fmt.Sprintf(`package client - -import ( - "bytes" - "encoding/json" - "fmt" - "io" - "net/http" - "net/url" - "strings" - "time" -) - -// DefaultBaseURL is the default server URL. -const DefaultBaseURL = %q - -// Client is an HTTP API client. -type Client struct { - BaseURL string - Headers map[string]string - HTTPClient *http.Client +// eventsGoContent generates the cobra commands for event handling. +func eventsGoContent(cliName string, defs []eventDefinition) (string, error) { + return renderTemplate("events.go.tmpl", struct { + CLIName string + EventDefinitions string + WebhookSecretEnv string + }{ + CLIName: cliName, + EventDefinitions: eventDefinitionsLiteral(defs), + WebhookSecretEnv: strings.ToUpper(strings.ReplaceAll(cliName, "-", "_")) + "_WEBHOOK_SECRET", + }) } -// Response holds an API response. 
-type Response struct { - StatusCode int - Body string - Raw interface{} +func eventDefinitionsLiteral(defs []eventDefinition) string { + var sb strings.Builder + for _, def := range defs { + methods := make([]string, 0, len(def.Methods)) + for _, method := range def.Methods { + methods = append(methods, fmt.Sprintf("%q", method)) + } + _, _ = fmt.Fprintf(&sb, "\t{\n") + _, _ = fmt.Fprintf(&sb, "\t\tName: %q,\n", def.Name) + _, _ = fmt.Fprintf(&sb, "\t\tDisplayName: %q,\n", def.DisplayName) + _, _ = fmt.Fprintf(&sb, "\t\tSource: %q,\n", def.Source) + _, _ = fmt.Fprintf(&sb, "\t\tExpression: %q,\n", def.Expression) + _, _ = fmt.Fprintf(&sb, "\t\tDefaultPath: %q,\n", def.DefaultPath) + _, _ = fmt.Fprintf(&sb, "\t\tMethods: []string{%s},\n", strings.Join(methods, ", ")) + _, _ = fmt.Fprintf(&sb, "\t\tDefaultMethod: %q,\n", def.DefaultMethod) + _, _ = fmt.Fprintf(&sb, "\t\tSummary: %q,\n", def.Summary) + _, _ = fmt.Fprintf(&sb, "\t\tDescription: %q,\n", def.Description) + _, _ = fmt.Fprintf(&sb, "\t\tSampleJSON: %q,\n", def.SampleJSON) + _, _ = fmt.Fprintf(&sb, "\t\tSignatureMode: %q,\n", def.SignatureMode) + _, _ = fmt.Fprintf(&sb, "\t\tSignatureHeader: %q,\n", def.SignatureHeader) + _, _ = fmt.Fprintf(&sb, "\t\tSignatureAlgorithm: %q,\n", def.SignatureAlgorithm) + _, _ = fmt.Fprintf(&sb, "\t\tSignatureIncludeTimestamp: %t,\n", def.SignatureIncludeTimestamp) + _, _ = fmt.Fprintf(&sb, "\t\tSignatureTimestampHeader: %q,\n", def.SignatureTimestampHeader) + _, _ = fmt.Fprintf(&sb, "\t},\n") + } + return sb.String() } -// NewClient creates a new Client. -func NewClient(baseURL string, headers map[string]string) *Client { - if baseURL == "" { - baseURL = DefaultBaseURL - } - return &Client{ - BaseURL: strings.TrimRight(baseURL, "/"), - Headers: headers, - HTTPClient: &http.Client{Timeout: 30 * time.Second}, - } +// internalEventsGoContent generates helpers for local event listening. 
+func internalEventsGoContent() (string, error) { + return renderTemplate("internal_events.go.tmpl", nil) } -// Do executes an HTTP request. -func (c *Client) Do(method, path string, query map[string]string, body []byte, extraHeaders ...map[string]string) (*Response, error) { - fullURL := c.BaseURL + path - if len(query) > 0 { - params := url.Values{} - for k, v := range query { - params.Set(k, v) - } - fullURL += "?" + params.Encode() - } - - var bodyReader io.Reader - if body != nil { - bodyReader = bytes.NewReader(body) - } - - req, err := http.NewRequest(method, fullURL, bodyReader) - if err != nil { - return nil, fmt.Errorf("creating request: %%w", err) - } - - req.Header.Set("Content-Type", "application/json") - req.Header.Set("Accept", "application/json") - for k, v := range c.Headers { - req.Header.Set(k, v) - } - for _, eh := range extraHeaders { - for k, v := range eh { - req.Header.Set(k, v) - } - } +func configGoContent(cliName string) (string, error) { + return renderTemplate("config.go.tmpl", struct { + CLIName string + }{ + CLIName: cliName, + }) +} - resp, err := c.HTTPClient.Do(req) - if err != nil { - return nil, fmt.Errorf("making request: %%w", err) - } - defer resp.Body.Close() +func internalConfigGoContent(cliName string) (string, error) { + return renderTemplate("internal_config.go.tmpl", struct { + CLIName string + }{ + CLIName: cliName, + }) +} - respBody, err := io.ReadAll(resp.Body) - if err != nil { - return nil, fmt.Errorf("reading response: %%w", err) - } +func authGoContent(cliName string, defs []authSchemeDefinition) (string, error) { + return renderTemplate("auth.go.tmpl", struct { + CLIName string + AuthSchemes string + }{ + CLIName: cliName, + AuthSchemes: authSchemeDefinitionsLiteral(defs), + }) +} - var raw interface{} - if len(respBody) > 0 { - _ = json.Unmarshal(respBody, &raw) +// clientGoContent generates the internal/client/client.go content. 
+func clientGoContent(openAPI *spec.OpenAPI) (string, error) { + baseURL := "" + if len(openAPI.Servers) > 0 { + baseURL = openAPI.Servers[0].URL } - return &Response{ - StatusCode: resp.StatusCode, - Body: string(respBody), - Raw: raw, - }, nil + return renderTemplate("client.go.tmpl", struct { + DefaultBaseURL string + }{ + DefaultBaseURL: baseURL, + }) } - `, baseURL) + +func authSchemeDefinitionsLiteral(defs []authSchemeDefinition) string { + var sb strings.Builder + for _, def := range defs { + _, _ = fmt.Fprintf(&sb, "\t{\n") + _, _ = fmt.Fprintf(&sb, "\t\tName: %q,\n", def.Name) + _, _ = fmt.Fprintf(&sb, "\t\tConfigKey: %q,\n", def.ConfigKey) + _, _ = fmt.Fprintf(&sb, "\t\tType: %q,\n", def.Type) + _, _ = fmt.Fprintf(&sb, "\t\tAuthorizationURL: %q,\n", def.AuthorizationURL) + _, _ = fmt.Fprintf(&sb, "\t\tTokenURL: %q,\n", def.TokenURL) + _, _ = fmt.Fprintf(&sb, "\t\tHasClientCredentials: %t,\n", def.HasClientCredentials) + _, _ = fmt.Fprintf(&sb, "\t\tHasPasswordFlow: %t,\n", def.HasPasswordFlow) + _, _ = fmt.Fprintf(&sb, "\t\tHasAuthorizationCode: %t,\n", def.HasAuthorizationCode) + _, _ = fmt.Fprintf(&sb, "\t\tHasImplicitFlow: %t,\n", def.HasImplicitFlow) + _, _ = fmt.Fprintf(&sb, "\t},\n") + } + return sb.String() } // --- Naming helpers --- diff --git a/internal/generator/generator_test.go b/internal/generator/generator_test.go index 9505ef1..15a6398 100644 --- a/internal/generator/generator_test.go +++ b/internal/generator/generator_test.go @@ -1,7 +1,10 @@ package generator_test import ( + "go/parser" + "go/token" "os" + "os/exec" "path/filepath" "strings" "testing" @@ -30,10 +33,38 @@ func sampleOpenAPI() *spec.OpenAPI { }, }, Post: &spec.Operation{ - OperationID: "pets_create", - Summary: "Create a pet", - Tags: []string{"pets"}, - RequestBody: &spec.RequestBody{Required: true}, + OperationID: "pets_create", + Summary: "Create a pet", + Tags: []string{"pets"}, + XClimateEventName: "pet-created", + XClimateSignatureMode: "hmac", + 
XClimateSignatureHeader: "X-GitHub-Signature", + XClimateSignatureAlgorithm: "sha256", + XClimateSignatureIncludeTimestamp: false, + RequestBody: &spec.RequestBody{Required: true}, + Callbacks: map[string]spec.Callback{ + "petCreated": { + "{$request.body#/callback_url}": { + Post: &spec.Operation{ + Summary: "Pet created callback", + XClimateEventPath: "/webhooks/pet-created", + RequestBody: &spec.RequestBody{ + Content: map[string]spec.MediaType{ + "application/json": { + Schema: &spec.Schema{ + Type: "object", + Properties: map[string]*spec.Schema{ + "id": {Type: "string"}, + "type": {Type: "string"}, + }, + }, + }, + }, + }, + }, + }, + }, + }, }, }, "/pets/{petId}": { @@ -47,6 +78,45 @@ func sampleOpenAPI() *spec.OpenAPI { }, }, }, + Webhooks: map[string]spec.PathItem{ + "payment.succeeded": { + Post: &spec.Operation{ + Summary: "Payment succeeded webhook", + XClimateEventName: "payment-succeeded", + XClimateEventPath: "/webhooks/payment-succeeded", + XClimateSignatureMode: "hmac", + XClimateSignatureHeader: "X-Signature", + XClimateSignatureAlgorithm: "sha256", + XClimateSignatureIncludeTimestamp: true, + XClimateSignatureTimestampHeader: "X-Signature-Timestamp", + RequestBody: &spec.RequestBody{ + Content: map[string]spec.MediaType{ + "application/json": { + Schema: &spec.Schema{ + Type: "object", + Properties: map[string]*spec.Schema{ + "event_id": {Type: "string"}, + "type": {Type: "string"}, + }, + }, + }, + }, + }, + }, + }, + }, + Components: spec.Components{ + SecuritySchemes: map[string]spec.SecurityScheme{ + "oauth": { + Type: "oauth2", + Flows: &spec.OAuthFlows{ + ClientCredentials: &spec.OAuthFlow{ + TokenURL: "https://petstore.example.com/oauth/token", + }, + }, + }, + }, + }, } } @@ -100,9 +170,14 @@ func TestGenerateCreatesFiles(t *testing.T) { expectedFiles := []string{ "go.mod", "main.go", + "cmd/auth.go", + "cmd/config.go", "cmd/root.go", "cmd/commands.go", + "cmd/events.go", "internal/client/client.go", + "internal/config/config.go", + 
"internal/events/events.go", "climate_meta.json", } for _, f := range expectedFiles { @@ -233,6 +308,45 @@ func TestGenerateRootVersionIsBuildOverridable(t *testing.T) { } } +func TestGenerateIncludesEventsListenerCommand(t *testing.T) { + outDir := t.TempDir() + openAPI := sampleOpenAPI() + + _, err := generator.Generate(openAPI, []byte(`{}`), generator.Options{ + CLIName: "petstore", + OutDir: outDir, + NoBuild: true, + Force: true, + }) + if err != nil { + t.Fatalf("Generate() error = %v", err) + } + + eventsCmd, err := os.ReadFile(filepath.Join(outDir, "cmd", "events.go")) + if err != nil { + t.Fatalf("reading cmd/events.go: %v", err) + } + eventsContent := string(eventsCmd) + if !strings.Contains(eventsContent, `Use: "events"`) { + t.Fatal("generated CLI should include an events command group") + } + if !strings.Contains(eventsContent, `Use: "list"`) { + t.Fatal("generated CLI should include an events list command") + } + if !strings.Contains(eventsContent, `Use: "listen [event-name]"`) { + t.Fatal("generated CLI should include an events listen command") + } + if !strings.Contains(eventsContent, `Use: "emit "`) { + t.Fatal("generated CLI should include an events emit command") + } + if !strings.Contains(eventsContent, `"listener.started"`) { + t.Fatal("events listener should emit structured startup records") + } + if !strings.Contains(eventsContent, "payment-succeeded") { + t.Fatal("generated events command should include named webhook definitions") + } +} + func TestGenerateServerVariableFlagsAndInterpolation(t *testing.T) { outDir := t.TempDir() openAPI := sampleOpenAPI() @@ -267,7 +381,7 @@ func TestGenerateServerVariableFlagsAndInterpolation(t *testing.T) { } content := string(data) - if !strings.Contains(content, "const defaultBaseURLTemplate = \"https://{region}.api.example.com/{basePath}\"") { + if !strings.Contains(content, "defaultBaseURLTemplate") { t.Fatal("root.go should keep the templated server URL") } if !strings.Contains(content, 
`StringVar(&serverVarRegion, "server-var-region"`) { @@ -282,10 +396,324 @@ func TestGenerateServerVariableFlagsAndInterpolation(t *testing.T) { if !strings.Contains(content, "PETSTORE_SERVER_VAR_BASE_PATH") { t.Fatal("root.go should expose PETSTORE_SERVER_VAR_BASE_PATH env override") } - if !strings.Contains(content, `u = strings.ReplaceAll(u, "{region}", v)`) { - t.Fatal("root.go should interpolate {region}") + if !strings.Contains(content, `strings.ReplaceAll`) { + t.Fatal("root.go should interpolate server variables") + } +} + +func TestGenerateIncludesConfigCommands(t *testing.T) { + outDir := t.TempDir() + openAPI := sampleOpenAPI() + + _, err := generator.Generate(openAPI, []byte(`{}`), generator.Options{ + CLIName: "petstore", + OutDir: outDir, + NoBuild: true, + Force: true, + }) + if err != nil { + t.Fatalf("Generate() error = %v", err) + } + + configCmd, err := os.ReadFile(filepath.Join(outDir, "cmd", "config.go")) + if err != nil { + t.Fatalf("reading cmd/config.go: %v", err) + } + configContent := string(configCmd) + if !strings.Contains(configContent, `Use: "config"`) { + t.Fatal("generated CLI should include a config command group") + } + if !strings.Contains(configContent, `Use: "list"`) { + t.Fatal("generated CLI should include config list") + } + if !strings.Contains(configContent, `Use: "set "`) { + t.Fatal("generated CLI should include config set") + } + if !strings.Contains(configContent, `"secret"`) { + t.Fatal("generated config command should support secret storage") + } +} + +func TestGenerateIncludesAuthCommands(t *testing.T) { + outDir := t.TempDir() + openAPI := sampleOpenAPI() + + _, err := generator.Generate(openAPI, []byte(`{}`), generator.Options{ + CLIName: "petstore", + OutDir: outDir, + NoBuild: true, + Force: true, + }) + if err != nil { + t.Fatalf("Generate() error = %v", err) + } + + authCmd, err := os.ReadFile(filepath.Join(outDir, "cmd", "auth.go")) + if err != nil { + t.Fatalf("reading cmd/auth.go: %v", err) + } + authContent 
:= string(authCmd) + if !strings.Contains(authContent, `Use: "auth"`) { + t.Fatal("generated CLI should include an auth command group") + } + if !strings.Contains(authContent, `Use: "login"`) { + t.Fatal("generated CLI should include auth login") + } + if !strings.Contains(authContent, `Use: "status"`) { + t.Fatal("generated CLI should include auth status") + } + if !strings.Contains(authContent, `Use: "logout"`) { + t.Fatal("generated CLI should include auth logout") + } +} + +func TestGenerateIncludesTunnelProviderHelpers(t *testing.T) { + outDir := t.TempDir() + openAPI := sampleOpenAPI() + + _, err := generator.Generate(openAPI, []byte(`{}`), generator.Options{ + CLIName: "petstore", + OutDir: outDir, + NoBuild: true, + Force: true, + }) + if err != nil { + t.Fatalf("Generate() error = %v", err) + } + + eventsHelper, err := os.ReadFile(filepath.Join(outDir, "internal", "events", "events.go")) + if err != nil { + t.Fatalf("reading internal/events/events.go: %v", err) + } + content := string(eventsHelper) + for _, want := range []string{ + `"cloudflared"`, + `"hmac"`, + `"sha256"`, + `"sha1"`, + `"sha512"`, + `"listener.tunnel"`, + `"verified"`, + `X-Signature`, + } { + if !strings.Contains(content, want) { + t.Fatalf("generated events helper should mention %q", want) + } + } +} + +func TestGeneratedGoFilesParse(t *testing.T) { + outDir := t.TempDir() + openAPI := sampleOpenAPI() + + _, err := generator.Generate(openAPI, []byte(`{}`), generator.Options{ + CLIName: "petstore", + OutDir: outDir, + NoBuild: true, + Force: true, + }) + if err != nil { + t.Fatalf("Generate() error = %v", err) + } + + goFiles := []string{ + filepath.Join(outDir, "main.go"), + filepath.Join(outDir, "cmd", "auth.go"), + filepath.Join(outDir, "cmd", "config.go"), + filepath.Join(outDir, "cmd", "root.go"), + filepath.Join(outDir, "cmd", "commands.go"), + filepath.Join(outDir, "cmd", "events.go"), + filepath.Join(outDir, "internal", "client", "client.go"), + filepath.Join(outDir, 
"internal", "config", "config.go"), + filepath.Join(outDir, "internal", "events", "events.go"), + } + + fset := token.NewFileSet() + for _, path := range goFiles { + if _, err := parser.ParseFile(fset, path, nil, parser.AllErrors); err != nil { + t.Fatalf("generated Go file %s should parse: %v", path, err) + } + } +} + +func TestGeneratedEventsRuntime(t *testing.T) { + outDir := t.TempDir() + openAPI := sampleOpenAPI() + + _, err := generator.Generate(openAPI, []byte(`{}`), generator.Options{ + CLIName: "petstore", + OutDir: outDir, + NoBuild: true, + Force: true, + }) + if err != nil { + t.Fatalf("Generate() error = %v", err) + } + + testContent := `package events + +import ( + "context" + "net/http" + "os" + "path/filepath" + "testing" + "time" +) + +func TestTunnelProvidersEndToEnd(t *testing.T) { + tempDir := t.TempDir() + scriptPath := filepath.Join(tempDir, "cloudflared") + script := "#!/bin/sh\nprintf '%s\\n' 'https://cloudflared.example.test'\nsleep 1\n" + if err := os.WriteFile(scriptPath, []byte(script), 0o755); err != nil { + t.Fatalf("write script: %v", err) + } + + t.Setenv("PATH", tempDir+string(os.PathListSeparator)+os.Getenv("PATH")) + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + records := make(chan TunnelRecord, 1) + _, err := StartTunnel(ctx, "cloudflared", "http://127.0.0.1:8081/webhooks/test", func(v interface{}) { + if rec, ok := v.(TunnelRecord); ok { + select { + case records <- rec: + default: + } + } + }) + if err != nil { + t.Fatalf("StartTunnel() error = %v", err) + } + + select { + case rec := <-records: + if rec.PublicURL != "https://cloudflared.example.test" { + t.Fatalf("PublicURL = %q", rec.PublicURL) + } + case <-time.After(2 * time.Second): + t.Fatal("timed out waiting for tunnel record") + } +} + +func TestHMACVerificationBodyOnly(t *testing.T) { + body := []byte("{\"action\":\"ping\"}") + headers, err := SignatureHeaders(SignatureOptions{ + Mode: "hmac", + Header: "X-Signature", + Secret: 
"secret", + Algorithm: "sha256", + }, body) + if err != nil { + t.Fatalf("SignatureHeaders() error = %v", err) + } + verified, err := verifySignature(SignatureOptions{ + Mode: "hmac", + Header: "X-Signature", + Secret: "secret", + Algorithm: "sha256", + }, http.Header{ + "X-Signature": []string{headers["X-Signature"]}, + }, body) + if err != nil { + t.Fatalf("verifySignature() error = %v", err) + } + if !verified { + t.Fatal("expected verification to pass") + } +} + +func TestHMACVerificationWithTimestamp(t *testing.T) { + body := []byte("{\"action\":\"ping\"}") + headers, err := SignatureHeaders(SignatureOptions{ + Mode: "hmac", + Header: "X-Signature", + Secret: "secret", + Algorithm: "sha512", + IncludeTimestamp: true, + TimestampHeader: "X-Signature-Timestamp", + }, body) + if err != nil { + t.Fatalf("SignatureHeaders() error = %v", err) + } + httpHeaders := http.Header{} + for key, value := range headers { + httpHeaders.Set(key, value) + } + verified, err := verifySignature(SignatureOptions{ + Mode: "hmac", + Header: "X-Signature", + Secret: "secret", + Algorithm: "sha512", + IncludeTimestamp: true, + TimestampHeader: "X-Signature-Timestamp", + TimestampTolerance: time.Minute, + }, httpHeaders, body) + if err != nil { + t.Fatalf("verifySignature() error = %v", err) + } + if !verified { + t.Fatal("expected verification to pass") + } +} +` + testPath := filepath.Join(outDir, "internal", "events", "events_runtime_test.go") + if err := os.WriteFile(testPath, []byte(testContent), 0o644); err != nil { + t.Fatalf("WriteFile(%s) error = %v", testPath, err) + } + + configTestContent := `package config + +import "testing" + +func TestConfigurationsLifecycle(t *testing.T) { + store := newStore("/tmp/config.json") + if store.ActiveProfileName() != "default" { + t.Fatalf("active = %q", store.ActiveProfileName()) } - if !strings.Contains(content, `u = strings.ReplaceAll(u, "{basePath}", v)`) { - t.Fatal("root.go should interpolate {basePath}") + if err := 
store.CreateProfile("work"); err != nil { + t.Fatalf("CreateProfile() error = %v", err) + } + if err := store.UseProfile("work"); err != nil { + t.Fatalf("UseProfile() error = %v", err) + } + store.Set("core.base_url", "https://api.example.test", false) + store.Set("events.signing_secret", "secret", true) + if value, ok := store.Get("core.base_url"); !ok || value != "https://api.example.test" { + t.Fatalf("Get(core.base_url) = %q, %v", value, ok) + } + if value, ok := store.Get("events.signing_secret"); !ok || value != "secret" { + t.Fatalf("Get(events.signing_secret) = %q, %v", value, ok) + } + if !store.Unset("core.base_url") { + t.Fatal("Unset(core.base_url) should return true") + } +} +` + configTestPath := filepath.Join(outDir, "internal", "config", "config_runtime_test.go") + if err := os.WriteFile(configTestPath, []byte(configTestContent), 0o644); err != nil { + t.Fatalf("WriteFile(%s) error = %v", configTestPath, err) + } + + packageDir, err := os.Getwd() + if err != nil { + t.Fatalf("Getwd() error = %v", err) + } + repoRoot := filepath.Clean(filepath.Join(packageDir, "..", "..")) + gomodcache := filepath.Join(repoRoot, ".cache", "go-mod") + gocache := filepath.Join(outDir, ".gocache") + + cmd := exec.Command("go", "test", "./internal/...") + cmd.Dir = outDir + cmd.Env = append(os.Environ(), + "GOCACHE="+gocache, + "GOMODCACHE="+gomodcache, + "GOSUMDB=off", + "GOPROXY=off", + ) + output, err := cmd.CombinedOutput() + if err != nil { + t.Fatalf("generated go test ./internal/... 
failed: %v\n%s", err, string(output)) } } diff --git a/internal/generator/templates/auth.go.tmpl b/internal/generator/templates/auth.go.tmpl new file mode 100644 index 0000000..adb2d3b --- /dev/null +++ b/internal/generator/templates/auth.go.tmpl @@ -0,0 +1,348 @@ +package cmd + +import ( + "bufio" + "bytes" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "os" + "strings" + + cfg "{{.CLIName}}/internal/config" + "github.com/spf13/cobra" +) + +type authSchemeDefinition struct { + Name string + ConfigKey string + Type string + AuthorizationURL string + TokenURL string + HasClientCredentials bool + HasPasswordFlow bool + HasAuthorizationCode bool + HasImplicitFlow bool +} + +var generatedAuthSchemes = []authSchemeDefinition{ +{{.AuthSchemes}}} + +var authSchemeName string + +var authCmd = &cobra.Command{ + Use: "auth", + Short: "Manage authentication for this CLI", +} + +var authLoginCmd = &cobra.Command{ + Use: "login", + Short: "Interactively configure authentication", + RunE: func(cmd *cobra.Command, args []string) error { + store, err := cfg.Load() + if err != nil { + return err + } + scheme, err := selectedAuthScheme(authSchemeName) + if err != nil { + return err + } + + reader := bufio.NewReader(cmd.InOrStdin()) + switch scheme.Type { + case "apiKey": + value, err := prompt(reader, cmd.ErrOrStderr(), "API key") + if err != nil { + return err + } + store.Set("auth.api_keys."+scheme.ConfigKey, value, true) + case "http_bearer", "openIdConnect": + token, err := prompt(reader, cmd.ErrOrStderr(), "Access token") + if err != nil { + return err + } + store.Set("auth.bearer_token", token, true) + case "http_basic": + username, err := prompt(reader, cmd.ErrOrStderr(), "Username") + if err != nil { + return err + } + password, err := prompt(reader, cmd.ErrOrStderr(), "Password (input visible)") + if err != nil { + return err + } + store.Set("auth.basic_username", username, false) + store.Set("auth.basic_password", password, true) + case "oauth2": + if err := 
interactiveOAuthLogin(reader, cmd.ErrOrStderr(), store, scheme); err != nil { + return err + } + default: + return fmt.Errorf("unsupported auth scheme %q", scheme.Type) + } + + if err := store.Save(); err != nil { + return err + } + type resp struct { + Active string `json:"active"` + Scheme string `json:"scheme"` + Status string `json:"status"` + } + writeJSON(resp{Active: store.ActiveProfileName(), Scheme: scheme.Name, Status: "configured"}) + return nil + }, +} + +var authStatusCmd = &cobra.Command{ + Use: "status", + Short: "Show authentication status for the active configuration", + RunE: func(cmd *cobra.Command, args []string) error { + store, err := cfg.Load() + if err != nil { + return err + } + + type schemeStatus struct { + Name string `json:"name"` + Type string `json:"type"` + Configured bool `json:"configured"` + Keys []string `json:"keys"` + } + type resp struct { + Active string `json:"active"` + Schemes []schemeStatus `json:"schemes"` + } + + statuses := make([]schemeStatus, 0, len(generatedAuthSchemes)) + for _, scheme := range generatedAuthSchemes { + keys := authKeysForScheme(scheme) + configured := false + for _, key := range keys { + if _, ok := store.Get(key); ok { + configured = true + break + } + } + statuses = append(statuses, schemeStatus{ + Name: scheme.Name, + Type: scheme.Type, + Configured: configured, + Keys: keys, + }) + } + + writeJSON(resp{Active: store.ActiveProfileName(), Schemes: statuses}) + return nil + }, +} + +var authLogoutCmd = &cobra.Command{ + Use: "logout", + Short: "Remove stored authentication from the active configuration", + RunE: func(cmd *cobra.Command, args []string) error { + store, err := cfg.Load() + if err != nil { + return err + } + + removed := []string{} + if authSchemeName != "" { + scheme, err := selectedAuthScheme(authSchemeName) + if err != nil { + return err + } + for _, key := range authKeysForScheme(scheme) { + if store.Unset(key) { + removed = append(removed, key) + } + } + } else { + for _, scheme 
:= range generatedAuthSchemes { + for _, key := range authKeysForScheme(scheme) { + if store.Unset(key) { + removed = append(removed, key) + } + } + } + } + + if err := store.Save(); err != nil { + return err + } + type resp struct { + Active string `json:"active"` + Removed []string `json:"removed"` + } + writeJSON(resp{Active: store.ActiveProfileName(), Removed: removed}) + return nil + }, +} + +func selectedAuthScheme(name string) (authSchemeDefinition, error) { + if len(generatedAuthSchemes) == 0 { + return authSchemeDefinition{}, fmt.Errorf("this CLI has no supported auth schemes") + } + if strings.TrimSpace(name) == "" { + return generatedAuthSchemes[0], nil + } + for _, scheme := range generatedAuthSchemes { + if scheme.Name == name { + return scheme, nil + } + } + return authSchemeDefinition{}, fmt.Errorf("unknown auth scheme %q", name) +} + +func authKeysForScheme(scheme authSchemeDefinition) []string { + switch scheme.Type { + case "apiKey": + return []string{"auth.api_keys." + scheme.ConfigKey} + case "http_bearer", "openIdConnect": + return []string{"auth.bearer_token"} + case "http_basic": + return []string{"auth.basic_username", "auth.basic_password"} + case "oauth2": + return []string{"auth.oauth2_token", "auth.oauth2_client_id", "auth.oauth2_client_secret", "auth.oauth2_username", "auth.oauth2_password"} + default: + return nil + } +} + +func interactiveOAuthLogin(reader *bufio.Reader, stderr io.Writer, store *cfg.Store, scheme authSchemeDefinition) error { + switch { + case scheme.HasClientCredentials: + clientID, err := prompt(reader, stderr, "OAuth2 client ID") + if err != nil { + return err + } + clientSecret, err := prompt(reader, stderr, "OAuth2 client secret (input visible)") + if err != nil { + return err + } + token, err := requestOAuthToken(scheme.TokenURL, url.Values{ + "grant_type": {"client_credentials"}, + "client_id": {clientID}, + "client_secret": {clientSecret}, + }) + if err != nil { + return err + } + 
store.Set("auth.oauth2_client_id", clientID, false) + store.Set("auth.oauth2_client_secret", clientSecret, true) + store.Set("auth.oauth2_token", token, true) + return nil + case scheme.HasPasswordFlow: + username, err := prompt(reader, stderr, "OAuth2 username") + if err != nil { + return err + } + password, err := prompt(reader, stderr, "OAuth2 password (input visible)") + if err != nil { + return err + } + clientID, err := prompt(reader, stderr, "OAuth2 client ID (optional)") + if err != nil { + return err + } + clientSecret, err := prompt(reader, stderr, "OAuth2 client secret (optional, input visible)") + if err != nil { + return err + } + form := url.Values{ + "grant_type": {"password"}, + "username": {username}, + "password": {password}, + } + if strings.TrimSpace(clientID) != "" { + form.Set("client_id", clientID) + } + if strings.TrimSpace(clientSecret) != "" { + form.Set("client_secret", clientSecret) + } + token, err := requestOAuthToken(scheme.TokenURL, form) + if err != nil { + return err + } + store.Set("auth.oauth2_username", username, false) + store.Set("auth.oauth2_password", password, true) + if strings.TrimSpace(clientID) != "" { + store.Set("auth.oauth2_client_id", clientID, false) + } + if strings.TrimSpace(clientSecret) != "" { + store.Set("auth.oauth2_client_secret", clientSecret, true) + } + store.Set("auth.oauth2_token", token, true) + return nil + default: + if scheme.AuthorizationURL != "" { + _, _ = fmt.Fprintf(stderr, "Open this URL in a browser and complete the flow:\n%s\n", scheme.AuthorizationURL) + } + token, err := prompt(reader, stderr, "Paste access token") + if err != nil { + return err + } + store.Set("auth.oauth2_token", token, true) + return nil + } +} + +func requestOAuthToken(tokenURL string, form url.Values) (string, error) { + if strings.TrimSpace(tokenURL) == "" { + return "", fmt.Errorf("oauth2 token URL is not available in the OpenAPI security scheme") + } + resp, err := http.PostForm(tokenURL, form) + if err != nil { + 
return "", fmt.Errorf("requesting oauth2 token: %w", err) + } + defer resp.Body.Close() + body, err := io.ReadAll(resp.Body) + if err != nil { + return "", fmt.Errorf("reading oauth2 token response: %w", err) + } + if resp.StatusCode >= 400 { + return "", fmt.Errorf("oauth2 token endpoint returned %d: %s", resp.StatusCode, strings.TrimSpace(string(body))) + } + + var decoded struct { + AccessToken string `json:"access_token"` + } + if err := json.Unmarshal(body, &decoded); err != nil { + return "", fmt.Errorf("parsing oauth2 token response: %w", err) + } + if strings.TrimSpace(decoded.AccessToken) == "" { + return "", fmt.Errorf("oauth2 token response did not contain access_token") + } + return decoded.AccessToken, nil +} + +func prompt(reader *bufio.Reader, stderr io.Writer, label string) (string, error) { + _, _ = fmt.Fprintf(stderr, "%s: ", label) + value, err := reader.ReadString('\n') + if err != nil && err != io.EOF { + return "", err + } + return strings.TrimSpace(value), nil +} + +func completeAuthSchemes(_ *cobra.Command, _ []string, _ string) ([]string, cobra.ShellCompDirective) { + names := make([]string, 0, len(generatedAuthSchemes)) + for _, scheme := range generatedAuthSchemes { + names = append(names, scheme.Name) + } + return names, cobra.ShellCompDirectiveNoFileComp +} + +func init() { + authLoginCmd.Flags().StringVar(&authSchemeName, "scheme", "", "Select the auth scheme to configure") + authLogoutCmd.Flags().StringVar(&authSchemeName, "scheme", "", "Select the auth scheme to remove") + _ = authLoginCmd.RegisterFlagCompletionFunc("scheme", completeAuthSchemes) + _ = authLogoutCmd.RegisterFlagCompletionFunc("scheme", completeAuthSchemes) + + authCmd.AddCommand(authLoginCmd) + authCmd.AddCommand(authStatusCmd) + authCmd.AddCommand(authLogoutCmd) + rootCmd.AddCommand(authCmd) +} diff --git a/internal/generator/templates/client.go.tmpl b/internal/generator/templates/client.go.tmpl new file mode 100644 index 0000000..294428d --- /dev/null +++ 
b/internal/generator/templates/client.go.tmpl @@ -0,0 +1,96 @@ +package client + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strings" + "time" +) + +// DefaultBaseURL is the default server URL. +const DefaultBaseURL = {{printf "%q" .DefaultBaseURL}} + +// Client is an HTTP API client. +type Client struct { + BaseURL string + Headers map[string]string + HTTPClient *http.Client +} + +// Response holds an API response. +type Response struct { + StatusCode int + Body string + Raw interface{} +} + +// NewClient creates a new Client. +func NewClient(baseURL string, headers map[string]string) *Client { + if baseURL == "" { + baseURL = DefaultBaseURL + } + return &Client{ + BaseURL: strings.TrimRight(baseURL, "/"), + Headers: headers, + HTTPClient: &http.Client{Timeout: 30 * time.Second}, + } +} + +// Do executes an HTTP request. +func (c *Client) Do(method, path string, query map[string]string, body []byte, extraHeaders ...map[string]string) (*Response, error) { + fullURL := c.BaseURL + path + if len(query) > 0 { + params := url.Values{} + for k, v := range query { + params.Set(k, v) + } + fullURL += "?" 
+ params.Encode() + } + + var bodyReader io.Reader + if body != nil { + bodyReader = bytes.NewReader(body) + } + + req, err := http.NewRequest(method, fullURL, bodyReader) + if err != nil { + return nil, fmt.Errorf("creating request: %w", err) + } + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Accept", "application/json") + for k, v := range c.Headers { + req.Header.Set(k, v) + } + for _, eh := range extraHeaders { + for k, v := range eh { + req.Header.Set(k, v) + } + } + + resp, err := c.HTTPClient.Do(req) + if err != nil { + return nil, fmt.Errorf("making request: %w", err) + } + defer resp.Body.Close() + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("reading response: %w", err) + } + + var raw interface{} + if len(respBody) > 0 { + _ = json.Unmarshal(respBody, &raw) + } + + return &Response{ + StatusCode: resp.StatusCode, + Body: string(respBody), + Raw: raw, + }, nil +} diff --git a/internal/generator/templates/config.go.tmpl b/internal/generator/templates/config.go.tmpl new file mode 100644 index 0000000..3a7ef66 --- /dev/null +++ b/internal/generator/templates/config.go.tmpl @@ -0,0 +1,225 @@ +package cmd + +import ( + "fmt" + + cfg "{{.CLIName}}/internal/config" + "github.com/spf13/cobra" +) + +var configSetSecret bool + +var configCmd = &cobra.Command{ + Use: "config", + Short: "Manage local CLI configuration", +} + +var configListCmd = &cobra.Command{ + Use: "list", + Short: "List properties in the active configuration", + RunE: func(cmd *cobra.Command, args []string) error { + store, err := cfg.Load() + if err != nil { + return err + } + + type entry struct { + Key string `json:"key"` + Value string `json:"value"` + Secret bool `json:"secret"` + } + type resp struct { + Path string `json:"path"` + Active string `json:"active_profile"` + Entries []entry `json:"entries"` + } + + masked := store.MaskedEntries() + entries := make([]entry, 0, len(masked)) + for _, item := range masked { + entries 
= append(entries, entry{Key: item.Key, Value: item.Value, Secret: item.Secret}) + } + + writeJSON(resp{ + Path: store.Path(), + Active: store.ActiveProfileName(), + Entries: entries, + }) + return nil + }, +} + +var configSetCmd = &cobra.Command{ + Use: "set ", + Short: "Set a property in the active configuration", + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + store, err := cfg.Load() + if err != nil { + return err + } + key := args[0] + value := args[1] + + store.Set(key, value, configSetSecret) + if err := store.Save(); err != nil { + return err + } + + type resp struct { + Active string `json:"active_profile"` + Updated string `json:"updated"` + Secret bool `json:"secret"` + } + writeJSON(resp{ + Active: store.ActiveProfileName(), + Updated: key, + Secret: configSetSecret, + }) + return nil + }, +} + +var configGetCmd = &cobra.Command{ + Use: "get ", + Short: "Get a property from the active configuration", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + store, err := cfg.Load() + if err != nil { + return err + } + key := args[0] + value, ok := store.Get(key) + if !ok { + return fmt.Errorf("property %q is not set in profile %q", key, store.ActiveProfileName()) + } + + type resp struct { + Active string `json:"active_profile"` + Key string `json:"key"` + Value string `json:"value"` + } + writeJSON(resp{Active: store.ActiveProfileName(), Key: key, Value: value}) + return nil + }, +} + +var configUnsetCmd = &cobra.Command{ + Use: "unset ", + Short: "Unset a property in the active configuration", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + store, err := cfg.Load() + if err != nil { + return err + } + key := args[0] + removed := store.Unset(key) + if err := store.Save(); err != nil { + return err + } + + type resp struct { + Active string `json:"active_profile"` + Removed string `json:"removed"` + Found bool `json:"found"` + } + 
writeJSON(resp{Active: store.ActiveProfileName(), Removed: key, Found: removed}) + return nil + }, +} + +var configProfilesCmd = &cobra.Command{ + Use: "profiles", + Short: "Manage named profiles", +} + +var configProfilesListCmd = &cobra.Command{ + Use: "list", + Short: "List profile names", + RunE: func(cmd *cobra.Command, args []string) error { + store, err := cfg.Load() + if err != nil { + return err + } + + type item struct { + Name string `json:"name"` + Active bool `json:"active"` + } + type resp struct { + Profiles []item `json:"profiles"` + } + + items := make([]item, 0, len(store.ProfileNames())) + for _, name := range store.ProfileNames() { + items = append(items, item{Name: name, Active: name == store.ActiveProfileName()}) + } + writeJSON(resp{Profiles: items}) + return nil + }, +} + +var configProfilesCreateCmd = &cobra.Command{ + Use: "create ", + Short: "Create a named profile", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + store, err := cfg.Load() + if err != nil { + return err + } + if err := store.CreateProfile(args[0]); err != nil { + return err + } + if err := store.Save(); err != nil { + return err + } + + type resp struct { + Created string `json:"created"` + } + writeJSON(resp{Created: args[0]}) + return nil + }, +} + +var configProfilesUseCmd = &cobra.Command{ + Use: "use ", + Short: "Use a named profile", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + store, err := cfg.Load() + if err != nil { + return err + } + if err := store.UseProfile(args[0]); err != nil { + return err + } + if err := store.Save(); err != nil { + return err + } + + type resp struct { + Active string `json:"active_profile"` + } + writeJSON(resp{Active: store.ActiveProfileName()}) + return nil + }, +} + +func init() { + configSetCmd.Flags().BoolVar(&configSetSecret, "secret", false, "Store the value as a secret and mask it in config list") + + 
configProfilesCmd.AddCommand(configProfilesListCmd) + configProfilesCmd.AddCommand(configProfilesCreateCmd) + configProfilesCmd.AddCommand(configProfilesUseCmd) + + configCmd.AddCommand(configListCmd) + configCmd.AddCommand(configSetCmd) + configCmd.AddCommand(configGetCmd) + configCmd.AddCommand(configUnsetCmd) + configCmd.AddCommand(configProfilesCmd) + rootCmd.AddCommand(configCmd) +} diff --git a/internal/generator/templates/events.go.tmpl b/internal/generator/templates/events.go.tmpl new file mode 100644 index 0000000..9cd9d42 --- /dev/null +++ b/internal/generator/templates/events.go.tmpl @@ -0,0 +1,351 @@ +package cmd + +import ( + "context" + "encoding/json" + "fmt" + "os" + "os/signal" + "time" + + cfg "{{.CLIName}}/internal/config" + eventlistener "{{.CLIName}}/internal/events" + "github.com/spf13/cobra" +) + +var generatedEventDefinitions = []eventlistener.EventDefinition{ +{{.EventDefinitions}}} + +var ( + eventsListenHost string + eventsListenPath string + eventsListenPort int + eventsListenResponseStatus int + eventsListenResponseBody string + eventsListenTunnel string + eventsListenSignatureMode string + eventsListenSignatureHeader string + eventsListenSigningSecret string + eventsListenSignatureAlgorithm string + eventsListenIncludeTimestamp bool + eventsListenTimestampHeader string + eventsListenTimestampTolerance int + + eventsEmitTargetURL string + eventsEmitMethod string + eventsEmitDataJSON string + eventsEmitDataFile string + eventsEmitSignatureMode string + eventsEmitSignatureHeader string + eventsEmitSigningSecret string + eventsEmitSignatureAlgorithm string + eventsEmitIncludeTimestamp bool + eventsEmitTimestampHeader string +) + +var eventsCmd = &cobra.Command{ + Use: "events", + Short: "Inspect, receive, and emit callback/webhook events", +} + +var eventsListCmd = &cobra.Command{ + Use: "list", + Short: "List known callback and webhook event definitions", + RunE: func(cmd *cobra.Command, args []string) error { + type listResp struct { + 
Events []eventlistener.EventDefinition `json:"events"` + } + writeJSON(listResp{Events: generatedEventDefinitions}) + return nil + }, +} + +var eventsListenCmd = &cobra.Command{ + Use: "listen [event-name]", + Short: "Start a local event listener", + Args: cobra.MaximumNArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + store, err := cfg.Load() + if err != nil { + return err + } + + var def eventlistener.EventDefinition + eventName := "" + if len(args) == 1 { + var ok bool + eventName = args[0] + def, ok = eventlistener.LookupEvent(generatedEventDefinitions, eventName) + if !ok { + return fmt.Errorf("unknown event %q", eventName) + } + } + + path := eventsListenPath + if eventName != "" && !cmd.Flags().Changed("path") { + path = def.DefaultPath + } + + allowedMethods := []string(nil) + if eventName != "" { + allowedMethods = append(allowedMethods, def.Methods...) + } + + signatureMode := eventlistener.ResolveString( + cmd.Flags().Changed("signature-mode"), eventsListenSignatureMode, + store, "events.signature_mode", + def.SignatureMode, + "none", + ) + signatureHeader := eventlistener.ResolveString( + cmd.Flags().Changed("signature-header"), eventsListenSignatureHeader, + store, "events.signature_header", + def.SignatureHeader, + "X-Signature", + ) + signatureAlgorithm := eventlistener.ResolveString( + cmd.Flags().Changed("signature-algorithm"), eventsListenSignatureAlgorithm, + store, "events.signature_algorithm", + def.SignatureAlgorithm, + "sha256", + ) + timestampHeader := eventlistener.ResolveString( + cmd.Flags().Changed("timestamp-header"), eventsListenTimestampHeader, + store, "events.timestamp_header", + def.SignatureTimestampHeader, + "X-Signature-Timestamp", + ) + includeTimestamp := eventlistener.ResolveBool( + cmd.Flags().Changed("include-timestamp"), eventsListenIncludeTimestamp, + store, "events.include_timestamp", + def.SignatureIncludeTimestamp, + false, + ) + tunnel := eventlistener.ResolveString( + cmd.Flags().Changed("tunnel"), 
eventsListenTunnel, + store, "events.tunnel", + "", + "none", + ) + + signingSecret := eventsListenSigningSecret + if signingSecret == "" { + if value, ok := store.Get("events.signing_secret"); ok { + signingSecret = value + } + } + if signingSecret == "" && signatureMode != "none" { + signingSecret = os.Getenv({{printf "%q" .WebhookSecretEnv}}) + } + + ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt) + defer stop() + + listener, err := eventlistener.NewListener(eventlistener.ListenOptions{ + Host: eventsListenHost, + Port: eventsListenPort, + Path: path, + EventName: eventName, + AllowedMethods: allowedMethods, + ResponseStatus: eventsListenResponseStatus, + ResponseBody: eventsListenResponseBody, + SignatureMode: signatureMode, + SignatureHeader: signatureHeader, + SigningSecret: signingSecret, + SignatureAlgorithm: signatureAlgorithm, + IncludeTimestamp: includeTimestamp, + TimestampHeader: timestampHeader, + TimestampTolerance: time.Duration(eventsListenTimestampTolerance) * time.Second, + }, emitEventRecord) + if err != nil { + return err + } + if err := listener.Start(); err != nil { + return err + } + + emitEventRecord(eventlistener.StartRecord{ + Type: "listener.started", + ListenURL: listener.ListenURL(), + Path: listener.Path(), + EventName: eventName, + Methods: listener.AllowedMethods(), + SignatureMode: signatureMode, + SignatureHeader: signatureHeader, + SignatureAlgorithm: signatureAlgorithm, + IncludeTimestamp: includeTimestamp, + }) + + if tunnel != "none" { + if _, err := eventlistener.StartTunnel(ctx, tunnel, listener.ListenURL(), emitEventRecord); err != nil { + return err + } + } + + <-ctx.Done() + shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + return listener.Shutdown(shutdownCtx) + }, +} + +var eventsEmitCmd = &cobra.Command{ + Use: "emit ", + Short: "Emit a synthetic event payload to a target URL", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args 
[]string) error { + store, err := cfg.Load() + if err != nil { + return err + } + + def, ok := eventlistener.LookupEvent(generatedEventDefinitions, args[0]) + if !ok { + return fmt.Errorf("unknown event %q", args[0]) + } + if eventsEmitTargetURL == "" { + return fmt.Errorf("missing required flag --target-url") + } + + method := eventsEmitMethod + if method == "" { + method = def.DefaultMethod + } + + payload, err := eventlistener.PayloadForEvent(def, eventsEmitDataJSON, eventsEmitDataFile) + if err != nil { + return err + } + + signatureMode := eventlistener.ResolveString( + cmd.Flags().Changed("signature-mode"), eventsEmitSignatureMode, + store, "events.signature_mode", + def.SignatureMode, + "none", + ) + signatureHeader := eventlistener.ResolveString( + cmd.Flags().Changed("signature-header"), eventsEmitSignatureHeader, + store, "events.signature_header", + def.SignatureHeader, + "X-Signature", + ) + signatureAlgorithm := eventlistener.ResolveString( + cmd.Flags().Changed("signature-algorithm"), eventsEmitSignatureAlgorithm, + store, "events.signature_algorithm", + def.SignatureAlgorithm, + "sha256", + ) + timestampHeader := eventlistener.ResolveString( + cmd.Flags().Changed("timestamp-header"), eventsEmitTimestampHeader, + store, "events.timestamp_header", + def.SignatureTimestampHeader, + "X-Signature-Timestamp", + ) + includeTimestamp := eventlistener.ResolveBool( + cmd.Flags().Changed("include-timestamp"), eventsEmitIncludeTimestamp, + store, "events.include_timestamp", + def.SignatureIncludeTimestamp, + false, + ) + + signingSecret := eventsEmitSigningSecret + if signingSecret == "" { + if value, ok := store.Get("events.signing_secret"); ok { + signingSecret = value + } + } + if signingSecret == "" && signatureMode != "none" { + signingSecret = os.Getenv({{printf "%q" .WebhookSecretEnv}}) + } + + headers, err := eventlistener.SignatureHeaders(eventlistener.SignatureOptions{ + Mode: signatureMode, + Header: signatureHeader, + Secret: signingSecret, + 
Algorithm: signatureAlgorithm, + IncludeTimestamp: includeTimestamp, + TimestampHeader: timestampHeader, + }, payload) + if err != nil { + return err + } + + statusCode, err := eventlistener.EmitEvent(eventsEmitTargetURL, method, payload, headers) + if err != nil { + return err + } + + writeJSON(eventlistener.EmitRecord{ + Type: "listener.emit", + EventName: def.Name, + TargetURL: eventsEmitTargetURL, + Method: method, + StatusCode: statusCode, + }) + return nil + }, +} + +func emitEventRecord(v interface{}) { + enc := json.NewEncoder(os.Stdout) + enc.SetEscapeHTML(false) + if err := enc.Encode(v); err != nil { + fmt.Fprintln(os.Stderr, "error encoding event output:", err) + } +} + +func completeTunnelProviders(_ *cobra.Command, _ []string, _ string) ([]string, cobra.ShellCompDirective) { + return eventlistener.SupportedTunnelProviders(), cobra.ShellCompDirectiveNoFileComp +} + +func completeSignatureModes(_ *cobra.Command, _ []string, _ string) ([]string, cobra.ShellCompDirective) { + return eventlistener.SupportedSignatureModes(), cobra.ShellCompDirectiveNoFileComp +} + +func completeSignatureAlgorithms(_ *cobra.Command, _ []string, _ string) ([]string, cobra.ShellCompDirective) { + return eventlistener.SupportedSignatureAlgorithms(), cobra.ShellCompDirectiveNoFileComp +} + +func completeEventNames(_ *cobra.Command, _ []string, _ string) ([]string, cobra.ShellCompDirective) { + return eventlistener.EventNames(generatedEventDefinitions), cobra.ShellCompDirectiveNoFileComp +} + +func init() { + eventsListenCmd.Flags().StringVar(&eventsListenHost, "host", "127.0.0.1", "Host interface to bind the listener to") + eventsListenCmd.Flags().IntVar(&eventsListenPort, "port", 8081, "Port to listen on") + eventsListenCmd.Flags().StringVar(&eventsListenPath, "path", "/", "Request path to accept") + eventsListenCmd.Flags().IntVar(&eventsListenResponseStatus, "response-status", 202, "HTTP status code returned to the sender") + 
eventsListenCmd.Flags().StringVar(&eventsListenResponseBody, "response-body", "{\"ok\":true}", "Response body returned to the sender") + eventsListenCmd.Flags().StringVar(&eventsListenTunnel, "tunnel", "none", "Expose the listener with cloudflared: none|auto|cloudflared") + eventsListenCmd.Flags().StringVar(&eventsListenSignatureMode, "signature-mode", "", "Verify incoming requests: none|hmac") + eventsListenCmd.Flags().StringVar(&eventsListenSignatureHeader, "signature-header", "", "Header containing the request signature") + eventsListenCmd.Flags().StringVar(&eventsListenSigningSecret, "signing-secret", "", "Secret used for signature verification") + eventsListenCmd.Flags().StringVar(&eventsListenSignatureAlgorithm, "signature-algorithm", "", "HMAC algorithm: sha256|sha1|sha512") + eventsListenCmd.Flags().BoolVar(&eventsListenIncludeTimestamp, "include-timestamp", false, "Verify signatures over timestamp.body instead of body only") + eventsListenCmd.Flags().StringVar(&eventsListenTimestampHeader, "timestamp-header", "", "Header containing the signature timestamp") + eventsListenCmd.Flags().IntVar(&eventsListenTimestampTolerance, "timestamp-tolerance", 300, "Maximum age in seconds for timestamped signatures") + _ = eventsListenCmd.RegisterFlagCompletionFunc("tunnel", completeTunnelProviders) + _ = eventsListenCmd.RegisterFlagCompletionFunc("signature-mode", completeSignatureModes) + _ = eventsListenCmd.RegisterFlagCompletionFunc("signature-algorithm", completeSignatureAlgorithms) + + eventsEmitCmd.Flags().StringVar(&eventsEmitTargetURL, "target-url", "", "Destination URL for the emitted event") + eventsEmitCmd.Flags().StringVar(&eventsEmitMethod, "method", "", "Override the HTTP method used to emit the event") + eventsEmitCmd.Flags().StringVar(&eventsEmitDataJSON, "data-json", "", "Inline JSON payload (defaults to the generated sample payload)") + eventsEmitCmd.Flags().StringVar(&eventsEmitDataFile, "data-file", "", "Path to a JSON payload file") + 
eventsEmitCmd.Flags().StringVar(&eventsEmitSignatureMode, "signature-mode", "", "Sign emitted requests: none|hmac") + eventsEmitCmd.Flags().StringVar(&eventsEmitSignatureHeader, "signature-header", "", "Header used to carry the emitted signature") + eventsEmitCmd.Flags().StringVar(&eventsEmitSigningSecret, "signing-secret", "", "Secret used for emitted request signatures") + eventsEmitCmd.Flags().StringVar(&eventsEmitSignatureAlgorithm, "signature-algorithm", "", "HMAC algorithm: sha256|sha1|sha512") + eventsEmitCmd.Flags().BoolVar(&eventsEmitIncludeTimestamp, "include-timestamp", false, "Sign timestamp.body instead of body only") + eventsEmitCmd.Flags().StringVar(&eventsEmitTimestampHeader, "timestamp-header", "", "Header used to carry the signing timestamp") + _ = eventsEmitCmd.RegisterFlagCompletionFunc("signature-mode", completeSignatureModes) + _ = eventsEmitCmd.RegisterFlagCompletionFunc("signature-algorithm", completeSignatureAlgorithms) + + eventsListenCmd.ValidArgsFunction = completeEventNames + eventsEmitCmd.ValidArgsFunction = completeEventNames + + eventsCmd.AddCommand(eventsListCmd) + eventsCmd.AddCommand(eventsListenCmd) + eventsCmd.AddCommand(eventsEmitCmd) + rootCmd.AddCommand(eventsCmd) +} diff --git a/internal/generator/templates/internal_config.go.tmpl b/internal/generator/templates/internal_config.go.tmpl new file mode 100644 index 0000000..69552c5 --- /dev/null +++ b/internal/generator/templates/internal_config.go.tmpl @@ -0,0 +1,242 @@ +package config + +import ( + "encoding/json" + "fmt" + "os" + "path/filepath" + "sort" + "strings" +) + +const ( + appName = {{printf "%q" .CLIName}} + defaultProfile = "default" +) + +// Profile is one named configuration profile. +type Profile struct { + Properties map[string]string `json:"properties"` + Secrets map[string]string `json:"secrets"` +} + +// Store is the local profile store for the generated CLI. 
+type Store struct { + Active string `json:"active"` + Profiles map[string]*Profile `json:"profiles"` + path string +} + +// MaskedEntry is one config entry prepared for user-facing listing. +type MaskedEntry struct { + Key string + Value string + Secret bool +} + +// Load loads the local config store, creating an empty default profile if +// it does not exist. +func Load() (*Store, error) { + path, err := DefaultPath() + if err != nil { + return nil, err + } + data, err := os.ReadFile(path) + if os.IsNotExist(err) { + return newStore(path), nil + } + if err != nil { + return nil, fmt.Errorf("reading config: %w", err) + } + + var store Store + if err := json.Unmarshal(data, &store); err != nil { + return nil, fmt.Errorf("parsing config: %w", err) + } + store.path = path + store.ensureDefaults() + return &store, nil +} + +func newStore(path string) *Store { + store := &Store{ + Active: defaultProfile, + Profiles: map[string]*Profile{}, + path: path, + } + store.ensureDefaults() + return store +} + +func (s *Store) ensureDefaults() { + if s.Profiles == nil { + s.Profiles = map[string]*Profile{} + } + if strings.TrimSpace(s.Active) == "" { + s.Active = defaultProfile + } + if _, ok := s.Profiles[s.Active]; !ok { + s.Profiles[s.Active] = &Profile{ + Properties: map[string]string{}, + Secrets: map[string]string{}, + } + } + for name, cfg := range s.Profiles { + if cfg == nil { + s.Profiles[name] = &Profile{ + Properties: map[string]string{}, + Secrets: map[string]string{}, + } + continue + } + if cfg.Properties == nil { + cfg.Properties = map[string]string{} + } + if cfg.Secrets == nil { + cfg.Secrets = map[string]string{} + } + } +} + +// DefaultPath returns the config file path for the generated CLI. 
+func DefaultPath() (string, error) { + configDir, err := os.UserConfigDir() + if err != nil { + return "", fmt.Errorf("resolving user config dir: %w", err) + } + return filepath.Join(configDir, appName, "config.json"), nil +} + +// Path returns the on-disk config file path. +func (s *Store) Path() string { + return s.path +} + +// Save writes the config store to disk. +func (s *Store) Save() error { + s.ensureDefaults() + if err := os.MkdirAll(filepath.Dir(s.path), 0o700); err != nil { + return fmt.Errorf("creating config dir: %w", err) + } + data, err := json.MarshalIndent(s, "", " ") + if err != nil { + return fmt.Errorf("serializing config: %w", err) + } + if err := os.WriteFile(s.path, data, 0o600); err != nil { + return fmt.Errorf("writing config: %w", err) + } + return nil +} + +// ActiveProfileName returns the active profile name. +func (s *Store) ActiveProfileName() string { + s.ensureDefaults() + return s.Active +} + +// ActiveProfile returns the active profile, creating it if needed. +func (s *Store) ActiveProfile() *Profile { + s.ensureDefaults() + return s.Profiles[s.Active] +} + +// ProfileNames returns profile names in sorted order. +func (s *Store) ProfileNames() []string { + s.ensureDefaults() + names := make([]string, 0, len(s.Profiles)) + for name := range s.Profiles { + names = append(names, name) + } + sort.Strings(names) + return names +} + +// CreateProfile creates a named profile if it does not exist. +func (s *Store) CreateProfile(name string) error { + name = strings.TrimSpace(name) + if name == "" { + return fmt.Errorf("profile name must not be empty") + } + s.ensureDefaults() + if _, exists := s.Profiles[name]; exists { + return fmt.Errorf("profile %q already exists", name) + } + s.Profiles[name] = &Profile{ + Properties: map[string]string{}, + Secrets: map[string]string{}, + } + return nil +} + +// UseProfile switches the active profile. 
+func (s *Store) UseProfile(name string) error { + name = strings.TrimSpace(name) + s.ensureDefaults() + if _, exists := s.Profiles[name]; !exists { + return fmt.Errorf("profile %q does not exist", name) + } + s.Active = name + return nil +} + +// Set stores a key/value pair in the active profile. Secret keys are +// masked in list output. +func (s *Store) Set(key, value string, secret bool) { + key = strings.TrimSpace(key) + cfg := s.ActiveProfile() + delete(cfg.Properties, key) + delete(cfg.Secrets, key) + if secret { + cfg.Secrets[key] = value + return + } + cfg.Properties[key] = value +} + +// Unset removes a key from the active profile. +func (s *Store) Unset(key string) bool { + cfg := s.ActiveProfile() + _, propertyExists := cfg.Properties[key] + _, secretExists := cfg.Secrets[key] + delete(cfg.Properties, key) + delete(cfg.Secrets, key) + return propertyExists || secretExists +} + +// Get returns a config value from the active profile. +func (s *Store) Get(key string) (string, bool) { + cfg := s.ActiveProfile() + if value, ok := cfg.Secrets[key]; ok { + return value, true + } + value, ok := cfg.Properties[key] + return value, ok +} + +// MaskedEntries returns active-profile entries sorted by key with secrets masked. 
func (s *Store) MaskedEntries() []MaskedEntry {
	cfg := s.ActiveProfile()
	// Merge keys from both maps so properties and secrets interleave in one
	// sorted listing.
	keys := make([]string, 0, len(cfg.Properties)+len(cfg.Secrets))
	for key := range cfg.Properties {
		keys = append(keys, key)
	}
	for key := range cfg.Secrets {
		keys = append(keys, key)
	}
	sort.Strings(keys)

	entries := make([]MaskedEntry, 0, len(keys))
	// Set keeps each key in exactly one map, but a hand-edited config file
	// could list a key in both; dedupe defensively. The secrets branch is
	// checked first, so such a key is reported as secret.
	seen := map[string]bool{}
	for _, key := range keys {
		if seen[key] {
			continue
		}
		seen[key] = true
		if _, ok := cfg.Secrets[key]; ok {
			// Never expose secret values in listings.
			entries = append(entries, MaskedEntry{Key: key, Value: "[secret]", Secret: true})
			continue
		}
		entries = append(entries, MaskedEntry{Key: key, Value: cfg.Properties[key], Secret: false})
	}
	return entries
}
diff --git a/internal/generator/templates/internal_events.go.tmpl b/internal/generator/templates/internal_events.go.tmpl
new file mode 100644
index 0000000..b71478d
--- /dev/null
+++ b/internal/generator/templates/internal_events.go.tmpl
@@ -0,0 +1,677 @@
package events

import (
	"bufio"
	"bytes"
	"context"
	"crypto/hmac"
	"crypto/sha1"
	"crypto/sha256"
	"crypto/sha512"
	"encoding/hex"
	"encoding/json"
	"errors"
	"fmt"
	"hash"
	"io"
	"net"
	"net/http"
	"net/url"
	"os"
	"os/exec"
	"regexp"
	"sort"
	"strconv"
	"strings"
	"sync"
	"time"
)

// emitter is the callback used to stream JSON-serializable records
// (incoming events, lifecycle notices, errors) back to the command layer.
type emitter func(interface{})

// EventDefinition describes a callback or webhook known at generation time.
type EventDefinition struct {
	Name        string `json:"name"`
	DisplayName string `json:"display_name"`
	// Source and Expression record where the event was declared —
	// presumably the OpenAPI callback/webhook origin; confirm against the
	// generator that fills these in.
	Source     string `json:"source"`
	Expression string `json:"expression,omitempty"`
	// Routing defaults used when the user does not override --path/--method.
	DefaultPath   string   `json:"default_path"`
	Methods       []string `json:"methods"`
	DefaultMethod string   `json:"default_method"`
	Summary       string   `json:"summary"`
	Description   string   `json:"description,omitempty"`
	// SampleJSON is the generated example payload used by `events emit`
	// when no --data-json/--data-file is supplied.
	SampleJSON string `json:"sample_json,omitempty"`
	// Per-event signature defaults; flag and config values take precedence
	// (see ResolveString/ResolveBool).
	SignatureMode             string `json:"signature_mode,omitempty"`
	SignatureHeader           string `json:"signature_header,omitempty"`
	SignatureAlgorithm        string `json:"signature_algorithm,omitempty"`
	SignatureIncludeTimestamp bool   `json:"signature_include_timestamp,omitempty"`
	SignatureTimestampHeader  string `json:"signature_timestamp_header,omitempty"`
}

// SignatureOptions configures HMAC signing/verification.
type SignatureOptions struct {
	Mode      string // "none" or "hmac"
	Header    string // header carrying the signature
	Secret    string // shared signing secret
	Algorithm string // "sha256", "sha1", or "sha512"
	// When IncludeTimestamp is set, the signed input is "<timestamp>.<body>"
	// and the timestamp travels in TimestampHeader.
	IncludeTimestamp   bool
	TimestampHeader    string
	TimestampTolerance time.Duration // max accepted timestamp skew (verification only)
}

// ListenOptions configures the local webhook listener.
type ListenOptions struct {
	Host string
	Port int
	// Path is the only accepted request path (all paths when "/").
	Path      string
	EventName string
	// AllowedMethods restricts accepted HTTP methods; empty allows any.
	AllowedMethods []string
	// Canned response returned to every accepted request.
	ResponseStatus int
	ResponseBody   string
	// Signature verification settings; mirror SignatureOptions.
	SignatureMode      string
	SignatureHeader    string
	SigningSecret      string
	SignatureAlgorithm string
	IncludeTimestamp   bool
	TimestampHeader    string
	TimestampTolerance time.Duration
}

// StartRecord describes a started listener.
type StartRecord struct {
	Type               string   `json:"type"` // always "listener.started"
	ListenURL          string   `json:"listen_url"`
	Path               string   `json:"path"`
	EventName          string   `json:"event_name,omitempty"`
	Methods            []string `json:"methods,omitempty"`
	SignatureMode      string   `json:"signature_mode,omitempty"`
	SignatureHeader    string   `json:"signature_header,omitempty"`
	SignatureAlgorithm string   `json:"signature_algorithm,omitempty"`
	IncludeTimestamp   bool     `json:"include_timestamp,omitempty"`
}

// TunnelRecord reports a discovered tunnel URL.
type TunnelRecord struct {
	Type      string `json:"type"` // always "listener.tunnel" (presumably — set where tunnels start, outside this view)
	Provider  string `json:"provider"`
	PublicURL string `json:"public_url"`
	TargetURL string `json:"target_url"`
}

// EmitRecord reports a synthetic emitted event.
type EmitRecord struct {
	Type       string `json:"type"` // always "listener.emit"
	EventName  string `json:"event_name"`
	TargetURL  string `json:"target_url"`
	Method     string `json:"method"`
	StatusCode int    `json:"status_code"`
}

// ErrorRecord reports listener or tunnel failures.
type ErrorRecord struct {
	Type    string `json:"type"`  // always "listener.error"
	Stage   string `json:"stage"` // e.g. "server", "request", "verification"
	Message string `json:"message"`
}

// EventRecord captures one incoming webhook request.
type EventRecord struct {
	Type    string              `json:"type"` // always "listener.event"
	Name    string              `json:"name,omitempty"`
	Method  string              `json:"method"`
	Path    string              `json:"path"`
	Query   string              `json:"query,omitempty"`
	Headers map[string][]string `json:"headers,omitempty"`
	// Body is the JSON-decoded payload when the body parses as JSON;
	// BodyRaw carries the raw text otherwise (see decodeBody).
	Body    interface{} `json:"body,omitempty"`
	BodyRaw string      `json:"body_raw,omitempty"`
	// Verified is true only when a signature mode is enabled and the
	// request's signature checked out; it is false when mode is "none".
	Verified   bool   `json:"verified"`
	ReceivedAt string `json:"received_at"` // RFC 3339, UTC
}

// Listener is a local webhook listener.
type Listener struct {
	opts ListenOptions // normalized in NewListener; treat as read-only afterwards
	emit emitter       // sink for all records produced by the listener
	ln   net.Listener  // bound socket; nil until Start succeeds
	srv  *http.Server
}

// TunnelSession is a running tunnel process.
+type TunnelSession struct { + Provider string +} + +type tunnelProvider struct { + Name string + Binaries []string + BuildArgs func(*url.URL) ([]string, error) +} + +type tunnelSelection struct { + Provider string + Binary string + Args []string +} + +var publicURLPattern = regexp.MustCompile(`https?://[^\s\"]+`) + +// SupportedTunnelProviders returns all accepted --tunnel values. +func SupportedTunnelProviders() []string { + return []string{"none", "auto", "cloudflared"} +} + +// SupportedSignatureModes returns all accepted signature modes. +func SupportedSignatureModes() []string { + return []string{"none", "hmac"} +} + +// SupportedSignatureAlgorithms returns all accepted HMAC algorithms. +func SupportedSignatureAlgorithms() []string { + return []string{"sha256", "sha1", "sha512"} +} + +// LookupEvent finds a named event definition. +func LookupEvent(defs []EventDefinition, name string) (EventDefinition, bool) { + for _, def := range defs { + if def.Name == name { + return def, true + } + } + return EventDefinition{}, false +} + +// EventNames returns all generated event names. +func EventNames(defs []EventDefinition) []string { + names := make([]string, 0, len(defs)) + for _, def := range defs { + names = append(names, def.Name) + } + sort.Strings(names) + return names +} + +// ResolveString applies flag > config > event default > hard default precedence. +func ResolveString(flagChanged bool, flagValue string, store interface{ Get(string) (string, bool) }, key string, eventDefault string, hardDefault string) string { + if flagChanged && strings.TrimSpace(flagValue) != "" { + return flagValue + } + if store != nil { + if value, ok := store.Get(key); ok && strings.TrimSpace(value) != "" { + return value + } + } + if strings.TrimSpace(eventDefault) != "" { + return eventDefault + } + return hardDefault +} + +// ResolveBool applies flag > config > event default > hard default precedence. 
+func ResolveBool(flagChanged bool, flagValue bool, store interface{ Get(string) (string, bool) }, key string, eventDefault bool, hardDefault bool) bool { + if flagChanged { + return flagValue + } + if store != nil { + if value, ok := store.Get(key); ok { + parsed, err := strconv.ParseBool(strings.TrimSpace(value)) + if err == nil { + return parsed + } + } + } + if eventDefault { + return true + } + return hardDefault +} + +// PayloadForEvent returns the payload bytes for an event. +func PayloadForEvent(def EventDefinition, dataJSON string, dataFile string) ([]byte, error) { + switch { + case strings.TrimSpace(dataJSON) != "": + return []byte(dataJSON), nil + case strings.TrimSpace(dataFile) != "": + return os.ReadFile(dataFile) + case strings.TrimSpace(def.SampleJSON) != "": + return []byte(def.SampleJSON), nil + default: + return []byte("{}"), nil + } +} + +// SignatureHeaders builds generic HMAC signature headers. +func SignatureHeaders(opts SignatureOptions, payload []byte) (map[string]string, error) { + mode := normalizeSignatureMode(opts.Mode) + if mode == "none" { + return nil, nil + } + if strings.TrimSpace(opts.Secret) == "" { + return nil, errors.New("signing secret is required when signature mode is enabled") + } + + header := firstNonEmpty(opts.Header, "X-Signature") + algorithm := normalizeSignatureAlgorithm(opts.Algorithm) + if algorithm == "" { + algorithm = "sha256" + } + + input := payload + headers := map[string]string{} + if opts.IncludeTimestamp { + timestampHeader := firstNonEmpty(opts.TimestampHeader, "X-Signature-Timestamp") + timestamp := strconv.FormatInt(time.Now().Unix(), 10) + headers[timestampHeader] = timestamp + input = []byte(timestamp + "." + string(payload)) + } + + signature, err := computeHMAC(algorithm, opts.Secret, input) + if err != nil { + return nil, err + } + headers[header] = signature + return headers, nil +} + +// NewListener creates a local listener. 
+func NewListener(opts ListenOptions, emit func(interface{})) (*Listener, error) { + if emit == nil { + return nil, errors.New("emit callback is required") + } + opts.Host = normalizeHost(opts.Host) + opts.Path = normalizePath(opts.Path) + opts.SignatureMode = normalizeSignatureMode(opts.SignatureMode) + opts.SignatureHeader = firstNonEmpty(opts.SignatureHeader, "X-Signature") + opts.SignatureAlgorithm = firstNonEmpty(normalizeSignatureAlgorithm(opts.SignatureAlgorithm), "sha256") + opts.TimestampHeader = firstNonEmpty(opts.TimestampHeader, "X-Signature-Timestamp") + if opts.ResponseStatus == 0 { + opts.ResponseStatus = http.StatusAccepted + } + if opts.ResponseBody == "" { + opts.ResponseBody = "{\"ok\":true}" + } + if opts.TimestampTolerance <= 0 { + opts.TimestampTolerance = 5 * time.Minute + } + if opts.SignatureMode != "none" && strings.TrimSpace(opts.SigningSecret) == "" { + return nil, errors.New("signing secret is required when signature mode is enabled") + } + + listener := &Listener{opts: opts, emit: emit} + listener.srv = &http.Server{ + Handler: listener.handler(), + ReadHeaderTimeout: 10 * time.Second, + } + return listener, nil +} + +// Start begins serving requests. +func (l *Listener) Start() error { + ln, err := net.Listen("tcp", net.JoinHostPort(l.opts.Host, strconv.Itoa(l.opts.Port))) + if err != nil { + return fmt.Errorf("starting listener: %w", err) + } + l.ln = ln + + go func() { + if serveErr := l.srv.Serve(ln); serveErr != nil && !errors.Is(serveErr, http.ErrServerClosed) { + l.emit(ErrorRecord{Type: "listener.error", Stage: "server", Message: serveErr.Error()}) + } + }() + + return nil +} + +// Shutdown stops the listener. +func (l *Listener) Shutdown(ctx context.Context) error { + if l.srv == nil { + return nil + } + return l.srv.Shutdown(ctx) +} + +// ListenURL returns the local URL for the listener. 
+func (l *Listener) ListenURL() string { + if l.ln == nil { + return "" + } + tcpAddr, ok := l.ln.Addr().(*net.TCPAddr) + if !ok { + return "" + } + host := displayHost(l.opts.Host) + path := l.opts.Path + if path == "/" { + path = "" + } + return fmt.Sprintf("http://%s:%d%s", host, tcpAddr.Port, path) +} + +// Path returns the normalized request path. +func (l *Listener) Path() string { + return l.opts.Path +} + +// AllowedMethods returns the configured allowed methods. +func (l *Listener) AllowedMethods() []string { + if len(l.opts.AllowedMethods) == 0 { + return nil + } + methods := make([]string, len(l.opts.AllowedMethods)) + copy(methods, l.opts.AllowedMethods) + return methods +} + +func (l *Listener) handler() http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if l.opts.Path != "/" && r.URL.Path != l.opts.Path { + http.NotFound(w, r) + return + } + if len(l.opts.AllowedMethods) > 0 && !allowsMethod(l.opts.AllowedMethods, r.Method) { + w.Header().Set("Allow", strings.Join(l.opts.AllowedMethods, ", ")) + w.WriteHeader(http.StatusMethodNotAllowed) + return + } + + bodyBytes, err := io.ReadAll(r.Body) + if err != nil { + l.emit(ErrorRecord{Type: "listener.error", Stage: "request", Message: fmt.Sprintf("reading request body: %v", err)}) + http.Error(w, "failed to read request body", http.StatusInternalServerError) + return + } + + verified, err := verifySignature(SignatureOptions{ + Mode: l.opts.SignatureMode, + Header: l.opts.SignatureHeader, + Secret: l.opts.SigningSecret, + Algorithm: l.opts.SignatureAlgorithm, + IncludeTimestamp: l.opts.IncludeTimestamp, + TimestampHeader: l.opts.TimestampHeader, + TimestampTolerance: l.opts.TimestampTolerance, + }, r.Header, bodyBytes) + if err != nil { + l.emit(ErrorRecord{Type: "listener.error", Stage: "verification", Message: err.Error()}) + http.Error(w, "signature verification failed", http.StatusUnauthorized) + return + } + + decodedBody, bodyRaw := decodeBody(bodyBytes) + 
l.emit(EventRecord{ + Type: "listener.event", + Name: l.opts.EventName, + Method: r.Method, + Path: r.URL.Path, + Query: r.URL.RawQuery, + Headers: cloneHeaders(r.Header), + Body: decodedBody, + BodyRaw: bodyRaw, + Verified: verified, + ReceivedAt: time.Now().UTC().Format(time.RFC3339), + }) + + if json.Valid([]byte(l.opts.ResponseBody)) { + w.Header().Set("Content-Type", "application/json") + } else { + w.Header().Set("Content-Type", "text/plain; charset=utf-8") + } + w.WriteHeader(l.opts.ResponseStatus) + _, _ = io.WriteString(w, l.opts.ResponseBody) + }) +} + +func verifySignature(opts SignatureOptions, headers http.Header, payload []byte) (bool, error) { + mode := normalizeSignatureMode(opts.Mode) + if mode == "none" { + return false, nil + } + if strings.TrimSpace(opts.Secret) == "" { + return false, errors.New("signing secret is required when signature mode is enabled") + } + + header := firstNonEmpty(opts.Header, "X-Signature") + actual := headers.Get(header) + if actual == "" { + return false, fmt.Errorf("%s header is missing", http.CanonicalHeaderKey(header)) + } + + input := payload + if opts.IncludeTimestamp { + timestampHeader := firstNonEmpty(opts.TimestampHeader, "X-Signature-Timestamp") + timestamp := headers.Get(timestampHeader) + if timestamp == "" { + return false, fmt.Errorf("%s header is missing", http.CanonicalHeaderKey(timestampHeader)) + } + if opts.TimestampTolerance > 0 { + ts, err := strconv.ParseInt(timestamp, 10, 64) + if err != nil { + return false, fmt.Errorf("invalid signature timestamp: %w", err) + } + requestTime := time.Unix(ts, 0) + if time.Since(requestTime) > opts.TimestampTolerance || requestTime.Sub(time.Now()) > opts.TimestampTolerance { + return false, errors.New("signature timestamp is outside the allowed tolerance") + } + } + input = []byte(timestamp + "." 
+ string(payload)) + } + + expected, err := computeHMAC(normalizeSignatureAlgorithm(opts.Algorithm), opts.Secret, input) + if err != nil { + return false, err + } + if !hmac.Equal([]byte(expected), []byte(actual)) { + return false, errors.New("signature verification failed") + } + return true, nil +} + +// EmitEvent sends a JSON payload to a target URL using method. +func EmitEvent(targetURL string, method string, payload []byte, headers map[string]string) (int, error) { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + req, err := http.NewRequestWithContext(ctx, strings.ToUpper(strings.TrimSpace(method)), targetURL, bytes.NewReader(payload)) + if err != nil { + return 0, fmt.Errorf("build request: %w", err) + } + req.Header.Set("Content-Type", "application/json") + for key, value := range headers { + req.Header.Set(key, value) + } + + resp, err := http.DefaultClient.Do(req) + if err != nil { + return 0, fmt.Errorf("send event: %w", err) + } + defer resp.Body.Close() + return resp.StatusCode, nil +} + +// StartTunnel starts a cloudflared tunnel and emits a tunnel record once +// a public URL is detected. +func StartTunnel(ctx context.Context, requested string, targetURL string, emit func(interface{})) (*TunnelSession, error) { + selection, err := resolveTunnelSelection(requested, targetURL) + if err != nil { + return nil, err + } + + cmd := exec.CommandContext(ctx, selection.Binary, selection.Args...) 
//nolint:gosec + stdout, err := cmd.StdoutPipe() + if err != nil { + return nil, fmt.Errorf("preparing %s stdout: %w", selection.Provider, err) + } + stderr, err := cmd.StderrPipe() + if err != nil { + return nil, fmt.Errorf("preparing %s stderr: %w", selection.Provider, err) + } + + if err := cmd.Start(); err != nil { + return nil, fmt.Errorf("starting %s tunnel: %w", selection.Provider, err) + } + + var once sync.Once + scan := func(reader io.Reader) { + scanner := bufio.NewScanner(reader) + for scanner.Scan() { + line := scanner.Text() + if publicURL := publicURLPattern.FindString(line); publicURL != "" { + once.Do(func() { + emit(TunnelRecord{ + Type: "listener.tunnel", + Provider: selection.Provider, + PublicURL: publicURL, + TargetURL: targetURL, + }) + }) + } + } + } + + go scan(stdout) + go scan(stderr) + go func() { + waitErr := cmd.Wait() + if waitErr != nil && ctx.Err() == nil { + emit(ErrorRecord{Type: "listener.error", Stage: "tunnel", Message: fmt.Sprintf("%s tunnel exited: %v", selection.Provider, waitErr)}) + } + }() + + return &TunnelSession{Provider: selection.Provider}, nil +} + +func resolveTunnelSelection(requested string, targetURL string) (tunnelSelection, error) { + target, err := url.Parse(targetURL) + if err != nil { + return tunnelSelection{}, fmt.Errorf("parsing target URL: %w", err) + } + + requested = strings.TrimSpace(strings.ToLower(requested)) + if requested == "" || requested == "none" { + return tunnelSelection{}, errors.New("tunnel provider must not be none") + } + if requested == "auto" { + requested = "cloudflared" + } + return providerSelection(requested, target) +} + +func providerSelection(name string, target *url.URL) (tunnelSelection, error) { + if name != "cloudflared" { + return tunnelSelection{}, fmt.Errorf("unsupported tunnel provider %q", name) + } + + binary, err := exec.LookPath("cloudflared") + if err != nil { + return tunnelSelection{}, errors.New("cloudflared not found in PATH") + } + + return tunnelSelection{ + 
Provider: "cloudflared", + Binary: binary, + Args: []string{"tunnel", "--url", target.String()}, + }, nil +} + +func computeHMAC(algorithm string, secret string, payload []byte) (string, error) { + var factory func() hash.Hash + switch normalizeSignatureAlgorithm(algorithm) { + case "sha1": + factory = sha1.New + case "sha512": + factory = sha512.New + case "", "sha256": + factory = sha256.New + default: + return "", fmt.Errorf("unsupported signature algorithm %q", algorithm) + } + + mac := hmac.New(factory, []byte(secret)) + _, _ = mac.Write(payload) + return hex.EncodeToString(mac.Sum(nil)), nil +} + +func normalizeHost(host string) string { + host = strings.TrimSpace(host) + if host == "" { + return "127.0.0.1" + } + return host +} + +func normalizePath(path string) string { + path = strings.TrimSpace(path) + if path == "" || path == "/" { + return "/" + } + if !strings.HasPrefix(path, "/") { + return "/" + path + } + return path +} + +func normalizeSignatureMode(mode string) string { + mode = strings.TrimSpace(strings.ToLower(mode)) + if mode == "" { + return "none" + } + return mode +} + +func normalizeSignatureAlgorithm(algorithm string) string { + algorithm = strings.TrimSpace(strings.ToLower(algorithm)) + if algorithm == "" { + return "" + } + return algorithm +} + +func displayHost(host string) string { + switch host { + case "", "0.0.0.0", "::": + return "127.0.0.1" + default: + return host + } +} + +func allowsMethod(allowed []string, method string) bool { + method = strings.ToUpper(strings.TrimSpace(method)) + for _, candidate := range allowed { + if strings.ToUpper(candidate) == method { + return true + } + } + return false +} + +func decodeBody(body []byte) (interface{}, string) { + if len(body) == 0 { + return nil, "" + } + var decoded interface{} + if err := json.Unmarshal(body, &decoded); err == nil { + return decoded, "" + } + return nil, string(body) +} + +func cloneHeaders(headers http.Header) map[string][]string { + if len(headers) == 0 { + 
return nil + } + cloned := make(map[string][]string, len(headers)) + for key, values := range headers { + copyValues := make([]string, len(values)) + copy(copyValues, values) + cloned[key] = copyValues + } + return cloned +} + +func firstNonEmpty(values ...string) string { + for _, value := range values { + if strings.TrimSpace(value) != "" { + return value + } + } + return "" +} diff --git a/internal/generator/templates/main.go.tmpl b/internal/generator/templates/main.go.tmpl new file mode 100644 index 0000000..134a648 --- /dev/null +++ b/internal/generator/templates/main.go.tmpl @@ -0,0 +1,15 @@ +package main + +import ( + "fmt" + "os" + + "{{.CLIName}}/cmd" +) + +func main() { + if err := cmd.Execute(); err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(1) + } +} diff --git a/internal/generator/templates/root.go.tmpl b/internal/generator/templates/root.go.tmpl new file mode 100644 index 0000000..7021b16 --- /dev/null +++ b/internal/generator/templates/root.go.tmpl @@ -0,0 +1,110 @@ +package cmd + +import ( +{{.Imports}} + cfg "{{.CLIName}}/internal/config" +) + +var ( + outputFormat string + baseURL string +{{.AuthVarDecls}}{{.ServerVarDecls}}) + +const defaultBaseURLTemplate = {{printf "%q" .DefaultBaseURLTemplate}} + +var version = {{printf "%q" .Version}} + +var rootCmd = &cobra.Command{ + Use: {{printf "%q" .CLIName}}, + Short: {{printf "%q" .Description}}, + Version: version, +} + +// Execute runs the root command. 
+func Execute() error { + return rootCmd.Execute() +} + +func init() { + rootCmd.PersistentFlags().StringVar(&outputFormat, "output", "json", "Output format: json|table|raw") + rootCmd.PersistentFlags().StringVar(&baseURL, "base-url", "", "Override API base URL") +{{.AuthFlagInits}}{{.ServerVarFlagInits}}} + +func getBaseURL() string { + if baseURL != "" { + return baseURL + } + if v := getConfigValue("core.base_url"); v != "" { + return v + } + if v := os.Getenv({{printf "%q" .BaseURLEnv}}); v != "" { + return v + } + return resolveDefaultBaseURL() +} + +func resolveDefaultBaseURL() string { +{{.ResolveDefaultBaseURL}} +} + +func getConfigValue(key string) string { + store, err := cfg.Load() + if err != nil { + return "" + } + value, ok := store.Get(key) + if !ok { + return "" + } + return value +} + +// getAuthHeaders returns HTTP headers required for authentication. +// Priority: CLI flag -> config -> environment variable -> empty. +func getAuthHeaders() map[string]string { + headers := map[string]string{} +{{.AuthHeaders}} + return headers +} + +// getAuthQueryParams returns query parameters required for authentication +// (used when an API key scheme has in: query). +func getAuthQueryParams() map[string]string { + params := map[string]string{} +{{.AuthQuery}} + return params +} + +// writeOutput prints v as indented JSON to stdout. +func writeOutput(v interface{}) { + enc := json.NewEncoder(os.Stdout) + enc.SetIndent("", " ") + if err := enc.Encode(v); err != nil { + fmt.Fprintln(os.Stderr, "error encoding output:", err) + os.Exit(1) + } +} + +// writeJSON prints v as indented JSON to stdout. +func writeJSON(v interface{}) { + writeOutput(v) +} + +// exitWithError prints an error as JSON to stderr and exits non-zero. 
+func exitWithError(statusCode int, code, message string, raw interface{}) { + type errObj struct { + Status int `json:"status"` + Code string `json:"code"` + Message string `json:"message"` + Raw interface{} `json:"raw,omitempty"` + } + type errorWrapper struct { + Error errObj `json:"error"` + } + obj := errorWrapper{Error: errObj{Status: statusCode, Code: code, Message: message, Raw: raw}} + enc := json.NewEncoder(os.Stderr) + enc.SetIndent("", " ") + _ = enc.Encode(obj) + os.Exit(1) +} +{{.OAuth2Helper}} diff --git a/internal/mock/mock.go b/internal/mock/mock.go index 1fe48b7..4568d1d 100644 --- a/internal/mock/mock.go +++ b/internal/mock/mock.go @@ -335,6 +335,20 @@ func GenerateEventPayload(openAPI *spec.OpenAPI, path string, method string) (in return nil, fmt.Errorf("method %q is not defined for path %q", method, path) } + return GeneratePayloadForOperation(openAPI, op) +} + +// GeneratePayloadForOperation builds a synthetic JSON payload for an operation. +// It prefers the requestBody schema (`application/json` first) and falls back +// to the first successful 2xx response schema. +func GeneratePayloadForOperation(openAPI *spec.OpenAPI, op *spec.Operation) (interface{}, error) { + if openAPI == nil { + return nil, errors.New("openapi spec is nil") + } + if op == nil { + return nil, errors.New("operation is nil") + } + if schema := requestBodySchema(op.RequestBody); schema != nil { return generateValue(schema, openAPI, 0), nil } diff --git a/internal/publish/publish.go b/internal/publish/publish.go index a5b7f8d..9c75ae0 100644 --- a/internal/publish/publish.go +++ b/internal/publish/publish.go @@ -58,9 +58,6 @@ func ResolveToken(explicit string) string { // pushes the generated CLI source into that repository. 
func Publish(ctx context.Context, entry manifest.CLIEntry, openAPI *spec.OpenAPI, opts Options) (*Result, error) { token := ResolveToken(opts.Token) - if token == "" { - return nil, fmt.Errorf("github token is required; set --github-token, GITHUB_TOKEN, or GH_TOKEN") - } if entry.SourceDir == "" { return nil, fmt.Errorf("cli %q has no source directory recorded", entry.Name) } @@ -74,36 +71,44 @@ func Publish(ctx context.Context, entry manifest.CLIEntry, openAPI *spec.OpenAPI branch = "main" } - client := githubutil.NewClient(token) - repo, created, err := client.EnsureRepository(ctx, githubutil.EnsureRepositoryRequest{ - Owner: opts.Owner, - Name: repoName, - Description: repositoryDescription(entry.Name, openAPI, opts.Description), - Homepage: opts.Homepage, - Private: strings.EqualFold(opts.Visibility, "private"), - ReuseExisting: opts.ReuseExisting, - }) - if err != nil { - return nil, err + var ( + repo *githubutil.Repository + created bool + err error + ) + if token == "" { + repo, err = existingRepositoryFromEntry(entry, branch) + if err != nil { + return nil, fmt.Errorf("github token is required; set --github-token, GITHUB_TOKEN, or GH_TOKEN") + } + } else { + client := githubutil.NewClient(token) + repo, created, err = client.EnsureRepository(ctx, githubutil.EnsureRepositoryRequest{ + Owner: opts.Owner, + Name: repoName, + Description: repositoryDescription(entry.Name, openAPI, opts.Description), + Homepage: opts.Homepage, + Private: strings.EqualFold(opts.Visibility, "private"), + ReuseExisting: opts.ReuseExisting, + }) + if err != nil { + return nil, err + } } if repo.DefaultBranch != "" { branch = repo.DefaultBranch } - lifecycleFiles, err := EnsureLifecycleFiles(entry.SourceDir, ProjectMetadata{ + lifecycleFiles, err := syncGitRepository(entry.SourceDir, repo, branch, ProjectMetadata{ CLIName: entry.Name, Repository: repo.FullName, DefaultBranch: branch, OpenAPI: openAPI, - }) + }, !created) if err != nil { return nil, err } - if err := 
syncGitRepository(entry.SourceDir, repo, branch); err != nil { - return nil, err - } - return &Result{ CLIName: entry.Name, SourceDir: entry.SourceDir, @@ -126,19 +131,26 @@ func repositoryDescription(cliName string, openAPI *spec.OpenAPI, explicit strin return fmt.Sprintf("%s CLI for %s, generated by climate", cliName, openAPI.Info.Title) } -func syncGitRepository(sourceDir string, repo *githubutil.Repository, branch string) error { - if err := os.MkdirAll(sourceDir, 0o755); err != nil { - return fmt.Errorf("creating source dir: %w", err) +func existingRepositoryFromEntry(entry manifest.CLIEntry, branch string) (*githubutil.Repository, error) { + if strings.TrimSpace(entry.RepositoryFullName) == "" { + return nil, fmt.Errorf("cli %q has no repository metadata recorded", entry.Name) } - - if _, err := os.Stat(filepath.Join(sourceDir, ".git")); os.IsNotExist(err) { - if err := runGit(sourceDir, "init"); err != nil { - return err - } + defaultBranch := branch + if strings.TrimSpace(entry.RepositoryDefaultBranch) != "" { + defaultBranch = entry.RepositoryDefaultBranch } + return &githubutil.Repository{ + Name: entry.Name, + FullName: entry.RepositoryFullName, + HTMLURL: entry.RepositoryURL, + SSHURL: entry.RepositorySSHURL, + DefaultBranch: defaultBranch, + }, nil +} - if err := runGit(sourceDir, "branch", "-M", branch); err != nil { - return err +func syncGitRepository(sourceDir string, repo *githubutil.Repository, branch string, metadata ProjectMetadata, existingRepo bool) ([]string, error) { + if err := os.MkdirAll(sourceDir, 0o755); err != nil { + return nil, fmt.Errorf("creating source dir: %w", err) } remoteURL := repo.SSHURL @@ -146,28 +158,47 @@ func syncGitRepository(sourceDir string, repo *githubutil.Repository, branch str remoteURL = repo.CloneURL } if remoteURL == "" { - return fmt.Errorf("repository %s has no clone url", repo.FullName) + return nil, fmt.Errorf("repository %s has no clone url", repo.FullName) + } + + if existingRepo { + return 
syncExistingRepository(sourceDir, remoteURL, branch, metadata, repo.FullName) + } + + lifecycleFiles, err := EnsureLifecycleFiles(sourceDir, metadata) + if err != nil { + return nil, err + } + + if _, err := os.Stat(filepath.Join(sourceDir, ".git")); os.IsNotExist(err) { + if err := runGit(sourceDir, "init"); err != nil { + return nil, err + } + } + + if err := runGit(sourceDir, "branch", "-M", branch); err != nil { + return nil, err } if err := ensureOriginRemote(sourceDir, remoteURL); err != nil { - return err + return nil, err } if err := runGit(sourceDir, "add", "."); err != nil { - return err + return nil, err } hasHead := runGit(sourceDir, "rev-parse", "--verify", "HEAD") == nil if !hasHead || hasStagedChanges(sourceDir) { if err := commitLifecycleBootstrap(sourceDir, repo.FullName); err != nil { - return err + return nil, err } } if err := runGit(sourceDir, "push", "-u", "origin", branch); err != nil { - return err + return nil, err } - return nil + return lifecycleFiles, nil } func hasStagedChanges(sourceDir string) bool { @@ -189,6 +220,108 @@ func ensureOriginRemote(sourceDir, remoteURL string) error { return runGit(sourceDir, "remote", "set-url", "origin", remoteURL) } +func syncExistingRepository(sourceDir, remoteURL, branch string, metadata ProjectMetadata, repoFullName string) ([]string, error) { + tempDir, err := os.MkdirTemp("", "climate-publish-*") + if err != nil { + return nil, fmt.Errorf("creating temp repo dir: %w", err) + } + defer os.RemoveAll(tempDir) + + if err := runGit("", "clone", remoteURL, tempDir); err != nil { + return nil, err + } + if err := runGit(tempDir, "checkout", branch); err != nil { + if err := runGit(tempDir, "checkout", "-B", branch, "origin/"+branch); err != nil { + return nil, err + } + } + + if err := syncGeneratedProjectRoots(sourceDir, tempDir); err != nil { + return nil, err + } + + lifecycleFiles, err := EnsureLifecycleFiles(tempDir, metadata) + if err != nil { + return nil, err + } + + if err := runGit(tempDir, 
"add", "."); err != nil { + return nil, err + } + if hasStagedChanges(tempDir) { + if err := commitLifecycleBootstrap(tempDir, repoFullName); err != nil { + return nil, err + } + } + if err := runGit(tempDir, "push", "origin", branch); err != nil { + return nil, err + } + return lifecycleFiles, nil +} + +func syncGeneratedProjectRoots(sourceDir, targetDir string) error { + for _, root := range []string{ + "cmd", + "internal", + "main.go", + "go.mod", + "go.sum", + "climate_meta.json", + } { + sourcePath := filepath.Join(sourceDir, root) + targetPath := filepath.Join(targetDir, root) + + if _, err := os.Stat(sourcePath); os.IsNotExist(err) { + if rmErr := os.RemoveAll(targetPath); rmErr != nil && !os.IsNotExist(rmErr) { + return fmt.Errorf("removing %s: %w", targetPath, rmErr) + } + continue + } + + if err := os.RemoveAll(targetPath); err != nil && !os.IsNotExist(err) { + return fmt.Errorf("clearing %s: %w", targetPath, err) + } + if err := copyPath(sourcePath, targetPath); err != nil { + return err + } + } + return nil +} + +func copyPath(sourcePath, targetPath string) error { + info, err := os.Stat(sourcePath) + if err != nil { + return fmt.Errorf("stat %s: %w", sourcePath, err) + } + if info.IsDir() { + if err := os.MkdirAll(targetPath, info.Mode()); err != nil { + return fmt.Errorf("creating dir %s: %w", targetPath, err) + } + entries, err := os.ReadDir(sourcePath) + if err != nil { + return fmt.Errorf("reading dir %s: %w", sourcePath, err) + } + for _, entry := range entries { + if err := copyPath(filepath.Join(sourcePath, entry.Name()), filepath.Join(targetPath, entry.Name())); err != nil { + return err + } + } + return nil + } + + data, err := os.ReadFile(sourcePath) + if err != nil { + return fmt.Errorf("reading file %s: %w", sourcePath, err) + } + if err := os.MkdirAll(filepath.Dir(targetPath), 0o755); err != nil { + return fmt.Errorf("creating parent dir for %s: %w", targetPath, err) + } + if err := os.WriteFile(targetPath, data, info.Mode()); err != nil { 
+ return fmt.Errorf("writing file %s: %w", targetPath, err) + } + return nil +} + func commitLifecycleBootstrap(sourceDir, repoFullName string) error { messagePath := filepath.Join(sourceDir, ".climate-git-commit.txt") defer func() { diff --git a/internal/publish/publish_test.go b/internal/publish/publish_test.go new file mode 100644 index 0000000..3d02672 --- /dev/null +++ b/internal/publish/publish_test.go @@ -0,0 +1,110 @@ +package publish + +import ( + "os" + "path/filepath" + "strings" + "testing" + + "github.com/disk0Dancer/climate/internal/githubutil" +) + +func TestSyncGitRepositoryExistingRepoPreservesRemoteFiles(t *testing.T) { + remoteBare := filepath.Join(t.TempDir(), "remote.git") + if err := runGit("", "init", "--bare", remoteBare); err != nil { + t.Fatalf("init bare repo: %v", err) + } + + seedDir := t.TempDir() + if err := runGit(seedDir, "init"); err != nil { + t.Fatalf("init seed repo: %v", err) + } + if err := runGit(seedDir, "config", "user.name", "climate"); err != nil { + t.Fatalf("git config user.name: %v", err) + } + if err := runGit(seedDir, "config", "user.email", "climate@example.test"); err != nil { + t.Fatalf("git config user.email: %v", err) + } + if err := os.WriteFile(filepath.Join(seedDir, "README.md"), []byte("# Existing README\n"), 0o644); err != nil { + t.Fatalf("write README: %v", err) + } + if err := os.MkdirAll(filepath.Join(seedDir, ".github", "workflows"), 0o755); err != nil { + t.Fatalf("mkdir workflows: %v", err) + } + if err := os.WriteFile(filepath.Join(seedDir, ".github", "workflows", "ci.yml"), []byte("name: CI\n"), 0o644); err != nil { + t.Fatalf("write ci workflow: %v", err) + } + if err := runGit(seedDir, "add", "."); err != nil { + t.Fatalf("git add seed: %v", err) + } + if err := runGit(seedDir, "commit", "-m", "seed"); err != nil { + t.Fatalf("git commit seed: %v", err) + } + if err := runGit(seedDir, "branch", "-M", "main"); err != nil { + t.Fatalf("git branch -M main: %v", err) + } + if err := runGit(seedDir, 
"remote", "add", "origin", remoteBare); err != nil { + t.Fatalf("git remote add origin: %v", err) + } + if err := runGit(seedDir, "push", "-u", "origin", "main"); err != nil { + t.Fatalf("git push seed: %v", err) + } + + sourceDir := t.TempDir() + if err := os.MkdirAll(filepath.Join(sourceDir, "cmd"), 0o755); err != nil { + t.Fatalf("mkdir cmd: %v", err) + } + if err := os.MkdirAll(filepath.Join(sourceDir, "internal", "config"), 0o755); err != nil { + t.Fatalf("mkdir internal/config: %v", err) + } + if err := os.WriteFile(filepath.Join(sourceDir, "go.mod"), []byte("module github\n\ngo 1.21\n"), 0o644); err != nil { + t.Fatalf("write go.mod: %v", err) + } + if err := os.WriteFile(filepath.Join(sourceDir, "main.go"), []byte("package main\nfunc main() {}\n"), 0o644); err != nil { + t.Fatalf("write main.go: %v", err) + } + if err := os.WriteFile(filepath.Join(sourceDir, "cmd", "config.go"), []byte("package cmd\n"), 0o644); err != nil { + t.Fatalf("write cmd/config.go: %v", err) + } + if err := os.WriteFile(filepath.Join(sourceDir, "internal", "config", "config.go"), []byte("package config\n"), 0o644); err != nil { + t.Fatalf("write internal/config/config.go: %v", err) + } + if err := os.WriteFile(filepath.Join(sourceDir, "climate_meta.json"), []byte("{}\n"), 0o644); err != nil { + t.Fatalf("write climate_meta.json: %v", err) + } + + repo := &githubutil.Repository{ + FullName: "disk0Dancer/github", + SSHURL: remoteBare, + DefaultBranch: "main", + } + files, err := syncGitRepository(sourceDir, repo, "main", ProjectMetadata{ + CLIName: "github", + Repository: repo.FullName, + DefaultBranch: "main", + }, true) + if err != nil { + t.Fatalf("syncGitRepository() error = %v", err) + } + if len(files) == 0 { + t.Fatal("expected lifecycle files to be written during existing repo sync") + } + + verifyDir := t.TempDir() + if err := runGit("", "clone", remoteBare, verifyDir); err != nil { + t.Fatalf("clone verify repo: %v", err) + } + readme, err := 
os.ReadFile(filepath.Join(verifyDir, "README.md")) + if err != nil { + t.Fatalf("read README after sync: %v", err) + } + if !strings.Contains(string(readme), "# Existing README") { + t.Fatal("existing README should be preserved when it is not climate-managed") + } + if _, err := os.Stat(filepath.Join(verifyDir, "cmd", "config.go")); err != nil { + t.Fatalf("generated cmd/config.go should be pushed: %v", err) + } + if _, err := os.Stat(filepath.Join(verifyDir, "internal", "config", "config.go")); err != nil { + t.Fatalf("generated internal/config/config.go should be pushed: %v", err) + } +} diff --git a/internal/skill/skill.go b/internal/skill/skill.go index 9e7daad..e7eb455 100644 --- a/internal/skill/skill.go +++ b/internal/skill/skill.go @@ -108,6 +108,36 @@ func GenerateCLIPrompt(entry manifest.CLIEntry, openAPI *spec.OpenAPI, mode Mode } } } + b.WriteString("\nEvery generated CLI also includes local config commands:\n\n") + b.WriteString("```\n") + b.WriteString(bin + " config list\n") + b.WriteString(bin + " config set <key> <value>\n") + b.WriteString(bin + " config get <key>\n") + b.WriteString(bin + " config unset <key>\n") + b.WriteString(bin + " config profiles list\n") + b.WriteString(bin + " config profiles create <name>\n") + b.WriteString(bin + " config profiles use <name>\n") + b.WriteString(bin + " config set --secret events.signing_secret <value>\n") + b.WriteString("```\n\n") + b.WriteString("Use config to persist defaults such as base URL, signature settings, and signing secrets. 
Commands operate on the active profile, similar to gcloud configurations.\n") + + if len(schemes) > 0 { + b.WriteString("\nGenerated CLIs also include auth commands:\n\n") + b.WriteString("```\n") + b.WriteString(bin + " auth login [--scheme <name>]\n") + b.WriteString(bin + " auth status\n") + b.WriteString(bin + " auth logout [--scheme <name>]\n") + b.WriteString("```\n\n") + b.WriteString("Use `auth login` to interactively store credentials or fetch/store an OAuth2 token when the security scheme exposes a compatible token URL.\n") + } + + b.WriteString("\nEvery generated CLI also includes spec-aware event commands:\n\n") + b.WriteString("```\n") + b.WriteString(bin + " events list\n") + b.WriteString(bin + " events listen [event-name] [--host 127.0.0.1] [--port 8081] [--path /] [--tunnel none|auto|cloudflared] [--signature-mode none|hmac]\n") + b.WriteString(bin + " events emit <event-name> --target-url <url> [--data-json '<json>'] [--data-file <path>] [--signature-mode none|hmac]\n") + b.WriteString("```\n\n") + b.WriteString("Use `events list` to inspect generated callback/webhook names. Use `events listen <event-name>` to receive a specific callback locally. Pass `--tunnel auto` to expose the listener through `cloudflared`. 
Use `--signature-mode hmac` plus config or flags for `events.signing_secret`, header name, algorithm, and timestamp behavior.\n") b.WriteString("\n") if mode == ModeCompact { diff --git a/internal/skill/skill_test.go b/internal/skill/skill_test.go index dedeeff..fae55c1 100644 --- a/internal/skill/skill_test.go +++ b/internal/skill/skill_test.go @@ -186,6 +186,46 @@ func TestGenerateCLIPromptServerVariables(t *testing.T) { } } +func TestGenerateCLIPromptIncludesEventsListener(t *testing.T) { + entry := manifest.CLIEntry{Name: "petstore", Version: "1.0.0"} + openAPI := sampleOpenAPI() + openAPI.Components = spec.Components{ + SecuritySchemes: map[string]spec.SecurityScheme{ + "bearerAuth": {Type: "http", Scheme: "bearer"}, + }, + } + + prompt := skill.GenerateCLIPrompt(entry, openAPI, skill.ModeFull) + + if !strings.Contains(prompt, "events list") { + t.Error("prompt should mention the generated events list command") + } + if !strings.Contains(prompt, "events listen") { + t.Error("prompt should mention the generated events listener command") + } + if !strings.Contains(prompt, "events emit") { + t.Error("prompt should mention the generated events emit command") + } + if !strings.Contains(prompt, "config list") { + t.Error("prompt should mention generated config commands") + } + if !strings.Contains(prompt, "config set --secret events.signing_secret") { + t.Error("prompt should mention secret config storage") + } + if !strings.Contains(prompt, "config profiles use") { + t.Error("prompt should mention named profile management") + } + if !strings.Contains(prompt, "--tunnel auto") { + t.Error("prompt should mention tunnel support for the events listener") + } + if !strings.Contains(prompt, "--signature-mode") { + t.Error("prompt should mention generic HMAC signature support") + } + if !strings.Contains(prompt, "auth login") { + t.Error("prompt should mention interactive auth commands") + } +} + func TestGenerateCLIPromptRequiredParam(t *testing.T) { entry := 
manifest.CLIEntry{Name: "petstore", Version: "1.0.0"} openAPI := sampleOpenAPI() diff --git a/internal/spec/loader.go b/internal/spec/loader.go index a54b164..dff94b1 100644 --- a/internal/spec/loader.go +++ b/internal/spec/loader.go @@ -68,8 +68,8 @@ func Validate(spec *OpenAPI) error { if spec.Info.Version == "" { return &ValidationError{Message: "missing required field: info.version"} } - if len(spec.Paths) == 0 { - return &ValidationError{Message: "spec has no paths defined"} + if len(spec.Paths) == 0 && len(spec.Webhooks) == 0 { + return &ValidationError{Message: "spec has no paths or webhooks defined"} } return nil } @@ -105,9 +105,49 @@ func resolveParameterRefs(spec *OpenAPI) { op.Parameters[i] = resolved } } + resolveCallbackParameterRefs(spec, op) } _ = pathKey } + + for _, pi := range spec.Webhooks { + for _, op := range pi.Operations() { + for i, p := range op.Parameters { + if p.Ref == "" { + continue + } + if !strings.HasPrefix(p.Ref, prefix) { + continue + } + name := strings.TrimPrefix(p.Ref, prefix) + if resolved, ok := spec.Components.Parameters[name]; ok { + op.Parameters[i] = resolved + } + } + } + } +} + +func resolveCallbackParameterRefs(spec *OpenAPI, op *Operation) { + const prefix = "#/components/parameters/" + for _, callback := range op.Callbacks { + for _, item := range callback { + for _, callbackOp := range item.Operations() { + for i, p := range callbackOp.Parameters { + if p.Ref == "" { + continue + } + if !strings.HasPrefix(p.Ref, prefix) { + continue + } + name := strings.TrimPrefix(p.Ref, prefix) + if resolved, ok := spec.Components.Parameters[name]; ok { + callbackOp.Parameters[i] = resolved + } + } + } + } + } } // isURL reports whether s looks like an HTTP(S) URL. 
diff --git a/internal/spec/spec_test.go b/internal/spec/spec_test.go index 9ddec02..a620053 100644 --- a/internal/spec/spec_test.go +++ b/internal/spec/spec_test.go @@ -79,7 +79,7 @@ func TestValidate(t *testing.T) { wantErr: true, }, { - name: "no paths", + name: "no paths or webhooks", spec: &spec.OpenAPI{ OpenAPI: "3.0.0", Info: spec.Info{Title: "Test", Version: "1.0.0"}, @@ -87,6 +87,19 @@ func TestValidate(t *testing.T) { }, wantErr: true, }, + { + name: "webhooks without paths", + spec: &spec.OpenAPI{ + OpenAPI: "3.1.0", + Info: spec.Info{Title: "Webhook API", Version: "1.0.0"}, + Webhooks: map[string]spec.PathItem{ + "payment.succeeded": { + Post: &spec.Operation{Summary: "Payment succeeded"}, + }, + }, + }, + wantErr: false, + }, } for _, tt := range tests { @@ -174,6 +187,89 @@ paths: } } +func TestParse_WebhooksAndCallbacks(t *testing.T) { + yamlSpec := ` +openapi: "3.1.0" +x-climate-signature-mode: hmac +info: + title: "Webhook API" + version: "1.0.0" +paths: + /subscriptions: + post: + operationId: subscriptions_create + x-climate-signature-header: X-Custom-Signature + callbacks: + invoicePaid: + "{$request.body#/callback_url}": + post: + summary: "Invoice paid callback" + x-climate-event-name: invoice-paid + x-climate-event-path: /webhooks/invoice-paid + requestBody: + content: + application/json: + schema: + type: object + properties: + event: + type: string +webhooks: + payment.succeeded: + post: + summary: "Payment succeeded webhook" + x-climate-event-name: payment-succeeded + x-climate-signature-algorithm: sha512 + x-climate-signature-include-timestamp: true + requestBody: + content: + application/json: + schema: + type: object + properties: + id: + type: string +` + s, err := spec.Parse("test.yaml", []byte(yamlSpec)) + if err != nil { + t.Fatalf("Parse() error = %v", err) + } + if len(s.Webhooks) != 1 { + t.Fatalf("Webhooks count = %d, want 1", len(s.Webhooks)) + } + if s.XClimateSignatureMode != "hmac" { + t.Fatalf("root signature mode = %q, want 
hmac", s.XClimateSignatureMode) + } + webhookOp := s.Webhooks["payment.succeeded"].Post + if webhookOp == nil || webhookOp.Summary != "Payment succeeded webhook" { + t.Fatal("expected parsed webhook operation") + } + if webhookOp.XClimateSignatureAlgorithm != "sha512" { + t.Fatalf("webhook algorithm = %q, want sha512", webhookOp.XClimateSignatureAlgorithm) + } + subscriptionOp := s.Paths["/subscriptions"].Post + if subscriptionOp == nil { + t.Fatal("expected parsed subscriptions operation") + } + if subscriptionOp.XClimateSignatureHeader != "X-Custom-Signature" { + t.Fatalf("callback parent signature header = %q", subscriptionOp.XClimateSignatureHeader) + } + callback, ok := subscriptionOp.Callbacks["invoicePaid"] + if !ok { + t.Fatal("expected callback to be parsed") + } + item, ok := callback["{$request.body#/callback_url}"] + if !ok || item.Post == nil { + t.Fatal("expected callback expression path item") + } + if item.Post.XClimateEventName != "invoice-paid" { + t.Fatalf("callback event name = %q, want invoice-paid", item.Post.XClimateEventName) + } + if item.Post.XClimateEventPath != "/webhooks/invoice-paid" { + t.Fatalf("callback event path = %q", item.Post.XClimateEventPath) + } +} + func TestIsURL(t *testing.T) { tests := []struct { input string diff --git a/internal/spec/types.go b/internal/spec/types.go index d8ceb87..41a5663 100644 --- a/internal/spec/types.go +++ b/internal/spec/types.go @@ -3,13 +3,21 @@ package spec // OpenAPI represents the top-level OpenAPI document. 
type OpenAPI struct { - OpenAPI string `json:"openapi" yaml:"openapi"` - Info Info `json:"info" yaml:"info"` - Servers []Server `json:"servers" yaml:"servers"` - Paths map[string]PathItem `json:"paths" yaml:"paths"` - Components Components `json:"components" yaml:"components"` - Security []SecurityRequirement `json:"security" yaml:"security"` - Tags []Tag `json:"tags" yaml:"tags"` + OpenAPI string `json:"openapi" yaml:"openapi"` + Info Info `json:"info" yaml:"info"` + Servers []Server `json:"servers" yaml:"servers"` + Paths map[string]PathItem `json:"paths" yaml:"paths"` + Webhooks map[string]PathItem `json:"webhooks" yaml:"webhooks"` + Components Components `json:"components" yaml:"components"` + Security []SecurityRequirement `json:"security" yaml:"security"` + Tags []Tag `json:"tags" yaml:"tags"` + XClimateEventName string `json:"x-climate-event-name" yaml:"x-climate-event-name"` + XClimateEventPath string `json:"x-climate-event-path" yaml:"x-climate-event-path"` + XClimateSignatureMode string `json:"x-climate-signature-mode" yaml:"x-climate-signature-mode"` + XClimateSignatureHeader string `json:"x-climate-signature-header" yaml:"x-climate-signature-header"` + XClimateSignatureAlgorithm string `json:"x-climate-signature-algorithm" yaml:"x-climate-signature-algorithm"` + XClimateSignatureIncludeTimestamp bool `json:"x-climate-signature-include-timestamp" yaml:"x-climate-signature-include-timestamp"` + XClimateSignatureTimestampHeader string `json:"x-climate-signature-timestamp-header" yaml:"x-climate-signature-timestamp-header"` } // Info holds API metadata. @@ -41,13 +49,20 @@ type Tag struct { // PathItem holds all operations for a path. 
type PathItem struct { - Get *Operation `json:"get" yaml:"get"` - Post *Operation `json:"post" yaml:"post"` - Put *Operation `json:"put" yaml:"put"` - Patch *Operation `json:"patch" yaml:"patch"` - Delete *Operation `json:"delete" yaml:"delete"` - Head *Operation `json:"head" yaml:"head"` - Options *Operation `json:"options" yaml:"options"` + Get *Operation `json:"get" yaml:"get"` + Post *Operation `json:"post" yaml:"post"` + Put *Operation `json:"put" yaml:"put"` + Patch *Operation `json:"patch" yaml:"patch"` + Delete *Operation `json:"delete" yaml:"delete"` + Head *Operation `json:"head" yaml:"head"` + Options *Operation `json:"options" yaml:"options"` + XClimateEventName string `json:"x-climate-event-name" yaml:"x-climate-event-name"` + XClimateEventPath string `json:"x-climate-event-path" yaml:"x-climate-event-path"` + XClimateSignatureMode string `json:"x-climate-signature-mode" yaml:"x-climate-signature-mode"` + XClimateSignatureHeader string `json:"x-climate-signature-header" yaml:"x-climate-signature-header"` + XClimateSignatureAlgorithm string `json:"x-climate-signature-algorithm" yaml:"x-climate-signature-algorithm"` + XClimateSignatureIncludeTimestamp bool `json:"x-climate-signature-include-timestamp" yaml:"x-climate-signature-include-timestamp"` + XClimateSignatureTimestampHeader string `json:"x-climate-signature-timestamp-header" yaml:"x-climate-signature-timestamp-header"` } // Operations returns all non-nil operations with their HTTP method. @@ -79,16 +94,27 @@ func (pi PathItem) Operations() map[string]*Operation { // Operation represents an OpenAPI operation. 
type Operation struct { - OperationID string `json:"operationId" yaml:"operationId"` - Summary string `json:"summary" yaml:"summary"` - Description string `json:"description" yaml:"description"` - Tags []string `json:"tags" yaml:"tags"` - Parameters []Parameter `json:"parameters" yaml:"parameters"` - RequestBody *RequestBody `json:"requestBody" yaml:"requestBody"` - Responses map[string]Response `json:"responses" yaml:"responses"` - Security []SecurityRequirement `json:"security" yaml:"security"` - Deprecated bool `json:"deprecated" yaml:"deprecated"` -} + OperationID string `json:"operationId" yaml:"operationId"` + Summary string `json:"summary" yaml:"summary"` + Description string `json:"description" yaml:"description"` + Tags []string `json:"tags" yaml:"tags"` + Parameters []Parameter `json:"parameters" yaml:"parameters"` + RequestBody *RequestBody `json:"requestBody" yaml:"requestBody"` + Responses map[string]Response `json:"responses" yaml:"responses"` + Callbacks map[string]Callback `json:"callbacks" yaml:"callbacks"` + Security []SecurityRequirement `json:"security" yaml:"security"` + Deprecated bool `json:"deprecated" yaml:"deprecated"` + XClimateEventName string `json:"x-climate-event-name" yaml:"x-climate-event-name"` + XClimateEventPath string `json:"x-climate-event-path" yaml:"x-climate-event-path"` + XClimateSignatureMode string `json:"x-climate-signature-mode" yaml:"x-climate-signature-mode"` + XClimateSignatureHeader string `json:"x-climate-signature-header" yaml:"x-climate-signature-header"` + XClimateSignatureAlgorithm string `json:"x-climate-signature-algorithm" yaml:"x-climate-signature-algorithm"` + XClimateSignatureIncludeTimestamp bool `json:"x-climate-signature-include-timestamp" yaml:"x-climate-signature-include-timestamp"` + XClimateSignatureTimestampHeader string `json:"x-climate-signature-timestamp-header" yaml:"x-climate-signature-timestamp-header"` +} + +// Callback maps runtime expressions to callback path items. 
+type Callback map[string]PathItem // Parameter represents an OpenAPI parameter. type Parameter struct { diff --git a/internal/uninstall/uninstall.go b/internal/uninstall/uninstall.go new file mode 100644 index 0000000..660f7d5 --- /dev/null +++ b/internal/uninstall/uninstall.go @@ -0,0 +1,215 @@ +package uninstall + +import ( + "fmt" + "os" + "path/filepath" + "strings" + + "github.com/disk0Dancer/climate/internal/completion" + "github.com/disk0Dancer/climate/internal/manifest" +) + +// InstallMethod describes how the climate binary was installed. +type InstallMethod string + +const ( + MethodHomebrew InstallMethod = "homebrew" + MethodGoInstall InstallMethod = "go-install" + MethodStandalone InstallMethod = "standalone" +) + +// RunCommand executes an external command. +type RunCommand func(name string, args ...string) error + +// Options controls self-uninstall behavior. +type Options struct { + Home string + GOOS string + ExecutablePath string + Full bool + EvalSymlinks func(string) (string, error) + RunCommand RunCommand +} + +// GeneratedCLIResult describes cleanup work for one generated CLI. +type GeneratedCLIResult struct { + Name string `json:"name"` + BinaryPath string `json:"binary_path,omitempty"` + BinaryRemoved bool `json:"binary_removed"` + SourceDir string `json:"source_dir,omitempty"` + SourceRemoved bool `json:"source_removed,omitempty"` + Warnings []string `json:"warnings,omitempty"` +} + +// Result describes self-uninstall work. 
+type Result struct { + InstallMethod string `json:"install_method"` + Mode string `json:"mode"` + ExecutablePath string `json:"executable_path"` + ExecutableRemoved bool `json:"executable_removed"` + GeneratedCLIs []GeneratedCLIResult `json:"generated_clis,omitempty"` + CompletionCleanup []completion.UninstallResult `json:"completion_cleanup,omitempty"` + ManifestRemoved bool `json:"manifest_removed"` + Warnings []string `json:"warnings,omitempty"` +} + +// DetectInstallMethod derives the installation method from the executable path. +func DetectInstallMethod(home, executablePath string, evalSymlinks func(string) (string, error)) (InstallMethod, string, error) { + resolved := executablePath + if evalSymlinks != nil { + realPath, err := evalSymlinks(executablePath) + if err != nil && !os.IsNotExist(err) { + return "", "", fmt.Errorf("resolving executable path: %w", err) + } + if err == nil && realPath != "" { + resolved = realPath + } + } + + if isHomebrewPath(executablePath) || isHomebrewPath(resolved) { + return MethodHomebrew, resolved, nil + } + if isGoInstallPath(home, executablePath) || isGoInstallPath(home, resolved) { + return MethodGoInstall, resolved, nil + } + return MethodStandalone, resolved, nil +} + +// RemoveGeneratedCLI deletes a generated CLI binary and optionally its sources. 
+func RemoveGeneratedCLI(entry manifest.CLIEntry, purgeSources bool) GeneratedCLIResult { + result := GeneratedCLIResult{ + Name: entry.Name, + BinaryPath: entry.BinaryPath, + SourceDir: entry.SourceDir, + } + + if entry.BinaryPath != "" { + if err := os.Remove(entry.BinaryPath); err == nil { + result.BinaryRemoved = true + } else if !os.IsNotExist(err) { + result.Warnings = append(result.Warnings, fmt.Sprintf("could not remove binary %s: %v", entry.BinaryPath, err)) + } + } + + if purgeSources && entry.SourceDir != "" { + if err := os.RemoveAll(entry.SourceDir); err == nil { + result.SourceRemoved = true + } else { + result.Warnings = append(result.Warnings, fmt.Sprintf("could not remove source dir %s: %v", entry.SourceDir, err)) + } + } + + return result +} + +// Self uninstalls the climate binary and optionally climate-managed local data. +func Self(opts Options) (Result, error) { + method, resolvedExecutable, err := DetectInstallMethod(opts.Home, opts.ExecutablePath, opts.EvalSymlinks) + if err != nil { + return Result{}, err + } + + result := Result{ + InstallMethod: string(method), + Mode: "cli", + ExecutablePath: resolvedExecutable, + } + if opts.Full { + result.Mode = "full" + + mf, loadErr := manifest.LoadFrom(filepath.Join(opts.Home, ".climate", "manifest.json")) + if loadErr != nil { + return Result{}, loadErr + } + + for _, entry := range mf.List() { + removed := RemoveGeneratedCLI(entry, true) + result.GeneratedCLIs = append(result.GeneratedCLIs, removed) + result.Warnings = append(result.Warnings, removed.Warnings...) 
+ } + + if err := os.Remove(mf.Path()); err == nil { + result.ManifestRemoved = true + } else if !os.IsNotExist(err) { + return Result{}, fmt.Errorf("removing manifest: %w", err) + } + + for _, shellName := range completion.SupportedShellNames() { + shell, parseErr := completion.ParseShell(shellName) + if parseErr != nil { + return Result{}, parseErr + } + cleanupResult, cleanupErr := completion.Uninstall(opts.Home, shell, opts.GOOS) + if cleanupErr != nil { + return Result{}, cleanupErr + } + result.CompletionCleanup = append(result.CompletionCleanup, cleanupResult) + } + + pruneIfEmpty(filepath.Join(opts.Home, ".climate", "completions")) + pruneIfEmpty(filepath.Join(opts.Home, ".climate")) + } + + switch method { + case MethodHomebrew: + if opts.RunCommand == nil { + return Result{}, fmt.Errorf("homebrew uninstall requires a command runner") + } + if err := opts.RunCommand("brew", "uninstall", "climate"); err != nil { + return Result{}, fmt.Errorf("running brew uninstall climate: %w", err) + } + result.ExecutableRemoved = true + case MethodGoInstall, MethodStandalone: + if err := os.Remove(resolvedExecutable); err != nil && !os.IsNotExist(err) { + return Result{}, fmt.Errorf("removing executable %s: %w", resolvedExecutable, err) + } + result.ExecutableRemoved = true + default: + return Result{}, fmt.Errorf("unsupported install method %q", method) + } + + return result, nil +} + +func isHomebrewPath(path string) bool { + return strings.Contains(filepath.ToSlash(path), "/Cellar/climate/") +} + +func isGoInstallPath(home, path string) bool { + cleanPath := filepath.Clean(path) + for _, candidate := range goInstallCandidates(home) { + if candidate == "" { + continue + } + if cleanPath == filepath.Clean(candidate) { + return true + } + } + return false +} + +func goInstallCandidates(home string) []string { + candidates := []string{} + if gobin := strings.TrimSpace(os.Getenv("GOBIN")); gobin != "" { + candidates = append(candidates, filepath.Join(gobin, "climate")) + } 
+ if gopath := strings.TrimSpace(os.Getenv("GOPATH")); gopath != "" { + for _, part := range filepath.SplitList(gopath) { + if strings.TrimSpace(part) == "" { + continue + } + candidates = append(candidates, filepath.Join(part, "bin", "climate")) + } + } + candidates = append(candidates, filepath.Join(home, "go", "bin", "climate")) + return candidates +} + +func pruneIfEmpty(path string) { + entries, err := os.ReadDir(path) + if err != nil || len(entries) > 0 { + return + } + _ = os.Remove(path) +} diff --git a/internal/uninstall/uninstall_test.go b/internal/uninstall/uninstall_test.go new file mode 100644 index 0000000..713f00c --- /dev/null +++ b/internal/uninstall/uninstall_test.go @@ -0,0 +1,161 @@ +package uninstall_test + +import ( + "os" + "path/filepath" + "testing" + + "github.com/disk0Dancer/climate/internal/completion" + "github.com/disk0Dancer/climate/internal/manifest" + "github.com/disk0Dancer/climate/internal/uninstall" +) + +func TestDetectInstallMethodHomebrew(t *testing.T) { + t.Parallel() + + method, resolved, err := uninstall.DetectInstallMethod( + t.TempDir(), + "/opt/homebrew/bin/climate", + func(string) (string, error) { + return "/opt/homebrew/Cellar/climate/1.2.3/bin/climate", nil + }, + ) + if err != nil { + t.Fatalf("DetectInstallMethod() error = %v", err) + } + if method != uninstall.MethodHomebrew { + t.Fatalf("method = %q, want %q", method, uninstall.MethodHomebrew) + } + if resolved != "/opt/homebrew/Cellar/climate/1.2.3/bin/climate" { + t.Fatalf("resolved = %q", resolved) + } +} + +func TestDetectInstallMethodGoInstall(t *testing.T) { + t.Parallel() + + home := t.TempDir() + executable := filepath.Join(home, "go", "bin", "climate") + + method, _, err := uninstall.DetectInstallMethod(home, executable, nil) + if err != nil { + t.Fatalf("DetectInstallMethod() error = %v", err) + } + if method != uninstall.MethodGoInstall { + t.Fatalf("method = %q, want %q", method, uninstall.MethodGoInstall) + } +} + +func TestSelfFullStandalone(t 
*testing.T) { + t.Parallel() + + home := t.TempDir() + executable := filepath.Join(home, "bin", "climate") + if err := os.MkdirAll(filepath.Dir(executable), 0o755); err != nil { + t.Fatalf("mkdir executable dir: %v", err) + } + if err := os.WriteFile(executable, []byte("binary"), 0o755); err != nil { + t.Fatalf("write executable: %v", err) + } + + entryBinary := filepath.Join(home, ".climate", "bin", "petstore") + entrySource := filepath.Join(home, "src", "petstore") + if err := os.MkdirAll(filepath.Dir(entryBinary), 0o755); err != nil { + t.Fatalf("mkdir generated bin dir: %v", err) + } + if err := os.WriteFile(entryBinary, []byte("generated"), 0o755); err != nil { + t.Fatalf("write generated binary: %v", err) + } + if err := os.MkdirAll(entrySource, 0o755); err != nil { + t.Fatalf("mkdir source dir: %v", err) + } + if err := os.WriteFile(filepath.Join(entrySource, "main.go"), []byte("package main"), 0o644); err != nil { + t.Fatalf("write source file: %v", err) + } + + mf, err := manifest.LoadFrom(filepath.Join(home, ".climate", "manifest.json")) + if err != nil { + t.Fatalf("LoadFrom() error = %v", err) + } + mf.Upsert(manifest.CLIEntry{ + Name: "petstore", + BinaryPath: entryBinary, + SourceDir: entrySource, + }) + if err := mf.Save(); err != nil { + t.Fatalf("Save() error = %v", err) + } + + if _, err := completion.Install(home, completion.ShellZsh, "darwin", func(w completion.Writer) error { + _, writeErr := w.Write([]byte("# completion script\n")) + return writeErr + }); err != nil { + t.Fatalf("completion.Install() error = %v", err) + } + + result, err := uninstall.Self(uninstall.Options{ + Home: home, + GOOS: "darwin", + ExecutablePath: executable, + Full: true, + }) + if err != nil { + t.Fatalf("Self() error = %v", err) + } + if result.Mode != "full" { + t.Fatalf("Mode = %q, want full", result.Mode) + } + if !result.ExecutableRemoved { + t.Fatal("ExecutableRemoved should be true") + } + if len(result.GeneratedCLIs) != 1 { + t.Fatalf("expected 1 generated 
CLI cleanup result, got %d", len(result.GeneratedCLIs)) + } + if _, err := os.Stat(executable); !os.IsNotExist(err) { + t.Fatalf("executable should be removed, stat err = %v", err) + } + if _, err := os.Stat(entryBinary); !os.IsNotExist(err) { + t.Fatalf("generated binary should be removed, stat err = %v", err) + } + if _, err := os.Stat(entrySource); !os.IsNotExist(err) { + t.Fatalf("source dir should be removed, stat err = %v", err) + } + if _, err := os.Stat(filepath.Join(home, ".climate", "manifest.json")); !os.IsNotExist(err) { + t.Fatalf("manifest should be removed, stat err = %v", err) + } +} + +func TestSelfHomebrewUsesBrewUninstall(t *testing.T) { + t.Parallel() + + home := t.TempDir() + called := false + + result, err := uninstall.Self(uninstall.Options{ + Home: home, + GOOS: "darwin", + ExecutablePath: "/opt/homebrew/bin/climate", + EvalSymlinks: func(string) (string, error) { + return "/opt/homebrew/Cellar/climate/1.2.3/bin/climate", nil + }, + RunCommand: func(name string, args ...string) error { + called = true + if name != "brew" { + t.Fatalf("command name = %q, want brew", name) + } + if len(args) != 2 || args[0] != "uninstall" || args[1] != "climate" { + t.Fatalf("args = %#v", args) + } + return nil + }, + }) + if err != nil { + t.Fatalf("Self() error = %v", err) + } + if !called { + t.Fatal("brew uninstall should be invoked") + } + if result.InstallMethod != string(uninstall.MethodHomebrew) { + t.Fatalf("InstallMethod = %q", result.InstallMethod) + } +} diff --git a/skills-lock.json b/skills-lock.json new file mode 100644 index 0000000..8f7f75a --- /dev/null +++ b/skills-lock.json @@ -0,0 +1,10 @@ +{ + "version": 1, + "skills": { + "climate-generator": { + "source": "disk0Dancer/climate", + "sourceType": "github", + "computedHash": "b11674e8c3d850410882e5b80b8d80aa032a48b0ebb7066de6306c313b5d723d" + } + } +} diff --git a/skills/climate-generator/SKILL.md b/skills/climate-generator/SKILL.md index 6298d1d..10e2861 100644 --- 
a/skills/climate-generator/SKILL.md +++ b/skills/climate-generator/SKILL.md @@ -22,7 +22,10 @@ clients from OpenAPI specifications and can emit Markdown prompts for agent skil - The user wants to turn a generated CLI into a reusable agent skill. - The user wants to compose multiple microservice specs into one facade CLI. - The user wants a local OpenAPI simulator (mock server) for testing. +- The user wants the generated CLI to receive or emit webhook callbacks itself. +- The user wants shell completion for climate itself. - The user wants to list, remove, or upgrade a previously generated CLI. +- The user wants to uninstall climate itself and clean up local climate-managed artifacts. ## Core workflow @@ -31,8 +34,9 @@ clients from OpenAPI specifications and can emit Markdown prompts for agent skil 3. Capture the resulting `cli_name`, `binary_path`, and `source_dir`. 4. If the user wants agent integration, run `climate skill generate `. 5. If the user needs sandbox/simulator behavior, run `climate mock `. -6. If the user wants the CLI managed on GitHub, run `climate publish `. -7. Follow the generated instructions from that Markdown prompt. +6. If the user wants shell completion for climate itself, run `climate completion install --shell `. +7. If the user wants the CLI managed on GitHub, run `climate publish `. +8. Follow the generated instructions from that Markdown prompt. 
## Commands @@ -58,6 +62,25 @@ Success output is JSON: } ``` +Generated CLIs also include config plus spec-aware event commands: + +```bash + events list + config list + config set + config get + config unset + config profiles list + config profiles create + config profiles use + config set --secret events.signing_secret + auth login [--scheme ] + auth status + auth logout [--scheme ] + events listen [event-name] [--host 127.0.0.1] [--port 8081] [--path /] [--tunnel none|auto|cloudflared] [--signature-mode none|hmac] + events emit --target-url [--data-json ] [--data-file ] [--signature-mode none|hmac] +``` + ### List generated CLIs ```bash @@ -79,10 +102,18 @@ path or URL and `` starts with `/`. climate mock [--port ] [--latency ] [--emit-url --event-path [--event-method ]] ``` +### Generate or install shell completions + +```bash +climate completion bash|zsh|fish|powershell +climate completion install [--shell bash|zsh|fish|powershell] +climate completion uninstall [--shell bash|zsh|fish|powershell] +``` + ### Remove a generated CLI ```bash -climate remove [--purge-sources] +climate remove [--purge-sources] [--yes] ``` ### Upgrade a generated CLI @@ -91,6 +122,12 @@ climate remove [--purge-sources] climate upgrade [--openapi ] ``` +### Uninstall climate itself + +```bash +climate uninstall [--full] [--yes] +``` + ### Generate a skill prompt for a CLI ```bash @@ -147,11 +184,11 @@ climate publish petstore --owner disk0Dancer ## Notes -- Most climate commands output JSON on success (`generate`, `compose`, `list`, - `publish`, `remove`, `upgrade`). -- `climate mock` in server mode and both `climate skill ...` commands output - plain text / Markdown by design. +- Most climate management output is JSON on success. +- `climate skill generate`, `climate skill generator`, and `climate completion ` print text to stdout. +- `climate mock` in server mode and `climate mock --emit-url ...` intentionally print plain-text runtime output. 
- Errors are emitted as structured JSON on stderr. - Generated CLIs follow the shape ` [flags] --output=json|table|raw`. - Homebrew install is available via `brew tap disk0Dancer/tap && brew install climate`. - GitHub publish auth is read from `--github-token`, `GITHUB_TOKEN`, or `GH_TOKEN`. +- `climate remove` and `climate uninstall` prompt unless `--yes` is passed. diff --git a/skills/climate.md b/skills/climate.md index d30062e..225a0cf 100644 --- a/skills/climate.md +++ b/skills/climate.md @@ -11,10 +11,13 @@ so you can self-register those CLIs as new skills. - Generate a typed Go CLI from any OpenAPI spec (URL or local file). - Compose several OpenAPI specs into one facade CLI with per-spec path prefixes. - Run a local OpenAPI-based mock HTTP server for simulator/sandbox workflows. +- Generate CLIs that can run spec-aware webhook/callback commands with optional cloudflared exposure and a local config store. +- Generate shell completion scripts for climate and manage local install/uninstall. - List all CLIs you have already generated. - Get a plain-text skill prompt for any generated CLI so you can self-register it. - Publish a generated CLI into a GitHub repository with lifecycle bootstrap. - Remove or upgrade a previously generated CLI. +- Uninstall the climate CLI itself, with optional full cleanup of climate-managed local artifacts. 
--- @@ -46,6 +49,33 @@ climate generate [--name ] [--out-dir ] [--no-build] [--force] events list + config list + config set + config get + config unset + config profiles list + config profiles create + config profiles use + config set --secret events.signing_secret + auth login [--scheme ] + auth status + auth logout [--scheme ] + events listen [event-name] [--host 127.0.0.1] [--port 8081] [--path /] [--tunnel none|auto|cloudflared] [--signature-mode none|hmac] + events emit --target-url [--data-json ] [--data-file ] [--signature-mode none|hmac] +``` + +Use this to inspect generated callback/webhook names, receive them locally, and +emit synthetic payloads. Named profiles act like lightweight gcloud-style +profiles, and `auth login` can interactively store credentials or fetch/store +OAuth2 tokens when the API exposes compatible flows. `config set --secret +events.signing_secret ...` stores the signing secret for later use. `--tunnel +auto` exposes the listener through `cloudflared`. HMAC signing is configurable +via header name, algorithm, and optional timestamp signing. + --- ### Compose multiple specs into one facade CLI @@ -88,6 +118,21 @@ manifest (`~/.climate/manifest.json`). --- +### Generate or manage shell completions for climate + +``` +climate completion bash|zsh|fish|powershell +climate completion install [--shell bash|zsh|fish|powershell] +climate completion uninstall [--shell bash|zsh|fish|powershell] +``` + +`climate completion ` prints the raw completion script to stdout. +`install` writes a climate-managed script file and updates the relevant local +shell config when needed. `uninstall` removes only climate-managed completion +files and config blocks. + +--- + ### Get a skill prompt for a generated CLI ``` @@ -122,11 +167,28 @@ Authentication is read from `--github-token`, `GITHUB_TOKEN`, or `GH_TOKEN`. 
### Remove a generated CLI ``` -climate remove [--purge-sources] +climate remove [--purge-sources] [--yes] +``` + +Prompts before deletion by default. Removes the binary and manifest entry. +`--purge-sources` also deletes the generated source directory. `--yes` skips +the prompt. + +--- + +### Uninstall the climate CLI itself + +``` +climate uninstall [--full] [--yes] ``` -Removes the binary and manifest entry. `--purge-sources` also deletes the -generated source directory. +Detects whether climate was installed via Homebrew, `go install`, or a +standalone binary and removes it the right way for that installation method. + +- Default mode removes only the climate executable. +- `--full` also removes generated CLIs recorded in the manifest, their source + directories, the manifest file, and climate-managed shell completion wiring. +- `--yes` skips the prompt. --- @@ -142,17 +204,15 @@ Re-generates and rebuilds a CLI. Pass `--openapi` to use a different spec. ## Output format -Most commands exit 0 and print JSON to stdout (`generate`, `compose`, `list`, -`publish`, `remove`, `upgrade`). - -Text/Markdown-oriented commands intentionally print plain text: +Most climate management commands exit 0 and print JSON to stdout. +Text-printing commands include: -- `climate mock` (server mode) prints startup info and route table -- `climate mock --emit-url ...` prints a one-line emission result -- `climate skill generate` prints Markdown prompt text -- `climate skill generator` prints the built-in Markdown skill +- `climate skill generate` +- `climate skill generator` +- `climate completion ` +- `climate mock` runtime output -On error commands exit non-zero and print to stderr: +On error commands exit non-zero and print structured JSON to stderr: ```json { @@ -173,9 +233,11 @@ On error commands exit non-zero and print to stderr: 3. Note the `cli_name` in the JSON response. 4. Run `climate skill generate ` → read the plain-text prompt it prints. 5. 
Optional: run `climate mock ` for local simulator/sandbox testing. -6. Run `climate publish ` if the user wants the generated CLI managed on GitHub. -7. Follow the self-registration instructions inside that prompt. -8. Use the new CLI skill for all subsequent tasks that involve that API. +6. Optional: run `climate completion install --shell zsh` if the user wants tab completion for climate itself. +7. Run `climate publish ` if the user wants the generated CLI managed on GitHub. +8. Follow the self-registration instructions inside that prompt. +9. Use the new CLI skill for all subsequent tasks that involve that API. +10. If the user wants to remove a generated CLI later, prefer `climate remove` and let the confirmation prompt guard accidental deletion. ---