From ea73de5d66c0147d7ab64f82c6618078116cb3fb Mon Sep 17 00:00:00 2001 From: Lennart Kats Date: Wed, 24 Dec 2025 15:12:29 +0100 Subject: [PATCH 1/5] Add init-template subcommands for jobs, pipelines, and empty bundles MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Refactors init-template into dedicated subcommands with comprehensive L2 guidance for each resource type: - Add job subcommand using default-python template - Add pipeline subcommand using lakeflow-pipelines template - Add empty subcommand using default-minimal template - Create L2 templates: target_jobs, target_pipelines, target_mixed - Add AGENTS.tmpl for scaffolded projects - Parameterize default catalog in discover flow - Update detector to show "mixed" guidance for non-app-only projects - Move implementation to cmd/init_template/ subpackage - Make databricks_discover description more forceful (MANDATORY) šŸ¤– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- experimental/apps-mcp/cmd/apps_mcp.go | 2 + .../app.go} | 136 +++-------------- .../apps-mcp/cmd/init_template/common.go | 132 ++++++++++++++++ .../apps-mcp/cmd/init_template/empty.go | 143 +++++++++++++++++ .../cmd/init_template/init_template.go | 25 +++ .../apps-mcp/cmd/init_template/job.go | 133 ++++++++++++++++ .../apps-mcp/cmd/init_template/pipeline.go | 144 ++++++++++++++++++ experimental/apps-mcp/cmd/tools.go | 3 +- experimental/apps-mcp/lib/common/output.go | 15 ++ .../apps-mcp/lib/detector/bundle_detector.go | 28 +++- experimental/apps-mcp/lib/prompts/AGENTS.tmpl | 35 +++++ experimental/apps-mcp/lib/prompts/flow.tmpl | 23 ++- .../apps-mcp/lib/prompts/target_jobs.tmpl | 57 +++++++ .../apps-mcp/lib/prompts/target_mixed.tmpl | 58 +++++++ .../lib/prompts/target_pipelines.tmpl | 61 ++++++++ .../lib/providers/clitools/discover.go | 38 +++-- .../lib/providers/clitools/provider.go | 2 +- 17 files changed, 900 insertions(+), 135 deletions(-) rename experimental/apps-mcp/cmd/{init_template.go => init_template/app.go} (69%) create mode 100644 experimental/apps-mcp/cmd/init_template/common.go create mode 100644 experimental/apps-mcp/cmd/init_template/empty.go create mode 100644 experimental/apps-mcp/cmd/init_template/init_template.go create mode 100644 experimental/apps-mcp/cmd/init_template/job.go create mode 100644 experimental/apps-mcp/cmd/init_template/pipeline.go create mode 100644 experimental/apps-mcp/lib/prompts/AGENTS.tmpl create mode 100644 experimental/apps-mcp/lib/prompts/target_jobs.tmpl create mode 100644 experimental/apps-mcp/lib/prompts/target_mixed.tmpl create mode 100644 experimental/apps-mcp/lib/prompts/target_pipelines.tmpl diff --git a/experimental/apps-mcp/cmd/apps_mcp.go b/experimental/apps-mcp/cmd/apps_mcp.go index 83da91447c..67bc8213be 100644 --- a/experimental/apps-mcp/cmd/apps_mcp.go +++ b/experimental/apps-mcp/cmd/apps_mcp.go @@ -2,6 +2,7 @@ package mcp import ( mcplib "github.com/databricks/cli/experimental/apps-mcp/lib" + "github.com/databricks/cli/experimental/apps-mcp/cmd/init_template" "github.com/databricks/cli/experimental/apps-mcp/lib/server" "github.com/databricks/cli/libs/log" "github.com/spf13/cobra" @@ -51,6 +52,7 @@ The server communicates via stdio using the Model Context Protocol.`, cmd.AddCommand(newInstallCmd()) cmd.AddCommand(newToolsCmd()) + cmd.AddCommand(init_template.NewInitTemplateCommand()) return cmd } diff --git a/experimental/apps-mcp/cmd/init_template.go b/experimental/apps-mcp/cmd/init_template/app.go similarity index 69% rename from 
experimental/apps-mcp/cmd/init_template.go rename to experimental/apps-mcp/cmd/init_template/app.go index a003b14d37..a27da91871 100644 --- a/experimental/apps-mcp/cmd/init_template.go +++ b/experimental/apps-mcp/cmd/init_template/app.go @@ -1,4 +1,4 @@ -package mcp +package init_template import ( "context" @@ -7,8 +7,6 @@ import ( "fmt" "os" "path/filepath" - "sort" - "strings" "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/experimental/apps-mcp/lib/common" @@ -19,6 +17,13 @@ import ( "github.com/spf13/cobra" ) +const ( + defaultTemplateRepo = "https://github.com/databricks/cli" + defaultTemplateDir = "experimental/apps-mcp/templates/appkit" + defaultBranch = "main" + templatePathEnvVar = "DATABRICKS_APPKIT_TEMPLATE_PATH" +) + func validateAppNameLength(projectName string) error { const maxAppNameLength = 30 const devTargetPrefix = "dev-" @@ -73,103 +78,19 @@ func readClaudeMd(ctx context.Context, configFile string) { cmdio.LogString(ctx, "=================\n") } -// generateFileTree creates a tree-style visualization of the file structure. -// Collapses directories with more than 10 files to avoid clutter. -func generateFileTree(outputDir string) (string, error) { - const maxFilesToShow = 10 - - // collect all files in the output directory - var allFiles []string - err := filepath.Walk(outputDir, func(path string, info os.FileInfo, err error) error { - if err != nil { - return err - } - if !info.IsDir() { - relPath, err := filepath.Rel(outputDir, path) - if err != nil { - return err - } - allFiles = append(allFiles, filepath.ToSlash(relPath)) - } - return nil - }) - if err != nil { - return "", err - } - - // build a tree structure - tree := make(map[string][]string) - - for _, relPath := range allFiles { - parts := strings.Split(relPath, "/") - - if len(parts) == 1 { - // root level file - tree[""] = append(tree[""], parts[0]) - } else { - // file in subdirectory - dir := strings.Join(parts[:len(parts)-1], "/") - fileName := parts[len(parts)-1] - tree[dir] = append(tree[dir], fileName) - } - } - - // format as tree - var output strings.Builder - var sortedDirs []string - for dir := range tree { - sortedDirs = append(sortedDirs, dir) - } - sort.Strings(sortedDirs) - - for _, dir := range sortedDirs { - filesInDir := tree[dir] - if dir == "" { - // root files - always show all - for _, file := range filesInDir { - output.WriteString(file) - output.WriteString("\n") - } - } else { - // directory - output.WriteString(dir) - output.WriteString("/\n") - if len(filesInDir) <= maxFilesToShow { - // show all files - for _, file := range filesInDir { - output.WriteString(" ") - output.WriteString(file) - output.WriteString("\n") - } - } else { - // collapse large directories - output.WriteString(fmt.Sprintf(" (%d files)\n", len(filesInDir))) - } - } - } - - return output.String(), nil -} - -const ( - defaultTemplateRepo = "https://github.com/databricks/cli" - defaultTemplateDir = "experimental/apps-mcp/templates/appkit" - defaultBranch = "main" - templatePathEnvVar = "DATABRICKS_APPKIT_TEMPLATE_PATH" -) - -func newInitTemplateCmd() *cobra.Command { +// newAppCmd creates the app subcommand for init-template. +func newAppCmd() *cobra.Command { cmd := &cobra.Command{ - Use: "init-template", + Use: "app", Short: "Initialize a Databricks App using the appkit template", Args: cobra.NoArgs, Long: `Initialize a Databricks App using the appkit template. 
Examples: - experimental apps-mcp tools init-template --name my-app - experimental apps-mcp tools init-template --name my-app --warehouse abc123 - experimental apps-mcp tools init-template --name my-app --description "My cool app" - experimental apps-mcp tools init-template --name my-app --output-dir ./projects + experimental apps-mcp tools init-template app --name my-app + experimental apps-mcp tools init-template app --name my-app --warehouse abc123 + experimental apps-mcp tools init-template app --name my-app --description "My cool app" + experimental apps-mcp tools init-template app --name my-app --output-dir ./projects Environment variables: DATABRICKS_APPKIT_TEMPLATE_PATH Override template source with local path (for development) @@ -264,24 +185,11 @@ After initialization: } // Write config to temp file - tmpFile, err := os.CreateTemp("", "mcp-template-config-*.json") - if err != nil { - return fmt.Errorf("create temp config file: %w", err) - } - defer os.Remove(tmpFile.Name()) - - configBytes, err := json.Marshal(configMap) + configFile, err := writeConfigToTempFile(configMap) if err != nil { - return fmt.Errorf("marshal config: %w", err) - } - if _, err := tmpFile.Write(configBytes); err != nil { - return fmt.Errorf("write config file: %w", err) - } - if err := tmpFile.Close(); err != nil { - return fmt.Errorf("close config file: %w", err) + return err } - - configFile := tmpFile.Name() + defer os.Remove(configFile) // Create output directory if specified and doesn't exist if outputDir != "" { @@ -317,17 +225,11 @@ After initialization: } // Count files and get absolute path - fileCount := 0 absOutputDir, err := filepath.Abs(actualOutputDir) if err != nil { absOutputDir = actualOutputDir } - _ = filepath.Walk(absOutputDir, func(path string, info os.FileInfo, err error) error { - if err == nil && !info.IsDir() { - fileCount++ - } - return nil - }) + fileCount := countFiles(absOutputDir) cmdio.LogString(ctx, common.FormatScaffoldSuccess("appkit", absOutputDir, fileCount)) // Generate and print file tree structure diff --git a/experimental/apps-mcp/cmd/init_template/common.go b/experimental/apps-mcp/cmd/init_template/common.go new file mode 100644 index 0000000000..87d720d7af --- /dev/null +++ b/experimental/apps-mcp/cmd/init_template/common.go @@ -0,0 +1,132 @@ +package init_template + +import ( + "encoding/json" + "fmt" + "os" + "path/filepath" + "sort" + "strings" + + "github.com/databricks/cli/experimental/apps-mcp/lib/prompts" +) + +// countFiles counts the number of files in a directory. +func countFiles(dir string) int { + count := 0 + _ = filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + if err == nil && !info.IsDir() { + count++ + } + return nil + }) + return count +} + +// writeConfigToTempFile writes a config map to a temporary JSON file. 
+func writeConfigToTempFile(configMap map[string]any) (string, error) { + tmpFile, err := os.CreateTemp("", "mcp-template-config-*.json") + if err != nil { + return "", fmt.Errorf("create temp config file: %w", err) + } + + configBytes, err := json.Marshal(configMap) + if err != nil { + os.Remove(tmpFile.Name()) + return "", fmt.Errorf("marshal config: %w", err) + } + if _, err := tmpFile.Write(configBytes); err != nil { + os.Remove(tmpFile.Name()) + return "", fmt.Errorf("write config file: %w", err) + } + if err := tmpFile.Close(); err != nil { + os.Remove(tmpFile.Name()) + return "", fmt.Errorf("close config file: %w", err) + } + + return tmpFile.Name(), nil +} + +// generateFileTree creates a tree-style visualization of the file structure. +// Collapses directories with more than 10 files to avoid clutter. +func generateFileTree(outputDir string) (string, error) { + const maxFilesToShow = 10 + + var allFiles []string + err := filepath.Walk(outputDir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if !info.IsDir() { + relPath, err := filepath.Rel(outputDir, path) + if err != nil { + return err + } + allFiles = append(allFiles, filepath.ToSlash(relPath)) + } + return nil + }) + if err != nil { + return "", err + } + + tree := make(map[string][]string) + + for _, relPath := range allFiles { + parts := strings.Split(relPath, "/") + + if len(parts) == 1 { + tree[""] = append(tree[""], parts[0]) + } else { + dir := strings.Join(parts[:len(parts)-1], "/") + fileName := parts[len(parts)-1] + tree[dir] = append(tree[dir], fileName) + } + } + + var output strings.Builder + var sortedDirs []string + for dir := range tree { + sortedDirs = append(sortedDirs, dir) + } + sort.Strings(sortedDirs) + + for _, dir := range sortedDirs { + filesInDir := tree[dir] + if dir == "" { + for _, file := range filesInDir { + output.WriteString(file) + output.WriteString("\n") + } + } else { + output.WriteString(dir) + output.WriteString("/\n") + if len(filesInDir) <= maxFilesToShow { + for _, file := range filesInDir { + output.WriteString(" ") + output.WriteString(file) + output.WriteString("\n") + } + } else { + output.WriteString(fmt.Sprintf(" (%d files)\n", len(filesInDir))) + } + } + } + + return output.String(), nil +} + +// writeAgentFiles writes CLAUDE.md and AGENTS.md files to the output directory. 
+func writeAgentFiles(outputDir string, data map[string]any) error { + content := prompts.MustExecuteTemplate("AGENTS.tmpl", data) + + // Write both CLAUDE.md and AGENTS.md + if err := os.WriteFile(filepath.Join(outputDir, "CLAUDE.md"), []byte(content), 0o644); err != nil { + return fmt.Errorf("failed to write CLAUDE.md: %w", err) + } + if err := os.WriteFile(filepath.Join(outputDir, "AGENTS.md"), []byte(content), 0o644); err != nil { + return fmt.Errorf("failed to write AGENTS.md: %w", err) + } + + return nil +} diff --git a/experimental/apps-mcp/cmd/init_template/empty.go b/experimental/apps-mcp/cmd/init_template/empty.go new file mode 100644 index 0000000000..5c0aba6f5f --- /dev/null +++ b/experimental/apps-mcp/cmd/init_template/empty.go @@ -0,0 +1,143 @@ +package init_template + +import ( + "errors" + "fmt" + "os" + "path/filepath" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/experimental/apps-mcp/lib/common" + "github.com/databricks/cli/experimental/apps-mcp/lib/prompts" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/template" + "github.com/spf13/cobra" +) + +// newEmptyCmd creates the empty subcommand for init-template. +func newEmptyCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "empty", + Short: "Initialize an empty project for custom resources", + Args: cobra.NoArgs, + Long: `Initialize an empty Databricks Asset Bundle project. + +Use this for deploying resource types OTHER than apps, jobs, or pipelines, such as: +- Dashboards (Lakeview dashboards) +- Alerts (SQL alerts) +- Model serving endpoints +- Clusters +- Schemas and tables +- Any other Databricks resources + +This creates a minimal project structure without sample code. For apps, jobs, or pipelines, +use the dedicated 'app', 'job', or 'pipeline' commands instead. + +Examples: + experimental apps-mcp tools init-template empty --name my_dashboard_project + experimental apps-mcp tools init-template empty --name my_alerts --language sql --catalog my_catalog + experimental apps-mcp tools init-template empty --name my_project --output-dir ./projects + +After initialization: + Add resource definitions in resources/ (e.g., resources/my_dashboard.dashboard.yml) + Then deploy: databricks bundle deploy --target dev +`, + } + + var name string + var catalog string + var language string + var outputDir string + + cmd.Flags().StringVar(&name, "name", "", "Project name (required)") + cmd.Flags().StringVar(&catalog, "catalog", "", "Default catalog for tables (defaults to workspace default)") + cmd.Flags().StringVar(&language, "language", "python", "Initial language: 'python', 'sql', or 'other'") + cmd.Flags().StringVar(&outputDir, "output-dir", "", "Directory to write the initialized template to") + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) error { + ctx := cmd.Context() + + if name == "" { + return errors.New("--name is required. 
Example: init-template empty --name my_project") + } + + if language != "python" && language != "sql" && language != "other" { + return fmt.Errorf("--language must be 'python', 'sql', or 'other', got '%s'", language) + } + + configMap := map[string]any{ + "project_name": name, + "include_job": "no", + "include_pipeline": "no", + "include_python": "no", + "serverless": "yes", + "personal_schemas": "yes", + "language_choice": language, + "lakeflow_only": "no", + "enable_pydabs": "no", + } + if catalog != "" { + configMap["default_catalog"] = catalog + } + + configFile, err := writeConfigToTempFile(configMap) + if err != nil { + return err + } + defer os.Remove(configFile) + + if outputDir != "" { + if err := os.MkdirAll(outputDir, 0o755); err != nil { + return fmt.Errorf("create output directory: %w", err) + } + } + + r := template.Resolver{ + TemplatePathOrUrl: string(template.DefaultMinimal), + ConfigFile: configFile, + OutputDir: outputDir, + } + + tmpl, err := r.Resolve(ctx) + if err != nil { + return err + } + defer tmpl.Reader.Cleanup(ctx) + + err = tmpl.Writer.Materialize(ctx, tmpl.Reader) + if err != nil { + return err + } + tmpl.Writer.LogTelemetry(ctx) + + actualOutputDir := name + if outputDir != "" { + actualOutputDir = filepath.Join(outputDir, name) + } + + absOutputDir, err := filepath.Abs(actualOutputDir) + if err != nil { + absOutputDir = actualOutputDir + } + fileCount := countFiles(absOutputDir) + cmdio.LogString(ctx, common.FormatProjectScaffoldSuccess("empty", "šŸ“¦", "default-minimal", absOutputDir, fileCount, "")) + + fileTree, err := generateFileTree(absOutputDir) + if err == nil && fileTree != "" { + cmdio.LogString(ctx, "\nFile structure:") + cmdio.LogString(ctx, fileTree) + } + + // Write CLAUDE.md and AGENTS.md files + if err := writeAgentFiles(absOutputDir, map[string]any{}); err != nil { + return fmt.Errorf("failed to write agent files: %w", err) + } + + targetMixed := prompts.MustExecuteTemplate("target_mixed.tmpl", map[string]any{}) + cmdio.LogString(ctx, targetMixed) + + return nil + } + return cmd +} diff --git a/experimental/apps-mcp/cmd/init_template/init_template.go b/experimental/apps-mcp/cmd/init_template/init_template.go new file mode 100644 index 0000000000..3e6adc2228 --- /dev/null +++ b/experimental/apps-mcp/cmd/init_template/init_template.go @@ -0,0 +1,25 @@ +package init_template + +import ( + "github.com/spf13/cobra" +) + +// NewInitTemplateCommand creates a command group for initializing project templates. +func NewInitTemplateCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "init-template", + Short: "Initialize project templates", + Long: `Initialize project templates for Databricks resources. 
+ +Subcommands: + app Initialize a Databricks App using the appkit template + job Initialize a job project using the default-python template + pipeline Initialize a Lakeflow pipeline project + empty Initialize an empty bundle for custom resources (dashboards, alerts, etc.)`, + } + cmd.AddCommand(newAppCmd()) + cmd.AddCommand(newJobCmd()) + cmd.AddCommand(newPipelineCmd()) + cmd.AddCommand(newEmptyCmd()) + return cmd +} diff --git a/experimental/apps-mcp/cmd/init_template/job.go b/experimental/apps-mcp/cmd/init_template/job.go new file mode 100644 index 0000000000..2107544ba8 --- /dev/null +++ b/experimental/apps-mcp/cmd/init_template/job.go @@ -0,0 +1,133 @@ +package init_template + +import ( + "errors" + "fmt" + "os" + "path/filepath" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/experimental/apps-mcp/lib/common" + "github.com/databricks/cli/experimental/apps-mcp/lib/prompts" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/template" + "github.com/spf13/cobra" +) + +// newJobCmd creates the job subcommand for init-template. +func newJobCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "job", + Short: "Initialize a job project using the default-python template", + Args: cobra.NoArgs, + Long: `Initialize a job project using the default-python template. + +This creates a project with: +- Python notebooks in src/ directory +- A wheel package defined in pyproject.toml +- Job definitions in resources/ using databricks.yml +- Serverless compute enabled by default +- Personal schemas for development + +Examples: + experimental apps-mcp tools init-template job --name my_job + experimental apps-mcp tools init-template job --name my_job --catalog my_catalog + experimental apps-mcp tools init-template job --name my_job --output-dir ./projects + +After initialization: + databricks bundle deploy --target dev +`, + } + + var name string + var catalog string + var outputDir string + + cmd.Flags().StringVar(&name, "name", "", "Project name (required)") + cmd.Flags().StringVar(&catalog, "catalog", "", "Default catalog for tables (defaults to workspace default)") + cmd.Flags().StringVar(&outputDir, "output-dir", "", "Directory to write the initialized template to") + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) error { + ctx := cmd.Context() + + if name == "" { + return errors.New("--name is required. 
Example: init-template job --name my_job") + } + + configMap := map[string]any{ + "project_name": name, + "include_job": "yes", + "include_pipeline": "no", + "include_python": "yes", + "serverless": "yes", + "personal_schemas": "yes", + } + if catalog != "" { + configMap["default_catalog"] = catalog + } + + configFile, err := writeConfigToTempFile(configMap) + if err != nil { + return err + } + defer os.Remove(configFile) + + if outputDir != "" { + if err := os.MkdirAll(outputDir, 0o755); err != nil { + return fmt.Errorf("create output directory: %w", err) + } + } + + r := template.Resolver{ + TemplatePathOrUrl: string(template.DefaultPython), + ConfigFile: configFile, + OutputDir: outputDir, + } + + tmpl, err := r.Resolve(ctx) + if err != nil { + return err + } + defer tmpl.Reader.Cleanup(ctx) + + err = tmpl.Writer.Materialize(ctx, tmpl.Reader) + if err != nil { + return err + } + tmpl.Writer.LogTelemetry(ctx) + + actualOutputDir := name + if outputDir != "" { + actualOutputDir = filepath.Join(outputDir, name) + } + + absOutputDir, err := filepath.Abs(actualOutputDir) + if err != nil { + absOutputDir = actualOutputDir + } + fileCount := countFiles(absOutputDir) + cmdio.LogString(ctx, common.FormatProjectScaffoldSuccess("job", "āš™ļø", "default-python", absOutputDir, fileCount, "")) + + fileTree, err := generateFileTree(absOutputDir) + if err == nil && fileTree != "" { + cmdio.LogString(ctx, "\nFile structure:") + cmdio.LogString(ctx, fileTree) + } + + // Write CLAUDE.md and AGENTS.md files + if err := writeAgentFiles(absOutputDir, map[string]any{}); err != nil { + return fmt.Errorf("failed to write agent files: %w", err) + } + + // Show L2 guidance: mixed (for adding any resource) + jobs (for developing jobs) + targetMixed := prompts.MustExecuteTemplate("target_mixed.tmpl", map[string]any{}) + cmdio.LogString(ctx, targetMixed) + + targetJobs := prompts.MustExecuteTemplate("target_jobs.tmpl", map[string]any{}) + cmdio.LogString(ctx, targetJobs) + + return nil + } + return cmd +} diff --git a/experimental/apps-mcp/cmd/init_template/pipeline.go b/experimental/apps-mcp/cmd/init_template/pipeline.go new file mode 100644 index 0000000000..563b6b14ec --- /dev/null +++ b/experimental/apps-mcp/cmd/init_template/pipeline.go @@ -0,0 +1,144 @@ +package init_template + +import ( + "errors" + "fmt" + "os" + "path/filepath" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/experimental/apps-mcp/lib/common" + "github.com/databricks/cli/experimental/apps-mcp/lib/prompts" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/template" + "github.com/spf13/cobra" +) + +// newPipelineCmd creates the pipeline subcommand for init-template. +func newPipelineCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "pipeline", + Short: "Initialize a Lakeflow pipeline project", + Args: cobra.NoArgs, + Long: `Initialize a Lakeflow Declarative Pipeline project. 
+ +This creates a project with: +- Pipeline definitions in src/ directory (Python or SQL) +- Pipeline configuration in resources/ using databricks.yml +- Serverless compute enabled by default +- Personal schemas for development + +Examples: + experimental apps-mcp tools init-template pipeline --name my_pipeline --language python + experimental apps-mcp tools init-template pipeline --name my_pipeline --language sql + experimental apps-mcp tools init-template pipeline --name my_pipeline --language python --catalog my_catalog + experimental apps-mcp tools init-template pipeline --name my_pipeline --language sql --output-dir ./projects + +After initialization: + databricks bundle deploy --target dev +`, + } + + var name string + var language string + var catalog string + var outputDir string + + cmd.Flags().StringVar(&name, "name", "", "Project name (required)") + cmd.Flags().StringVar(&language, "language", "", "Pipeline language: 'python' or 'sql' (required)") + cmd.Flags().StringVar(&catalog, "catalog", "", "Default catalog for tables (defaults to workspace default)") + cmd.Flags().StringVar(&outputDir, "output-dir", "", "Directory to write the initialized template to") + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) error { + ctx := cmd.Context() + + if name == "" { + return errors.New("--name is required. Example: init-template pipeline --name my_pipeline --language python") + } + if language == "" { + return errors.New("--language is required. Choose 'python' or 'sql'. Example: init-template pipeline --name my_pipeline --language python") + } + if language != "python" && language != "sql" { + return fmt.Errorf("--language must be 'python' or 'sql', got '%s'", language) + } + + configMap := map[string]any{ + "project_name": name, + "lakeflow_only": "yes", + "include_job": "no", + "include_pipeline": "yes", + "include_python": "no", + "serverless": "yes", + "personal_schemas": "yes", + "language": language, + } + if catalog != "" { + configMap["default_catalog"] = catalog + } + + configFile, err := writeConfigToTempFile(configMap) + if err != nil { + return err + } + defer os.Remove(configFile) + + if outputDir != "" { + if err := os.MkdirAll(outputDir, 0o755); err != nil { + return fmt.Errorf("create output directory: %w", err) + } + } + + r := template.Resolver{ + TemplatePathOrUrl: string(template.LakeflowPipelines), + ConfigFile: configFile, + OutputDir: outputDir, + } + + tmpl, err := r.Resolve(ctx) + if err != nil { + return err + } + defer tmpl.Reader.Cleanup(ctx) + + err = tmpl.Writer.Materialize(ctx, tmpl.Reader) + if err != nil { + return err + } + tmpl.Writer.LogTelemetry(ctx) + + actualOutputDir := name + if outputDir != "" { + actualOutputDir = filepath.Join(outputDir, name) + } + + absOutputDir, err := filepath.Abs(actualOutputDir) + if err != nil { + absOutputDir = actualOutputDir + } + fileCount := countFiles(absOutputDir) + extraDetails := "Language: " + language + cmdio.LogString(ctx, common.FormatProjectScaffoldSuccess("pipeline", "šŸ”„", "lakeflow-pipelines", absOutputDir, fileCount, extraDetails)) + + fileTree, err := generateFileTree(absOutputDir) + if err == nil && fileTree != "" { + cmdio.LogString(ctx, "\nFile structure:") + cmdio.LogString(ctx, fileTree) + } + + // Write CLAUDE.md and AGENTS.md files + if err := writeAgentFiles(absOutputDir, map[string]any{}); err != nil { + return fmt.Errorf("failed to write agent files: %w", err) + } + + // Show L2 guidance: mixed (for adding any resource) + pipelines (for 
developing pipelines) + targetMixed := prompts.MustExecuteTemplate("target_mixed.tmpl", map[string]any{}) + cmdio.LogString(ctx, targetMixed) + + targetPipelines := prompts.MustExecuteTemplate("target_pipelines.tmpl", map[string]any{}) + cmdio.LogString(ctx, targetPipelines) + + return nil + } + return cmd +} diff --git a/experimental/apps-mcp/cmd/tools.go b/experimental/apps-mcp/cmd/tools.go index 6d88451147..473bacddf3 100644 --- a/experimental/apps-mcp/cmd/tools.go +++ b/experimental/apps-mcp/cmd/tools.go @@ -1,6 +1,7 @@ package mcp import ( + "github.com/databricks/cli/experimental/apps-mcp/cmd/init_template" "github.com/spf13/cobra" ) @@ -13,7 +14,7 @@ func newToolsCmd() *cobra.Command { cmd.AddCommand(newQueryCmd()) cmd.AddCommand(newDiscoverSchemaCmd()) - cmd.AddCommand(newInitTemplateCmd()) + cmd.AddCommand(init_template.NewInitTemplateCommand()) cmd.AddCommand(newValidateCmd()) cmd.AddCommand(newDeployCmd()) diff --git a/experimental/apps-mcp/lib/common/output.go b/experimental/apps-mcp/lib/common/output.go index 7454950870..92e3f1d1a4 100644 --- a/experimental/apps-mcp/lib/common/output.go +++ b/experimental/apps-mcp/lib/common/output.go @@ -45,3 +45,18 @@ func FormatDeploymentFailure(appName, message string) string { return fmt.Sprintf("%sāŒ Deployment failed for '%s'\n\n%s\n", header, appName, message) } + +// FormatProjectScaffoldSuccess formats a success message for project scaffolding. +// projectType examples: "job", "pipeline", "empty" +// emoji examples: "āš™ļø" (job), "šŸ”„" (pipeline), "šŸ“¦" (empty) +// extraDetails can include additional info like "Language: python" +func FormatProjectScaffoldSuccess(projectType, emoji, templateName, workDir string, filesCopied int, extraDetails string) string { + header := FormatBrandedHeader(emoji, projectType+" project scaffolded successfully") + result := fmt.Sprintf("%sāœ… Created %s %s project at %s\n\n", + header, templateName, projectType, workDir) + if extraDetails != "" { + result += extraDetails + "\n" + } + result += fmt.Sprintf("Files copied: %d\n\nTemplate: %s\n", filesCopied, templateName) + return result +} diff --git a/experimental/apps-mcp/lib/detector/bundle_detector.go b/experimental/apps-mcp/lib/detector/bundle_detector.go index c88c5d19b1..859ff7ed18 100644 --- a/experimental/apps-mcp/lib/detector/bundle_detector.go +++ b/experimental/apps-mcp/lib/detector/bundle_detector.go @@ -40,15 +40,37 @@ func (d *BundleDetector) Detect(ctx context.Context, workDir string, detected *D } // extract target types from fully loaded resources - if len(b.Config.Resources.Apps) > 0 { + hasApps := len(b.Config.Resources.Apps) > 0 + hasJobs := len(b.Config.Resources.Jobs) > 0 + hasPipelines := len(b.Config.Resources.Pipelines) > 0 + + if hasApps { detected.TargetTypes = append(detected.TargetTypes, "apps") } - if len(b.Config.Resources.Jobs) > 0 { + if hasJobs { detected.TargetTypes = append(detected.TargetTypes, "jobs") } - if len(b.Config.Resources.Pipelines) > 0 { + if hasPipelines { detected.TargetTypes = append(detected.TargetTypes, "pipelines") } + // Include "mixed" guidance for all projects EXCEPT app-only projects. + // This provides general resource addition guidance (target_mixed.tmpl). + // We exclude app-only projects to provide a dedicated app development experience + // focused on app-specific patterns (target_apps.tmpl has comprehensive app guidance). 
+	isAppOnly := hasApps && !hasJobs && !hasPipelines && + len(b.Config.Resources.Clusters) == 0 && + len(b.Config.Resources.Dashboards) == 0 && + len(b.Config.Resources.Experiments) == 0 && + len(b.Config.Resources.ModelServingEndpoints) == 0 && + len(b.Config.Resources.RegisteredModels) == 0 && + len(b.Config.Resources.Schemas) == 0 && + len(b.Config.Resources.QualityMonitors) == 0 && + len(b.Config.Resources.Volumes) == 0 + + if !isAppOnly { + detected.TargetTypes = append(detected.TargetTypes, "mixed") + } + return nil } diff --git a/experimental/apps-mcp/lib/prompts/AGENTS.tmpl b/experimental/apps-mcp/lib/prompts/AGENTS.tmpl new file mode 100644 index 0000000000..df7d451d75 --- /dev/null +++ b/experimental/apps-mcp/lib/prompts/AGENTS.tmpl @@ -0,0 +1,35 @@ +{{- /* + * Template for the AGENTS.md / CLAUDE.md file created in new projects. + * + * The goal of this file is to reinforce to agents that they need to call the databricks_discover tool. + * A secondary goal is to offer a placeholder to users to extend with their own instructions. + * It is a non-goal to inline all instructions. + * + * Note the app template currently has its own AGENTS.md / CLAUDE.md file. + * + */ -}} + +# Databricks Asset Bundles Project + +This project uses Databricks Asset Bundles for deployment. Add project-specific instructions below. + +## For AI Agents: MANDATORY First Action + +**BEFORE any other action, call `mcp__databricks-mcp__databricks_discover` with this project's path.** + +This is required because the discover tool: +- Detects project state (new, deployed, running) and provides relevant guidance +- Shows available CLI commands and recommended next steps +- Configures workspace authentication + +Without this context, you cannot effectively help with this project. + +If the tool is unavailable, the user needs to set up the Databricks CLI MCP server: +1. Install CLI: https://docs.databricks.com/dev-tools/cli/install +2. Run: `databricks experimental apps-mcp install` + +--- + +## Project Instructions + + diff --git a/experimental/apps-mcp/lib/prompts/flow.tmpl b/experimental/apps-mcp/lib/prompts/flow.tmpl index 40da4cf0f7..12a1eafbdd 100644 --- a/experimental/apps-mcp/lib/prompts/flow.tmpl +++ b/experimental/apps-mcp/lib/prompts/flow.tmpl @@ -25,14 +25,29 @@ Use `invoke_databricks_cli '<command>'` to run any Databricks CLI command. ### Project scaffolding +IMPORTANT: Always use 'experimental apps-mcp tools init-template' commands below instead of 'databricks bundle init'. +The init-template commands create agent-friendly projects with AGENTS.md/CLAUDE.md guidance files and proper MCP integration.
+ For apps: +invoke_databricks_cli 'experimental apps-mcp tools init-template app --name my-app --description "My app description"' -invoke_databricks_cli 'experimental apps-mcp tools init-template --name my-app --description "My app description"' +For jobs (Python notebooks with wheel package): +invoke_databricks_cli 'experimental apps-mcp tools init-template job --name my_job' +invoke_databricks_cli 'experimental apps-mcp tools init-template job --name my_job --catalog my_catalog' -- App name must be ≤26 characters (dev- prefix adds 4 chars, max total 30) -- Use lowercase letters, numbers, and hyphens only +For pipelines (Lakeflow Declarative Pipelines): +invoke_databricks_cli 'experimental apps-mcp tools init-template pipeline --name my_pipeline --language python' +invoke_databricks_cli 'experimental apps-mcp tools init-template pipeline --name my_pipeline --language sql --catalog my_catalog' +Note: --language is required (python or sql) -Other types of projects are not yet supported. +For custom resources (dashboards, alerts, model serving, etc.): +invoke_databricks_cli 'experimental apps-mcp tools init-template empty --name my_project' +Note: Use this for resources OTHER than apps, jobs, or pipelines + +Notes: +- App name must be ≤26 characters (dev- prefix adds 4 chars, max total 30) +- Job/pipeline/project names: letters, numbers, underscores only +- --catalog defaults to workspace default catalog{{if .DefaultCatalog}} (currently '{{.DefaultCatalog}}'){{end}} ### Custom SQL Queries diff --git a/experimental/apps-mcp/lib/prompts/target_jobs.tmpl b/experimental/apps-mcp/lib/prompts/target_jobs.tmpl new file mode 100644 index 0000000000..53762fc23e --- /dev/null +++ b/experimental/apps-mcp/lib/prompts/target_jobs.tmpl @@ -0,0 +1,57 @@ +{{- /* + * L2: Target-specific guidance for Lakeflow Jobs. + * + * Injected when: target type "jobs" is detected or after init-template job. + * Contains: job-specific development patterns, task configuration, code examples. + * Note: For adding NEW resources (dashboards, alerts, etc.), see target_mixed.tmpl guidance. + */ -}} + +## Lakeflow Jobs Development + +This guidance is for DEVELOPING jobs in this project. To ADD other resource types (dashboards, alerts, pipelines, etc.), see the general resource guidance above. + +### Project Structure +- `src/` - Python notebooks (.ipynb) and source code +- `resources/` - Job definitions in databricks.yml format + +### Configuring Tasks +Edit `resources/<project_name>.job.yml` to configure tasks: + +```yaml +tasks: + - task_key: my_notebook + notebook_task: + notebook_path: ../src/my_notebook.ipynb + - task_key: my_python + python_wheel_task: + package_name: my_package + entry_point: main +``` + +Task types: `notebook_task`, `python_wheel_task`, `spark_python_task`, `pipeline_task`, `sql_task` + +### Job Parameters +Parameters defined at job level are passed to ALL tasks (no need to repeat per task).
Example: +```yaml +resources: + jobs: + my_job: + parameters: + - name: catalog + default: ${var.catalog} + - name: schema + default: ${var.schema} +``` + +### Writing Notebook Code +- Use `spark.read.table("catalog.schema.table")` to read tables +- Use `spark.sql("SELECT ...")` for SQL queries +- Use `dbutils.widgets` for parameters + +### Unit Testing +Run unit tests locally with: `uv run pytest` + +### Documentation +- Lakeflow Jobs: https://docs.databricks.com/jobs +- Task types: https://docs.databricks.com/jobs/configure-task +- Databricks Asset Bundles / yml format examples: https://docs.databricks.com/dev-tools/bundles/examples diff --git a/experimental/apps-mcp/lib/prompts/target_mixed.tmpl b/experimental/apps-mcp/lib/prompts/target_mixed.tmpl new file mode 100644 index 0000000000..84b094ff20 --- /dev/null +++ b/experimental/apps-mcp/lib/prompts/target_mixed.tmpl @@ -0,0 +1,58 @@ +{{- /* + * L2: Target-specific guidance for mixed/custom resource projects. + * + * Injected when: empty projects or projects with mixed resource types. Not for app-only projects. + * Contains: how to add any resource type, deployment commands, documentation. + */ -}} + +## Adding Databricks Resources + +Add resources by creating YAML files in resources/: + +**Jobs** - `resources/my_job.job.yml`: +```yaml +resources: + jobs: + my_job: + name: my_job + tasks: + - task_key: main + notebook_task: + notebook_path: ../src/notebook.py + new_cluster: + num_workers: 2 + spark_version: "15.4.x-scala2.12" + node_type_id: "i3.xlarge" +``` + +**Pipelines** (Lakeflow Declarative Pipelines) - `resources/my_pipeline.pipeline.yml`: +```yaml +resources: + pipelines: + my_pipeline: + name: my_pipeline + catalog: ${var.catalog} + target: ${var.schema} + libraries: + - notebook: + path: ../src/pipeline.py +``` + +**Dashboards** - `resources/my_dashboard.dashboard.yml` +**Alerts** - `resources/my_alert.alert.yml` +**Model Serving** - `resources/my_endpoint.yml` +**Apps** - `resources/my_app.app.yml` + +**Other resource types**: clusters, schemas, volumes, registered_models, experiments, quality_monitors + +### Deployment +For dev targets you can deploy without user consent. This allows you to run resources on the workspace too! + + invoke_databricks_cli 'bundle deploy --target dev' + invoke_databricks_cli 'bundle run --target dev' + +View status with `invoke_databricks_cli 'bundle summary'`. + +### Documentation +- Resource types reference: https://docs.databricks.com/dev-tools/bundles/resources +- YAML examples: https://docs.databricks.com/dev-tools/bundles/examples diff --git a/experimental/apps-mcp/lib/prompts/target_pipelines.tmpl b/experimental/apps-mcp/lib/prompts/target_pipelines.tmpl new file mode 100644 index 0000000000..f0970222c9 --- /dev/null +++ b/experimental/apps-mcp/lib/prompts/target_pipelines.tmpl @@ -0,0 +1,61 @@ +{{- /* + * L2: Target-specific guidance for Lakeflow Declarative Pipelines. + * + * Injected when: target type "pipelines" is detected or after init-template pipeline. + * Contains: pipeline-specific development patterns, transformation syntax, scheduling. + * Note: For adding NEW resources (dashboards, alerts, etc.), see target_mixed.tmpl guidance. + */ -}} + +## Lakeflow Declarative Pipelines Development + +This guidance is for DEVELOPING pipelines in this project. To ADD other resource types (dashboards, alerts, jobs, etc.), see the general resource guidance above. + +Lakeflow Declarative Pipelines (formerly Delta Live Tables) is a framework for building batch and streaming data pipelines. 
+ +### Project Structure +- `src/` - Pipeline transformations (Python or SQL) +- `resources/` - Pipeline configuration in databricks.yml format + +### Adding Transformations + +**Python** - Create `.py` files in `src/`: +```python +from pyspark import pipelines as dp + +@dp.table +def my_table(): + return spark.read.table("catalog.schema.source") +``` + +By convention, each dataset definition like the @dp.table definition above should be in a file named +like the dataset name, e.g. `src/my_table.py`. + +**SQL** - Create `.sql` files in `src/`: +```sql +CREATE MATERIALIZED VIEW my_view AS +SELECT * FROM catalog.schema.source +``` + +This example would live in `src/my_view.sql`. + +Use `CREATE STREAMING TABLE` for incremental ingestion, `CREATE MATERIALIZED VIEW` for transformations. + +### Scheduling Pipelines +To schedule a pipeline, make sure you have a job that triggers it, like `resources/<project_name>.job.yml`: +```yaml +resources: + jobs: + my_pipeline_job: + trigger: + periodic: + interval: 1 + unit: DAYS + tasks: + - task_key: refresh_pipeline + pipeline_task: + pipeline_id: ${resources.pipelines.my_pipeline.id} +``` + +### Documentation +- Lakeflow Declarative Pipelines: https://docs.databricks.com/ldp +- Databricks Asset Bundles / yml format examples: https://docs.databricks.com/dev-tools/bundles/examples diff --git a/experimental/apps-mcp/lib/providers/clitools/discover.go b/experimental/apps-mcp/lib/providers/clitools/discover.go index 3ac1a1d6ab..5f51e82bb2 100644 --- a/experimental/apps-mcp/lib/providers/clitools/discover.go +++ b/experimental/apps-mcp/lib/providers/clitools/discover.go @@ -24,16 +24,35 @@ func Discover(ctx context.Context, workingDirectory string) (string, error) { currentProfile := middlewares.GetDatabricksProfile(ctx) profiles := middlewares.GetAvailableProfiles(ctx) + // Get default catalog (non-fatal if unavailable) + defaultCatalog := getDefaultCatalog(ctx) + // run detectors to identify project context registry := detector.NewRegistry() detected := registry.Detect(ctx, workingDirectory) - return generateDiscoverGuidance(ctx, warehouse, currentProfile, profiles, detected), nil + return generateDiscoverGuidance(ctx, warehouse, currentProfile, profiles, defaultCatalog, detected), nil +} + +// getDefaultCatalog fetches the workspace default catalog name. +// Returns empty string if Unity Catalog is not available or on error. +func getDefaultCatalog(ctx context.Context) string { + w, err := middlewares.GetDatabricksClient(ctx) + if err != nil { + return "" + } + + metastore, err := w.Metastores.Current(ctx) + if err != nil { + return "" // gracefully handle any error (no UC, permission denied, etc.) + } + + return metastore.DefaultCatalogName } // generateDiscoverGuidance creates guidance with L1 (flow) + L2 (target) layers.
-func generateDiscoverGuidance(ctx context.Context, warehouse *sql.EndpointInfo, currentProfile string, profiles profile.Profiles, detected *detector.DetectedContext) string { - data := buildTemplateData(warehouse, currentProfile, profiles) +func generateDiscoverGuidance(ctx context.Context, warehouse *sql.EndpointInfo, currentProfile string, profiles profile.Profiles, defaultCatalog string, detected *detector.DetectedContext) string { + data := buildTemplateData(warehouse, currentProfile, profiles, defaultCatalog) // L1: always include flow guidance result := prompts.MustExecuteTemplate("flow.tmpl", data) @@ -61,7 +80,7 @@ func generateDiscoverGuidance(ctx context.Context, warehouse *sql.EndpointInfo, return result } -func buildTemplateData(warehouse *sql.EndpointInfo, currentProfile string, profiles profile.Profiles) map[string]string { +func buildTemplateData(warehouse *sql.EndpointInfo, currentProfile string, profiles profile.Profiles, defaultCatalog string) map[string]string { workspaceInfo := "Current Workspace Profile: " + currentProfile if len(profiles) > 0 { var currentHost string @@ -106,10 +125,11 @@ func buildTemplateData(warehouse *sql.EndpointInfo, currentProfile string, profi } return map[string]string{ - "WorkspaceInfo": workspaceInfo, - "WarehouseName": warehouseName, - "WarehouseID": warehouseID, - "ProfilesInfo": profilesInfo, - "Profile": currentProfile, + "WorkspaceInfo": workspaceInfo, + "WarehouseName": warehouseName, + "WarehouseID": warehouseID, + "ProfilesInfo": profilesInfo, + "Profile": currentProfile, + "DefaultCatalog": defaultCatalog, } } diff --git a/experimental/apps-mcp/lib/providers/clitools/provider.go b/experimental/apps-mcp/lib/providers/clitools/provider.go index e9d146f21a..36d62d80d5 100644 --- a/experimental/apps-mcp/lib/providers/clitools/provider.go +++ b/experimental/apps-mcp/lib/providers/clitools/provider.go @@ -93,7 +93,7 @@ func (p *Provider) RegisterTools(server *mcpsdk.Server) error { mcpsdk.AddTool(server, &mcpsdk.Tool{ Name: "databricks_discover", - Description: "Discover available Databricks workspaces, warehouses, and get workflow recommendations. Call this FIRST when planning ANY Databricks work involving apps, dashboards, pipelines, jobs, bundles, or SQL workflows. Returns workspace capabilities and recommended tooling.", + Description: "Discover how to perform Databricks work. MANDATORY: Call this FIRST before ANY Databricks work. Do not skip this step. This tool detects project context, configures authentication, and returns required guidance for apps, pipelines, jobs, bundles, dashboards, and SQL workflows. 
Without calling this first, you cannot effectively help with Databricks tasks.", }, func(ctx context.Context, req *mcpsdk.CallToolRequest, args DiscoverInput) (*mcpsdk.CallToolResult, any, error) { log.Debugf(ctx, "databricks_discover called: working_directory=%s", args.WorkingDirectory) From 5db89c12ea9717509c34e8771e6da6d92a6c7bd2 Mon Sep 17 00:00:00 2001 From: Lennart Kats Date: Fri, 26 Dec 2025 18:28:15 +0100 Subject: [PATCH 2/5] Add acceptance tests for init-template commands and remove unnecessary configMap conditionals - Add acceptance tests for job, pipeline, empty, and app init-template subcommands - Remove unnecessary 'if catalog != ""' checks (templates handle empty values) - Fix logdiag double-initialization in bundle detector - Simplify FormatProjectScaffoldSuccess output formatting --- .../apps/init-template/app/out.test.toml | 5 + acceptance/apps/init-template/app/output.txt | 1 + acceptance/apps/init-template/app/script | 4 + acceptance/apps/init-template/app/test.toml | 2 + .../apps/init-template/empty/out.test.toml | 5 + .../apps/init-template/empty/output.txt | 1 + acceptance/apps/init-template/empty/script | 4 + acceptance/apps/init-template/empty/test.toml | 2 + .../apps/init-template/job/out.test.toml | 5 + acceptance/apps/init-template/job/output.txt | 1 + acceptance/apps/init-template/job/script | 4 + acceptance/apps/init-template/job/test.toml | 2 + .../apps/init-template/pipeline/out.test.toml | 5 + .../apps/init-template/pipeline/output.txt | 1 + acceptance/apps/init-template/pipeline/script | 4 + .../apps/init-template/pipeline/test.toml | 2 + experimental/apps-mcp/cmd/apps_mcp.go | 2 +- .../apps-mcp/cmd/init_template/app.go | 124 ++++-------------- .../apps-mcp/cmd/init_template/common.go | 83 ++++++++++++ .../apps-mcp/cmd/init_template/empty.go | 76 +---------- .../apps-mcp/cmd/init_template/job.go | 75 +---------- .../apps-mcp/cmd/init_template/pipeline.go | 80 +---------- experimental/apps-mcp/lib/common/output.go | 23 +--- .../apps-mcp/lib/common/output_test.go | 6 +- .../apps-mcp/lib/detector/bundle_detector.go | 11 +- 25 files changed, 184 insertions(+), 344 deletions(-) create mode 100644 acceptance/apps/init-template/app/out.test.toml create mode 100644 acceptance/apps/init-template/app/output.txt create mode 100644 acceptance/apps/init-template/app/script create mode 100644 acceptance/apps/init-template/app/test.toml create mode 100644 acceptance/apps/init-template/empty/out.test.toml create mode 100644 acceptance/apps/init-template/empty/output.txt create mode 100644 acceptance/apps/init-template/empty/script create mode 100644 acceptance/apps/init-template/empty/test.toml create mode 100644 acceptance/apps/init-template/job/out.test.toml create mode 100644 acceptance/apps/init-template/job/output.txt create mode 100644 acceptance/apps/init-template/job/script create mode 100644 acceptance/apps/init-template/job/test.toml create mode 100644 acceptance/apps/init-template/pipeline/out.test.toml create mode 100644 acceptance/apps/init-template/pipeline/output.txt create mode 100644 acceptance/apps/init-template/pipeline/script create mode 100644 acceptance/apps/init-template/pipeline/test.toml diff --git a/acceptance/apps/init-template/app/out.test.toml b/acceptance/apps/init-template/app/out.test.toml new file mode 100644 index 0000000000..d560f1de04 --- /dev/null +++ b/acceptance/apps/init-template/app/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = false + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["terraform", "direct"] diff --git 
a/acceptance/apps/init-template/app/output.txt b/acceptance/apps/init-template/app/output.txt new file mode 100644 index 0000000000..a522103bfa --- /dev/null +++ b/acceptance/apps/init-template/app/output.txt @@ -0,0 +1 @@ +āœ“ Template instantiation succeeded diff --git a/acceptance/apps/init-template/app/script b/acceptance/apps/init-template/app/script new file mode 100644 index 0000000000..1f38796b6c --- /dev/null +++ b/acceptance/apps/init-template/app/script @@ -0,0 +1,4 @@ +#!/bin/bash +$CLI experimental apps-mcp tools init-template app --name test_app --sql-warehouse-id abc123 --output-dir output > /dev/null 2>&1 +echo "āœ“ Template instantiation succeeded" +rm -rf output diff --git a/acceptance/apps/init-template/app/test.toml b/acceptance/apps/init-template/app/test.toml new file mode 100644 index 0000000000..7d36fb9dc1 --- /dev/null +++ b/acceptance/apps/init-template/app/test.toml @@ -0,0 +1,2 @@ +Local = true +Cloud = false diff --git a/acceptance/apps/init-template/empty/out.test.toml b/acceptance/apps/init-template/empty/out.test.toml new file mode 100644 index 0000000000..d560f1de04 --- /dev/null +++ b/acceptance/apps/init-template/empty/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = false + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["terraform", "direct"] diff --git a/acceptance/apps/init-template/empty/output.txt b/acceptance/apps/init-template/empty/output.txt new file mode 100644 index 0000000000..a522103bfa --- /dev/null +++ b/acceptance/apps/init-template/empty/output.txt @@ -0,0 +1 @@ +āœ“ Template instantiation succeeded diff --git a/acceptance/apps/init-template/empty/script b/acceptance/apps/init-template/empty/script new file mode 100644 index 0000000000..5d5a80bd97 --- /dev/null +++ b/acceptance/apps/init-template/empty/script @@ -0,0 +1,4 @@ +#!/bin/bash +$CLI experimental apps-mcp tools init-template empty --name test_empty --catalog main --output-dir output > /dev/null 2>&1 +echo "āœ“ Template instantiation succeeded" +rm -rf output diff --git a/acceptance/apps/init-template/empty/test.toml b/acceptance/apps/init-template/empty/test.toml new file mode 100644 index 0000000000..7d36fb9dc1 --- /dev/null +++ b/acceptance/apps/init-template/empty/test.toml @@ -0,0 +1,2 @@ +Local = true +Cloud = false diff --git a/acceptance/apps/init-template/job/out.test.toml b/acceptance/apps/init-template/job/out.test.toml new file mode 100644 index 0000000000..d560f1de04 --- /dev/null +++ b/acceptance/apps/init-template/job/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = false + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["terraform", "direct"] diff --git a/acceptance/apps/init-template/job/output.txt b/acceptance/apps/init-template/job/output.txt new file mode 100644 index 0000000000..a522103bfa --- /dev/null +++ b/acceptance/apps/init-template/job/output.txt @@ -0,0 +1 @@ +āœ“ Template instantiation succeeded diff --git a/acceptance/apps/init-template/job/script b/acceptance/apps/init-template/job/script new file mode 100644 index 0000000000..8464089885 --- /dev/null +++ b/acceptance/apps/init-template/job/script @@ -0,0 +1,4 @@ +#!/bin/bash +$CLI experimental apps-mcp tools init-template job --name test_job --catalog main --output-dir output > /dev/null 2>&1 || exit 1 +echo "āœ“ Template instantiation succeeded" +rm -rf output diff --git a/acceptance/apps/init-template/job/test.toml b/acceptance/apps/init-template/job/test.toml new file mode 100644 index 0000000000..7d36fb9dc1 --- /dev/null +++ b/acceptance/apps/init-template/job/test.toml @@ -0,0 +1,2 @@ +Local = 
true +Cloud = false diff --git a/acceptance/apps/init-template/pipeline/out.test.toml b/acceptance/apps/init-template/pipeline/out.test.toml new file mode 100644 index 0000000000..d560f1de04 --- /dev/null +++ b/acceptance/apps/init-template/pipeline/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = false + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["terraform", "direct"] diff --git a/acceptance/apps/init-template/pipeline/output.txt b/acceptance/apps/init-template/pipeline/output.txt new file mode 100644 index 0000000000..a522103bfa --- /dev/null +++ b/acceptance/apps/init-template/pipeline/output.txt @@ -0,0 +1 @@ +āœ“ Template instantiation succeeded diff --git a/acceptance/apps/init-template/pipeline/script b/acceptance/apps/init-template/pipeline/script new file mode 100644 index 0000000000..0d73aae59f --- /dev/null +++ b/acceptance/apps/init-template/pipeline/script @@ -0,0 +1,4 @@ +#!/bin/bash +$CLI experimental apps-mcp tools init-template pipeline --name test_pipeline --language python --catalog main --output-dir output > /dev/null 2>&1 +echo "āœ“ Template instantiation succeeded" +rm -rf output diff --git a/acceptance/apps/init-template/pipeline/test.toml b/acceptance/apps/init-template/pipeline/test.toml new file mode 100644 index 0000000000..7d36fb9dc1 --- /dev/null +++ b/acceptance/apps/init-template/pipeline/test.toml @@ -0,0 +1,2 @@ +Local = true +Cloud = false diff --git a/experimental/apps-mcp/cmd/apps_mcp.go b/experimental/apps-mcp/cmd/apps_mcp.go index 67bc8213be..ff19ef6912 100644 --- a/experimental/apps-mcp/cmd/apps_mcp.go +++ b/experimental/apps-mcp/cmd/apps_mcp.go @@ -1,8 +1,8 @@ package mcp import ( - mcplib "github.com/databricks/cli/experimental/apps-mcp/lib" "github.com/databricks/cli/experimental/apps-mcp/cmd/init_template" + mcplib "github.com/databricks/cli/experimental/apps-mcp/lib" "github.com/databricks/cli/experimental/apps-mcp/lib/server" "github.com/databricks/cli/libs/log" "github.com/spf13/cobra" diff --git a/experimental/apps-mcp/cmd/init_template/app.go b/experimental/apps-mcp/cmd/init_template/app.go index a27da91871..227eb55bf6 100644 --- a/experimental/apps-mcp/cmd/init_template/app.go +++ b/experimental/apps-mcp/cmd/init_template/app.go @@ -9,8 +9,6 @@ import ( "path/filepath" "github.com/databricks/cli/cmd/root" - "github.com/databricks/cli/experimental/apps-mcp/lib/common" - "github.com/databricks/cli/experimental/apps-mcp/lib/prompts" "github.com/databricks/cli/experimental/apps-mcp/lib/state" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/template" @@ -24,6 +22,19 @@ const ( templatePathEnvVar = "DATABRICKS_APPKIT_TEMPLATE_PATH" ) +func readClaudeMd(ctx context.Context, projectDir string) { + claudePath := filepath.Join(projectDir, "CLAUDE.md") + content, err := os.ReadFile(claudePath) + if err != nil { + cmdio.LogString(ctx, "\nConsult with CLAUDE.md provided in the bundle if present.") + return + } + + cmdio.LogString(ctx, "\n=== CLAUDE.md ===") + cmdio.LogString(ctx, string(content)) + cmdio.LogString(ctx, "=================\n") +} + func validateAppNameLength(projectName string) error { const maxAppNameLength = 30 const devTargetPrefix = "dev-" @@ -38,46 +49,6 @@ func validateAppNameLength(projectName string) error { return nil } -func readClaudeMd(ctx context.Context, configFile string) { - showFallback := func() { - cmdio.LogString(ctx, "\nConsult with CLAUDE.md provided in the bundle if present.") - } - - if configFile == "" { - showFallback() - return - } - - configBytes, err := os.ReadFile(configFile) - if 
err != nil { - showFallback() - return - } - - var config map[string]any - if err := json.Unmarshal(configBytes, &config); err != nil { - showFallback() - return - } - - projectName, ok := config["project_name"].(string) - if !ok || projectName == "" { - showFallback() - return - } - - claudePath := filepath.Join(".", projectName, "CLAUDE.md") - content, err := os.ReadFile(claudePath) - if err != nil { - showFallback() - return - } - - cmdio.LogString(ctx, "\n=== CLAUDE.md ===") - cmdio.LogString(ctx, string(content)) - cmdio.LogString(ctx, "=================\n") -} - // newAppCmd creates the app subcommand for init-template. func newAppCmd() *cobra.Command { cmd := &cobra.Command{ @@ -184,73 +155,26 @@ After initialization: configMap["app_description"] = description } - // Write config to temp file - configFile, err := writeConfigToTempFile(configMap) + err := MaterializeTemplate(ctx, TemplateConfig{ + TemplatePath: templatePathOrUrl, + TemplateName: "appkit", + TemplateDir: templateDir, + Branch: branch, + }, configMap, name, outputDir) if err != nil { return err } - defer os.Remove(configFile) - // Create output directory if specified and doesn't exist - if outputDir != "" { - if err := os.MkdirAll(outputDir, 0o755); err != nil { - return fmt.Errorf("create output directory: %w", err) - } - } - - r := template.Resolver{ - TemplatePathOrUrl: templatePathOrUrl, - ConfigFile: configFile, - OutputDir: outputDir, - TemplateDir: templateDir, - Branch: branch, - } - - tmpl, err := r.Resolve(ctx) - if err != nil { - return err - } - defer tmpl.Reader.Cleanup(ctx) - - err = tmpl.Writer.Materialize(ctx, tmpl.Reader) - if err != nil { - return err - } - tmpl.Writer.LogTelemetry(ctx) - - // Determine actual output directory (template writes to subdirectory with project name) - actualOutputDir := name - if outputDir != "" { - actualOutputDir = filepath.Join(outputDir, name) - } - - // Count files and get absolute path - absOutputDir, err := filepath.Abs(actualOutputDir) - if err != nil { - absOutputDir = actualOutputDir - } - fileCount := countFiles(absOutputDir) - cmdio.LogString(ctx, common.FormatScaffoldSuccess("appkit", absOutputDir, fileCount)) - - // Generate and print file tree structure - fileTree, err := generateFileTree(absOutputDir) - if err == nil && fileTree != "" { - cmdio.LogString(ctx, "\nFile structure:") - cmdio.LogString(ctx, fileTree) - } - - // Inject L2 (target-specific guidance for apps) - targetApps := prompts.MustExecuteTemplate("target_apps.tmpl", map[string]any{}) - cmdio.LogString(ctx, targetApps) + projectDir := filepath.Join(outputDir, name) // Inject L3 (template-specific guidance from CLAUDE.md) - readClaudeMd(ctx, configFile) + // (we only do this for the app template; other templates use a generic CLAUDE.md) + readClaudeMd(ctx, projectDir) - // Save initial scaffolded state - if err := state.SaveState(absOutputDir, state.NewScaffolded()); err != nil { + // Save initial scaffolded state for app state machine + if err := state.SaveState(projectDir, state.NewScaffolded()); err != nil { return fmt.Errorf("failed to save project state: %w", err) } - return nil } return cmd diff --git a/experimental/apps-mcp/cmd/init_template/common.go b/experimental/apps-mcp/cmd/init_template/common.go index 87d720d7af..8caa2ff3f2 100644 --- a/experimental/apps-mcp/cmd/init_template/common.go +++ b/experimental/apps-mcp/cmd/init_template/common.go @@ -1,6 +1,7 @@ package init_template import ( + "context" "encoding/json" "fmt" "os" @@ -8,9 +9,91 @@ import ( "sort" "strings" + 
"github.com/databricks/cli/experimental/apps-mcp/lib/common" + "github.com/databricks/cli/experimental/apps-mcp/lib/detector" "github.com/databricks/cli/experimental/apps-mcp/lib/prompts" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/template" ) +// TemplateConfig holds configuration for template materialization. +type TemplateConfig struct { + TemplatePath string // e.g., template.DefaultPython or remote URL + TemplateName string // e.g., "default-python", "lakeflow-pipelines", "appkit" + TemplateDir string // subdirectory within repo (for remote templates) + Branch string // git branch (for remote templates) +} + +// MaterializeTemplate handles the common template materialization workflow. +func MaterializeTemplate(ctx context.Context, cfg TemplateConfig, configMap map[string]any, name, outputDir string) error { + configFile, err := writeConfigToTempFile(configMap) + if err != nil { + return err + } + defer os.Remove(configFile) + + if outputDir != "" { + if err := os.MkdirAll(outputDir, 0o755); err != nil { + return fmt.Errorf("create output directory: %w", err) + } + } + + r := template.Resolver{ + TemplatePathOrUrl: cfg.TemplatePath, + ConfigFile: configFile, + OutputDir: outputDir, + TemplateDir: cfg.TemplateDir, + Branch: cfg.Branch, + } + + tmpl, err := r.Resolve(ctx) + if err != nil { + return err + } + defer tmpl.Reader.Cleanup(ctx) + + if err := tmpl.Writer.Materialize(ctx, tmpl.Reader); err != nil { + return err + } + tmpl.Writer.LogTelemetry(ctx) + + actualOutputDir := name + if outputDir != "" { + actualOutputDir = filepath.Join(outputDir, name) + } + + absOutputDir, err := filepath.Abs(actualOutputDir) + if err != nil { + absOutputDir = actualOutputDir + } + + fileCount := countFiles(absOutputDir) + cmdio.LogString(ctx, common.FormatProjectScaffoldSuccess(cfg.TemplateName, absOutputDir, fileCount)) + + fileTree, err := generateFileTree(absOutputDir) + if err == nil && fileTree != "" { + cmdio.LogString(ctx, "\nFile structure:") + cmdio.LogString(ctx, fileTree) + } + + if err := writeAgentFiles(absOutputDir, map[string]any{}); err != nil { + return fmt.Errorf("failed to write agent files: %w", err) + } + + // Detect project type and inject appropriate L2 guidance + registry := detector.NewRegistry() + detected := registry.Detect(ctx, absOutputDir) + for _, targetType := range detected.TargetTypes { + templateName := fmt.Sprintf("target_%s.tmpl", targetType) + if prompts.TemplateExists(templateName) { + content := prompts.MustExecuteTemplate(templateName, map[string]any{}) + cmdio.LogString(ctx, content) + } + } + + return nil +} + // countFiles counts the number of files in a directory. 
func countFiles(dir string) int { count := 0 diff --git a/experimental/apps-mcp/cmd/init_template/empty.go b/experimental/apps-mcp/cmd/init_template/empty.go index 5c0aba6f5f..a2c5e76239 100644 --- a/experimental/apps-mcp/cmd/init_template/empty.go +++ b/experimental/apps-mcp/cmd/init_template/empty.go @@ -3,13 +3,8 @@ package init_template import ( "errors" "fmt" - "os" - "path/filepath" "github.com/databricks/cli/cmd/root" - "github.com/databricks/cli/experimental/apps-mcp/lib/common" - "github.com/databricks/cli/experimental/apps-mcp/lib/prompts" - "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/template" "github.com/spf13/cobra" ) @@ -68,76 +63,15 @@ After initialization: configMap := map[string]any{ "project_name": name, - "include_job": "no", - "include_pipeline": "no", - "include_python": "no", - "serverless": "yes", "personal_schemas": "yes", "language_choice": language, - "lakeflow_only": "no", - "enable_pydabs": "no", + "default_catalog": catalog, } - if catalog != "" { - configMap["default_catalog"] = catalog - } - - configFile, err := writeConfigToTempFile(configMap) - if err != nil { - return err - } - defer os.Remove(configFile) - - if outputDir != "" { - if err := os.MkdirAll(outputDir, 0o755); err != nil { - return fmt.Errorf("create output directory: %w", err) - } - } - - r := template.Resolver{ - TemplatePathOrUrl: string(template.DefaultMinimal), - ConfigFile: configFile, - OutputDir: outputDir, - } - - tmpl, err := r.Resolve(ctx) - if err != nil { - return err - } - defer tmpl.Reader.Cleanup(ctx) - - err = tmpl.Writer.Materialize(ctx, tmpl.Reader) - if err != nil { - return err - } - tmpl.Writer.LogTelemetry(ctx) - - actualOutputDir := name - if outputDir != "" { - actualOutputDir = filepath.Join(outputDir, name) - } - - absOutputDir, err := filepath.Abs(actualOutputDir) - if err != nil { - absOutputDir = actualOutputDir - } - fileCount := countFiles(absOutputDir) - cmdio.LogString(ctx, common.FormatProjectScaffoldSuccess("empty", "šŸ“¦", "default-minimal", absOutputDir, fileCount, "")) - - fileTree, err := generateFileTree(absOutputDir) - if err == nil && fileTree != "" { - cmdio.LogString(ctx, "\nFile structure:") - cmdio.LogString(ctx, fileTree) - } - - // Write CLAUDE.md and AGENTS.md files - if err := writeAgentFiles(absOutputDir, map[string]any{}); err != nil { - return fmt.Errorf("failed to write agent files: %w", err) - } - - targetMixed := prompts.MustExecuteTemplate("target_mixed.tmpl", map[string]any{}) - cmdio.LogString(ctx, targetMixed) - return nil + return MaterializeTemplate(ctx, TemplateConfig{ + TemplatePath: string(template.DefaultMinimal), + TemplateName: "default-minimal", + }, configMap, name, outputDir) } return cmd } diff --git a/experimental/apps-mcp/cmd/init_template/job.go b/experimental/apps-mcp/cmd/init_template/job.go index 2107544ba8..4e27db7a00 100644 --- a/experimental/apps-mcp/cmd/init_template/job.go +++ b/experimental/apps-mcp/cmd/init_template/job.go @@ -2,14 +2,8 @@ package init_template import ( "errors" - "fmt" - "os" - "path/filepath" "github.com/databricks/cli/cmd/root" - "github.com/databricks/cli/experimental/apps-mcp/lib/common" - "github.com/databricks/cli/experimental/apps-mcp/lib/prompts" - "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/template" "github.com/spf13/cobra" ) @@ -62,72 +56,13 @@ After initialization: "include_python": "yes", "serverless": "yes", "personal_schemas": "yes", + "default_catalog": catalog, } - if catalog != "" { - configMap["default_catalog"] = 
catalog - } - - configFile, err := writeConfigToTempFile(configMap) - if err != nil { - return err - } - defer os.Remove(configFile) - - if outputDir != "" { - if err := os.MkdirAll(outputDir, 0o755); err != nil { - return fmt.Errorf("create output directory: %w", err) - } - } - - r := template.Resolver{ - TemplatePathOrUrl: string(template.DefaultPython), - ConfigFile: configFile, - OutputDir: outputDir, - } - - tmpl, err := r.Resolve(ctx) - if err != nil { - return err - } - defer tmpl.Reader.Cleanup(ctx) - - err = tmpl.Writer.Materialize(ctx, tmpl.Reader) - if err != nil { - return err - } - tmpl.Writer.LogTelemetry(ctx) - - actualOutputDir := name - if outputDir != "" { - actualOutputDir = filepath.Join(outputDir, name) - } - - absOutputDir, err := filepath.Abs(actualOutputDir) - if err != nil { - absOutputDir = actualOutputDir - } - fileCount := countFiles(absOutputDir) - cmdio.LogString(ctx, common.FormatProjectScaffoldSuccess("job", "āš™ļø", "default-python", absOutputDir, fileCount, "")) - - fileTree, err := generateFileTree(absOutputDir) - if err == nil && fileTree != "" { - cmdio.LogString(ctx, "\nFile structure:") - cmdio.LogString(ctx, fileTree) - } - - // Write CLAUDE.md and AGENTS.md files - if err := writeAgentFiles(absOutputDir, map[string]any{}); err != nil { - return fmt.Errorf("failed to write agent files: %w", err) - } - - // Show L2 guidance: mixed (for adding any resource) + jobs (for developing jobs) - targetMixed := prompts.MustExecuteTemplate("target_mixed.tmpl", map[string]any{}) - cmdio.LogString(ctx, targetMixed) - - targetJobs := prompts.MustExecuteTemplate("target_jobs.tmpl", map[string]any{}) - cmdio.LogString(ctx, targetJobs) - return nil + return MaterializeTemplate(ctx, TemplateConfig{ + TemplatePath: string(template.DefaultPython), + TemplateName: "default-python", + }, configMap, name, outputDir) } return cmd } diff --git a/experimental/apps-mcp/cmd/init_template/pipeline.go b/experimental/apps-mcp/cmd/init_template/pipeline.go index 563b6b14ec..bbd183446c 100644 --- a/experimental/apps-mcp/cmd/init_template/pipeline.go +++ b/experimental/apps-mcp/cmd/init_template/pipeline.go @@ -3,13 +3,8 @@ package init_template import ( "errors" "fmt" - "os" - "path/filepath" "github.com/databricks/cli/cmd/root" - "github.com/databricks/cli/experimental/apps-mcp/lib/common" - "github.com/databricks/cli/experimental/apps-mcp/lib/prompts" - "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/template" "github.com/spf13/cobra" ) @@ -65,80 +60,15 @@ After initialization: configMap := map[string]any{ "project_name": name, - "lakeflow_only": "yes", - "include_job": "no", - "include_pipeline": "yes", - "include_python": "no", - "serverless": "yes", "personal_schemas": "yes", "language": language, + "default_catalog": catalog, } - if catalog != "" { - configMap["default_catalog"] = catalog - } - - configFile, err := writeConfigToTempFile(configMap) - if err != nil { - return err - } - defer os.Remove(configFile) - - if outputDir != "" { - if err := os.MkdirAll(outputDir, 0o755); err != nil { - return fmt.Errorf("create output directory: %w", err) - } - } - - r := template.Resolver{ - TemplatePathOrUrl: string(template.LakeflowPipelines), - ConfigFile: configFile, - OutputDir: outputDir, - } - - tmpl, err := r.Resolve(ctx) - if err != nil { - return err - } - defer tmpl.Reader.Cleanup(ctx) - - err = tmpl.Writer.Materialize(ctx, tmpl.Reader) - if err != nil { - return err - } - tmpl.Writer.LogTelemetry(ctx) - - actualOutputDir := name - if outputDir != "" { 
- actualOutputDir = filepath.Join(outputDir, name) - } - - absOutputDir, err := filepath.Abs(actualOutputDir) - if err != nil { - absOutputDir = actualOutputDir - } - fileCount := countFiles(absOutputDir) - extraDetails := "Language: " + language - cmdio.LogString(ctx, common.FormatProjectScaffoldSuccess("pipeline", "šŸ”„", "lakeflow-pipelines", absOutputDir, fileCount, extraDetails)) - - fileTree, err := generateFileTree(absOutputDir) - if err == nil && fileTree != "" { - cmdio.LogString(ctx, "\nFile structure:") - cmdio.LogString(ctx, fileTree) - } - - // Write CLAUDE.md and AGENTS.md files - if err := writeAgentFiles(absOutputDir, map[string]any{}); err != nil { - return fmt.Errorf("failed to write agent files: %w", err) - } - - // Show L2 guidance: mixed (for adding any resource) + pipelines (for developing pipelines) - targetMixed := prompts.MustExecuteTemplate("target_mixed.tmpl", map[string]any{}) - cmdio.LogString(ctx, targetMixed) - - targetPipelines := prompts.MustExecuteTemplate("target_pipelines.tmpl", map[string]any{}) - cmdio.LogString(ctx, targetPipelines) - return nil + return MaterializeTemplate(ctx, TemplateConfig{ + TemplatePath: string(template.LakeflowPipelines), + TemplateName: "lakeflow-pipelines", + }, configMap, name, outputDir) } return cmd } diff --git a/experimental/apps-mcp/lib/common/output.go b/experimental/apps-mcp/lib/common/output.go index 92e3f1d1a4..b00d22e9d4 100644 --- a/experimental/apps-mcp/lib/common/output.go +++ b/experimental/apps-mcp/lib/common/output.go @@ -12,13 +12,6 @@ func FormatBrandedHeader(emoji, message string) string { headerLine, emoji, message, headerLine) } -// FormatScaffoldSuccess formats a success message for app scaffolding. -func FormatScaffoldSuccess(templateName, workDir string, filesCopied int) string { - header := FormatBrandedHeader("šŸš€", "App scaffolded successfully") - return fmt.Sprintf("%sāœ… Created %s application at %s\n\nFiles copied: %d\n\nTemplate: %s\n", - header, templateName, workDir, filesCopied, templateName) -} - // FormatValidationSuccess formats a success message for validation. func FormatValidationSuccess(message string) string { header := FormatBrandedHeader("šŸ”", "Validating your app") @@ -47,16 +40,8 @@ func FormatDeploymentFailure(appName, message string) string { } // FormatProjectScaffoldSuccess formats a success message for project scaffolding. 
-// projectType examples: "job", "pipeline", "empty" -// emoji examples: "āš™ļø" (job), "šŸ”„" (pipeline), "šŸ“¦" (empty) -// extraDetails can include additional info like "Language: python" -func FormatProjectScaffoldSuccess(projectType, emoji, templateName, workDir string, filesCopied int, extraDetails string) string { - header := FormatBrandedHeader(emoji, projectType+" project scaffolded successfully") - result := fmt.Sprintf("%sāœ… Created %s %s project at %s\n\n", - header, templateName, projectType, workDir) - if extraDetails != "" { - result += extraDetails + "\n" - } - result += fmt.Sprintf("Files copied: %d\n\nTemplate: %s\n", filesCopied, templateName) - return result +func FormatProjectScaffoldSuccess(templateName, workDir string, filesCopied int) string { + header := FormatBrandedHeader("šŸ“¦", "Project scaffolded successfully") + return fmt.Sprintf("%sāœ… Created %s project at %s\n\nFiles copied: %d\n", + header, templateName, workDir, filesCopied) } diff --git a/experimental/apps-mcp/lib/common/output_test.go b/experimental/apps-mcp/lib/common/output_test.go index 392596ebf1..25a97ab76a 100644 --- a/experimental/apps-mcp/lib/common/output_test.go +++ b/experimental/apps-mcp/lib/common/output_test.go @@ -17,11 +17,11 @@ func TestFormatBrandedHeader(t *testing.T) { } } -func TestFormatScaffoldSuccess(t *testing.T) { - result := FormatScaffoldSuccess("appkit", "/path/to/app", 42) +func TestFormatProjectScaffoldSuccess(t *testing.T) { + result := FormatProjectScaffoldSuccess("appkit", "/path/to/app", 42) // Check for key components - if !strings.Contains(result, "šŸš€ Databricks MCP") { + if !strings.Contains(result, "šŸ“¦ Databricks MCP") { t.Error("Missing branded header") } if !strings.Contains(result, "āœ…") { diff --git a/experimental/apps-mcp/lib/detector/bundle_detector.go b/experimental/apps-mcp/lib/detector/bundle_detector.go index 859ff7ed18..44f0f31e9d 100644 --- a/experimental/apps-mcp/lib/detector/bundle_detector.go +++ b/experimental/apps-mcp/lib/detector/bundle_detector.go @@ -22,7 +22,9 @@ func (d *BundleDetector) Detect(ctx context.Context, workDir string, detected *D } // use full bundle loading to get all resources including from includes - ctx = logdiag.InitContext(ctx) + if !logdiag.IsSetup(ctx) { + ctx = logdiag.InitContext(ctx) + } b, err := bundle.Load(ctx, workDir) if err != nil || b == nil { return nil @@ -54,10 +56,8 @@ func (d *BundleDetector) Detect(ctx context.Context, workDir string, detected *D detected.TargetTypes = append(detected.TargetTypes, "pipelines") } - // Include "mixed" guidance for all projects EXCEPT app-only projects. - // This provides general resource addition guidance (target_mixed.tmpl). - // We exclude app-only projects to provide a dedicated app development experience - // focused on app-specific patterns (target_apps.tmpl has comprehensive app guidance). + // Determine if this is an app-only project (only app resources, nothing else). + // App-only projects get focused app guidance; others get "mixed" guidance. 
isAppOnly := hasApps && !hasJobs && !hasPipelines && len(b.Config.Resources.Clusters) == 0 && len(b.Config.Resources.Dashboards) == 0 && @@ -68,6 +68,7 @@ func (d *BundleDetector) Detect(ctx context.Context, workDir string, detected *D len(b.Config.Resources.QualityMonitors) == 0 && len(b.Config.Resources.Volumes) == 0 + // Include "mixed" guidance for all projects except app-only projects if !isAppOnly { detected.TargetTypes = append(detected.TargetTypes, "mixed") } From 16dc3360007b37c6a6391f4111b8c476f3993eaa Mon Sep 17 00:00:00 2001 From: Lennart Kats Date: Sat, 27 Dec 2025 16:55:22 +0100 Subject: [PATCH 3/5] Refinements --- experimental/apps-mcp/cmd/apps_mcp.go | 2 -- .../apps-mcp/cmd/init_template/common.go | 14 ++++++++----- .../apps-mcp/cmd/init_template/job.go | 6 ++++++ .../apps-mcp/cmd/init_template/pipeline.go | 6 ++++++ .../apps-mcp/lib/detector/bundle_detector.go | 2 ++ .../apps-mcp/lib/detector/detector.go | 1 + .../lib/middlewares/databricks_client.go | 16 +++++++++++++++ experimental/apps-mcp/lib/prompts/flow.tmpl | 4 +++- .../apps-mcp/lib/prompts/target_jobs.tmpl | 2 +- .../apps-mcp/lib/prompts/target_mixed.tmpl | 4 ---- .../lib/prompts/target_pipelines.tmpl | 2 +- .../lib/providers/clitools/discover.go | 20 ++----------------- 12 files changed, 47 insertions(+), 32 deletions(-) diff --git a/experimental/apps-mcp/cmd/apps_mcp.go b/experimental/apps-mcp/cmd/apps_mcp.go index ff19ef6912..83da91447c 100644 --- a/experimental/apps-mcp/cmd/apps_mcp.go +++ b/experimental/apps-mcp/cmd/apps_mcp.go @@ -1,7 +1,6 @@ package mcp import ( - "github.com/databricks/cli/experimental/apps-mcp/cmd/init_template" mcplib "github.com/databricks/cli/experimental/apps-mcp/lib" "github.com/databricks/cli/experimental/apps-mcp/lib/server" "github.com/databricks/cli/libs/log" @@ -52,7 +51,6 @@ The server communicates via stdio using the Model Context Protocol.`, cmd.AddCommand(newInstallCmd()) cmd.AddCommand(newToolsCmd()) - cmd.AddCommand(init_template.NewInitTemplateCommand()) return cmd } diff --git a/experimental/apps-mcp/cmd/init_template/common.go b/experimental/apps-mcp/cmd/init_template/common.go index 8caa2ff3f2..21af80e8df 100644 --- a/experimental/apps-mcp/cmd/init_template/common.go +++ b/experimental/apps-mcp/cmd/init_template/common.go @@ -76,13 +76,17 @@ func MaterializeTemplate(ctx context.Context, cfg TemplateConfig, configMap map[ cmdio.LogString(ctx, fileTree) } - if err := writeAgentFiles(absOutputDir, map[string]any{}); err != nil { - return fmt.Errorf("failed to write agent files: %w", err) - } - - // Detect project type and inject appropriate L2 guidance registry := detector.NewRegistry() detected := registry.Detect(ctx, absOutputDir) + + // Only write generic CLAUDE.md for non-app projects + // (app projects have their own template-specific CLAUDE.md) + if !detected.IsAppOnly { + if err := writeAgentFiles(absOutputDir, map[string]any{}); err != nil { + return fmt.Errorf("failed to write agent files: %w", err) + } + } + for _, targetType := range detected.TargetTypes { templateName := fmt.Sprintf("target_%s.tmpl", targetType) if prompts.TemplateExists(templateName) { diff --git a/experimental/apps-mcp/cmd/init_template/job.go b/experimental/apps-mcp/cmd/init_template/job.go index 4e27db7a00..6f5b061637 100644 --- a/experimental/apps-mcp/cmd/init_template/job.go +++ b/experimental/apps-mcp/cmd/init_template/job.go @@ -4,6 +4,7 @@ import ( "errors" "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/experimental/apps-mcp/lib/middlewares" 
"github.com/databricks/cli/libs/template" "github.com/spf13/cobra" ) @@ -49,6 +50,11 @@ After initialization: return errors.New("--name is required. Example: init-template job --name my_job") } + // Default to workspace default catalog if not specified + if catalog == "" { + catalog = middlewares.GetDefaultCatalog(ctx) + } + configMap := map[string]any{ "project_name": name, "include_job": "yes", diff --git a/experimental/apps-mcp/cmd/init_template/pipeline.go b/experimental/apps-mcp/cmd/init_template/pipeline.go index bbd183446c..68ca1b8e54 100644 --- a/experimental/apps-mcp/cmd/init_template/pipeline.go +++ b/experimental/apps-mcp/cmd/init_template/pipeline.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/experimental/apps-mcp/lib/middlewares" "github.com/databricks/cli/libs/template" "github.com/spf13/cobra" ) @@ -58,6 +59,11 @@ After initialization: return fmt.Errorf("--language must be 'python' or 'sql', got '%s'", language) } + // Default to workspace default catalog if not specified + if catalog == "" { + catalog = middlewares.GetDefaultCatalog(ctx) + } + configMap := map[string]any{ "project_name": name, "personal_schemas": "yes", diff --git a/experimental/apps-mcp/lib/detector/bundle_detector.go b/experimental/apps-mcp/lib/detector/bundle_detector.go index 44f0f31e9d..a61eba07ec 100644 --- a/experimental/apps-mcp/lib/detector/bundle_detector.go +++ b/experimental/apps-mcp/lib/detector/bundle_detector.go @@ -68,6 +68,8 @@ func (d *BundleDetector) Detect(ctx context.Context, workDir string, detected *D len(b.Config.Resources.QualityMonitors) == 0 && len(b.Config.Resources.Volumes) == 0 + detected.IsAppOnly = isAppOnly + // Include "mixed" guidance for all projects except app-only projects if !isAppOnly { detected.TargetTypes = append(detected.TargetTypes, "mixed") diff --git a/experimental/apps-mcp/lib/detector/detector.go b/experimental/apps-mcp/lib/detector/detector.go index 4b00a589ff..2e8e13288d 100644 --- a/experimental/apps-mcp/lib/detector/detector.go +++ b/experimental/apps-mcp/lib/detector/detector.go @@ -19,6 +19,7 @@ type DetectedContext struct { Template string // "appkit-typescript", "python", etc. BundleInfo *BundleInfo Metadata map[string]string + IsAppOnly bool // True if project contains only app resources, no jobs/pipelines/etc. } // Detector detects project context from a working directory. diff --git a/experimental/apps-mcp/lib/middlewares/databricks_client.go b/experimental/apps-mcp/lib/middlewares/databricks_client.go index 4190b22db9..784646b7d2 100644 --- a/experimental/apps-mcp/lib/middlewares/databricks_client.go +++ b/experimental/apps-mcp/lib/middlewares/databricks_client.go @@ -132,3 +132,19 @@ func newAuthError(ctx context.Context) error { } return errors.New(prompts.MustExecuteTemplate("auth_error.tmpl", data)) } + +// GetDefaultCatalog fetches the workspace default catalog name. +// Returns empty string if Unity Catalog is not available or on error. +func GetDefaultCatalog(ctx context.Context) string { + w, err := GetDatabricksClient(ctx) + if err != nil { + return "" + } + + metastore, err := w.Metastores.Current(ctx) + if err != nil { + return "" // gracefully handle any error (no UC, permission denied, etc.) 
+ } + + return metastore.DefaultCatalogName +} diff --git a/experimental/apps-mcp/lib/prompts/flow.tmpl b/experimental/apps-mcp/lib/prompts/flow.tmpl index 12a1eafbdd..50a90c16fb 100644 --- a/experimental/apps-mcp/lib/prompts/flow.tmpl +++ b/experimental/apps-mcp/lib/prompts/flow.tmpl @@ -38,7 +38,9 @@ invoke_databricks_cli 'experimental apps-mcp tools init-template job --name my_j For pipelines (Lakeflow Declarative Pipelines): invoke_databricks_cli 'experimental apps-mcp tools init-template pipeline --name my_pipeline --language python' invoke_databricks_cli 'experimental apps-mcp tools init-template pipeline --name my_pipeline --language sql --catalog my_catalog' -Note: --language is required (python or sql) +Note: --language is required (python or sql). Ask the user which language they prefer: + - SQL: Recommended for straightforward transformations (filters, joins, aggregations) + - Python: Recommended for complex logic (custom UDFs, ML, advanced processing) For custom resources (dashboards, alerts, model serving, etc.): invoke_databricks_cli 'experimental apps-mcp tools init-template empty --name my_project' diff --git a/experimental/apps-mcp/lib/prompts/target_jobs.tmpl b/experimental/apps-mcp/lib/prompts/target_jobs.tmpl index 53762fc23e..470d77bd3c 100644 --- a/experimental/apps-mcp/lib/prompts/target_jobs.tmpl +++ b/experimental/apps-mcp/lib/prompts/target_jobs.tmpl @@ -8,7 +8,7 @@ ## Lakeflow Jobs Development -This guidance is for DEVELOPING jobs in this project. To ADD other resource types (dashboards, alerts, pipelines, etc.), see the general resource guidance above. +This guidance is for developing jobs in this project. ### Project Structure - `src/` - Python notebooks (.ipynb) and source code diff --git a/experimental/apps-mcp/lib/prompts/target_mixed.tmpl b/experimental/apps-mcp/lib/prompts/target_mixed.tmpl index 84b094ff20..e1a01ea418 100644 --- a/experimental/apps-mcp/lib/prompts/target_mixed.tmpl +++ b/experimental/apps-mcp/lib/prompts/target_mixed.tmpl @@ -19,10 +19,6 @@ resources: - task_key: main notebook_task: notebook_path: ../src/notebook.py - new_cluster: - num_workers: 2 - spark_version: "15.4.x-scala2.12" - node_type_id: "i3.xlarge" ``` **Pipelines** (Lakeflow Declarative Pipelines) - `resources/my_pipeline.pipeline.yml`: diff --git a/experimental/apps-mcp/lib/prompts/target_pipelines.tmpl b/experimental/apps-mcp/lib/prompts/target_pipelines.tmpl index f0970222c9..4f9a968565 100644 --- a/experimental/apps-mcp/lib/prompts/target_pipelines.tmpl +++ b/experimental/apps-mcp/lib/prompts/target_pipelines.tmpl @@ -8,7 +8,7 @@ ## Lakeflow Declarative Pipelines Development -This guidance is for DEVELOPING pipelines in this project. To ADD other resource types (dashboards, alerts, jobs, etc.), see the general resource guidance above. +This guidance is for developing pipelines in this project. Lakeflow Declarative Pipelines (formerly Delta Live Tables) is a framework for building batch and streaming data pipelines. 
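
Note (illustration only, not part of the diff): the catalog hunks in this patch give the job and pipeline subcommands the same resolution order: an explicit --catalog flag wins, otherwise GetDefaultCatalog supplies the workspace default, and an empty string (no Unity Catalog, or any other error) is passed through for the template to treat as "no default". A minimal standalone Go sketch of that order; getDefaultCatalog below is a stub for the middleware, not the real client call:

```go
package main

import (
	"context"
	"fmt"
)

// getDefaultCatalog stands in for middlewares.GetDefaultCatalog, which asks
// the workspace for its default catalog and returns "" on any error.
func getDefaultCatalog(ctx context.Context) string { return "main" }

// resolveCatalog mirrors the fallback added to the job and pipeline
// subcommands: an explicit flag wins, then the workspace default.
func resolveCatalog(ctx context.Context, flagValue string) string {
	if flagValue != "" {
		return flagValue
	}
	return getDefaultCatalog(ctx)
}

func main() {
	ctx := context.Background()
	fmt.Println(resolveCatalog(ctx, ""))          // "main" (workspace default)
	fmt.Println(resolveCatalog(ctx, "analytics")) // "analytics" (flag wins)
}
```

The real implementation resolves the value through w.Metastores.Current(ctx), as shown in the databricks_client.go hunk above.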
diff --git a/experimental/apps-mcp/lib/providers/clitools/discover.go b/experimental/apps-mcp/lib/providers/clitools/discover.go index 5f51e82bb2..7dc8cf70a0 100644 --- a/experimental/apps-mcp/lib/providers/clitools/discover.go +++ b/experimental/apps-mcp/lib/providers/clitools/discover.go @@ -25,29 +25,13 @@ func Discover(ctx context.Context, workingDirectory string) (string, error) { profiles := middlewares.GetAvailableProfiles(ctx) // Get default catalog (non-fatal if unavailable) - defaultCatalog := getDefaultCatalog(ctx) + defaultCatalog := middlewares.GetDefaultCatalog(ctx) // run detectors to identify project context registry := detector.NewRegistry() detected := registry.Detect(ctx, workingDirectory) - return generateDiscoverGuidance(ctx, warehouse, currentProfile, profiles, defaultCatalog, detected), nil -} - -// getDefaultCatalog fetches the workspace default catalog name. -// Returns empty string if Unity Catalog is not available or on error. -func getDefaultCatalog(ctx context.Context) string { - w, err := middlewares.GetDatabricksClient(ctx) - if err != nil { - return "" - } - - metastore, err := w.Metastores.Current(ctx) - if err != nil { - return "" // gracefully handle any error (no UC, permission denied, etc.) - } - - return metastore.DefaultCatalogName + return generateDiscoverGuidance(ctx, warehouse, currentProfile, profiles, defaultCatalog, detected, listAllSkills), nil } // generateDiscoverGuidance creates guidance with L1 (flow) + L2 (target) layers. From 440d60aef4dad9f6df678aa5408cf0f58909ebe9 Mon Sep 17 00:00:00 2001 From: Lennart Kats Date: Mon, 29 Dec 2025 14:32:06 +0100 Subject: [PATCH 4/5] Fix discover.go compilation error and update test MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Remove undefined listAllSkills parameter from generateDiscoverGuidance call - Fix test expectation: jobs-only projects now include "mixed" target type šŸ¤– Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- experimental/apps-mcp/lib/detector/detector_test.go | 2 +- experimental/apps-mcp/lib/providers/clitools/discover.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/experimental/apps-mcp/lib/detector/detector_test.go b/experimental/apps-mcp/lib/detector/detector_test.go index fa25b78971..b2c4f72b26 100644 --- a/experimental/apps-mcp/lib/detector/detector_test.go +++ b/experimental/apps-mcp/lib/detector/detector_test.go @@ -59,7 +59,7 @@ resources: detected := registry.Detect(ctx, dir) assert.True(t, detected.InProject) - assert.Equal(t, []string{"jobs"}, detected.TargetTypes) + assert.Equal(t, []string{"jobs", "mixed"}, detected.TargetTypes) assert.Equal(t, "my-job", detected.BundleInfo.Name) } diff --git a/experimental/apps-mcp/lib/providers/clitools/discover.go b/experimental/apps-mcp/lib/providers/clitools/discover.go index 7dc8cf70a0..2e9324dc9b 100644 --- a/experimental/apps-mcp/lib/providers/clitools/discover.go +++ b/experimental/apps-mcp/lib/providers/clitools/discover.go @@ -31,7 +31,7 @@ func Discover(ctx context.Context, workingDirectory string) (string, error) { registry := detector.NewRegistry() detected := registry.Detect(ctx, workingDirectory) - return generateDiscoverGuidance(ctx, warehouse, currentProfile, profiles, defaultCatalog, detected, listAllSkills), nil + return generateDiscoverGuidance(ctx, warehouse, currentProfile, profiles, defaultCatalog, detected), nil } // generateDiscoverGuidance creates guidance with L1 (flow) + L2 (target) layers. 
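
Note (illustration only, not part of the diff): the detector_test.go update pins down the new contract: a jobs-only bundle now reports both "jobs" and "mixed" target types, and MaterializeTemplate (patch 2) renders a target_<type>.tmpl prompt for each detected type, skipping templates that do not exist. A standalone sketch of that selection loop; the exists predicate stands in for prompts.TemplateExists:

```go
package main

import "fmt"

// guidanceTemplates mirrors the selection loop in MaterializeTemplate:
// each detected target type maps to a target_<type>.tmpl prompt, and only
// templates that actually exist get rendered.
func guidanceTemplates(targetTypes []string, exists func(string) bool) []string {
	var selected []string
	for _, t := range targetTypes {
		name := fmt.Sprintf("target_%s.tmpl", t)
		if exists(name) {
			selected = append(selected, name)
		}
	}
	return selected
}

func main() {
	known := map[string]bool{
		"target_jobs.tmpl":      true,
		"target_pipelines.tmpl": true,
		"target_mixed.tmpl":     true,
	}
	// A jobs-only bundle now detects both "jobs" and "mixed", matching the
	// updated assertion in detector_test.go.
	fmt.Println(guidanceTemplates([]string{"jobs", "mixed"}, func(n string) bool { return known[n] }))
}
```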
From d48179917b32515040f47b1a758e2b1e24fda3d2 Mon Sep 17 00:00:00 2001 From: Lennart Kats Date: Mon, 29 Dec 2025 18:11:51 +0100 Subject: [PATCH 5/5] Cleanup --- experimental/apps-mcp/cmd/init_template/common.go | 2 +- experimental/apps-mcp/lib/common/output.go | 14 +++++++------- experimental/apps-mcp/lib/common/output_test.go | 6 +++--- .../apps-mcp/lib/providers/clitools/provider.go | 2 +- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/experimental/apps-mcp/cmd/init_template/common.go b/experimental/apps-mcp/cmd/init_template/common.go index 21af80e8df..7f78ec4a0b 100644 --- a/experimental/apps-mcp/cmd/init_template/common.go +++ b/experimental/apps-mcp/cmd/init_template/common.go @@ -68,7 +68,7 @@ func MaterializeTemplate(ctx context.Context, cfg TemplateConfig, configMap map[ } fileCount := countFiles(absOutputDir) - cmdio.LogString(ctx, common.FormatProjectScaffoldSuccess(cfg.TemplateName, absOutputDir, fileCount)) + cmdio.LogString(ctx, common.FormatScaffoldSuccess(cfg.TemplateName, absOutputDir, fileCount)) fileTree, err := generateFileTree(absOutputDir) if err == nil && fileTree != "" { diff --git a/experimental/apps-mcp/lib/common/output.go b/experimental/apps-mcp/lib/common/output.go index b00d22e9d4..7454950870 100644 --- a/experimental/apps-mcp/lib/common/output.go +++ b/experimental/apps-mcp/lib/common/output.go @@ -12,6 +12,13 @@ func FormatBrandedHeader(emoji, message string) string { headerLine, emoji, message, headerLine) } +// FormatScaffoldSuccess formats a success message for app scaffolding. +func FormatScaffoldSuccess(templateName, workDir string, filesCopied int) string { + header := FormatBrandedHeader("šŸš€", "App scaffolded successfully") + return fmt.Sprintf("%sāœ… Created %s application at %s\n\nFiles copied: %d\n\nTemplate: %s\n", + header, templateName, workDir, filesCopied, templateName) +} + // FormatValidationSuccess formats a success message for validation. func FormatValidationSuccess(message string) string { header := FormatBrandedHeader("šŸ”", "Validating your app") @@ -38,10 +45,3 @@ func FormatDeploymentFailure(appName, message string) string { return fmt.Sprintf("%sāŒ Deployment failed for '%s'\n\n%s\n", header, appName, message) } - -// FormatProjectScaffoldSuccess formats a success message for project scaffolding. 
-func FormatProjectScaffoldSuccess(templateName, workDir string, filesCopied int) string { - header := FormatBrandedHeader("šŸ“¦", "Project scaffolded successfully") - return fmt.Sprintf("%sāœ… Created %s project at %s\n\nFiles copied: %d\n", - header, templateName, workDir, filesCopied) -} diff --git a/experimental/apps-mcp/lib/common/output_test.go b/experimental/apps-mcp/lib/common/output_test.go index 25a97ab76a..392596ebf1 100644 --- a/experimental/apps-mcp/lib/common/output_test.go +++ b/experimental/apps-mcp/lib/common/output_test.go @@ -17,11 +17,11 @@ func TestFormatBrandedHeader(t *testing.T) { } } -func TestFormatProjectScaffoldSuccess(t *testing.T) { - result := FormatProjectScaffoldSuccess("appkit", "/path/to/app", 42) +func TestFormatScaffoldSuccess(t *testing.T) { + result := FormatScaffoldSuccess("appkit", "/path/to/app", 42) // Check for key components - if !strings.Contains(result, "šŸ“¦ Databricks MCP") { + if !strings.Contains(result, "šŸš€ Databricks MCP") { t.Error("Missing branded header") } if !strings.Contains(result, "āœ…") { diff --git a/experimental/apps-mcp/lib/providers/clitools/provider.go b/experimental/apps-mcp/lib/providers/clitools/provider.go index 36d62d80d5..e9d146f21a 100644 --- a/experimental/apps-mcp/lib/providers/clitools/provider.go +++ b/experimental/apps-mcp/lib/providers/clitools/provider.go @@ -93,7 +93,7 @@ func (p *Provider) RegisterTools(server *mcpsdk.Server) error { mcpsdk.AddTool(server, &mcpsdk.Tool{ Name: "databricks_discover", - Description: "Discover how to perform Databricks work. MANDATORY: Call this FIRST before ANY Databricks work. Do not skip this step. This tool detects project context, configures authentication, and returns required guidance for apps, pipelines, jobs, bundles, dashboards, and SQL workflows. Without calling this first, you cannot effectively help with Databricks tasks.", + Description: "Discover available Databricks workspaces, warehouses, and get workflow recommendations. Call this FIRST when planning ANY Databricks work involving apps, dashboards, pipelines, jobs, bundles, or SQL workflows. Returns workspace capabilities and recommended tooling.", }, func(ctx context.Context, req *mcpsdk.CallToolRequest, args DiscoverInput) (*mcpsdk.CallToolResult, any, error) { log.Debugf(ctx, "databricks_discover called: working_directory=%s", args.WorkingDirectory)
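
Note (illustration only, not part of the diff): after this series, the job, pipeline, and empty subcommands all reduce to a single call into the shared MaterializeTemplate helper from common.go, differing only in the TemplateConfig and config map they pass. A standalone sketch of that call shape; the struct mirrors common.go, while materializeTemplate is a stub so the example compiles without the package:

```go
package main

import "fmt"

// TemplateConfig mirrors the struct introduced in common.go.
type TemplateConfig struct {
	TemplatePath string // built-in template name or remote URL
	TemplateName string // label used in the success message
	TemplateDir  string // subdirectory within the repo (remote templates)
	Branch       string // git branch (remote templates)
}

// materializeTemplate stubs the shared helper: write config to a temp file,
// materialize the template, print the file tree, then emit target guidance.
func materializeTemplate(cfg TemplateConfig, config map[string]any, name, outputDir string) error {
	fmt.Printf("materializing %s project %q into %q\n", cfg.TemplateName, name, outputDir)
	return nil
}

func main() {
	// The job subcommand reduces to one call; pipeline and empty differ only
	// in TemplatePath/TemplateName and the config keys they set.
	_ = materializeTemplate(TemplateConfig{
		TemplatePath: "default-python",
		TemplateName: "default-python",
	}, map[string]any{
		"project_name":    "my_job",
		"include_job":     "yes",
		"default_catalog": "main",
	}, "my_job", "output")
}
```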