From e39477f2087a04767e64897a8cd6113e51365e64 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Fri, 17 Oct 2025 18:08:27 +0200 Subject: [PATCH 01/10] Added support for --bind flag in 'bundle generate' --- .../generate/auto-bind/databricks.yml.tmpl | 8 +++ .../bundle/generate/auto-bind/out.test.toml | 5 ++ .../bundle/generate/auto-bind/output.txt | 48 ++++++++++++++ acceptance/bundle/generate/auto-bind/script | 50 +++++++++++++++ acceptance/bundle/generate/auto-bind/test.py | 2 + .../bundle/generate/auto-bind/test.toml | 26 ++++++++ cmd/bundle/deployment/bind.go | 46 +------------- cmd/bundle/deployment/bind_resource.go | 63 +++++++++++++++++++ cmd/bundle/generate.go | 20 ++++-- cmd/bundle/generate/app.go | 14 +++++ cmd/bundle/generate/job.go | 15 +++++ cmd/bundle/generate/pipeline.go | 14 +++++ 12 files changed, 262 insertions(+), 49 deletions(-) create mode 100644 acceptance/bundle/generate/auto-bind/databricks.yml.tmpl create mode 100644 acceptance/bundle/generate/auto-bind/out.test.toml create mode 100644 acceptance/bundle/generate/auto-bind/output.txt create mode 100755 acceptance/bundle/generate/auto-bind/script create mode 100644 acceptance/bundle/generate/auto-bind/test.py create mode 100644 acceptance/bundle/generate/auto-bind/test.toml create mode 100644 cmd/bundle/deployment/bind_resource.go diff --git a/acceptance/bundle/generate/auto-bind/databricks.yml.tmpl b/acceptance/bundle/generate/auto-bind/databricks.yml.tmpl new file mode 100644 index 0000000000..c58a4bbd14 --- /dev/null +++ b/acceptance/bundle/generate/auto-bind/databricks.yml.tmpl @@ -0,0 +1,8 @@ +bundle: + name: auto-bind-test + +workspace: + root_path: /tmp/${UNIQUE_NAME} + +include: + - resources/*.yml diff --git a/acceptance/bundle/generate/auto-bind/out.test.toml b/acceptance/bundle/generate/auto-bind/out.test.toml new file mode 100644 index 0000000000..3cdb920b67 --- /dev/null +++ b/acceptance/bundle/generate/auto-bind/out.test.toml @@ -0,0 +1,5 @@ +Local = false +Cloud = true + 
+[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["terraform"] diff --git a/acceptance/bundle/generate/auto-bind/output.txt b/acceptance/bundle/generate/auto-bind/output.txt new file mode 100644 index 0000000000..652ee7d545 --- /dev/null +++ b/acceptance/bundle/generate/auto-bind/output.txt @@ -0,0 +1,48 @@ + +=== Create a pre-defined job: +Created job with ID: [NUMID] + +>>> [CLI] workspace mkdirs /Workspace/Users/[USERNAME]/python-[UNIQUE_NAME] + +>>> [CLI] workspace import /Workspace/Users/[USERNAME]/python-[UNIQUE_NAME]/test --file test.py --language PYTHON + +=== Generate and bind in one step: +>>> [CLI] bundle generate job --key test_job --existing-job-id [NUMID] --config-dir resources --source-dir src --bind +File successfully saved to src/test.py +Job configuration successfully saved to resources/test_job.job.yml +Updating deployment state... +Successfully bound job with an id '[NUMID]' + +>>> ls src/ +test.py + +>>> cat resources/test_job.job.yml + name: auto-bind-job-[UNIQUE_NAME] + +=== Deploy the bound job: +>>> [CLI] bundle deploy +Uploading bundle files to /Workspace/tmp/[UNIQUE_NAME]/files... +Deploying resources... +Updating deployment state... +Deployment complete! + +=== Destroy the bundle: +>>> [CLI] bundle destroy --auto-approve +The following resources will be deleted: + delete job test_job + +All files and directories at the following location will be deleted: /Workspace/tmp/[UNIQUE_NAME] + +Deleting files... +Destroy complete! + +=== Check that job is bound and does not exist after bundle is destroyed: +>>> errcode [CLI] jobs get [NUMID] --output json +Error: Job [NUMID] does not exist. 
+ +Exit code: 1 + +=== Delete the tmp folder: +>>> [CLI] workspace delete /Workspace/Users/[USERNAME]/python-[UNIQUE_NAME]/test + +>>> [CLI] workspace delete /Workspace/Users/[USERNAME]/python-[UNIQUE_NAME] diff --git a/acceptance/bundle/generate/auto-bind/script b/acceptance/bundle/generate/auto-bind/script new file mode 100755 index 0000000000..4afef19e48 --- /dev/null +++ b/acceptance/bundle/generate/auto-bind/script @@ -0,0 +1,50 @@ +title "Create a pre-defined job:\n" + +PYTHON_NOTEBOOK_DIR="/Workspace/Users/${CURRENT_USER_NAME}/python-${UNIQUE_NAME}" +PYTHON_NOTEBOOK="${PYTHON_NOTEBOOK_DIR}/test" + +JOB_ID=$($CLI jobs create --json ' +{ + "name": "auto-bind-job-'${UNIQUE_NAME}'", + "tasks": [ + { + "task_key": "test", + "new_cluster": { + "spark_version": "'${DEFAULT_SPARK_VERSION}'", + "node_type_id": "'${NODE_TYPE_ID}'", + "num_workers": 1 + }, + "notebook_task": { + "notebook_path": "'${PYTHON_NOTEBOOK}'" + } + } + ] +}' | jq -r '.job_id') + +echo "Created job with ID: $JOB_ID" + +envsubst < databricks.yml.tmpl > databricks.yml + +cleanup() { + title "Delete the tmp folder:" + trace $CLI workspace delete ${PYTHON_NOTEBOOK} + trace $CLI workspace delete ${PYTHON_NOTEBOOK_DIR} +} +trap cleanup EXIT + +trace $CLI workspace mkdirs "${PYTHON_NOTEBOOK_DIR}" +trace $CLI workspace import "${PYTHON_NOTEBOOK}" --file test.py --language PYTHON + +title "Generate and bind in one step:" +trace $CLI bundle generate job --key test_job --existing-job-id $JOB_ID --config-dir resources --source-dir src --bind +trace ls src/ +trace cat resources/test_job.job.yml | grep "name: auto-bind-job-${UNIQUE_NAME}" + +title "Deploy the bound job:" +trace $CLI bundle deploy + +title "Destroy the bundle:" +trace $CLI bundle destroy --auto-approve + +title "Check that job is bound and does not exist after bundle is destroyed:" +trace errcode $CLI jobs get "${JOB_ID}" --output json diff --git a/acceptance/bundle/generate/auto-bind/test.py b/acceptance/bundle/generate/auto-bind/test.py new 
file mode 100644 index 0000000000..8cfae7c74f --- /dev/null +++ b/acceptance/bundle/generate/auto-bind/test.py @@ -0,0 +1,2 @@ +# Databricks notebook source +print("Test notebook") diff --git a/acceptance/bundle/generate/auto-bind/test.toml b/acceptance/bundle/generate/auto-bind/test.toml new file mode 100644 index 0000000000..9272f7bfd0 --- /dev/null +++ b/acceptance/bundle/generate/auto-bind/test.toml @@ -0,0 +1,26 @@ +# This test is using a workspace import API to load a notebook file. +# This API has a logic on how to accept notebook files and distinguishes them from regular python files. +# To succeed locally we would need to replicate this logic in the fake_workspace +Local = false +Cloud = true + +Ignore = [ + "databricks.yml", + "resources/*", + "src/*", + ".databricks", +] + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["terraform"] + + +[Env] +# MSYS2 automatically converts absolute paths like /Users/$username/$UNIQUE_NAME to +# C:/Program Files/Git/Users/$username/UNIQUE_NAME before passing it to the CLI +# Setting this environment variable prevents that conversion on windows. 
+MSYS_NO_PATHCONV = "1" + +[[Repls]] +Old = '\\' +New = '/' diff --git a/cmd/bundle/deployment/bind.go b/cmd/bundle/deployment/bind.go index 4d69380521..fcfb9c900a 100644 --- a/cmd/bundle/deployment/bind.go +++ b/cmd/bundle/deployment/bind.go @@ -1,13 +1,6 @@ package deployment import ( - "context" - "fmt" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/deploy/terraform" - "github.com/databricks/cli/bundle/phases" - "github.com/databricks/cli/cmd/bundle/utils" "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/logdiag" @@ -61,47 +54,12 @@ Any manual changes made in the workspace UI may be overwritten on deployment.`, ctx := logdiag.InitContext(cmd.Context()) cmd.SetContext(ctx) - b := utils.ConfigureBundleWithVariables(cmd) - if b == nil || logdiag.HasError(ctx) { - return root.ErrAlreadyPrinted - } - - phases.Initialize(ctx, b) - if logdiag.HasError(ctx) { - return root.ErrAlreadyPrinted - } - - resource, err := b.Config.Resources.FindResourceByConfigKey(args[0]) + err := BindResource(cmd, args[0], args[1], autoApprove, forceLock) if err != nil { return err } - w := b.WorkspaceClient() - exists, err := resource.Exists(ctx, w, args[1]) - if err != nil { - return fmt.Errorf("failed to fetch the resource, err: %w", err) - } - - if !exists { - return fmt.Errorf("%s with an id '%s' is not found", resource.ResourceDescription().SingularName, args[1]) - } - - bundle.ApplyFuncContext(ctx, b, func(context.Context, *bundle.Bundle) { - b.Config.Bundle.Deployment.Lock.Force = forceLock - }) - - tfName := terraform.GroupToTerraformName[resource.ResourceDescription().PluralName] - phases.Bind(ctx, b, &terraform.BindOptions{ - AutoApprove: autoApprove, - ResourceType: tfName, - ResourceKey: args[0], - ResourceId: args[1], - }) - if logdiag.HasError(ctx) { - return root.ErrAlreadyPrinted - } - - cmdio.LogString(ctx, fmt.Sprintf("Successfully bound %s with an id '%s'. 
Run 'bundle deploy' to deploy changes to your workspace", resource.ResourceDescription().SingularName, args[1])) + cmdio.LogString(ctx, "Run 'bundle deploy' to deploy changes to your workspace") return nil } diff --git a/cmd/bundle/deployment/bind_resource.go b/cmd/bundle/deployment/bind_resource.go new file mode 100644 index 0000000000..be08fb2504 --- /dev/null +++ b/cmd/bundle/deployment/bind_resource.go @@ -0,0 +1,63 @@ +package deployment + +import ( + "context" + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/deploy/terraform" + "github.com/databricks/cli/bundle/phases" + "github.com/databricks/cli/cmd/bundle/utils" + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/logdiag" + "github.com/spf13/cobra" +) + +// BindResource binds a bundle resource to an existing workspace resource. +// This function is shared between the bind command and generate commands with --bind flag. +func BindResource(cmd *cobra.Command, resourceKey, resourceId string, autoApprove, forceLock bool) error { + ctx := cmd.Context() + b := utils.ConfigureBundleWithVariables(cmd) + if b == nil || logdiag.HasError(ctx) { + return root.ErrAlreadyPrinted + } + + phases.Initialize(ctx, b) + if logdiag.HasError(ctx) { + return root.ErrAlreadyPrinted + } + + resource, err := b.Config.Resources.FindResourceByConfigKey(resourceKey) + if err != nil { + return err + } + + w := b.WorkspaceClient() + exists, err := resource.Exists(ctx, w, resourceId) + if err != nil { + return fmt.Errorf("failed to fetch the resource, err: %w", err) + } + + if !exists { + return fmt.Errorf("%s with an id '%s' is not found", resource.ResourceDescription().SingularName, resourceId) + } + + bundle.ApplyFuncContext(ctx, b, func(context.Context, *bundle.Bundle) { + b.Config.Bundle.Deployment.Lock.Force = forceLock + }) + + tfName := terraform.GroupToTerraformName[resource.ResourceDescription().PluralName] + phases.Bind(ctx, b, 
&terraform.BindOptions{ + AutoApprove: autoApprove, + ResourceType: tfName, + ResourceKey: resourceKey, + ResourceId: resourceId, + }) + if logdiag.HasError(ctx) { + return root.ErrAlreadyPrinted + } + + cmdio.LogString(ctx, fmt.Sprintf("Successfully bound %s with an id '%s'", resource.ResourceDescription().SingularName, resourceId)) + return nil +} diff --git a/cmd/bundle/generate.go b/cmd/bundle/generate.go index 448a172820..ab03d3457d 100644 --- a/cmd/bundle/generate.go +++ b/cmd/bundle/generate.go @@ -7,6 +7,7 @@ import ( func newGenerateCommand() *cobra.Command { var key string + var bind bool cmd := &cobra.Command{ Use: "generate", @@ -15,16 +16,24 @@ func newGenerateCommand() *cobra.Command { Common patterns: databricks bundle generate job --existing-job-id 123 --key my_job + databricks bundle generate pipeline --existing-pipeline-id abc123 --key etl_pipeline databricks bundle generate dashboard --existing-path /my-dashboard --key sales_dash databricks bundle generate dashboard --resource my_dashboard --watch --force # Keep local copy in sync. Useful for development. databricks bundle generate dashboard --resource my_dashboard --force # Do a one-time sync. -Complete migration workflow: - 1. Generate: databricks bundle generate job --existing-job-id 123 --key my_job - 2. Bind: databricks bundle deployment bind my_job 123 - 3. Deploy: databricks bundle deploy +Migration workflows: -Use --key to specify the resource name in your bundle configuration.`, + Two-step workflow (manual bind): + 1. Generate: databricks bundle generate job --existing-job-id 123 --key my_job + 2. Bind: databricks bundle deployment bind my_job 123 + 3. Deploy: databricks bundle deploy + + One-step workflow (automatic bind): + 1. Generate and bind: databricks bundle generate job --existing-job-id 123 --key my_job --bind + 2. Deploy: databricks bundle deploy + +Use --key to specify the resource name in your bundle configuration. 
+Use --bind to automatically bind the generated resource to the existing workspace resource.`, } cmd.AddCommand(generate.NewGenerateJobCommand()) @@ -32,5 +41,6 @@ Use --key to specify the resource name in your bundle configuration.`, cmd.AddCommand(generate.NewGenerateDashboardCommand()) cmd.AddCommand(generate.NewGenerateAppCommand()) cmd.PersistentFlags().StringVar(&key, "key", "", `resource key to use for the generated configuration`) + cmd.PersistentFlags().BoolVar(&bind, "bind", false, `automatically bind the generated resource to the existing resource`) return cmd } diff --git a/cmd/bundle/generate/app.go b/cmd/bundle/generate/app.go index df2fa70785..b231c8d54d 100644 --- a/cmd/bundle/generate/app.go +++ b/cmd/bundle/generate/app.go @@ -5,6 +5,7 @@ import ( "path/filepath" "github.com/databricks/cli/bundle/generate" + "github.com/databricks/cli/cmd/bundle/deployment" "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/dyn" @@ -37,6 +38,9 @@ Examples: databricks bundle generate app --existing-app-name data-viewer \ --key data_app --config-dir resources --source-dir src/apps + # Generate and automatically bind to the existing app + databricks bundle generate app --existing-app-name my-app --key analytics_app --bind + What gets generated: - App configuration YAML file with app settings and dependencies - App source files downloaded to the specified source directory @@ -120,6 +124,16 @@ per target environment.`, } cmdio.LogString(ctx, "App configuration successfully saved to "+filename) + + // If --bind flag is set, automatically bind the generated resource + bind, err := cmd.Flags().GetBool("bind") + if err != nil { + return err + } + if bind { + return deployment.BindResource(cmd, appKey, app.Name, true, false) + } + return nil } diff --git a/cmd/bundle/generate/job.go b/cmd/bundle/generate/job.go index 12b984232e..f760deecd5 100644 --- a/cmd/bundle/generate/job.go +++ b/cmd/bundle/generate/job.go @@ 
-6,8 +6,10 @@ import ( "io/fs" "os" "path/filepath" + "strconv" "github.com/databricks/cli/bundle/generate" + "github.com/databricks/cli/cmd/bundle/deployment" "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/dyn" @@ -41,6 +43,9 @@ Examples: databricks bundle generate job --existing-job-id 67890 \ --key data_pipeline --config-dir resources --source-dir src + # Generate and automatically bind to the existing job + databricks bundle generate job --existing-job-id 12345 --key my_etl_job --bind + What gets generated: - Job configuration YAML file in the resources directory - Any associated notebook or Python files in the source directory @@ -138,6 +143,16 @@ After generation, you can deploy this job to other targets using: } cmdio.LogString(ctx, "Job configuration successfully saved to "+filename) + + // If --bind flag is set, automatically bind the generated resource + bind, err := cmd.Flags().GetBool("bind") + if err != nil { + return err + } + if bind { + return deployment.BindResource(cmd, jobKey, strconv.FormatInt(jobId, 10), true, false) + } + return nil } diff --git a/cmd/bundle/generate/pipeline.go b/cmd/bundle/generate/pipeline.go index f8ed9b2ba0..e0951f147b 100644 --- a/cmd/bundle/generate/pipeline.go +++ b/cmd/bundle/generate/pipeline.go @@ -8,6 +8,7 @@ import ( "path/filepath" "github.com/databricks/cli/bundle/generate" + "github.com/databricks/cli/cmd/bundle/deployment" "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/dyn" @@ -42,6 +43,9 @@ Examples: databricks bundle generate pipeline --existing-pipeline-id def456 \ --key data_transformation --config-dir resources --source-dir src + # Generate and automatically bind to the existing pipeline + databricks bundle generate pipeline --existing-pipeline-id abc123 --key etl_pipeline --bind + What gets generated: - Pipeline configuration YAML file with settings and libraries - Pipeline notebooks 
downloaded to the source directory @@ -143,6 +147,16 @@ like catalogs, schemas, and compute configurations per target.`, } cmdio.LogString(ctx, "Pipeline configuration successfully saved to "+filename) + + // If --bind flag is set, automatically bind the generated resource + bind, err := cmd.Flags().GetBool("bind") + if err != nil { + return err + } + if bind { + return deployment.BindResource(cmd, pipelineKey, pipelineId, true, false) + } + return nil } From 184d63ad0d368554c8e54f9594755c8c85dda037 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Fri, 17 Oct 2025 18:15:42 +0200 Subject: [PATCH 02/10] fix output + changelog --- NEXT_CHANGELOG.md | 1 + .../help/bundle-generate-dashboard/output.txt | 1 + .../bundle/help/bundle-generate-job/output.txt | 4 ++++ .../help/bundle-generate-pipeline/output.txt | 4 ++++ .../bundle/help/bundle-generate/output.txt | 17 +++++++++++++---- 5 files changed, 23 insertions(+), 4 deletions(-) diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index ea1e1b29da..019cb6b523 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -9,5 +9,6 @@ ### Dependency updates ### Bundles +* Added support for --bind flag in `bundle generate` ([#3782](https://github.com/databricks/cli/pull/3782)) ### API Changes diff --git a/acceptance/bundle/help/bundle-generate-dashboard/output.txt b/acceptance/bundle/help/bundle-generate-dashboard/output.txt index 41ed2d6c02..6be72fc56e 100644 --- a/acceptance/bundle/help/bundle-generate-dashboard/output.txt +++ b/acceptance/bundle/help/bundle-generate-dashboard/output.txt @@ -46,6 +46,7 @@ Flags: --watch watch for changes to the dashboard and update the configuration Global Flags: + --bind automatically bind the generated resource to the existing resource --debug enable debug logging --key string resource key to use for the generated configuration -o, --output type output type: text or json (default text) diff --git a/acceptance/bundle/help/bundle-generate-job/output.txt 
b/acceptance/bundle/help/bundle-generate-job/output.txt index feadcb0713..5b6c6c8b52 100644 --- a/acceptance/bundle/help/bundle-generate-job/output.txt +++ b/acceptance/bundle/help/bundle-generate-job/output.txt @@ -13,6 +13,9 @@ Examples: databricks bundle generate job --existing-job-id 67890 \ --key data_pipeline --config-dir resources --source-dir src + # Generate and automatically bind to the existing job + databricks bundle generate job --existing-job-id 12345 --key my_etl_job --bind + What gets generated: - Job configuration YAML file in the resources directory - Any associated notebook or Python files in the source directory @@ -32,6 +35,7 @@ Flags: -s, --source-dir string Dir path where the downloaded files will be stored (default "src") Global Flags: + --bind automatically bind the generated resource to the existing resource --debug enable debug logging --key string resource key to use for the generated configuration -o, --output type output type: text or json (default text) diff --git a/acceptance/bundle/help/bundle-generate-pipeline/output.txt b/acceptance/bundle/help/bundle-generate-pipeline/output.txt index f05ce2852d..74b4790549 100644 --- a/acceptance/bundle/help/bundle-generate-pipeline/output.txt +++ b/acceptance/bundle/help/bundle-generate-pipeline/output.txt @@ -14,6 +14,9 @@ Examples: databricks bundle generate pipeline --existing-pipeline-id def456 \ --key data_transformation --config-dir resources --source-dir src + # Generate and automatically bind to the existing pipeline + databricks bundle generate pipeline --existing-pipeline-id abc123 --key etl_pipeline --bind + What gets generated: - Pipeline configuration YAML file with settings and libraries - Pipeline notebooks downloaded to the source directory @@ -32,6 +35,7 @@ Flags: -s, --source-dir string Dir path where the downloaded files will be stored (default "src") Global Flags: + --bind automatically bind the generated resource to the existing resource --debug enable debug logging --key 
string resource key to use for the generated configuration -o, --output type output type: text or json (default text) diff --git a/acceptance/bundle/help/bundle-generate/output.txt b/acceptance/bundle/help/bundle-generate/output.txt index 13f1318184..253674327d 100644 --- a/acceptance/bundle/help/bundle-generate/output.txt +++ b/acceptance/bundle/help/bundle-generate/output.txt @@ -4,16 +4,24 @@ Generate bundle configuration from existing Databricks resources. Common patterns: databricks bundle generate job --existing-job-id 123 --key my_job + databricks bundle generate pipeline --existing-pipeline-id abc123 --key etl_pipeline databricks bundle generate dashboard --existing-path /my-dashboard --key sales_dash databricks bundle generate dashboard --resource my_dashboard --watch --force # Keep local copy in sync. Useful for development. databricks bundle generate dashboard --resource my_dashboard --force # Do a one-time sync. -Complete migration workflow: - 1. Generate: databricks bundle generate job --existing-job-id 123 --key my_job - 2. Bind: databricks bundle deployment bind my_job 123 - 3. Deploy: databricks bundle deploy +Migration workflows: + + Two-step workflow (manual bind): + 1. Generate: databricks bundle generate job --existing-job-id 123 --key my_job + 2. Bind: databricks bundle deployment bind my_job 123 + 3. Deploy: databricks bundle deploy + + One-step workflow (automatic bind): + 1. Generate and bind: databricks bundle generate job --existing-job-id 123 --key my_job --bind + 2. Deploy: databricks bundle deploy Use --key to specify the resource name in your bundle configuration. +Use --bind to automatically bind the generated resource to the existing workspace resource. 
Usage: databricks bundle generate [command] @@ -25,6 +33,7 @@ Available Commands: pipeline Generate bundle configuration for a pipeline Flags: + --bind automatically bind the generated resource to the existing resource -h, --help help for generate --key string resource key to use for the generated configuration From 107af64875cbaf4622f8d67de7ef4ad507a9a383 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Fri, 17 Oct 2025 18:35:15 +0200 Subject: [PATCH 03/10] update output --- acceptance/bundle/deployment/bind/alert/output.txt | 3 ++- acceptance/bundle/deployment/bind/cluster/output.txt | 3 ++- acceptance/bundle/deployment/bind/dashboard/output.txt | 3 ++- .../bundle/deployment/bind/dashboard/recreation/output.txt | 3 ++- acceptance/bundle/deployment/bind/database_instance/output.txt | 3 ++- acceptance/bundle/deployment/bind/experiment/output.txt | 3 ++- .../bundle/deployment/bind/job/generate-and-bind/output.txt | 3 ++- .../deployment/bind/job/job-spark-python-task/output.txt | 3 ++- acceptance/bundle/deployment/bind/job/noop-job/output.txt | 3 ++- acceptance/bundle/deployment/bind/job/python-job/output.txt | 3 ++- .../bundle/deployment/bind/model-serving-endpoint/output.txt | 3 ++- acceptance/bundle/deployment/bind/quality-monitor/output.txt | 3 ++- acceptance/bundle/deployment/bind/registered-model/output.txt | 3 ++- acceptance/bundle/deployment/bind/schema/output.txt | 3 ++- acceptance/bundle/deployment/bind/secret-scope/output.txt | 3 ++- acceptance/bundle/deployment/bind/sql_warehouse/output.txt | 3 ++- acceptance/bundle/deployment/bind/volume/output.txt | 3 ++- 17 files changed, 34 insertions(+), 17 deletions(-) diff --git a/acceptance/bundle/deployment/bind/alert/output.txt b/acceptance/bundle/deployment/bind/alert/output.txt index e6f74a8d88..d37176b247 100644 --- a/acceptance/bundle/deployment/bind/alert/output.txt +++ b/acceptance/bundle/deployment/bind/alert/output.txt @@ -3,7 +3,8 @@ >>> [CLI] bundle deployment bind my_alert [UUID] --auto-approve 
Updating deployment state... -Successfully bound alert with an id '[UUID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound alert with an id '[UUID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle summary Name: test-bundle-$UNIQUE_NAME diff --git a/acceptance/bundle/deployment/bind/cluster/output.txt b/acceptance/bundle/deployment/bind/cluster/output.txt index 9572504bc9..aeb195f0f1 100644 --- a/acceptance/bundle/deployment/bind/cluster/output.txt +++ b/acceptance/bundle/deployment/bind/cluster/output.txt @@ -6,7 +6,8 @@ >>> [CLI] bundle deployment bind cluster1 [CLUSTER-ID] --auto-approve Updating deployment state... -Successfully bound cluster with an id '[CLUSTER-ID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound cluster with an id '[CLUSTER-ID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle deployment unbind cluster1 Updating deployment state... diff --git a/acceptance/bundle/deployment/bind/dashboard/output.txt b/acceptance/bundle/deployment/bind/dashboard/output.txt index 0ccd387124..3926b8f1f9 100644 --- a/acceptance/bundle/deployment/bind/dashboard/output.txt +++ b/acceptance/bundle/deployment/bind/dashboard/output.txt @@ -1,7 +1,8 @@ >>> [CLI] bundle deployment bind dashboard1 [DASHBOARD_ID] --auto-approve Updating deployment state... -Successfully bound dashboard with an id '[DASHBOARD_ID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound dashboard with an id '[DASHBOARD_ID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default/files... 
diff --git a/acceptance/bundle/deployment/bind/dashboard/recreation/output.txt b/acceptance/bundle/deployment/bind/dashboard/recreation/output.txt index 5b58737491..7f2651fc6c 100644 --- a/acceptance/bundle/deployment/bind/dashboard/recreation/output.txt +++ b/acceptance/bundle/deployment/bind/dashboard/recreation/output.txt @@ -1,7 +1,8 @@ >>> [CLI] bundle deployment bind dashboard1 [DASHBOARD_ID] --auto-approve Updating deployment state... -Successfully bound dashboard with an id '[DASHBOARD_ID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound dashboard with an id '[DASHBOARD_ID]' +Run 'bundle deploy' to deploy changes to your workspace >>> errcode [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default/files... diff --git a/acceptance/bundle/deployment/bind/database_instance/output.txt b/acceptance/bundle/deployment/bind/database_instance/output.txt index 7b3477b4d9..71309cdc88 100644 --- a/acceptance/bundle/deployment/bind/database_instance/output.txt +++ b/acceptance/bundle/deployment/bind/database_instance/output.txt @@ -1,7 +1,8 @@ >>> [CLI] bundle deployment bind database_instance1 [UUID] --auto-approve Updating deployment state... -Successfully bound database_instance with an id '[UUID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound database_instance with an id '[UUID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle summary Name: test-bundle-$UNIQUE_NAME diff --git a/acceptance/bundle/deployment/bind/experiment/output.txt b/acceptance/bundle/deployment/bind/experiment/output.txt index ac57ce8b85..b783247880 100644 --- a/acceptance/bundle/deployment/bind/experiment/output.txt +++ b/acceptance/bundle/deployment/bind/experiment/output.txt @@ -3,7 +3,8 @@ === Substitute variables in the template === Create a pre-defined experiment === Bind experiment: Updating deployment state... 
-Successfully bound experiment with an id '[NUMID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound experiment with an id '[NUMID]' +Run 'bundle deploy' to deploy changes to your workspace === Deploy bundle: Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default/files... Deploying resources... diff --git a/acceptance/bundle/deployment/bind/job/generate-and-bind/output.txt b/acceptance/bundle/deployment/bind/job/generate-and-bind/output.txt index e8a39ae2d1..a9b3e047cf 100644 --- a/acceptance/bundle/deployment/bind/job/generate-and-bind/output.txt +++ b/acceptance/bundle/deployment/bind/job/generate-and-bind/output.txt @@ -18,7 +18,8 @@ test.py >>> [CLI] bundle deployment bind test_job_key [NUMID] --auto-approve Updating deployment state... -Successfully bound job with an id '[NUMID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound job with an id '[NUMID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle-generate-bind-[UNIQUE_NAME]/files... diff --git a/acceptance/bundle/deployment/bind/job/job-spark-python-task/output.txt b/acceptance/bundle/deployment/bind/job/job-spark-python-task/output.txt index 421e0e25de..3ea190bbf8 100644 --- a/acceptance/bundle/deployment/bind/job/job-spark-python-task/output.txt +++ b/acceptance/bundle/deployment/bind/job/job-spark-python-task/output.txt @@ -5,7 +5,8 @@ Created job with ID: [NUMID] === Bind job: >>> [CLI] bundle deployment bind foo [NUMID] --auto-approve Updating deployment state... -Successfully bound job with an id '[NUMID]'. 
Run 'bundle deploy' to deploy changes to your workspace +Successfully bound job with an id '[NUMID]' +Run 'bundle deploy' to deploy changes to your workspace === Remove .databricks directory to simulate fresh deployment: >>> rm -rf .databricks diff --git a/acceptance/bundle/deployment/bind/job/noop-job/output.txt b/acceptance/bundle/deployment/bind/job/noop-job/output.txt index bb859490ef..2f4cf39310 100644 --- a/acceptance/bundle/deployment/bind/job/noop-job/output.txt +++ b/acceptance/bundle/deployment/bind/job/noop-job/output.txt @@ -3,7 +3,8 @@ >>> [CLI] bundle deployment bind job_1 [NUMID] --auto-approve Updating deployment state... -Successfully bound job with an id '[NUMID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound job with an id '[NUMID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/my_project/default/files... diff --git a/acceptance/bundle/deployment/bind/job/python-job/output.txt b/acceptance/bundle/deployment/bind/job/python-job/output.txt index e5f4483ed4..285ccf9030 100644 --- a/acceptance/bundle/deployment/bind/job/python-job/output.txt +++ b/acceptance/bundle/deployment/bind/job/python-job/output.txt @@ -3,7 +3,8 @@ >>> uv run --with [DATABRICKS_BUNDLES_WHEEL] -q [CLI] bundle deployment bind job_1 [NUMID] --auto-approve Updating deployment state... -Successfully bound job with an id '[NUMID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound job with an id '[NUMID]' +Run 'bundle deploy' to deploy changes to your workspace >>> uv run --with [DATABRICKS_BUNDLES_WHEEL] -q [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/my_project/default/files... 
diff --git a/acceptance/bundle/deployment/bind/model-serving-endpoint/output.txt b/acceptance/bundle/deployment/bind/model-serving-endpoint/output.txt index 68bad006e6..2516ab002f 100644 --- a/acceptance/bundle/deployment/bind/model-serving-endpoint/output.txt +++ b/acceptance/bundle/deployment/bind/model-serving-endpoint/output.txt @@ -29,7 +29,8 @@ resources: >>> [CLI] bundle deployment bind endpoint1 test-endpoint-[UUID] Updating deployment state... -Successfully bound model_serving_endpoint with an id 'test-endpoint-[UUID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound model_serving_endpoint with an id 'test-endpoint-[UUID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default/files... diff --git a/acceptance/bundle/deployment/bind/quality-monitor/output.txt b/acceptance/bundle/deployment/bind/quality-monitor/output.txt index e6551c31c8..e45b0350b5 100644 --- a/acceptance/bundle/deployment/bind/quality-monitor/output.txt +++ b/acceptance/bundle/deployment/bind/quality-monitor/output.txt @@ -13,7 +13,8 @@ >>> [CLI] bundle deployment bind monitor1 catalog.schema.table Updating deployment state... -Successfully bound quality_monitor with an id 'catalog.schema.table'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound quality_monitor with an id 'catalog.schema.table' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/bind-quality-monitor-test-localonly/default/files... 
diff --git a/acceptance/bundle/deployment/bind/registered-model/output.txt b/acceptance/bundle/deployment/bind/registered-model/output.txt index 4d8c1e7482..fb9dd1c56d 100644 --- a/acceptance/bundle/deployment/bind/registered-model/output.txt +++ b/acceptance/bundle/deployment/bind/registered-model/output.txt @@ -15,7 +15,8 @@ resources: >>> [CLI] bundle deployment bind model1 main.test-schema-rmodel-[UUID].test-registered-model-[UUID] Updating deployment state... -Successfully bound registered_model with an id 'main.test-schema-rmodel-[UUID].test-registered-model-[UUID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound registered_model with an id 'main.test-schema-rmodel-[UUID].test-registered-model-[UUID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default/files... diff --git a/acceptance/bundle/deployment/bind/schema/output.txt b/acceptance/bundle/deployment/bind/schema/output.txt index 4023996156..2e6976fb58 100644 --- a/acceptance/bundle/deployment/bind/schema/output.txt +++ b/acceptance/bundle/deployment/bind/schema/output.txt @@ -7,7 +7,8 @@ } === Bind schema: Updating deployment state... -Successfully bound schema with an id 'main.test-schema-[UUID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound schema with an id 'main.test-schema-[UUID]' +Run 'bundle deploy' to deploy changes to your workspace === Deploy bundle: Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default/files... Deploying resources... 
diff --git a/acceptance/bundle/deployment/bind/secret-scope/output.txt b/acceptance/bundle/deployment/bind/secret-scope/output.txt index 53f66c3c8c..2a0433bb17 100644 --- a/acceptance/bundle/deployment/bind/secret-scope/output.txt +++ b/acceptance/bundle/deployment/bind/secret-scope/output.txt @@ -3,7 +3,8 @@ >>> [CLI] bundle deployment bind secret_scope1 test-secret-scope-[UUID] --auto-approve Updating deployment state... -Successfully bound secret_scope with an id 'test-secret-scope-[UUID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound secret_scope with an id 'test-secret-scope-[UUID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/bind-secret-scope-test-[UNIQUE_NAME]/default/files... diff --git a/acceptance/bundle/deployment/bind/sql_warehouse/output.txt b/acceptance/bundle/deployment/bind/sql_warehouse/output.txt index aedea6082b..67a882e2c1 100644 --- a/acceptance/bundle/deployment/bind/sql_warehouse/output.txt +++ b/acceptance/bundle/deployment/bind/sql_warehouse/output.txt @@ -1,7 +1,8 @@ >>> [CLI] bundle deployment bind sql_warehouse1 [SQL-WAREHOUSE-ID] --auto-approve Updating deployment state... -Successfully bound sql_warehouse with an id '[SQL-WAREHOUSE-ID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound sql_warehouse with an id '[SQL-WAREHOUSE-ID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle summary Name: test-bundle-$UNIQUE_NAME diff --git a/acceptance/bundle/deployment/bind/volume/output.txt b/acceptance/bundle/deployment/bind/volume/output.txt index b065873604..c7c18e5e33 100644 --- a/acceptance/bundle/deployment/bind/volume/output.txt +++ b/acceptance/bundle/deployment/bind/volume/output.txt @@ -8,7 +8,8 @@ === Create a pre-defined volume: >>> [CLI] bundle deployment bind volume1 main.test-schema-[UUID].volume-[UUID] --auto-approve Updating deployment state... 
-Successfully bound volume with an id 'main.test-schema-[UUID].volume-[UUID]'. Run 'bundle deploy' to deploy changes to your workspace +Successfully bound volume with an id 'main.test-schema-[UUID].volume-[UUID]' +Run 'bundle deploy' to deploy changes to your workspace >>> [CLI] bundle deploy Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle-[UNIQUE_NAME]/default/files... From ad9f57d63c8cd3b66730c22f101956015dc8e89d Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Mon, 27 Oct 2025 09:52:54 +0100 Subject: [PATCH 04/10] updated the output --- .../bundle/generate/auto-bind/output.txt | 24 +++++++++++++++++++ acceptance/bundle/generate/auto-bind/script | 2 +- 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/acceptance/bundle/generate/auto-bind/output.txt b/acceptance/bundle/generate/auto-bind/output.txt index 652ee7d545..d9c93dad60 100644 --- a/acceptance/bundle/generate/auto-bind/output.txt +++ b/acceptance/bundle/generate/auto-bind/output.txt @@ -17,7 +17,31 @@ Successfully bound job with an id '[NUMID]' test.py >>> cat resources/test_job.job.yml +resources: + jobs: + test_job: name: auto-bind-job-[UNIQUE_NAME] + tasks: + - task_key: test + new_cluster: + azure_attributes: + availability: ON_DEMAND_AZURE + enable_elastic_disk: true + node_type_id: [NODE_TYPE_ID] + num_workers: 1 + spark_version: 13.3.x-snapshot-scala2.12 + email_notifications: {} + notebook_task: + notebook_path: ../src/test.py + source: WORKSPACE + run_if: ALL_SUCCESS + timeout_seconds: 0 + email_notifications: {} + max_concurrent_runs: 1 + queue: + enabled: true + timeout_seconds: 0 + webhook_notifications: {} === Deploy the bound job: >>> [CLI] bundle deploy diff --git a/acceptance/bundle/generate/auto-bind/script b/acceptance/bundle/generate/auto-bind/script index 4afef19e48..7fa6c9bdd6 100755 --- a/acceptance/bundle/generate/auto-bind/script +++ b/acceptance/bundle/generate/auto-bind/script @@ -38,7 +38,7 @@ trace $CLI workspace import "${PYTHON_NOTEBOOK}" 
--file test.py --language PYTHO title "Generate and bind in one step:" trace $CLI bundle generate job --key test_job --existing-job-id $JOB_ID --config-dir resources --source-dir src --bind trace ls src/ -trace cat resources/test_job.job.yml | grep "name: auto-bind-job-${UNIQUE_NAME}" +trace cat resources/test_job.job.yml title "Deploy the bound job:" trace $CLI bundle deploy From c7012a2c39516ff07964a0d6d295813bb38e3ad5 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Mon, 27 Oct 2025 13:48:53 +0100 Subject: [PATCH 05/10] make flags mutually exclusive --- cmd/bundle/generate/dashboard.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/cmd/bundle/generate/dashboard.go b/cmd/bundle/generate/dashboard.go index 7480ac145e..eaffeba16e 100644 --- a/cmd/bundle/generate/dashboard.go +++ b/cmd/bundle/generate/dashboard.go @@ -500,6 +500,9 @@ bundle files automatically, useful during active dashboard development.`, cmd.MarkFlagsMutuallyExclusive("watch", "existing-path") cmd.MarkFlagsMutuallyExclusive("watch", "existing-id") + // Make sure the bind flag is only used with the existing-resource flag. + cmd.MarkFlagsMutuallyExclusive("bind", "resource") + // Completion for the resource flag. 
cmd.RegisterFlagCompletionFunc("resource", dashboardResourceCompletion) From 0fc49297bcd7eb759c082584f0ebd26f7fc26691 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Mon, 27 Oct 2025 14:07:27 +0100 Subject: [PATCH 06/10] fix + comment --- cmd/bundle/deployment/bind_resource.go | 1 + cmd/bundle/generate.go | 2 -- cmd/bundle/generate/app.go | 7 ++----- cmd/bundle/generate/dashboard.go | 20 ++++++++++++++++++++ cmd/bundle/generate/job.go | 7 ++----- cmd/bundle/generate/pipeline.go | 7 ++----- 6 files changed, 27 insertions(+), 17 deletions(-) diff --git a/cmd/bundle/deployment/bind_resource.go b/cmd/bundle/deployment/bind_resource.go index be08fb2504..e01333be49 100644 --- a/cmd/bundle/deployment/bind_resource.go +++ b/cmd/bundle/deployment/bind_resource.go @@ -18,6 +18,7 @@ import ( // This function is shared between the bind command and generate commands with --bind flag. func BindResource(cmd *cobra.Command, resourceKey, resourceId string, autoApprove, forceLock bool) error { ctx := cmd.Context() + // Reload the bundle configuration to ensure we're using the latest configuration. 
b := utils.ConfigureBundleWithVariables(cmd) if b == nil || logdiag.HasError(ctx) { return root.ErrAlreadyPrinted diff --git a/cmd/bundle/generate.go b/cmd/bundle/generate.go index ab03d3457d..d282e14b5e 100644 --- a/cmd/bundle/generate.go +++ b/cmd/bundle/generate.go @@ -7,7 +7,6 @@ import ( func newGenerateCommand() *cobra.Command { var key string - var bind bool cmd := &cobra.Command{ Use: "generate", @@ -41,6 +40,5 @@ Use --bind to automatically bind the generated resource to the existing workspac cmd.AddCommand(generate.NewGenerateDashboardCommand()) cmd.AddCommand(generate.NewGenerateAppCommand()) cmd.PersistentFlags().StringVar(&key, "key", "", `resource key to use for the generated configuration`) - cmd.PersistentFlags().BoolVar(&bind, "bind", false, `automatically bind the generated resource to the existing resource`) return cmd } diff --git a/cmd/bundle/generate/app.go b/cmd/bundle/generate/app.go index b231c8d54d..e56578fb11 100644 --- a/cmd/bundle/generate/app.go +++ b/cmd/bundle/generate/app.go @@ -21,6 +21,7 @@ func NewGenerateAppCommand() *cobra.Command { var sourceDir string var appName string var force bool + var bind bool cmd := &cobra.Command{ Use: "app", @@ -57,6 +58,7 @@ per target environment.`, cmd.Flags().StringVarP(&configDir, "config-dir", "d", "resources", `Directory path where the output bundle config will be stored`) cmd.Flags().StringVarP(&sourceDir, "source-dir", "s", "src/app", `Directory path where the app files will be stored`) cmd.Flags().BoolVarP(&force, "force", "f", false, `Force overwrite existing files in the output directory`) + cmd.Flags().BoolVarP(&bind, "bind", "b", false, `automatically bind the generated app config to the existing app`) cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := logdiag.InitContext(cmd.Context()) @@ -125,11 +127,6 @@ per target environment.`, cmdio.LogString(ctx, "App configuration successfully saved to "+filename) - // If --bind flag is set, automatically bind the generated 
resource - bind, err := cmd.Flags().GetBool("bind") - if err != nil { - return err - } if bind { return deployment.BindResource(cmd, appKey, app.Name, true, false) } diff --git a/cmd/bundle/generate/dashboard.go b/cmd/bundle/generate/dashboard.go index eaffeba16e..d09bb90e3f 100644 --- a/cmd/bundle/generate/dashboard.go +++ b/cmd/bundle/generate/dashboard.go @@ -18,7 +18,9 @@ import ( "github.com/databricks/cli/bundle/phases" "github.com/databricks/cli/bundle/resources" "github.com/databricks/cli/bundle/statemgmt" + "github.com/databricks/cli/cmd/bundle/deployment" "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn/yamlsaver" @@ -54,6 +56,12 @@ type dashboard struct { // Relative path from the resource directory to the dashboard directory. relativeDashboardDir string + // Command. + cmd *cobra.Command + + // Automatically bind the generated resource to the existing resource. + bind bool + // Output and error streams. 
out io.Writer err io.Writer @@ -333,6 +341,15 @@ func (d *dashboard) generateForExisting(ctx context.Context, b *bundle.Bundle, d if err != nil { logdiag.LogError(ctx, err) } + + if d.bind { + err = deployment.BindResource(d.cmd, key, dashboardID, true, false) + if err != nil { + logdiag.LogError(ctx, err) + return + } + cmdio.LogString(ctx, fmt.Sprintf("Successfully bound dashboard with an id '%s'", dashboardID)) + } } func (d *dashboard) initialize(ctx context.Context, b *bundle.Bundle) { @@ -486,6 +503,8 @@ bundle files automatically, useful during active dashboard development.`, cmd.Flags().StringVarP(&d.dashboardDir, "dashboard-dir", "s", "src", `directory to write the dashboard representation to`) cmd.Flags().BoolVarP(&d.force, "force", "f", false, `force overwrite existing files in the output directory`) + cmd.Flags().BoolVarP(&d.bind, "bind", "b", false, `automatically bind the generated dashboard config to the existing dashboard`) + // Exactly one of the lookup flags must be provided. 
cmd.MarkFlagsOneRequired( "existing-path", @@ -507,5 +526,6 @@ bundle files automatically, useful during active dashboard development.`, cmd.RegisterFlagCompletionFunc("resource", dashboardResourceCompletion) cmd.RunE = d.RunE + d.cmd = cmd return cmd } diff --git a/cmd/bundle/generate/job.go b/cmd/bundle/generate/job.go index f760deecd5..5663fbbfb1 100644 --- a/cmd/bundle/generate/job.go +++ b/cmd/bundle/generate/job.go @@ -26,6 +26,7 @@ func NewGenerateJobCommand() *cobra.Command { var sourceDir string var jobId int64 var force bool + var bind bool cmd := &cobra.Command{ Use: "job", @@ -61,6 +62,7 @@ After generation, you can deploy this job to other targets using: cmd.Flags().StringVarP(&configDir, "config-dir", "d", "resources", `Dir path where the output config will be stored`) cmd.Flags().StringVarP(&sourceDir, "source-dir", "s", "src", `Dir path where the downloaded files will be stored`) cmd.Flags().BoolVarP(&force, "force", "f", false, `Force overwrite existing files in the output directory`) + cmd.Flags().BoolVarP(&bind, "bind", "b", false, `automatically bind the generated resource to the existing resource`) cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := logdiag.InitContext(cmd.Context()) @@ -144,11 +146,6 @@ After generation, you can deploy this job to other targets using: cmdio.LogString(ctx, "Job configuration successfully saved to "+filename) - // If --bind flag is set, automatically bind the generated resource - bind, err := cmd.Flags().GetBool("bind") - if err != nil { - return err - } if bind { return deployment.BindResource(cmd, jobKey, strconv.FormatInt(jobId, 10), true, false) } diff --git a/cmd/bundle/generate/pipeline.go b/cmd/bundle/generate/pipeline.go index e0951f147b..d52d69b74c 100644 --- a/cmd/bundle/generate/pipeline.go +++ b/cmd/bundle/generate/pipeline.go @@ -25,6 +25,7 @@ func NewGeneratePipelineCommand() *cobra.Command { var sourceDir string var pipelineId string var force bool + var bind bool cmd := 
&cobra.Command{ Use: "pipeline", @@ -60,6 +61,7 @@ like catalogs, schemas, and compute configurations per target.`, cmd.Flags().StringVarP(&configDir, "config-dir", "d", "resources", `Dir path where the output config will be stored`) cmd.Flags().StringVarP(&sourceDir, "source-dir", "s", "src", `Dir path where the downloaded files will be stored`) cmd.Flags().BoolVarP(&force, "force", "f", false, `Force overwrite existing files in the output directory`) + cmd.Flags().BoolVarP(&bind, "bind", "b", false, `automatically bind the generated resource to the existing resource`) cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := logdiag.InitContext(cmd.Context()) @@ -148,11 +150,6 @@ like catalogs, schemas, and compute configurations per target.`, cmdio.LogString(ctx, "Pipeline configuration successfully saved to "+filename) - // If --bind flag is set, automatically bind the generated resource - bind, err := cmd.Flags().GetBool("bind") - if err != nil { - return err - } if bind { return deployment.BindResource(cmd, pipelineKey, pipelineId, true, false) } From 8fa62af2981d0bfa8c4d86635861c3ae9247734a Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Mon, 27 Oct 2025 14:18:33 +0100 Subject: [PATCH 07/10] fix output --- acceptance/bundle/help/bundle-generate-dashboard/output.txt | 2 +- acceptance/bundle/help/bundle-generate-job/output.txt | 2 +- acceptance/bundle/help/bundle-generate-pipeline/output.txt | 2 +- acceptance/bundle/help/bundle-generate/output.txt | 1 - 4 files changed, 3 insertions(+), 4 deletions(-) diff --git a/acceptance/bundle/help/bundle-generate-dashboard/output.txt b/acceptance/bundle/help/bundle-generate-dashboard/output.txt index 6be72fc56e..3f5f4bf1c3 100644 --- a/acceptance/bundle/help/bundle-generate-dashboard/output.txt +++ b/acceptance/bundle/help/bundle-generate-dashboard/output.txt @@ -36,6 +36,7 @@ Usage: databricks bundle generate dashboard [flags] Flags: + -b, --bind automatically bind the generated dashboard config to the
existing dashboard -s, --dashboard-dir string directory to write the dashboard representation to (default "src") --existing-id string ID of the dashboard to generate configuration for --existing-path string workspace path of the dashboard to generate configuration for @@ -46,7 +47,6 @@ Flags: --watch watch for changes to the dashboard and update the configuration Global Flags: - --bind automatically bind the generated resource to the existing resource --debug enable debug logging --key string resource key to use for the generated configuration -o, --output type output type: text or json (default text) diff --git a/acceptance/bundle/help/bundle-generate-job/output.txt b/acceptance/bundle/help/bundle-generate-job/output.txt index 5b6c6c8b52..ba126d29ad 100644 --- a/acceptance/bundle/help/bundle-generate-job/output.txt +++ b/acceptance/bundle/help/bundle-generate-job/output.txt @@ -28,6 +28,7 @@ Usage: databricks bundle generate job [flags] Flags: + -b, --bind automatically bind the generated resource to the existing resource -d, --config-dir string Dir path where the output config will be stored (default "resources") --existing-job-id int Job ID of the job to generate config for -f, --force Force overwrite existing files in the output directory @@ -35,7 +36,6 @@ Flags: -s, --source-dir string Dir path where the downloaded files will be stored (default "src") Global Flags: - --bind automatically bind the generated resource to the existing resource --debug enable debug logging --key string resource key to use for the generated configuration -o, --output type output type: text or json (default text) diff --git a/acceptance/bundle/help/bundle-generate-pipeline/output.txt b/acceptance/bundle/help/bundle-generate-pipeline/output.txt index 74b4790549..927eb2c653 100644 --- a/acceptance/bundle/help/bundle-generate-pipeline/output.txt +++ b/acceptance/bundle/help/bundle-generate-pipeline/output.txt @@ -28,6 +28,7 @@ Usage: databricks bundle generate pipeline [flags] Flags: + 
-b, --bind automatically bind the generated resource to the existing resource -d, --config-dir string Dir path where the output config will be stored (default "resources") --existing-pipeline-id string ID of the pipeline to generate config for -f, --force Force overwrite existing files in the output directory @@ -35,7 +36,6 @@ Flags: -s, --source-dir string Dir path where the downloaded files will be stored (default "src") Global Flags: - --bind automatically bind the generated resource to the existing resource --debug enable debug logging --key string resource key to use for the generated configuration -o, --output type output type: text or json (default text) diff --git a/acceptance/bundle/help/bundle-generate/output.txt b/acceptance/bundle/help/bundle-generate/output.txt index 253674327d..9dedd80b39 100644 --- a/acceptance/bundle/help/bundle-generate/output.txt +++ b/acceptance/bundle/help/bundle-generate/output.txt @@ -33,7 +33,6 @@ Available Commands: pipeline Generate bundle configuration for a pipeline Flags: - --bind automatically bind the generated resource to the existing resource -h, --help help for generate --key string resource key to use for the generated configuration From 0872fb0801c952dbedeb2bc7efde7c20848f4764 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 28 Oct 2025 09:21:50 +0100 Subject: [PATCH 08/10] add bundle.ReloadBundle --- cmd/bundle/deployment/bind_resource.go | 2 +- cmd/bundle/utils/utils.go | 92 ++++++++++++++++++++++++++ 2 files changed, 93 insertions(+), 1 deletion(-) diff --git a/cmd/bundle/deployment/bind_resource.go b/cmd/bundle/deployment/bind_resource.go index e01333be49..0f42a66ff6 100644 --- a/cmd/bundle/deployment/bind_resource.go +++ b/cmd/bundle/deployment/bind_resource.go @@ -19,7 +19,7 @@ import ( func BindResource(cmd *cobra.Command, resourceKey, resourceId string, autoApprove, forceLock bool) error { ctx := cmd.Context() // Reload the bundle configuration to ensure we're using the latest configuration. 
- b := utils.ConfigureBundleWithVariables(cmd) + b := utils.ReloadBundle(cmd) if b == nil || logdiag.HasError(ctx) { return root.ErrAlreadyPrinted } diff --git a/cmd/bundle/utils/utils.go b/cmd/bundle/utils/utils.go index 65decce058..dcc7c1b8eb 100644 --- a/cmd/bundle/utils/utils.go +++ b/cmd/bundle/utils/utils.go @@ -7,6 +7,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/validate" "github.com/databricks/cli/bundle/deployplan" + bundleenv "github.com/databricks/cli/bundle/env" "github.com/databricks/cli/bundle/phases" "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/diag" @@ -25,6 +26,97 @@ func configureVariables(cmd *cobra.Command, b *bundle.Bundle, variables []string }) } +// getTargetFromCmd returns the target name from command flags or environment. +func getTargetFromCmd(cmd *cobra.Command) string { + // Check command line flag first + if flag := cmd.Flag("target"); flag != nil { + if value := flag.Value.String(); value != "" { + return value + } + } + + // Check deprecated environment flag + if flag := cmd.Flag("environment"); flag != nil { + if value := flag.Value.String(); value != "" { + return value + } + } + + // Fall back to environment variable + target, _ := bundleenv.Target(cmd.Context()) + return target +} + +// ReloadBundle reloads the bundle configuration without modifying the command context. +// This is useful when you need to refresh the bundle configuration after changes +// without side effects like setting values on the context. 
+func ReloadBundle(cmd *cobra.Command) *bundle.Bundle { + ctx := cmd.Context() + + // Load the bundle configuration fresh from the filesystem + b := bundle.MustLoad(ctx) + if b == nil || logdiag.HasError(ctx) { + return b + } + + // Load the target configuration + if target := getTargetFromCmd(cmd); target == "" { + phases.LoadDefaultTarget(ctx, b) + } else { + phases.LoadNamedTarget(ctx, b, target) + } + + if logdiag.HasError(ctx) { + return b + } + + // Configure the workspace profile if provided + configureProfile(cmd, b) + + // Configure variables if provided + variables, err := cmd.Flags().GetStringSlice("var") + if err != nil { + logdiag.LogDiag(ctx, diag.FromErr(err)[0]) + return b + } + configureVariables(cmd, b, variables) + + // Set DirectDeployment flag based on environment + engine, err := deploymentEngine(ctx) + if err != nil { + logdiag.LogError(ctx, err) + return b + } + b.DirectDeployment = engine == "direct-exp" + + return b +} + +// configureProfile applies the profile flag to the bundle. +func configureProfile(cmd *cobra.Command, b *bundle.Bundle) { + profile := getProfileFromCmd(cmd) + if profile == "" { + return + } + + bundle.ApplyFuncContext(cmd.Context(), b, func(ctx context.Context, b *bundle.Bundle) { + b.Config.Workspace.Profile = profile + }) +} + +// getProfileFromCmd returns the profile from command flags or environment. 
+func getProfileFromCmd(cmd *cobra.Command) string { + // Check command line flag first + if flag := cmd.Flag("profile"); flag != nil { + if value := flag.Value.String(); value != "" { + return value + } + } + + // Fall back to environment variable + return env.Get(cmd.Context(), "DATABRICKS_CONFIG_PROFILE") +} + func ConfigureBundleWithVariables(cmd *cobra.Command) *bundle.Bundle { // Load bundle config and apply target b := root.MustConfigureBundle(cmd) From a744d45582efe5fc691d00db706ffcfed4ae36c9 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 28 Oct 2025 11:20:56 +0100 Subject: [PATCH 09/10] port fix --- cmd/bundle/deployment/bind_resource.go | 1 + 1 file changed, 1 insertion(+) diff --git a/cmd/bundle/deployment/bind_resource.go b/cmd/bundle/deployment/bind_resource.go index 0f42a66ff6..96f9236cb3 100644 --- a/cmd/bundle/deployment/bind_resource.go +++ b/cmd/bundle/deployment/bind_resource.go @@ -24,6 +24,7 @@ func BindResource(cmd *cobra.Command, resourceKey, resourceId string, autoApprov return root.ErrAlreadyPrinted } + ctx = cmd.Context() phases.Initialize(ctx, b) if logdiag.HasError(ctx) { return root.ErrAlreadyPrinted From 70c693b3068f7d80be709cc622731c4d0a9fd6b6 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 28 Oct 2025 12:50:21 +0100 Subject: [PATCH 10/10] fix --- .../bundle/generate/auto-bind/output.txt | 24 ------------------- acceptance/bundle/generate/auto-bind/script | 3 ++- 2 files changed, 2 insertions(+), 25 deletions(-) diff --git a/acceptance/bundle/generate/auto-bind/output.txt b/acceptance/bundle/generate/auto-bind/output.txt index d9c93dad60..652ee7d545 100644 --- a/acceptance/bundle/generate/auto-bind/output.txt +++ b/acceptance/bundle/generate/auto-bind/output.txt @@ -17,31 +17,7 @@ Successfully bound job with an id '[NUMID]' test.py >>> cat resources/test_job.job.yml -resources: - jobs: - test_job: name: auto-bind-job-[UNIQUE_NAME] - tasks: - - task_key: test - new_cluster: - azure_attributes: - availability: 
ON_DEMAND_AZURE - enable_elastic_disk: true - node_type_id: [NODE_TYPE_ID] - num_workers: 1 - spark_version: 13.3.x-snapshot-scala2.12 - email_notifications: {} - notebook_task: - notebook_path: ../src/test.py - source: WORKSPACE - run_if: ALL_SUCCESS - timeout_seconds: 0 - email_notifications: {} - max_concurrent_runs: 1 - queue: - enabled: true - timeout_seconds: 0 - webhook_notifications: {} === Deploy the bound job: >>> [CLI] bundle deploy diff --git a/acceptance/bundle/generate/auto-bind/script b/acceptance/bundle/generate/auto-bind/script index 7fa6c9bdd6..f6517a8adb 100755 --- a/acceptance/bundle/generate/auto-bind/script +++ b/acceptance/bundle/generate/auto-bind/script @@ -38,7 +38,8 @@ trace $CLI workspace import "${PYTHON_NOTEBOOK}" --file test.py --language PYTHO title "Generate and bind in one step:" trace $CLI bundle generate job --key test_job --existing-job-id $JOB_ID --config-dir resources --source-dir src --bind trace ls src/ -trace cat resources/test_job.job.yml +# The output of the job is different per cloud so we only check the name. +trace cat resources/test_job.job.yml | grep "name: auto-bind-job-${UNIQUE_NAME}" title "Deploy the bound job:" trace $CLI bundle deploy