diff --git a/docs/guides/experimental-exporter.md b/docs/guides/experimental-exporter.md
index f1e73521d3..29ef7b349e 100644
--- a/docs/guides/experimental-exporter.md
+++ b/docs/guides/experimental-exporter.md
@@ -131,7 +131,7 @@ Services are just logical groups of resources used for filtering and organizatio
 * `sql-queries` - **listing** [databricks_sql_query](../resources/sql_query.md).
 * `storage` - only [databricks_dbfs_file](../resources/dbfs_file.md) and [databricks_file](../resources/file.md) referenced in other resources (libraries, init scripts, ...) will be downloaded locally and properly arranged into terraform state.
 * `uc-artifact-allowlist` - **listing** exports [databricks_artifact_allowlist](../resources/artifact_allowlist.md) resources for Unity Catalog Allow Lists attached to the current metastore.
-* `uc-catalogs` - **listing** [databricks_catalog](../resources/catalog.md) and [databricks_catalog_workspace_binding](../resources/catalog_workspace_binding.md)
+* `uc-catalogs` - **listing** [databricks_catalog](../resources/catalog.md) and [databricks_workspace_binding](../resources/workspace_binding.md)
 * `uc-connections` - **listing** [databricks_connection](../resources/connection.md). *Please note that because API doesn't return sensitive fields, such as, passwords, tokens, ..., the generated `options` block could be incomplete!*
 * `uc-external-locations` - **listing** exports [databricks_external_location](../resources/external_location.md) resource.
 * `uc-grants` - [databricks_grants](../resources/grants.md). *Please note that during export the list of grants is expanded to include the identity that does the export! This is done to allow to creation of objects in case when catalogs/schemas have different owners than the current identity.*.
@@ -224,6 +224,7 @@ Exporter aims to generate HCL code for most of the resources within the Databric
 | [databricks_user_instance_profile](../resources/user_instance_profile.md) | No | No | No | No |
 | [databricks_user_role](../resources/user_role.md) | Yes | No | Yes | Yes |
 | [databricks_volume](../resources/volume.md) | Yes | Yes | Yes | No |
+| [databricks_workspace_binding](../resources/workspace_binding.md) | Yes | No | Yes | No |
 | [databricks_workspace_conf](../resources/workspace_conf.md) | Yes (partial) | No | Yes | No |
 | [databricks_workspace_file](../resources/workspace_file.md) | Yes | Yes | Yes | No |
 
diff --git a/exporter/importables.go b/exporter/importables.go
index 7c08a41eca..6fc34654e5 100644
--- a/exporter/importables.go
+++ b/exporter/importables.go
@@ -2413,37 +2413,7 @@ var resourcesMap map[string]importable = map[string]importable{
                 }
             }
             if cat.IsolationMode == "ISOLATED" {
-                securable := "catalog"
-                bindings, err := ic.workspaceClient.WorkspaceBindings.GetBindings(ic.Context, catalog.GetBindingsRequest{
-                    SecurableName: cat.Name,
-                    SecurableType: securable,
-                })
-                if err == nil {
-                    for _, binding := range bindings.Bindings {
-                        id := fmt.Sprintf("%d|%s|%s", binding.WorkspaceId, securable, cat.Name)
-                        // We were creating Data instance explicitly because of the bug in the databricks_catalog_workspace_binding
-                        // implementation. Technically, after the fix is merged we can remove this, but we're keeping it as-is now
-                        // to decrease a number of API calls.
-                        d := ic.Resources["databricks_catalog_workspace_binding"].Data(
-                            &terraform.InstanceState{
-                                ID: id,
-                                Attributes: map[string]string{
-                                    "workspace_id":   fmt.Sprintf("%d", binding.WorkspaceId),
-                                    "securable_type": securable,
-                                    "securable_name": cat.Name,
-                                    "binding_type":   binding.BindingType.String(),
-                                },
-                            })
-                        ic.Emit(&resource{
-                            Resource: "databricks_catalog_workspace_binding",
-                            ID:       id,
-                            Name:     fmt.Sprintf("%s_%s_ws_%d", securable, cat.Name, binding.WorkspaceId),
-                            Data:     d,
-                        })
-                    }
-                } else {
-                    log.Printf("[ERROR] listing catalog bindings: %s", err.Error())
-                }
+                ic.emitWorkspaceBindings("catalog", cat.Name)
             }
             return nil
         },
@@ -2684,6 +2654,12 @@ var resourcesMap map[string]importable = map[string]importable{
         Service:        "uc-storage-credentials",
         Import: func(ic *importContext, r *resource) error {
             ic.emitUCGrantsWithOwner("storage_credential/"+r.ID, r)
+            if r.Data != nil {
+                isolationMode := r.Data.Get("isolation_mode").(string)
+                if isolationMode == "ISOLATION_MODE_ISOLATED" {
+                    ic.emitWorkspaceBindings("storage_credential", r.ID)
+                }
+            }
             return nil
         },
         List: func(ic *importContext) error {
@@ -2699,7 +2675,12 @@ var resourcesMap map[string]importable = map[string]importable{
             }
             return nil
         },
-        ShouldOmitField: shouldOmitForUnityCatalog,
+        ShouldOmitField: func(ic *importContext, pathString string, as *schema.Schema, d *schema.ResourceData) bool {
+            if pathString == "isolation_mode" {
+                return d.Get(pathString).(string) != "ISOLATION_MODE_ISOLATED"
+            }
+            return shouldOmitForUnityCatalog(ic, pathString, as, d)
+        },
         Depends: []reference{
             {Path: "azure_service_principal.client_secret", Variable: true},
         },
@@ -2714,6 +2695,12 @@ var resourcesMap map[string]importable = map[string]importable{
                 Resource: "databricks_storage_credential",
                 ID:       credentialName,
             })
+            if r.Data != nil {
+                isolationMode := r.Data.Get("isolation_mode").(string)
+                if isolationMode == "ISOLATION_MODE_ISOLATED" {
+                    ic.emitWorkspaceBindings("external_location", r.ID)
+                }
+            }
             // r.AddDependsOn(&resource{Resource: "databricks_grants", ID: "storage_credential/" + credentialName})
             return nil
         },
@@ -2732,7 +2719,12 @@ var resourcesMap map[string]importable = map[string]importable{
             }
             return nil
         },
-        ShouldOmitField: shouldOmitForUnityCatalog,
+        ShouldOmitField: func(ic *importContext, pathString string, as *schema.Schema, d *schema.ResourceData) bool {
+            if pathString == "isolation_mode" {
+                return d.Get(pathString).(string) != "ISOLATION_MODE_ISOLATED"
+            }
+            return shouldOmitForUnityCatalog(ic, pathString, as, d)
+        },
         // This external location is automatically created when metastore is created with the `storage_root`
         Ignore: func(ic *importContext, r *resource) bool {
             return r.ID == "metastore_default_location"
@@ -2966,11 +2958,22 @@ var resourcesMap map[string]importable = map[string]importable{
             {Path: "metastore_id", Resource: "databricks_metastore"},
         },
     },
-    "databricks_catalog_workspace_binding": {
+    "databricks_workspace_binding": {
         WorkspaceLevel: true,
         Service:        "uc-catalogs",
+        ShouldOmitField: func(ic *importContext, pathString string, as *schema.Schema, d *schema.ResourceData) bool {
+            if pathString == "securable_name" {
+                return d.Get(pathString).(string) == ""
+            }
+            return defaultShouldOmitFieldFunc(ic, pathString, as, d)
+        },
         Depends: []reference{
-            {Path: "securable_name", Resource: "databricks_catalog", Match: "name"},
+            {Path: "securable_name", Resource: "databricks_catalog", Match: "name",
+                IsValidApproximation: isMatchingSecurableTypeAndName, SkipDirectLookup: true},
+            {Path: "securable_name", Resource: "databricks_storage_credential", Match: "name",
+                IsValidApproximation: isMatchingSecurableTypeAndName, SkipDirectLookup: true},
+            {Path: "securable_name", Resource: "databricks_external_location", Match: "name",
+                IsValidApproximation: isMatchingSecurableTypeAndName, SkipDirectLookup: true},
         },
     },
     "databricks_file": {
diff --git a/exporter/importables_test.go b/exporter/importables_test.go
index 116919d3eb..45060c5c69 100644
--- a/exporter/importables_test.go
+++ b/exporter/importables_test.go
@@ -1726,7 +1726,7 @@ func TestImportIsolatedManagedCatalog(t *testing.T) {
         assert.NoError(t, err)
         require.Equal(t, 2, len(ic.testEmits))
         assert.True(t, ic.testEmits["databricks_grants[] (id: catalog/ctest)"])
-        assert.True(t, ic.testEmits["databricks_catalog_workspace_binding[catalog_ctest_ws_1234] (id: 1234|catalog|ctest)"])
+        assert.True(t, ic.testEmits["databricks_workspace_binding[catalog_ctest_ws_1234] (id: 1234|catalog|ctest)"])
     })
 }
 
diff --git a/exporter/util.go b/exporter/util.go
index ab06f99647..c5451d0707 100644
--- a/exporter/util.go
+++ b/exporter/util.go
@@ -23,6 +23,7 @@ import (
     "github.com/databricks/terraform-provider-databricks/storage"
     "github.com/databricks/terraform-provider-databricks/workspace"
 
+    "github.com/databricks/databricks-sdk-go/service/catalog"
     "github.com/databricks/databricks-sdk-go/service/compute"
     "github.com/databricks/databricks-sdk-go/service/iam"
 
@@ -30,6 +31,7 @@ import (
     "github.com/hashicorp/hcl/v2/hclwrite"
 
     "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+    "github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
 )
 
 // Remove this once databricks_pipeline and databricks_job resources are migrated to Go SDK
@@ -1518,3 +1520,43 @@ func (ic *importContext) makeGroupMemberData(id, groupId, memberId string) *sche
     data.Set("member_id", memberId)
     return data
 }
+
+func (ic *importContext) emitWorkspaceBindings(securableType, securableName string) {
+    bindings, err := ic.workspaceClient.WorkspaceBindings.GetBindings(ic.Context, catalog.GetBindingsRequest{
+        SecurableName: securableName,
+        SecurableType: securableType,
+    })
+    if err != nil {
+        log.Printf("[ERROR] listing %s bindings for %s: %s", securableType, securableName, err.Error())
+        return
+    }
+    for _, binding := range bindings.Bindings {
+        id := fmt.Sprintf("%d|%s|%s", binding.WorkspaceId, securableType, securableName)
+        // We were creating Data instance explicitly because of the bug in the databricks_catalog_workspace_binding
+        // implementation. Technically, after the fix is merged we can remove this, but we're keeping it as-is now
+        // to decrease a number of API calls.
+        d := ic.Resources["databricks_workspace_binding"].Data(
+            &terraform.InstanceState{
+                ID: id,
+                Attributes: map[string]string{
+                    "workspace_id":   fmt.Sprintf("%d", binding.WorkspaceId),
+                    "securable_type": securableType,
+                    "securable_name": securableName,
+                    "binding_type":   binding.BindingType.String(),
+                },
+            })
+        ic.Emit(&resource{
+            Resource: "databricks_workspace_binding",
+            ID:       id,
+            Name:     fmt.Sprintf("%s_%s_ws_%d", securableType, securableName, binding.WorkspaceId),
+            Data:     d,
+        })
+    }
+}
+
+func isMatchingSecurableTypeAndName(ic *importContext, res *resource, ra *resourceApproximation, origPath string) bool {
+    res_securable_type := res.Data.Get("securable_type").(string)
+    res_securable_name := res.Data.Get("securable_name").(string)
+    ra_name, _ := ra.Get("name")
+    return ra.Type == ("databricks_"+res_securable_type) && ra_name.(string) == res_securable_name
+}