diff --git a/docs/guides/experimental-exporter.md b/docs/guides/experimental-exporter.md index 80300b1c3f..f1e73521d3 100644 --- a/docs/guides/experimental-exporter.md +++ b/docs/guides/experimental-exporter.md @@ -112,6 +112,7 @@ Services are just logical groups of resources used for filtering and organizatio * `access` - [databricks_permissions](../resources/permissions.md), [databricks_instance_profile](../resources/instance_profile.md), [databricks_ip_access_list](../resources/ip_access_list.md), [databricks_mws_permission_assignment](../resources/mws_permission_assignment.md) and [databricks_access_control_rule_set](../resources/access_control_rule_set.md). * `compute` - **listing** [databricks_cluster](../resources/cluster.md). +* `dashboards` - **listing** [databricks_dashboard](../resources/dashboard.md). * `directories` - **listing** [databricks_directory](../resources/directory.md). *Please note that directories aren't listed when running in the incremental mode! Only directories with updated notebooks will be emitted.* * `dlt` - **listing** [databricks_pipeline](../resources/pipeline.md). * `groups` - **listing** [databricks_group](../data-sources/group.md) with [membership](../resources/group_member.md) and [data access](../resources/group_instance_profile.md). 
@@ -171,6 +172,7 @@ Exporter aims to generate HCL code for most of the resources within the Databric | [databricks_cluster](../resources/cluster.md) | Yes | No | Yes | No | | [databricks_cluster_policy](../resources/cluster_policy.md) | Yes | No | Yes | No | | [databricks_connection](../resources/connection.md) | Yes | Yes | Yes | No | +| [databricks_dashboard](../resources/dashboard.md) | Yes | No | Yes | No | | [databricks_dbfs_file](../resources/dbfs_file.md) | Yes | No | Yes | No | | [databricks_external_location](../resources/external_location.md) | Yes | Yes | Yes | No | | [databricks_file](../resources/file.md) | Yes | No | Yes | No | diff --git a/exporter/exporter_test.go b/exporter/exporter_test.go index 2739286e66..9f1818d83f 100644 --- a/exporter/exporter_test.go +++ b/exporter/exporter_test.go @@ -15,6 +15,7 @@ import ( "github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/service/catalog" "github.com/databricks/databricks-sdk-go/service/compute" + sdk_dashboards "github.com/databricks/databricks-sdk-go/service/dashboards" "github.com/databricks/databricks-sdk-go/service/iam" sdk_jobs "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/ml" @@ -436,6 +437,13 @@ var emptyMetastoreList = qa.HTTPFixture{ ReuseRequest: true, } +var emptyLakeviewList = qa.HTTPFixture{ + Method: "GET", + Resource: "/api/2.0/lakeview/dashboards?page_size=100", + Response: sdk_dashboards.ListDashboardsResponse{}, + ReuseRequest: true, +} + func TestImportingUsersGroupsSecretScopes(t *testing.T) { listSpFixtures := qa.ListServicePrincipalsFixtures([]iam.ServicePrincipal{ { @@ -457,6 +465,7 @@ func TestImportingUsersGroupsSecretScopes(t *testing.T) { qa.HTTPFixturesApply(t, []qa.HTTPFixture{ noCurrentMetastoreAttached, + emptyLakeviewList, emptyMetastoreList, meAdminFixture, emptyRepos, @@ -729,6 +738,7 @@ func TestImportingNoResourcesError(t *testing.T) { }, }, noCurrentMetastoreAttached, + 
emptyLakeviewList, emptyMetastoreList, emptyRepos, emptyExternalLocations, @@ -2623,3 +2633,70 @@ func TestImportingRunJobTask(t *testing.T) { }`)) }) } + +func TestImportingLakeviewDashboards(t *testing.T) { + qa.HTTPFixturesApply(t, + []qa.HTTPFixture{ + { + Method: "GET", + ReuseRequest: true, + Resource: "/api/2.0/preview/scim/v2/Me", + Response: scim.User{ + Groups: []scim.ComplexValue{ + { + Display: "admins", + }, + }, + UserName: "user@domain.com", + }, + }, + noCurrentMetastoreAttached, + { + Method: "GET", + Resource: "/api/2.0/lakeview/dashboards?page_size=100", + Response: sdk_dashboards.ListDashboardsResponse{ + Dashboards: []sdk_dashboards.Dashboard{ + { + DashboardId: "9cb0c8f562624a1f", + DisplayName: "Dashboard1", + }, + }, + }, + ReuseRequest: true, + }, + { + Method: "GET", + Resource: "/api/2.0/lakeview/dashboards/9cb0c8f562624a1f?", + Response: sdk_dashboards.Dashboard{ + DashboardId: "9cb0c8f562624a1f", + DisplayName: "Dashboard1", + ParentPath: "/", + Path: "/Dashboard1.lvdash.json", + SerializedDashboard: `{}`, + WarehouseId: "1234", + }, + }, + }, + func(ctx context.Context, client *common.DatabricksClient) { + tmpDir := fmt.Sprintf("/tmp/tf-%s", qa.RandomName()) + defer os.RemoveAll(tmpDir) + + ic := newImportContext(client) + ic.Directory = tmpDir + ic.enableListing("dashboards") + ic.enableServices("dashboards") + + err := ic.Run() + assert.NoError(t, err) + + content, err := os.ReadFile(tmpDir + "/dashboards.tf") + assert.NoError(t, err) + contentStr := string(content) + assert.True(t, strings.Contains(contentStr, `resource "databricks_dashboard" "dashboard1_9cb0c8f562624a1f"`)) + assert.True(t, strings.Contains(contentStr, `file_path = "${path.module}/dashboards/Dashboard1_9cb0c8f562624a1f.lvdash.json"`)) + content, err = os.ReadFile(tmpDir + "/dashboards/Dashboard1_9cb0c8f562624a1f.lvdash.json") + assert.NoError(t, err) + contentStr = string(content) + assert.Equal(t, `{}`, contentStr) + }) +} diff --git a/exporter/importables.go 
b/exporter/importables.go index e607951f04..6a9e31588e 100644 --- a/exporter/importables.go +++ b/exporter/importables.go @@ -16,6 +16,7 @@ import ( "github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/service/catalog" "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/databricks/databricks-sdk-go/service/dashboards" "github.com/databricks/databricks-sdk-go/service/iam" sdk_jobs "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/ml" @@ -1109,6 +1110,7 @@ var resourcesMap map[string]importable = map[string]importable{ {Path: "sql_alert_id", Resource: "databricks_sql_alert"}, {Path: "sql_dashboard_id", Resource: "databricks_sql_dashboard"}, {Path: "sql_endpoint_id", Resource: "databricks_sql_endpoint"}, + {Path: "dashboard_id", Resource: "databricks_dashboard"}, {Path: "registered_model_id", Resource: "databricks_mlflow_model"}, {Path: "experiment_id", Resource: "databricks_mlflow_experiment"}, {Path: "repo_id", Resource: "databricks_repo"}, @@ -3086,4 +3088,96 @@ var resourcesMap map[string]importable = map[string]importable{ {Resource: "databricks_group", Path: "principal_id"}, }, }, + "databricks_dashboard": { + WorkspaceLevel: true, + Service: "dashboards", + List: func(ic *importContext) error { + dashboards, err := ic.workspaceClient.Lakeview.ListAll(ic.Context, dashboards.ListDashboardsRequest{PageSize: 100}) + if err != nil { + return err + } + for i, d := range dashboards { + if !ic.MatchesName(d.DisplayName) { + continue + } + // TODO: add emit for incremental mode. Use already defined functions for emitting? 
+ ic.Emit(&resource{ + Resource: "databricks_dashboard", + ID: d.DashboardId, + }) + if i%100 == 0 { + log.Printf("[INFO] Processed %d dashboards out of %d", i+1, len(dashboards)) + } + } + return nil + }, + Name: func(ic *importContext, d *schema.ResourceData) string { + s := d.Get("parent_path").(string) + if s != "" { + s = s[1:] + if s != "" { + s = s + "_" + } + } + dname := d.Get("display_name").(string) + if dname != "" { + s = s + dname + } + s = s + "_" + d.Id() + return nameNormalizationRegex.ReplaceAllString(s, "_") + }, + Import: func(ic *importContext, r *resource) error { + path := r.Data.Get("path").(string) + if strings.HasPrefix(path, "/Repos") { + ic.emitRepoByPath(path) + return nil + } + parts := strings.Split(path, "/") + plen := len(parts) + if idx := strings.Index(parts[plen-1], "."); idx != -1 { + parts[plen-1] = parts[plen-1][:idx] + "_" + r.ID + parts[plen-1][idx:] + } else { + parts[plen-1] = parts[plen-1] + "_" + r.ID + } + name := fileNameNormalizationRegex.ReplaceAllString(strings.Join(parts, "/")[1:], "_") + fileName, err := ic.saveFileIn("dashboards", name, []byte(r.Data.Get("serialized_dashboard").(string))) + if err != nil { + return err + } + r.Data.Set("file_path", fileName) + r.Data.Set("serialized_dashboard", "") + + ic.emitPermissionsIfNotIgnored(r, "/dashboards/"+r.ID, + "dashboard_"+ic.Importables["databricks_dashboard"].Name(ic, r.Data)) + parentPath := r.Data.Get("parent_path").(string) + if parentPath != "" && parentPath != "/" { + ic.Emit(&resource{ + Resource: "databricks_directory", + ID: parentPath, + }) + } + warehouseId := r.Data.Get("warehouse_id").(string) + if warehouseId != "" { + ic.Emit(&resource{ + Resource: "databricks_sql_endpoint", + ID: warehouseId, + }) + } + + return nil + }, + ShouldOmitField: func(ic *importContext, pathString string, as *schema.Schema, d *schema.ResourceData) bool { + return pathString == "dashboard_change_detected" || shouldOmitMd5Field(ic, pathString, as, d) + }, + Ignore: func(ic 
*importContext, r *resource) bool { + return strings.HasPrefix(r.Data.Get("path").(string), "/Repos") || strings.HasPrefix(r.Data.Get("parent_path").(string), "/Repos") + }, + Depends: []reference{ + {Path: "file_path", File: true}, + {Path: "warehouse_id", Resource: "databricks_sql_endpoint"}, + {Path: "parent_path", Resource: "databricks_directory"}, + {Path: "parent_path", Resource: "databricks_user", Match: "home"}, + {Path: "parent_path", Resource: "databricks_service_principal"}, + }, + }, }