diff --git a/clusters/resource_cluster.go b/clusters/resource_cluster.go index bc7c08f08d..6595b92a9f 100644 --- a/clusters/resource_cluster.go +++ b/clusters/resource_cluster.go @@ -270,14 +270,31 @@ func FixInstancePoolChangeIfAny(d *schema.ResourceData, cluster any) error { func SetForceSendFieldsForCluster(cluster any, d *schema.ResourceData) error { switch c := cluster.(type) { case *compute.ClusterSpec: + // Used in jobs. if c.Autoscale == nil { c.ForceSendFields = append(c.ForceSendFields, "NumWorkers") } + // Workload type is not relevant in jobs clusters. return nil case *compute.CreateCluster: if c.Autoscale == nil { c.ForceSendFields = append(c.ForceSendFields, "NumWorkers") } + // If workload type is set by the user, the fields within Clients should always be sent. + // These default to true if not set. + if c.WorkloadType != nil { + c.WorkloadType.Clients.ForceSendFields = []string{"Jobs", "Notebooks"} + } + return nil + case *compute.EditCluster: + if c.Autoscale == nil { + c.ForceSendFields = append(c.ForceSendFields, "NumWorkers") + } + // If workload type is set by the user, the fields within Clients should always be sent. + // These default to true if not set. + if c.WorkloadType != nil { + c.WorkloadType.Clients.ForceSendFields = []string{"Jobs", "Notebooks"} + } return nil default: return fmt.Errorf(unsupportedExceptCreateEditClusterSpecErr, cluster, "*", "*", "*") @@ -309,6 +326,17 @@ func (ClusterSpec) CustomizeSchemaResourceSpecific(s *common.CustomizableSchema) return old == new }, }) + s.AddNewField("no_wait", &schema.Schema{ + Type: schema.TypeBool, + Optional: true, + Default: false, + DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { + if old == "" && new == "false" { + return true + } + return old == new + }, + }) s.AddNewField("state", &schema.Schema{ Type: schema.TypeString, Computed: true, @@ -414,11 +442,8 @@ func resourceClusterCreate(ctx context.Context, d *schema.ResourceData, c *commo if err != nil { return err } - clusterInfo, err := clusterWaiter.GetWithTimeout(timeout) - if err != nil { - return err - } - d.SetId(clusterInfo.ClusterId) + + d.SetId(clusterWaiter.ClusterId) d.Set("cluster_id", d.Id()) isPinned, ok := d.GetOk("is_pinned") if ok && isPinned.(bool) { @@ -437,6 +462,20 @@ func resourceClusterCreate(ctx context.Context, d *schema.ResourceData, c *commo }); err != nil { return err } + } + + // If there is a no_wait flag set to true, don't wait for the cluster to be created + noWait, ok := d.GetOk("no_wait") + if ok && noWait.(bool) { + return nil + } + + clusterInfo, err := clusterWaiter.GetWithTimeout(timeout) + if err != nil { + return err + } + + if len(cluster.Libraries) > 0 { _, err := libraries.WaitForLibrariesInstalledSdk(ctx, w, compute.Wait{ ClusterID: d.Id(), IsRunning: clusterInfo.IsRunningOrResizing(), @@ -508,7 +547,7 @@ func resourceClusterRead(ctx context.Context, d *schema.ResourceData, c *common. 
func hasClusterConfigChanged(d *schema.ResourceData) bool { for k := range clusterSchema { // TODO: create a map if we'll add more non-cluster config parameters in the future - if k == "library" || k == "is_pinned" { + if k == "library" || k == "is_pinned" || k == "no_wait" { continue } if d.HasChange(k) { @@ -551,6 +590,7 @@ func resourceClusterUpdate(ctx context.Context, d *schema.ResourceData, c *commo for k := range clusterSchema { if k == "library" || k == "is_pinned" || + k == "no_wait" || k == "num_workers" || k == "autoscale" { continue @@ -603,10 +643,7 @@ func resourceClusterUpdate(ctx context.Context, d *schema.ResourceData, c *commo Autoscale: cluster.Autoscale, }) } else { - if err != nil { - return err - } - cluster.ForceSendFields = []string{"NumWorkers"} + SetForceSendFieldsForCluster(&cluster, d) err = retry.RetryContext(ctx, 15*time.Minute, func() *retry.RetryError { _, err = clusters.Edit(ctx, cluster) diff --git a/clusters/resource_cluster_test.go b/clusters/resource_cluster_test.go index 53c693810a..b5af784be5 100644 --- a/clusters/resource_cluster_test.go +++ b/clusters/resource_cluster_test.go @@ -432,6 +432,161 @@ func TestResourceClusterCreatePhoton(t *testing.T) { assert.Equal(t, "abc", d.Id()) } +func TestResourceClusterCreateNoWait_WithLibraries(t *testing.T) { + d, err := qa.ResourceFixture{ + Fixtures: []qa.HTTPFixture{ + { + Method: "POST", + Resource: "/api/2.1/clusters/create", + ExpectedRequest: compute.ClusterSpec{ + NumWorkers: 100, + SparkVersion: "7.1-scala12", + NodeTypeId: "i3.xlarge", + AutoterminationMinutes: 60, + }, + Response: compute.ClusterDetails{ + ClusterId: "abc", + State: compute.StateUnknown, + }, + }, + { + Method: "GET", + ReuseRequest: true, + Resource: "/api/2.1/clusters/get?cluster_id=abc", + Response: compute.ClusterDetails{ + ClusterId: "abc", + NumWorkers: 100, + SparkVersion: "7.1-scala12", + NodeTypeId: "i3.xlarge", + AutoterminationMinutes: 15, + State: compute.StateUnknown, + }, + }, + { + Method: "POST", + Resource: "/api/2.1/clusters/events", + ExpectedRequest: compute.GetEvents{ + ClusterId: "abc", + Limit: 1, + Order: compute.GetEventsOrderDesc, + EventTypes: []compute.EventType{compute.EventTypePinned, compute.EventTypeUnpinned}, + }, + Response: compute.GetEventsResponse{ + Events: []compute.ClusterEvent{}, + TotalCount: 0, + }, + }, + { + Method: "POST", + Resource: "/api/2.0/libraries/install", + ExpectedRequest: compute.InstallLibraries{ + ClusterId: "abc", + Libraries: []compute.Library{ + { + Pypi: &compute.PythonPyPiLibrary{ + Package: "seaborn==1.2.4", + }, + }, + }, + }, + }, + { + Method: "GET", + Resource: "/api/2.0/libraries/cluster-status?cluster_id=abc", + Response: compute.ClusterLibraryStatuses{ + LibraryStatuses: []compute.LibraryFullStatus{ + { + Library: &compute.Library{ + Pypi: &compute.PythonPyPiLibrary{ + Package: "seaborn==1.2.4", + }, + }, + Status: compute.LibraryInstallStatusPending, + }, + }, + }, + }, + }, + Create: true, + Resource: ResourceCluster(), + HCL: `num_workers = 100 + spark_version = "7.1-scala12" + node_type_id = "i3.xlarge" + no_wait = true + + library { + pypi { + package = "seaborn==1.2.4" + } + }`, + }.Apply(t) + assert.NoError(t, err) + assert.Equal(t, "abc", d.Id()) +} + +func TestResourceClusterCreateNoWait(t *testing.T) { + d, err := qa.ResourceFixture{ + Fixtures: []qa.HTTPFixture{ + { + Method: "POST", + Resource: "/api/2.1/clusters/create", + ExpectedRequest: compute.ClusterSpec{ + NumWorkers: 100, + ClusterName: "Shared Autoscaling", + SparkVersion: "7.1-scala12", + 
NodeTypeId: "i3.xlarge", + AutoterminationMinutes: 15, + }, + Response: compute.ClusterDetails{ + ClusterId: "abc", + State: compute.StateUnknown, + }, + }, + { + Method: "GET", + ReuseRequest: true, + Resource: "/api/2.1/clusters/get?cluster_id=abc", + Response: compute.ClusterDetails{ + ClusterId: "abc", + NumWorkers: 100, + ClusterName: "Shared Autoscaling", + SparkVersion: "7.1-scala12", + NodeTypeId: "i3.xlarge", + AutoterminationMinutes: 15, + State: compute.StateUnknown, + }, + }, + { + Method: "POST", + Resource: "/api/2.1/clusters/events", + ExpectedRequest: compute.GetEvents{ + ClusterId: "abc", + Limit: 1, + Order: compute.GetEventsOrderDesc, + EventTypes: []compute.EventType{compute.EventTypePinned, compute.EventTypeUnpinned}, + }, + Response: compute.GetEventsResponse{ + Events: []compute.ClusterEvent{}, + TotalCount: 0, + }, + }, + }, + Create: true, + Resource: ResourceCluster(), + State: map[string]any{ + "autotermination_minutes": 15, + "cluster_name": "Shared Autoscaling", + "spark_version": "7.1-scala12", + "node_type_id": "i3.xlarge", + "num_workers": 100, + "is_pinned": false, + "no_wait": true, + }, + }.Apply(t) + assert.NoError(t, err) + assert.Equal(t, "abc", d.Id()) +} + func TestResourceClusterCreate_Error(t *testing.T) { d, err := qa.ResourceFixture{ Fixtures: []qa.HTTPFixture{ diff --git a/common/resource.go b/common/resource.go index 79a1264086..77ba894853 100644 --- a/common/resource.go +++ b/common/resource.go @@ -381,30 +381,31 @@ func genericDatabricksData[T, P, C any]( var dummy T var other P otherFields := StructToSchema(other, nil) - s := StructToSchema(dummy, func(m map[string]*schema.Schema) map[string]*schema.Schema { - // For WorkspaceData and AccountData, a single data type is used to represent all of the fields of - // the resource, so its configuration is correct. For the *WithParams methods, the SdkType parameter - // is copied directly from the resource definition, which means that all fields from that type are - // computed and optional, and the fields from OtherFields are overlaid on top of the schema generated - // by SdkType. - if hasOther { - for k := range m { - m[k].Computed = true - m[k].Required = false - m[k].Optional = true - } - for k, v := range otherFields { - m[k] = v - } + + s := StructToSchema(dummy, nil) + // For WorkspaceData and AccountData, a single data type is used to represent all of the fields of + // the resource, so its configuration is correct. For the *WithParams methods, the SdkType parameter + // is copied directly from the resource definition, which means that all fields from that type are + // computed and optional, and the fields from OtherFields are overlaid on top of the schema generated + // by SdkType. + if hasOther { + for k := range s { + s[k].Computed = true + s[k].Required = false + s[k].Optional = true } - // `id` attribute must be marked as computed, otherwise it's not set! - if v, ok := m["id"]; ok { - v.Computed = true - v.Required = false + for k, v := range otherFields { + s[k] = v } - // allow c - return customizeSchemaFunc(m) - }) + } + // `id` attribute must be marked as computed, otherwise it's not set! 
+ if v, ok := s["id"]; ok { + v.Computed = true + v.Required = false + } + // allow c + s = customizeSchemaFunc(s) + return Resource{ Schema: s, Read: func(ctx context.Context, d *schema.ResourceData, client *DatabricksClient) (err error) { diff --git a/docs/resources/automatic_cluster_update_setting.md b/docs/resources/automatic_cluster_update_setting.md index d3a3b77292..0a5ce409d0 100644 --- a/docs/resources/automatic_cluster_update_setting.md +++ b/docs/resources/automatic_cluster_update_setting.md @@ -33,17 +33,16 @@ resource "databricks_automatic_cluster_update_workspace_setting" "this" { The resource supports the following arguments: -* `enabled` - (Required) The configuration details. -* `restart_even_if_no_updates_available` - (Optional) To force clusters and other compute resources to restart during the maintenance window regardless of the availability of a new update. - -A `maintenance_window` block that defines the maintenance frequency with the following arguments - -* A `week_day_based_schedule` block with the following arguments - * `day_of_week` - the day of the week in uppercase, e.g. `MONDAY` or `SUNDAY` - * `frequency` - one of the `FIRST_OF_MONTH`, `SECOND_OF_MONTH`, `THIRD_OF_MONTH`, `FOURTH_OF_MONTH`, `FIRST_AND_THIRD_OF_MONTH`, `SECOND_AND_FOURTH_OF_MONTH`, `EVERY_WEEK`. - * A `window_start_time` block that defines the time of your maintenance window. The default timezone is UTC and cannot be changed. - * `hours` - * `minutes` +- `automatic_cluster_update_workspace` (Required) block with the following attributes + - `enabled` - (Required) The configuration details. + - `restart_even_if_no_updates_available` - (Optional) To force clusters and other compute resources to restart during the maintenance window regardless of the availability of a new update. + - `maintenance_window` block that defines the maintenance frequency with the following arguments + - `week_day_based_schedule` block with the following arguments + - `day_of_week` - the day of the week in uppercase, e.g. `MONDAY` or `SUNDAY` + - `frequency` - one of the `FIRST_OF_MONTH`, `SECOND_OF_MONTH`, `THIRD_OF_MONTH`, `FOURTH_OF_MONTH`, `FIRST_AND_THIRD_OF_MONTH`, `SECOND_AND_FOURTH_OF_MONTH`, `EVERY_WEEK`. + - `window_start_time` block that defines the time of your maintenance window. The default timezone is UTC and cannot be changed. + - `hours` - hour to perform update: 0-23 + - `minutes` - minute to perform update: 0-59 ## Import diff --git a/docs/resources/cluster.md b/docs/resources/cluster.md index 2de0b6ff05..c68cc8aa81 100644 --- a/docs/resources/cluster.md +++ b/docs/resources/cluster.md @@ -51,6 +51,7 @@ resource "databricks_cluster" "shared_autoscaling" { * `custom_tags` - (Optional) Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS EC2 instances and EBS volumes) with these tags in addition to `default_tags`. If a custom cluster tag has the same name as a default cluster tag, the custom tag is prefixed with an `x_` when it is propagated. * `spark_conf` - (Optional) Map with key-value pairs to fine-tune Spark clusters, where you can provide custom [Spark configuration properties](https://spark.apache.org/docs/latest/configuration.html) in a cluster configuration. * `is_pinned` - (Optional) boolean value specifying if the cluster is pinned (not pinned by default). You must be a Databricks administrator to use this.
The pinned clusters' maximum number is [limited to 100](https://docs.databricks.com/clusters/clusters-manage.html#pin-a-cluster), so `apply` may fail if you have more than that (this number may change over time, so check Databricks documentation for actual number). +* `no_wait` - (Optional) If true, the provider will not wait for the cluster to reach `RUNNING` state when creating the cluster, allowing cluster creation and library installation to continue asynchronously. Defaults to false (the provider will wait for cluster creation and library installation to succeed). The following example demonstrates how to create an autoscaling cluster with [Delta Cache](https://docs.databricks.com/delta/optimizations/delta-cache.html) enabled: diff --git a/docs/resources/compliance_security_profile_setting.md b/docs/resources/compliance_security_profile_setting.md index 0794ace4f6..a507243418 100644 --- a/docs/resources/compliance_security_profile_setting.md +++ b/docs/resources/compliance_security_profile_setting.md @@ -27,8 +27,9 @@ resource "databricks_compliance_security_profile_workspace_setting" "this" { The resource supports the following arguments: -* `is_enabled` - (Required) Enable the Compliance Security Profile on the workspace -* `compliance_standards` - (Required) Enable one or more compliance standards on the workspace, e.g. `HIPAA`, `PCI_DSS`, `FEDRAMP_MODERATE` +- `compliance_security_profile_workspace` block with the following attributes: + - `is_enabled` - (Required) Enable the Compliance Security Profile on the workspace + - `compliance_standards` - (Required) Enable one or more compliance standards on the workspace, e.g. `HIPAA`, `PCI_DSS`, `FEDRAMP_MODERATE` ## Import diff --git a/docs/resources/enhanced_security_monitoring_setting.md b/docs/resources/enhanced_security_monitoring_setting.md index ce8b4f25d9..7447b7ad34 100644 --- a/docs/resources/enhanced_security_monitoring_setting.md +++ b/docs/resources/enhanced_security_monitoring_setting.md @@ -25,7 +25,8 @@ resource "databricks_enhanced_security_monitoring_workspace_setting" "this" { The resource supports the following arguments: -* `is_enabled` - (Required) Enable the Enhanced Security Monitoring on the workspace +- `enhanced_security_monitoring_workspace` block with the following attributes: + - `is_enabled` - (Required) Enable the Enhanced Security Monitoring on the workspace ## Import diff --git a/docs/resources/job.md b/docs/resources/job.md index 3a7043f1f0..e8e3c9cdc2 100644 --- a/docs/resources/job.md +++ b/docs/resources/job.md @@ -231,7 +231,7 @@ One of the `query`, `dashboard` or `alert` needs to be provided. * `pause_subscriptions` - (Optional) flag that specifies if subscriptions are paused or not. * `alert` - (Optional) block consisting of following fields: * `alert_id` - (Required) (String) identifier of the Databricks SQL Alert. - * `subscriptions` - (Required) a list of subscription blocks consisting out of one of the required fields: `user_name` for user emails or `destination_id` - for Alert destination's identifier. + * `subscriptions` - (Optional) a list of subscription blocks consisting of one of the required fields: `user_name` for user emails or `destination_id` - for Alert destination's identifier. * `pause_subscriptions` - (Optional) flag that specifies if subscriptions are paused or not. * `file` - (Optional) block consisting of single string fields: * `source` - (Optional) The source of the project. Possible values are `WORKSPACE` and `GIT`.
diff --git a/exporter/importables_test.go b/exporter/importables_test.go index e18d9d163e..b503117595 100644 --- a/exporter/importables_test.go +++ b/exporter/importables_test.go @@ -1828,23 +1828,25 @@ func TestImportShare(t *testing.T) { d := tfsharing.ResourceShare().ToResource().TestResourceData() scm := tfsharing.ResourceShare().Schema share := tfsharing.ShareInfo{ - Name: "stest", - Objects: []tfsharing.SharedDataObject{ - { - DataObjectType: "TABLE", - Name: "ctest.stest.table1", - }, - { - DataObjectType: "MODEL", - Name: "ctest.stest.model1", - }, - { - DataObjectType: "VOLUME", - Name: "ctest.stest.vol1", - }, - { - DataObjectType: "NOTEBOOK", - Name: "Test", + ShareInfo: sharing.ShareInfo{ + Name: "stest", + Objects: []sharing.SharedDataObject{ + { + DataObjectType: "TABLE", + Name: "ctest.stest.table1", + }, + { + DataObjectType: "MODEL", + Name: "ctest.stest.model1", + }, + { + DataObjectType: "VOLUME", + Name: "ctest.stest.vol1", + }, + { + DataObjectType: "NOTEBOOK", + Name: "Test", + }, }, }, } diff --git a/internal/acceptance/cluster_test.go b/internal/acceptance/cluster_test.go index 892ab3104c..4dad55c9f7 100644 --- a/internal/acceptance/cluster_test.go +++ b/internal/acceptance/cluster_test.go @@ -3,6 +3,8 @@ package acceptance import ( "fmt" "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccClusterResource_CreateClusterWithLibraries(t *testing.T) { @@ -108,3 +110,89 @@ func TestAccClusterResource_CreateAndUpdateAwsAttributes(t *testing.T) { }) } } + +func TestAccClusterResource_CreateAndNoWait(t *testing.T) { + workspaceLevel(t, step{ + Template: `data "databricks_spark_version" "latest" { + } + resource "databricks_cluster" "this" { + cluster_name = "nowait-{var.RANDOM}" + spark_version = data.databricks_spark_version.latest.id + instance_pool_id = "{env.TEST_INSTANCE_POOL_ID}" + num_workers = 1 + autotermination_minutes = 10 + spark_conf = { + "spark.databricks.cluster.profile" = "serverless" + } + custom_tags = { + "ResourceClass" = "Serverless" + } + no_wait = true + }`, + }) +} + +func TestAccClusterResource_WorkloadType(t *testing.T) { + workspaceLevel(t, step{ + Template: testAccClusterResourceWorkloadTypeTemplate(""), + }, step{ + Template: testAccClusterResourceWorkloadTypeTemplate(` + workload_type { + clients { + jobs = true + notebooks = true + } + }`), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("databricks_cluster.this", "workload_type.0.clients.0.jobs", "true"), + resource.TestCheckResourceAttr("databricks_cluster.this", "workload_type.0.clients.0.notebooks", "true"), + ), + }, step{ + Template: testAccClusterResourceWorkloadTypeTemplate(` + workload_type { + clients { + jobs = false + notebooks = false + } + }`), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("databricks_cluster.this", "workload_type.0.clients.0.jobs", "false"), + resource.TestCheckResourceAttr("databricks_cluster.this", "workload_type.0.clients.0.notebooks", "false"), + ), + }, step{ + Template: testAccClusterResourceWorkloadTypeTemplate(` + workload_type { + clients { } + }`), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("databricks_cluster.this", "workload_type.0.clients.0.jobs", "true"), + resource.TestCheckResourceAttr("databricks_cluster.this", "workload_type.0.clients.0.notebooks", "true"), + ), + }, step{ + Template: testAccClusterResourceWorkloadTypeTemplate(``), + Check: resource.ComposeAggregateTestCheckFunc( + 
resource.TestCheckResourceAttr("databricks_cluster.this", "workload_type.0.clients.0.jobs", "true"), + resource.TestCheckResourceAttr("databricks_cluster.this", "workload_type.0.clients.0.notebooks", "true"), + ), + }) +} + +func testAccClusterResourceWorkloadTypeTemplate(workloadType string) string { + return fmt.Sprintf(` +data "databricks_spark_version" "latest" {} +resource "databricks_cluster" "this" { + cluster_name = "workload-{var.RANDOM}" + spark_version = data.databricks_spark_version.latest.id + instance_pool_id = "{env.TEST_INSTANCE_POOL_ID}" + autotermination_minutes = 10 + num_workers = 0 + spark_conf = { + "spark.databricks.cluster.profile" = "singleNode" + "spark.master" = "local[*]" + } + custom_tags = { + "ResourceClass" = "SingleNode" + } + %s +}`, workloadType) +} diff --git a/internal/acceptance/init_test.go b/internal/acceptance/init_test.go index 9bcbb8efab..fb13d01567 100644 --- a/internal/acceptance/init_test.go +++ b/internal/acceptance/init_test.go @@ -166,7 +166,7 @@ func run(t *testing.T, steps []step) { if s.Template != "" { stepConfig = environmentTemplate(t, s.Template, vars) } - stepNum := i + stepNum := i + 1 thisStep := s stepCheck := thisStep.Check stepPreConfig := s.PreConfig diff --git a/jobs/resource_job.go b/jobs/resource_job.go index 4f00e3bc95..be2b982a79 100644 --- a/jobs/resource_job.go +++ b/jobs/resource_job.go @@ -83,7 +83,7 @@ type SqlDashboardTask struct { type SqlAlertTask struct { AlertID string `json:"alert_id"` - Subscriptions []SqlSubscription `json:"subscriptions"` + Subscriptions []SqlSubscription `json:"subscriptions,omitempty"` PauseSubscriptions bool `json:"pause_subscriptions,omitempty"` } @@ -624,9 +624,6 @@ func (JobSettingsResource) CustomizeSchema(s *common.CustomizableSchema) *common s.SchemaPath("task", "python_wheel_task", "package_name").SetOptional() s.SchemaPath("task", "for_each_task", "task", "python_wheel_task", "package_name").SetOptional() - s.SchemaPath("task", "sql_task", "alert", "subscriptions").SetRequired() - s.SchemaPath("task", "for_each_task", "task", "sql_task", "alert", "subscriptions").SetRequired() - s.SchemaPath("task", "new_cluster", "cluster_id").SetOptional() s.SchemaPath("task", "for_each_task", "task", "new_cluster", "cluster_id").SetOptional() diff --git a/settings/resource_automatic_cluster_update_setting.go b/settings/resource_automatic_cluster_update_setting.go index 726ae9d244..1926303363 100644 --- a/settings/resource_automatic_cluster_update_setting.go +++ b/settings/resource_automatic_cluster_update_setting.go @@ -19,10 +19,20 @@ var automaticClusterUpdateFieldMask = strings.Join([]string{ "automatic_cluster_update_workspace.maintenance_window.week_day_based_schedule.window_start_time.hours", "automatic_cluster_update_workspace.maintenance_window.week_day_based_schedule.window_start_time.minutes", }, ",") + var automaticClusterUpdateSetting = workspaceSetting[settings.AutomaticClusterUpdateSetting]{ settingStruct: settings.AutomaticClusterUpdateSetting{}, customizeSchemaFunc: func(s map[string]*schema.Schema) map[string]*schema.Schema { - common.MustSchemaPath(s, "automatic_cluster_update_workspace", "enablement_details").Computed = true + common.CustomizeSchemaPath(s, "automatic_cluster_update_workspace", "enablement_details").SetReadOnly() + common.CustomizeSchemaPath(s, "automatic_cluster_update_workspace", "enabled").SetRequired() + common.CustomizeSchemaPath(s, "automatic_cluster_update_workspace", "maintenance_window", + "week_day_based_schedule", "window_start_time", 
"hours").SetRequired() + common.CustomizeSchemaPath(s, "automatic_cluster_update_workspace", "maintenance_window", + "week_day_based_schedule", "day_of_week").SetRequired() + common.CustomizeSchemaPath(s, "automatic_cluster_update_workspace", "maintenance_window", + "week_day_based_schedule", "frequency").SetRequired() + common.CustomizeSchemaPath(s, "automatic_cluster_update_workspace", "maintenance_window", + "week_day_based_schedule", "window_start_time", "minutes").SetRequired() return s }, readFunc: func(ctx context.Context, w *databricks.WorkspaceClient, etag string) (*settings.AutomaticClusterUpdateSetting, error) { @@ -32,6 +42,12 @@ var automaticClusterUpdateSetting = workspaceSetting[settings.AutomaticClusterUp }, updateFunc: func(ctx context.Context, w *databricks.WorkspaceClient, t settings.AutomaticClusterUpdateSetting) (string, error) { t.SettingName = "default" + t.AutomaticClusterUpdateWorkspace.ForceSendFields = []string{"Enabled", "RestartEvenIfNoUpdatesAvailable"} + if t.AutomaticClusterUpdateWorkspace.MaintenanceWindow != nil && + t.AutomaticClusterUpdateWorkspace.MaintenanceWindow.WeekDayBasedSchedule != nil && + t.AutomaticClusterUpdateWorkspace.MaintenanceWindow.WeekDayBasedSchedule.WindowStartTime != nil { + t.AutomaticClusterUpdateWorkspace.MaintenanceWindow.WeekDayBasedSchedule.WindowStartTime.ForceSendFields = []string{"Hours", "Minutes"} + } res, err := w.Settings.AutomaticClusterUpdate().Update(ctx, settings.UpdateAutomaticClusterUpdateSettingRequest{ AllowMissing: true, Setting: t, @@ -49,7 +65,8 @@ var automaticClusterUpdateSetting = workspaceSetting[settings.AutomaticClusterUp Etag: etag, SettingName: "default", AutomaticClusterUpdateWorkspace: settings.ClusterAutoRestartMessage{ - Enabled: false, + Enabled: false, + ForceSendFields: []string{"Enabled"}, }, }, FieldMask: automaticClusterUpdateFieldMask, diff --git a/settings/resource_automatic_cluster_update_setting_test.go b/settings/resource_automatic_cluster_update_setting_test.go index 79a9fbea41..ea065936e2 100644 --- a/settings/resource_automatic_cluster_update_setting_test.go +++ b/settings/resource_automatic_cluster_update_setting_test.go @@ -23,7 +23,8 @@ func TestQueryCreateAutomaticClusterUpdateSetting(t *testing.T) { Setting: settings.AutomaticClusterUpdateSetting{ Etag: "", AutomaticClusterUpdateWorkspace: settings.ClusterAutoRestartMessage{ - Enabled: true, + Enabled: true, + ForceSendFields: []string{"Enabled", "RestartEvenIfNoUpdatesAvailable"}, }, SettingName: "default", }, @@ -44,7 +45,8 @@ func TestQueryCreateAutomaticClusterUpdateSetting(t *testing.T) { Setting: settings.AutomaticClusterUpdateSetting{ Etag: "etag1", AutomaticClusterUpdateWorkspace: settings.ClusterAutoRestartMessage{ - Enabled: true, + Enabled: true, + ForceSendFields: []string{"Enabled", "RestartEvenIfNoUpdatesAvailable"}, }, SettingName: "default", }, @@ -150,14 +152,16 @@ func TestQueryUpdateAutomaticClusterUpdateSetting(t *testing.T) { Etag: "etag1", AutomaticClusterUpdateWorkspace: settings.ClusterAutoRestartMessage{ Enabled: true, + ForceSendFields: []string{"Enabled", "RestartEvenIfNoUpdatesAvailable"}, RestartEvenIfNoUpdatesAvailable: true, MaintenanceWindow: &settings.ClusterAutoRestartMessageMaintenanceWindow{ WeekDayBasedSchedule: &settings.ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule{ DayOfWeek: "MONDAY", Frequency: "EVERY_WEEK", WindowStartTime: &settings.ClusterAutoRestartMessageMaintenanceWindowWindowStartTime{ - Hours: 1, - Minutes: 0, + Hours: 1, + Minutes: 0, + ForceSendFields: 
[]string{"Hours", "Minutes"}, }, }, }, @@ -238,6 +242,7 @@ func TestQueryUpdateAutomaticClusterUpdateSettingWithConflict(t *testing.T) { AutomaticClusterUpdateWorkspace: settings.ClusterAutoRestartMessage{ Enabled: true, RestartEvenIfNoUpdatesAvailable: true, + ForceSendFields: []string{"Enabled", "RestartEvenIfNoUpdatesAvailable"}, }, SettingName: "default", }, @@ -260,6 +265,7 @@ func TestQueryUpdateAutomaticClusterUpdateSettingWithConflict(t *testing.T) { AutomaticClusterUpdateWorkspace: settings.ClusterAutoRestartMessage{ Enabled: true, RestartEvenIfNoUpdatesAvailable: true, + ForceSendFields: []string{"Enabled", "RestartEvenIfNoUpdatesAvailable"}, }, SettingName: "default", }, @@ -312,7 +318,8 @@ func TestQueryDeleteAutomaticClusterUpdateSetting(t *testing.T) { Etag: "etag1", SettingName: "default", AutomaticClusterUpdateWorkspace: settings.ClusterAutoRestartMessage{ - Enabled: false, + Enabled: false, + ForceSendFields: []string{"Enabled"}, }, }, }).Return(&settings.AutomaticClusterUpdateSetting{ @@ -347,7 +354,8 @@ func TestQueryDeleteAutomaticClusterUpdateSettingWithConflict(t *testing.T) { Etag: "etag1", SettingName: "default", AutomaticClusterUpdateWorkspace: settings.ClusterAutoRestartMessage{ - Enabled: false, + Enabled: false, + ForceSendFields: []string{"Enabled"}, }, }, }).Return(nil, &apierr.APIError{ @@ -368,7 +376,8 @@ func TestQueryDeleteAutomaticClusterUpdateSettingWithConflict(t *testing.T) { Etag: "etag2", SettingName: "default", AutomaticClusterUpdateWorkspace: settings.ClusterAutoRestartMessage{ - Enabled: false, + Enabled: false, + ForceSendFields: []string{"Enabled"}, }, }, }).Return(&settings.AutomaticClusterUpdateSetting{ diff --git a/settings/resource_compliance_security_profile_setting.go b/settings/resource_compliance_security_profile_setting.go index 779bf78708..153bcfede5 100644 --- a/settings/resource_compliance_security_profile_setting.go +++ b/settings/resource_compliance_security_profile_setting.go @@ -6,7 +6,9 @@ import ( "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/service/settings" + "github.com/databricks/terraform-provider-databricks/common" "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) // Enhanced Security Monitoring setting @@ -16,6 +18,11 @@ var complianceSecurityProfileFieldMask = strings.Join([]string{ }, ",") var complianceSecurityProfileSetting = workspaceSetting[settings.ComplianceSecurityProfileSetting]{ settingStruct: settings.ComplianceSecurityProfileSetting{}, + customizeSchemaFunc: func(s map[string]*schema.Schema) map[string]*schema.Schema { + common.CustomizeSchemaPath(s, "compliance_security_profile_workspace", "compliance_standards").SetRequired() + common.CustomizeSchemaPath(s, "compliance_security_profile_workspace", "is_enabled").SetRequired() + return s + }, readFunc: func(ctx context.Context, w *databricks.WorkspaceClient, etag string) (*settings.ComplianceSecurityProfileSetting, error) { return w.Settings.ComplianceSecurityProfile().Get(ctx, settings.GetComplianceSecurityProfileSettingRequest{ Etag: etag, @@ -23,6 +30,7 @@ var complianceSecurityProfileSetting = workspaceSetting[settings.ComplianceSecur }, updateFunc: func(ctx context.Context, w *databricks.WorkspaceClient, t settings.ComplianceSecurityProfileSetting) (string, error) { t.SettingName = "default" + t.ComplianceSecurityProfileWorkspace.ForceSendFields = []string{"IsEnabled"} res, err := w.Settings.ComplianceSecurityProfile().Update(ctx, 
settings.UpdateComplianceSecurityProfileSettingRequest{ AllowMissing: true, Setting: t, diff --git a/settings/resource_compliance_security_profile_setting_test.go b/settings/resource_compliance_security_profile_setting_test.go index 2abd08ca05..409e5e3206 100644 --- a/settings/resource_compliance_security_profile_setting_test.go +++ b/settings/resource_compliance_security_profile_setting_test.go @@ -25,6 +25,7 @@ func TestQueryCreateComplianceSecurityProfileSettingWithNoneStandard(t *testing. ComplianceSecurityProfileWorkspace: settings.ComplianceSecurityProfile{ IsEnabled: true, ComplianceStandards: []settings.ComplianceStandard{"NONE"}, + ForceSendFields: []string{"IsEnabled"}, }, SettingName: "default", }, @@ -47,6 +48,7 @@ func TestQueryCreateComplianceSecurityProfileSettingWithNoneStandard(t *testing. ComplianceSecurityProfileWorkspace: settings.ComplianceSecurityProfile{ IsEnabled: true, ComplianceStandards: []settings.ComplianceStandard{"NONE"}, + ForceSendFields: []string{"IsEnabled"}, }, SettingName: "default", }, @@ -136,6 +138,7 @@ func TestQueryUpdateComplianceSecurityProfileSetting(t *testing.T) { ComplianceSecurityProfileWorkspace: settings.ComplianceSecurityProfile{ IsEnabled: true, ComplianceStandards: []settings.ComplianceStandard{"HIPAA", "PCI_DSS"}, + ForceSendFields: []string{"IsEnabled"}, }, SettingName: "default", }, @@ -192,6 +195,7 @@ func TestQueryUpdateComplianceSecurityProfileSettingWithConflict(t *testing.T) { ComplianceSecurityProfileWorkspace: settings.ComplianceSecurityProfile{ IsEnabled: true, ComplianceStandards: []settings.ComplianceStandard{"HIPAA"}, + ForceSendFields: []string{"IsEnabled"}, }, SettingName: "default", }, @@ -214,6 +218,7 @@ func TestQueryUpdateComplianceSecurityProfileSettingWithConflict(t *testing.T) { ComplianceSecurityProfileWorkspace: settings.ComplianceSecurityProfile{ IsEnabled: true, ComplianceStandards: []settings.ComplianceStandard{"HIPAA"}, + ForceSendFields: []string{"IsEnabled"}, }, SettingName: "default", }, diff --git a/settings/resource_enhanced_security_monitoring_setting.go b/settings/resource_enhanced_security_monitoring_setting.go index 602855b92e..d61dc96ecb 100644 --- a/settings/resource_enhanced_security_monitoring_setting.go +++ b/settings/resource_enhanced_security_monitoring_setting.go @@ -6,6 +6,8 @@ import ( "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/service/settings" + "github.com/databricks/terraform-provider-databricks/common" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) // Enhanced Security Monitoring setting @@ -14,6 +16,10 @@ var enhancedSecurityMonitoringFieldMask = strings.Join([]string{ }, ",") var enhancedSecurityMonitoringSetting = workspaceSetting[settings.EnhancedSecurityMonitoringSetting]{ settingStruct: settings.EnhancedSecurityMonitoringSetting{}, + customizeSchemaFunc: func(s map[string]*schema.Schema) map[string]*schema.Schema { + common.CustomizeSchemaPath(s, "enhanced_security_monitoring_workspace", "is_enabled").SetRequired() + return s + }, readFunc: func(ctx context.Context, w *databricks.WorkspaceClient, etag string) (*settings.EnhancedSecurityMonitoringSetting, error) { return w.Settings.EnhancedSecurityMonitoring().Get(ctx, settings.GetEnhancedSecurityMonitoringSettingRequest{ Etag: etag, @@ -21,6 +27,7 @@ var enhancedSecurityMonitoringSetting = workspaceSetting[settings.EnhancedSecuri }, updateFunc: func(ctx context.Context, w *databricks.WorkspaceClient, t settings.EnhancedSecurityMonitoringSetting) (string, error) { 
t.SettingName = "default" + t.EnhancedSecurityMonitoringWorkspace.ForceSendFields = []string{"IsEnabled"} res, err := w.Settings.EnhancedSecurityMonitoring().Update(ctx, settings.UpdateEnhancedSecurityMonitoringSettingRequest{ AllowMissing: true, Setting: t, @@ -38,7 +45,8 @@ var enhancedSecurityMonitoringSetting = workspaceSetting[settings.EnhancedSecuri Etag: etag, SettingName: "default", EnhancedSecurityMonitoringWorkspace: settings.EnhancedSecurityMonitoring{ - IsEnabled: false, + IsEnabled: false, + ForceSendFields: []string{"IsEnabled"}, }, }, FieldMask: enhancedSecurityMonitoringFieldMask, diff --git a/settings/resource_enhanced_security_monitoring_setting_test.go b/settings/resource_enhanced_security_monitoring_setting_test.go index c09bb1804e..4c2c3a3e65 100644 --- a/settings/resource_enhanced_security_monitoring_setting_test.go +++ b/settings/resource_enhanced_security_monitoring_setting_test.go @@ -23,7 +23,8 @@ func TestQueryCreateEnhancedSecurityMonitoringSetting(t *testing.T) { Setting: settings.EnhancedSecurityMonitoringSetting{ Etag: "", EnhancedSecurityMonitoringWorkspace: settings.EnhancedSecurityMonitoring{ - IsEnabled: true, + IsEnabled: true, + ForceSendFields: []string{"IsEnabled"}, }, SettingName: "default", }, @@ -44,7 +45,8 @@ func TestQueryCreateEnhancedSecurityMonitoringSetting(t *testing.T) { Setting: settings.EnhancedSecurityMonitoringSetting{ Etag: "etag1", EnhancedSecurityMonitoringWorkspace: settings.EnhancedSecurityMonitoring{ - IsEnabled: true, + IsEnabled: true, + ForceSendFields: []string{"IsEnabled"}, }, SettingName: "default", }, @@ -123,14 +125,16 @@ func TestQueryUpdateEnhancedSecurityMonitoringSetting(t *testing.T) { Setting: settings.EnhancedSecurityMonitoringSetting{ Etag: "etag1", EnhancedSecurityMonitoringWorkspace: settings.EnhancedSecurityMonitoring{ - IsEnabled: true, + IsEnabled: true, + ForceSendFields: []string{"IsEnabled"}, }, SettingName: "default", }, }).Return(&settings.EnhancedSecurityMonitoringSetting{ Etag: "etag2", EnhancedSecurityMonitoringWorkspace: settings.EnhancedSecurityMonitoring{ - IsEnabled: true, + IsEnabled: true, + ForceSendFields: []string{"IsEnabled"}, }, SettingName: "default", }, nil) @@ -173,7 +177,8 @@ func TestQueryUpdateEnhancedSecurityMonitoringSettingWithConflict(t *testing.T) Setting: settings.EnhancedSecurityMonitoringSetting{ Etag: "etag1", EnhancedSecurityMonitoringWorkspace: settings.EnhancedSecurityMonitoring{ - IsEnabled: true, + IsEnabled: true, + ForceSendFields: []string{"IsEnabled"}, }, SettingName: "default", }, @@ -194,7 +199,8 @@ func TestQueryUpdateEnhancedSecurityMonitoringSettingWithConflict(t *testing.T) Setting: settings.EnhancedSecurityMonitoringSetting{ Etag: "etag2", EnhancedSecurityMonitoringWorkspace: settings.EnhancedSecurityMonitoring{ - IsEnabled: true, + IsEnabled: true, + ForceSendFields: []string{"IsEnabled"}, }, SettingName: "default", }, @@ -244,7 +250,8 @@ func TestQueryDeleteEnhancedSecurityMonitoringSetting(t *testing.T) { Etag: "etag1", SettingName: "default", EnhancedSecurityMonitoringWorkspace: settings.EnhancedSecurityMonitoring{ - IsEnabled: false, + IsEnabled: false, + ForceSendFields: []string{"IsEnabled"}, }, }, }).Return(&settings.EnhancedSecurityMonitoringSetting{ @@ -279,7 +286,8 @@ func TestQueryDeleteEnhancedSecurityMonitoringSettingWithConflict(t *testing.T) Etag: "etag1", SettingName: "default", EnhancedSecurityMonitoringWorkspace: settings.EnhancedSecurityMonitoring{ - IsEnabled: false, + IsEnabled: false, + ForceSendFields: []string{"IsEnabled"}, }, }, 
}).Return(nil, &apierr.APIError{ @@ -300,7 +308,8 @@ func TestQueryDeleteEnhancedSecurityMonitoringSettingWithConflict(t *testing.T) Etag: "etag2", SettingName: "default", EnhancedSecurityMonitoringWorkspace: settings.EnhancedSecurityMonitoring{ - IsEnabled: false, + IsEnabled: false, + ForceSendFields: []string{"IsEnabled"}, }, }, }).Return(&settings.EnhancedSecurityMonitoringSetting{ diff --git a/sharing/data_share.go b/sharing/data_share.go index 0542956d6a..1e15176112 100644 --- a/sharing/data_share.go +++ b/sharing/data_share.go @@ -3,20 +3,48 @@ package sharing import ( "context" + "github.com/databricks/databricks-sdk-go" + "github.com/databricks/databricks-sdk-go/service/sharing" "github.com/databricks/terraform-provider-databricks/common" ) -func DataSourceShare() common.Resource { - type ShareDetail struct { - Name string `json:"name,omitempty" tf:"computed"` - Objects []SharedDataObject `json:"objects,omitempty" tf:"computed,slice_set,alias:object"` - CreatedAt int64 `json:"created_at,omitempty" tf:"computed"` - CreatedBy string `json:"created_by,omitempty" tf:"computed"` +type ShareDetail struct { + Name string `json:"name,omitempty" tf:"computed"` + Objects []sharing.SharedDataObject `json:"objects,omitempty" tf:"computed,slice_set,alias:object"` + CreatedAt int64 `json:"created_at,omitempty" tf:"computed"` + CreatedBy string `json:"created_by,omitempty" tf:"computed"` +} + +func (ShareDetail) CustomizeSchema(s *common.CustomizableSchema) *common.CustomizableSchema { + s.SchemaPath("name").SetComputed() + s.SchemaPath("object", "added_at").SetComputed() + s.SchemaPath("object", "added_by").SetComputed() + s.SchemaPath("object", "data_object_type").SetRequired() + s.SchemaPath("object", "status").SetComputed() + s.SchemaPath("object", "partition", "value", "op").SetRequired() + s.SchemaPath("object", "partition", "value", "name").SetRequired() + s.SchemaPath("object", "partition", "value").SetMinItems(1) + + return s +} + +func (ShareDetail) Aliases() map[string]map[string]string { + return map[string]map[string]string{ + "sharing.SharedDataObject": { + "partitions": "partition", + }, + "sharing.Partition": { + "values": "value", + }, } - return common.DataResource(ShareDetail{}, func(ctx context.Context, e any, c *common.DatabricksClient) error { - data := e.(*ShareDetail) - sharesAPI := NewSharesAPI(ctx, c) - share, err := sharesAPI.get(data.Name) +} + +func DataSourceShare() common.Resource { + return common.WorkspaceData(func(ctx context.Context, data *ShareDetail, client *databricks.WorkspaceClient) error { + share, err := client.Shares.Get(ctx, sharing.GetShareRequest{ + Name: data.Name, + IncludeSharedData: true, + }) if err != nil { return err } diff --git a/sharing/data_share_test.go b/sharing/data_share_test.go index 8211d145a5..5cda61ea7a 100644 --- a/sharing/data_share_test.go +++ b/sharing/data_share_test.go @@ -3,6 +3,7 @@ package sharing import ( "testing" + "github.com/databricks/databricks-sdk-go/service/sharing" "github.com/databricks/terraform-provider-databricks/qa" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/stretchr/testify/assert" @@ -14,21 +15,21 @@ func TestShareData(t *testing.T) { { Method: "GET", Resource: "/api/2.1/unity-catalog/shares/a?include_shared_data=true", - Response: ShareInfo{ + Response: sharing.ShareInfo{ Name: "a", - Objects: []SharedDataObject{ + Objects: []sharing.SharedDataObject{ { Name: "a", DataObjectType: "TABLE", Comment: "c", - CDFEnabled: false, + CdfEnabled: false, StartVersion: 0, SharedAs: "", 
AddedAt: 0, AddedBy: "", HistoryDataSharingStatus: "DISABLED", Status: "ACTIVE", - Partitions: []Partition{}, + Partitions: []sharing.Partition{}, }, }, CreatedBy: "bob", @@ -52,10 +53,12 @@ func TestShareData(t *testing.T) { "added_at": 0, "added_by": "", "comment": "c", + "content": "", "data_object_type": "TABLE", "name": "a", "shared_as": "", "start_version": 0, + "string_shared_as": "", "cdf_enabled": false, "status": "ACTIVE", "history_data_sharing_status": "DISABLED", diff --git a/sharing/data_shares_test.go b/sharing/data_shares_test.go index dce65b0c82..8afddc0b33 100644 --- a/sharing/data_shares_test.go +++ b/sharing/data_shares_test.go @@ -3,6 +3,7 @@ package sharing import ( "testing" + "github.com/databricks/databricks-sdk-go/service/sharing" "github.com/databricks/terraform-provider-databricks/qa" ) @@ -15,17 +16,18 @@ func TestSharesData(t *testing.T) { Response: Shares{ Shares: []ShareInfo{ { - Name: "a", - Objects: []SharedDataObject{ - { - Name: "a", - DataObjectType: "TABLE", - Comment: "c", + sharing.ShareInfo{ + Name: "a", + Objects: []sharing.SharedDataObject{ + { + Name: "a", + DataObjectType: "TABLE", + Comment: "c", + }, }, - }, - CreatedAt: 0, - CreatedBy: "", - }, + CreatedAt: 0, + CreatedBy: "", + }}, }, }, }, diff --git a/sharing/resource_share.go b/sharing/resource_share.go index e2953068e9..f45aa6a0fb 100644 --- a/sharing/resource_share.go +++ b/sharing/resource_share.go @@ -11,68 +11,53 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) -type SharesAPI struct { - client *common.DatabricksClient - context context.Context -} - -func NewSharesAPI(ctx context.Context, m any) SharesAPI { - return SharesAPI{m.(*common.DatabricksClient), context.WithValue(ctx, common.Api, common.API_2_1)} -} - -const ( - ShareAdd = "ADD" - ShareRemove = "REMOVE" - ShareUpdate = "UPDATE" -) - type ShareInfo struct { - Name string `json:"name" tf:"force_new"` - Owner string `json:"owner,omitempty" tf:"suppress_diff"` - Objects []SharedDataObject `json:"objects,omitempty" tf:"alias:object"` - CreatedAt int64 `json:"created_at,omitempty" tf:"computed"` - CreatedBy string `json:"created_by,omitempty" tf:"computed"` + sharing.ShareInfo } -type SharedDataObject struct { - Name string `json:"name"` - DataObjectType string `json:"data_object_type"` - Comment string `json:"comment,omitempty"` - SharedAs string `json:"shared_as,omitempty" tf:"suppress_diff"` - CDFEnabled bool `json:"cdf_enabled,omitempty" tf:"suppress_diff"` - StartVersion int64 `json:"start_version,omitempty" tf:"suppress_diff"` - HistoryDataSharingStatus string `json:"history_data_sharing_status,omitempty" tf:"suppress_diff"` - Partitions []Partition `json:"partitions,omitempty" tf:"alias:partition"` - Status string `json:"status,omitempty" tf:"computed"` - AddedAt int64 `json:"added_at,omitempty" tf:"computed"` - AddedBy string `json:"added_by,omitempty" tf:"computed"` -} +func (ShareInfo) CustomizeSchema(s *common.CustomizableSchema) *common.CustomizableSchema { + s.SchemaPath("name").SetRequired() + s.SchemaPath("name").SetForceNew() + s.SchemaPath("name").SetCustomSuppressDiff(common.EqualFoldDiffSuppress) + s.SchemaPath("owner").SetSuppressDiff() + s.SchemaPath("created_at").SetComputed() + s.SchemaPath("created_by").SetComputed() + s.SchemaPath("updated_at").SetComputed() + s.SchemaPath("updated_by").SetComputed() + + s.SchemaPath("object").SetMinItems(1) + s.SchemaPath("object", "data_object_type").SetRequired() + s.SchemaPath("object", "shared_as").SetSuppressDiff() + s.SchemaPath("object", 
"cdf_enabled").SetSuppressDiff() + s.SchemaPath("object", "start_version").SetSuppressDiff() + s.SchemaPath("object", "history_data_sharing_status").SetSuppressDiff() + s.SchemaPath("object", "status").SetComputed() + s.SchemaPath("object", "added_at").SetComputed() + s.SchemaPath("object", "added_by").SetComputed() + s.SchemaPath("object", "partition", "value", "op").SetRequired() + s.SchemaPath("object", "partition", "value", "name").SetRequired() -type ShareDataChange struct { - Action string `json:"action"` - DataObject SharedDataObject `json:"data_object"` + return s } -type ShareUpdates struct { - Owner string `json:"owner,omitempty"` - Updates []ShareDataChange `json:"updates"` +func (ShareInfo) Aliases() map[string]map[string]string { + return map[string]map[string]string{ + "sharing.ShareInfo": { + "objects": "object", + }, + "sharing.SharedDataObject": { + "partitions": "partition", + }, + "sharing.Partition": { + "values": "value", + }, + } } type Shares struct { Shares []ShareInfo `json:"shares"` } -type Partition struct { - Values []PartitionValue `json:"values" tf:"alias:value"` -} - -type PartitionValue struct { - Name string `json:"name"` - Op string `json:"op"` - RecipientPropertyKey string `json:"recipient_property_key,omitempty"` - Value string `json:"value,omitempty"` -} - func (si *ShareInfo) sortSharesByName() { sort.Slice(si.Objects, func(i, j int) bool { return si.Objects[i].Name < si.Objects[j].Name @@ -83,70 +68,58 @@ func (si *ShareInfo) suppressCDFEnabledDiff() { //suppress diff for CDF Enabled if HistoryDataSharingStatus is enabled , as API does not accept both fields to be set for i := range si.Objects { if si.Objects[i].HistoryDataSharingStatus == "ENABLED" { - si.Objects[i].CDFEnabled = false + si.Objects[i].CdfEnabled = false } } } -func (a SharesAPI) get(name string) (si ShareInfo, err error) { - err = a.client.Get(a.context, "/unity-catalog/shares/"+name+"?include_shared_data=true", nil, &si) - si.sortSharesByName() - si.suppressCDFEnabledDiff() - return -} - -func (a SharesAPI) update(name string, su ShareUpdates) error { - if len(su.Updates) == 0 { - return nil - } - return a.client.Patch(a.context, "/unity-catalog/shares/"+name, su) -} - -func (si ShareInfo) shareChanges(action string) ShareUpdates { - var changes []ShareDataChange +func (si ShareInfo) shareChanges(action string) sharing.UpdateShare { + var changes []sharing.SharedDataObjectUpdate for _, obj := range si.Objects { - changes = append(changes, ShareDataChange{ - Action: action, - DataObject: obj, - }) + changes = append(changes, sharing.SharedDataObjectUpdate{ + Action: sharing.SharedDataObjectUpdateAction(action), + DataObject: &obj, + }, + ) } - return ShareUpdates{ + return sharing.UpdateShare{ Updates: changes, } } -func (si ShareInfo) resourceShareMap() map[string]SharedDataObject { - m := make(map[string]SharedDataObject, len(si.Objects)) +func (si ShareInfo) resourceShareMap() map[string]sharing.SharedDataObject { + m := make(map[string]sharing.SharedDataObject, len(si.Objects)) for _, sdo := range si.Objects { m[sdo.Name] = sdo } return m } -func (sdo SharedDataObject) Equal(other SharedDataObject) bool { +func Equal(this sharing.SharedDataObject, other sharing.SharedDataObject) bool { if other.SharedAs == "" { - other.SharedAs = sdo.SharedAs + other.SharedAs = this.SharedAs } //don't compare computed fields - other.AddedAt = sdo.AddedAt - other.AddedBy = sdo.AddedBy - other.Status = sdo.Status - return reflect.DeepEqual(sdo, other) + other.AddedAt = this.AddedAt + other.AddedBy = 
this.AddedBy + other.Status = this.Status + other.ForceSendFields = this.ForceSendFields // TODO: is this the right thing to do? + return reflect.DeepEqual(this, other) } -func (beforeSi ShareInfo) Diff(afterSi ShareInfo) []ShareDataChange { +func (beforeSi ShareInfo) Diff(afterSi ShareInfo) []sharing.SharedDataObjectUpdate { beforeMap := beforeSi.resourceShareMap() afterMap := afterSi.resourceShareMap() - changes := []ShareDataChange{} + changes := []sharing.SharedDataObjectUpdate{} // not in after so remove for _, beforeSdo := range beforeSi.Objects { _, exists := afterMap[beforeSdo.Name] if exists { continue } - changes = append(changes, ShareDataChange{ - Action: ShareRemove, - DataObject: beforeSdo, + changes = append(changes, sharing.SharedDataObjectUpdate{ + Action: sharing.SharedDataObjectUpdateActionRemove, + DataObject: &beforeSdo, }) } @@ -155,29 +128,26 @@ func (beforeSi ShareInfo) Diff(afterSi ShareInfo) []ShareDataChange { for _, afterSdo := range afterSi.Objects { beforeSdo, exists := beforeMap[afterSdo.Name] if exists { - if !beforeSdo.Equal(afterSdo) { + if !Equal(beforeSdo, afterSdo) { // do not send SharedAs afterSdo.SharedAs = "" - changes = append(changes, ShareDataChange{ - Action: ShareUpdate, - DataObject: afterSdo, + changes = append(changes, sharing.SharedDataObjectUpdate{ + Action: sharing.SharedDataObjectUpdateActionUpdate, + DataObject: &afterSdo, }) } continue } - changes = append(changes, ShareDataChange{ - Action: ShareAdd, - DataObject: afterSdo, + changes = append(changes, sharing.SharedDataObjectUpdate{ + Action: sharing.SharedDataObjectUpdateActionAdd, + DataObject: &afterSdo, }) } return changes } func ResourceShare() common.Resource { - shareSchema := common.StructToSchema(ShareInfo{}, func(m map[string]*schema.Schema) map[string]*schema.Schema { - m["name"].DiffSuppressFunc = common.EqualFoldDiffSuppress - return m - }) + shareSchema := common.StructToSchema(ShareInfo{}, nil) return common.Resource{ Schema: shareSchema, Create: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { @@ -195,9 +165,10 @@ func ResourceShare() common.Resource { //can only create empty share, objects & owners have to be added using update API var si ShareInfo common.DataToStructPointer(d, shareSchema, &si) - shareChanges := si.shareChanges(ShareAdd) + shareChanges := si.shareChanges(string(sharing.SharedDataObjectUpdateActionAdd)) + shareChanges.Name = si.Name shareChanges.Owner = si.Owner - if err := NewSharesAPI(ctx, c).update(si.Name, shareChanges); err != nil { + if _, err := w.Shares.Update(ctx, shareChanges); err != nil { //delete orphaned share if update fails if d_err := w.Shares.DeleteByName(ctx, si.Name); d_err != nil { return d_err @@ -208,28 +179,47 @@ func ResourceShare() common.Resource { return nil }, Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { - si, err := NewSharesAPI(ctx, c).get(d.Id()) + client, err := c.WorkspaceClient() if err != nil { return err } + + shareInfo, err := client.Shares.Get(ctx, sharing.GetShareRequest{ + Name: d.Id(), + IncludeSharedData: true, + }) + var si = ShareInfo{*shareInfo} + si.sortSharesByName() + si.suppressCDFEnabledDiff() + if err != nil { + return err + } + return common.StructToData(si, shareSchema, d) }, Update: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { - beforeSi, err := NewSharesAPI(ctx, c).get(d.Id()) + client, err := c.WorkspaceClient() if err != nil { return err } - var afterSi ShareInfo - 
common.DataToStructPointer(d, shareSchema, &afterSi) - changes := beforeSi.Diff(afterSi) - w, err := c.WorkspaceClient() + si, err := client.Shares.Get(ctx, sharing.GetShareRequest{ + Name: d.Id(), + IncludeSharedData: true, + }) if err != nil { return err } + var beforeSi = ShareInfo{*si} + beforeSi.sortSharesByName() + beforeSi.suppressCDFEnabledDiff() + var afterSi ShareInfo + common.DataToStructPointer(d, shareSchema, &afterSi) + changes := beforeSi.Diff(afterSi) + if d.HasChange("owner") { - _, err = w.Shares.Update(ctx, sharing.UpdateShare{ + _, err = client.Shares.Update(ctx, sharing.UpdateShare{ Name: afterSi.Name, Owner: afterSi.Owner, }) @@ -242,14 +232,19 @@ func ResourceShare() common.Resource { return nil } - err = NewSharesAPI(ctx, c).update(d.Id(), ShareUpdates{ + if len(changes) == 0 { + return nil + } + + _, err = client.Shares.Update(ctx, sharing.UpdateShare{ + Name: d.Id(), Updates: changes, }) if err != nil { if d.HasChange("owner") { // Rollback old, new := d.GetChange("owner") - _, rollbackErr := w.Shares.Update(ctx, sharing.UpdateShare{ + _, rollbackErr := client.Shares.Update(ctx, sharing.UpdateShare{ Name: beforeSi.Name, Owner: old.(string), }) diff --git a/sharing/resource_share_test.go b/sharing/resource_share_test.go index 7d03f76089..73a6a27289 100644 --- a/sharing/resource_share_test.go +++ b/sharing/resource_share_test.go @@ -12,160 +12,166 @@ import ( func TestDiffShareInfo(t *testing.T) { empty := ShareInfo{ - Name: "b", - Objects: []SharedDataObject{}, + ShareInfo: sharing.ShareInfo{ + Name: "b", + Objects: []sharing.SharedDataObject{}, + }, } firstShare := ShareInfo{ - Name: "b", - Objects: []SharedDataObject{ - { - Name: "main.b", - DataObjectType: "TABLE", - Comment: "c", - }, - { - Name: "main.a", - DataObjectType: "TABLE", - Comment: "c", - }, - }, + ShareInfo: sharing.ShareInfo{ + Name: "b", + Objects: []sharing.SharedDataObject{ + { + Name: "main.b", + DataObjectType: "TABLE", + Comment: "c", + }, + { + Name: "main.a", + DataObjectType: "TABLE", + Comment: "c", + }, + }}, } secondShare := ShareInfo{ - Name: "b", - Objects: []SharedDataObject{ - { - Name: "main.c", - DataObjectType: "TABLE", - Comment: "d", - }, - { - Name: "main.a", - DataObjectType: "TABLE", - Comment: "c", - }, - }, + ShareInfo: sharing.ShareInfo{ + Name: "b", + Objects: []sharing.SharedDataObject{ + { + Name: "main.c", + DataObjectType: "TABLE", + Comment: "d", + }, + { + Name: "main.a", + DataObjectType: "TABLE", + Comment: "c", + }, + }}, } thirdShare := ShareInfo{ - Name: "b", - Objects: []SharedDataObject{ - { - Name: "main.c", - DataObjectType: "TABLE", - Comment: "d", - }, - { - Name: "main.b", - DataObjectType: "TABLE", - Comment: "d", - }, - }, + ShareInfo: sharing.ShareInfo{ + Name: "b", + Objects: []sharing.SharedDataObject{ + { + Name: "main.c", + DataObjectType: "TABLE", + Comment: "d", + }, + { + Name: "main.b", + DataObjectType: "TABLE", + Comment: "d", + }, + }}, } fourthShare := ShareInfo{ - Name: "d", - Objects: []SharedDataObject{ - { - Name: "main.b", - DataObjectType: "TABLE", - Comment: "d", - }, - { - Name: "main.a", - DataObjectType: "TABLE", - Comment: "c", - }, - }, + ShareInfo: sharing.ShareInfo{ + Name: "d", + Objects: []sharing.SharedDataObject{ + { + Name: "main.b", + DataObjectType: "TABLE", + Comment: "d", + }, + { + Name: "main.a", + DataObjectType: "TABLE", + Comment: "c", + }, + }}, } - diffAdd := []ShareDataChange{ + diffAdd := []sharing.SharedDataObjectUpdate{ { - Action: ShareAdd, - DataObject: SharedDataObject{ + Action: 
sharing.SharedDataObjectUpdateActionAdd,
+			DataObject: &sharing.SharedDataObject{
 				Name: "main.b",
 				DataObjectType: "TABLE",
 				Comment: "c",
 			},
 		},
 		{
-			Action: ShareAdd,
-			DataObject: SharedDataObject{
+			Action: sharing.SharedDataObjectUpdateActionAdd,
+			DataObject: &sharing.SharedDataObject{
 				Name: "main.a",
 				DataObjectType: "TABLE",
 				Comment: "c",
 			},
 		},
 	}
-	diffRemove := []ShareDataChange{
+	diffRemove := []sharing.SharedDataObjectUpdate{
 		{
-			Action: ShareRemove,
-			DataObject: SharedDataObject{
+			Action: sharing.SharedDataObjectUpdateActionRemove,
+			DataObject: &sharing.SharedDataObject{
 				Name: "main.b",
 				DataObjectType: "TABLE",
 				Comment: "c",
 			},
 		},
 		{
-			Action: ShareRemove,
-			DataObject: SharedDataObject{
+			Action: sharing.SharedDataObjectUpdateActionRemove,
+			DataObject: &sharing.SharedDataObject{
 				Name: "main.a",
 				DataObjectType: "TABLE",
 				Comment: "c",
 			},
 		},
 	}
-	diff12 := []ShareDataChange{
+	diff12 := []sharing.SharedDataObjectUpdate{
 		{
-			Action: ShareRemove,
-			DataObject: SharedDataObject{
+			Action: sharing.SharedDataObjectUpdateActionRemove,
+			DataObject: &sharing.SharedDataObject{
 				Name: "main.b",
 				DataObjectType: "TABLE",
 				Comment: "c",
 			},
 		},
 		{
-			Action: ShareAdd,
-			DataObject: SharedDataObject{
+			Action: sharing.SharedDataObjectUpdateActionAdd,
+			DataObject: &sharing.SharedDataObject{
 				Name: "main.c",
 				DataObjectType: "TABLE",
 				Comment: "d",
 			},
 		},
 	}
-	diff13 := []ShareDataChange{
+	diff13 := []sharing.SharedDataObjectUpdate{
 		{
-			Action: ShareRemove,
-			DataObject: SharedDataObject{
+			Action: sharing.SharedDataObjectUpdateActionRemove,
+			DataObject: &sharing.SharedDataObject{
 				Name: "main.a",
 				DataObjectType: "TABLE",
 				Comment: "c",
 			},
 		},
 		{
-			Action: ShareAdd,
-			DataObject: SharedDataObject{
+			Action: sharing.SharedDataObjectUpdateActionAdd,
+			DataObject: &sharing.SharedDataObject{
 				Name: "main.c",
 				DataObjectType: "TABLE",
 				Comment: "d",
 			},
 		},
 		{
-			Action: ShareUpdate,
-			DataObject: SharedDataObject{
+			Action: sharing.SharedDataObjectUpdateActionUpdate,
+			DataObject: &sharing.SharedDataObject{
 				Name: "main.b",
 				DataObjectType: "TABLE",
 				Comment: "d",
 			},
 		},
 	}
-	diff14 := []ShareDataChange{
+	diff14 := []sharing.SharedDataObjectUpdate{
 		{
-			Action: ShareUpdate,
-			DataObject: SharedDataObject{
+			Action: sharing.SharedDataObjectUpdateActionUpdate,
+			DataObject: &sharing.SharedDataObject{
 				Name: "main.b",
 				DataObjectType: "TABLE",
 				Comment: "d",
 			},
 		},
 	}
-	assert.Equal(t, firstShare.Diff(firstShare), []ShareDataChange{}, "Should not have difference")
+	assert.Equal(t, firstShare.Diff(firstShare), []sharing.SharedDataObjectUpdate{}, "Should not have difference")
 	assert.Equal(t, empty.Diff(firstShare), diffAdd, "Should have 2 ADDs")
 	assert.Equal(t, firstShare.Diff(empty), diffRemove, "Should have 2 REMOVEs")
 	assert.Equal(t, firstShare.Diff(secondShare), diff12, "Should have 1 ADD and 1 REMOVE")
@@ -184,21 +190,24 @@ func TestCreateShare(t *testing.T) {
 				Method: "POST",
 				Resource: "/api/2.1/unity-catalog/shares",
 				ExpectedRequest: ShareInfo{
-					Name: "a",
+					ShareInfo: sharing.ShareInfo{
+						Name: "a",
+					},
 				},
 				Response: ShareInfo{
-					Name: "a",
-				},
+					ShareInfo: sharing.ShareInfo{
+						Name: "a",
+					}},
 			},
 			{
 				Method: "PATCH",
 				Resource: "/api/2.1/unity-catalog/shares/a",
-				ExpectedRequest: ShareUpdates{
+				ExpectedRequest: sharing.UpdateShare{
 					Owner: "admin",
-					Updates: []ShareDataChange{
+					Updates: []sharing.SharedDataObjectUpdate{
 						{
 							Action: "ADD",
-							DataObject: SharedDataObject{
+							DataObject: &sharing.SharedDataObject{
 								Name: "main.a",
 								DataObjectType: "TABLE",
 								Comment: "c",
@@ -206,7 +215,7 @@ func TestCreateShare(t *testing.T) {
 						},
 						{
 							Action: "ADD",
-							DataObject: SharedDataObject{
+							DataObject: &sharing.SharedDataObject{
 								Name: "main.b",
 								DataObjectType: "TABLE",
 								Comment: "c",
@@ -215,27 +224,30 @@ func TestCreateShare(t *testing.T) {
 					},
 				},
 				Response: ShareInfo{
-					Name: "a",
+					ShareInfo: sharing.ShareInfo{
+						Name: "a",
+					},
 				},
 			},
 			{
 				Method: "GET",
 				Resource: "/api/2.1/unity-catalog/shares/a?include_shared_data=true",
 				Response: ShareInfo{
-					Name: "a",
-					Owner: "admin",
-					Objects: []SharedDataObject{
-						{
-							Name: "main.a",
-							DataObjectType: "TABLE",
-							Comment: "c",
-						},
-						{
-							Name: "main.b",
-							DataObjectType: "TABLE",
-							Comment: "c",
-						},
-					},
+					ShareInfo: sharing.ShareInfo{
+						Name: "a",
+						Owner: "admin",
+						Objects: []sharing.SharedDataObject{
+							{
+								Name: "main.a",
+								DataObjectType: "TABLE",
+								Comment: "c",
+							},
+							{
+								Name: "main.b",
+								DataObjectType: "TABLE",
+								Comment: "c",
+							},
+						}},
 				},
 			},
@@ -265,17 +277,18 @@ func TestUpdateShare(t *testing.T) {
 				Method: "GET",
 				Resource: "/api/2.1/unity-catalog/shares/abc?include_shared_data=true",
 				Response: ShareInfo{
-					Name: "abc",
-					Objects: []SharedDataObject{
-						{
-							Name: "d",
-							DataObjectType: "TABLE",
-							Comment: "d",
-							SharedAs: "",
-							AddedAt: 0,
-							AddedBy: "",
-						},
-					},
+					ShareInfo: sharing.ShareInfo{
+						Name: "abc",
+						Objects: []sharing.SharedDataObject{
+							{
+								Name: "d",
+								DataObjectType: "TABLE",
+								Comment: "d",
+								SharedAs: "",
+								AddedAt: 0,
+								AddedBy: "",
+							},
+						}},
 				},
 			},
 			{
@@ -288,11 +301,11 @@ func TestUpdateShare(t *testing.T) {
 			{
 				Method: "PATCH",
 				Resource: "/api/2.1/unity-catalog/shares/abc",
-				ExpectedRequest: ShareUpdates{
-					Updates: []ShareDataChange{
+				ExpectedRequest: sharing.UpdateShare{
+					Updates: []sharing.SharedDataObjectUpdate{
 						{
 							Action: "REMOVE",
-							DataObject: SharedDataObject{
+							DataObject: &sharing.SharedDataObject{
 								Comment: "d",
 								DataObjectType: "TABLE",
 								Name: "d",
@@ -300,7 +313,7 @@ func TestUpdateShare(t *testing.T) {
 						},
 						{
 							Action: "ADD",
-							DataObject: SharedDataObject{
+							DataObject: &sharing.SharedDataObject{
 								Comment: "c",
 								DataObjectType: "TABLE",
 								Name: "a",
@@ -308,7 +321,7 @@ func TestUpdateShare(t *testing.T) {
 						},
 						{
 							Action: "ADD",
-							DataObject: SharedDataObject{
+							DataObject: &sharing.SharedDataObject{
 								Comment: "c",
 								DataObjectType: "TABLE",
 								Name: "b",
@@ -321,26 +334,27 @@ func TestUpdateShare(t *testing.T) {
 				Method: "GET",
 				Resource: "/api/2.1/unity-catalog/shares/abc?include_shared_data=true",
 				Response: ShareInfo{
-					Name: "abc",
-					Owner: "admin",
-					Objects: []SharedDataObject{
-						{
-							Name: "a",
-							DataObjectType: "TABLE",
-							Comment: "c",
-							SharedAs: "",
-							AddedAt: 0,
-							AddedBy: "",
-						},
-						{
-							Name: "b",
-							DataObjectType: "TABLE",
-							Comment: "c",
-							SharedAs: "",
-							AddedAt: 0,
-							AddedBy: "",
-						},
-					},
+					sharing.ShareInfo{
+						Name: "abc",
+						Owner: "admin",
+						Objects: []sharing.SharedDataObject{
+							{
+								Name: "a",
+								DataObjectType: "TABLE",
+								Comment: "c",
+								SharedAs: "",
+								AddedAt: 0,
+								AddedBy: "",
+							},
+							{
+								Name: "b",
+								DataObjectType: "TABLE",
+								Comment: "c",
+								SharedAs: "",
+								AddedAt: 0,
+								AddedBy: "",
+							},
+						}},
 				},
 			},
@@ -375,17 +389,18 @@ func TestUpdateShareRollback(t *testing.T) {
 				Method: "GET",
 				Resource: "/api/2.1/unity-catalog/shares/abc?include_shared_data=true",
 				Response: ShareInfo{
-					Name: "abc",
-					Objects: []SharedDataObject{
-						{
-							Name: "d",
-							DataObjectType: "TABLE",
-							Comment: "d",
-							SharedAs: "",
-							AddedAt: 0,
-							AddedBy: "",
-						},
-					},
+					sharing.ShareInfo{
+						Name: "abc",
+						Objects: []sharing.SharedDataObject{
+							{
+								Name: "d",
+								DataObjectType: "TABLE",
+								Comment: "d",
+								SharedAs: "",
+								AddedAt: 0,
+								AddedBy: "",
+							},
+						}},
 				},
 			},
 			{
@@ -398,11 +413,11 @@ func TestUpdateShareRollback(t *testing.T) {
 			{
 				Method: "PATCH",
 				Resource: "/api/2.1/unity-catalog/shares/abc",
-				ExpectedRequest: ShareUpdates{
-					Updates: []ShareDataChange{
+				ExpectedRequest: sharing.UpdateShare{
+					Updates: []sharing.SharedDataObjectUpdate{
 						{
 							Action: "REMOVE",
-							DataObject: SharedDataObject{
+							DataObject: &sharing.SharedDataObject{
 								Comment: "d",
 								DataObjectType: "TABLE",
 								Name: "d",
@@ -410,7 +425,7 @@ func TestUpdateShareRollback(t *testing.T) {
 						},
 						{
 							Action: "ADD",
-							DataObject: SharedDataObject{
+							DataObject: &sharing.SharedDataObject{
 								Comment: "c",
 								DataObjectType: "TABLE",
 								Name: "a",
@@ -418,7 +433,7 @@ func TestUpdateShareRollback(t *testing.T) {
 						},
 						{
 							Action: "ADD",
-							DataObject: SharedDataObject{
+							DataObject: &sharing.SharedDataObject{
 								Comment: "c",
 								DataObjectType: "TABLE",
 								Name: "b",
@@ -443,26 +458,27 @@ func TestUpdateShareRollback(t *testing.T) {
 				Method: "GET",
 				Resource: "/api/2.1/unity-catalog/shares/abc?include_shared_data=true",
 				Response: ShareInfo{
-					Name: "abc",
-					Owner: "admin",
-					Objects: []SharedDataObject{
-						{
-							Name: "a",
-							DataObjectType: "TABLE",
-							Comment: "c",
-							SharedAs: "",
-							AddedAt: 0,
-							AddedBy: "",
-						},
-						{
-							Name: "b",
-							DataObjectType: "TABLE",
-							Comment: "c",
-							SharedAs: "",
-							AddedAt: 0,
-							AddedBy: "",
-						},
-					},
+					sharing.ShareInfo{
+						Name: "abc",
+						Owner: "admin",
+						Objects: []sharing.SharedDataObject{
+							{
+								Name: "a",
+								DataObjectType: "TABLE",
+								Comment: "c",
+								SharedAs: "",
+								AddedAt: 0,
+								AddedBy: "",
+							},
+							{
+								Name: "b",
+								DataObjectType: "TABLE",
+								Comment: "c",
+								SharedAs: "",
+								AddedAt: 0,
+								AddedBy: "",
+							},
+						}},
 				},
 			},
@@ -499,35 +515,38 @@ func TestUpdateShare_NoChanges(t *testing.T) {
 				Method: "GET",
 				Resource: "/api/2.1/unity-catalog/shares/abc?include_shared_data=true",
 				Response: ShareInfo{
-					Name: "abc",
-					Objects: []SharedDataObject{
-						{
-							Name: "d",
-							DataObjectType: "TABLE",
-							Comment: "c",
-							SharedAs: "",
-							AddedAt: 0,
-							AddedBy: "",
+					sharing.ShareInfo{
+						Name: "abc",
+						Objects: []sharing.SharedDataObject{
+							{
+								Name: "d",
+								DataObjectType: "TABLE",
+								Comment: "c",
+								SharedAs: "",
+								AddedAt: 0,
+								AddedBy: "",
+								ForceSendFields: []string{"Name", "Comment", "DataObjectType"},
+							},
 						},
-					},
-				},
+					}},
 			},
 			{
 				Method: "GET",
 				Resource: "/api/2.1/unity-catalog/shares/abc?include_shared_data=true",
 				Response: ShareInfo{
-					Name: "abc",
-					Objects: []SharedDataObject{
-						{
-							Name: "d",
-							DataObjectType: "TABLE",
-							Comment: "c",
-							SharedAs: "",
-							AddedAt: 0,
-							AddedBy: "",
+					sharing.ShareInfo{
+						Name: "abc",
+						Objects: []sharing.SharedDataObject{
+							{
+								Name: "d",
+								DataObjectType: "TABLE",
+								Comment: "c",
+								SharedAs: "",
+								AddedAt: 0,
+								AddedBy: "",
+							},
 						},
-					},
-				},
+					}},
 			},
 		},
 		ID: "abc",
@@ -555,7 +574,9 @@ func TestCreateShare_ThrowError(t *testing.T) {
 				Method: "POST",
 				Resource: "/api/2.1/unity-catalog/shares",
 				ExpectedRequest: ShareInfo{
-					Name: "a",
+					sharing.ShareInfo{
+						Name: "a",
+					},
 				},
 				Response: common.APIErrorBody{
 					ErrorCode: "INVALID_REQUEST",
@@ -591,20 +612,24 @@ func TestCreateShareButPatchFails(t *testing.T) {
 				Method: "POST",
 				Resource: "/api/2.1/unity-catalog/shares",
 				ExpectedRequest: ShareInfo{
-					Name: "a",
+					sharing.ShareInfo{
+						Name: "a",
+					},
 				},
 				Response: ShareInfo{
-					Name: "a",
+					sharing.ShareInfo{
+						Name: "a",
+					},
 				},
 			},
 			{
 				Method: "PATCH",
 				Resource: "/api/2.1/unity-catalog/shares/a",
-				ExpectedRequest: ShareUpdates{
-					Updates: []ShareDataChange{
+				ExpectedRequest: sharing.UpdateShare{
+					Updates: []sharing.SharedDataObjectUpdate{
 						{
 							Action: "ADD",
-							DataObject: SharedDataObject{
+							DataObject: &sharing.SharedDataObject{
 								Name: "main.a",
 								DataObjectType: "TABLE",
 								Comment: "c",
@@ -612,7 +637,7 @@ func TestCreateShareButPatchFails(t *testing.T) {
 						},
 						{
 							Action: "ADD",
-							DataObject: SharedDataObject{
+							DataObject: &sharing.SharedDataObject{
 								Name: "main.b",
 								DataObjectType: "TABLE",
 								Comment: "c",
@@ -659,27 +684,28 @@ func TestUpdateShareComplexDiff(t *testing.T) {
 				Method: "GET",
 				Resource: "/api/2.1/unity-catalog/shares/abc?include_shared_data=true",
 				Response: ShareInfo{
-					Name: "abc",
-					Objects: []SharedDataObject{
-						{
-							Name: "a",
-							DataObjectType: "TABLE",
-							Comment: "c",
-							SharedAs: "b",
-							AddedAt: 0,
-							AddedBy: "",
-						},
-					},
+					sharing.ShareInfo{
+						Name: "abc",
+						Objects: []sharing.SharedDataObject{
+							{
+								Name: "a",
+								DataObjectType: "TABLE",
+								Comment: "c",
+								SharedAs: "b",
+								AddedAt: 0,
+								AddedBy: "",
+							},
+						}},
 				},
 			},
 			{
 				Method: "PATCH",
 				Resource: "/api/2.1/unity-catalog/shares/abc",
-				ExpectedRequest: ShareUpdates{
-					Updates: []ShareDataChange{
+				ExpectedRequest: sharing.UpdateShare{
+					Updates: []sharing.SharedDataObjectUpdate{
 						{
 							Action: "ADD",
-							DataObject: SharedDataObject{
+							DataObject: &sharing.SharedDataObject{
 								Comment: "c",
 								DataObjectType: "TABLE",
 								Name: "b",
@@ -692,25 +718,26 @@ func TestUpdateShareComplexDiff(t *testing.T) {
 				Method: "GET",
 				Resource: "/api/2.1/unity-catalog/shares/abc?include_shared_data=true",
 				Response: ShareInfo{
-					Name: "abc",
-					Objects: []SharedDataObject{
-						{
-							Name: "a",
-							DataObjectType: "TABLE",
-							Comment: "c",
-							SharedAs: "",
-							AddedAt: 0,
-							AddedBy: "",
-						},
-						{
-							Name: "b",
-							DataObjectType: "TABLE",
-							Comment: "c",
-							SharedAs: "",
-							AddedAt: 0,
-							AddedBy: "",
-						},
-					},
+					sharing.ShareInfo{
+						Name: "abc",
+						Objects: []sharing.SharedDataObject{
+							{
+								Name: "a",
+								DataObjectType: "TABLE",
+								Comment: "c",
+								SharedAs: "",
+								AddedAt: 0,
+								AddedBy: "",
+							},
+							{
+								Name: "b",
+								DataObjectType: "TABLE",
+								Comment: "c",
+								SharedAs: "",
+								AddedAt: 0,
+								AddedBy: "",
+							},
+						}},
 				},
 			},
 		},