diff --git a/common/resource.go b/common/resource.go
index 9e639eb962..4e357305db 100644
--- a/common/resource.go
+++ b/common/resource.go
@@ -443,13 +443,20 @@ func genericDatabricksData[T, P, C any](
 // WorkspacePathPrefixDiffSuppress suppresses diffs for workspace paths where both sides
 // may or may not include the `/Workspace` prefix.
 //
-// This is the case for dashboards where at create time, the user may include the `/Workspace`
+// This is the case for dashboards, alerts, and queries where at create time, the user may include the `/Workspace`
 // prefix for the `parent_path` field, but the read response will not include the prefix.
 func WorkspacePathPrefixDiffSuppress(k, old, new string, d *schema.ResourceData) bool {
 	const prefix = "/Workspace"
 	return strings.TrimPrefix(old, prefix) == strings.TrimPrefix(new, prefix)
 }
 
+// WorkspaceOrEmptyPathPrefixDiffSuppress is similar to WorkspacePathPrefixDiffSuppress but also suppresses diffs
+// when the new value is empty (not specified by the user).
+func WorkspaceOrEmptyPathPrefixDiffSuppress(k, old, new string, d *schema.ResourceData) bool {
+	const prefix = "/Workspace"
+	return (old != "" && new == "") || strings.TrimPrefix(old, prefix) == strings.TrimPrefix(new, prefix)
+}
+
 func EqualFoldDiffSuppress(k, old, new string, d *schema.ResourceData) bool {
 	if strings.EqualFold(old, new) {
 		log.Printf("[INFO] Suppressing diff on %s", k)
diff --git a/common/resource_test.go b/common/resource_test.go
index e93885a02c..f01f373ff5 100644
--- a/common/resource_test.go
+++ b/common/resource_test.go
@@ -187,6 +187,15 @@ func TestWorkspacePathPrefixDiffSuppress(t *testing.T) {
 	assert.False(t, WorkspacePathPrefixDiffSuppress("k", "/Workspace/1", "/Workspace/2", nil))
 }
 
+func TestWorkspaceOrEmptyPathPrefixDiffSuppress(t *testing.T) {
+	assert.True(t, WorkspaceOrEmptyPathPrefixDiffSuppress("k", "/Workspace/foo/bar", "/Workspace/foo/bar", nil))
+	assert.True(t, WorkspaceOrEmptyPathPrefixDiffSuppress("k", "/Workspace/foo/bar", "/foo/bar", nil))
+	assert.True(t, WorkspaceOrEmptyPathPrefixDiffSuppress("k", "/foo/bar", "/Workspace/foo/bar", nil))
+	assert.True(t, WorkspaceOrEmptyPathPrefixDiffSuppress("k", "/foo/bar", "/foo/bar", nil))
+	assert.True(t, WorkspaceOrEmptyPathPrefixDiffSuppress("k", "/foo/bar", "", nil))
+	assert.False(t, WorkspaceOrEmptyPathPrefixDiffSuppress("k", "/Workspace/1", "/Workspace/2", nil))
+}
+
 func TestEqualFoldDiffSuppress(t *testing.T) {
 	assert.True(t, EqualFoldDiffSuppress("k", "A", "a", nil))
 	assert.False(t, EqualFoldDiffSuppress("k", "A", "A2", nil))
diff --git a/docs/resources/alert.md b/docs/resources/alert.md
new file mode 100644
index 0000000000..f15bdaf116
--- /dev/null
+++ b/docs/resources/alert.md
@@ -0,0 +1,196 @@
+---
+subcategory: "Databricks SQL"
+---
+# databricks_alert Resource
+
+This resource allows you to manage [Databricks SQL Alerts](https://docs.databricks.com/en/sql/user/alerts/index.html). It supersedes the [databricks_sql_alert](sql_alert.md) resource - see the migration guide below for more details.
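As an illustration of the `parent_path` prefix handling added in `common/resource.go` above, here is a minimal sketch (not part of the provider's own example that follows): the provider suppresses diffs that differ only in the `/Workspace` prefix, so either form of the path can be used without producing a permanent diff after the first apply. The `databricks_sql_query.this` reference is an assumption, matching the query defined in the example below.

```hcl
resource "databricks_alert" "prefix_example" {
  query_id     = databricks_sql_query.this.id # assumes a query like the one in the example below
  display_name = "TF alert with explicit parent path"
  parent_path  = "/Workspace/Shared/Alerts" # treated as equivalent to "/Shared/Alerts"
  condition {
    op = "GREATER_THAN"
    operand {
      column {
        name = "value"
      }
    }
    threshold {
      value {
        double_value = 42
      }
    }
  }
}
```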
+ +## Example Usage + +```hcl +resource "databricks_directory" "shared_dir" { + path = "/Shared/Queries" +} + +# This will be replaced with new databricks_query resource +resource "databricks_sql_query" "this" { + data_source_id = databricks_sql_endpoint.example.data_source_id + name = "My Query Name" + query = "SELECT 42 as value" + parent = "folders/${databricks_directory.shared_dir.object_id}" +} + +resource "databricks_alert" "alert" { + query_id = databricks_sql_query.this.id + display_name = "TF new alert" + parent_path = databricks_directory.shared_dir.path + condition { + op = "GREATER_THAN" + operand { + column { + name = "value" + } + } + threshold { + value { + double_value = 42 + } + } + } +} +``` + +## Argument Reference + +The following arguments are available: + +* `query_id` - (Required, String) ID of the query evaluated by the alert. +* `display_name` - (Required, String) Name of the alert. +* `condition` - (Required) Trigger conditions of the alert. Block consists of the following attributes: + * `op` - (Required, String Enum) Operator used for comparison in alert evaluation. (Enum: `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`, `LESS_THAN`, `LESS_THAN_OR_EQUAL`, `EQUAL`, `NOT_EQUAL`, `IS_NULL`) + * `operand` - (Required, Block) Name of the column from the query result to use for comparison in alert evaluation: + * `column` - (Required, Block) Block describing the column from the query result to use for comparison in alert evaluation: + * `name` - (Required, String) Name of the column. + * `threshold` - (Optional for `IS_NULL` operation, Block) Threshold value used for comparison in alert evaluation: + * `value` - (Required, Block) actual value used in comparison (one of the attributes is required): + * `string_value` - string value to compare against string results. + * `double_value` - double value to compare against integer and double results. + * `bool_value` - boolean value (`true` or `false`) to compare against boolean results. + * `empty_result_state` - (Optional, String Enum) Alert state if the result is empty (`UNKNOWN`, `OK`, `TRIGGERED`) +* `custom_subject` - (Optional, String) Custom subject of alert notification, if it exists. This includes email subject, Slack notification header, etc. See [Alerts API reference](https://docs.databricks.com/en/sql/user/alerts/index.html) for custom templating instructions. +* `custom_body` - (Optional, String) Custom body of alert notification, if it exists. See [Alerts API reference](https://docs.databricks.com/en/sql/user/alerts/index.html) for custom templating instructions. +* `parent_path` - (Optional, String) The path to a workspace folder containing the alert. The default is the user's home folder. If changed, the alert will be recreated. +* `seconds_to_retrigger` - (Optional, Integer) Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it can be triggered again. If 0 or not specified, the alert will not be triggered again. +* `owner_user_name` - (Optional, String) Alert owner's username. +* `notify_on_ok` - (Optional, Boolean) Whether to notify alert subscribers when alert returns back to normal. + +## Attribute Reference + +In addition to all the arguments above, the following attributes are exported: + +* `id` - unique ID of the Alert. +* `lifecycle_state` - The workspace state of the alert. Used for tracking trashed status. (Possible values are `ACTIVE` or `TRASHED`). +* `state` - Current state of the alert's trigger status (`UNKNOWN`, `OK`, `TRIGGERED`). 
This field is set to `UNKNOWN` if the alert has not yet been evaluated or if it ran into an error during the last evaluation.
+* `create_time` - The timestamp string indicating when the alert was created.
+* `update_time` - The timestamp string indicating when the alert was updated.
+* `trigger_time` - The timestamp string when the alert was last triggered if the alert has been triggered before.
+
+## Migrating from `databricks_sql_alert` resource
+
+Under the hood, the new resource uses the same data as `databricks_sql_alert`, but it is exposed via a different API. This means that we can migrate existing alerts without recreating them. This operation is done in a few steps:
+
+* Record the ID of the existing `databricks_sql_alert`, for example, by executing the `terraform state show databricks_sql_alert.alert` command.
+* Create the code for the new implementation by performing the following changes:
+  * the `name` attribute is now named `display_name`
+  * the `parent` attribute (if present) is renamed to `parent_path`, and its value should be converted from `folders/object_id` to the actual path.
+  * the `options` block is converted into the `condition` block with the following changes:
+    * the value of the `op` attribute should be converted from a mathematical operator into a string name, e.g., `>` becomes `GREATER_THAN`, `==` becomes `EQUAL`, etc.
+    * the `column` attribute becomes the `operand` block
+    * the `value` attribute becomes the `threshold` block. **Please note that the old implementation always used strings, so you may see changes after import if you use `double_value` or `bool_value` inside the block.**
+  * the `rearm` attribute is renamed to `seconds_to_retrigger`.
+
+For example, if we have the original `databricks_sql_alert` defined as:
+
+```hcl
+resource "databricks_sql_alert" "alert" {
+  query_id = databricks_sql_query.this.id
+  name     = "My Alert"
+  parent   = "folders/${databricks_directory.shared_dir.object_id}"
+  options {
+    column = "value"
+    op     = ">"
+    value  = "42"
+    muted  = false
+  }
+}
+```
+
+we'll have a new resource defined as:
+
+```hcl
+resource "databricks_alert" "alert" {
+  query_id     = databricks_sql_query.this.id
+  display_name = "My Alert"
+  parent_path  = databricks_directory.shared_dir.path
+  condition {
+    op = "GREATER_THAN"
+    operand {
+      column {
+        name = "value"
+      }
+    }
+    threshold {
+      value {
+        double_value = 42
+      }
+    }
+  }
+}
+```
+
+### For Terraform version >= 1.7.0
+
+Terraform 1.7 introduced the [removed](https://developer.hashicorp.com/terraform/language/resources/syntax#removing-resources) block in addition to the [import](https://developer.hashicorp.com/terraform/language/import) block introduced in Terraform 1.5. Together they make import and removal of resources easier, avoiding manual execution of the `terraform import` and `terraform state rm` commands.
+
+With Terraform 1.7+, the migration looks like the following:
+
+* Remove the old alert definition and replace it with the new one.
+* Adjust references, such as `databricks_permissions` (see the sketch after this list).
+* Add `import` and `removed` blocks like this:
+
+```hcl
+import {
+  to = databricks_alert.alert
+  id = "<alert-id>"
+}
+
+removed {
+  from = databricks_sql_alert.alert
+
+  lifecycle {
+    destroy = false
+  }
+}
+```
+
+* Run the `terraform plan` command to check for possible changes, such as value type changes.
+* Run the `terraform apply` command to apply the changes.
+* Remove the `import` and `removed` blocks from the code.
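As a concrete illustration of the "Adjust references" step above, here is a minimal sketch of a `databricks_permissions` resource being pointed at the new resource. It assumes the permissions were previously defined against `databricks_sql_alert.alert` and reuses the `alert_usage` name from the Access Control section below.

```hcl
resource "databricks_permissions" "alert_usage" {
  # previously: sql_alert_id = databricks_sql_alert.alert.id
  sql_alert_id = databricks_alert.alert.id
  access_control {
    group_name       = "users"
    permission_level = "CAN_RUN"
  }
}
```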
+
+### For Terraform version < 1.7.0
+
+* Remove the old alert definition and replace it with the new one.
+* Remove the old resource from the state with the `terraform state rm databricks_sql_alert.alert` command.
+* Import the new resource with the `terraform import databricks_alert.alert <alert-id>` command.
+* Adjust references, such as `databricks_permissions`.
+* Run the `terraform plan` command to check for possible changes, such as value type changes.
+
+## Access Control
+
+[databricks_permissions](permissions.md#sql-alert-usage) can control which groups or individual users can *Manage*, *Edit*, *Run* or *View* individual alerts.
+
+```hcl
+resource "databricks_permissions" "alert_usage" {
+  sql_alert_id = databricks_alert.alert.id
+  access_control {
+    group_name       = "users"
+    permission_level = "CAN_RUN"
+  }
+}
+```
+
+## Import
+
+This resource can be imported using the alert ID:
+
+```bash
+terraform import databricks_alert.this <alert-id>
+```
+
+## Related Resources
+
+The following resources are often used in the same context:
+
+* [databricks_sql_query](sql_query.md) to manage Databricks SQL [Queries](https://docs.databricks.com/sql/user/queries/index.html).
+* [databricks_sql_endpoint](sql_endpoint.md) to manage Databricks SQL [Endpoints](https://docs.databricks.com/sql/admin/sql-endpoints.html).
+* [databricks_directory](directory.md) to manage directories in the [Databricks Workspace](https://docs.databricks.com/workspace/workspace-objects.html).
diff --git a/docs/resources/sql_alert.md b/docs/resources/sql_alert.md
index 689a52a5d5..12063c2a70 100644
--- a/docs/resources/sql_alert.md
+++ b/docs/resources/sql_alert.md
@@ -58,6 +58,18 @@ In addition to all arguments above, the following attributes are exported:
 
 * `id` - unique ID of the SQL Alert.
 
+## Access Control
+
+[databricks_permissions](permissions.md#sql-alert-usage) can control which groups or individual users can *Manage*, *Edit*, *Run* or *View* individual alerts.
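For example, a minimal sketch granting a group run access (the `databricks_sql_alert.this` reference is an assumption, matching the import example below):

```hcl
resource "databricks_permissions" "sql_alert_usage" {
  sql_alert_id = databricks_sql_alert.this.id
  access_control {
    group_name       = "users"
    permission_level = "CAN_RUN"
  }
}
```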
+ +## Import + +This resource can be imported using alert ID: + +```bash +terraform import databricks_sql_alert.this +``` + ## Related Resources The following resources are often used in the same context: diff --git a/internal/acceptance/alert_test.go b/internal/acceptance/alert_test.go new file mode 100644 index 0000000000..22ed542468 --- /dev/null +++ b/internal/acceptance/alert_test.go @@ -0,0 +1,60 @@ +package acceptance + +import ( + "testing" +) + +func TestAccAlert(t *testing.T) { + WorkspaceLevel(t, Step{ + Template: ` + resource "databricks_sql_query" "this" { + data_source_id = "{env.TEST_DEFAULT_WAREHOUSE_DATASOURCE_ID}" + name = "tf-{var.RANDOM}" + query = "SELECT 1 AS p1, 2 as p2" + } + + resource "databricks_alert" "alert" { + query_id = databricks_sql_query.this.id + display_name = "tf-alert-{var.RANDOM}" + condition { + op = "EQUAL" + operand { + column { + name = "p2" + } + } + threshold { + value { + double_value = 2 + } + } + } + } +`, + }, Step{ + Template: ` + resource "databricks_sql_query" "this" { + data_source_id = "{env.TEST_DEFAULT_WAREHOUSE_DATASOURCE_ID}" + name = "tf-{var.RANDOM}" + query = "SELECT 1 AS p1, 2 as p2" + } + + resource "databricks_alert" "alert" { + query_id = databricks_sql_query.this.id + display_name = "tf-alert-{var.RANDOM}" + condition { + op = "GREATER_THAN" + operand { + column { + name = "p2" + } + } + threshold { + value { + double_value = 3 + } + } + } + }`, + }) +} diff --git a/internal/acceptance/permissions_test.go b/internal/acceptance/permissions_test.go index bcd67fa8c9..48cbe40b86 100644 --- a/internal/acceptance/permissions_test.go +++ b/internal/acceptance/permissions_test.go @@ -837,3 +837,42 @@ func TestAccPermissions_ServingEndpoint(t *testing.T) { ExpectError: regexp.MustCompile("cannot remove management permissions for the current user for serving-endpoint, allowed levels: CAN_MANAGE"), }) } + +func TestAccPermissions_Alert(t *testing.T) { + loadDebugEnvIfRunsFromIDE(t, "workspace") + alertTemplate := ` + resource "databricks_sql_query" "this" { + name = "{var.STICKY_RANDOM}-query" + query = "SELECT 1 AS p1, 2 as p2" + data_source_id = "{env.TEST_DEFAULT_WAREHOUSE_DATASOURCE_ID}" + } + + resource "databricks_alert" "this" { + query_id = databricks_sql_query.this.id + display_name = "{var.STICKY_RANDOM}-alert" + condition { + op = "GREATER_THAN" + operand { + column { + name = "value" + } + } + threshold { + value { + double_value = 42 + } + } + } + } +` + WorkspaceLevel(t, Step{ + Template: alertTemplate + makePermissionsTestStage("sql_alert_id", "databricks_alert.this.id", groupPermissions("CAN_VIEW")), + }, Step{ + Template: alertTemplate + makePermissionsTestStage("sql_alert_id", "databricks_alert.this.id", + currentPrincipalPermission(t, "CAN_MANAGE"), groupPermissions("CAN_VIEW", "CAN_EDIT", "CAN_RUN", "CAN_MANAGE")), + }, Step{ + Template: alertTemplate + makePermissionsTestStage("sql_alert_id", "databricks_alert.this.id", + currentPrincipalPermission(t, "CAN_VIEW"), groupPermissions("CAN_VIEW", "CAN_EDIT", "CAN_RUN", "CAN_MANAGE")), + ExpectError: regexp.MustCompile("cannot remove management permissions for the current user for alert, allowed levels: CAN_MANAGE"), + }) +} diff --git a/internal/acceptance/sql_alert_test.go b/internal/acceptance/sql_alert_test.go index 612df0653e..9db6bb72da 100644 --- a/internal/acceptance/sql_alert_test.go +++ b/internal/acceptance/sql_alert_test.go @@ -4,7 +4,7 @@ import ( "testing" ) -func TestAccAlert(t *testing.T) { +func TestAccSqlAlert(t *testing.T) { WorkspaceLevel(t, Step{ 
Template: ` resource "databricks_sql_query" "this" { diff --git a/internal/providers/sdkv2/sdkv2.go b/internal/providers/sdkv2/sdkv2.go index 65ab1f4973..7c90851314 100644 --- a/internal/providers/sdkv2/sdkv2.go +++ b/internal/providers/sdkv2/sdkv2.go @@ -128,6 +128,7 @@ func DatabricksProvider() *schema.Provider { }, ResourcesMap: map[string]*schema.Resource{ // must be in alphabetical order "databricks_access_control_rule_set": permissions.ResourceAccessControlRuleSet().ToResource(), + "databricks_alert": sql.ResourceAlert().ToResource(), "databricks_artifact_allowlist": catalog.ResourceArtifactAllowlist().ToResource(), "databricks_aws_s3_mount": storage.ResourceAWSS3Mount().ToResource(), "databricks_azure_adls_gen1_mount": storage.ResourceAzureAdlsGen1Mount().ToResource(), diff --git a/sql/resource_alert.go b/sql/resource_alert.go new file mode 100644 index 0000000000..03281d5006 --- /dev/null +++ b/sql/resource_alert.go @@ -0,0 +1,125 @@ +package sql + +import ( + "context" + "log" + "strings" + + "github.com/databricks/databricks-sdk-go/service/sql" + "github.com/databricks/terraform-provider-databricks/common" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" +) + +func ResourceAlert() common.Resource { + s := common.StructToSchema(sql.Alert{}, func(m map[string]*schema.Schema) map[string]*schema.Schema { + common.CustomizeSchemaPath(m, "display_name").SetRequired() + common.CustomizeSchemaPath(m, "query_id").SetRequired() + common.CustomizeSchemaPath(m, "condition").SetRequired() + // TODO: can we automatically generate it from SDK? Or should we avoid validation at all? + common.CustomizeSchemaPath(m, "condition", "op").SetRequired().SetValidateFunc(validation.StringInSlice([]string{ + "GREATER_THAN", "GREATER_THAN_OR_EQUAL", "LESS_THAN", "LESS_THAN_OR_EQUAL", "EQUAL", "NOT_EQUAL", "IS_NULL"}, true)) + common.CustomizeSchemaPath(m, "condition", "op").SetRequired() + common.CustomizeSchemaPath(m, "parent_path").SetCustomSuppressDiff(common.WorkspaceOrEmptyPathPrefixDiffSuppress).SetForceNew() + common.CustomizeSchemaPath(m, "condition", "operand").SetRequired() + common.CustomizeSchemaPath(m, "condition", "operand", "column").SetRequired() + common.CustomizeSchemaPath(m, "condition", "operand", "column", "name").SetRequired() + common.CustomizeSchemaPath(m, "condition", "empty_result_state").SetValidateFunc( + validation.StringInSlice([]string{"UNKNOWN", "OK", "TRIGGERED"}, true)) + // We may not need it for some conditions + // common.CustomizeSchemaPath(m, "condition", "threshold").SetRequired() + common.CustomizeSchemaPath(m, "condition", "threshold", "value").SetRequired() + alof := []string{ + "condition.0.threshold.0.value.0.string_value", + "condition.0.threshold.0.value.0.double_value", + "condition.0.threshold.0.value.0.bool_value", + } + for _, f := range alof { + common.CustomizeSchemaPath(m, "condition", "threshold", "value", + strings.TrimPrefix(f, "condition.0.threshold.0.value.0.")).SetExactlyOneOf(alof) + } + common.CustomizeSchemaPath(m, "owner_user_name").SetSuppressDiff() + common.CustomizeSchemaPath(m, "id").SetReadOnly() + common.CustomizeSchemaPath(m, "create_time").SetReadOnly() + common.CustomizeSchemaPath(m, "lifecycle_state").SetReadOnly() + common.CustomizeSchemaPath(m, "state").SetReadOnly() + common.CustomizeSchemaPath(m, "trigger_time").SetReadOnly() + common.CustomizeSchemaPath(m, "update_time").SetReadOnly() + return m + }) + + return common.Resource{ + Create: func(ctx 
context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { + w, err := c.WorkspaceClient() + if err != nil { + return err + } + var a sql.CreateAlertRequestAlert + common.DataToStructPointer(d, s, &a) + apiAlert, err := w.Alerts.Create(ctx, sql.CreateAlertRequest{ + Alert: &a, + }) + if err != nil { + return err + } + d.SetId(apiAlert.Id) + owner := d.Get("owner_user_name").(string) + if owner != "" { + _, err = w.Alerts.Update(ctx, sql.UpdateAlertRequest{ + Alert: &sql.UpdateAlertRequestAlert{ + OwnerUserName: owner, + }, + Id: apiAlert.Id, + UpdateMask: "owner_user_name", + }) + } + return err + }, + Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { + w, err := c.WorkspaceClient() + if err != nil { + return err + } + apiAlert, err := w.Alerts.GetById(ctx, d.Id()) + if err != nil { + log.Printf("[WARN] error getting alert by ID: %v", err) + return err + } + parentPath := d.Get("parent_path").(string) + if parentPath != "" && strings.HasPrefix(apiAlert.ParentPath, "/Workspace") && !strings.HasPrefix(parentPath, "/Workspace") { + apiAlert.ParentPath = strings.TrimPrefix(parentPath, "/Workspace") + } + return common.StructToData(apiAlert, s, d) + }, + Update: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { + w, err := c.WorkspaceClient() + if err != nil { + return err + } + var a sql.UpdateAlertRequestAlert + updateMask := "display_name,query_id,seconds_to_retrigger,condition,custom_body,custom_subject" + if d.HasChange("owner_user_name") { + updateMask += ",owner_user_name" + } + if d.HasChange("notify_on_ok") { + updateMask += ",notify_on_ok" + a.ForceSendFields = append(a.ForceSendFields, "NotifyOnOk") + } + common.DataToStructPointer(d, s, &a) + _, err = w.Alerts.Update(ctx, sql.UpdateAlertRequest{ + Alert: &a, + Id: d.Id(), + UpdateMask: updateMask, + }) + return err + }, + Delete: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { + w, err := c.WorkspaceClient() + if err != nil { + return err + } + return w.Alerts.DeleteById(ctx, d.Id()) + }, + Schema: s, + } +} diff --git a/sql/resource_alert_test.go b/sql/resource_alert_test.go new file mode 100644 index 0000000000..f0559434b3 --- /dev/null +++ b/sql/resource_alert_test.go @@ -0,0 +1,245 @@ +package sql + +import ( + "net/http" + "testing" + + "github.com/databricks/databricks-sdk-go/apierr" + "github.com/databricks/databricks-sdk-go/experimental/mocks" + "github.com/databricks/databricks-sdk-go/service/sql" + "github.com/databricks/terraform-provider-databricks/qa" + "github.com/stretchr/testify/mock" +) + +var ( + alertResponse = sql.Alert{ + Id: "7890", + QueryId: "123456", + DisplayName: "TF new alert", + OwnerUserName: "user@domain.com", + Condition: &sql.AlertCondition{ + Op: "GREATER_THAN", + Operand: &sql.AlertConditionOperand{ + Column: &sql.AlertOperandColumn{ + Name: "value", + }, + }, + Threshold: &sql.AlertConditionThreshold{ + Value: &sql.AlertOperandValue{ + DoubleValue: 42, + }, + }, + }, + ParentPath: "/Workspace/Shared/Alerts", + } + createHcl = `query_id = "123456" + display_name = "TF new alert" + parent_path = "/Shared/Alerts" + owner_user_name = "user@domain.com" + condition { + op = "GREATER_THAN" + operand { + column { + name = "value" + } + } + threshold { + value { + double_value = 42 + } + } + }` + createAlertRequest = sql.CreateAlertRequest{ + Alert: &sql.CreateAlertRequestAlert{ + QueryId: "123456", + DisplayName: "TF new alert", + ParentPath: "/Shared/Alerts", + Condition: 
&sql.AlertCondition{ + Op: "GREATER_THAN", + Operand: &sql.AlertConditionOperand{ + Column: &sql.AlertOperandColumn{ + Name: "value", + }, + }, + Threshold: &sql.AlertConditionThreshold{ + Value: &sql.AlertOperandValue{ + DoubleValue: 42, + }, + }, + }, + }} +) + +func TestAlertCreate(t *testing.T) { + qa.ResourceFixture{ + MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) { + e := w.GetMockAlertsAPI().EXPECT() + e.Create(mock.Anything, createAlertRequest).Return(&alertResponse, nil) + e.Update(mock.Anything, sql.UpdateAlertRequest{ + Id: "7890", + UpdateMask: "owner_user_name", + Alert: &sql.UpdateAlertRequestAlert{ + OwnerUserName: "user@domain.com", + }, + }).Return(&alertResponse, nil) + e.GetById(mock.Anything, "7890").Return(&alertResponse, nil) + }, + Resource: ResourceAlert(), + Create: true, + HCL: createHcl, + }.ApplyAndExpectData(t, map[string]any{ + "id": "7890", + "query_id": "123456", + "display_name": "TF new alert", + "owner_user_name": "user@domain.com", + }) +} + +func TestAlertCreate_BackendError(t *testing.T) { + qa.ResourceFixture{ + MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) { + e := w.GetMockAlertsAPI().EXPECT() + e.Create(mock.Anything, createAlertRequest).Return(nil, &apierr.APIError{ + StatusCode: http.StatusBadRequest, + Message: "bad payload", + }) + }, + Resource: ResourceAlert(), + Create: true, + HCL: createHcl, + }.ExpectError(t, "bad payload") +} + +func TestAlertCreate_ErrorMultipleValues(t *testing.T) { + qa.ResourceFixture{ + Resource: ResourceAlert(), + Create: true, + HCL: `query_id = "123456" + display_name = "TF new alert" + parent_path = "/Shared/Alerts" + owner_user_name = "user@domain.com" + condition { + op = "GREATER_THAN" + operand { + column { + name = "value" + } + } + threshold { + value { + double_value = 42 + } + } + threshold { + value { + bool_value = 42 + } + } +}`, + }.ExpectError(t, "invalid config supplied. 
[condition.#.threshold] Too many list items") +} + +func TestAlertRead_Import(t *testing.T) { + qa.ResourceFixture{ + MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) { + w.GetMockAlertsAPI().EXPECT().GetById(mock.Anything, "7890").Return(&alertResponse, nil) + }, + Resource: ResourceAlert(), + Read: true, + ID: "7890", + New: true, + }.ApplyAndExpectData(t, map[string]any{ + "id": "7890", + "query_id": "123456", + "display_name": "TF new alert", + "owner_user_name": "user@domain.com", + }) +} + +func TestAlertRead_Error(t *testing.T) { + qa.ResourceFixture{ + MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) { + w.GetMockAlertsAPI().EXPECT().GetById(mock.Anything, "7890").Return(nil, &apierr.APIError{ + StatusCode: http.StatusBadRequest, + Message: "bad payload", + }) + }, + Resource: ResourceAlert(), + Read: true, + ID: "7890", + New: true, + }.ExpectError(t, "bad payload") +} + +func TestAlertDelete(t *testing.T) { + qa.ResourceFixture{ + MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) { + w.GetMockAlertsAPI().EXPECT().DeleteById(mock.Anything, "7890").Return(nil) + }, + Resource: ResourceAlert(), + Delete: true, + ID: "7890", + New: true, + }.ApplyNoError(t) +} + +func TestAlertUpdate(t *testing.T) { + qa.ResourceFixture{ + MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) { + e := w.GetMockAlertsAPI().EXPECT() + e.Update(mock.Anything, sql.UpdateAlertRequest{ + Id: "7890", + UpdateMask: "display_name,query_id,seconds_to_retrigger,condition,custom_body,custom_subject,owner_user_name,notify_on_ok", + Alert: &sql.UpdateAlertRequestAlert{ + QueryId: "123456", + DisplayName: "TF new alert", + OwnerUserName: "user@domain.com", + Condition: &sql.AlertCondition{ + Op: "GREATER_THAN", + Operand: &sql.AlertConditionOperand{ + Column: &sql.AlertOperandColumn{ + Name: "value", + }, + }, + Threshold: &sql.AlertConditionThreshold{ + Value: &sql.AlertOperandValue{ + DoubleValue: 42, + }, + }, + }, + ForceSendFields: []string{"NotifyOnOk"}, + }}).Return(&alertResponse, nil) + e.GetById(mock.Anything, "7890").Return(&alertResponse, nil) + }, + Resource: ResourceAlert(), + Update: true, + ID: "7890", + New: true, + InstanceState: map[string]string{ + "id": "7890", + "query_id": "123456", + "notify_on_ok": "true", + }, + HCL: `query_id = "123456" + display_name = "TF new alert" + owner_user_name = "user@domain.com" + condition { + op = "GREATER_THAN" + operand { + column { + name = "value" + } + } + threshold { + value { + double_value = 42 + } + } + }`, + }.ApplyAndExpectData(t, map[string]any{ + "id": "7890", + "query_id": "123456", + "display_name": "TF new alert", + "owner_user_name": "user@domain.com", + }) +} diff --git a/sql/resource_sql_alerts.go b/sql/resource_sql_alerts.go index 156834bd44..7a54a59ecb 100644 --- a/sql/resource_sql_alerts.go +++ b/sql/resource_sql_alerts.go @@ -178,6 +178,7 @@ func ResourceSqlAlert() common.Resource { } return w.AlertsLegacy.DeleteByAlertId(ctx, data.Id()) }, - Schema: s, + Schema: s, + DeprecationMessage: "This resource is deprecated and will be removed in the future. Please use the `databricks_alert` resource instead.", } }
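A note on the schema customization in `ResourceAlert` above: `condition.threshold` is deliberately not marked required because some operators, such as `IS_NULL`, do not compare against a value. A minimal, untested sketch of such an alert follows; the `databricks_sql_query.this` reference is an assumption.

```hcl
resource "databricks_alert" "null_check" {
  query_id     = databricks_sql_query.this.id # assumes an existing query resource
  display_name = "Value is null"
  condition {
    op = "IS_NULL"
    operand {
      column {
        name = "value"
      }
    }
    # no threshold block: IS_NULL does not compare against a value
  }
}
```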