diff --git a/pipelines/resource_pipeline.go b/pipelines/resource_pipeline.go
index 69f88318e5..5386135461 100644
--- a/pipelines/resource_pipeline.go
+++ b/pipelines/resource_pipeline.go
@@ -176,7 +176,8 @@ type Pipeline struct {
 
 func (Pipeline) Aliases() map[string]map[string]string {
 	return map[string]map[string]string{
-		"pipelines.Pipeline": aliasMap,
+		"pipelines.Pipeline":     aliasMap,
+		"pipelines.PipelineSpec": aliasMap,
 	}
 }
 
@@ -274,12 +275,7 @@ func ResourcePipeline() common.Resource {
 			if err != nil {
 				return err
 			}
-			err = Create(w, ctx, d, d.Timeout(schema.TimeoutCreate))
-			if err != nil {
-				return err
-			}
-			d.Set("url", c.FormatURL("#joblist/pipelines/", d.Id()))
-			return nil
+			return Create(w, ctx, d, d.Timeout(schema.TimeoutCreate))
 		},
 		Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
 			w, err := c.WorkspaceClient()
@@ -294,10 +290,20 @@ func ResourcePipeline() common.Resource {
 			if readPipeline.Spec == nil {
 				return fmt.Errorf("pipeline spec is nil for '%v'", readPipeline.PipelineId)
 			}
-			if err = common.StructToData(readPipeline.Spec, pipelineSchema, d); err != nil {
-				return err
+			p := Pipeline{
+				PipelineSpec:    *readPipeline.Spec,
+				Cause:           readPipeline.Cause,
+				ClusterId:       readPipeline.ClusterId,
+				CreatorUserName: readPipeline.CreatorUserName,
+				Health:          readPipeline.Health,
+				LastModified:    readPipeline.LastModified,
+				LatestUpdates:   readPipeline.LatestUpdates,
+				RunAsUserName:   readPipeline.RunAsUserName,
+				State:           readPipeline.State,
+				// Provides the URL to the pipeline in the Databricks UI.
+				URL: c.FormatURL("#joblist/pipelines/", d.Id()),
 			}
-			return common.StructToData(readPipeline, pipelineSchema, d)
+			return common.StructToData(p, pipelineSchema, d)
 		},
 		Update: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
 			w, err := c.WorkspaceClient()
diff --git a/pipelines/resource_pipeline_test.go b/pipelines/resource_pipeline_test.go
index 1effd12b03..766a96e80c 100644
--- a/pipelines/resource_pipeline_test.go
+++ b/pipelines/resource_pipeline_test.go
@@ -288,8 +288,59 @@ func TestResourcePipelineRead(t *testing.T) {
 			"key1": "value1",
 			"key2": "value2",
 		},
-		"filters.0.include.0": "com.databricks.include",
-		"continuous":          false,
+		"cluster": []any{
+			map[string]any{
+				"apply_policy_default_values":  false,
+				"autoscale":                    []any{},
+				"aws_attributes":               []any{},
+				"azure_attributes":             []any{},
+				"cluster_log_conf":             []any{},
+				"driver_instance_pool_id":      "",
+				"driver_node_type_id":          "",
+				"enable_local_disk_encryption": false,
+				"gcp_attributes":               []any{},
+				"init_scripts":                 []any{},
+				"instance_pool_id":             "",
+				"node_type_id":                 "",
+				"num_workers":                  0,
+				"policy_id":                    "",
+				"spark_conf":                   map[string]any{},
+				"spark_env_vars":               map[string]any{},
+				"ssh_public_keys":              []any{},
+				"label":                        "default",
+				"custom_tags": map[string]any{
+					"cluster_tag1": "cluster_value1",
+				},
+			},
+		},
+		"library": []any{
+			map[string]any{
+				"file":  []any{},
+				"maven": []any{},
+				"jar":   "",
+				"whl":   "",
+				"notebook": []any{
+					map[string]any{
+						"path": "/Test",
+					},
+				},
+			},
+		},
+		"filters": []any{
+			map[string]any{
+				"include": []any{"com.databricks.include"},
+				"exclude": []any{"com.databricks.exclude"},
+			},
+		},
+		"deployment": []any{
+			map[string]any{
+				"kind":               "BUNDLE",
+				"metadata_file_path": "/foo/bar",
+			},
+		},
+		"edition":    "ADVANCED",
+		"channel":    "CURRENT",
+		"continuous": false,
 	})
 }
 
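Note on the Read change above: instead of calling common.StructToData twice (once for the spec, once for the full response), the handler now copies the API response into a single Pipeline wrapper value. Because PipelineSpec is embedded in Pipeline, its fields flatten to the top level of the Terraform state, which is also why Aliases() now registers "pipelines.PipelineSpec" against the same aliasMap. The definition of the wrapper struct lives at the `type Pipeline struct {` anchor of the first hunk and is not shown in this diff; the following is a minimal sketch of its likely shape, where the field names follow the assignments in the Read hunk and the types are assumptions based on the databricks-sdk-go pipelines package.

    // Sketch only: the real struct is defined outside the hunks shown above.
    type Pipeline struct {
        // Embedded, so spec fields ("name", "storage", ...) stay at the
        // top level of the Terraform state.
        pipelines.PipelineSpec

        // Read-only fields from the pipelines API, surfaced as computed
        // attributes. Types are assumptions from databricks-sdk-go.
        Cause           string                              `json:"cause,omitempty"`
        ClusterId       string                              `json:"cluster_id,omitempty"`
        CreatorUserName string                              `json:"creator_user_name,omitempty"`
        Health          pipelines.GetPipelineResponseHealth `json:"health,omitempty"`
        LastModified    int64                               `json:"last_modified,omitempty"`
        LatestUpdates   []pipelines.UpdateStateInfo         `json:"latest_updates,omitempty"`
        RunAsUserName   string                              `json:"run_as_user_name,omitempty"`
        State           pipelines.PipelineState             `json:"state,omitempty"`
        URL             string                              `json:"url,omitempty"`
    }

Under this shape, one StructToData call writes both the user-configured spec and the extra computed fields ("cause", "state", "url", and so on), which is also why the Create handler no longer needs its own d.Set("url", ...) call: the URL is populated on every Read instead.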