Skip to content

Commit

Permalink
work
Browse files Browse the repository at this point in the history
  • Loading branch information
mgyucht committed Aug 14, 2024
1 parent 4547d07 commit f01b81a
Show file tree
Hide file tree
Showing 2 changed files with 69 additions and 12 deletions.
26 changes: 16 additions & 10 deletions pipelines/resource_pipeline.go
Original file line number Diff line number Diff line change
Expand Up @@ -176,7 +176,8 @@ type Pipeline struct {

// Aliases maps the fully-qualified struct names used by this resource to the
// shared field-alias table, so both the top-level Pipeline wrapper and its
// embedded PipelineSpec serialize with the same schema field names.
func (Pipeline) Aliases() map[string]map[string]string {
	return map[string]map[string]string{
		// Both entries intentionally share aliasMap: PipelineSpec is embedded
		// in Pipeline, and its fields must alias identically at either level.
		"pipelines.Pipeline":     aliasMap,
		"pipelines.PipelineSpec": aliasMap,
	}
}
Expand Down Expand Up @@ -274,12 +275,7 @@ func ResourcePipeline() common.Resource {
if err != nil {
return err
}
err = Create(w, ctx, d, d.Timeout(schema.TimeoutCreate))
if err != nil {
return err
}
d.Set("url", c.FormatURL("#joblist/pipelines/", d.Id()))
return nil
return Create(w, ctx, d, d.Timeout(schema.TimeoutCreate))
},
Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
w, err := c.WorkspaceClient()
Expand All @@ -294,10 +290,20 @@ func ResourcePipeline() common.Resource {
if readPipeline.Spec == nil {
return fmt.Errorf("pipeline spec is nil for '%v'", readPipeline.PipelineId)
}
if err = common.StructToData(readPipeline.Spec, pipelineSchema, d); err != nil {
return err
p := Pipeline{
PipelineSpec: *readPipeline.Spec,
Cause: readPipeline.Cause,
ClusterId: readPipeline.ClusterId,
CreatorUserName: readPipeline.CreatorUserName,
Health: readPipeline.Health,
LastModified: readPipeline.LastModified,
LatestUpdates: readPipeline.LatestUpdates,
RunAsUserName: readPipeline.RunAsUserName,
State: readPipeline.State,
// Provides the URL to the pipeline in the Databricks UI.
URL: c.FormatURL("#joblist/pipelines/", d.Id()),
}
return common.StructToData(readPipeline, pipelineSchema, d)
return common.StructToData(p, pipelineSchema, d)
},
Update: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
w, err := c.WorkspaceClient()
Expand Down
55 changes: 53 additions & 2 deletions pipelines/resource_pipeline_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -288,8 +288,59 @@ func TestResourcePipelineRead(t *testing.T) {
"key1": "value1",
"key2": "value2",
},
"filters.0.include.0": "com.databricks.include",
"continuous": false,
"cluster": []any{
map[string]any{
"apply_policy_default_values": false,
"autoscale": []any{},
"aws_attributes": []any{},
"azure_attributes": []any{},
"cluster_log_conf": []any{},
"driver_instance_pool_id": "",
"driver_node_type_id": "",
"enable_local_disk_encryption": false,
"gcp_attributes": []any{},
"init_scripts": []any{},
"instance_pool_id": "",
"node_type_id": "",
"num_workers": 0,
"policy_id": "",
"spark_conf": map[string]any{},
"spark_env_vars": map[string]any{},
"ssh_public_keys": []any{},
"label": "default",
"custom_tags": map[string]any{
"cluster_tag1": "cluster_value1",
},
},
},
"library": []any{
map[string]any{
"file": []any{},
"maven": []any{},
"jar": "",
"whl": "",
"notebook": []any{
map[string]any{
"path": "/Test",
},
},
},
},
"filters": []any{
map[string]any{
"include": []any{"com.databricks.include"},
"exclude": []any{"com.databricks.exclude"},
},
},
"deployment": []any{
map[string]any{
"kind": "BUNDLE",
"metadata_file_path": "/foo/bar",
},
},
"edition": "ADVANCED",
"channel": "CURRENT",
"continuous": false,
})
}

Expand Down

0 comments on commit f01b81a

Please sign in to comment.