Skip to content

Commit

Permalink
feat: add migration for existing pipelines
Browse files Browse the repository at this point in the history
  • Loading branch information
niross committed Nov 23, 2023
1 parent 58db5ef commit e65bf48
Showing 1 changed file with 44 additions and 0 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
# Generated by Django 3.2.20 on 2023-11-23 11:20
import json

from django.conf import settings
from django.db import migrations

from dataworkspace.apps.core.boto3_client import get_s3_client
from dataworkspace.apps.datasets.pipelines.utils import split_schema_table


def generate_pipeline_configs(apps, _):
    """Backfill a JSON config object in S3 for every existing Pipeline.

    For each ``datasets.Pipeline`` row, write a
    ``pipeline-<id>.json`` object under ``DATAFLOW_PIPELINES_PREFIX`` in the
    uploads bucket so that pre-existing pipelines created before this feature
    are picked up by the dataflow side.

    Failures for individual pipelines are deliberately ignored: this is a
    best-effort backfill and a transient S3 error for one pipeline must not
    abort the whole migration.

    :param apps: historical app registry supplied by ``RunPython``
    :param _: schema editor (unused)
    """
    model = apps.get_model("datasets", "Pipeline")
    # The client is loop-invariant — create it once, not per pipeline.
    client = get_s3_client()
    for pipeline in model.objects.all():
        schema_name, table_name = split_schema_table(pipeline.table_name)
        try:
            # Only the S3 call sits in the try block; everything else
            # failing would indicate a real bug we want to surface.
            client.put_object(
                Bucket=settings.AWS_UPLOADS_BUCKET,
                Key=f"{settings.DATAFLOW_PIPELINES_PREFIX}/pipeline-{pipeline.id}.json",
                Body=json.dumps(
                    {
                        "id": pipeline.id,
                        # Quotes are stripped so quoted identifiers produce a
                        # valid Airflow DAG id.
                        "dag_id": "DerivedPipeline-" + pipeline.table_name.replace('"', ""),
                        "type": pipeline.type,
                        "schedule": pipeline.schedule,
                        "schema": schema_name,
                        "table": table_name,
                        "config": pipeline.config,
                        "enabled": True,
                    }
                ),
            )
        except Exception:  # pylint: disable=broad-except
            # Best-effort: skip this pipeline and continue with the rest.
            pass


class Migration(migrations.Migration):
    """Data migration: push a JSON config to S3 for each existing pipeline.

    Reversing is a no-op — the generated S3 objects are left in place.
    """

    dependencies = [
        ("datasets", "0157_pipeline_schedule"),
    ]

    operations = [
        migrations.RunPython(
            code=generate_pipeline_configs,
            reverse_code=migrations.RunPython.noop,
        ),
    ]

0 comments on commit e65bf48

Please sign in to comment.