From 23341f637d01a63bad311cb0a3712b586d5c08a3 Mon Sep 17 00:00:00 2001 From: Vladislav Korenkov Date: Fri, 24 Mar 2023 22:58:17 +1000 Subject: [PATCH] Fix(backend): Migrate initiator foreign keys --- doc/api_schema.yaml | 10 +- polemarch/__init__.py | 2 +- .../0003_v3_migrate_data_to_new_models.py | 145 ++++++++-- polemarch/main/utils.py | 34 +-- polemarch/plugins/execution/ansible.py | 8 +- polemarch/plugins/execution/base.py | 5 + tests.py | 257 +++++++++++++++++- 7 files changed, 404 insertions(+), 57 deletions(-) diff --git a/doc/api_schema.yaml b/doc/api_schema.yaml index cdc2c08a..86b7efa6 100644 --- a/doc/api_schema.yaml +++ b/doc/api_schema.yaml @@ -70,7 +70,7 @@ info: application: 3.0.0 library: 3.0.0 vstutils: 5.4.0 - django: 4.1.6 + django: 4.1.7 djangorestframework: 3.14.0 drf_yasg: 1.21.5 ansible: 2.9.27 @@ -13766,7 +13766,7 @@ definitions: model: $ref: '#/definitions/AnsiblePlaybook' value_field: playbook - view_field: name + view_field: playbook usePrefetch: true args: title: Args @@ -15274,7 +15274,7 @@ definitions: model: $ref: '#/definitions/AnsiblePlaybook' value_field: playbook - view_field: name + view_field: playbook usePrefetch: true args: title: Args @@ -15513,7 +15513,7 @@ definitions: model: $ref: '#/definitions/AnsiblePlaybook' value_field: playbook - view_field: name + view_field: playbook usePrefetch: true args: title: Args @@ -16032,7 +16032,7 @@ definitions: model: $ref: '#/definitions/AnsiblePlaybook' value_field: playbook - view_field: name + view_field: playbook usePrefetch: true args: title: Args diff --git a/polemarch/__init__.py b/polemarch/__init__.py index 83cce301..51cd2b32 100644 --- a/polemarch/__init__.py +++ b/polemarch/__init__.py @@ -31,6 +31,6 @@ "VST_ROOT_URLCONF": os.getenv("VST_ROOT_URLCONF", 'vstutils.urls'), } -__version__ = "3.0.0" +__version__ = "3.0.1" prepare_environment(**default_settings) diff --git a/polemarch/main/migrations/0003_v3_migrate_data_to_new_models.py b/polemarch/main/migrations/0003_v3_migrate_data_to_new_models.py index 1d19875d..c615adca 100644 --- a/polemarch/main/migrations/0003_v3_migrate_data_to_new_models.py +++ b/polemarch/main/migrations/0003_v3_migrate_data_to_new_models.py @@ -91,7 +91,7 @@ def to_template_data(option): def to_options_data(options_qs): return { - option.name: to_template_data(option) + slugify(option.name): to_template_data(option) for option in options_qs } @@ -99,12 +99,14 @@ def to_options_data(options_qs): def migrate_templates_data_direct(apps, schema_editor): Template = apps.get_model('main', 'Template') PeriodicTask = apps.get_model('main', 'PeriodicTask') + History = apps.get_model('main', 'History') ExecutionTemplate = apps.get_model('main', 'ExecutionTemplate') ExecutionTemplateOption = apps.get_model('main', 'ExecutionTemplateOption') TemplatePeriodicTask = apps.get_model('main', 'TemplatePeriodicTask') db_alias = schema_editor.connection.alias + history_to_update = [] for old_template in Template.objects.all(): new_template = ExecutionTemplate.objects.using(db_alias).create( @@ -129,24 +131,55 @@ def migrate_templates_data_direct(apps, schema_editor): for old_option_key, old_option_value in json.loads(old_template.options_data or '{}').items() ] - ExecutionTemplateOption.objects.using(db_alias).bulk_create([default_option, *other_options]) - - if old_template.periodic_task.exists(): - TemplatePeriodicTask.objects.using(db_alias).bulk_create([ - TemplatePeriodicTask( - name=old_periodic_task.name, - template_option=default_option if not old_periodic_task.template_opt else [ - 
option for option in other_options - if slugify(option.name) == old_periodic_task.template_opt - ][0], - notes=old_periodic_task.notes, - type=old_periodic_task.type, - schedule=old_periodic_task.schedule, - enabled=old_periodic_task.enabled, - save_result=old_periodic_task.save_result, - ) - for old_periodic_task in old_template.periodic_task.all() - ]) + other_options_created = ExecutionTemplateOption.objects.using(db_alias).bulk_create( + [default_option, *other_options] + ) + + history_to_update_qs = History.objects \ + .using(db_alias) \ + .filter(initiator_type='template', initiator=old_template.id) + for history in history_to_update_qs: + history.initiator = new_template.id + options = json.loads(history.json_options) + old_template_option = options.get('template_option') + if old_template_option is None: + options['template_option'] = str(default_option.id) + else: + try: + new_option = [ + option for option in other_options_created + if slugify(option.name) == old_template_option + ][0] + options['template_option'] = str(new_option.id) + except IndexError: + pass + history.json_options = json.dumps(options) + history_to_update.append(history) + + for old_periodic_task in old_template.periodic_task.all(): + new_periodic_task = TemplatePeriodicTask.objects.using(db_alias).create( + name=old_periodic_task.name, + template_option=default_option if not old_periodic_task.template_opt else [ + option for option in other_options + if slugify(option.name) == old_periodic_task.template_opt + ][0], + notes=old_periodic_task.notes, + type=old_periodic_task.type, + schedule=old_periodic_task.schedule, + enabled=old_periodic_task.enabled, + save_result=old_periodic_task.save_result, + ) + + history_to_update_qs = History.objects \ + .using(db_alias) \ + .filter(initiator_type='scheduler', initiator=old_periodic_task.id) + for history in history_to_update_qs: + history.initiator = new_periodic_task.id + history.json_options = json.dumps({ + 'template': new_periodic_task.template_option.template.id, + 'template_option': str(new_periodic_task.template_option.id), + }) + history_to_update.append(history) for old_periodic_task in PeriodicTask.objects.filter(template_id=None): variables = {} @@ -177,7 +210,7 @@ def migrate_templates_data_direct(apps, schema_editor): inventory=old_periodic_task._inventory_id or old_periodic_task.inventory_file), template=new_template, ) - TemplatePeriodicTask.objects.using(db_alias).create( + new_periodic_task = TemplatePeriodicTask.objects.using(db_alias).create( name=old_periodic_task.name, template_option=default_option, notes='', @@ -187,10 +220,24 @@ def migrate_templates_data_direct(apps, schema_editor): save_result=old_periodic_task.save_result, ) + history_to_update_qs = History.objects \ + .using(db_alias) \ + .filter(initiator_type='scheduler', initiator=old_periodic_task.id) + for history in history_to_update_qs: + history.initiator = new_periodic_task.id + history.json_options = json.dumps({ + 'template': new_periodic_task.template_option.template.id, + 'template_option': str(new_periodic_task.template_option.id), + }) + history_to_update.append(history) + + History.objects.using(db_alias).bulk_update(history_to_update, fields=['initiator', 'json_options']) + def migrate_templates_data_backwards(apps, schema_editor): Template = apps.get_model('main', 'Template') PeriodicTask = apps.get_model('main', 'PeriodicTask') + History = apps.get_model('main', 'History') ExecutionTemplate = apps.get_model('main', 'ExecutionTemplate') ExecutionTemplateOption = 
apps.get_model('main', 'ExecutionTemplateOption') @@ -216,8 +263,26 @@ def migrate_templates_data_backwards(apps, schema_editor): new_template_id=new_template.id, ) - PeriodicTask.objects.using(db_alias).bulk_create([ - PeriodicTask( + history_to_update = [] + + history_to_update_qs = History.objects \ + .using(db_alias) \ + .filter(initiator_type='template', initiator=new_template.id) + for history in history_to_update_qs: + history.initiator = old_template.id + options = json.loads(history.json_options) + template_option_id = options.get('template_option') + if str(default_option.id) == template_option_id: + del options['template_option'] + elif template_option_id is not None: + option = other_options.filter(id=template_option_id).first() + if option: + options['template_option'] = option.name + history.json_options = json.dumps(options) + history_to_update.append(history) + + for periodic_task in default_option.periodic_tasks.all(): + old_periodic_task = PeriodicTask.objects.create( name=periodic_task.name, notes=periodic_task.notes, mode='', @@ -233,11 +298,21 @@ def migrate_templates_data_backwards(apps, schema_editor): project=new_template.project, template=old_template, ) - for periodic_task in default_option.periodic_tasks.all() - ]) - - PeriodicTask.objects.using(db_alias).bulk_create([ - PeriodicTask( + history_to_update_qs = History.objects \ + .using(db_alias) \ + .filter(initiator_type='scheduler', initiator=periodic_task.id) + for history in history_to_update_qs: + history.initiator = old_periodic_task.id + options = json.loads(history.json_options) + if 'template' in options: + del options['template'] + if 'template_option' in options: + del options['template_option'] + history.json_options = json.dumps(options) + history_to_update.append(history) + + for periodic_task in TemplatePeriodicTask.objects.filter(template_option__in=other_options): + old_periodic_task = PeriodicTask.objects.create( name=periodic_task.name, notes=periodic_task.notes, mode='', @@ -253,9 +328,19 @@ def migrate_templates_data_backwards(apps, schema_editor): project=new_template.project, template=old_template, ) - for periodic_task in TemplatePeriodicTask.objects.filter(template_option__in=other_options) - ]) - + history_to_update_qs = History.objects \ + .using(db_alias) \ + .filter(initiator_type='scheduler', initiator=periodic_task.id) + for history in history_to_update_qs: + history.initiator = old_periodic_task.id + options = json.loads(history.json_options) + if 'template' in options: + del options['template'] + options['template_option'] = old_periodic_task.template_opt + history.json_options = json.dumps(options) + history_to_update.append(history) + + History.objects.using(db_alias).bulk_update(history_to_update, fields=['initiator', 'json_options']) def migrate_history_data_direct(apps, schema_editor): History = apps.get_model('main', 'History') diff --git a/polemarch/main/utils.py b/polemarch/main/utils.py index 6523dc74..167e650e 100644 --- a/polemarch/main/utils.py +++ b/polemarch/main/utils.py @@ -348,16 +348,10 @@ def execute(self, plugin: str, project, execute_args, **kwargs): validate_inventory_arguments(plugin, execute_args, project) task_class = project.task_handlers.backend('EXECUTION') - plugin_class = self.backend(plugin) - - mode = f'[{plugin} plugin]' - if plugin_class.arg_shown_on_history_as_mode is not None: - mode = execute_args.get(plugin_class.arg_shown_on_history_as_mode, mode) history = self.create_history( project, plugin, - mode, execute_args=execute_args, 
initiator=kwargs.pop('initiator', 0), initiator_type=kwargs.pop('initiator_type', 'project'), @@ -383,17 +377,25 @@ def execute(self, plugin: str, project, execute_args, **kwargs): return history - def create_history(self, project, kind, mode, execute_args, **kwargs): + def create_history(self, project, plugin, execute_args, **kwargs): if not kwargs['save_result']: return None - history_execute_args = {**execute_args} - inventory = history_execute_args.get('inventory', None) - if isinstance(inventory, str): - history_execute_args['inventory'] = inventory - inventory = None - elif isinstance(inventory, int): - inventory = project.inventories.get(id=inventory) + plugin_class = self.backend(plugin) + + mode = f'[{plugin} plugin]' + if plugin_class.arg_shown_on_history_as_mode is not None: + mode = execute_args.get(plugin_class.arg_shown_on_history_as_mode, mode) + + inventory = None + if plugin_class.arg_shown_on_history_as_inventory is not None: + inventory_field_name = plugin_class.arg_shown_on_history_as_inventory + inventory = execute_args.get(inventory_field_name, None) + if isinstance(inventory, str): + execute_args['inventory'] = inventory + inventory = None + elif isinstance(inventory, int): + inventory = project.inventories.get(id=inventory) return project.history.create( status='DELAY', @@ -401,9 +403,9 @@ def create_history(self, project, kind, mode, execute_args, **kwargs): start_time=timezone.now(), inventory=inventory, project=project, - kind=kind, + kind=plugin, raw_stdout='', - execute_args=history_execute_args, + execute_args=execute_args, initiator=kwargs['initiator'], initiator_type=kwargs['initiator_type'], executor=kwargs['executor'], diff --git a/polemarch/plugins/execution/ansible.py b/polemarch/plugins/execution/ansible.py index b15b31f7..26e7868e 100644 --- a/polemarch/plugins/execution/ansible.py +++ b/polemarch/plugins/execution/ansible.py @@ -32,6 +32,8 @@ class BaseAnsiblePlugin(BasePlugin): -15: HistoryStatus.INTERRUPTED, } + arg_shown_on_history_as_inventory = 'inventory' + def __init__(self, options=None, output_handler=None): super().__init__(options, output_handler) self.inventory = None @@ -160,7 +162,11 @@ class AnsiblePlaybook(BaseAnsiblePlugin): reference = ANSIBLE_REFERENCE.raw_dict['playbook'] arg_shown_on_history_as_mode = 'playbook' serializer_fields = { - 'playbook': AutoCompletionField(autocomplete='AnsiblePlaybook', autocomplete_property='playbook') + 'playbook': AutoCompletionField( + autocomplete='AnsiblePlaybook', + autocomplete_property='playbook', + autocomplete_represent='playbook', + ) } @property diff --git a/polemarch/plugins/execution/base.py b/polemarch/plugins/execution/base.py index 405e9fad..bdc971ef 100644 --- a/polemarch/plugins/execution/base.py +++ b/polemarch/plugins/execution/base.py @@ -43,6 +43,11 @@ class BasePlugin: ``[ plugin]`` string. """ + arg_shown_on_history_as_inventory: Optional[str] = None # pylint: disable=invalid-name + """ + Name of argument presented in generated serializer which will be shown on list history page as *Inventory*. 
+ """ + error_codes: Mapping[int, str] = {} """ This mapping will be looked up to choose an appropriate error message for history output if execution finished with diff --git a/tests.py b/tests.py index a84c2bb7..65734319 100644 --- a/tests.py +++ b/tests.py @@ -4893,6 +4893,34 @@ def prepare(self): Variable = self.old_state.apps.get_model('main', 'Variable') History = self.old_state.apps.get_model('main', 'History') + # create and delete unused template so all other templates' ids will start from 2 + unused_template = Template.objects.create( + name='unused', + kind='Module', + template_data='{}', + project=self.project, + owner=self.project.owner, + ) + Template.objects.filter(id=unused_template.id).delete() + + # same for periodic tasks + unused_pt = PeriodicTask.objects.create( + name='unused', + mode='shell', + kind='MODULE', + inventory_file=None, + type='CRONTAB', + schedule='57 22 * * 0', + save_result=True, + enabled=False, + template_opt=None, + _inventory=self.inventory, + project=self.project, + owner=self.project.owner, + template=None, + ) + PeriodicTask.objects.filter(id=unused_pt.id).delete() + # create template with kind=Module module_template = Template.objects.create( name='module template', @@ -4952,7 +4980,7 @@ def prepare(self): ) # create periodic task using template without option - PeriodicTask.objects.create( + pt1 = PeriodicTask.objects.create( name='pt1', mode='', notes='some notes', @@ -4970,7 +4998,7 @@ def prepare(self): ) # create periodic task using template and option - PeriodicTask.objects.create( + pt2 = PeriodicTask.objects.create( name='pt2', mode='', notes='', @@ -5076,6 +5104,78 @@ def prepare(self): ) self.history2_id = history2.id + # create history with template initiator (no option) + history3 = History.objects.create( + status='OK', + mode='ping', + inventory=self.inventory, + project=self.project, + kind='ANSIBLE_MODULE', + json_args=json.dumps({'module': 'ping'}), + initiator=module_template.id, + initiator_type='template', + executor=None, + ) + self.history3_id = history3.id + + # create history with template initiator (with option) + history4 = History.objects.create( + status='OK', + mode='ping', + inventory=self.inventory, + project=self.project, + kind='ANSIBLE_MODULE', + json_args=json.dumps({'module': 'ping'}), + json_options=json.dumps({'template_option': 'cleanup'}), + initiator=module_template.id, + initiator_type='template', + executor=None, + ) + self.history4_id = history4.id + + # create history with scheduler initiator + history5 = History.objects.create( + status='OK', + mode='ping', + inventory=self.inventory, + project=self.project, + kind='ANSIBLE_MODULE', + json_args=json.dumps({'module': 'ping'}), + json_options='{}', + initiator=pt1.id, + initiator_type='scheduler', + executor=None, + ) + self.history5_id = history5.id + + history6 = History.objects.create( + status='OK', + mode='ping', + inventory=self.inventory, + project=self.project, + kind='ANSIBLE_MODULE', + json_args=json.dumps({'module': 'ping'}), + json_options=json.dumps({'template_option': 'cleanup'}), + initiator=pt2.id, + initiator_type='scheduler', + executor=None, + ) + self.history6_id = history6.id + + history7 = History.objects.create( + status='OK', + mode='ping', + inventory=self.inventory, + project=self.project, + kind='ANSIBLE_MODULE', + json_args=json.dumps({'module': 'ping'}), + json_options='{}', + initiator=pt3.id, + initiator_type='scheduler', + executor=None, + ) + self.history7_id = history7.id + def test_migrations(self): 
self.check_model_not_exists(self.new_state, 'main', 'Template') self.check_model_not_exists(self.new_state, 'main', 'PeriodicTask') @@ -5250,6 +5350,44 @@ def test_migrations(self): 'module': 'invalid', }) + history3 = History.objects.get(id=self.history3_id) + self.assertEqual(history3.initiator_type, 'template') + self.assertEqual(history3.initiator, template1.id) + self.assertDictEqual(json.loads(history3.json_options), { + 'template_option': str(option1.id), + }) + + history4 = History.objects.get(id=self.history4_id) + self.assertEqual(history4.initiator_type, 'template') + self.assertEqual(history4.initiator, template1.id) + self.assertDictEqual(json.loads(history4.json_options), { + 'template_option': str(option3.id), + }) + + history5 = History.objects.get(id=self.history5_id) + self.assertEqual(history5.initiator_type, 'scheduler') + self.assertEqual(history5.initiator, pt1.id) + self.assertDictEqual(json.loads(history5.json_options), { + 'template_option': str(pt1.template_option.id), + 'template': pt1.template_option.template.id, + }) + + history6 = History.objects.get(id=self.history6_id) + self.assertEqual(history6.initiator_type, 'scheduler') + self.assertEqual(history6.initiator, pt2.id) + self.assertDictEqual(json.loads(history6.json_options), { + 'template_option': str(pt2.template_option.id), + 'template': pt2.template_option.template.id, + }) + + history7 = History.objects.get(id=self.history7_id) + self.assertEqual(history7.initiator_type, 'scheduler') + self.assertEqual(history7.initiator, pt3.id) + self.assertDictEqual(json.loads(history7.json_options), { + 'template_option': str(pt3.template_option.id), + 'template': pt3.template_option.template.id, + }) + class ExecutionTemplateBackwardsMigrationTestCase(BaseMigrationTestCase): migrate_from = ('main', '0004_v3_delete_old_models') @@ -5264,6 +5402,27 @@ def prepare(self): History = self.old_state.apps.get_model('main', 'History') + unused_template = ExecutionTemplate.objects.create( + name='unused', + notes='some notes', + plugin='ANSIBLE_MODULE', + project=self.project, + ) + unused_option = ExecutionTemplateOption.objects.create( + name='unused', + template=unused_template, + arguments={}, + ) + TemplatePeriodicTask.objects.create( + name='unused', + template_option=unused_option, + type='CRONTAB', + schedule='12 20 * * *', + enabled=False, + save_result=True, + ) + ExecutionTemplate.objects.filter(id=unused_template.id).delete() + # create template with module plugin template1 = ExecutionTemplate.objects.create( name='template1', @@ -5332,7 +5491,7 @@ def prepare(self): ) # create periodic task for default option - TemplatePeriodicTask.objects.create( + pt1 = TemplatePeriodicTask.objects.create( name='pt1', template_option=option1, type='CRONTAB', @@ -5342,7 +5501,7 @@ def prepare(self): ) # create periodic task for another option - TemplatePeriodicTask.objects.create( + pt2 = TemplatePeriodicTask.objects.create( name='pt2', template_option=option2, type='INTERVAL', @@ -5379,6 +5538,72 @@ def prepare(self): ) self.history2_id = history2.id + # create history with template initiator (default option) + history3 = History.objects.create( + status='OK', + mode='invalid', + inventory=self.inventory, + project=self.project, + kind='ANSIBLE_MODULE', + json_args=json.dumps({'module': 'invalid'}), + initiator=template1.id, + initiator_type='template', + executor=None, + json_options=json.dumps({'template_option': str(option1.id)}) + ) + self.history3_id = history3.id + + # create history with template initiator (default 
option) + history4 = History.objects.create( + status='OK', + mode='invalid', + inventory=self.inventory, + project=self.project, + kind='ANSIBLE_MODULE', + json_args=json.dumps({'module': 'invalid'}), + initiator=template1.id, + initiator_type='template', + executor=None, + json_options=json.dumps({'template_option': str(option2.id)}) + ) + self.history4_id = history4.id + + # create history with scheduler initiator + history5 = History.objects.create( + status='OK', + mode='invalid', + inventory=self.inventory, + project=self.project, + kind='ANSIBLE_MODULE', + json_args=json.dumps({'module': 'invalid'}), + initiator=pt1.id, + initiator_type='scheduler', + executor=None, + json_options=json.dumps({ + 'template_option': str(pt1.template_option.id), + 'template': pt1.template_option.template.id, + }), + ) + self.history5_id = history5.id + + # create history with scheduler initiator + history6 = History.objects.create( + status='OK', + mode='invalid', + inventory=self.inventory, + project=self.project, + kind='ANSIBLE_MODULE', + json_args=json.dumps({'module': 'invalid'}), + initiator=pt2.id, + initiator_type='scheduler', + executor=None, + json_options=json.dumps({ + 'template_option': str(pt2.template_option.id), + 'template': pt2.template_option.template.id, + }), + ) + self.history6_id = history6.id + def test_migrations(self): self.check_model_not_exists(self.new_state, 'main', 'ExecutionTemplate') self.check_model_not_exists(self.new_state, 'main', 'ExecutionTemplateOption') @@ -5478,6 +5703,30 @@ def test_migrations(self): 'module': 'invalid', }) + history3 = History.objects.get(id=self.history3_id) + self.assertEqual(history3.initiator_type, 'template') + self.assertEqual(history3.initiator, template1.id) + self.assertDictEqual(json.loads(history3.json_options), {}) + + history4 = History.objects.get(id=self.history4_id) + self.assertEqual(history4.initiator_type, 'template') + self.assertEqual(history3.initiator, template1.id) + self.assertDictEqual(json.loads(history4.json_options), { + 'template_option': 'option2', + }) + + history5 = History.objects.get(id=self.history5_id) + self.assertEqual(history5.initiator_type, 'scheduler') + self.assertEqual(history5.initiator, pt1.id) + self.assertDictEqual(json.loads(history5.json_options), {}) + + history6 = History.objects.get(id=self.history6_id) + self.assertEqual(history6.initiator_type, 'scheduler') + self.assertEqual(history6.initiator, pt2.id) + self.assertDictEqual(json.loads(history6.json_options), { + 'template_option': 'option2', + }) + class BaseExecutionPluginUnitTestCase(VSTBaseTestCase): plugin_class = None
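
The data migration above is easier to review with the History rewrite rule stated on its own. The following is an illustrative sketch only, not part of the patch: it restates the json_options rewrite performed for 'template' initiators in 0003_v3_migrate_data_to_new_models.py as a pure function, assuming the caller has already built a mapping from old option name (slugified) to new ExecutionTemplateOption id. The helper name and its parameters are hypothetical, not project code.

# Sketch of the 'template' initiator remapping applied by the migration.
import json


def rewrite_template_history_options(json_options, default_option_id, options_by_slug):
    """Map an old template History's options onto new ExecutionTemplateOption ids.

    * no 'template_option' key   -> point at the template's default option
    * known option name (slug)   -> replace the name with the new option id
    * unknown option name        -> leave the payload untouched (mirrors the
                                    IndexError/pass branch in the migration)
    """
    options = json.loads(json_options or '{}')
    old_option = options.get('template_option')
    if old_option is None:
        options['template_option'] = str(default_option_id)
    elif old_option in options_by_slug:
        options['template_option'] = str(options_by_slug[old_option])
    return json.dumps(options)


if __name__ == '__main__':
    # History run from a template without an option -> default option id.
    print(rewrite_template_history_options('{}', 1, {'cleanup': 3}))
    # History run with the 'cleanup' option -> that option's new id.
    print(rewrite_template_history_options('{"template_option": "cleanup"}', 1, {'cleanup': 3}))

Scheduler-initiated History rows are simpler: the migration always replaces json_options wholesale with the new periodic task's template and template_option ids, so no per-key rewrite is needed there.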