diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/cli.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/cli.py
index 375127c3f..5b757a402 100644
--- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/cli.py
+++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/cli.py
@@ -231,9 +231,11 @@ def scale_backfill_cmd(ctx, units: int):
 
 
 @backfill_group.command(name="status")
+@click.option('--deep-check', is_flag=True, help='Perform a deep status check of the backfill')
 @click.pass_obj
-def status_backfill_cmd(ctx):
-    exitcode, message = logic_backfill.status(ctx.env.backfill)
+def status_backfill_cmd(ctx, deep_check):
+    logger.info(f"Called `console backfill status`, with {deep_check=}")
+    exitcode, message = logic_backfill.status(ctx.env.backfill, deep_check=deep_check)
     if exitcode != ExitCode.SUCCESS:
         raise click.ClickException(message)
     click.echo(message)
diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/logic/backfill.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/logic/backfill.py
index e2fd9c66c..3eeac4d5b 100644
--- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/logic/backfill.py
+++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/logic/backfill.py
@@ -119,16 +119,18 @@ def scale(backfill: Backfill, units: int, *args, **kwargs) -> Tuple[ExitCode, st
     return ExitCode.FAILURE, "Backfill scale failed." + "\n" + result.display()
 
 
-def status(backfill: Backfill, *args, **kwargs) -> Tuple[ExitCode, str]:
-    logger.info("Getting backfill status")
+def status(backfill: Backfill, deep_check, *args, **kwargs) -> Tuple[ExitCode, str]:
+    logger.info(f"Getting backfill status with {deep_check=}")
     try:
-        status = backfill.get_status(*args, **kwargs)
+        status = backfill.get_status(deep_check, *args, **kwargs)
     except NotImplementedError:
         logger.error(f"Status is not implemented for backfill {type(backfill).__name__}")
         return ExitCode.FAILURE, f"Status is not implemented for backfill: {type(backfill).__name__}"
     except Exception as e:
         logger.error(f"Failed to get status of backfill: {e}")
         return ExitCode.FAILURE, f"Failure when getting status of backfill: {type(e).__name__} {e}"
-    if status:
-        return ExitCode.SUCCESS, status.value
-    return ExitCode.FAILURE, "Backfill status retrieval failed." + "\n" + status
+    if status.success:
+        return (ExitCode.SUCCESS,
+                f"{status.value[0]}\n{status.value[1]}" if not isinstance(status.value, str) else status.value)
+    return ExitCode.FAILURE, "Backfill status retrieval failed." + "\n" + \
+        (f"{status.value[0]}\n{status.value[1]}" if not isinstance(status.value, str) else status.value)
diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/backfill_base.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/backfill_base.py
index d49da1997..f76315efc 100644
--- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/backfill_base.py
+++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/backfill_base.py
@@ -19,7 +19,7 @@
 }
 
 
-BackfillStatus = Enum("BackfillStatus", ["NOT_STARTED", "RUNNING", "STOPPED", "FAILED"])
+BackfillStatus = Enum("BackfillStatus", ["NOT_STARTED", "STARTING", "RUNNING", "STOPPED", "FAILED"])
 
 
 class Backfill(ABC):
@@ -50,7 +50,7 @@ def stop(self, *args, **kwargs) -> CommandResult:
         pass
 
     @abstractmethod
-    def get_status(self, *args, **kwargs) -> BackfillStatus:
+    def get_status(self, *args, **kwargs) -> CommandResult:
         """Return a status"""
         pass
 
diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/backfill_osi.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/backfill_osi.py
index 011275165..3ebbe7d2c 100644
--- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/backfill_osi.py
+++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/backfill_osi.py
@@ -1,7 +1,7 @@
 from console_link.models.osi_utils import (create_pipeline_from_env, start_pipeline, stop_pipeline,
                                            OpenSearchIngestionMigrationProps)
 from console_link.models.cluster import Cluster
-from console_link.models.backfill_base import Backfill, BackfillStatus
+from console_link.models.backfill_base import Backfill
 from console_link.models.command_result import CommandResult
 from typing import Dict
 from cerberus import Validator
@@ -92,7 +92,7 @@ def stop(self, pipeline_name=None):
             pipeline_name = self.osi_props.pipeline_name
         stop_pipeline(osi_client=self.osi_client, pipeline_name=pipeline_name)
 
-    def get_status(self, *args, **kwargs) -> BackfillStatus:
+    def get_status(self, *args, **kwargs) -> CommandResult:
         raise NotImplementedError()
 
     def scale(self, units: int, *args, **kwargs) -> CommandResult:
diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/backfill_rfs.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/backfill_rfs.py
index fc3296011..8ce27a224 100644
--- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/backfill_rfs.py
+++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/backfill_rfs.py
@@ -1,4 +1,4 @@
-from typing import Dict
+from typing import Dict, Optional
 from console_link.models.backfill_base import Backfill, BackfillStatus
 from console_link.models.cluster import Cluster
 from console_link.models.schema_tools import contains_one_of
@@ -61,7 +61,7 @@ def start(self, *args, **kwargs) -> CommandResult:
     def stop(self, *args, **kwargs) -> CommandResult:
         raise NotImplementedError()
 
-    def get_status(self, *args, **kwargs) -> BackfillStatus:
+    def get_status(self, *args, **kwargs) -> CommandResult:
         raise NotImplementedError()
 
     def scale(self, units: int, *args, **kwargs) -> CommandResult:
@@ -96,3 +96,42 @@ def stop(self, *args, **kwargs) -> CommandResult:
     def scale(self, units: int, *args, **kwargs) -> CommandResult:
         logger.info(f"Scaling RFS backfill by setting desired count to {units} instances")
         return self.ecs_client.set_desired_count(units)
+
+    def get_status(self, deep_check, *args, **kwargs) -> CommandResult:
+        logger.info(f"Getting status of RFS backfill, with {deep_check=}")
+        instance_statuses = self.ecs_client.get_instance_statuses()
+        if not instance_statuses:
+            return CommandResult(False, "Failed to get instance statuses")
+
+        status_string = str(instance_statuses)
+        if deep_check:
+            shard_status = None
+            try:
+                shard_status = self._get_detailed_status()
+            except Exception as e:
+                logger.error(f"Failed to get detailed status: {e}")
+            if shard_status:
+                status_string += f"\n{shard_status}"
+
+        if instance_statuses.running > 0:
+            return CommandResult(True, (BackfillStatus.RUNNING, status_string))
+        elif instance_statuses.pending > 0:
+            return CommandResult(True, (BackfillStatus.STARTING, status_string))
+        return CommandResult(True, (BackfillStatus.STOPPED, status_string))
+
+    def _get_detailed_status(self) -> Optional[str]:
+        status_query = {"query": {
+            "bool": {
+                "must": [{"exists": {"field": "expiration"}}],
+                "must_not": [{"exists": {"field": "completedAt"}}]
+            }
+        }}
+        response = self.target_cluster.call_api("/.migrations_working_state/_search", json_body=status_query)
+        r_body = response.json()
+        logger.debug(f"Raw response: {r_body}")
+        if "hits" in r_body:
+            logger.info(f"Hits on detailed status query: {r_body['hits']}")
+            logger.info(f"Sample of remaining shards: {[hit['_id'] for hit in r_body['hits']['hits']]}")
+            return f"Remaining shards: {r_body['hits']['total']['value']}"
+        logger.warning("No hits on detailed status query, the migrations_working_state index may not exist or may not be populated")
+        return None
diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/cluster.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/cluster.py
index 1269cd583..c3fa5a217 100644
--- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/cluster.py
+++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/cluster.py
@@ -85,7 +85,8 @@ def __init__(self, config: Dict) -> None:
         elif 'sigv4' in config:
             self.auth_type = AuthMethod.SIGV4
 
-    def call_api(self, path, method: HttpMethod = HttpMethod.GET, timeout=None, **kwargs) -> requests.Response:
+    def call_api(self, path, method: HttpMethod = HttpMethod.GET, timeout=None,
+                 json_body=None, **kwargs) -> requests.Response:
         """
         Calls an API on the cluster.
         """
@@ -100,6 +101,11 @@ def call_api(self, path, method: HttpMethod = HttpMethod.GET, timeout=None, **kw
         else:
            raise NotImplementedError(f"Auth type {self.auth_type} not implemented")
 
+        if json_body is not None:
+            data = json_body
+        else:
+            data = None
+
         logger.info(f"Making api call to {self.endpoint}{path}")
 
         # Extract query parameters from kwargs
@@ -112,6 +118,7 @@ def call_api(self, path, method: HttpMethod = HttpMethod.GET, timeout=None, **kw
             verify=(not self.allow_insecure),
             auth=auth,
             timeout=timeout,
+            json=data
         )
         logger.debug(f"Cluster API call request: {r.request}")
         r.raise_for_status()
diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/ecs_service.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/ecs_service.py
index f1bda4198..3d74e13a1 100644
--- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/ecs_service.py
+++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/ecs_service.py
@@ -1,4 +1,5 @@
 import logging
+from typing import NamedTuple, Optional
 
 import boto3
 
@@ -9,6 +10,15 @@
 logger = logging.getLogger(__name__)
 
 
+class InstanceStatuses(NamedTuple):
+    running: int = 0
+    pending: int = 0
+    desired: int = 0
+
+    def __str__(self):
+        return f"Running={self.running}\nPending={self.pending}\nDesired={self.desired}"
+
+
 class ECSService:
     def __init__(self, cluster_name, service_name, aws_region=None):
         self.cluster_name = cluster_name
@@ -25,7 +35,6 @@ def set_desired_count(self, desired_count: int) -> CommandResult:
             service=self.service_name,
             desiredCount=desired_count
         )
-        logger.debug(f"Response from update_service: {response}")
 
         try:
             raise_for_aws_api_error(response)
@@ -38,3 +47,22 @@ def set_desired_count(self, desired_count: int) -> CommandResult:
         desired_count = response["service"]["desiredCount"]
         return CommandResult(True, f"Service {self.service_name} set to {desired_count} desired count."
                                    f" Currently {running_count} running and {pending_count} pending.")
+
+    def get_instance_statuses(self) -> Optional[InstanceStatuses]:
+        logger.info(f"Getting instance statuses for service {self.service_name}")
+        response = self.client.describe_services(
+            cluster=self.cluster_name,
+            services=[self.service_name]
+        )
+        try:
+            raise_for_aws_api_error(response)
+        except AWSAPIError as e:
+            logger.error(f"Error getting instance statuses: {e}")
+            return None
+
+        service = response["services"][0]
+        return InstanceStatuses(
+            running=service["runningCount"],
+            pending=service["pendingCount"],
+            desired=service["desiredCount"]
+        )
diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/snapshot.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/snapshot.py
index aeca34db0..484c3fbff 100644
--- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/snapshot.py
+++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/snapshot.py
@@ -107,8 +107,7 @@ def create(self, *args, **kwargs) -> CommandResult:
             logger.error(f"Failed to create snapshot: {str(e)}")
             return CommandResult(success=False, value=f"Failed to create snapshot: {str(e)}")
 
-    def status(self, *args, **kwargs) -> CommandResult:
-        deep_check = kwargs.get('deep_check', False)
+    def status(self, *args, deep_check=False, **kwargs) -> CommandResult:
         if deep_check:
             return get_snapshot_status_full(self.source_cluster, self.snapshot_name)
         return get_snapshot_status(self.source_cluster, self.snapshot_name)
diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/data/ecs_describe_services_5d_2p_1r.json b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/data/ecs_describe_services_5d_2p_1r.json
new file mode 100644
index 000000000..b3938f5f4
--- /dev/null
+++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/data/ecs_describe_services_5d_2p_1r.json
@@ -0,0 +1,95 @@
+{
+  "ResponseMetadata": {
+    "HTTPHeaders": {
+      "content-length": "18483",
+      "content-type": "application/x-amz-json-1.1",
+      "date": "Wed, 26 Jun 2024 05:19:27 GMT",
+      "x-amzn-requestid": "2840c6e1-4016-40b1-92c4-81537cc8fa44"
+    },
+    "HTTPStatusCode": 200,
+    "RequestId": "2840c6e1-4016-40b1-92c4-81537cc8fa44",
+    "RetryAttempts": 0
+  },
+  "failures": [],
+  "services": [
+    {
+      "clusterArn": "arn:aws:ecs:us-east-1:471142325910:cluster/migration-aws-integ-ecs-cluster",
+      "createdAt": 1719256381.625,
+      "createdBy": "arn:aws:iam::471142325910:role/cdk-hnb659fds-cfn-exec-role-471142325910-us-east-1",
+      "deploymentConfiguration": {
+        "alarms": {
+          "alarmNames": [],
+          "enable": false,
+          "rollback": false
+        },
+        "deploymentCircuitBreaker": {
+          "enable": false,
+          "rollback": false
+        },
+        "maximumPercent": 200,
+        "minimumHealthyPercent": 50
+      },
+      "deploymentController": {
+        "type": "ECS"
+      },
+      "deployments": [
+        {
+          "createdAt": 1719357851.906,
+          "desiredCount": 5,
+          "failedTasks": 0,
+          "id": "ecs-svc/6398738394350162857",
+          "launchType": "FARGATE",
+          "networkConfiguration": {
+            "awsvpcConfiguration": {
+              "assignPublicIp": "ENABLED",
+              "securityGroups": [
+                "sg-06238a66dd0dd5c3c",
+                "sg-020e147def49d9d63"
+              ],
+              "subnets": [
+                "subnet-063807f9cb96e6512",
+                "subnet-005f438b980128cfc"
+              ]
+            }
+          },
+          "pendingCount": 2,
+          "platformFamily": "Linux",
+          "platformVersion": "1.4.0",
+          "rolloutState": "COMPLETED",
+          "rolloutStateReason": "ECS deployment ecs-svc/6398738394350162857 completed.",
+          "runningCount": 1,
+          "status": "PRIMARY",
+          "taskDefinition": "arn:aws:ecs:us-east-1:471142325910:task-definition/migration-aws-integ-reindex-from-snapshot:7",
+          "updatedAt": 1719377448.437
+        }
+      ],
+      "desiredCount": 5,
+      "enableECSManagedTags": false,
+      "enableExecuteCommand": true,
+      "events": [],
+      "launchType": "FARGATE",
+      "loadBalancers": [],
+      "networkConfiguration": {
+        "awsvpcConfiguration": {
+          "assignPublicIp": "ENABLED",
+          "securityGroups": ["sg-06238a66dd0dd5c3c", "sg-020e147def49d9d63"],
+          "subnets": ["subnet-063807f9cb96e6512", "subnet-005f438b980128cfc"]
+        }
+      },
+      "pendingCount": 2,
+      "placementConstraints": [],
+      "placementStrategy": [],
+      "platformFamily": "Linux",
+      "platformVersion": "LATEST",
+      "propagateTags": "NONE",
+      "roleArn": "arn:aws:iam::471142325910:role/aws-service-role/ecs.amazonaws.com/AWSServiceRoleForECS",
+      "runningCount": 1,
+      "schedulingStrategy": "REPLICA",
+      "serviceArn": "arn:aws:ecs:us-east-1:471142325910:service/migration-aws-integ-ecs-cluster/migration-aws-integ-reindex-from-snapshot",
+      "serviceName": "migration-aws-integ-reindex-from-snapshot",
+      "serviceRegistries": [],
+      "status": "ACTIVE",
+      "taskDefinition": "arn:aws:ecs:us-east-1:471142325910:task-definition/migration-aws-integ-reindex-from-snapshot:7"
+    }
+  ]
+}
diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/data/migrations_working_state_search.json b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/data/migrations_working_state_search.json
new file mode 100644
index 000000000..21a447c79
--- /dev/null
+++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/data/migrations_working_state_search.json
@@ -0,0 +1,121 @@
+{
+  "took": 2,
+  "timed_out": false,
+  "_shards": { "total": 1, "successful": 1, "skipped": 0, "failed": 0 },
+  "hits": {
+    "total": { "value": 43, "relation": "eq" },
+    "max_score": 1.0,
+    "hits": [
+      {
+        "_index": ".migrations_working_state",
+        "_id": "logs-211998__2",
+        "_score": 1.0,
+        "_source": {
+          "numAttempts": 0,
+          "scriptVersion": "poc",
+          "creatorId": "77eccd7d-6baf-4816-b0f3-473ca3ca17e4",
+          "expiration": 0
+        }
+      },
+      {
+        "_index": ".migrations_working_state",
+        "_id": "logs-211998__3",
+        "_score": 1.0,
+        "_source": {
+          "numAttempts": 0,
+          "scriptVersion": "poc",
+          "creatorId": "77eccd7d-6baf-4816-b0f3-473ca3ca17e4",
+          "expiration": 0
+        }
+      },
+      {
+        "_index": ".migrations_working_state",
+        "_id": "logs-211998__4",
+        "_score": 1.0,
+        "_source": {
+          "numAttempts": 0,
+          "scriptVersion": "poc",
+          "creatorId": "77eccd7d-6baf-4816-b0f3-473ca3ca17e4",
+          "expiration": 0
+        }
+      },
+      {
+        "_index": ".migrations_working_state",
+        "_id": "logs-221998__0",
+        "_score": 1.0,
+        "_source": {
+          "numAttempts": 0,
+          "scriptVersion": "poc",
+          "creatorId": "77eccd7d-6baf-4816-b0f3-473ca3ca17e4",
+          "expiration": 0
+        }
+      },
+      {
+        "_index": ".migrations_working_state",
+        "_id": "logs-221998__1",
+        "_score": 1.0,
+        "_source": {
+          "numAttempts": 0,
+          "scriptVersion": "poc",
+          "creatorId": "77eccd7d-6baf-4816-b0f3-473ca3ca17e4",
+          "expiration": 0
+        }
+      },
+      {
+        "_index": ".migrations_working_state",
+        "_id": "logs-221998__2",
+        "_score": 1.0,
+        "_source": {
+          "numAttempts": 0,
+          "scriptVersion": "poc",
+          "creatorId": "77eccd7d-6baf-4816-b0f3-473ca3ca17e4",
+          "expiration": 0
+        }
+      },
+      {
+        "_index": ".migrations_working_state",
+        "_id": "logs-221998__3",
+        "_score": 1.0,
+        "_source": {
+          "numAttempts": 0,
+          "scriptVersion": "poc",
+          "creatorId": "77eccd7d-6baf-4816-b0f3-473ca3ca17e4",
+          "expiration": 0
+        }
+      },
+      {
+        "_index": ".migrations_working_state",
+        "_id": "logs-221998__4",
+        "_score": 1.0,
+        "_source": {
+          "numAttempts": 0,
+          "scriptVersion": "poc",
+          "creatorId": "77eccd7d-6baf-4816-b0f3-473ca3ca17e4",
+          "expiration": 0
+        }
+      },
+      {
+        "_index": ".migrations_working_state",
+        "_id": "logs-201998__0",
+        "_score": 1.0,
+        "_source": {
+          "numAttempts": 0,
+          "scriptVersion": "poc",
+          "creatorId": "77eccd7d-6baf-4816-b0f3-473ca3ca17e4",
+          "expiration": 0
+        }
+      },
+      {
+        "_index": ".migrations_working_state",
+        "_id": "logs-201998__1",
+        "_score": 1.0,
+        "_source": {
+          "numAttempts": 0,
+          "scriptVersion": "poc",
+          "creatorId": "77eccd7d-6baf-4816-b0f3-473ca3ca17e4",
+          "expiration": 0
+        }
+      }
+    ]
+  }
+}
diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/data/services_with_ecs_rfs.yaml b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/data/services_with_ecs_rfs.yaml
new file mode 100644
index 000000000..876472b84
--- /dev/null
+++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/data/services_with_ecs_rfs.yaml
@@ -0,0 +1,11 @@
+target_cluster:
+  endpoint: "https://opensearchtarget:9200"
+  allow_insecure: true
+  basic_auth:
+    username: "admin"
+    password: "myStrongPassword123!"
+backfill:
+  reindex_from_snapshot:
+    ecs:
+      cluster_name: "migration-cluster"
+      service_name: "rfs-service"
diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_backfill.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_backfill.py
index 14d4e55c0..c18c92136 100644
--- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_backfill.py
+++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_backfill.py
@@ -1,12 +1,14 @@
+import json
 import pathlib
 
 import pytest
+import requests_mock
 
 from console_link.logic.backfill import get_backfill, UnsupportedBackfillTypeError
-from console_link.models.backfill_base import Backfill
+from console_link.models.backfill_base import Backfill, BackfillStatus
 from console_link.models.backfill_osi import OpenSearchIngestionBackfill
 from console_link.models.backfill_rfs import DockerRFSBackfill, ECSRFSBackfill
-from console_link.models.ecs_service import ECSService
+from console_link.models.ecs_service import ECSService, InstanceStatuses
 
 from tests.utils import create_valid_cluster
 
@@ -181,3 +183,87 @@ def test_ecs_rfs_backfill_scale_sets_ecs_desired_count(ecs_rfs_backfill, mocker)
 
     assert isinstance(ecs_rfs_backfill, ECSRFSBackfill)
     mock.assert_called_once_with(ecs_rfs_backfill.ecs_client, 3)
+
+
+def test_ecs_rfs_backfill_status_gets_ecs_instance_statuses(ecs_rfs_backfill, mocker):
+    mocked_instance_status = InstanceStatuses(
+        desired=3,
+        running=1,
+        pending=2
+    )
+    mock = mocker.patch.object(ECSService, 'get_instance_statuses', autospec=True, return_value=mocked_instance_status)
+    value = ecs_rfs_backfill.get_status(deep_check=False)
+
+    mock.assert_called_once_with(ecs_rfs_backfill.ecs_client)
+    assert value.success
+    assert BackfillStatus.RUNNING == value.value[0]
+    assert str(mocked_instance_status) == value.value[1]
+
+
+def test_ecs_rfs_calculates_backfill_status_from_ecs_instance_statuses_stopped(ecs_rfs_backfill, mocker):
+    mocked_stopped_status = InstanceStatuses(
+        desired=8,
+        running=0,
+        pending=0
+    )
+    mock = mocker.patch.object(ECSService, 'get_instance_statuses', autospec=True, return_value=mocked_stopped_status)
+    value = ecs_rfs_backfill.get_status(deep_check=False)
+
+    mock.assert_called_once_with(ecs_rfs_backfill.ecs_client)
+    assert value.success
+    assert BackfillStatus.STOPPED == value.value[0]
+    assert str(mocked_stopped_status) == value.value[1]
+
+
+def test_ecs_rfs_calculates_backfill_status_from_ecs_instance_statuses_starting(ecs_rfs_backfill, mocker):
+    mocked_starting_status = InstanceStatuses(
+        desired=8,
+        running=0,
+        pending=6
+    )
+    mock = mocker.patch.object(ECSService, 'get_instance_statuses', autospec=True, return_value=mocked_starting_status)
+    value = ecs_rfs_backfill.get_status(deep_check=False)
+
+    mock.assert_called_once_with(ecs_rfs_backfill.ecs_client)
+    assert value.success
+    assert BackfillStatus.STARTING == value.value[0]
+    assert str(mocked_starting_status) == value.value[1]
+
+
+def test_ecs_rfs_calculates_backfill_status_from_ecs_instance_statuses_running(ecs_rfs_backfill, mocker):
+    mocked_running_status = InstanceStatuses(
+        desired=1,
+        running=3,
+        pending=1
+    )
+    mock = mocker.patch.object(ECSService, 'get_instance_statuses', autospec=True, return_value=mocked_running_status)
+    value = ecs_rfs_backfill.get_status(deep_check=False)
+
+    mock.assert_called_once_with(ecs_rfs_backfill.ecs_client)
+    assert value.success
+    assert BackfillStatus.RUNNING == value.value[0]
+    assert str(mocked_running_status) == value.value[1]
+
+
+def test_ecs_rfs_get_status_deep_check(ecs_rfs_backfill, mocker):
+    target = create_valid_cluster()
+    mocked_stopped_status = InstanceStatuses(
+        desired=1,
+        running=1,
+        pending=0
+    )
+    mock = mocker.patch.object(ECSService, 'get_instance_statuses', autospec=True, return_value=mocked_stopped_status)
+    with open(TEST_DATA_DIRECTORY / "migrations_working_state_search.json") as f:
+        data = json.load(f)
+        total_shards = data['hits']['total']['value']
+    with requests_mock.Mocker() as rm:
+        rm.get(f"{target.endpoint}/.migrations_working_state/_search",
+               status_code=200,
+               json=data)
+        value = ecs_rfs_backfill.get_status(deep_check=True)
+
+    mock.assert_called_once_with(ecs_rfs_backfill.ecs_client)
+    assert value.success
+    assert BackfillStatus.RUNNING == value.value[0]
+    assert str(mocked_stopped_status) in value.value[1]
+    assert str(total_shards) in value.value[1]
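Editor's note: the four non-deep-check tests above pin down the classification rule in `ECSRFSBackfill.get_status` — any running task means RUNNING, otherwise any pending task means STARTING, otherwise STOPPED; the desired count is reported but not used for classification. The rule in isolation, with the same stand-in types as the earlier sketch:

```python
from enum import Enum
from typing import NamedTuple

# Stand-ins mirroring console_link's types, for illustration only.
BackfillStatus = Enum("BackfillStatus", ["NOT_STARTED", "STARTING", "RUNNING", "STOPPED", "FAILED"])


class InstanceStatuses(NamedTuple):
    running: int = 0
    pending: int = 0
    desired: int = 0


def classify(statuses: InstanceStatuses) -> BackfillStatus:
    # Mirrors the branch order in ECSRFSBackfill.get_status.
    if statuses.running > 0:
        return BackfillStatus.RUNNING
    if statuses.pending > 0:
        return BackfillStatus.STARTING
    return BackfillStatus.STOPPED


assert classify(InstanceStatuses(running=1, pending=2, desired=3)) is BackfillStatus.RUNNING
assert classify(InstanceStatuses(running=0, pending=6, desired=8)) is BackfillStatus.STARTING
assert classify(InstanceStatuses(running=0, pending=0, desired=8)) is BackfillStatus.STOPPED
```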
diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_cli.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_cli.py
index e5277525e..f7ddc5fd0 100644
--- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_cli.py
+++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_cli.py
@@ -1,6 +1,8 @@
 import pathlib
 
 import requests_mock
 
+from console_link.models.backfill_rfs import ECSRFSBackfill
+from console_link.models.ecs_service import ECSService, InstanceStatuses
 from console_link.models.command_result import CommandResult
 from console_link.cli import cli
@@ -175,3 +177,49 @@ def test_cli_metadata_migrate(runner, env, mocker):
                          catch_exceptions=True)
     mock.assert_called_once()
     assert result.exit_code == 0
+
+
+def test_get_backfill_status_no_deep_check(runner, mocker):
+    mocked_running_status = InstanceStatuses(
+        desired=1,
+        running=3,
+        pending=1
+    )
+    mock = mocker.patch.object(ECSService, 'get_instance_statuses',
+                               autospec=True, return_value=mocked_running_status)
+
+    result = runner.invoke(cli, ['--config-file', str(TEST_DATA_DIRECTORY / "services_with_ecs_rfs.yaml"),
+                                 'backfill', 'status'],
+                           catch_exceptions=True)
+    print(result)
+    print(result.output)
+    assert result.exit_code == 0
+    assert "RUNNING" in result.output
+    assert str(mocked_running_status) in result.output
+
+    mock.assert_called_once()
+
+
+def test_get_backfill_status_with_deep_check(runner, mocker):
+    mocked_running_status = InstanceStatuses(
+        desired=1,
+        running=3,
+        pending=1
+    )
+    mocked_detailed_status = "Remaining shards: 43"
+    mock_ecs_service_call = mocker.patch.object(ECSService, 'get_instance_statuses', autospec=True,
+                                                return_value=mocked_running_status)
+    mock_detailed_status_call = mocker.patch.object(ECSRFSBackfill, '_get_detailed_status', autospec=True,
+                                                    return_value=mocked_detailed_status)
+
+    result = runner.invoke(cli, ['--config-file', str(TEST_DATA_DIRECTORY / "services_with_ecs_rfs.yaml"),
+                                 'backfill', 'status', '--deep-check'],
+                           catch_exceptions=True)
+    print(result)
+    print(result.output)
+    assert result.exit_code == 0
+    assert "RUNNING" in result.output
+    assert str(mocked_running_status) in result.output
+    assert mocked_detailed_status in result.output
+
+    mock_ecs_service_call.assert_called_once()
+    mock_detailed_status_call.assert_called_once()
diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_ecs.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_ecs.py
index 5678cdfa6..60fa533fd 100644
--- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_ecs.py
+++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_ecs.py
@@ -7,7 +7,7 @@
 from botocore.stub import Stubber
 
 from console_link.models.utils import AWSAPIError
-from console_link.models.ecs_service import ECSService
+from console_link.models.ecs_service import ECSService, InstanceStatuses
 
 TEST_DATA_DIRECTORY = pathlib.Path(__file__).parent / "data"
 AWS_REGION = "us-east-1"
@@ -57,3 +57,16 @@ def test_ecs_update_service_unsuccessful(ecs_stubber, ecs_service):
     result = ecs_service.set_desired_count(1)
     assert result.success is False
     assert isinstance(result.value, AWSAPIError)
+
+
+def test_ecs_get_instance_statuses(ecs_stubber, ecs_service):
+    with open(TEST_DATA_DIRECTORY / "ecs_describe_services_5d_2p_1r.json") as f:
+        data = json.load(f)
+    ecs_stubber.add_response("describe_services", service_response=data)
+    ecs_stubber.activate()
+
+    expected = InstanceStatuses(desired=5, pending=2, running=1)
+
+    ecs_service.client = ecs_stubber.client
+    result = ecs_service.get_instance_statuses()
+    assert str(result) == str(expected)
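Editor's note: taken together, the new flag is exercised end to end the same way the CLI tests above do it — point the CLI at a services YAML containing an ECS reindex-from-snapshot backfill and pass `--deep-check`. A hedged sketch using click's `CliRunner` with both remote calls patched out (the config path is a placeholder):

```python
from unittest import mock

from click.testing import CliRunner

from console_link.cli import cli
from console_link.models.backfill_rfs import ECSRFSBackfill
from console_link.models.ecs_service import ECSService, InstanceStatuses

runner = CliRunner()
with mock.patch.object(ECSService, "get_instance_statuses",
                       return_value=InstanceStatuses(running=1, pending=0, desired=1)), \
     mock.patch.object(ECSRFSBackfill, "_get_detailed_status",
                       return_value="Remaining shards: 43"):
    result = runner.invoke(cli, ["--config-file", "services_with_ecs_rfs.yaml",  # placeholder path
                                 "backfill", "status", "--deep-check"])
print(result.output)  # expected to contain RUNNING, the instance counts, and the shard line
```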