diff --git a/caribou/common/teardown/__init__.py b/caribou/common/teardown/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/caribou/common/teardown/teardown_tables.py b/caribou/common/teardown/teardown_tables.py
new file mode 100644
index 00000000..38bcb8d7
--- /dev/null
+++ b/caribou/common/teardown/teardown_tables.py
@@ -0,0 +1,134 @@
+import os
+from typing import Any
+
+import boto3
+import botocore
+
+from caribou.common import constants
+from caribou.common.models.endpoints import Endpoints
+
+
+def remove_table(dynamodb: Any, table_name: str, verbose: bool = True) -> None:
+    # Check if the table exists (if not, skip deletion)
+    try:
+        dynamodb.describe_table(TableName=table_name)
+
+        # If the table exists, delete it
+        dynamodb.delete_table(TableName=table_name)
+    except dynamodb.exceptions.ResourceNotFoundException:
+        if verbose:
+            print(f"Table '{table_name}' does not exist (or was already removed)")
+
+
+def remove_bucket(s3: Any, s3_resource: Any, bucket_name: str) -> None:
+    # Check if the bucket exists (if not, skip deletion)
+    try:
+        s3.head_bucket(Bucket=bucket_name)
+
+        # If the bucket exists, delete it
+        ## We first need to empty the bucket before deleting it
+        bucket = s3_resource.Bucket(bucket_name)
+        bucket.objects.all().delete()
+
+        ## Finally delete the bucket
+        s3.delete_bucket(Bucket=bucket_name)
+
+        print(f"Removed legacy bucket: {bucket_name}")
+    except botocore.exceptions.ClientError as e:
+        if e.response["Error"]["Code"] != "404" and e.response["Error"]["Code"] != "403":
+            # If the error is not 403 Forbidden or 404 Not Found,
+            # raise the exception and notify the user
+            raise
+
+
+def teardown_framework_tables() -> None:
+    dynamodb = boto3.client("dynamodb", region_name=constants.GLOBAL_SYSTEM_REGION)
+
+    # Get all attributes of the constants module
+    for attr in dir(constants):
+        # If the attribute name ends with '_TABLE', delete the corresponding DynamoDB table
+        if attr.endswith("_TABLE"):
+            table_name = getattr(constants, attr)
+
+            if table_name in [constants.SYNC_MESSAGES_TABLE, constants.SYNC_PREDECESSOR_COUNTER_TABLE]:
+                # Skip the sync tables (they are removed in a separate function)
+                continue
+
+            print(f"Removing table: {table_name}")
+            try:
+                remove_table(dynamodb, table_name)
+            except Exception as e:  # pylint: disable=broad-except
+                print(f"Error removing table {table_name}: {e}")
+
+
+def teardown_framework_buckets() -> None:
+    # Only used for legacy buckets
+    s3 = boto3.client("s3", region_name=constants.GLOBAL_SYSTEM_REGION)
+    s3_resource = boto3.resource("s3", region_name=constants.GLOBAL_SYSTEM_REGION)
+
+    # Get all attributes of the constants module
+    for attr in dir(constants):
+        # If the attribute name ends with '_BUCKET', remove the corresponding S3 bucket
+        if attr.endswith("_BUCKET"):
+            # Allow the bucket name to be overridden by an environment variable
+            bucket_name = os.environ.get(f"CARIBOU_OVERRIDE_{attr}", getattr(constants, attr))
+
+            try:
+                remove_bucket(s3, s3_resource, bucket_name)
+            except Exception as e:  # pylint: disable=broad-except
+                print(f"Error removing bucket {bucket_name}: {e}")
+
+
+def remove_sync_tables_all_regions() -> None:
+    # First get all the regions
+    all_available_regions: list[str] = []
+    try:
+        available_regions_data = (
+            Endpoints().get_data_collector_client().get_all_values_from_table(constants.AVAILABLE_REGIONS_TABLE)
+        )
+        for region_key_raw in available_regions_data.keys():
+            # Keys are of the form 'aws:eu-south-1' (for AWS regions)
+            if region_key_raw.startswith("aws:"):
+                region_key_aws = region_key_raw.split(":")[1]
+                all_available_regions.append(region_key_aws)
+    except Exception as e:  # pylint: disable=broad-except
+        print(f"Error getting available regions: {e}")
+
+    sync_tables = [constants.SYNC_MESSAGES_TABLE, constants.SYNC_PREDECESSOR_COUNTER_TABLE]
+    print(f"Removing sync tables in the following regions: {all_available_regions}")
+    error_regions: set[str] = set()
+    for region in all_available_regions:
+        dynamodb = boto3.client("dynamodb", region_name=region)

+        for table_name in sync_tables:
+            try:
+                remove_table(dynamodb, table_name, verbose=False)
+            except botocore.exceptions.ClientError as e:
+                # Log the error unless it is an UnrecognizedClientException,
+                # which also occurs when the user does not have the region enabled
+                # (in which case there are no tables to remove there anyway)
+                if e.response["Error"]["Code"] != "UnrecognizedClientException":
+                    print(f"Error removing table {table_name}: {e}")
+                    error_regions.add(region)
+            except Exception as e:  # pylint: disable=broad-except
+                print(f"Unexpected error removing table {table_name}: {e}")
+                error_regions.add(region)
+    if len(error_regions) > 0:
+        print(f"Removed from all applicable listed regions except: {error_regions}")
+
+
+def main() -> None:
+    # Remove any and all sync tables in all regions
+    remove_sync_tables_all_regions()
+
+    # Remove the core framework tables
+    teardown_framework_tables()
+
+    # Remove framework buckets
+    ## This targets legacy buckets that are no longer used
+    ## The current iteration of the framework does not use any buckets
+    teardown_framework_buckets()
+
+
+if __name__ == "__main__":
+    main()
diff --git a/caribou/deployment/client/cli/cli.py b/caribou/deployment/client/cli/cli.py
index fb816d74..c39a9154 100644
--- a/caribou/deployment/client/cli/cli.py
+++ b/caribou/deployment/client/cli/cli.py
@@ -5,6 +5,7 @@
 from cron_descriptor import Options, get_description
 
 from caribou.common.setup.setup_tables import main as setup_tables_func
+from caribou.common.teardown.teardown_tables import main as teardown_tables_func
 from caribou.data_collector.components.carbon.carbon_collector import CarbonCollector
 from caribou.data_collector.components.performance.performance_collector import PerformanceCollector
 from caribou.data_collector.components.provider.provider_collector import ProviderCollector
@@ -12,11 +13,11 @@
 from caribou.deployment.client import __version__ as CARIBOU_VERSION
 from caribou.deployment.client.cli.new_workflow import create_new_workflow_directory
 from caribou.deployment.client.remote_cli.remote_cli import (
-    deploy_aws_framework,
+    deploy_remote_framework,
     get_all_available_timed_cli_functions,
     get_all_default_timed_cli_functions,
-    remove_aws_framework,
     remove_aws_timers,
+    remove_remote_framework,
     report_timer_schedule_expression,
     setup_aws_timers,
     valid_framework_dir,
@@ -122,6 +123,33 @@ def setup_tables() -> None:
     setup_tables_func()
 
 
+@cli.command("teardown_framework", help="Tear down the framework.")
+def teardown_framework() -> None:
+    confirm = (
+        input("Are you sure you want to tear down the framework? This action cannot be undone. [y/N]: ").strip().lower()
[y/N]: ").strip().lower() + ) + print(f"confirm: {confirm}") + if confirm in ["y", "yes"]: + ## First remove remote framework cli + ## This also removes all timers + remove_remote_framework() + + ## Then remove all deployed workflows + print("\nRemoving all deployed workflows") + deployed_workflows: list[str] = Client().list_workflows() + for workflow_id in deployed_workflows: + client = Client(workflow_id) + client.remove() + + # Finally teardown ALL the tables + print("\nTearing down all framework tables and buckets (if any)") + teardown_tables_func() + + print("\nFramework teardown attempt has been completed.") + else: + print("Teardown aborted.") + + @cli.command("version", help="Print the version of caribou.") def version() -> None: click.echo(CARIBOU_VERSION) @@ -194,7 +222,7 @@ def deploy_remote_cli( if ephemeral_storage_mb < 512 or ephemeral_storage_mb > 10240: raise click.ClickException("Ephemeral storage must be between 512 MB and 10240 MB (10 GB).") - deploy_aws_framework(project_dir, timeout_s, memory_mb, ephemeral_storage_mb) + deploy_remote_framework(project_dir, timeout_s, memory_mb, ephemeral_storage_mb) @cli.command("list_timers", help="See all available timers.") @@ -337,7 +365,7 @@ def remove_all_timers() -> None: @cli.command("remove_remote_cli", help="Deploy the remote framework from AWS Lambda.") def remove_remote_cli() -> None: - remove_aws_framework() + remove_remote_framework() __version__ = CARIBOU_VERSION diff --git a/caribou/deployment/client/remote_cli/remote_cli.py b/caribou/deployment/client/remote_cli/remote_cli.py index 5c3c5c2b..7822df4e 100644 --- a/caribou/deployment/client/remote_cli/remote_cli.py +++ b/caribou/deployment/client/remote_cli/remote_cli.py @@ -17,8 +17,8 @@ from caribou.deployment.common.deploy.models.resource import Resource -def remove_aws_framework() -> None: - print("Removing AWS framework") +def remove_remote_framework() -> None: + print("Removing Remote framework") aws_remote_client = AWSRemoteClient(GLOBAL_SYSTEM_REGION) # Remove all timer rules @@ -42,7 +42,7 @@ def remove_aws_framework() -> None: aws_remote_client.remove_ecr_repository(REMOTE_CARIBOU_CLI_FUNCTION_NAME) -def deploy_aws_framework(project_dir: str, timeout: int, memory_size: int, ephemeral_storage: int) -> None: +def deploy_remote_framework(project_dir: str, timeout: int, memory_size: int, ephemeral_storage: int) -> None: print(f"Deploying framework to AWS in {project_dir}") aws_remote_client = AWSRemoteClient(GLOBAL_SYSTEM_REGION) diff --git a/caribou/tests/deployment/client/remote_cli/test_remote_cli.py b/caribou/tests/deployment/client/remote_cli/test_remote_cli.py index 2b38c121..f9484192 100644 --- a/caribou/tests/deployment/client/remote_cli/test_remote_cli.py +++ b/caribou/tests/deployment/client/remote_cli/test_remote_cli.py @@ -4,8 +4,8 @@ import tempfile import json from caribou.deployment.client.remote_cli.remote_cli import ( - remove_aws_framework, - deploy_aws_framework, + remove_remote_framework, + deploy_remote_framework, valid_framework_dir, _retrieve_iam_trust_policy, _get_env_vars, @@ -23,7 +23,7 @@ def test_remove_aws_framework(self, MockAWSRemoteClient): mock_client = MockAWSRemoteClient.return_value mock_client.resource_exists.side_effect = [True, True, True] - remove_aws_framework() + remove_remote_framework() mock_client.remove_role.assert_called_once_with("caribou_deployment_policy") mock_client.remove_function.assert_called_once_with("caribou_cli") @@ -40,7 +40,7 @@ def test_deploy_aws_framework(self, mock_tempdir, mock_open, 
MockDeploymentPacka mock_packager.create_framework_package.return_value = "/fake/path/to/zip" with patch.dict(os.environ, {"GOOGLE_API_KEY": "fake_key", "ELECTRICITY_MAPS_AUTH_TOKEN": "fake_token"}): - deploy_aws_framework("/fake/project/dir", 300, 128, 512) + deploy_remote_framework("/fake/project/dir", 300, 128, 512) mock_client.remove_role.assert_called_once_with("caribou_deployment_policy") mock_client.remove_function.assert_called_once_with("caribou_cli")
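
Note on usage (not part of the patch): a minimal sketch of how the new teardown path would be exercised end to end, assuming the Click CLI is exposed through a `caribou` console entry point (the exact invocation depends on how the package is installed) and that AWS credentials for the global system region are configured:

    $ caribou teardown_framework
    Are you sure you want to tear down the framework? This action cannot be undone. [y/N]: y

The command first calls remove_remote_framework(), then removes each deployed workflow via Client(workflow_id).remove(), and finally calls teardown_tables_func(). The table and bucket cleanup can also be run on its own through the new module's __main__ guard, e.g. `python -m caribou.common.teardown.teardown_tables`, which skips the remote-CLI and workflow removal steps.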