diff --git a/samcli/cli/cli_config_file.py b/samcli/cli/cli_config_file.py index 67e214e122..9e2b4aa020 100644 --- a/samcli/cli/cli_config_file.py +++ b/samcli/cli/cli_config_file.py @@ -27,12 +27,14 @@ class TomlProvider: A parser for toml configuration files """ - def __init__(self, section=None): + def __init__(self, section=None, cmd_names=None): """ The constructor for TomlProvider class :param section: section defined in the configuration file nested within `cmd` + :param cmd_names: cmd_name defined in the configuration file """ self.section = section + self.cmd_names = cmd_names def __call__(self, config_path, config_env, cmd_names): """ @@ -67,18 +69,21 @@ def __call__(self, config_path, config_env, cmd_names): LOG.debug("Config file '%s' does not exist", samconfig.path()) return resolved_config + if not self.cmd_names: + self.cmd_names = cmd_names + try: LOG.debug( "Loading configuration values from [%s.%s.%s] (env.command_name.section) in config file at '%s'...", config_env, - cmd_names, + self.cmd_names, self.section, samconfig.path(), ) # NOTE(TheSriram): change from tomlkit table type to normal dictionary, # so that click defaults work out of the box. - resolved_config = dict(samconfig.get_all(cmd_names, self.section, env=config_env).items()) + resolved_config = dict(samconfig.get_all(self.cmd_names, self.section, env=config_env).items()) LOG.debug("Configuration values successfully loaded.") LOG.debug("Configuration values are: %s", resolved_config) @@ -87,7 +92,7 @@ def __call__(self, config_path, config_env, cmd_names): "Error reading configuration from [%s.%s.%s] (env.command_name.section) " "in configuration file at '%s' with : %s", config_env, - cmd_names, + self.cmd_names, self.section, samconfig.path(), str(ex), diff --git a/samcli/cli/command.py b/samcli/cli/command.py index 384529f78b..c329345f14 100644 --- a/samcli/cli/command.py +++ b/samcli/cli/command.py @@ -19,6 +19,7 @@ "samcli.commands.local.local", "samcli.commands.package", "samcli.commands.deploy", + "samcli.commands.delete", "samcli.commands.logs", "samcli.commands.publish", # We intentionally do not expose the `bootstrap` command for now. We might open it up later diff --git a/samcli/commands/delete/__init__.py b/samcli/commands/delete/__init__.py new file mode 100644 index 0000000000..ea5b0202d2 --- /dev/null +++ b/samcli/commands/delete/__init__.py @@ -0,0 +1,6 @@ +""" +`sam delete` command +""" + +# Expose the cli object here +from .command import cli # noqa diff --git a/samcli/commands/delete/command.py b/samcli/commands/delete/command.py new file mode 100644 index 0000000000..266d093a36 --- /dev/null +++ b/samcli/commands/delete/command.py @@ -0,0 +1,90 @@ +""" +CLI command for "delete" command +""" + +import logging + +import click +from samcli.cli.main import aws_creds_options, common_options, pass_context, print_cmdline_args + +from samcli.lib.utils.version_checker import check_newer_version + +SHORT_HELP = "Delete an AWS SAM application and the artifacts created by sam deploy." + +HELP_TEXT = """The sam delete command deletes the CloudFormation +stack and all the artifacts which were created using sam deploy. + +\b +e.g. sam delete + +\b +""" + +LOG = logging.getLogger(__name__) + + +@click.command( + "delete", + short_help=SHORT_HELP, + context_settings={"ignore_unknown_options": False, "allow_interspersed_args": True, "allow_extra_args": True}, + help=HELP_TEXT, +) +@click.option( + "--stack-name", + required=False, + help="The name of the AWS CloudFormation stack you want to delete. 
", +) +@click.option( + "--config-file", + help=( + "The path and file name of the configuration file containing default parameter values to use. " + "Its default value is 'samconfig.toml' in project directory. For more information about configuration files, " + "see: " + "https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-config.html." + ), + type=click.STRING, + default="samconfig.toml", + show_default=True, +) +@click.option( + "--config-env", + help=( + "The environment name specifying the default parameter values in the configuration file to use. " + "Its default value is 'default'. For more information about configuration files, see: " + "https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-config.html." + ), + type=click.STRING, + default="default", + show_default=True, +) +@aws_creds_options +@common_options +@pass_context +@check_newer_version +@print_cmdline_args +def cli( + ctx, + stack_name: str, + config_file: str, + config_env: str, +): + """ + `sam delete` command entry point + """ + + # All logic must be implemented in the ``do_cli`` method. This helps with easy unit testing + do_cli( + stack_name=stack_name, region=ctx.region, config_file=config_file, config_env=config_env, profile=ctx.profile + ) # pragma: no cover + + +def do_cli(stack_name: str, region: str, config_file: str, config_env: str, profile: str): + """ + Implementation of the ``cli`` method + """ + from samcli.commands.delete.delete_context import DeleteContext + + with DeleteContext( + stack_name=stack_name, region=region, profile=profile, config_file=config_file, config_env=config_env + ) as delete_context: + delete_context.run() diff --git a/samcli/commands/delete/delete_context.py b/samcli/commands/delete/delete_context.py new file mode 100644 index 0000000000..4b7a5ff295 --- /dev/null +++ b/samcli/commands/delete/delete_context.py @@ -0,0 +1,144 @@ +""" +Delete a SAM stack +""" + +import boto3 + +import click +from click import confirm +from click import prompt +from samcli.cli.cli_config_file import TomlProvider +from samcli.lib.utils.botoconfig import get_boto_config_with_user_agent +from samcli.lib.delete.cf_utils import CfUtils +from samcli.lib.package.s3_uploader import S3Uploader +from samcli.lib.package.artifact_exporter import mktempfile, get_cf_template_name + +CONFIG_COMMAND = "deploy" +CONFIG_SECTION = "parameters" +TEMPLATE_STAGE = "Original" + + +class DeleteContext: + def __init__(self, stack_name: str, region: str, profile: str, config_file: str, config_env: str): + self.stack_name = stack_name + self.region = region + self.profile = profile + self.config_file = config_file + self.config_env = config_env + self.s3_bucket = None + self.s3_prefix = None + self.cf_utils = None + self.s3_uploader = None + self.cf_template_file_name = None + self.delete_artifacts_folder = None + self.delete_cf_template_file = None + + def __enter__(self): + self.parse_config_file() + if not self.stack_name: + self.stack_name = prompt( + click.style("\tEnter stack name you want to delete:", bold=True), type=click.STRING + ) + + return self + + def __exit__(self, *args): + pass + + def parse_config_file(self): + """ + Read the provided config file if it exists and assign the options values. 
+        """
+        toml_provider = TomlProvider(CONFIG_SECTION, [CONFIG_COMMAND])
+        config_options = toml_provider(
+            config_path=self.config_file, config_env=self.config_env, cmd_names=[CONFIG_COMMAND]
+        )
+        if config_options:
+            if not self.stack_name:
+                self.stack_name = config_options.get("stack_name", None)
+
+            # If the stack_name is the same as the one in the samconfig file,
+            # read region, profile, and S3 settings from it unless the customer
+            # supplied them explicitly.
+            if self.stack_name and self.stack_name == config_options.get("stack_name", None):
+                if not self.region:
+                    self.region = config_options.get("region", None)
+                    click.get_current_context().region = self.region
+                if not self.profile:
+                    self.profile = config_options.get("profile", None)
+                    click.get_current_context().profile = self.profile
+                self.s3_bucket = config_options.get("s3_bucket", None)
+                self.s3_prefix = config_options.get("s3_prefix", None)
+
+    def delete(self):
+        """
+        Delete the CloudFormation stack and the S3 artifacts created by sam deploy.
+        """
+        template = self.cf_utils.get_stack_template(self.stack_name, TEMPLATE_STAGE)
+        template_str = template.get("TemplateBody", None)
+
+        if self.s3_bucket and self.s3_prefix and template_str:
+            self.delete_artifacts_folder = confirm(
+                click.style(
+                    "\tAre you sure you want to delete the folder"
+                    + f" {self.s3_prefix} in S3 which contains the artifacts?",
+                    bold=True,
+                ),
+                default=False,
+            )
+            if not self.delete_artifacts_folder:
+                with mktempfile() as temp_file:
+                    self.cf_template_file_name = get_cf_template_name(
+                        temp_file=temp_file, template_str=template_str, extension="template"
+                    )
+                self.delete_cf_template_file = confirm(
+                    click.style(
+                        "\tDo you want to delete the template file" + f" {self.cf_template_file_name} in S3?", bold=True
+                    ),
+                    default=False,
+                )
+
+        # Delete the primary stack
+        self.cf_utils.delete_stack(stack_name=self.stack_name)
+
+        click.echo(f"\n\t- Deleting CloudFormation stack {self.stack_name}")
+
+        # Delete the CF template file in S3
+        if self.delete_cf_template_file:
+            self.s3_uploader.delete_artifact(remote_path=self.cf_template_file_name)
+
+        # Delete the folder of artifacts if s3_bucket and s3_prefix provided
+        elif self.delete_artifacts_folder:
+            self.s3_uploader.delete_prefix_artifacts()
+
+    def run(self):
+        """
+        Delete the stack based on the arguments provided by the customer and samconfig.toml.
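+
+        Flow: confirm the deletion with the customer, verify the stack exists
+        via CfUtils.has_stack, then delete the stack and, on request, the
+        uploaded template file or the whole artifact folder in S3.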
+ """ + delete_stack = confirm( + click.style( + f"\tAre you sure you want to delete the stack {self.stack_name}" + f" in the region {self.region} ?", + bold=True, + ), + default=False, + ) + # Fetch the template using the stack-name + if delete_stack and self.region: + boto_config = get_boto_config_with_user_agent() + + # Define cf_client based on the region as different regions can have same stack-names + cloudformation_client = boto3.client( + "cloudformation", region_name=self.region if self.region else None, config=boto_config + ) + + s3_client = boto3.client("s3", region_name=self.region if self.region else None, config=boto_config) + + self.s3_uploader = S3Uploader(s3_client=s3_client, bucket_name=self.s3_bucket, prefix=self.s3_prefix) + self.cf_utils = CfUtils(cloudformation_client) + + is_deployed = self.cf_utils.has_stack(stack_name=self.stack_name) + + if is_deployed: + self.delete() + click.echo("\nDeleted successfully") + else: + click.echo(f"Error: The input stack {self.stack_name} does not exist on Cloudformation") diff --git a/samcli/commands/delete/exceptions.py b/samcli/commands/delete/exceptions.py new file mode 100644 index 0000000000..7e2ba5105c --- /dev/null +++ b/samcli/commands/delete/exceptions.py @@ -0,0 +1,24 @@ +""" +Exceptions that are raised by sam delete +""" +from samcli.commands.exceptions import UserException + + +class DeleteFailedError(UserException): + def __init__(self, stack_name, msg): + self.stack_name = stack_name + self.msg = msg + + message_fmt = "Failed to delete the stack: {stack_name}, {msg}" + + super().__init__(message=message_fmt.format(stack_name=self.stack_name, msg=msg)) + + +class FetchTemplateFailedError(UserException): + def __init__(self, stack_name, msg): + self.stack_name = stack_name + self.msg = msg + + message_fmt = "Failed to fetch the template for the stack: {stack_name}, {msg}" + + super().__init__(message=message_fmt.format(stack_name=self.stack_name, msg=msg)) diff --git a/samcli/lib/delete/__init__.py b/samcli/lib/delete/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/samcli/lib/delete/cf_utils.py b/samcli/lib/delete/cf_utils.py new file mode 100644 index 0000000000..a78ed6d38b --- /dev/null +++ b/samcli/lib/delete/cf_utils.py @@ -0,0 +1,104 @@ +""" +Delete Cloudformation stacks and s3 files +""" + +import logging + +from typing import Dict +from botocore.exceptions import ClientError, BotoCoreError +from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError + +LOG = logging.getLogger(__name__) + + +class CfUtils: + def __init__(self, cloudformation_client): + self._client = cloudformation_client + + def has_stack(self, stack_name: str) -> bool: + """ + Checks if a CloudFormation stack with given name exists + + :param stack_name: Name or ID of the stack + :return: True if stack exists. False otherwise + """ + try: + resp = self._client.describe_stacks(StackName=stack_name) + if not resp["Stacks"]: + return False + + stack = resp["Stacks"][0] + # Note: Stacks with REVIEW_IN_PROGRESS can be deleted + # using delete_stack but get_template does not return + # the template_str for this stack restricting deletion of + # artifacts. + return bool(stack["StackStatus"] != "REVIEW_IN_PROGRESS") + + except ClientError as e: + # If a stack does not exist, describe_stacks will throw an + # exception. Unfortunately we don't have a better way than parsing + # the exception msg to understand the nature of this exception. 
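+            # (Assumed error shape) A missing stack surfaces as a ClientError
+            # whose response looks like:
+            #   {"Error": {"Code": "ValidationError",
+            #              "Message": "Stack with id <name> does not exist"}}
+            # so the message text, not the error code, identifies this case.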
+ + if "Stack with id {0} does not exist".format(stack_name) in str(e): + LOG.debug("Stack with id %s does not exist", stack_name) + return False + LOG.error("ClientError Exception : %s", str(e)) + raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e + except BotoCoreError as e: + # If there are credentials, environment errors, + # catch that and throw a delete failed error. + + LOG.error("Botocore Exception : %s", str(e)) + raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e + + except Exception as e: + # We don't know anything about this exception. Don't handle + LOG.error("Unable to get stack details.", exc_info=e) + raise e + + def get_stack_template(self, stack_name: str, stage: str) -> Dict: + """ + Return the Cloudformation template of the given stack_name + + :param stack_name: Name or ID of the stack + :param stage: The Stage of the template Original or Processed + :return: Template body of the stack + """ + try: + resp = self._client.get_template(StackName=stack_name, TemplateStage=stage) + if not resp["TemplateBody"]: + return {} + return dict(resp) + + except (ClientError, BotoCoreError) as e: + # If there are credentials, environment errors, + # catch that and throw a delete failed error. + + LOG.error("Failed to fetch template for the stack : %s", str(e)) + raise FetchTemplateFailedError(stack_name=stack_name, msg=str(e)) from e + + except Exception as e: + # We don't know anything about this exception. Don't handle + LOG.error("Unable to get stack details.", exc_info=e) + raise e + + def delete_stack(self, stack_name: str): + """ + Delete the Cloudformation stack with the given stack_name + + :param stack_name: Name or ID of the stack + """ + try: + self._client.delete_stack(StackName=stack_name) + + except (ClientError, BotoCoreError) as e: + # If there are credentials, environment errors, + # catch that and throw a delete failed error. + + LOG.error("Failed to delete stack : %s", str(e)) + raise DeleteFailedError(stack_name=stack_name, msg=str(e)) from e + + except Exception as e: + # We don't know anything about this exception. Don't handle + LOG.error("Failed to delete stack. ", exc_info=e) + raise e diff --git a/samcli/lib/deploy/deployer.py b/samcli/lib/deploy/deployer.py index 8aae03425e..0e9b3b4bf0 100644 --- a/samcli/lib/deploy/deployer.py +++ b/samcli/lib/deploy/deployer.py @@ -34,7 +34,7 @@ ) from samcli.commands._utils.table_print import pprint_column_names, pprint_columns, newline_per_item, MIN_OFFSET from samcli.commands.deploy import exceptions as deploy_exceptions -from samcli.lib.package.artifact_exporter import mktempfile +from samcli.lib.package.artifact_exporter import mktempfile, get_cf_template_name from samcli.lib.package.s3_uploader import S3Uploader from samcli.lib.utils.time import utc_to_timestamp @@ -174,12 +174,13 @@ def create_changeset( # TemplateBody. This is required for large templates. 
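+        # The template now gets a content-derived name ("<md5>.template", via
+        # get_cf_template_name) and is uploaded with upload() rather than
+        # upload_with_dedup(), so `sam delete` can later recompute the same
+        # S3 key from the fetched template body when removing the object.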
if s3_uploader: with mktempfile() as temporary_file: - temporary_file.write(kwargs.pop("TemplateBody")) - temporary_file.flush() + remote_path = get_cf_template_name( + temp_file=temporary_file, template_str=kwargs.pop("TemplateBody"), extension="template" + ) # TemplateUrl property requires S3 URL to be in path-style format parts = S3Uploader.parse_s3_url( - s3_uploader.upload_with_dedup(temporary_file.name, "template"), version_property="Version" + s3_uploader.upload(temporary_file.name, remote_path), version_property="Version" ) kwargs["TemplateURL"] = s3_uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None)) diff --git a/samcli/lib/package/artifact_exporter.py b/samcli/lib/package/artifact_exporter.py index c0f2b94576..6bc9787018 100644 --- a/samcli/lib/package/artifact_exporter.py +++ b/samcli/lib/package/artifact_exporter.py @@ -42,6 +42,7 @@ is_local_file, mktempfile, is_s3_url, + get_cf_template_name, ) from samcli.lib.utils.packagetype import ZIP from samcli.yamlhelper import yaml_parse, yaml_dump @@ -83,10 +84,11 @@ def do_export(self, resource_id, resource_dict, parent_dir): exported_template_str = yaml_dump(exported_template_dict) with mktempfile() as temporary_file: - temporary_file.write(exported_template_str) - temporary_file.flush() - url = self.uploader.upload_with_dedup(temporary_file.name, "template") + remote_path = get_cf_template_name( + temp_file=temporary_file, template_str=exported_template_str, extension="template" + ) + url = self.uploader.upload(temporary_file.name, remote_path) # TemplateUrl property requires S3 URL to be in path-style format parts = S3Uploader.parse_s3_url(url, version_property="Version") diff --git a/samcli/lib/package/s3_uploader.py b/samcli/lib/package/s3_uploader.py index 4a64a983d0..61a6988416 100644 --- a/samcli/lib/package/s3_uploader.py +++ b/samcli/lib/package/s3_uploader.py @@ -22,6 +22,7 @@ from collections import abc from typing import Optional, Dict, Any, cast from urllib.parse import urlparse, parse_qs +import click import botocore import botocore.exceptions @@ -144,6 +145,49 @@ def upload_with_dedup( return self.upload(file_name, remote_path) + def delete_artifact(self, remote_path: str, is_key: bool = False) -> Dict: + """ + Deletes a given file from S3 + :param remote_path: Path to the file that will be deleted + :param is_key: If the given remote_path is the key or a file_name + + :return: metadata dict of the deleted object + """ + try: + if not self.bucket_name: + LOG.error("Bucket not specified") + raise BucketNotSpecifiedError() + + key = remote_path + if self.prefix and not is_key: + key = "{0}/{1}".format(self.prefix, remote_path) + + # Deleting Specific file with key + click.echo(f"\t- Deleting S3 file {key}") + resp = self.s3.delete_object(Bucket=self.bucket_name, Key=key) + LOG.debug("S3 method delete_object is called and returned: %s", resp["ResponseMetadata"]) + return dict(resp["ResponseMetadata"]) + + except botocore.exceptions.ClientError as ex: + error_code = ex.response["Error"]["Code"] + if error_code == "NoSuchBucket": + LOG.error("Provided bucket %s does not exist ", self.bucket_name) + raise NoSuchBucketError(bucket_name=self.bucket_name) from ex + raise ex + + def delete_prefix_artifacts(self): + """ + Deletes all the files from the prefix in S3 + """ + if not self.bucket_name: + LOG.error("Bucket not specified") + raise BucketNotSpecifiedError() + if self.prefix: + prefix_files = self.s3.list_objects_v2(Bucket=self.bucket_name, Prefix=self.prefix) + + for obj in prefix_files["Contents"]: 
+ self.delete_artifact(obj["Key"], True) + def file_exists(self, remote_path: str) -> bool: """ Check if the file we are trying to upload already exists in S3 diff --git a/samcli/lib/package/utils.py b/samcli/lib/package/utils.py index 6317c35a48..c33b2b3de7 100644 --- a/samcli/lib/package/utils.py +++ b/samcli/lib/package/utils.py @@ -11,7 +11,7 @@ import zipfile import contextlib from contextlib import contextmanager -from typing import Dict, Optional, cast +from typing import Dict, Optional, cast, TextIO import jmespath @@ -19,7 +19,7 @@ from samcli.commands.package.exceptions import ImageNotFoundError from samcli.lib.package.ecr_utils import is_ecr_url from samcli.lib.package.s3_uploader import S3Uploader -from samcli.lib.utils.hash import dir_checksum +from samcli.lib.utils.hash import dir_checksum, file_checksum LOG = logging.getLogger(__name__) @@ -284,3 +284,13 @@ def copy_to_temp_dir(filepath): dst = os.path.join(tmp_dir, os.path.basename(filepath)) shutil.copyfile(filepath, dst) return tmp_dir + + +def get_cf_template_name(temp_file: TextIO, template_str: str, extension: str) -> str: + temp_file.write(template_str) + temp_file.flush() + + filemd5 = file_checksum(temp_file.name) + remote_path = filemd5 + "." + extension + + return remote_path diff --git a/tests/unit/commands/delete/__init__.py b/tests/unit/commands/delete/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/commands/delete/test_command.py b/tests/unit/commands/delete/test_command.py new file mode 100644 index 0000000000..4e268688ee --- /dev/null +++ b/tests/unit/commands/delete/test_command.py @@ -0,0 +1,53 @@ +from unittest import TestCase +from unittest.mock import ANY, MagicMock, Mock, call, patch + +from samcli.commands.delete.command import do_cli +from tests.unit.cli.test_cli_config_file import MockContext + + +def get_mock_sam_config(): + mock_sam_config = MagicMock() + mock_sam_config.exists = MagicMock(return_value=True) + return mock_sam_config + + +MOCK_SAM_CONFIG = get_mock_sam_config() + + +class TestDeleteCliCommand(TestCase): + def setUp(self): + + self.stack_name = "stack-name" + self.s3_bucket = "s3-bucket" + self.s3_prefix = "s3-prefix" + self.region = None + self.profile = None + self.config_env = "mock-default-env" + self.config_file = "mock-default-filename" + MOCK_SAM_CONFIG.reset_mock() + + @patch("samcli.commands.delete.command.click") + @patch("samcli.commands.delete.delete_context.DeleteContext") + def test_all_args(self, mock_delete_context, mock_delete_click): + + context_mock = Mock() + mock_delete_context.return_value.__enter__.return_value = context_mock + + do_cli( + stack_name=self.stack_name, + region=self.region, + config_file=self.config_file, + config_env=self.config_env, + profile=self.profile, + ) + + mock_delete_context.assert_called_with( + stack_name=self.stack_name, + region=self.region, + profile=self.profile, + config_file=self.config_file, + config_env=self.config_env, + ) + + context_mock.run.assert_called_with() + self.assertEqual(context_mock.run.call_count, 1) diff --git a/tests/unit/commands/delete/test_delete_context.py b/tests/unit/commands/delete/test_delete_context.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/lib/delete/__init__.py b/tests/unit/lib/delete/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/unit/lib/delete/test_cf_utils.py b/tests/unit/lib/delete/test_cf_utils.py new file mode 100644 index 0000000000..9e80a00d4a --- /dev/null +++ 
b/tests/unit/lib/delete/test_cf_utils.py @@ -0,0 +1,86 @@ +from unittest.mock import patch, MagicMock, ANY, call +from unittest import TestCase + +from samcli.commands.delete.exceptions import DeleteFailedError, FetchTemplateFailedError +from botocore.exceptions import ClientError, BotoCoreError +from samcli.lib.delete.cf_utils import CfUtils + + +class TestCfUtils(TestCase): + def setUp(self): + self.session = MagicMock() + self.cloudformation_client = self.session.client("cloudformation") + self.s3_client = self.session.client("s3") + self.cf_utils = CfUtils(self.cloudformation_client) + + def test_cf_utils_init(self): + self.assertEqual(self.cf_utils._client, self.cloudformation_client) + + def test_cf_utils_has_no_stack(self): + self.cf_utils._client.describe_stacks = MagicMock(return_value={"Stacks": []}) + self.assertEqual(self.cf_utils.has_stack("test"), False) + + def test_cf_utils_has_stack_exception_non_exsistent(self): + self.cf_utils._client.describe_stacks = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "Stack with id test does not exist"}}, + operation_name="stack_status", + ) + ) + self.assertEqual(self.cf_utils.has_stack("test"), False) + + def test_cf_utils_has_stack_exception_client_error(self): + self.cf_utils._client.describe_stacks = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "Error: The security token included in the request is expired"}}, + operation_name="stack_status", + ) + ) + with self.assertRaises(DeleteFailedError): + self.cf_utils.has_stack("test") + + def test_cf_utils_has_stack_exception(self): + self.cf_utils._client.describe_stacks = MagicMock(side_effect=Exception()) + with self.assertRaises(Exception): + self.cf_utils.has_stack("test") + + def test_cf_utils_has_stack_in_review(self): + self.cf_utils._client.describe_stacks = MagicMock( + return_value={"Stacks": [{"StackStatus": "REVIEW_IN_PROGRESS"}]} + ) + self.assertEqual(self.cf_utils.has_stack("test"), False) + + def test_cf_utils_has_stack_exception_botocore(self): + self.cf_utils._client.describe_stacks = MagicMock(side_effect=BotoCoreError()) + with self.assertRaises(DeleteFailedError): + self.cf_utils.has_stack("test") + + def test_cf_utils_get_stack_template_exception_client_error(self): + self.cf_utils._client.get_template = MagicMock( + side_effect=ClientError( + error_response={"Error": {"Message": "Stack with id test does not exist"}}, + operation_name="stack_status", + ) + ) + with self.assertRaises(FetchTemplateFailedError): + self.cf_utils.get_stack_template("test", "Original") + + def test_cf_utils_get_stack_template_exception_botocore(self): + self.cf_utils._client.get_template = MagicMock(side_effect=BotoCoreError()) + with self.assertRaises(FetchTemplateFailedError): + self.cf_utils.get_stack_template("test", "Original") + + def test_cf_utils_get_stack_template_exception(self): + self.cf_utils._client.get_template = MagicMock(side_effect=Exception()) + with self.assertRaises(Exception): + self.cf_utils.get_stack_template("test", "Original") + + def test_cf_utils_delete_stack_exception_botocore(self): + self.cf_utils._client.delete_stack = MagicMock(side_effect=BotoCoreError()) + with self.assertRaises(DeleteFailedError): + self.cf_utils.delete_stack("test") + + def test_cf_utils_delete_stack_exception(self): + self.cf_utils._client.delete_stack = MagicMock(side_effect=Exception()) + with self.assertRaises(Exception): + self.cf_utils.delete_stack("test") diff --git a/tests/unit/lib/package/test_artifact_exporter.py 
b/tests/unit/lib/package/test_artifact_exporter.py index 7cc20f6be7..f7aceafef1 100644 --- a/tests/unit/lib/package/test_artifact_exporter.py +++ b/tests/unit/lib/package/test_artifact_exporter.py @@ -778,7 +778,7 @@ def test_export_cloudformation_stack(self, TemplateMock): TemplateMock.return_value = template_instance_mock template_instance_mock.export.return_value = exported_template_dict - self.s3_uploader_mock.upload_with_dedup.return_value = result_s3_url + self.s3_uploader_mock.upload.return_value = result_s3_url self.s3_uploader_mock.to_path_style_s3_url.return_value = result_path_style_s3_url with tempfile.NamedTemporaryFile() as handle: @@ -792,7 +792,7 @@ def test_export_cloudformation_stack(self, TemplateMock): TemplateMock.assert_called_once_with(template_path, parent_dir, self.uploaders_mock, self.code_signer_mock) template_instance_mock.export.assert_called_once_with() - self.s3_uploader_mock.upload_with_dedup.assert_called_once_with(mock.ANY, "template") + self.s3_uploader_mock.upload.assert_called_once_with(mock.ANY, mock.ANY) self.s3_uploader_mock.to_path_style_s3_url.assert_called_once_with("world", None) def test_export_cloudformation_stack_no_upload_path_is_s3url(self): @@ -805,7 +805,7 @@ def test_export_cloudformation_stack_no_upload_path_is_s3url(self): # Case 1: Path is already S3 url stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_cloudformation_stack_no_upload_path_is_httpsurl(self): stack_resource = CloudFormationStackResource(self.uploaders_mock, self.code_signer_mock) @@ -817,7 +817,7 @@ def test_export_cloudformation_stack_no_upload_path_is_httpsurl(self): # Case 1: Path is already S3 url stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_cloudformation_stack_no_upload_path_is_s3_region_httpsurl(self): stack_resource = CloudFormationStackResource(self.uploaders_mock, self.code_signer_mock) @@ -829,7 +829,7 @@ def test_export_cloudformation_stack_no_upload_path_is_s3_region_httpsurl(self): stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_cloudformation_stack_no_upload_path_is_empty(self): stack_resource = CloudFormationStackResource(self.uploaders_mock, self.code_signer_mock) @@ -842,7 +842,7 @@ def test_export_cloudformation_stack_no_upload_path_is_empty(self): resource_dict = {} stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict, {}) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_cloudformation_stack_no_upload_path_not_file(self): stack_resource = CloudFormationStackResource(self.uploaders_mock, self.code_signer_mock) @@ -855,7 +855,7 @@ def test_export_cloudformation_stack_no_upload_path_not_file(self): resource_dict = {property_name: dirname} with self.assertRaises(exceptions.ExportFailedError): stack_resource.export(resource_id, resource_dict, "dir") - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() 
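
# For reference, the object-naming scheme these assertions now exercise can be
# reproduced in a few lines. A minimal sketch, assuming file_checksum() in
# samcli.lib.utils.hash is an MD5 over the file contents; the helper name
# template_remote_path is ours, not the library's:

import hashlib


def template_remote_path(template_str: str, extension: str = "template") -> str:
    # Mirror get_cf_template_name(): name the S3 object after the checksum of
    # the template body, i.e. "<md5>.template".
    filemd5 = hashlib.md5(template_str.encode("utf-8")).hexdigest()
    return filemd5 + "." + extension


# `sam deploy` (uploading) and `sam delete` (locating the object to remove)
# derive the same key from the same body, which is why these tests assert
# upload(mock.ANY, mock.ANY) instead of a fixed "template" argument.
print(template_remote_path('{"Resources": {}}'))
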
@patch("samcli.lib.package.artifact_exporter.Template") def test_export_serverless_application(self, TemplateMock): @@ -871,7 +871,7 @@ def test_export_serverless_application(self, TemplateMock): TemplateMock.return_value = template_instance_mock template_instance_mock.export.return_value = exported_template_dict - self.s3_uploader_mock.upload_with_dedup.return_value = result_s3_url + self.s3_uploader_mock.upload.return_value = result_s3_url self.s3_uploader_mock.to_path_style_s3_url.return_value = result_path_style_s3_url with tempfile.NamedTemporaryFile() as handle: @@ -885,7 +885,7 @@ def test_export_serverless_application(self, TemplateMock): TemplateMock.assert_called_once_with(template_path, parent_dir, self.uploaders_mock, self.code_signer_mock) template_instance_mock.export.assert_called_once_with() - self.s3_uploader_mock.upload_with_dedup.assert_called_once_with(mock.ANY, "template") + self.s3_uploader_mock.upload.assert_called_once_with(mock.ANY, mock.ANY) self.s3_uploader_mock.to_path_style_s3_url.assert_called_once_with("world", None) def test_export_serverless_application_no_upload_path_is_s3url(self): @@ -898,7 +898,7 @@ def test_export_serverless_application_no_upload_path_is_s3url(self): # Case 1: Path is already S3 url stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_serverless_application_no_upload_path_is_httpsurl(self): stack_resource = ServerlessApplicationResource(self.uploaders_mock, self.code_signer_mock) @@ -910,7 +910,7 @@ def test_export_serverless_application_no_upload_path_is_httpsurl(self): # Case 1: Path is already S3 url stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], s3_url) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_serverless_application_no_upload_path_is_empty(self): stack_resource = ServerlessApplicationResource(self.uploaders_mock, self.code_signer_mock) @@ -921,7 +921,7 @@ def test_export_serverless_application_no_upload_path_is_empty(self): resource_dict = {} stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict, {}) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_serverless_application_no_upload_path_not_file(self): stack_resource = ServerlessApplicationResource(self.uploaders_mock, self.code_signer_mock) @@ -933,7 +933,7 @@ def test_export_serverless_application_no_upload_path_not_file(self): resource_dict = {property_name: dirname} with self.assertRaises(exceptions.ExportFailedError): stack_resource.export(resource_id, resource_dict, "dir") - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() def test_export_serverless_application_no_upload_path_is_dictionary(self): stack_resource = ServerlessApplicationResource(self.uploaders_mock, self.code_signer_mock) @@ -945,7 +945,7 @@ def test_export_serverless_application_no_upload_path_is_dictionary(self): resource_dict = {property_name: location} stack_resource.export(resource_id, resource_dict, "dir") self.assertEqual(resource_dict[property_name], location) - self.s3_uploader_mock.upload_with_dedup.assert_not_called() + self.s3_uploader_mock.upload.assert_not_called() 
@patch("samcli.lib.package.artifact_exporter.yaml_parse") def test_template_export_metadata(self, yaml_parse_mock): diff --git a/tests/unit/lib/package/test_s3_uploader.py b/tests/unit/lib/package/test_s3_uploader.py index c40c4c6cf4..f1765c3f8c 100644 --- a/tests/unit/lib/package/test_s3_uploader.py +++ b/tests/unit/lib/package/test_s3_uploader.py @@ -172,6 +172,51 @@ def test_s3_upload_no_bucket(self): s3_uploader.upload(f.name, remote_path) self.assertEqual(BucketNotSpecifiedError().message, str(ex)) + def test_s3_delete_artifact(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=None, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + no_progressbar=self.no_progressbar, + ) + s3_uploader.artifact_metadata = {"a": "b"} + with self.assertRaises(BucketNotSpecifiedError) as ex: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + self.assertEqual(s3_uploader.delete_artifact(f.name), {"a": "b"}) + + def test_s3_delete_artifact_no_bucket(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=None, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=self.force_upload, + no_progressbar=self.no_progressbar, + ) + with self.assertRaises(BucketNotSpecifiedError) as ex: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as f: + s3_uploader.delete_artifact(f.name) + self.assertEqual(BucketNotSpecifiedError().message, str(ex)) + + def test_s3_delete_artifact_bucket_not_found(self): + s3_uploader = S3Uploader( + s3_client=self.s3, + bucket_name=self.bucket_name, + prefix=self.prefix, + kms_key_id=self.kms_key_id, + force_upload=True, + no_progressbar=self.no_progressbar, + ) + + s3_uploader.s3.delete_object = MagicMock( + side_effect=ClientError(error_response={"Error": {"Code": "NoSuchBucket"}}, operation_name="create_object") + ) + with tempfile.NamedTemporaryFile() as f: + with self.assertRaises(NoSuchBucketError): + s3_uploader.delete_artifact(f.name) + def test_s3_upload_with_dedup(self): s3_uploader = S3Uploader( s3_client=self.s3,