From 5cbb7d5847b63e41ace5e519079ea3635999c78d Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Tue, 12 Jul 2022 23:22:09 +0300 Subject: [PATCH 1/8] chore: sync foss changes --- .../test_custom_http_api_domains_test.py | 3 +- .../test_custom_rest_api_domains.py | 3 +- .../test_function_with_file_system_config.py | 14 +++- .../test_function_with_http_api.py | 13 ++- .../test_function_with_implicit_http_api.py | 7 +- .../test_intrinsic_function_support.py | 32 +++++--- integration/conftest.py | 74 ++++++++++++++++- integration/helpers/base_test.py | 79 ++----------------- integration/helpers/deployer/deployer.py | 77 +----------------- .../helpers/deployer/exceptions/exceptions.py | 12 ++- integration/helpers/resource.py | 26 ++++-- integration/helpers/stack.py | 13 +-- .../function_with_file_system_config.json | 2 - .../intrinsics_serverless_function.json | 4 +- .../function_with_file_system_config.yaml | 24 +++--- .../intrinsics_serverless_function.yaml | 21 ++--- integration/setup/companion-stack.yaml | 12 ++- integration/setup/test_setup_teardown.py | 14 +++- integration/single/test_basic_api.py | 31 +++++--- integration/single/test_basic_function.py | 7 +- 20 files changed, 232 insertions(+), 236 deletions(-) diff --git a/integration/combination/test_custom_http_api_domains_test.py b/integration/combination/test_custom_http_api_domains_test.py index cde5e2435..6aa66623d 100644 --- a/integration/combination/test_custom_http_api_domains_test.py +++ b/integration/combination/test_custom_http_api_domains_test.py @@ -2,7 +2,6 @@ from integration.config.service_names import CUSTOM_DOMAIN from integration.helpers.base_internal_test import BaseInternalTest -from integration.helpers.file_resources import FILE_TO_S3_URI_MAP from integration.helpers.resource import current_region_not_included @@ -25,7 +24,7 @@ def test_custom_http_api_domains_regional(self): self.assertEqual("httpapi.sam-gamma-regional.com", 
result["DomainName"]) mtls_auth_config = result["MutualTlsAuthentication"] - self.assertEqual(FILE_TO_S3_URI_MAP["MTLSCert.pem"]["uri"], mtls_auth_config["TruststoreUri"]) + self.assertEqual(self.file_to_s3_uri_map["MTLSCert.pem"]["uri"], mtls_auth_config["TruststoreUri"]) domain_name_configs = result["DomainNameConfigurations"] self.assertEqual(1, len(domain_name_configs)) diff --git a/integration/combination/test_custom_rest_api_domains.py b/integration/combination/test_custom_rest_api_domains.py index 79618aca8..f5319723a 100644 --- a/integration/combination/test_custom_rest_api_domains.py +++ b/integration/combination/test_custom_rest_api_domains.py @@ -2,7 +2,6 @@ from integration.config.service_names import CUSTOM_DOMAIN from integration.helpers.base_internal_test import BaseInternalTest -from integration.helpers.file_resources import FILE_TO_S3_URI_MAP from integration.helpers.resource import current_region_not_included @@ -47,7 +46,7 @@ def test_custom_rest_api_domains_regional(self): self.assertEqual("REGIONAL", end_point_types[0]) mtls_auth_config = result["mutualTlsAuthentication"] - self.assertEqual(FILE_TO_S3_URI_MAP["MTLSCert.pem"]["uri"], mtls_auth_config["truststoreUri"]) + self.assertEqual(self.file_to_s3_uri_map["MTLSCert.pem"]["uri"], mtls_auth_config["truststoreUri"]) def test_custom_rest_api_domains_regional_ownership_verification(self): self.create_and_verify_stack("combination/api_with_custom_domains_regional_ownership_verification") diff --git a/integration/combination/test_function_with_file_system_config.py b/integration/combination/test_function_with_file_system_config.py index 50b5b8b20..e10cbbd1c 100644 --- a/integration/combination/test_function_with_file_system_config.py +++ b/integration/combination/test_function_with_file_system_config.py @@ -1,4 +1,5 @@ from unittest.case import skipIf +import pytest from integration.config.service_names import EFS from integration.helpers.base_test import BaseTest @@ -6,6 +7,17 @@ class 
TestFunctionWithFileSystemConfig(BaseTest): + @pytest.fixture(autouse=True) + def companion_stack_outputs(self, get_companion_stack_outputs): + self.companion_stack_outputs = get_companion_stack_outputs + @skipIf(current_region_does_not_support([EFS]), "EFS is not supported in this testing region") def test_function_with_efs_integration(self): - self.create_and_verify_stack("combination/function_with_file_system_config") + parameters = self.get_parameters(self.companion_stack_outputs) + self.create_and_verify_stack("combination/function_with_file_system_config", parameters) + + def get_parameters(self, dictionary): + parameters = [] + parameters.append(self.generate_parameter("PreCreatedSubnetOne", dictionary["PreCreatedSubnetOne"])) + parameters.append(self.generate_parameter("PreCreatedVpc", dictionary["PreCreatedVpc"])) + return parameters diff --git a/integration/combination/test_function_with_http_api.py b/integration/combination/test_function_with_http_api.py index 337edc429..faec5ba6f 100644 --- a/integration/combination/test_function_with_http_api.py +++ b/integration/combination/test_function_with_http_api.py @@ -1,17 +1,24 @@ +import logging from unittest.case import skipIf +import pytest + from integration.helpers.base_test import BaseTest from integration.helpers.resource import current_region_does_not_support from integration.config.service_names import HTTP_API +LOG = logging.getLogger(__name__) + @skipIf(current_region_does_not_support([HTTP_API]), "HttpApi is not supported in this testing region") class TestFunctionWithHttpApi(BaseTest): + @pytest.mark.flaky(reruns=5) def test_function_with_http_api(self): self.create_and_verify_stack("combination/function_with_http_api") stack_outputs = self.get_stack_outputs() base_url = stack_outputs["ApiUrl"] - self.verify_get_request_response(base_url + "some/path", 200) - self.verify_get_request_response(base_url + "something", 404) - self.verify_get_request_response(base_url + "another/endpoint", 404) + 
resource_type = "AWS::ApiGatewayV2::Api" + self.verify_get_request_response(base_url + "some/path", 200, resource_type) + self.verify_get_request_response(base_url + "something", 404, resource_type) + self.verify_get_request_response(base_url + "another/endpoint", 404, resource_type) diff --git a/integration/combination/test_function_with_implicit_http_api.py b/integration/combination/test_function_with_implicit_http_api.py index 5803bc9e6..fddbd14c9 100644 --- a/integration/combination/test_function_with_implicit_http_api.py +++ b/integration/combination/test_function_with_implicit_http_api.py @@ -12,6 +12,7 @@ def test_function_with_implicit_api(self): stack_outputs = self.get_stack_outputs() base_url = stack_outputs["ApiUrl"] - self.verify_get_request_response(base_url, 200) - self.verify_get_request_response(base_url + "something", 200) - self.verify_get_request_response(base_url + "another/endpoint", 200) + resource_type = "AWS::ApiGatewayV2::Api" + self.verify_get_request_response(base_url, 200, resource_type) + self.verify_get_request_response(base_url + "something", 200, resource_type) + self.verify_get_request_response(base_url + "another/endpoint", 200, resource_type) diff --git a/integration/combination/test_intrinsic_function_support.py b/integration/combination/test_intrinsic_function_support.py index 9de36703a..5b3460503 100644 --- a/integration/combination/test_intrinsic_function_support.py +++ b/integration/combination/test_intrinsic_function_support.py @@ -1,5 +1,5 @@ -from parameterized import parameterized from unittest.case import skipIf +import pytest from integration.helpers.base_test import BaseTest from integration.helpers.resource import current_region_does_not_support @@ -8,18 +8,24 @@ @skipIf(current_region_does_not_support([REST_API]), "Rest API is not supported in this testing region") class TestIntrinsicFunctionsSupport(BaseTest): + @pytest.fixture(autouse=True) + def companion_stack_outputs(self, get_companion_stack_outputs): + 
self.companion_stack_outputs = get_companion_stack_outputs - # test code definition uri object and serverless function properties support - @parameterized.expand( - [ - "combination/intrinsics_code_definition_uri", - "combination/intrinsics_serverless_function", - ] - ) - def test_common_support(self, file_name): + # test serverless function properties support + def test_serverless_function_property_support(self): # Just a simple deployment will validate that Code & Swagger files were accessible # Just a simple deployment will validate that all properties were resolved expected - self.create_and_verify_stack(file_name, self.get_default_test_template_parameters()) + parameters = self.get_parameters(self.companion_stack_outputs) + parameters.extend(self.get_default_test_template_parameters()) + self.create_and_verify_stack("combination/intrinsics_serverless_function", parameters) + + # test code definition uri object support + def test_definition_uri_support(self): + # Just a simple deployment will validate that Code & Swagger files were accessible + # Just a simple deployment will validate that all properties were resolved expected + parameters = self.get_default_test_template_parameters() + self.create_and_verify_stack("combination/intrinsics_code_definition_uri", parameters) def test_severless_api_properties_support(self): self.create_and_verify_stack( @@ -62,3 +68,9 @@ def test_severless_api_properties_support(self): self.assertEqual(tags["lambda:createdBy"], "SAM", "Expected 'SAM' tag value, but not found.") self.assertTrue("TagKey1" in tags) self.assertEqual(tags["TagKey1"], api_stage_name) + + def get_parameters(self, dictionary): + parameters = [] + parameters.append(self.generate_parameter("PreCreatedSubnetOne", dictionary["PreCreatedSubnetOne"])) + parameters.append(self.generate_parameter("PreCreatedVpc", dictionary["PreCreatedVpc"])) + return parameters diff --git a/integration/conftest.py b/integration/conftest.py index d0a2782ee..fb6cca5ae 100644 --- 
a/integration/conftest.py +++ b/integration/conftest.py @@ -6,10 +6,11 @@ from integration.helpers.base_test import S3_BUCKET_PREFIX from integration.helpers.client_provider import ClientProvider -from integration.helpers.deployer.exceptions.exceptions import ThrottlingError +from integration.helpers.deployer.exceptions.exceptions import S3DoesNotExistException, ThrottlingError from integration.helpers.deployer.utils.retry import retry_with_exponential_backoff_and_jitter from integration.helpers.stack import Stack from integration.helpers.yaml_utils import load_yaml +from integration.helpers.resource import read_test_config_file, write_test_config_file_to_json try: from pathlib import Path @@ -63,10 +64,57 @@ def setup_companion_stack_once(tmpdir_factory, get_prefix): cfn_client = ClientProvider().cfn_client output_dir = tmpdir_factory.mktemp("data") stack_name = get_prefix + COMPANION_STACK_NAME - if _stack_exists(stack_name): - return companion_stack = Stack(stack_name, companion_stack_tempalte_path, cfn_client, output_dir) - companion_stack.create() + companion_stack.create_or_update(_stack_exists(stack_name)) + + +@pytest.fixture() +def upload_resources(get_s3): + """ + Creates the bucket and uploads the files used by the tests to it + """ + s3_bucket = get_s3 + if not _s3_exists(s3_bucket): + raise S3DoesNotExistException(get_s3, "Check companion stack status") + code_dir = Path(__file__).resolve().parents[0].joinpath("resources").joinpath("code") + file_to_s3_uri_map = read_test_config_file("file_to_s3_map.json") + + if not file_to_s3_uri_map or not file_to_s3_uri_map.items(): + LOG.debug("No resources to upload") + return + + current_file_name = "" + + try: + s3_client = ClientProvider().s3_client + session = boto3.session.Session() + region = session.region_name + for file_name, file_info in file_to_s3_uri_map.items(): + current_file_name = file_name + code_path = str(Path(code_dir, file_name)) + LOG.debug("Uploading file %s to bucket %s", file_name, 
s3_bucket) + s3_client.upload_file(code_path, s3_bucket, file_name) + LOG.debug("File %s uploaded successfully to bucket %s", file_name, s3_bucket) + file_info["uri"] = get_s3_uri(file_name, file_info["type"], s3_bucket, region) + except ClientError as error: + LOG.error("Upload of file %s to bucket %s failed", current_file_name, s3_bucket, exc_info=error) + raise error + + write_test_config_file_to_json("file_to_s3_map_modified.json", file_to_s3_uri_map) + + +def get_s3_uri(file_name, uri_type, bucket, region): + if uri_type == "s3": + return "s3://{}/{}".format(bucket, file_name) + + if region == "us-east-1": + return "https://s3.amazonaws.com/{}/{}".format(bucket, file_name) + if region == "us-iso-east-1": + return "https://s3.us-iso-east-1.c2s.ic.gov/{}/{}".format(bucket, file_name) + if region == "us-isob-east-1": + return "https://s3.us-isob-east-1.sc2s.sgov.gov/{}/{}".format(bucket, file_name) + + return "https://s3-{}.amazonaws.com/{}/{}".format(region, bucket, file_name) @pytest.fixture() @@ -99,6 +147,12 @@ def get_companion_stack_outputs(get_prefix): return get_stack_outputs(companion_stack_description) +@pytest.fixture() +def get_s3(get_companion_stack_outputs): + s3_bucket = get_companion_stack_outputs.get("PreCreatedS3Bucket") + return str(s3_bucket) + + @pytest.fixture() def get_prefix(request): prefix = "" @@ -171,3 +225,15 @@ def _stack_exists(stack_name): raise ex return True + + +@retry_with_exponential_backoff_and_jitter(ThrottlingError, 5, 360) +def _s3_exists(s3_bucket): + s3 = boto3.resource("s3") + bucket = s3.Bucket(s3_bucket) + try: + s3.meta.client.head_bucket(Bucket=bucket.name) + except ClientError: + return False + + return True diff --git a/integration/helpers/base_test.py b/integration/helpers/base_test.py index 0a84c9b33..19dc86f12 100644 --- a/integration/helpers/base_test.py +++ b/integration/helpers/base_test.py @@ -15,6 +15,7 @@ from integration.helpers.resource import generate_suffix, create_bucket, verify_stack_resources from 
integration.helpers.s3_uploader import S3Uploader from integration.helpers.yaml_utils import dump_yaml, load_yaml +from integration.helpers.resource import read_test_config_file from samtranslator.yaml_helper import yaml_parse from tenacity import ( @@ -33,11 +34,9 @@ from unittest.case import TestCase import boto3 -from botocore.exceptions import ClientError from integration.helpers.deployer.deployer import Deployer from integration.helpers.template import transform_template -from integration.helpers.file_resources import FILE_TO_S3_URI_MAP, CODE_KEY_TO_FILE_MAP LOG = logging.getLogger(__name__) @@ -57,8 +56,12 @@ def prefix(self, get_prefix): def stage(self, get_stage): self.pipeline_stage = get_stage + @pytest.fixture(autouse=True) + def s3_bucket(self, get_s3): + self.s3_bucket_name = get_s3 + @classmethod - @pytest.mark.usefixtures("get_prefix", "get_stage", "check_internal", "parameter_values") + @pytest.mark.usefixtures("get_prefix", "get_stage", "check_internal", "parameter_values", "get_s3") def setUpClass(cls): cls.FUNCTION_OUTPUT = "hello" cls.tests_integ_dir = Path(__file__).resolve().parents[1] @@ -67,85 +70,19 @@ def setUpClass(cls): cls.output_dir = Path(cls.tests_integ_dir, "tmp" + "-" + generate_suffix()) cls.expected_dir = Path(cls.resources_dir, "expected") cls.code_dir = Path(cls.resources_dir, "code") - cls.s3_bucket_name = S3_BUCKET_PREFIX + generate_suffix() cls.session = boto3.session.Session() cls.my_region = cls.session.region_name cls.client_provider = ClientProvider() - cls.file_to_s3_uri_map = FILE_TO_S3_URI_MAP - cls.code_key_to_file = CODE_KEY_TO_FILE_MAP + cls.file_to_s3_uri_map = read_test_config_file("file_to_s3_map_modified.json") + cls.code_key_to_file = read_test_config_file("code_key_to_file_map.json") if not cls.output_dir.exists(): os.mkdir(str(cls.output_dir)) - cls._upload_resources(FILE_TO_S3_URI_MAP) - @classmethod def tearDownClass(cls): - cls._clean_bucket() shutil.rmtree(cls.output_dir) - @classmethod - def 
_clean_bucket(cls): - """ - Empties and deletes the bucket used for the tests - """ - s3 = boto3.resource("s3") - bucket = s3.Bucket(cls.s3_bucket_name) - object_summary_iterator = bucket.objects.all() - - for object_summary in object_summary_iterator: - try: - cls.client_provider.s3_client.delete_object(Key=object_summary.key, Bucket=cls.s3_bucket_name) - except ClientError as e: - LOG.error( - "Unable to delete object %s from bucket %s", object_summary.key, cls.s3_bucket_name, exc_info=e - ) - try: - cls.client_provider.s3_client.delete_bucket(Bucket=cls.s3_bucket_name) - except ClientError as e: - LOG.error("Unable to delete bucket %s", cls.s3_bucket_name, exc_info=e) - - @classmethod - def _upload_resources(cls, file_to_s3_uri_map): - """ - Creates the bucket and uploads the files used by the tests to it - """ - if not file_to_s3_uri_map or not file_to_s3_uri_map.items(): - LOG.debug("No resources to upload") - return - - create_bucket(cls.s3_bucket_name, region=cls.my_region) - - current_file_name = "" - - try: - for file_name, file_info in file_to_s3_uri_map.items(): - current_file_name = file_name - code_path = str(Path(cls.code_dir, file_name)) - LOG.debug("Uploading file %s to bucket %s", file_name, cls.s3_bucket_name) - s3_client = cls.client_provider.s3_client - s3_client.upload_file(code_path, cls.s3_bucket_name, file_name) - LOG.debug("File %s uploaded successfully to bucket %s", file_name, cls.s3_bucket_name) - file_info["uri"] = cls._get_s3_uri(file_name, file_info["type"]) - except ClientError as error: - LOG.error("Upload of file %s to bucket %s failed", current_file_name, cls.s3_bucket_name, exc_info=error) - cls._clean_bucket() - raise error - - @classmethod - def _get_s3_uri(cls, file_name, uri_type): - if uri_type == "s3": - return "s3://{}/{}".format(cls.s3_bucket_name, file_name) - - if cls.my_region == "us-east-1": - return "https://s3.amazonaws.com/{}/{}".format(cls.s3_bucket_name, file_name) - if cls.my_region == "us-iso-east-1": - return 
"https://s3.us-iso-east-1.c2s.ic.gov/{}/{}".format(cls.s3_bucket_name, file_name) - if cls.my_region == "us-isob-east-1": - return "https://s3.us-isob-east-1.sc2s.sgov.gov/{}/{}".format(cls.s3_bucket_name, file_name) - - return "https://s3-{}.amazonaws.com/{}/{}".format(cls.my_region, cls.s3_bucket_name, file_name) - def setUp(self): self.deployer = Deployer(self.client_provider.cfn_client) self.s3_uploader = S3Uploader(self.client_provider.s3_client, self.s3_bucket_name) diff --git a/integration/helpers/deployer/deployer.py b/integration/helpers/deployer/deployer.py index 63d16cacd..3dd3d3677 100644 --- a/integration/helpers/deployer/deployer.py +++ b/integration/helpers/deployer/deployer.py @@ -26,18 +26,18 @@ # - Moved DeployColor to colors.py # - Removed unnecessary functions from artifact_exporter import sys -import math from collections import OrderedDict import logging import time from datetime import datetime + from integration.helpers.resource import generate_suffix import botocore from integration.helpers.deployer.utils.colors import DeployColor from integration.helpers.deployer.exceptions import exceptions as deploy_exceptions -from integration.helpers.deployer.utils.retry import retry, retry_with_exponential_backoff_and_jitter +from integration.helpers.deployer.utils.retry import retry_with_exponential_backoff_and_jitter from integration.helpers.deployer.utils.table_print import ( pprint_column_names, pprint_columns, @@ -307,75 +307,6 @@ def get_last_event_time(self, stack_name): except KeyError: return time.time() - @pprint_column_names( - format_string=DESCRIBE_STACK_EVENTS_FORMAT_STRING, - format_kwargs=DESCRIBE_STACK_EVENTS_DEFAULT_ARGS, - table_header=DESCRIBE_STACK_EVENTS_TABLE_HEADER_NAME, - ) - def describe_stack_events(self, stack_name, time_stamp_marker, **kwargs): - """ - Calls CloudFormation to get current stack events - :param stack_name: Name or ID of the stack - :param time_stamp_marker: last event time on the stack to start streaming 
events from. - :return: - """ - - stack_change_in_progress = True - events = set() - retry_attempts = 0 - - while stack_change_in_progress and retry_attempts <= self.max_attempts: - try: - - # Only sleep if there have been no retry_attempts - time.sleep(self.client_sleep if retry_attempts == 0 else 0) - describe_stacks_resp = self._client.describe_stacks(StackName=stack_name) - paginator = self._client.get_paginator("describe_stack_events") - response_iterator = paginator.paginate(StackName=stack_name) - stack_status = describe_stacks_resp["Stacks"][0]["StackStatus"] - latest_time_stamp_marker = time_stamp_marker - for event_items in response_iterator: - for event in event_items["StackEvents"]: - if event["EventId"] not in events and utc_to_timestamp(event["Timestamp"]) > time_stamp_marker: - events.add(event["EventId"]) - latest_time_stamp_marker = max( - latest_time_stamp_marker, utc_to_timestamp(event["Timestamp"]) - ) - row_color = self.deploy_color.get_stack_events_status_color(status=event["ResourceStatus"]) - pprint_columns( - columns=[ - event["ResourceStatus"], - event["ResourceType"], - event["LogicalResourceId"], - event.get("ResourceStatusReason", "-"), - ], - width=kwargs["width"], - margin=kwargs["margin"], - format_string=DESCRIBE_STACK_EVENTS_FORMAT_STRING, - format_args=kwargs["format_args"], - columns_dict=DESCRIBE_STACK_EVENTS_DEFAULT_ARGS.copy(), - color=row_color, - ) - # Skip already shown old event entries - elif utc_to_timestamp(event["Timestamp"]) <= time_stamp_marker: - time_stamp_marker = latest_time_stamp_marker - break - else: # go to next loop if not break from inside loop - time_stamp_marker = latest_time_stamp_marker # update marker if all events are new - continue - break # reached here only if break from inner loop! 
- - if self._check_stack_complete(stack_status): - stack_change_in_progress = False - break - except botocore.exceptions.ClientError as ex: - retry_attempts = retry_attempts + 1 - if retry_attempts > self.max_attempts: - LOG.error("Describing stack events for %s failed: %s", stack_name, str(ex)) - return - # Sleep in exponential backoff mode - time.sleep(math.pow(self.backoff, retry_attempts)) - def _check_stack_complete(self, status): return "COMPLETE" in status and "CLEANUP" not in status @@ -386,8 +317,6 @@ def wait_for_execute(self, stack_name, changeset_type): ) sys.stdout.flush() - self.describe_stack_events(stack_name, self.get_last_event_time(stack_name)) - # Pick the right waiter if changeset_type == "CREATE": waiter = self._client.get_waiter("stack_create_complete") @@ -443,6 +372,8 @@ def create_and_wait_for_changeset( self.wait_for_changeset(result["Id"], stack_name) self.describe_changeset(result["Id"], stack_name) return result + except deploy_exceptions.ChangeEmptyError as ex: + return {} except botocore.exceptions.ClientError as ex: raise deploy_exceptions.DeployFailedError(stack_name=stack_name, msg=str(ex)) diff --git a/integration/helpers/deployer/exceptions/exceptions.py b/integration/helpers/deployer/exceptions/exceptions.py index 582ef1f74..6522a1fa4 100644 --- a/integration/helpers/deployer/exceptions/exceptions.py +++ b/integration/helpers/deployer/exceptions/exceptions.py @@ -70,6 +70,16 @@ def __init__(self, stack_name, msg): self.stack_name = stack_name self.msg = msg - message_fmt = "Throttling Issue occurred: {stack_name}, {msg}" + message_fmt = "Throttling issue occurred: {stack_name}, {msg}" super(ThrottlingError, self).__init__(message=message_fmt.format(stack_name=self.stack_name, msg=msg)) + + +class S3DoesNotExistException(UserException): + def __init__(self, bucket_name, msg): + self.bucket_name = bucket_name + self.msg = msg + + message_fmt = "Companion S3 bucket used for resource upload does not exist: {bucket_name}, {msg}" + + 
super(S3DoesNotExistException, self).__init__(message=message_fmt.format(bucket_name=self.bucket_name, msg=msg)) diff --git a/integration/helpers/resource.py b/integration/helpers/resource.py index f70f56462..66c77bfad 100644 --- a/integration/helpers/resource.py +++ b/integration/helpers/resource.py @@ -120,15 +120,17 @@ def create_bucket(bucket_name, region): NoRegionError If region is not specified """ + s3 = boto3.resource("s3") if region is None: raise NoRegionError() + if region == "us-east-1": - s3_client = boto3.client("s3") - s3_client.create_bucket(Bucket=bucket_name) + bucket = s3.create_bucket(Bucket=bucket_name) else: - s3_client = boto3.client("s3", region_name=region) location = {"LocationConstraint": region} - s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration=location) + bucket = s3.create_bucket(Bucket=bucket_name, CreateBucketConfiguration=location) + + bucket.wait_until_exists() def _get_region(): @@ -138,8 +140,8 @@ def _get_region(): return region -def _read_test_config_file(filename): - """Reads test inclusion or exclusion file and returns the contents""" +def read_test_config_file(filename): + """Reads test config file and returns the contents""" tests_integ_dir = Path(__file__).resolve().parents[1] test_config_file_path = Path(tests_integ_dir, "config", filename) if not test_config_file_path.is_file(): @@ -148,6 +150,14 @@ return test_config +def write_test_config_file_to_json(filename, input): + """Writes the given contents to a test config file as JSON""" + tests_integ_dir = Path(__file__).resolve().parents[1] + test_config_file_path = Path(tests_integ_dir, "config", filename) + with open(test_config_file_path, "w") as f: + json.dump(input, f) + + def current_region_does_not_support(services): """ Decide if a test should be skipped in the current testing region with the specific resources @@ -164,7 +174,7 @@ """ region = _get_region() -
region_exclude_services = _read_test_config_file("region_service_exclusion.yaml") + region_exclude_services = read_test_config_file("region_service_exclusion.yaml") if region not in region_exclude_services.get("regions", {}): return False @@ -179,7 +189,7 @@ def current_region_not_included(services): Decides which tests should only be run in certain regions """ region = _get_region() - region_include_services = _read_test_config_file("region_service_inclusion.yaml") + region_include_services = read_test_config_file("region_service_inclusion.yaml") if region not in region_include_services.get("regions", {}): return True diff --git a/integration/helpers/stack.py b/integration/helpers/stack.py index 1ffafa190..82f28110e 100644 --- a/integration/helpers/stack.py +++ b/integration/helpers/stack.py @@ -22,10 +22,10 @@ def __init__(self, stack_name, template_path, cfn_client, output_dir): self.stack_description = None self.stack_resources = None - def create(self): + def create_or_update(self, update): output_template_path = self._generate_output_file_path(self.template_path, self.output_dir) transform_template(self.template_path, output_template_path) - self._deploy_stack(output_template_path) + self._deploy_stack(output_template_path, update) def delete(self): self.cfn_client.delete_stack(StackName=self.stack_name) @@ -36,7 +36,7 @@ def get_stack_outputs(self): output_list = self.stack_description["Stacks"][0]["Outputs"] return {output["OutputKey"]: output["OutputValue"] for output in output_list} - def _deploy_stack(self, output_file_path, parameters=None): + def _deploy_stack(self, output_file_path, update, parameters=None): """ Deploys the current cloud formation stack """ @@ -50,10 +50,11 @@ def _deploy_stack(self, output_file_path, parameters=None): notification_arns=[], s3_uploader=None, tags=[], - changeset_type="CREATE", + changeset_type="UPDATE" if update else "CREATE", ) - self.deployer.execute_changeset(result["Id"], self.stack_name) - 
self.deployer.wait_for_execute(self.stack_name, "CREATE") + if result: + self.deployer.execute_changeset(result["Id"], self.stack_name) + self.deployer.wait_for_execute(self.stack_name, "UPDATE" if update else "CREATE") self._get_stack_description() self.stack_resources = self.cfn_client.list_stack_resources(StackName=self.stack_name) diff --git a/integration/resources/expected/combination/function_with_file_system_config.json b/integration/resources/expected/combination/function_with_file_system_config.json index 5f6e3fe3d..6d5b273af 100644 --- a/integration/resources/expected/combination/function_with_file_system_config.json +++ b/integration/resources/expected/combination/function_with_file_system_config.json @@ -3,8 +3,6 @@ { "LogicalResourceId":"MountTarget", "ResourceType":"AWS::EFS::MountTarget" }, { "LogicalResourceId":"AccessPoint", "ResourceType":"AWS::EFS::AccessPoint" }, { "LogicalResourceId":"LambdaFunctionWithEfs", "ResourceType":"AWS::Lambda::Function" }, - { "LogicalResourceId":"MyVpc", "ResourceType":"AWS::EC2::VPC" }, { "LogicalResourceId":"MySecurityGroup", "ResourceType":"AWS::EC2::SecurityGroup" }, - { "LogicalResourceId":"MySubnet", "ResourceType":"AWS::EC2::Subnet" }, { "LogicalResourceId":"LambdaFunctionWithEfsRole", "ResourceType":"AWS::IAM::Role" } ] \ No newline at end of file diff --git a/integration/resources/expected/combination/intrinsics_serverless_function.json b/integration/resources/expected/combination/intrinsics_serverless_function.json index 22203a11f..ef8334560 100644 --- a/integration/resources/expected/combination/intrinsics_serverless_function.json +++ b/integration/resources/expected/combination/intrinsics_serverless_function.json @@ -1,7 +1,5 @@ [ { "LogicalResourceId":"MyFunction", "ResourceType":"AWS::Lambda::Function" }, { "LogicalResourceId":"MyNewRole", "ResourceType":"AWS::IAM::Role" }, - { "LogicalResourceId":"MyVpc", "ResourceType":"AWS::EC2::VPC" }, - { "LogicalResourceId":"MySecurityGroup", 
"ResourceType":"AWS::EC2::SecurityGroup" }, - { "LogicalResourceId":"MySubnet", "ResourceType":"AWS::EC2::Subnet" } + { "LogicalResourceId":"MySecurityGroup", "ResourceType":"AWS::EC2::SecurityGroup" } ] \ No newline at end of file diff --git a/integration/resources/templates/combination/function_with_file_system_config.yaml b/integration/resources/templates/combination/function_with_file_system_config.yaml index c1d257862..8529d53ad 100644 --- a/integration/resources/templates/combination/function_with_file_system_config.yaml +++ b/integration/resources/templates/combination/function_with_file_system_config.yaml @@ -1,5 +1,11 @@ Description: SAM + Lambda + EFS +Parameters: + PreCreatedSubnetOne: + Type: String + PreCreatedVpc: + Type: String + Resources: EfsFileSystem: Type: AWS::EFS::FileSystem @@ -10,7 +16,7 @@ Resources: FileSystemId: Ref: EfsFileSystem SubnetId: - Ref: MySubnet + Ref: PreCreatedSubnetOne SecurityGroups: - Fn::GetAtt: MySecurityGroup.GroupId @@ -45,27 +51,15 @@ Resources: Fn::GetAtt: MySecurityGroup.GroupId SubnetIds: - - Ref: MySubnet + Ref: PreCreatedSubnetOne FileSystemConfigs: - Arn: Fn::GetAtt: AccessPoint.Arn LocalMountPath: /mnt/EFS - MyVpc: - Type: "AWS::EC2::VPC" - Properties: - CidrBlock: "10.0.0.0/16" - MySecurityGroup: Type: "AWS::EC2::SecurityGroup" Properties: GroupDescription: "my test group" VpcId: - Ref: MyVpc - - MySubnet: - Type: "AWS::EC2::Subnet" - Properties: - VpcId: - Ref: MyVpc - CidrBlock: "10.0.0.0/24" + Ref: PreCreatedVpc diff --git a/integration/resources/templates/combination/intrinsics_serverless_function.yaml b/integration/resources/templates/combination/intrinsics_serverless_function.yaml index 2ddb10d4f..703242bcc 100644 --- a/integration/resources/templates/combination/intrinsics_serverless_function.yaml +++ b/integration/resources/templates/combination/intrinsics_serverless_function.yaml @@ -14,6 +14,10 @@ Parameters: AutoPublishSha: Type: String Default: AnyRandomStringWillActuallyDo + PreCreatedSubnetOne: + 
Type: String + PreCreatedVpc: + Type: String Conditions: TrueCondition: @@ -68,7 +72,7 @@ Resources: SecurityGroupIds: - "Fn::GetAtt": ["MySecurityGroup", "GroupId"] SubnetIds: - - Ref: "MySubnet" + - Ref: PreCreatedSubnetOne # Additional resources to reference inside the Function resource MyNewRole: @@ -87,25 +91,12 @@ Resources: Service: - lambda.amazonaws.com - - MyVpc: - Type: "AWS::EC2::VPC" - Properties: - CidrBlock: "10.0.0.0/16" - MySecurityGroup: Type: "AWS::EC2::SecurityGroup" Properties: GroupDescription: "my test group" VpcId: - Ref: MyVpc - - MySubnet: - Type: "AWS::EC2::Subnet" - Properties: - VpcId: - Ref: MyVpc - CidrBlock: "10.0.0.0/24" + Ref: PreCreatedVpc # False condition, shouldn't be created MyFunctionFalseCondition: diff --git a/integration/setup/companion-stack.yaml b/integration/setup/companion-stack.yaml index 35521d7f2..c1be2d6f0 100644 --- a/integration/setup/companion-stack.yaml +++ b/integration/setup/companion-stack.yaml @@ -37,6 +37,12 @@ Resources: InternetGatewayId: Ref: PreCreatedInternetGateway + PreCreatedS3Bucket: + Type: AWS::S3::Bucket + DeletionPolicy: Delete + Properties: + BucketName: sam-integ-bucket-companion-bucket + Outputs: PreCreatedVpc: Description: "Pre-created VPC that can be used inside other tests" @@ -57,4 +63,8 @@ Outputs: PreCreatedAttachGateway: Description: "Pre-created Attach Gateway that can be used inside other tests" Value: - Ref: PreCreatedAttachGateway \ No newline at end of file + Ref: PreCreatedAttachGateway + PreCreatedS3Bucket: + Description: "Pre-created S3 Bucket that can be used inside other tests" + Value: + Ref: PreCreatedS3Bucket \ No newline at end of file diff --git a/integration/setup/test_setup_teardown.py b/integration/setup/test_setup_teardown.py index 49ba6aa96..3667a7b0d 100644 --- a/integration/setup/test_setup_teardown.py +++ b/integration/setup/test_setup_teardown.py @@ -1,11 +1,21 @@ import pytest +from integration.helpers.resource import read_test_config_file @pytest.mark.setup 
-def test_setup(setup_companion_stack_once): - assert True +def test_setup(setup_companion_stack_once, upload_resources, get_s3): + assert get_s3 == "sam-integ-bucket-companion-bucket" + assert s3_upload_successful() @pytest.mark.teardown def test_teardown(delete_companion_stack_once): assert True + + +def s3_upload_successful(): + modified_map = read_test_config_file("file_to_s3_map_modified.json") + for _, file_info in modified_map.items(): + if not file_info["uri"]: + return False + return True diff --git a/integration/single/test_basic_api.py b/integration/single/test_basic_api.py index 25c8ebfcd..17f80c0a4 100644 --- a/integration/single/test_basic_api.py +++ b/integration/single/test_basic_api.py @@ -1,11 +1,16 @@ -import time +import logging from unittest.case import skipIf +from tenacity import stop_after_attempt, wait_exponential, retry_if_exception_type, after_log, wait_random + from integration.helpers.base_test import BaseTest +from integration.helpers.exception import StatusCodeError from integration.helpers.resource import current_region_does_not_support from integration.config.service_names import MODE, REST_API +LOG = logging.getLogger(__name__) + @skipIf(current_region_does_not_support([REST_API]), "Rest API is not supported in this testing region") class TestBasicApi(BaseTest): @@ -40,22 +45,22 @@ def test_basic_api_with_mode(self): stack_output = self.get_stack_outputs() api_endpoint = stack_output.get("ApiEndpoint") - response = BaseTest.do_get_request_with_logging(f"{api_endpoint}/get") - self.assertEqual(response.status_code, 200) + + self.verify_get_request_response(f"{api_endpoint}/get", 200, "AWS::ApiGateway::RestApi") # Removes get from the API self.update_and_verify_stack(file_path="single/basic_api_with_mode_update") - response = BaseTest.do_get_request_with_logging(f"{api_endpoint}/get") + # API Gateway by default returns 403 if a path do not exist - retries = 20 - while retries > 0: - retries -= 1 - response = 
BaseTest.do_get_request_with_logging(f"{api_endpoint}/get") - if response.status_code != 500: - break - time.sleep(5) - - self.assertEqual(response.status_code, 403) + self.verify_get_request_response.retry_with( + stop=stop_after_attempt(20), + wait=wait_exponential(multiplier=1, min=4, max=10) + wait_random(0, 1), + retry=retry_if_exception_type(StatusCodeError), + after=after_log(LOG, logging.WARNING), + reraise=True, + )(self, f"{api_endpoint}/get", 403, "AWS::ApiGateway::RestApi") + + LOG.log(msg=f"retry times {self.verify_get_request_response.retry.statistics}", level=logging.WARNING) def test_basic_api_inline_openapi(self): """ diff --git a/integration/single/test_basic_function.py b/integration/single/test_basic_function.py index 112e2043f..a80226c0f 100644 --- a/integration/single/test_basic_function.py +++ b/integration/single/test_basic_function.py @@ -1,11 +1,15 @@ +import logging from unittest.case import skipIf -from integration.config.service_names import KMS, XRAY, ARM, CODE_DEPLOY, HTTP_API +import pytest +from integration.config.service_names import KMS, XRAY, ARM, CODE_DEPLOY, HTTP_API from integration.helpers.resource import current_region_does_not_support from parameterized import parameterized from integration.helpers.base_test import BaseTest +LOG = logging.getLogger(__name__) + class TestBasicFunction(BaseTest): """ @@ -36,6 +40,7 @@ def test_basic_function(self, file_name): "single/function_alias_with_http_api_events", ] ) + @pytest.mark.flaky(reruns=5) @skipIf(current_region_does_not_support([HTTP_API]), "HTTP API is not supported in this testing region") def test_function_with_http_api_events(self, file_name): self.create_and_verify_stack(file_name) From 69b36d6a7d4a713bfdde25ae792e2263e98c4217 Mon Sep 17 00:00:00 2001 From: Daniel Mil Date: Wed, 13 Jul 2022 12:07:16 -0700 Subject: [PATCH 2/8] Add make target for creating companion stack, add config files --- INTEGRATION_TESTS.md | 8 ++++++++ Makefile | 4 ++++ 
.../combination/test_function_with_http_api.py | 7 +++---- integration/config/code_key_to_file_map.json | 8 ++++++++ integration/config/file_to_s3_map.json | 11 +++++++++++ integration/setup/companion-stack.yaml | 2 -- integration/setup/test_setup_teardown.py | 1 - pytest.ini | 2 -- 8 files changed, 34 insertions(+), 9 deletions(-) create mode 100644 integration/config/code_key_to_file_map.json create mode 100644 integration/config/file_to_s3_map.json diff --git a/INTEGRATION_TESTS.md b/INTEGRATION_TESTS.md index 6a5f4e487..6a75662a9 100644 --- a/INTEGRATION_TESTS.md +++ b/INTEGRATION_TESTS.md @@ -34,6 +34,14 @@ If you haven't done so already, run the following command in a terminal at the r make init ``` +### Setting up a companion stack + +To run the tests, a companion stack first needs to be created. This stack houses some resources that are required by the tests, such as an S3 bucket. + +``` +make prepare-companion-stack +``` + ### Running all the tests From the root of the repository, run: diff --git a/Makefile b/Makefile index 4692a5e59..1fc25a6b5 100755 --- a/Makefile +++ b/Makefile @@ -27,6 +27,9 @@ lint: # Linter performs static analysis to catch latent bugs pylint --rcfile .pylintrc samtranslator +prepare-companion-stack: + pytest -v integration/setup -m setup + # Command to run everytime you make changes to verify everything works dev: test @@ -43,5 +46,6 @@ TARGETS integ-test Run the Integration tests. dev Run all development tests after a change. pr Perform all checks before submitting a Pull Request. + prepare-companion-stack Create or update the companion stack for running integration tests. 
endef diff --git a/integration/combination/test_function_with_http_api.py b/integration/combination/test_function_with_http_api.py index faec5ba6f..a2741fd46 100644 --- a/integration/combination/test_function_with_http_api.py +++ b/integration/combination/test_function_with_http_api.py @@ -18,7 +18,6 @@ def test_function_with_http_api(self): stack_outputs = self.get_stack_outputs() base_url = stack_outputs["ApiUrl"] - resource_type = "AWS::ApiGatewayV2::Api" - self.verify_get_request_response(base_url + "some/path", 200, resource_type) - self.verify_get_request_response(base_url + "something", 404, resource_type) - self.verify_get_request_response(base_url + "another/endpoint", 404, resource_type) + self.verify_get_request_response(base_url + "some/path", 200) + self.verify_get_request_response(base_url + "something", 404) + self.verify_get_request_response(base_url + "another/endpoint", 404) diff --git a/integration/config/code_key_to_file_map.json b/integration/config/code_key_to_file_map.json new file mode 100644 index 000000000..75ab8c0d0 --- /dev/null +++ b/integration/config/code_key_to_file_map.json @@ -0,0 +1,8 @@ +{ + "codeuri": "code.zip", + "contenturi": "layer1.zip", + "definitionuri": "swagger1.json", + "templateurl": "template.yaml", + "binaryMediaCodeUri": "binary-media.zip", + "mtlsuri": "MTLSCert.pem" +} \ No newline at end of file diff --git a/integration/config/file_to_s3_map.json b/integration/config/file_to_s3_map.json new file mode 100644 index 000000000..c8096eefc --- /dev/null +++ b/integration/config/file_to_s3_map.json @@ -0,0 +1,11 @@ +{ + "code.zip": {"type": "s3", "uri": ""}, + "code2.zip": {"type": "s3", "uri": ""}, + "layer1.zip": {"type": "s3", "uri": ""}, + "swagger1.json": {"type": "s3", "uri": ""}, + "swagger2.json": {"type": "s3", "uri": ""}, + "binary-media.zip": {"type": "s3", "uri": ""}, + "template.yaml": {"type": "http", "uri": ""}, + "MTLSCert.pem": {"type": "s3", "uri": ""}, + "MTLSCert-Updated.pem": {"type": "s3", "uri": 
""} +} \ No newline at end of file diff --git a/integration/setup/companion-stack.yaml b/integration/setup/companion-stack.yaml index c1be2d6f0..e33fc1d7d 100644 --- a/integration/setup/companion-stack.yaml +++ b/integration/setup/companion-stack.yaml @@ -40,8 +40,6 @@ Resources: PreCreatedS3Bucket: Type: AWS::S3::Bucket DeletionPolicy: Delete - Properties: - BucketName: sam-integ-bucket-companion-bucket Outputs: PreCreatedVpc: diff --git a/integration/setup/test_setup_teardown.py b/integration/setup/test_setup_teardown.py index 3667a7b0d..35d2d1be7 100644 --- a/integration/setup/test_setup_teardown.py +++ b/integration/setup/test_setup_teardown.py @@ -4,7 +4,6 @@ @pytest.mark.setup def test_setup(setup_companion_stack_once, upload_resources, get_s3): - assert get_s3 == "sam-integ-bucket-companion-bucket" assert s3_upload_successful() diff --git a/pytest.ini b/pytest.ini index d567f28ac..1f21a32e8 100644 --- a/pytest.ini +++ b/pytest.ini @@ -3,8 +3,6 @@ # NOTE: If debug breakpoints aren't working, comment out the code coverage line below addopts = --cov samtranslator --cov-report term-missing --cov-fail-under 95 testpaths = tests -env = - AWS_DEFAULT_REGION = ap-southeast-1 markers = slow: marks tests as slow (deselect with '-m "not slow"') log_cli = 1 From a410456f9aa5a21687aeb12e6632a1b1752ee9e3 Mon Sep 17 00:00:00 2001 From: Daniel Mil Date: Wed, 13 Jul 2022 15:16:27 -0700 Subject: [PATCH 3/8] Fix cors tests --- .../combination/test_function_with_implicit_http_api.py | 7 +++---- integration/helpers/base_test.py | 2 +- pytest.ini | 2 ++ 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/integration/combination/test_function_with_implicit_http_api.py b/integration/combination/test_function_with_implicit_http_api.py index fddbd14c9..5803bc9e6 100644 --- a/integration/combination/test_function_with_implicit_http_api.py +++ b/integration/combination/test_function_with_implicit_http_api.py @@ -12,7 +12,6 @@ def test_function_with_implicit_api(self): 
stack_outputs = self.get_stack_outputs() base_url = stack_outputs["ApiUrl"] - resource_type = "AWS::ApiGatewayV2::Api" - self.verify_get_request_response(base_url, 200, resource_type) - self.verify_get_request_response(base_url + "something", 200, resource_type) - self.verify_get_request_response(base_url + "another/endpoint", 200, resource_type) + self.verify_get_request_response(base_url, 200) + self.verify_get_request_response(base_url + "something", 200) + self.verify_get_request_response(base_url + "another/endpoint", 200) diff --git a/integration/helpers/base_test.py b/integration/helpers/base_test.py index 19dc86f12..3b0e52aaf 100644 --- a/integration/helpers/base_test.py +++ b/integration/helpers/base_test.py @@ -570,7 +570,7 @@ def do_options_request_with_logging(url, headers=None): headers : dict headers to use in request """ - response = requests.options(url, headers=headers) if headers else requests.get(url) + response = requests.options(url, headers=headers) if headers else requests.options(url) amazon_headers = RequestUtils(response).get_amazon_headers() REQUEST_LOGGER.info("Request made to " + url, extra={"status": response.status_code, "headers": amazon_headers}) return response diff --git a/pytest.ini b/pytest.ini index 1f21a32e8..d567f28ac 100644 --- a/pytest.ini +++ b/pytest.ini @@ -3,6 +3,8 @@ # NOTE: If debug breakpoints aren't working, comment out the code coverage line below addopts = --cov samtranslator --cov-report term-missing --cov-fail-under 95 testpaths = tests +env = + AWS_DEFAULT_REGION = ap-southeast-1 markers = slow: marks tests as slow (deselect with '-m "not slow"') log_cli = 1 From 299dcb09ee36a801f6f1ff3e777650e666370f0a Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Thu, 14 Jul 2022 11:19:53 +0000 Subject: [PATCH 4/8] add --no-cov to companion setup --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 1fc25a6b5..78966d3fc 
100755 --- a/Makefile +++ b/Makefile @@ -28,7 +28,7 @@ lint: pylint --rcfile .pylintrc samtranslator prepare-companion-stack: - pytest -v integration/setup -m setup + pytest -v --no-cov integration/setup -m setup # Command to run everytime you make changes to verify everything works dev: test From eaaf969d560eec0cb8e149aaa14ba018e91e4b7c Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Thu, 14 Jul 2022 15:53:05 +0300 Subject: [PATCH 5/8] fix wrong test setups --- integration/single/test_basic_api.py | 2 +- integration/single/test_function_with_http_api_and_auth.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/integration/single/test_basic_api.py b/integration/single/test_basic_api.py index 17f80c0a4..7fd731cde 100644 --- a/integration/single/test_basic_api.py +++ b/integration/single/test_basic_api.py @@ -46,7 +46,7 @@ def test_basic_api_with_mode(self): stack_output = self.get_stack_outputs() api_endpoint = stack_output.get("ApiEndpoint") - self.verify_get_request_response(f"{api_endpoint}/get", 200, "AWS::ApiGateway::RestApi") + self.verify_get_request_response(f"{api_endpoint}/get", 200) # Removes get from the API self.update_and_verify_stack(file_path="single/basic_api_with_mode_update") diff --git a/integration/single/test_function_with_http_api_and_auth.py b/integration/single/test_function_with_http_api_and_auth.py index 02cd368bb..22b9baef5 100644 --- a/integration/single/test_function_with_http_api_and_auth.py +++ b/integration/single/test_function_with_http_api_and_auth.py @@ -11,7 +11,7 @@ def test_function_with_http_api_and_auth(self): # We are not testing that IAM auth works here, we are simply testing if it was applied. 
IAM_AUTH_OUTPUT = '{"message":"Forbidden"}' - self.create_and_verify_stack("function_with_http_api_events_and_auth") + self.create_and_verify_stack("single/function_with_http_api_events_and_auth") implicitEndpoint = self.get_api_v2_endpoint("ServerlessHttpApi") self.assertEqual( From 60637bc830c79a3f49864105366f0769c2102065 Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Thu, 14 Jul 2022 16:23:07 +0300 Subject: [PATCH 6/8] fix wrong test setups --- integration/single/test_basic_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integration/single/test_basic_api.py b/integration/single/test_basic_api.py index 7fd731cde..72b2f1394 100644 --- a/integration/single/test_basic_api.py +++ b/integration/single/test_basic_api.py @@ -58,7 +58,7 @@ def test_basic_api_with_mode(self): retry=retry_if_exception_type(StatusCodeError), after=after_log(LOG, logging.WARNING), reraise=True, - )(self, f"{api_endpoint}/get", 403, "AWS::ApiGateway::RestApi") + )(self, f"{api_endpoint}/get", 403) LOG.log(msg=f"retry times {self.verify_get_request_response.retry.statistics}", level=logging.WARNING) From 4b987dc92f0ecd23a40b114dbc9f79eb1cd40b72 Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Thu, 14 Jul 2022 21:00:21 +0300 Subject: [PATCH 7/8] use parameters when doing local transform --- integration/helpers/base_test.py | 6 +++--- integration/helpers/template.py | 7 +++++-- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/integration/helpers/base_test.py b/integration/helpers/base_test.py index 3b0e52aaf..1a5fb4875 100644 --- a/integration/helpers/base_test.py +++ b/integration/helpers/base_test.py @@ -165,7 +165,7 @@ def update_stack(self, parameters=None, file_path=None): self._fill_template(folder, file_name) - self.transform_template() + self.transform_template(parameters) self._update_stack(parameters) def update_and_verify_stack(self, 
parameters=None, file_path=None): @@ -194,9 +194,9 @@ def generate_out_put_file_path(self, folder_name, file_name): Path(self.output_dir, "cfn_" + folder_name + "_" + file_name + generate_suffix() + ".yaml") ) - def transform_template(self): + def transform_template(self, parameters=None): if not self.pipeline_stage: - transform_template(self.sub_input_file_path, self.output_file_path) + transform_template(self.sub_input_file_path, self.output_file_path, parameters) else: transform_name = "AWS::Serverless-2016-10-31" if self.pipeline_stage == "beta": diff --git a/integration/helpers/template.py b/integration/helpers/template.py index f75bc4c56..8cdd12395 100644 --- a/integration/helpers/template.py +++ b/integration/helpers/template.py @@ -10,7 +10,7 @@ from samtranslator.yaml_helper import yaml_parse -def transform_template(sam_template_path, cfn_output_path): +def transform_template(sam_template_path, cfn_output_path, parameters=None): """ Locally transforms a SAM template to a Cloud Formation template @@ -20,7 +20,10 @@ def transform_template(sam_template_path, cfn_output_path): SAM template input path cfn_output_path : Path Cloud formation template output path + parameters : Dict + Template parameters that will be used during transform """ + parameters = parameters or {} LOG = logging.getLogger(__name__) iam_client = boto3.client("iam") @@ -28,7 +31,7 @@ def transform_template(sam_template_path, cfn_output_path): sam_template = yaml_parse(f) try: - cloud_formation_template = transform(sam_template, {}, ManagedPolicyLoader(iam_client)) + cloud_formation_template = transform(sam_template, parameters, ManagedPolicyLoader(iam_client)) cloud_formation_template_prettified = json.dumps(cloud_formation_template, indent=2) with open(cfn_output_path, "w") as f: From f91bf85f86219faeaef3f60205d161d0409403c5 Mon Sep 17 00:00:00 2001 From: Mehmet Nuri Deveci <5735811+mndeveci@users.noreply.github.com> Date: Thu, 14 Jul 2022 21:03:12 +0300 Subject: [PATCH 8/8] Revert "use 
parameters when doing local transform" This reverts commit 4b987dc92f0ecd23a40b114dbc9f79eb1cd40b72. --- integration/helpers/base_test.py | 6 +++--- integration/helpers/template.py | 7 ++----- 2 files changed, 5 insertions(+), 8 deletions(-) diff --git a/integration/helpers/base_test.py b/integration/helpers/base_test.py index 1a5fb4875..3b0e52aaf 100644 --- a/integration/helpers/base_test.py +++ b/integration/helpers/base_test.py @@ -165,7 +165,7 @@ def update_stack(self, parameters=None, file_path=None): self._fill_template(folder, file_name) - self.transform_template(parameters) + self.transform_template() self._update_stack(parameters) def update_and_verify_stack(self, parameters=None, file_path=None): @@ -194,9 +194,9 @@ def generate_out_put_file_path(self, folder_name, file_name): Path(self.output_dir, "cfn_" + folder_name + "_" + file_name + generate_suffix() + ".yaml") ) - def transform_template(self, parameters=None): + def transform_template(self): if not self.pipeline_stage: - transform_template(self.sub_input_file_path, self.output_file_path, parameters) + transform_template(self.sub_input_file_path, self.output_file_path) else: transform_name = "AWS::Serverless-2016-10-31" if self.pipeline_stage == "beta": diff --git a/integration/helpers/template.py b/integration/helpers/template.py index 8cdd12395..f75bc4c56 100644 --- a/integration/helpers/template.py +++ b/integration/helpers/template.py @@ -10,7 +10,7 @@ from samtranslator.yaml_helper import yaml_parse -def transform_template(sam_template_path, cfn_output_path, parameters=None): +def transform_template(sam_template_path, cfn_output_path): """ Locally transforms a SAM template to a Cloud Formation template @@ -20,10 +20,7 @@ def transform_template(sam_template_path, cfn_output_path, parameters=None): SAM template input path cfn_output_path : Path Cloud formation template output path - parameters : Dict - Template parameters that will be used during transform """ - parameters = parameters or {} LOG 
= logging.getLogger(__name__) iam_client = boto3.client("iam") @@ -31,7 +28,7 @@ def transform_template(sam_template_path, cfn_output_path, parameters=None): sam_template = yaml_parse(f) try: - cloud_formation_template = transform(sam_template, parameters, ManagedPolicyLoader(iam_client)) + cloud_formation_template = transform(sam_template, {}, ManagedPolicyLoader(iam_client)) cloud_formation_template_prettified = json.dumps(cloud_formation_template, indent=2) with open(cfn_output_path, "w") as f: