
Commit 2cb4c40

Michal Ploski committed: Fix errors across different Python runtimes
1 parent: 7d34af1

File tree: 4 files changed (+26, -25 lines)


.github/workflows/run-e2e-tests.yml (1 addition, 7 deletions)

@@ -6,19 +6,13 @@ env:
   E2E_TESTS_PATH: tests/e2e/
 jobs:
   run:
-    #########################
-    # Force Github action to run only a single job at a time (based on the group name)
-    # This is to prevent "race-condition" in building e2e tests infrastructure
-    #########################
-    concurrency:
-      group: e2e-tests
     runs-on: ubuntu-latest
     permissions:
       id-token: write # needed to interact with GitHub's OIDC Token endpoint.
       contents: read
     strategy:
       matrix:
-        version: ["3.8", "3.9", "3.10"]
+        version: ["3.6", "3.7", "3.8", "3.9"]
     steps:
       - name: "Checkout"
         uses: actions/checkout@v3

tests/e2e/conftest.py (11 additions, 3 deletions)

@@ -1,18 +1,26 @@
 import datetime
+import sys
 import uuid
-from typing import Generator, TypedDict
+
+# We only need typing_extensions for python versions <3.8
+if sys.version_info >= (3, 8):
+    from typing import TypedDict
+else:
+    from typing_extensions import TypedDict
+
+from typing import Dict, Generator

 import pytest
 from e2e.utils import helpers, infrastructure


 class LambdaConfig(TypedDict):
     parameters: dict
-    environment_variables: dict[str, str]
+    environment_variables: Dict[str, str]


 class LambdaExecution(TypedDict):
-    arns: dict[str, str]
+    arns: Dict[str, str]
     execution_time: datetime.datetime
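
The version-guarded import above is the usual way to keep TypedDict working on interpreters older than 3.8, which the widened test matrix now includes. A minimal standalone sketch of the same pattern, with an illustrative class name that is not taken from the repository:

    import sys

    # TypedDict is only part of the standard typing module from Python 3.8 onwards;
    # the older 3.6/3.7 runtimes in the matrix get it from typing_extensions instead.
    if sys.version_info >= (3, 8):
        from typing import TypedDict
    else:
        from typing_extensions import TypedDict

    from typing import Dict


    class ExampleConfig(TypedDict):  # illustrative name, not from the repository
        # typing.Dict[str, str] evaluates on every matrix runtime; the builtin
        # dict[str, str] form is only subscriptable on Python 3.9+.
        environment_variables: Dict[str, str]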

tests/e2e/logger/test_logger.py (0 additions, 2 deletions)

@@ -1,5 +1,3 @@
-from typing import TypedDict
-
 import boto3
 import pytest

tests/e2e/utils/infrastructure.py (14 additions, 13 deletions)

@@ -4,7 +4,7 @@
 import zipfile
 from enum import Enum
 from pathlib import Path
-from typing import Any
+from typing import Dict, List

 import boto3
 import yaml
@@ -29,13 +29,13 @@ def __init__(self, stack_name: str, handlers_dir: str, config: dict, environment
         self.cf_client = session.client("cloudformation")
         self.s3_resource = session.resource("s3")
         self.account_id = session.client("sts").get_caller_identity()["Account"]
-        self.region = boto3.Session().region_name
+        self.region = session.region_name
         self.stack_name = stack_name
         self.handlers_dir = handlers_dir
         self.config = config
         self.environment_variables = environment_variables

-    def deploy(self) -> dict[str, str]:
+    def deploy(self) -> Dict[str, str]:
         handlers = self._find_files(directory=self.handlers_dir, only_py=True)
         template, asset_root_dir = self.prepare_stack(
             handlers=handlers,
@@ -55,7 +55,7 @@ def delete(self):

     # Create CDK cloud assembly code
     def prepare_stack(
-        self, handlers: list[str], handlers_dir: str, stack_name: str, environment_variables: dict, **config: dict
+        self, handlers: List[str], handlers_dir: str, stack_name: str, environment_variables: dict, **config: dict
     ):
         integration_test_app = App()
         stack = Stack(integration_test_app, stack_name)
@@ -99,17 +99,16 @@ def _find_files(self, directory: str, only_py: bool = False) -> list:
                     file_paths.append(os.path.join(root, filename))
         return file_paths

-    def _create_layer(self, stack):
-        output_dir = Path(AssetStaging.BUNDLING_OUTPUT_DIR, "python")
-        input_dir = Path(AssetStaging.BUNDLING_INPUT_DIR, "aws_lambda_powertools")
+    def _create_layer(self, stack: Stack):
+        output_dir = Path(str(AssetStaging.BUNDLING_OUTPUT_DIR), "python")
+        input_dir = Path(str(AssetStaging.BUNDLING_INPUT_DIR), "aws_lambda_powertools")
         powertools_layer = LayerVersion(
             stack,
             "aws-lambda-powertools",
             layer_version_name="aws-lambda-powertools",
             compatible_runtimes=[PythonVersion[PYTHON_RUNTIME_VERSION].value["runtime"]],
             code=Code.from_asset(
                 path=".",
-                exclude=["*.pyc"],
                 bundling=BundlingOptions(
                     image=DockerImage.from_build(
                         str(Path(__file__).parent),
@@ -118,12 +117,14 @@ def _create_layer(self, stack):
                     command=[
                         "bash",
                         "-c",
-                        f"poetry export --with-credentials --format requirements.txt --output requirements.txt && pip install -r requirements.txt -t {output_dir} && cp -R {input_dir} {output_dir}",
+                        f"poetry export --with-credentials --format requirements.txt --output /tmp/requirements.txt &&\
+                        pip install -r /tmp/requirements.txt -t {output_dir} &&\
+                        cp -R {input_dir} {output_dir} &&\
+                        find {output_dir}/ -regex '^.*\\(__pycache__\\|\\.py[co]\\)$' -delete",
                     ],
                 ),
             ),
         )
-
         return powertools_layer

     def _upload_assets(self, template: dict, asset_root_dir: str):
@@ -146,16 +147,16 @@ def _upload_assets(self, template: dict, asset_root_dir: str):
             buf.seek(0)
             self.s3_client.upload_fileobj(Fileobj=buf, Bucket=bucket, Key=s3_key)

-    def _deploy_stack(self, stack_name: str, template: Any):
+    def _deploy_stack(self, stack_name: str, template: dict):
         response = self.cf_client.create_stack(
             StackName=stack_name,
             TemplateBody=yaml.dump(template),
             TimeoutInMinutes=10,
-            OnFailure="DO_NOTHING",
+            OnFailure="ROLLBACK",
             Capabilities=["CAPABILITY_IAM"],
         )
         waiter = self.cf_client.get_waiter("stack_create_complete")
-        waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 2, "MaxAttempts": 50})
+        waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 10, "MaxAttempts": 50})
         response = self.cf_client.describe_stacks(StackName=stack_name)
         return response
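
The typing changes here follow the same reasoning as in conftest.py: subscripting the builtin dict and list only became legal in Python 3.9, so on the 3.6-3.8 runtimes those annotations raise a TypeError as soon as the module is imported. A small standalone illustration, assuming nothing beyond the standard library (the function and values are made up for this example):

    from typing import Dict, List


    def deploy_stub(handlers: List[str]) -> Dict[str, str]:
        # Annotations built from typing.List/typing.Dict evaluate cleanly on 3.6-3.9.
        # Writing the signature with builtin generics, e.g. "handlers: list[str]",
        # raises "TypeError: 'type' object is not subscriptable" at import time on
        # interpreters older than 3.9.
        return {name: name.upper() for name in handlers}


    print(deploy_stub(["alpha", "beta"]))  # {'alpha': 'ALPHA', 'beta': 'BETA'}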

0 commit comments