Skip to content

Commit 922a956

Browse files
authored
Add the matrix strategy to workflow to make integration tests faster (#137)
* integration tests run faster
1 parent 57e8bc0 commit 922a956

File tree

3 files changed

+76
-82
lines changed

3 files changed

+76
-82
lines changed

.github/workflows/build.yml

Lines changed: 4 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -62,7 +62,9 @@ jobs:
6262
6363
integration-test:
6464
runs-on: ubuntu-latest
65-
65+
strategy:
66+
matrix:
67+
runtime-param: [2.7, 3.6, 3.7, 3.8]
6668
steps:
6769
- name: Checkout
6870
uses: actions/checkout@v2
@@ -93,4 +95,5 @@ jobs:
9395
DD_API_KEY: ${{ secrets.DD_API_KEY }}
9496
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
9597
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
98+
RUNTIME_PARAM: ${{ matrix.runtime-param }}
9699
run: ./scripts/run_integration_tests.sh

scripts/run_integration_tests.sh

Lines changed: 62 additions & 22 deletions
Original file line number · Diff line number · Diff line change
@@ -24,6 +24,27 @@ script_utc_start_time=$(date -u +"%Y%m%dT%H%M%S")
2424

2525
mismatch_found=false
2626

27+
# Format :
28+
# [0]: serverless runtime name
29+
# [1]: python version
30+
# [2]: random 8-character ID to avoid collisions with other runs
31+
python27=("python2.7" "2.7" $(xxd -l 4 -c 4 -p < /dev/random))
32+
python36=("python3.6" "3.6" $(xxd -l 4 -c 4 -p < /dev/random))
33+
python37=("python3.7" "3.7" $(xxd -l 4 -c 4 -p < /dev/random))
34+
python38=("python3.8" "3.8" $(xxd -l 4 -c 4 -p < /dev/random))
35+
36+
PARAMETERS_SETS=("python27" "python36" "python37" "python38")
37+
38+
if [ -z "$RUNTIME_PARAM" ]; then
39+
echo "Python version not specified, running for all python versions."
40+
else
41+
RUNTIME_PARAM_NO_DOT=$(echo $RUNTIME_PARAM | sed 's/\.//')
42+
echo "Python version is specified: $RUNTIME_PARAM"
43+
PARAMETERS_SETS=(python${RUNTIME_PARAM_NO_DOT})
44+
BUILD_LAYER_VERSION=python$RUNTIME_PARAM_NO_DOT[1]
45+
fi
46+
47+
2748
if [ -z "$AWS_SECRET_ACCESS_KEY" ]; then
2849
echo "No AWS credentials were found in the environment."
2950
echo "Note that only Datadog employees can run these integration tests."
@@ -41,7 +62,7 @@ fi
4162

4263
if [ -n "$BUILD_LAYERS" ]; then
4364
echo "Building layers that will be deployed with our test functions"
44-
source $scripts_dir/build_layers.sh
65+
PYTHON_VERSION=${!BUILD_LAYER_VERSION} source $scripts_dir/build_layers.sh
4566
else
4667
echo "Not building layers, ensure they've already been built or re-run with 'BUILD_LAYERS=true DD_API_KEY=XXXX ./scripts/run_integration_tests.sh'"
4768
fi
@@ -52,32 +73,48 @@ input_event_files=$(ls ./input_events)
5273
# Sort event files by name so that snapshots stay consistent
5374
input_event_files=($(for file_name in ${input_event_files[@]}; do echo $file_name; done | sort))
5475

55-
# Generate a random 8-character ID to avoid collisions with other runs
56-
run_id=$(xxd -l 4 -c 4 -p < /dev/random)
57-
58-
# Always remove the stack before exiting, no matter what
76+
# Always remove the stack(s) before exiting, no matter what
5977
function remove_stack() {
60-
echo "Removing functions"
61-
serverless remove --stage $run_id
78+
for parameters_set in "${PARAMETERS_SETS[@]}"; do
79+
serverless_runtime=$parameters_set[0]
80+
python_version=$parameters_set[1]
81+
run_id=$parameters_set[2]
82+
echo "Removing stack for stage : ${!run_id}"
83+
PYTHON_VERSION=${!python_version} RUNTIME=$parameters_set SERVERLESS_RUNTIME=${!serverless_runtime} \
84+
serverless remove --stage ${!run_id}
85+
done
6286
}
87+
88+
89+
6390
trap remove_stack EXIT
6491

65-
echo "Deploying functions"
66-
serverless deploy --stage $run_id
92+
for parameters_set in "${PARAMETERS_SETS[@]}"; do
93+
94+
serverless_runtime=$parameters_set[0]
95+
python_version=$parameters_set[1]
96+
run_id=$parameters_set[2]
6797

68-
echo "Invoking functions"
69-
set +e # Don't exit this script if an invocation fails or there's a diff
70-
for handler_name in "${LAMBDA_HANDLERS[@]}"; do
71-
for runtime in "${RUNTIMES[@]}"; do
72-
function_name="${handler_name}_${runtime}"
98+
echo "Deploying functions for runtime : $parameters_set, serverless runtime : ${!serverless_runtime}, \
99+
python version : ${!python_version} and run id : ${!run_id}"
100+
101+
PYTHON_VERSION=${!python_version} RUNTIME=$parameters_set SERVERLESS_RUNTIME=${!serverless_runtime} \
102+
serverless deploy --stage ${!run_id}
73103

104+
echo "Invoking functions for runtime $parameters_set"
105+
set +e # Don't exit this script if an invocation fails or there's a diff
106+
for handler_name in "${LAMBDA_HANDLERS[@]}"; do
107+
108+
function_name="${handler_name}_python"
109+
echo "$function_name"
74110
# Invoke function once for each input event
75111
for input_event_file in "${input_event_files[@]}"; do
76112
# Get event name without trailing ".json" so we can build the snapshot file name
77113
input_event_name=$(echo "$input_event_file" | sed "s/.json//")
78-
snapshot_path="./snapshots/return_values/${function_name}_${input_event_name}.json"
114+
snapshot_path="./snapshots/return_values/${handler_name}_${parameters_set}_${input_event_name}.json"
79115

80-
return_value=$(serverless invoke -f $function_name --stage $run_id --path "./input_events/$input_event_file")
116+
return_value=$(PYTHON_VERSION=${!python_version} RUNTIME=$parameters_set SERVERLESS_RUNTIME=${!serverless_runtime} \
117+
serverless invoke --stage ${!run_id} -f "$function_name" --path "./input_events/$input_event_file")
81118

82119
if [ ! -f $snapshot_path ]; then
83120
# If the snapshot file doesn't exist yet, we create it
@@ -109,14 +146,17 @@ sleep $LOGS_WAIT_SECONDS
109146
set +e # Don't exit this script if there is a diff or the logs endpoint fails
110147
echo "Fetching logs for invocations and comparing to snapshots"
111148
for handler_name in "${LAMBDA_HANDLERS[@]}"; do
112-
for runtime in "${RUNTIMES[@]}"; do
113-
function_name="${handler_name}_${runtime}"
114-
function_snapshot_path="./snapshots/logs/$function_name.log"
115-
149+
for parameters_set in "${PARAMETERS_SETS[@]}"; do
150+
function_name="${handler_name}_python"
151+
function_snapshot_path="./snapshots/logs/${handler_name}_${parameters_set}.log"
152+
serverless_runtime=$parameters_set[0]
153+
python_version=$parameters_set[1]
154+
run_id=$parameters_set[2]
116155
# Fetch logs with serverless cli, retrying to avoid AWS account-wide rate limit error
117156
retry_counter=0
118157
while [ $retry_counter -lt 10 ]; do
119-
raw_logs=$(serverless logs -f $function_name --stage $run_id --startTime $script_utc_start_time)
158+
raw_logs=$(PYTHON_VERSION=${!python_version} RUNTIME=$parameters_set SERVERLESS_RUNTIME=${!serverless_runtime} \
159+
serverless logs --stage ${!run_id} -f $function_name --startTime $script_utc_start_time)
120160
fetch_logs_exit_code=$?
121161
if [ $fetch_logs_exit_code -eq 1 ]; then
122162
echo "Retrying fetch logs for $function_name..."
@@ -158,7 +198,7 @@ for handler_name in "${LAMBDA_HANDLERS[@]}"; do
158198
sed -E "s/(dd_lambda_layer:datadog-python[0-9]+_)[0-9]+\.[0-9]+\.[0-9]+/\1X\.X\.X/g" |
159199
sed -E "s/(datadog_lambda:v)([0-9]+\.[0-9]+\.[0-9])/\1XX/g" |
160200
# Strip out run ID (from function name, resource, etc.)
161-
sed -E "s/$run_id/XXXX/g" |
201+
sed -E "s/${!run_id}/XXXX/g" |
162202
# Strip out trace/span/parent/timestamps
163203
sed -E "s/(\"trace_id\"\: \")[A-Z0-9\.\-]+/\1XXXX/g" |
164204
sed -E "s/(\"span_id\"\: \")[A-Z0-9\.\-]+/\1XXXX/g" |

tests/integration/serverless.yml

Lines changed: 10 additions & 59 deletions
Original file line number · Diff line number · Diff line change
@@ -19,74 +19,25 @@ provider:
1919
role: "arn:aws:iam::601427279990:role/serverless-integration-test-lambda-role"
2020

2121
layers:
22-
python27:
22+
python:
2323
package:
24-
artifact: ../../.layers/datadog_lambda_py2.7.zip
25-
python36:
26-
package:
27-
artifact: ../../.layers/datadog_lambda_py3.6.zip
28-
python37:
29-
package:
30-
artifact: ../../.layers/datadog_lambda_py3.7.zip
31-
python38:
32-
package:
33-
artifact: ../../.layers/datadog_lambda_py3.8.zip
24+
artifact: ../../.layers/datadog_lambda_py${env:PYTHON_VERSION}.zip
3425

3526
functions:
3627
# async-metrics (flushed to logs)
37-
async-metrics_python27:
38-
handler: handle.handle
39-
runtime: python2.7
40-
layers:
41-
- { Ref: Python27LambdaLayer }
42-
environment:
43-
DD_FLUSH_TO_LOG: true
44-
45-
async-metrics_python36:
28+
async-metrics_python:
29+
name: integration-tests-python-${sls:stage}-async-metrics_${env:RUNTIME}
4630
handler: handle.handle
47-
runtime: python3.6
31+
runtime: ${env:SERVERLESS_RUNTIME}
4832
layers:
49-
- { Ref: Python36LambdaLayer }
50-
environment:
51-
DD_FLUSH_TO_LOG: true
52-
53-
async-metrics_python37:
54-
handler: handle.handle
55-
runtime: python3.7
56-
layers:
57-
- { Ref: Python37LambdaLayer }
58-
environment:
59-
DD_FLUSH_TO_LOG: true
60-
61-
async-metrics_python38:
62-
handler: handle.handle
63-
runtime: python3.8
64-
layers:
65-
- { Ref: Python38LambdaLayer }
33+
- { Ref: PythonLambdaLayer }
6634
environment:
6735
DD_FLUSH_TO_LOG: true
6836

6937
# sync-metrics (sent via API)
70-
sync-metrics_python27:
71-
handler: handle.handle
72-
runtime: python2.7
73-
layers:
74-
- { Ref: Python27LambdaLayer }
75-
76-
sync-metrics_python36:
77-
handler: handle.handle
78-
runtime: python3.6
79-
layers:
80-
- { Ref: Python36LambdaLayer }
81-
82-
sync-metrics_python37:
83-
handler: handle.handle
84-
runtime: python3.7
85-
layers:
86-
- { Ref: Python37LambdaLayer }
87-
88-
sync-metrics_python38:
38+
sync-metrics_python:
39+
name: integration-tests-python-${sls:stage}-sync-metrics_${env:RUNTIME}
8940
handler: handle.handle
90-
runtime: python3.8
41+
runtime: ${env:SERVERLESS_RUNTIME}
9142
layers:
92-
- { Ref: Python38LambdaLayer }
43+
- { Ref: PythonLambdaLayer }

0 commit comments

Comments
 (0)