Skip to content

Add the matrix strategy to workflow to make integration tests faster #137

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 5 commits into from
Apr 28, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,9 @@ jobs:

integration-test:
runs-on: ubuntu-latest

strategy:
matrix:
runtime-param: [2.7, 3.6, 3.7, 3.8]
steps:
- name: Checkout
uses: actions/checkout@v2
Expand Down Expand Up @@ -93,4 +95,5 @@ jobs:
DD_API_KEY: ${{ secrets.DD_API_KEY }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
RUNTIME_PARAM: ${{ matrix.runtime-param }}
run: ./scripts/run_integration_tests.sh
84 changes: 62 additions & 22 deletions scripts/run_integration_tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,27 @@ script_utc_start_time=$(date -u +"%Y%m%dT%H%M%S")

mismatch_found=false

# Format:
# [0]: serverless runtime name
# [1]: python version
# [2]: random 8-character ID to avoid collisions with other runs
python27=("python2.7" "2.7" $(xxd -l 4 -c 4 -p < /dev/random))
python36=("python3.6" "3.6" $(xxd -l 4 -c 4 -p < /dev/random))
python37=("python3.7" "3.7" $(xxd -l 4 -c 4 -p < /dev/random))
python38=("python3.8" "3.8" $(xxd -l 4 -c 4 -p < /dev/random))

PARAMETERS_SETS=("python27" "python36" "python37" "python38")

if [ -z "$RUNTIME_PARAM" ]; then
echo "Python version not specified, running for all python versions."
else
RUNTIME_PARAM_NO_DOT=$(echo $RUNTIME_PARAM | sed 's/\.//')
echo "Python version is specified: $RUNTIME_PARAM"
PARAMETERS_SETS=(python${RUNTIME_PARAM_NO_DOT})
BUILD_LAYER_VERSION=python$RUNTIME_PARAM_NO_DOT[1]
fi


if [ -z "$AWS_SECRET_ACCESS_KEY" ]; then
echo "No AWS credentials were found in the environment."
echo "Note that only Datadog employees can run these integration tests."
Expand All @@ -41,7 +62,7 @@ fi

if [ -n "$BUILD_LAYERS" ]; then
echo "Building layers that will be deployed with our test functions"
source $scripts_dir/build_layers.sh
PYTHON_VERSION=${!BUILD_LAYER_VERSION} source $scripts_dir/build_layers.sh
else
echo "Not building layers, ensure they've already been built or re-run with 'BUILD_LAYERS=true DD_API_KEY=XXXX ./scripts/run_integration_tests.sh'"
fi
Expand All @@ -52,32 +73,48 @@ input_event_files=$(ls ./input_events)
# Sort event files by name so that snapshots stay consistent
input_event_files=($(for file_name in ${input_event_files[@]}; do echo $file_name; done | sort))

# Generate a random 8-character ID to avoid collisions with other runs
run_id=$(xxd -l 4 -c 4 -p < /dev/random)

# Always remove the stack before exiting, no matter what
# Always remove the stack(s) before exiting, no matter what
function remove_stack() {
echo "Removing functions"
serverless remove --stage $run_id
for parameters_set in "${PARAMETERS_SETS[@]}"; do
serverless_runtime=$parameters_set[0]
python_version=$parameters_set[1]
run_id=$parameters_set[2]
echo "Removing stack for stage : ${!run_id}"
PYTHON_VERSION=${!python_version} RUNTIME=$parameters_set SERVERLESS_RUNTIME=${!serverless_runtime} \
serverless remove --stage ${!run_id}
done
}



trap remove_stack EXIT

echo "Deploying functions"
serverless deploy --stage $run_id
for parameters_set in "${PARAMETERS_SETS[@]}"; do

serverless_runtime=$parameters_set[0]
python_version=$parameters_set[1]
run_id=$parameters_set[2]

echo "Invoking functions"
set +e # Don't exit this script if an invocation fails or there's a diff
for handler_name in "${LAMBDA_HANDLERS[@]}"; do
for runtime in "${RUNTIMES[@]}"; do
function_name="${handler_name}_${runtime}"
echo "Deploying functions for runtime : $parameters_set, serverless runtime : ${!serverless_runtime}, \
python version : ${!python_version} and run id : ${!run_id}"

PYTHON_VERSION=${!python_version} RUNTIME=$parameters_set SERVERLESS_RUNTIME=${!serverless_runtime} \
serverless deploy --stage ${!run_id}

echo "Invoking functions for runtime $parameters_set"
set +e # Don't exit this script if an invocation fails or there's a diff
for handler_name in "${LAMBDA_HANDLERS[@]}"; do

function_name="${handler_name}_python"
echo "$function_name"
# Invoke function once for each input event
for input_event_file in "${input_event_files[@]}"; do
# Get event name without trailing ".json" so we can build the snapshot file name
input_event_name=$(echo "$input_event_file" | sed "s/.json//")
snapshot_path="./snapshots/return_values/${function_name}_${input_event_name}.json"
snapshot_path="./snapshots/return_values/${handler_name}_${parameters_set}_${input_event_name}.json"

return_value=$(serverless invoke -f $function_name --stage $run_id --path "./input_events/$input_event_file")
return_value=$(PYTHON_VERSION=${!python_version} RUNTIME=$parameters_set SERVERLESS_RUNTIME=${!serverless_runtime} \
serverless invoke --stage ${!run_id} -f "$function_name" --path "./input_events/$input_event_file")

if [ ! -f $snapshot_path ]; then
# If the snapshot file doesn't exist yet, we create it
Expand Down Expand Up @@ -109,14 +146,17 @@ sleep $LOGS_WAIT_SECONDS
set +e # Don't exit this script if there is a diff or the logs endpoint fails
echo "Fetching logs for invocations and comparing to snapshots"
for handler_name in "${LAMBDA_HANDLERS[@]}"; do
for runtime in "${RUNTIMES[@]}"; do
function_name="${handler_name}_${runtime}"
function_snapshot_path="./snapshots/logs/$function_name.log"

for parameters_set in "${PARAMETERS_SETS[@]}"; do
function_name="${handler_name}_python"
function_snapshot_path="./snapshots/logs/${handler_name}_${parameters_set}.log"
serverless_runtime=$parameters_set[0]
python_version=$parameters_set[1]
run_id=$parameters_set[2]
# Fetch logs with serverless cli, retrying to avoid AWS account-wide rate limit error
retry_counter=0
while [ $retry_counter -lt 10 ]; do
raw_logs=$(serverless logs -f $function_name --stage $run_id --startTime $script_utc_start_time)
raw_logs=$(PYTHON_VERSION=${!python_version} RUNTIME=$parameters_set SERVERLESS_RUNTIME=${!serverless_runtime} \
serverless logs --stage ${!run_id} -f $function_name --startTime $script_utc_start_time)
fetch_logs_exit_code=$?
if [ $fetch_logs_exit_code -eq 1 ]; then
echo "Retrying fetch logs for $function_name..."
Expand Down Expand Up @@ -158,7 +198,7 @@ for handler_name in "${LAMBDA_HANDLERS[@]}"; do
sed -E "s/(dd_lambda_layer:datadog-python[0-9]+_)[0-9]+\.[0-9]+\.[0-9]+/\1X\.X\.X/g" |
sed -E "s/(datadog_lambda:v)([0-9]+\.[0-9]+\.[0-9])/\1XX/g" |
# Strip out run ID (from function name, resource, etc.)
sed -E "s/$run_id/XXXX/g" |
sed -E "s/${!run_id}/XXXX/g" |
# Strip out trace/span/parent/timestamps
sed -E "s/(\"trace_id\"\: \")[A-Z0-9\.\-]+/\1XXXX/g" |
sed -E "s/(\"span_id\"\: \")[A-Z0-9\.\-]+/\1XXXX/g" |
Expand Down
69 changes: 10 additions & 59 deletions tests/integration/serverless.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,74 +19,25 @@ provider:
role: "arn:aws:iam::601427279990:role/serverless-integration-test-lambda-role"

layers:
python27:
python:
package:
artifact: ../../.layers/datadog_lambda_py2.7.zip
python36:
package:
artifact: ../../.layers/datadog_lambda_py3.6.zip
python37:
package:
artifact: ../../.layers/datadog_lambda_py3.7.zip
python38:
package:
artifact: ../../.layers/datadog_lambda_py3.8.zip
artifact: ../../.layers/datadog_lambda_py${env:PYTHON_VERSION}.zip

functions:
# async-metrics (flushed to logs)
async-metrics_python27:
handler: handle.handle
runtime: python2.7
layers:
- { Ref: Python27LambdaLayer }
environment:
DD_FLUSH_TO_LOG: true

async-metrics_python36:
async-metrics_python:
name: integration-tests-python-${sls:stage}-async-metrics_${env:RUNTIME}
handler: handle.handle
runtime: python3.6
runtime: ${env:SERVERLESS_RUNTIME}
layers:
- { Ref: Python36LambdaLayer }
environment:
DD_FLUSH_TO_LOG: true

async-metrics_python37:
handler: handle.handle
runtime: python3.7
layers:
- { Ref: Python37LambdaLayer }
environment:
DD_FLUSH_TO_LOG: true

async-metrics_python38:
handler: handle.handle
runtime: python3.8
layers:
- { Ref: Python38LambdaLayer }
- { Ref: PythonLambdaLayer }
environment:
DD_FLUSH_TO_LOG: true

# sync-metrics (sent via API)
sync-metrics_python27:
handler: handle.handle
runtime: python2.7
layers:
- { Ref: Python27LambdaLayer }

sync-metrics_python36:
handler: handle.handle
runtime: python3.6
layers:
- { Ref: Python36LambdaLayer }

sync-metrics_python37:
handler: handle.handle
runtime: python3.7
layers:
- { Ref: Python37LambdaLayer }

sync-metrics_python38:
sync-metrics_python:
name: integration-tests-python-${sls:stage}-sync-metrics_${env:RUNTIME}
handler: handle.handle
runtime: python3.8
runtime: ${env:SERVERLESS_RUNTIME}
layers:
- { Ref: Python38LambdaLayer }
- { Ref: PythonLambdaLayer }