
Commit 03849f9

nandajavarma authored and roboquat committed
Adding the cron for self-hosted preview cleanup
1 parent ea3921d commit 03849f9

11 files changed: +267 −42 lines

.werft/aks-installer-tests.yaml

Lines changed: 1 addition & 1 deletion
@@ -70,7 +70,7 @@ pod:

       curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash

-      export TF_VAR_TEST_ID=$(echo $RANDOM | md5sum | head -c 5; echo)
+      export TF_VAR_TEST_ID=$(echo $RANDOM | md5sum | head -c 5; echo)-azure

       (cd .werft && yarn install && mv node_modules ..) | werft log slice prep
       printf '{{ toJson . }}' > context.json
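Each cloud-specific nightly job now appends a provider suffix to the randomly generated test ID (the eks, gke and k3s jobs below get -aws, -gcp and -k3s respectively), presumably so a setup's cloud can be read off its ID, for example by the cleanup job added in this commit. A minimal sketch of what the command yields, assuming typical md5sum output (the hash value below is illustrative):

    # $RANDOM hashed and truncated to 5 characters, then the provider suffix appended
    export TF_VAR_TEST_ID=$(echo $RANDOM | md5sum | head -c 5; echo)-azure
    echo "$TF_VAR_TEST_ID"    # e.g. 3b5d8-azure (varies per run)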

.werft/cleanup-installer-setups.yaml

Lines changed: 109 additions & 0 deletions
@@ -0,0 +1,109 @@
# debug using `werft run github -f -s .werft/installer-tests.ts -j .werft/self-hosted-installer-tests.yaml -a debug=true`
pod:
  serviceAccount: werft
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
          - matchExpressions:
              - key: dev/workload
                operator: In
                values:
                  - "builds"
  securityContext:
    runAsUser: 0
  volumes:
    - name: sh-playground-sa-perm
      secret:
        secretName: sh-playground-sa-perm
    - name: sh-playground-dns-perm
      secret:
        secretName: sh-playground-dns-perm
    - name: sh-aks-perm
      secret:
        secretName: aks-credentials
  containers:
    - name: nightly-test
      image: eu.gcr.io/gitpod-core-dev/dev/dev-environment:ljb-werft-cli-grpc-changes.2
      workingDir: /workspace
      imagePullPolicy: Always
      volumeMounts:
        - name: sh-playground-sa-perm
          mountPath: /mnt/secrets/sh-playground-sa-perm
        - name: sh-playground-dns-perm # this sa is used for the DNS management
          mountPath: /mnt/secrets/sh-playground-dns-perm
      env:
        - name: GOOGLE_APPLICATION_CREDENTIALS
          value: "/mnt/secrets/sh-playground-sa-perm/sh-sa.json"
        - name: TF_VAR_sa_creds
          value: "/mnt/secrets/sh-playground-sa-perm/sh-sa.json"
        - name: TF_VAR_dns_sa_creds
          value: "/mnt/secrets/sh-playground-dns-perm/sh-dns-sa.json"
        - name: ARM_SUBSCRIPTION_ID
          valueFrom:
            secretKeyRef:
              name: aks-credentials
              key: subscriptionid
        - name: ARM_TENANT_ID
          valueFrom:
            secretKeyRef:
              name: aks-credentials
              key: tenantid
        - name: ARM_CLIENT_ID
          valueFrom:
            secretKeyRef:
              name: aks-credentials
              key: clientid
        - name: ARM_CLIENT_SECRET
          valueFrom:
            secretKeyRef:
              name: aks-credentials
              key: clientsecret
        - name: NODENAME
          valueFrom:
            fieldRef:
              fieldPath: spec.nodeName
        - name: USER_TOKEN # this is for the integration tests
          valueFrom:
            secretKeyRef:
              name: integration-test-user
              key: token
        - name: AWS_ACCESS_KEY_ID
          valueFrom:
            secretKeyRef:
              name: aws-credentials
              key: aws-access-key
        - name: AWS_SECRET_ACCESS_KEY
          valueFrom:
            secretKeyRef:
              name: aws-credentials
              key: aws-secret-key
        - name: AWS_REGION
          valueFrom:
            secretKeyRef:
              name: aws-credentials
              key: aws-region
      command:
        - bash
        - -c
        - |
          sleep 1
          set -Eeuo pipefail

          sudo chown -R gitpod:gitpod /workspace
          sudo apt update && apt install gettext-base

          curl -sLS https://get.k3sup.dev | sh
          curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash
          curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
          unzip awscliv2.zip
          sudo ./aws/install

          (cd .werft && yarn install && mv node_modules ..) | werft log slice prep
          printf '{{ toJson . }}' > context.json

          TESTCONFIG="CLEANUP_OLD_TESTS"

          npx ts-node .werft/installer-tests.ts ${TESTCONFIG}
plugins:
  cron: "15 3 * * *"
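The cron plugin schedules this pod once a day at 03:15 (five-field cron syntax: minute 15, hour 3, every day). For ad-hoc runs the job can presumably also be triggered by hand like the other installer jobs; a hedged sketch, swapping the -j path in the file's debug comment for this new job file:

    # assumed manual trigger, mirroring the debug comment at the top of the file
    werft run github -f -s .werft/installer-tests.ts -j .werft/cleanup-installer-setups.yaml -a debug=true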

.werft/eks-installer-tests.yaml

Lines changed: 1 addition & 1 deletion
@@ -63,7 +63,7 @@ pod:
       sudo chown -R gitpod:gitpod /workspace
       sudo apt update && apt install gettext-base

-      export TF_VAR_TEST_ID="$(echo $RANDOM | md5sum | head -c 5; echo)"
+      export TF_VAR_TEST_ID="$(echo $RANDOM | md5sum | head -c 5; echo)-aws"

       (cd .werft && yarn install && mv node_modules ..) | werft log slice prep
       printf '{{ toJson . }}' > context.json

.werft/gke-installer-tests.yaml

Lines changed: 1 addition & 1 deletion
@@ -50,7 +50,7 @@ pod:
       sudo chown -R gitpod:gitpod /workspace
       sudo apt update && apt install gettext-base

-      export TF_VAR_TEST_ID=$(echo $RANDOM | md5sum | head -c 5; echo)
+      export TF_VAR_TEST_ID=$(echo $RANDOM | md5sum | head -c 5; echo)-gcp

       (cd .werft && yarn install && mv node_modules ..) | werft log slice prep
       printf '{{ toJson . }}' > context.json

.werft/installer-tests.ts

Lines changed: 29 additions & 12 deletions
@@ -91,6 +91,13 @@ const TEST_CONFIGURATIONS: { [name: string]: TestConfig } = {
             "CHECK_INSTALLATION",
         ],
     },
+    CLEANUP_OLD_TESTS: {
+        CLOUD: "",
+        DESCRIPTION: "Deletes old test setups",
+        PHASES: [
+            "CLEANUP_OLD_TESTS"
+        ]
+    }
 };

 const config: TestConfig = TEST_CONFIGURATIONS[testConfig];
@@ -177,10 +184,10 @@ const INFRA_PHASES: { [name: string]: InfraConfig } = {
         makeTarget: "cleanup",
         description: "Destroy the created infrastucture",
     },
-    RESULTS: {
-        phase: "get-results",
-        makeTarget: "get-results",
-        description: "Get the result of the setup",
+    CLEANUP_OLD_TESTS: {
+        phase: "cleanup-old-tests",
+        makeTarget: "cleanup-old-tests",
+        description: "",
     },
 };

@@ -246,8 +253,11 @@ installerTests(TEST_CONFIGURATIONS[testConfig]).catch((err) => {

 export async function installerTests(config: TestConfig) {
     console.log(config.DESCRIPTION);
-    // these phases set up the infrastructure
-    werft.phase(`create-${cloud}-infra`, `Create the infrastructure in ${cloud}`);
+    // these phases sets up or clean up the infrastructure
+    // If the cloud variable is not set, we have a cleanup job in hand
+    const majorPhase: string = cloud == "" ? `create-${cloud}-infra` : "cleanup-infra"
+
+    werft.phase(majorPhase, `Manage the infrastructure`);
     for (let phase of config.PHASES) {
         const phaseSteps = INFRA_PHASES[phase];
         const ret = callMakeTargets(phaseSteps.phase, phaseSteps.description, phaseSteps.makeTarget);
@@ -257,9 +267,14 @@ export async function installerTests(config: TestConfig) {
             break;
         }
     }
-    werft.done(`create-${cloud}-infra`);
+    werft.done(majorPhase);
+
+    if (cloud == "") {
+        // this means that it was a cleanup job, nothing more to do here
+        return
+    }

-if (upgrade === "true") {
+    if (upgrade === "true") {
         // we could run integration tests in the current setup
         // but since we run nightly tests on unstable setups, feels unnecessary
         // runIntegrationTests()
@@ -279,16 +294,18 @@

     // if the preview flag is set to true, the script will print the result and exits
     if (preview === "true") {
-        const resultPhase = INFRA_PHASES["RESULTS"];
         werft.phase("print-output", "Get connection details to self-hosted setup");

-        // TODO(nvn): send the kubeconfig to cloud storage
-        callMakeTargets(resultPhase.phase, resultPhase.description, resultPhase.makeTarget);
-
         exec(
             `werft log result -d "self-hosted preview url" url "https://${process.env["TF_VAR_TEST_ID"]}.tests.gitpod-self-hosted.com"`,
         );

+        if (testConfig == "STANDARD_K3S_TEST") {
+            exec(`werft log result -d "KUBECONFIG file store under GCP project 'sh-automated-tests'" url "gs://nightly-tests/tf-state/${process.env["TF_VAR_TEST_ID"]}-kubeconfig"`);
+        } else {
+            exec(`werft log result -d "KUBECONFIG Connection details" url "Follow cloud specific instructions to connect to the cluster"`);
+        }
+
         exec(`werft log result -d "Terraform state" url "Terraform state file name is ${process.env["TF_VAR_TEST_ID"]}"`);

         werft.done("print-output");
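The new CLEANUP_OLD_TESTS configuration flows through the same phase loop as the cloud configurations, just with an empty CLOUD and a single phase, and returns right after werft.done(). A minimal sketch of driving it by hand, mirroring the command the cron pod runs (assumes the .werft dependencies are already installed):

    # run only the cleanup phase, exactly as the nightly cron pod does
    TESTCONFIG="CLEANUP_OLD_TESTS"
    npx ts-node .werft/installer-tests.ts ${TESTCONFIG}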

.werft/k3s-installer-tests.yaml

Lines changed: 1 addition & 1 deletion
@@ -57,7 +57,7 @@ pod:

       curl -sLS https://get.k3sup.dev | sh

-      export TF_VAR_TEST_ID=$(echo $RANDOM | md5sum | head -c 5; echo)
+      export TF_VAR_TEST_ID=$(echo $RANDOM | md5sum | head -c 5; echo)-k3s

       (cd .werft && yarn install && mv node_modules ..) | werft log slice prep
       printf '{{ toJson . }}' > context.json

.werft/self-hosted-installer-tests.yaml

Lines changed: 19 additions & 2 deletions
@@ -4,6 +4,10 @@ args:
     desc: "Name of the supported managed cluster solution to test with, options: [`k3s`, `gke`, `aks`, `eks`], if not specified, it will run for all cloud providers"
     required: false
     default: ""
+  - name: subdomain
+    desc: "Subdomain to use, starting with `gitpod-*` will omit from cleanup, make sure it is not in use already. A terraform workspace of same name will be used"
+    required: false
+    default: ""
   - name: channel
     desc: "Replicated channel to use"
     required: false
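The new subdomain annotation pins the test ID (and thus the terraform workspace) to a fixed name instead of a random one; values starting with gitpod-* are additionally skipped by cleanup. A hedged usage sketch, assuming werft's standard -a key=value annotation flag (the script and job paths are taken from the debug comment in this repo; the subdomain value is illustrative):

    # run the self-hosted installer test against a fixed, cleanup-exempt subdomain
    werft run github -f -s .werft/installer-tests.ts -j .werft/self-hosted-installer-tests.yaml \
      -a subdomain=gitpod-my-preview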
@@ -136,8 +140,21 @@ pod:
             CLUSTER="k3s"
           fi

-          TESTCONFIG="STANDARD_${CLUSTER^^}_TEST"
+          export domain="{{ .Annotations.subdomain }}"
+
+          export eks=aws
+          export gke=gcp
+          export k3s=k3s
+          export aks=azure

-          export TF_VAR_TEST_ID=$(echo $RANDOM | md5sum | head -c 5; echo)
+          export provider=${!CLUSTER}
+
+          if [[ "$domain" == "<no value>" ]]; then
+            export TF_VAR_TEST_ID="$(echo $RANDOM | md5sum | head -c 5; echo)-$provider"
+          else
+            export TF_VAR_TEST_ID="$domain"
+          fi
+
+          TESTCONFIG="STANDARD_${CLUSTER^^}_TEST"

           npx ts-node .werft/installer-tests.ts ${TESTCONFIG}
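The export provider=${!CLUSTER} line relies on bash indirect expansion: CLUSTER holds the name of one of the variables exported just above (eks, gke, k3s, aks), and ${!CLUSTER} expands to that variable's value, i.e. the provider suffix used in the test ID. A minimal standalone sketch of the mechanism:

    # map a cluster flavour to its provider suffix via indirect expansion
    eks=aws; gke=gcp; k3s=k3s; aks=azure
    CLUSTER="eks"
    provider=${!CLUSTER}   # expands to the value of $eks, i.e. "aws"
    echo "$provider"       # prints: aws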

install/infra/terraform/tools/issuer/main.tf

Lines changed: 2 additions & 2 deletions
@@ -3,7 +3,7 @@ provider "kubernetes" {
 }

 resource "kubernetes_secret" "dns_solver" {
-  count = var.secretAccessKey == null ? 0 : 1
+  count = var.secretAccessKey == null ? 0 : 1
   metadata {
     name      = "route53-credentials"
     namespace = "cert-manager"
@@ -17,7 +17,7 @@ resource "kubernetes_secret" "dns_solver" {
 resource "kubernetes_manifest" "clusterissuer_gitpod" {
   manifest = {
     "apiVersion" = "cert-manager.io/v1"
-    "kind" = "ClusterIssuer"
+    "kind"       = "ClusterIssuer"
     "metadata" = {
       "name" = "gitpod-issuer"
     }
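Both hunks above are whitespace-only changes that realign the = signs (the count lines differ only in spacing). This is what an automatic formatter produces; a hedged sketch of reproducing such a change, assuming a standard Terraform CLI (the directory path is taken from the diff):

    # realign attribute assignments in the module's .tf files
    terraform fmt install/infra/terraform/tools/issuer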
