diff --git a/tests/tasks/setup/eks/awscli-cp-with-vpc.yaml b/tests/tasks/setup/eks/awscli-cp-with-vpc.yaml
index 6edcf30e..7d042371 100644
--- a/tests/tasks/setup/eks/awscli-cp-with-vpc.yaml
+++ b/tests/tasks/setup/eks/awscli-cp-with-vpc.yaml
@@ -57,7 +57,8 @@ spec:
           if [ "$CREATED_CLUSTER" == "" ]; then
             aws eks create-cluster --name $(params.cluster-name) --region $(params.region) --kubernetes-version $(params.kubernetes-version) --role-arn $SERVICE_ROLE_ARN --resources-vpc-config subnetIds=$subnets,securityGroupIds=$sg $ENDPOINT_FLAG
           fi
-          aws eks $ENDPOINT_FLAG --region $(params.region) wait cluster-active --name $(params.cluster-name)
+          aws eks $ENDPOINT_FLAG --region $(params.region) wait cluster-active --name $(params.cluster-name)
+          sleep 300
     - name: write-kubeconfig
       image: alpine/k8s:1.23.7
       script: |
diff --git a/tests/tasks/teardown/awscli-eks.yaml b/tests/tasks/teardown/awscli-eks.yaml
index 579eb6de..7ce6a801 100644
--- a/tests/tasks/teardown/awscli-eks.yaml
+++ b/tests/tasks/teardown/awscli-eks.yaml
@@ -38,6 +38,7 @@ spec:
         aws eks wait nodegroup-deleted --nodegroup-name $i --cluster-name $(params.cluster-name) $ENDPOINT_FLAG --region $(params.region);
       done;
       aws eks delete-cluster --name $(params.cluster-name) --region $(params.region) $ENDPOINT_FLAG
+      sleep 900
     - name: teardown-eks-role-stack
       image: alpine/k8s:1.23.7
       script: |