Hello.
When I run the Playwright test locally, everything is fine. When I run it on GitLab CI, I get an error — although sometimes the test passes. Looking at the trace, I can see that the test tries to open the site, but it stops on a white screen and does not go any further.
I contacted the developers of the library, and they confirmed there is no bug on their side. Perhaps the problem is caused by incorrect settings, and the test simply cannot reach the page because of a firewall or something similar. Please help me with this. If you need additional information, I will gladly provide it.
Below is my `.gitlab-ci.yml`; the failing job is `test-e2e-prod`:
# Default image for jobs that do not override it.
# NOTE(review): node:14 is end-of-life — consider moving to a supported LTS
# image (node:18+) once the app is verified against it.
image: node:14

variables:
  # Quoted so the version is always treated as a string by YAML tooling.
  IMAGE_VERSION: '0.11.17'
  PROD_IMAGE_VERSION: prod-$IMAGE_VERSION

# Pipeline stage order: build the app, push images, deploy, then run e2e tests.
stages:
  - build
  - push
  - deploy
  - test
# Builds the frontend and the server, and runs the server's unit tests.
build-app:
  stage: build
  script:
    - npm i
    # CI=false keeps create-react-app-style builds from treating warnings as errors.
    - CI=false npm run build
    - cd server && npm i
    - npm run test
  only:
    - master
    - production
  except:
    - schedules
  # Populates the shared cache that the push-stage jobs pull from.
  cache:
    key: '$CI_PROJECT_NAME'
    paths:
      - node_modules/
      - build/
      - server/node_modules/
# Builds and pushes the integration Docker image with Kaniko (daemonless build,
# suitable for runners without Docker-in-Docker).
build-docker-integration:
  image:
    name: gcr.io/kaniko-project/executor:debug
    # Empty entrypoint so GitLab can run its own script commands in the image.
    entrypoint: ['']
  stage: push
  variables:
    IMAGE_TAG: $CI_REGISTRY_IMAGE:$IMAGE_VERSION
    # NOTE(review): removed unused TEST_IMAGE_TAG — it was defined here but
    # never referenced by any command in this job.
  script:
    - mkdir -p /kaniko/.docker
    # Registry credentials for Kaniko, written as its expected config.json.
    - echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > /kaniko/.docker/config.json
    - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination $IMAGE_TAG
  only:
    - master
  # Pull-only: reuses the cache produced by build-app without re-uploading it.
  cache:
    key: '$CI_PROJECT_NAME'
    policy: pull
    paths:
      - node_modules/
      - build/
      - server/node_modules/
  except:
    - schedules
# Deploys the integration image to the integration Kubernetes cluster.
deploy-integration:
  image:
    name: lachlanevenson/k8s-kubectl:latest
    entrypoint: ['/bin/sh', '-c']
  stage: deploy
  variables:
    APISERVER: 'https://infort.in1.cloudjiffy.net/api'
  script:
    # Substitute the concrete image version into the deployment manifest.
    - sed -i "s/IMAGE_VERSION/$IMAGE_VERSION/g" ./kube/kube-deployment.yml
    # Assemble an ad-hoc kubeconfig for the integration cluster.
    - kubectl config set-cluster integration --server=$APISERVER
    - kubectl config set-context integration --cluster=integration
    - kubectl config set-credentials integration-user --token=$INTEGRATION_TOKEN
    - kubectl config set-context integration --user=integration-user
    - kubectl config use-context integration
    - kubectl config view
    # SECURITY NOTE(review): --insecure-skip-tls-verify disables TLS
    # verification of the API server; prefer providing the cluster CA cert
    # (--certificate-authority) instead.
    - kubectl apply -f ./kube/integration --insecure-skip-tls-verify
    - kubectl apply -f ./kube/kube-deployment.yml --insecure-skip-tls-verify
    - kubectl get pods --namespace=default --insecure-skip-tls-verify
  only:
    - master
  except:
    - schedules
# Builds and pushes the production Docker image with Kaniko; mirrors
# build-docker-integration but tags with the prod-prefixed version.
build-docker-production:
  image:
    name: gcr.io/kaniko-project/executor:debug
    # Empty entrypoint so GitLab can run its own script commands in the image.
    entrypoint: ['']
  stage: push
  variables:
    IMAGE_TAG: $CI_REGISTRY_IMAGE:$PROD_IMAGE_VERSION
  script:
    - mkdir -p /kaniko/.docker
    # Registry credentials for Kaniko, written as its expected config.json.
    - echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > /kaniko/.docker/config.json
    - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination $IMAGE_TAG
  only:
    - production
  # Pull-only: reuses the cache produced by build-app without re-uploading it.
  cache:
    key: '$CI_PROJECT_NAME'
    policy: pull
    paths:
      - node_modules/
      - build/
      - server/node_modules/
  except:
    - schedules
# Runs the e2e suite against the integration deployment on master.
test-e2e:
  # when: manual
  image: mcr.microsoft.com/playwright:focal
  stage: test
  script:
    # Use HTTPS for the npm registry — the plain-HTTP endpoint is insecure
    # and rejected by modern npm versions.
    - npm config set registry https://registry.npmjs.org/
    # Install the project's full, locked dependency tree. The previous
    # `npm install --save-dev jest` installed only jest, leaving the rest of
    # the test dependencies to chance (this job has no cache configured),
    # which can produce intermittent failures like the ones observed.
    - npm ci
    - npm run e2e
  # Keep traces/reports on failure so flaky runs can be diagnosed,
  # matching test-e2e-prod.
  artifacts:
    when: always
    paths:
      - playwright-report/
      - test-results/
  only:
    - master
  except:
    - schedules
# Scheduled e2e run against production.
# NOTE(review): if the trace shows a white screen, the runner may be unable to
# reach the site at all (firewall/egress rules on the GitLab runner network) —
# add a `curl -I <site-url>` step before the tests to confirm connectivity.
test-e2e-prod:
  # when: manual
  image: mcr.microsoft.com/playwright:focal
  stage: test
  script:
    # Use HTTPS for the npm registry — the plain-HTTP endpoint is insecure
    # and rejected by modern npm versions.
    - npm config set registry https://registry.npmjs.org/
    - npm ci
    - npm run e2e-prodPW
  # Always keep Playwright reports and traces, including on failure.
  artifacts:
    when: always
    paths:
      - playwright-report/
      - test-results/
  only:
    - schedules
# image: mcr.microsoft.com/playwright:focal
# stage: test
# script:
# - npm config set registry http://registry.npmjs.org/
# - npm install --save-dev jest
# - npm run e2e-prod
# only:
# - schedules
# test with playwright report >> "pl": "playwright test e2e/trace.test.js" (add to script in package.json)
#test-e2e:
# #when: manual
# image: mcr.microsoft.com/playwright:focal
# stage: test
# script:
# - npm config set registry http://registry.npmjs.org/
# - npm ci
# - npm run pl
# only:
# - master
# artifacts:
# when: always
# paths:
# - playwright-report/
# - test-results/
# Deploys the production image to the production Kubernetes cluster;
# mirrors deploy-integration with production credentials and manifests.
deploy-production:
  image:
    name: lachlanevenson/k8s-kubectl:latest
    entrypoint: ['/bin/sh', '-c']
  stage: deploy
  variables:
    APISERVER: 'https://89.47.58.103/api'
  script:
    # Substitute the concrete prod image version into the deployment manifest.
    - sed -i "s/IMAGE_VERSION/$PROD_IMAGE_VERSION/g" ./kube/kube-deployment.yml
    # Assemble an ad-hoc kubeconfig for the production cluster.
    - kubectl config set-cluster production --server=$APISERVER
    - kubectl config set-context production --cluster=production
    - kubectl config set-credentials production-user --token=$PRODUCTION_TOKEN
    - kubectl config set-context production --user=production-user
    - kubectl config use-context production
    - kubectl config view
    # SECURITY NOTE(review): --insecure-skip-tls-verify disables TLS
    # verification of the API server; prefer providing the cluster CA cert
    # (--certificate-authority) instead.
    - kubectl apply -f ./kube/production --insecure-skip-tls-verify
    - kubectl apply -f ./kube/kube-deployment.yml --insecure-skip-tls-verify
    - kubectl get pods --namespace=default --insecure-skip-tls-verify
  only:
    - production
  except:
    - schedules
  # cache:
  #   key: '$CI_PROJECT_NAME'
  #   policy: pull
  #   paths:
  #     - node_modules/
  #     - build/
  #     - server/node_modules/