stages:
  - build
  - trigger

variables:
  DOCKER_HOST: tcp://docker:2376
  DOCKER_TLS_CERTDIR: "/certs"
  DOCKER_TLS_VERIFY: 1
  DOCKER_CERT_PATH: "$DOCKER_TLS_CERTDIR/client"
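
# Stand-alone manual job: it only prints a message and does not itself start anything else.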
manual_trigger:
  stage: trigger
  script:
    - echo "Manual trigger job executed"
  when: manual
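
# Build and push only the crawlers whose directory changed in the pushed commit range.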
build-modified-crawlers:
  stage: build
  image: docker:latest
  services:
    - docker:dind
  variables:
    ECR_REPOSITORY: "REDACTED"
  before_script:
    - apk update
    - apk add python3 py3-pip git
    - pip3 install awscli
  script:
    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD"
    - |
      for crawler_dir in */; do
        if [[ -d $crawler_dir ]]; then
          crawler=$(basename $crawler_dir)
          if [[ "$crawler" != "not_finished" ]]; then
            echo "Processing crawler: $crawler"
            echo "Commit range: $CI_COMMIT_BEFORE_SHA..$CI_COMMIT_SHA"
            changed_files=$(git diff --name-only $CI_COMMIT_BEFORE_SHA..$CI_COMMIT_SHA)
            if echo "$changed_files" | grep -q "^$crawler/"; then
              echo "Building crawler: $crawler"
              cd "$crawler"
              docker build -t ${crawler} .
              docker tag ${crawler}:latest $ECR_REPOSITORY/${crawler}:latest
              aws ecr get-login-password --region eu-west-3 | docker login --username AWS --password-stdin REDACTED.dkr.ecr.eu-west-3.amazonaws.com
              docker push $ECR_REPOSITORY/${crawler}:latest
              cd ..
            else
              echo "No changes detected for crawler: $crawler"
            fi
          fi
        fi
      done
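
# Rebuild and push every crawler except check_site, ignoring git history;
# gated with only: trigger, so it is meant for triggered pipelines.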
build-all-crawlers:
  stage: trigger
  image: docker:latest
  services:
    - docker:dind
  variables:
    ECR_REPOSITORY: "REDACTED"
  before_script:
    - apk update
    - apk add python3 py3-pip git
    - pip3 install awscli
  script:
    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD"
    - |
      for crawler_dir in */; do
        if [[ -d $crawler_dir ]]; then
          crawler=$(basename $crawler_dir)
          if [[ "$crawler" != "check_site" ]]; then
            echo "Building crawler: $crawler_dir"
            cd "$crawler_dir"
            docker build -t ${crawler} .
            docker tag ${crawler}:latest $ECR_REPOSITORY/${crawler}:latest
            aws ecr get-login-password --region eu-west-3 | docker login --username AWS --password-stdin REDACTED.dkr.ecr.eu-west-3.amazonaws.com
            docker push $ECR_REPOSITORY/${crawler}:latest
            cd ..
          fi
        fi
      done
  only:
    - trigger

That's my pipeline. The first job runs just fine: it iterates through the crawler folders and builds only the ones that were modified. The second job is supposed to rebuild every crawler regardless of changes, but it never does anything; the pipeline just reports success. Where have I gone wrong in the script? Here's the runner log from the manual job:
Running with gitlab-runner 16.0.2 (85586bd1)
on k8s-crawlers-runner qaagaxCkj, system ID: r_L21p9a4LP5zh
Preparing the "kubernetes" executor
00:00
Using Kubernetes namespace: gitlab
Using Kubernetes executor with image ubuntu:22.04 ...
Using attach strategy to execute scripts...
Preparing environment
00:04
Waiting for pod gitlab/runner-qaagaxckj-project-44798600-concurrent-0sczgz to be running, status is Pending
Running on runner-qaagaxckj-project-44798600-concurrent-0sczgz via gitlab-runner-6c84c86778-jp4kl...
Getting source from Git repository
00:02
Fetching changes with git depth set to 20...
Initialized empty Git repository in /builds/tracking-infrastructure/k8s-general-crawlers/.git/
Created fresh repository.
Checking out 9031c9e6 as detached HEAD (ref is main)...
Skipping Git submodules setup
Executing "step_script" stage of the job script
00:00
$ echo "Manual trigger job executed"
Manual trigger job executed
Cleaning up project directory and file based variables
00:01
Job succeeded
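
My guess is that only: trigger restricts the job to pipelines started with a trigger token, so a manually started pipeline never schedules it at all, but I'm not certain that's the actual problem. For reference, this is the kind of rules: variant I was considering instead. It uses the predefined CI_PIPELINE_SOURCE variable and is untested, so treat it as a sketch rather than a known fix:

build-all-crawlers:
  stage: trigger
  rules:
    # run automatically in pipelines created via a trigger token or the API
    - if: '$CI_PIPELINE_SOURCE == "trigger" || $CI_PIPELINE_SOURCE == "api"'
    # allow starting the job by hand from pipelines launched in the web UI
    - if: '$CI_PIPELINE_SOURCE == "web"'
      when: manual
  # image, services, before_script and script unchanged from above

Is that the right direction, or is the problem somewhere else in the script?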