I want a pipeline that is not blocking: if I don't run the manual job, the pipeline should just show the status of whatever has run so far.
I also have a subsequent job that should only run if the manual job was a success.
.rules:
  default_branch:
    - if: ($CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH && $CI_PIPELINE_SOURCE != "schedule")
  merge_request:
    - if: ($CI_PIPELINE_SOURCE == "merge_request_event")
  manual:
    - if: ($CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH && $CI_PIPELINE_SOURCE != "schedule")
      when: manual
  default:
    - !reference [.rules, default_branch]
    - !reference [.rules, merge_request]

# run tests and build
# ....

push:
  extends: .docker
  stage: push
  script:
    - docker tag "$EPHEMERAL_IMAGE" "$ECR_IMAGE"
    - nix-shell --run 'aws ecr get-login-password --region "$AWS_DEFAULT_REGION" | docker login --username AWS --password-stdin "$ECR"'
    - docker push "$ECR_IMAGE"
  rules: [!reference [.rules, default]]
  when: manual

.deploy:
  variables:
    TRIGGER_JOB: "example.${ENV}.peter.co"
    REV: "$CI_COMMIT_SHA"
  trigger:
    project: peter/infra/config-mgmt
    strategy: depend

deploy-dev:
  extends: .deploy
  stage: deploy-dev
  needs: ["push"]
  variables:
    ENV: "dev"
  rules: [!reference [.rules, default]]

# run post deployment tests
# ...
Currently deploy-dev will run even if push was a failure!
If I add allow_failure: false to push, then the pipeline becomes blocking.
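To be concrete, this is roughly the variant I mean (assuming allow_failure: false is set at the job level of push, next to the existing when: manual; the script steps are the same as above):

push:
  extends: .docker
  stage: push
  script:
    # tag / login steps omitted, same as in the config above
    - docker push "$ECR_IMAGE"
  rules: [!reference [.rules, default]]
  when: manual
  # with allow_failure: false the pipeline waits in the "blocked" state
  # until someone runs push, which is exactly what I want to avoid
  allow_failure: false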