Pass GitLab CI/CD variable values from Python

Hello, I am new to GitLab. I want the values of the user-defined variables referenced in my gitlab-ci.yml to come from Python code instead of being hard-coded inside gitlab-ci.yml itself. For example:

This is my gitlab-ci.yml -

stages:
  - generate_artifact

pipeline_status_job:
  stage: generate_artifact
  script:
    - cat simple_script.txt > pipeline_statuses.json
    - echo "My Username is $POSTGRES_USERNAME"              # Access the environment variable
    - echo "My Password is $POSTGRES_PASSWORD"               # Access the environment variable
    - echo "My Database Name is $POSTGRES_DATABASE_NAME"     # Access the environment variable
    - echo "This is my Database Version $POSTGRES_VERSION"   # Access the environment variable
  tags:
    - shell
  artifacts:
    paths:
      - pipeline_statuses.json
  variables:
    POSTGRES_USERNAME: "$username"
    POSTGRES_PASSWORD: "$password"
    POSTGRES_DATABASE_NAME: "$database_name"
    POSTGRES_VERSION: "$postgres_version"

Now I want the values of username, password, database_name and postgres_version to be supplied at pipeline time: the user enters them in my web UI, and my Python code should pass them on to GitLab.
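From what I can find in the Create Pipeline API docs, POST /projects/:id/pipeline accepts a variables parameter (a list of key/value hashes), and those values become CI/CD variables for that run, so $username, $password, $database_name and $postgres_version in the YAML above should resolve to whatever I send. This is an untested sketch of what I have in mind (the variable values are just examples; in reality they come from my Django form):

import requests

# Untested sketch: trigger a pipeline and hand the form values over as pipeline variables.
# base_url, project_id and the token placeholder match what I use in my view below.
base_url = "https://gitlab-ce.os3.com/api/v4/"
project_id = "29"
headers = {"PRIVATE-TOKEN": "<my-private-token>"}

payload = {
    "ref": "main",  # branch to run the pipeline on
    "variables": [
        {"key": "username", "value": "postgres_admin"},
        {"key": "password", "value": "secret"},
        {"key": "database_name", "value": "customers"},
        {"key": "postgres_version", "value": "15"},
    ],
}

response = requests.post(base_url + f"projects/{project_id}/pipeline", headers=headers, json=payload, verify=False)
response.raise_for_status()
print(response.json()["id"], response.json()["status"])

If that is right, the echo lines in the job should print exactly these values.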

Now this is my trigger_pipeline.py file

import requests
import base64
import zipfile
import io
import os
from django.shortcuts import render
from django.http import JsonResponse
from django.http import HttpResponse # Import this if not already imported

class CustomBranchNotFoundError(Exception):
    pass

selected_tools = []

def trigger_pipeline_view(request):
    if request.method == 'POST':
        try:
            form_data = request.POST
            username = request.POST.get('username')
            password = request.POST.get('password')
            database_name = request.POST.get('database_name')
            postgres_version = request.POST.get('postgres_version')
            print(f'Username: {username}')
            print(f'Password: {password}')
            print(f'Database Name: {database_name}')
            print(f'Postgres Version: {postgres_version}')

            # Dictionary mapping tool names to their corresponding branch names
            tool_branch_mapping = {
                "Fluetd": "fluentd",
                "Rsyslog": "rsyslog",
                "Filebeat": "filebeat",
                "Graylog": "graylog",
            }

            # Identify which tools the user has selected
            for tool, branch_name in tool_branch_mapping.items():
                if form_data.get(tool):
                    selected_tools.append(branch_name)

            request.session['selected_tools_count'] = len(selected_tools)
            # Replace these variables with your actual GitLab project ID and private token
            project_id = "29"
            private_token = "glpat-8xMVT61BBXVx7Cz_Si8K"
            base_url = "https://gitlab-ce.os3.com/api/v4/"
            headers = {"PRIVATE-TOKEN": private_token}

            pipeline_names = []

            for branch_name in selected_tools:
                pipeline_names.append(branch_name)
                trigger_branch(base_url, project_id, headers, branch_name)

        except CustomBranchNotFoundError:
            return render(request, 'custom_error_page.html', {'error_message': "Specified branch does not exist."})

        return render(request, 'result.html', {'message': 'Installation in Progress', 'pipeline_names': pipeline_names})
    else:
        return render(request, 'trigger_pipeline.html')

def trigger_branch(base_url, project_id, headers, branch_name):
    data = {
        "ref": branch_name,
    }
    response = requests.post(base_url + f"projects/{project_id}/pipeline", headers=headers, json=data, verify=False)

    try:
        response.raise_for_status()  # This will raise an exception if response status code is not 2xx
    except requests.exceptions.HTTPError as e:
        if response.status_code == 400 and "Reference not found" in response.json().get('message', {}).get('base', []):
            raise CustomBranchNotFoundError("The specified branch does not exist.")
        else:
            raise ValueError(f"Error triggering pipeline for branch '{branch_name}': {e}")

def get_latest_pipeline_statuses(base_url, project_id, headers, count=2):
    response = requests.get(base_url + f"projects/{project_id}/pipelines", headers=headers, verify=False)

    if response.status_code != 200:
        raise ValueError(f"Error fetching pipelines: {response.status_code}, {response.json()}")

    pipelines = response.json()
    if not pipelines:
        return []

    latest_pipelines = pipelines[:count]
    latest_statuses = []

    for pipeline in latest_pipelines:
        latest_status = pipeline['status']
        latest_statuses.append({"id": pipeline['id'], "status": latest_status})

    return latest_statuses

def get_latest_pipeline_artifacts(base_url, project_id, headers, pipeline_id):
    response = requests.get(base_url + f"projects/{project_id}/pipelines/{pipeline_id}/jobs", headers=headers, verify=False)

    if response.status_code != 200:
        raise ValueError(f"Error fetching pipeline jobs: {response.status_code}, {response.json()}")

    jobs = response.json()
    artifacts = []
    # customer_name = request.POST.get('customer_name', '')
    for job in jobs:
        response = requests.get(base_url + f"projects/{project_id}/jobs/{job['id']}/artifacts", headers=headers, verify=False)
        if response.status_code == 200:
            with zipfile.ZipFile(io.BytesIO(response.content), 'r') as zip_file:
                # Modify the following to fetch the required artifacts
                required_artifacts = ['ip.txt', 'info.txt']
                for artifact_name in required_artifacts:
                    if artifact_name in zip_file.namelist():
                        content = zip_file.read(artifact_name).decode('utf-8')
                        artifacts.append({"filename": artifact_name, "content": content})

                        # Check if an artifact with the same filename and pipeline ID already exists
                        existing_artifact = PipelineArtifact.objects.filter(
                            pipeline_id=pipeline_id,
                            filename=artifact_name
                        ).first()

                        if existing_artifact:
                            # Update the content of the existing artifact
                            existing_artifact.content = content
                            existing_artifact.customer = customer_instance
                            existing_artifact.save()
                        else:
                            # Create a new PipelineArtifact instance and save it to the database
                            artifact = PipelineArtifact(
                                pipeline_id=pipeline_id,
                                filename=artifact_name,
                                content=content,
                            )
                            artifact.save()

    return artifacts

def display_artifacts(request):
    # Retrieve all saved artifacts from the database
    artifacts = PipelineArtifact.objects.all()

    # Pass the artifacts to the template
    context = {'artifacts': artifacts}

    return render(request, 'display_artifacts.html', context)

def get_latest_pipeline_statuses_and_artifacts(request):
    # Replace these variables with your actual GitLab project ID and private token
    project_id = "29"
    private_token = "glpat-8xMVT61BBXVx7Cz_Si8K"
    base_url = "https://gitlab-ce.os3.com/api/v4/"
    headers = {"PRIVATE-TOKEN": private_token}
    pipeline_count = request.session.get('selected_tools_count', 0)
    print(pipeline_count)

    # Get the statuses of the latest pipelines
    latest_pipeline_statuses = get_latest_pipeline_statuses(base_url, project_id, headers, pipeline_count)

    # Get the artifacts for each of the latest pipelines
    all_artifacts = []
    for pipeline_status in latest_pipeline_statuses:
        pipeline_id = pipeline_status["id"]
        artifacts = get_latest_pipeline_artifacts(base_url, project_id, headers, pipeline_id)
        all_artifacts.append({"status": pipeline_status["status"], "artifacts": artifacts})

    return JsonResponse({"pipelines": all_artifacts})
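What I am thinking of doing is changing trigger_branch so it also sends the form values as pipeline variables. This is an untested sketch; trigger_branch_with_variables and ci_variables are just my own working names:

def trigger_branch_with_variables(base_url, project_id, headers, branch_name, ci_variables):
    # ci_variables is a plain dict such as {"username": "...", "password": "..."}.
    # The Create Pipeline endpoint expects a list of {"key": ..., "value": ...} hashes.
    data = {
        "ref": branch_name,
        "variables": [{"key": key, "value": value} for key, value in ci_variables.items()],
    }
    response = requests.post(base_url + f"projects/{project_id}/pipeline", headers=headers, json=data, verify=False)
    response.raise_for_status()
    return response.json()  # should contain the new pipeline's id and status

The loop in trigger_pipeline_view would then become something like:

for branch_name in selected_tools:
    pipeline_names.append(branch_name)
    trigger_branch_with_variables(base_url, project_id, headers, branch_name, {
        "username": username,
        "password": password,
        "database_name": database_name,
        "postgres_version": postgres_version,
    })

Since the create-pipeline response already includes the new pipeline's id and status, I could probably also record those directly instead of reading them back from the latest-pipelines list, but the variables part is what I mainly want to get right.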

To sum up: I want to pass the values of username, password, database_name and postgres_version into the gitlab-ci.yml variables from my Python code.
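I have also seen pipeline trigger tokens in the documentation; as far as I can tell, the trigger endpoint accepts variables as form fields, so this might be an alternative to using my personal access token (the trigger token below is a placeholder, which I believe is created under Settings > CI/CD > Pipeline trigger tokens):

import requests

base_url = "https://gitlab-ce.os3.com/api/v4/"
project_id = "29"

# Untested alternative: use a pipeline trigger token instead of PRIVATE-TOKEN.
data = {
    "token": "<my-trigger-token>",  # placeholder
    "ref": "main",
    "variables[username]": "postgres_admin",
    "variables[password]": "secret",
    "variables[database_name]": "customers",
    "variables[postgres_version]": "15",
}
response = requests.post(base_url + f"projects/{project_id}/trigger/pipeline", data=data, verify=False)
response.raise_for_status()

Is the variables parameter of the Create Pipeline API the right way to do this from Python, or would the trigger token route (or something else entirely) be better? Any pointers would be appreciated, thanks!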
