Compare commits

1 Commit

Author SHA1 Message Date
da8cbc96b5 fix(jenkins): new docker host rootless
All checks were successful
Portfolio/pipeline/head This commit looks good
2026-01-27 19:13:34 +00:00

71
Jenkinsfile vendored
View File

@@ -1,8 +1,9 @@
 pipeline {
     agent any
     environment {
-        DOCKER_HOST = "unix:///var/run/docker.sock"
+        DOCKER_HOST = "unix:///run/user/1001/docker.sock"
         APP_VERSION = "${env.BRANCH_NAME}-${env.BUILD_NUMBER}"
+        DOCKER_CONFIG = "${env.WORKSPACE}/.docker"
     }
     stages {
         stage('Build') {
@@ -16,35 +17,35 @@ pipeline {
                 sh "docker compose build"
             }
         }
-        stage('Lighthouse Audit') {
-            steps {
-                // Create volumes but no run with safety
-                sh "docker rm -f audit-tmp lighthouse-audit || true"
-                sh "docker compose --profile audit create --no-build --remove-orphans lighthouse-audit"
-                // Extract astro data from temp container to tmp-dist folder
-                sh "docker create --name audit-tmp portfolio:${env.APP_VERSION}"
-                sh "mkdir -p ./tmp-dist"
-                sh "docker cp audit-tmp:/usr/share/caddy/. ./tmp-dist"
-                sh "docker rm -f audit-tmp"
-                // Inject data from tmp-dist and config to audit container
-                sh "docker cp ./tmp-dist/. lighthouse-audit:/app/dist/"
-                sh "docker cp .lighthouserc.cjs lighthouse-audit:/app/dist/audit-config.cjs"
-                sh "rm -rf ./tmp-dist"
-                // Now run
-                sh "docker start -a lighthouse-audit"
-                // Get reports
-                sh "mkdir -p ./.lighthouseci"
-                sh "docker cp lighthouse-audit:/app/.lighthouseci/. ./.lighthouseci/"
-                // Clean
-                sh "docker rm -f lighthouse-audit"
-            }
-        }
+        // stage('Lighthouse Audit') {
+        //     steps {
+        //         // Create volumes but no run with safety
+        //         sh "docker rm -f audit-tmp lighthouse-audit || true"
+        //         sh "docker compose --profile audit create --no-build --remove-orphans lighthouse-audit"
+        //         // Extract astro data from temp container to tmp-dist folder
+        //         sh "docker create --name audit-tmp portfolio:${env.APP_VERSION}"
+        //         sh "mkdir -p ./tmp-dist"
+        //         sh "docker cp audit-tmp:/usr/share/caddy/. ./tmp-dist"
+        //         sh "docker rm -f audit-tmp"
+        //         // Inject data from tmp-dist and config to audit container
+        //         sh "docker cp ./tmp-dist/. lighthouse-audit:/app/dist/"
+        //         sh "docker cp .lighthouserc.cjs lighthouse-audit:/app/dist/audit-config.cjs"
+        //         sh "rm -rf ./tmp-dist"
+        //         // Now run
+        //         sh "docker start -a lighthouse-audit"
+        //         // Get reports
+        //         sh "mkdir -p ./.lighthouseci"
+        //         sh "docker cp lighthouse-audit:/app/.lighthouseci/. ./.lighthouseci/"
+        //         // Clean
+        //         sh "docker rm -f lighthouse-audit"
+        //     }
+        // }
         stage('Deploy') {
             // Deploy only master branch
             when {
@@ -56,24 +57,20 @@ pipeline {
                 // Update caddy-reverse-proxy cache
                 sh "docker exec caddy-reverse-proxy caddy reload --config /etc/caddy/Caddyfile"
-                // Keep updated docker-compose.yml and Caddyfile for safety
-                sh "cp docker-compose.yml /backup-portfolio/docker-compose.yml"
-                sh "docker cp portfolio:/etc/caddy/Caddyfile /backup-portfolio/Caddyfile"
             }
         }
     }
     post {
         always {
-            echo "Publish Lighthouse audit..."
-            publishHTML([
-                allowMissing: true,
-                alwaysLinkToLastBuild: true,
-                keepAll: false,
-                reportDir: '.lighthouseci',
-                reportFiles: '*.html',
-                reportName: 'Lighthouse Report'
-            ])
+            // echo "Publish Lighthouse audit..."
+            // publishHTML([
+            //     allowMissing: true,
+            //     alwaysLinkToLastBuild: true,
+            //     keepAll: false,
+            //     reportDir: '.lighthouseci',
+            //     reportFiles: '*.html',
+            //     reportName: 'Lighthouse Report'
+            // ])
             echo "Clean unused image"
             sh "docker image prune -f"
         }