# Cloud Builds #320
# NOTE: this file was recovered from a GitHub web view; review it in an editor
# that reveals hidden/bidirectional Unicode characters before trusting it.
name: Docker CI/CD with Bun and Snyk

on:
  push:
    branches: ["master"]
  pull_request:
    branches: ["master"]

permissions:
  contents: read
  security-events: write
  packages: write

env:
  DOCKER_BUILDKIT: 1
  COMPOSE_DOCKER_CLI_BUILD: 1
  # Quoted so consumers receive the string "true" rather than a YAML boolean.
  BUILDX_NO_DEFAULT_LOAD: "true"
  BUILDKIT_INLINE_CACHE: 1
  BUILDX_CACHE_TTL: "168h"
  COSIGN_VERSION: "v2.2.2"
  SYFT_VERSION: "v1.0.0"
  COSIGN_YES: "true"
  COSIGN_EXPERIMENTAL: "true"
  BUILDKIT_PROGRESS: plain
  # NOTE(review): exposing GITHUB_TOKEN as a job-wide env var widens its blast
  # radius to every step/process — confirm a step actually needs
  # ACTIONS_RUNTIME_TOKEN and scope it to that step if possible.
  ACTIONS_RUNTIME_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  DOCKER_BUILDX_CACHE_DIR: /home/pi/.buildx-cache
jobs:
  build-and-deploy:
    runs-on: self-hosted
    strategy:
      matrix:
        platform:
          - linux/amd64
          - linux/arm64
        include:
          - platform: linux/amd64
            platform-name: amd64
          - platform: linux/arm64
            platform-name: arm64
      fail-fast: false
      # Serialise the matrix legs — both run on the same self-hosted runner.
      max-parallel: 1
    steps:
      - uses: actions/checkout@v4

      - name: Install verification tools
        shell: bash
        run: |
          # Create directories first
          mkdir -p "${GITHUB_WORKSPACE}/bin"
          mkdir -p "${GITHUB_WORKSPACE}/tmp"
          cd "${GITHUB_WORKSPACE}/tmp"
          # Install cosign
          curl -Lo cosign https://github.com/sigstore/cosign/releases/latest/download/cosign-linux-${{ matrix.platform-name }}
          chmod +x cosign
          mv cosign "${GITHUB_WORKSPACE}/bin/"
          # Install syft
          SYFT_RELEASE=$(curl -s https://api.github.com/repos/anchore/syft/releases/latest | grep -o '"tag_name": ".*"' | cut -d'"' -f4)
          curl -Lo syft.tar.gz "https://github.com/anchore/syft/releases/download/${SYFT_RELEASE}/syft_${SYFT_RELEASE#v}_linux_${{ matrix.platform-name }}.tar.gz"
          tar xzf syft.tar.gz syft
          mv syft "${GITHUB_WORKSPACE}/bin/"
          # Add to PATH
          echo "${GITHUB_WORKSPACE}/bin" >> $GITHUB_PATH
          # Cleanup
          cd "${GITHUB_WORKSPACE}"
          rm -rf "${GITHUB_WORKSPACE}/tmp"
          # Verify installations
          "${GITHUB_WORKSPACE}/bin/cosign" version || true
          "${GITHUB_WORKSPACE}/bin/syft" --version || true

      - name: Install additional security tools
        run: |
          # Install jq if not present
          if ! command -v jq &> /dev/null; then
            sudo apt-get update && sudo apt-get install -y jq
          fi

      # Initial Setup
      - name: Set build timestamp
        id: timestamp
        run: echo "BUILD_TIMESTAMP=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_ENV

      - name: Setup environment files
        env:
          ENV_PRODUCTION: ${{ secrets.ENV_PRODUCTION }}
        run: |
          echo "$ENV_PRODUCTION" > .env.production
          cp .env.production .env

      - name: Set metadata variables
        run: |
          # Text after "=" in $GITHUB_ENV is taken verbatim, so do NOT wrap the
          # value in quotes — the quotes would become part of the label value.
          echo "IMAGE_DESCRIPTION=Capella Document Search is a web application that allows you to search for documents within Capella scopes and collections. Built with Bun, Svelte, and ELysiaJS." >> $GITHUB_ENV
          echo "IMAGE_LICENSE=MIT" >> $GITHUB_ENV

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: docker.io/zx8086/capella-document-search
          # Shell-style ${VAR} / ${VAR:-default} is NOT expanded inside `with:`
          # inputs; only ${{ ... }} expressions are. Without this fix the labels
          # would contain the literal text "${GITHUB_SHA}" etc.
          labels: |
            org.opencontainers.image.title=capella-document-search
            org.opencontainers.image.description=${{ env.IMAGE_DESCRIPTION }}
            org.opencontainers.image.created=${{ env.BUILD_TIMESTAMP }}
            org.opencontainers.image.version=${{ github.ref_name || '2.0.0' }}
            org.opencontainers.image.revision=${{ github.sha }}
            org.opencontainers.image.authors=Simon Owusu <[email protected]>
            org.opencontainers.image.vendor=Siobytes
            org.opencontainers.image.licenses=${{ env.IMAGE_LICENSE }}
            org.opencontainers.image.url=https://github.com/zx8086/capella-document-search
            org.opencontainers.image.source=https://github.com/zx8086/capella-document-search
            org.opencontainers.image.documentation=https://github.com/zx8086/capella-document-search/README.md
            org.opencontainers.image.base.name=oven/bun:canary-alpine
            org.opencontainers.image.source.repository=github.com/zx8086/capella-document-search
            org.opencontainers.image.source.branch=${{ github.ref_name || 'master' }}
            org.opencontainers.image.source.commit=${{ github.sha }}
            com.capellaql.maintainer=Simon Owusu <[email protected]>
            com.capellaql.release-date=${{ env.BUILD_TIMESTAMP }}
            com.capellaql.version.is-production=true
            org.opencontainers.image.ref.name=${{ github.ref_name || 'master' }}
            org.opencontainers.image.version.semver=${{ github.ref_name || '2.0.0' }}
            org.opencontainers.image.version.major=2
            org.opencontainers.image.version.minor=0
            org.opencontainers.image.version.patch=0
          tags: |
            type=raw,value=latest,enable={{is_default_branch}}
            type=sha,format=long
            type=ref,event=branch
            type=ref,event=tag

      # Platform and runtime setup
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
        with:
          platforms: linux/amd64,linux/arm64

      - name: Set up Bun
        uses: oven-sh/setup-bun@v1
        with:
          bun-version: latest

      - name: Cache Bun dependencies
        uses: actions/cache@v3
        with:
          path: |
            ~/.bun/install/cache
            node_modules
          key: ${{ runner.os }}-bun-${{ hashFiles('**/bun.lockb', '**/package.json') }}
          restore-keys: |
            ${{ runner.os }}-bun-

      # Security scanning section
      - name: Run Snyk code scan
        continue-on-error: true
        env:
          SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
        run: |
          bun install
          bun run snyk test --file=package.json --sarif-file-output=snyk.sarif --severity-threshold=high
          bun run snyk monitor --file=package.json

      - name: Upload Snyk code scan results
        if: hashFiles('snyk.sarif') != ''
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: snyk.sarif
          category: snyk-code

      # Docker build setup
      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: cloud
          endpoint: "zx8086/cldbuild"
          install: true

      # Build and push section
      - name: Build and push Docker image
        id: docker_build
        uses: docker/build-push-action@v6
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          platforms: ${{ matrix.platform }}
          cache-from: |
            type=registry,ref=docker.io/zx8086/capella-document-search:buildcache-${{ matrix.platform-name }}
          cache-to: |
            type=registry,ref=docker.io/zx8086/capella-document-search:buildcache-${{ matrix.platform-name }},mode=max
          outputs: ${{ github.event_name == 'pull_request' && 'type=cacheonly' || 'type=registry' }}
          provenance: false

      - name: Push image with digest
        if: github.event_name != 'pull_request'
        run: |
          echo "Waiting for image to be available..."
          sleep 15 # Increased wait time
          # Try multiple methods to get the digest
          echo "Attempting to get digest..."
          # Method 1: Direct inspect
          DIGEST=$(docker buildx imagetools inspect docker.io/zx8086/capella-document-search:latest --raw 2>/dev/null | jq -r '.manifests[0].digest' 2>/dev/null || echo "")
          # Method 2: Pull and inspect if Method 1 fails
          if [ -z "$DIGEST" ] || [ "$DIGEST" = "null" ]; then
            echo "Trying alternative method..."
            docker pull docker.io/zx8086/capella-document-search:latest 2>/dev/null
            DIGEST=$(docker inspect docker.io/zx8086/capella-document-search:latest --format='{{index .RepoDigests 0}}' 2>/dev/null | cut -d'@' -f2 || echo "")
          fi
          # Verify digest before using
          if [ -n "$DIGEST" ] && [ "$DIGEST" != "null" ]; then
            echo "Image digest: $DIGEST"
            echo "Creating tagged images..."
            docker buildx imagetools create \
              --tag docker.io/zx8086/capella-document-search:latest \
              --tag docker.io/zx8086/capella-document-search:${{ github.sha }} \
              docker.io/zx8086/capella-document-search@${DIGEST}
          else
            echo "Warning: Could not obtain valid digest, skipping digest push"
            # List available images for debugging
            echo "Available images:"
            docker images | grep capella-document-search
            exit 0
          fi

      - name: Validate image metadata
        if: github.event_name != 'pull_request'
        run: |
          IMAGE_REF="docker.io/zx8086/capella-document-search:latest"
          echo "Validating image metadata..."
          docker buildx imagetools inspect ${IMAGE_REF} --format '{{json .}}' | jq .

      - name: Verify image manifest
        if: github.event_name != 'pull_request'
        run: |
          echo "Checking manifest..."
          MANIFEST=$(docker buildx imagetools inspect docker.io/zx8086/capella-document-search:latest --raw)
          echo "$MANIFEST" | jq .

      - name: Verify image attestations
        if: github.event_name != 'pull_request'
        continue-on-error: true
        run: |
          # Set results directory name first
          TIMESTAMP=$(date +%Y%m%d_%H%M%S)
          RESULTS_DIR="attestation-results-${TIMESTAMP}"
          echo "ATTESTATION_RESULTS_DIR=${RESULTS_DIR}" >> $GITHUB_ENV
          mkdir -p "${RESULTS_DIR}"
          # Get image digest and reference
          echo "Getting image digest..."
          IMAGE_REF="docker.io/zx8086/capella-document-search:latest"
          IMAGE_DIGEST=$(docker buildx imagetools inspect "${IMAGE_REF}" --raw | jq -r '.manifests[0].digest')
          IMAGE_WITH_DIGEST="${IMAGE_REF}@${IMAGE_DIGEST}"
          echo "Image reference: ${IMAGE_REF}"
          echo "Image digest: ${IMAGE_DIGEST}"
          # Additional verification methods
          echo "Attempting direct verification..."
          cosign verify-attestation \
            --certificate-identity-regexp=".*" \
            --certificate-oidc-issuer="https://token.actions.githubusercontent.com" \
            "${IMAGE_WITH_DIGEST}" > "${RESULTS_DIR}/direct-attestation.json" 2>/dev/null || true
          echo "Attempting SLSA verification..."
          cosign verify-attestation \
            --type slsaprovenance \
            --certificate-identity-regexp=".*" \
            --certificate-oidc-issuer="https://token.actions.githubusercontent.com" \
            "${IMAGE_WITH_DIGEST}" > "${RESULTS_DIR}/slsa-attestation.json" 2>/dev/null || true
          echo "Attempting predicate-less verification..."
          cosign verify-attestation \
            --certificate-identity-regexp=".*" \
            --certificate-oidc-issuer="https://token.actions.githubusercontent.com" \
            "${IMAGE_WITH_DIGEST}" > "${RESULTS_DIR}/predicate-attestation.json" 2>/dev/null || true
          # Download all attestations for analysis
          echo "Downloading attestations..."
          cosign download attestation "${IMAGE_WITH_DIGEST}" > "${RESULTS_DIR}/all-attestations.json" 2>/dev/null || true
          # Extract from manifest and save all data
          echo "Extracting from manifest..."
          docker buildx imagetools inspect "${IMAGE_WITH_DIGEST}" --raw > "${RESULTS_DIR}/manifest.json"
          jq '.manifests[].annotations | select(."org.opencontainers.image.attestations" != null)' \
            "${RESULTS_DIR}/manifest.json" > "${RESULTS_DIR}/manifest-attestations.json" 2>/dev/null || true
          # Check results
          VERIFICATION_SUCCESS=false
          for file in "${RESULTS_DIR}"/*.json; do
            if [ -s "$file" ] && jq -e . >/dev/null 2>&1 < "$file"; then
              VERIFICATION_SUCCESS=true
              echo "✅ Found valid data in $(basename "$file")"
            fi
          done
          # Generate comprehensive report
          {
            echo "### 📝 Attestation Verification Report"
            echo "- Image: ${IMAGE_REF}"
            echo "- Digest: ${IMAGE_DIGEST}"
            echo "- Timestamp: $(date -u)"
            echo ""
            echo "#### Verification Results:"
            for file in "${RESULTS_DIR}"/*.json; do
              if [ -s "$file" ]; then
                basename=$(basename "$file")
                size=$(wc -c < "$file")
                echo "- File: ${basename}"
                echo "  - Size: ${size} bytes"
                if jq -e . >/dev/null 2>&1 < "$file"; then
                  echo "  - Status: ✅ Valid JSON"
                  echo "  - Content Preview:"
                  jq -r 'try .predicateType // "No predicate type"' "$file" | head -n 1
                else
                  echo "  - Status: ⚠️ Invalid JSON"
                fi
              fi
            done
            echo ""
            echo "#### Overall Status:"
            if [ "$VERIFICATION_SUCCESS" = true ]; then
              echo "✅ Successfully found attestation data"
            else
              echo "⚠️ No attestations could be verified"
            fi
          } > "${RESULTS_DIR}/verification-report.md"
          # Set output directory for next step
          echo "VERIFICATION_SUCCESS=${VERIFICATION_SUCCESS}" >> $GITHUB_ENV

      - name: Upload attestation results
        if: github.event_name != 'pull_request'
        uses: actions/upload-artifact@v4
        with:
          name: attestation-results-${{ matrix.platform-name }}
          path: |
            ${{ env.ATTESTATION_RESULTS_DIR }}/*.json
            ${{ env.ATTESTATION_RESULTS_DIR }}/*.md
          retention-days: 90

      - name: Verify SBOM
        if: github.event_name != 'pull_request'
        run: |
          mkdir -p sbom-output
          syft docker.io/zx8086/capella-document-search:latest \
            -o json=sbom-output/syft-sbom.json \
            -o spdx-json=sbom-output/spdx-sbom.json \
            -o cyclonedx-json=sbom-output/cyclonedx-sbom.json

      - name: Upload SBOM Files
        if: github.event_name != 'pull_request'
        uses: actions/upload-artifact@v4
        with:
          name: sbom-files-${{ matrix.platform-name }}
          path: sbom-output/
          retention-days: 90

      # Testing section
      - name: Test container
        if: github.event_name != 'pull_request'
        run: |
          # Stop existing containers using port 3000
          docker ps -q --filter publish=3000 | xargs -r docker stop
          docker rm -f capella-search-test 2>/dev/null || true
          # Run the image built for this matrix leg (was hard-coded to
          # linux/amd64, which defeated the per-platform matrix).
          docker run -d \
            --platform ${{ matrix.platform }} \
            --name capella-search-test \
            -p 3000:3000 \
            --env-file .env \
            -e NODE_ENV=production \
            -e BUILD_VERSION=${{ github.ref_name || '0.0.1' }} \
            -e COMMIT_HASH=${{ github.sha }} \
            -e BUILD_DATE=${{ env.BUILD_TIMESTAMP }} \
            -e ENABLE_FILE_LOGGING=false \
            -e LOG_MAX_FILES=7d \
            -e LOG_MAX_SIZE=10m \
            -e LOG_LEVEL=info \
            zx8086/capella-document-search:latest
          # Add delay and better error handling
          sleep 15
          if ! docker ps | grep -q capella-search-test; then
            echo "Container failed to start. Showing logs:"
            docker logs capella-search-test
            exit 1
          fi
          # Capture metrics
          CONTAINER_STATUS=$(docker inspect --format='{{.State.Status}}' capella-search-test)
          echo "CONTAINER_STATUS=${CONTAINER_STATUS}" >> $GITHUB_ENV
          echo "CONTAINER_START_TIME=$(docker inspect --format='{{.State.StartedAt}}' capella-search-test)" >> $GITHUB_ENV

      # Container security scanning
      - name: Run Snyk container scan
        continue-on-error: true
        env:
          SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
        run: |
          # Install Snyk as a project dependency
          bun add snyk
          # Run Snyk container test for the current platform
          echo "Running Snyk scan for ${{ matrix.platform }}..."
          if [[ "${{ matrix.platform }}" == "linux/amd64" ]]; then
            bun run snyk container test docker.io/zx8086/capella-document-search:latest \
              --file=Dockerfile \
              --platform=linux/amd64 \
              --severity-threshold=high \
              --sarif-file-output=snyk-docker.sarif \
              --json > snyk-report.json || echo "Vulnerabilities found but continuing..."
            if [[ "${{ github.ref }}" == "refs/heads/master" ]]; then
              bun run snyk container monitor docker.io/zx8086/capella-document-search:latest \
                --file=Dockerfile \
                --platform=linux/amd64 || echo "Monitor command failed but continuing..."
            fi
          else
            echo "Skipping Snyk container scan for ${{ matrix.platform }} - only running on amd64"
          fi

      - name: Upload Docker Snyk scan results
        if: always() && hashFiles('snyk-docker.sarif') != ''
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: snyk-docker.sarif
          category: snyk-docker

      # Build Summary
      - name: Docker Build Summary
        if: always()
        run: |
          {
            echo "### 🐳 Docker Build Summary"
            echo ""
            echo "#### 📊 Build Overview"
            echo "| Metric | Value |"
            echo "|--------|-------|"
            echo "| 🆔 Build ID | \`${GITHUB_SHA::7}\` |"
            echo "| 📦 Image | capella-document-search |"
            echo "| ⚡ Status | ${CONTAINER_STATUS:-N/A} |"
            # Container Status
            if [ "${CONTAINER_STATUS:-}" = "running" ]; then
              STATS=$(docker stats capella-search-test --no-stream --format "{{.CPUPerc}},{{.MemUsage}},{{.NetIO}},{{.BlockIO}}")
              echo "| 🔄 Status | ✅ Running |"
              echo "| 🕒 Start Time | ${CONTAINER_START_TIME} |"
            else
              echo "| 🔄 Status | ⚠️ Not Running |"
            fi
          } >> $GITHUB_STEP_SUMMARY

      # Cleanup
      - name: Cleanup
        if: always()
        run: |
          rm -f .env .env.production
          rm -f snyk.sarif snyk-docker.sarif
          docker container rm -f capella-search-test || true
          docker image prune -f
          docker volume prune -f
          docker builder prune -a -f
          rm -rf node_modules
          rm -rf security-artifacts
          rm -rf /usr/local/bin/syft
          rm -rf ~/.sigstore
          echo "Cleanup completed"

      - name: Verify metadata
        continue-on-error: true
        env:
          # Pass the JSON through the environment instead of interpolating it
          # into a single-quoted shell string: a quote character inside the
          # metadata would otherwise break the script (shell injection risk).
          METADATA_JSON: ${{ steps.meta.outputs.json }}
        run: |
          echo "Verifying metadata output..."
          # Check if metadata environment variables are set
          REQUIRED_LABELS=(
            "org.opencontainers.image.description"
            "org.opencontainers.image.licenses"
            "org.opencontainers.image.title"
            "org.opencontainers.image.version"
            "org.opencontainers.image.revision"
          )
          if [ -n "$METADATA_JSON" ]; then
            echo "Processing metadata..."
            # Extract labels using jq
            for label in "${REQUIRED_LABELS[@]}"; do
              VALUE=$(echo "$METADATA_JSON" | jq -r ".labels[\"$label\"] // \"\"")
              if [ -z "$VALUE" ] || [ "$VALUE" = "null" ]; then
                echo "⚠️ Warning: Missing or empty metadata: $label"
              else
                echo "✅ $label: $VALUE"
              fi
            done
            # Print all available labels for debugging
            echo -e "\nAll available labels:"
            echo "$METADATA_JSON" | jq -r '.labels | keys[]' || echo "No labels found"
          else
            echo "⚠️ Warning: No metadata JSON output found"
            # Print environment variables for debugging
            echo -e "\nAvailable environment variables:"
            env | grep -i "DOCKER_METADATA" || echo "No metadata environment variables found"
          fi
          echo "Metadata verification completed"