# Workflow: Docker metadata, image attestation, and SBOM generation (#308)
---
name: Docker CI/CD with Bun and Snyk

on:
  push:
    branches: ["master"]
  pull_request:
    branches: ["master"]

permissions:
  contents: read
  security-events: write
  packages: write

env:
  DOCKER_BUILDKIT: "1"
  COMPOSE_DOCKER_CLI_BUILD: "1"
  BUILDX_NO_DEFAULT_LOAD: "true"
  BUILDKIT_INLINE_CACHE: "1"
  BUILDX_CACHE_TTL: "168h"
  COSIGN_VERSION: "v2.2.2"
  SYFT_VERSION: "v1.0.0"
  COSIGN_YES: "true" # Auto-confirm cosign operations
  COSIGN_EXPERIMENTAL: "true" # Enable experimental features

jobs:
  build-and-deploy:
    runs-on: self-hosted
    strategy:
      matrix:
        platform:
          - linux/amd64
          - linux/arm64
        include:
          - platform: linux/amd64
            platform-name: amd64
          - platform: linux/arm64
            platform-name: arm64
    steps:
      - uses: actions/checkout@v4

      - name: Install verification tools
        shell: bash
        run: |
          # Create directories first
          mkdir -p "${GITHUB_WORKSPACE}/bin"
          mkdir -p "${GITHUB_WORKSPACE}/tmp"
          cd "${GITHUB_WORKSPACE}/tmp"
          # Detect the runner architecture so downloaded binaries match the
          # host (previously cosign was hard-coded to arm64 and syft to
          # amd64, so one of them was always wrong on any given runner).
          case "$(uname -m)" in
            x86_64) ARCH="amd64" ;;
            aarch64 | arm64) ARCH="arm64" ;;
            *) echo "Unsupported runner architecture: $(uname -m)" >&2; exit 1 ;;
          esac
          # Install cosign, pinned to COSIGN_VERSION (was "latest", which
          # ignored the version pinned in the workflow env)
          curl -fsSLo cosign "https://github.com/sigstore/cosign/releases/download/${COSIGN_VERSION}/cosign-linux-${ARCH}"
          chmod +x cosign
          mv cosign "${GITHUB_WORKSPACE}/bin/"
          # Install syft, pinned to SYFT_VERSION (was resolved via the
          # GitHub API at run time, which is rate-limited and unpinned)
          curl -fsSLo syft.tar.gz "https://github.com/anchore/syft/releases/download/${SYFT_VERSION}/syft_${SYFT_VERSION#v}_linux_${ARCH}.tar.gz"
          tar xzf syft.tar.gz syft
          mv syft "${GITHUB_WORKSPACE}/bin/"
          # Make the tools available to later steps
          echo "${GITHUB_WORKSPACE}/bin" >> "$GITHUB_PATH"
          # Cleanup
          cd "${GITHUB_WORKSPACE}"
          rm -rf "${GITHUB_WORKSPACE}/tmp"
          # Verify installations; fail fast on a corrupt download instead of
          # masking the error with "|| true"
          "${GITHUB_WORKSPACE}/bin/cosign" version
          "${GITHUB_WORKSPACE}/bin/syft" --version

      - name: Install additional security tools
        run: |
          # jq is required by the metadata/manifest inspection steps below
          if ! command -v jq &> /dev/null; then
            sudo apt-get update && sudo apt-get install -y jq
          fi

      # Initial setup
      - name: Set build timestamp
        id: timestamp
        run: echo "BUILD_TIMESTAMP=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> "$GITHUB_ENV"

      - name: Setup environment files
        env:
          ENV_PRODUCTION: ${{ secrets.ENV_PRODUCTION }}
        run: |
          echo "$ENV_PRODUCTION" > .env.production
          cp .env.production .env

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: docker.io/zx8086/capella-document-search
          tags: |
            type=raw,value=latest,enable={{is_default_branch}}
            type=sha,format=long
            type=ref,event=branch
            type=ref,event=tag

      # Platform and runtime setup
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
        with:
          platforms: linux/amd64,linux/arm64

      - name: Set up Bun
        uses: oven-sh/setup-bun@v1
        with:
          bun-version: latest

      - name: Cache Bun dependencies
        uses: actions/cache@v3
        with:
          path: |
            ~/.bun/install/cache
            node_modules
          key: ${{ runner.os }}-bun-${{ hashFiles('**/bun.lockb', '**/package.json') }}
          restore-keys: |
            ${{ runner.os }}-bun-

      # Security scanning section
      - name: Run Snyk code scan
        continue-on-error: true
        env:
          SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
        run: |
          bun install
          bun run snyk test --file=package.json --sarif-file-output=snyk.sarif --severity-threshold=high
          bun run snyk monitor --file=package.json

      - name: Upload Snyk code scan results
        if: hashFiles('snyk.sarif') != ''
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: snyk.sarif
          category: snyk-code

      # Docker build setup
      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          platforms: linux/amd64,linux/arm64
          driver-opts: |
            image=moby/buildkit:latest

      # Build and push section
      # NOTE(review): both matrix jobs write the same :buildcache ref with
      # mode=max, so the last writer wins. Consider per-platform cache refs
      # (e.g. buildcache-${{ matrix.platform-name }}) — confirm with owners.
      - name: Build and push Docker image
        uses: docker/build-push-action@v6
        id: docker_build
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          platforms: ${{ matrix.platform }}
          provenance: true
          sbom: true
          cache-from: |
            type=registry,ref=docker.io/zx8086/capella-document-search:buildcache
          cache-to: |
            type=registry,ref=docker.io/zx8086/capella-document-search:buildcache,mode=max

      # Tagging and verification
      # NOTE(review): each matrix job re-points :latest at its own
      # single-platform digest — the jobs race and the final :latest is
      # whichever job finished last. A multi-arch manifest is likely intended.
      - name: Push image with digest
        if: github.event_name != 'pull_request'
        run: |
          DIGEST="${{ steps.docker_build.outputs.digest }}"
          docker buildx imagetools create \
            --tag docker.io/zx8086/capella-document-search:latest \
            --tag docker.io/zx8086/capella-document-search:${{ github.sha }} \
            "docker.io/zx8086/capella-document-search@${DIGEST}"

      - name: Validate image metadata
        if: github.event_name != 'pull_request'
        run: |
          IMAGE_REF="docker.io/zx8086/capella-document-search:latest"
          echo "Validating image metadata..."
          docker buildx imagetools inspect "${IMAGE_REF}" --format '{{json .}}' | jq .

      - name: Verify image manifest
        if: github.event_name != 'pull_request'
        run: |
          echo "Checking manifest..."
          MANIFEST=$(docker buildx imagetools inspect docker.io/zx8086/capella-document-search:latest --raw)
          echo "$MANIFEST" | jq .

      - name: Verify image attestations
        if: github.event_name != 'pull_request'
        run: |
          # Capture verification and provenance output to files so the
          # artifact upload below actually has content (previously
          # attestation.json/provenance.json were never created by any step).
          cosign verify-attestation \
            --type slsaprovenance \
            --certificate-identity-regexp=".*" \
            --certificate-oidc-issuer="https://token.actions.githubusercontent.com" \
            docker.io/zx8086/capella-document-search:latest > attestation.json
          docker buildx imagetools inspect docker.io/zx8086/capella-document-search:latest \
            --format '{{json .Provenance}}' > provenance.json

      - name: Upload attestation results
        if: github.event_name != 'pull_request'
        uses: actions/upload-artifact@v4
        with:
          name: attestation-results-${{ matrix.platform-name }}
          path: |
            attestation.json
            provenance.json
          retention-days: 90
          if-no-files-found: warn

      - name: Verify SBOM
        if: github.event_name != 'pull_request'
        run: |
          mkdir -p sbom-output
          syft docker.io/zx8086/capella-document-search:latest \
            -o json=sbom-output/syft-sbom.json \
            -o spdx-json=sbom-output/spdx-sbom.json \
            -o cyclonedx-json=sbom-output/cyclonedx-sbom.json

      - name: Upload SBOM Files
        if: github.event_name != 'pull_request'
        uses: actions/upload-artifact@v4
        with:
          name: sbom-files-${{ matrix.platform-name }}
          path: sbom-output/
          retention-days: 90

      # Testing section
      # NOTE(review): both matrix jobs share the container name and host port
      # 3000; if they land on the same runner concurrently they will collide.
      # Consider suffixing both with matrix.platform-name.
      - name: Test container
        if: github.event_name != 'pull_request'
        run: |
          # Stop existing containers using port 3000
          docker ps -q --filter publish=3000 | xargs -r docker stop
          docker rm -f capella-search-test 2>/dev/null || true
          # Run container with properly formatted environment variables
          docker run -d \
            --name capella-search-test \
            -p 3000:3000 \
            --env-file .env \
            -e NODE_ENV=production \
            -e BUILD_VERSION=${{ github.ref_name || '0.0.1' }} \
            -e COMMIT_HASH=${{ github.sha }} \
            -e BUILD_DATE=${{ env.BUILD_TIMESTAMP }} \
            -e ENABLE_FILE_LOGGING=false \
            -e LOG_MAX_FILES=7d \
            -e LOG_MAX_SIZE=10m \
            -e LOG_LEVEL=info \
            zx8086/capella-document-search:latest
          # Give the app time to boot before checking its state
          sleep 15
          if ! docker ps | grep -q capella-search-test; then
            echo "Container failed to start. Showing logs:"
            docker logs capella-search-test
            exit 1
          fi
          # Capture metrics for the build summary step
          CONTAINER_STATUS=$(docker inspect --format='{{.State.Status}}' capella-search-test)
          echo "CONTAINER_STATUS=${CONTAINER_STATUS}" >> "$GITHUB_ENV"
          echo "CONTAINER_START_TIME=$(docker inspect --format='{{.State.StartedAt}}' capella-search-test)" >> "$GITHUB_ENV"

      # Container security scanning
      - name: Run Snyk container scan
        continue-on-error: true
        env:
          SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
        run: |
          bun install
          # The duplicate source-code scan that rewrote snyk.sarif here was
          # removed: the code scan already ran and uploaded its results above.
          bun run snyk container test zx8086/capella-document-search:latest \
            --file=Dockerfile \
            --severity-threshold=high \
            --sarif-file-output=snyk-docker.sarif || true
          if [[ "${{ github.ref }}" == "refs/heads/master" ]]; then
            # Monitor the platform this matrix job built (was hard-coded to
            # linux/arm64 for both jobs)
            bun run snyk container monitor zx8086/capella-document-search:latest \
              --file=Dockerfile \
              --platform=${{ matrix.platform }} || true
          fi

      - name: Upload Docker Snyk scan results
        if: always() && hashFiles('snyk-docker.sarif') != ''
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: snyk-docker.sarif
          category: snyk-docker

      # Build Summary
      - name: Docker Build Summary
        if: always()
        run: |
          {
            echo "### 🐳 Docker Build Summary"
            echo ""
            echo "#### 📋 Build Overview"
            echo "| Metric | Value |"
            echo "|--------|-------|"
            echo "| 🔄 Build ID | \`${GITHUB_SHA::7}\` |"
            echo "| 📦 Image | capella-document-search |"
            echo "| ⚡ Status | ${CONTAINER_STATUS:-N/A} |"
            # Container status details (STATS was previously collected but
            # never printed)
            if [ "${CONTAINER_STATUS:-}" = "running" ]; then
              STATS=$(docker stats capella-search-test --no-stream --format "{{.CPUPerc}},{{.MemUsage}},{{.NetIO}},{{.BlockIO}}")
              IFS=',' read -r CPU MEM NET BLOCK <<< "$STATS"
              echo "| 🟢 Status | ✅ Running |"
              echo "| 🕐 Start Time | ${CONTAINER_START_TIME} |"
              echo "| 🔥 CPU | ${CPU} |"
              echo "| 🧠 Memory | ${MEM} |"
              echo "| 🌐 Network I/O | ${NET} |"
              echo "| 💾 Block I/O | ${BLOCK} |"
            else
              echo "| 🟢 Status | ⚠️ Not Running |"
            fi
          } >> "$GITHUB_STEP_SUMMARY"

      # Cleanup
      - name: Cleanup
        if: always()
        run: |
          rm -f .env .env.production
          rm -f snyk.sarif snyk-docker.sarif
          rm -f attestation.json provenance.json
          rm -rf sbom-output
          docker container rm -f capella-search-test || true
          docker image prune -f
          docker volume prune -f
          docker builder prune -a -f
          rm -rf node_modules
          rm -rf security-artifacts
          # Tools were installed into the workspace bin, not /usr/local/bin
          rm -rf "${GITHUB_WORKSPACE}/bin"
          rm -rf ~/.sigstore
          echo "Cleanup completed"