Fix video player - remove spaces + encode url #387
name: Docker CI/CD with Bun and Snyk
on:
  push:
    branches: ["master"]
  pull_request:
    branches: ["master"]
permissions:
  contents: read
  security-events: write
  packages: write
env:
  DOCKER_BUILDKIT: 1
  COMPOSE_DOCKER_CLI_BUILD: 1
  BUILDX_NO_DEFAULT_LOAD: true
  BUILDKIT_INLINE_CACHE: 1
  BUILDX_CACHE_TTL: "168h"
  COSIGN_VERSION: "v2.2.2"
  SYFT_VERSION: "v1.0.0"
  COSIGN_YES: "true"
  COSIGN_EXPERIMENTAL: "true"
  BUILDKIT_PROGRESS: plain
  ACTIONS_RUNTIME_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  DOCKER_BUILDX_CACHE_DIR: /home/pi/.buildx-cache
jobs:
  build-and-deploy:
    runs-on: ${{ github.event_name == 'pull_request' && 'ubuntu-latest' || 'self-hosted' }}
    steps:
      - uses: actions/checkout@v4
      - name: Install verification tools
        shell: bash
        run: |
          # Create directories first
          mkdir -p "${GITHUB_WORKSPACE}/bin"
          mkdir -p "${GITHUB_WORKSPACE}/tmp"
          cd "${GITHUB_WORKSPACE}/tmp"
          # Install cosign
          curl -Lo cosign "https://github.com/sigstore/cosign/releases/latest/download/cosign-linux-amd64"
          chmod +x cosign
          mv cosign "${GITHUB_WORKSPACE}/bin/"
          # Install syft
          SYFT_RELEASE=$(curl -s https://api.github.com/repos/anchore/syft/releases/latest | grep -o '"tag_name": ".*"' | cut -d'"' -f4)
          curl -Lo syft.tar.gz "https://github.com/anchore/syft/releases/download/${SYFT_RELEASE}/syft_${SYFT_RELEASE#v}_linux_amd64.tar.gz"
          tar xzf syft.tar.gz syft
          mv syft "${GITHUB_WORKSPACE}/bin/"
          # Add to PATH
          echo "${GITHUB_WORKSPACE}/bin" >> $GITHUB_PATH
          # Cleanup
          cd "${GITHUB_WORKSPACE}"
          rm -rf "${GITHUB_WORKSPACE}/tmp"
          # Verify installations
          "${GITHUB_WORKSPACE}/bin/cosign" version || true
          "${GITHUB_WORKSPACE}/bin/syft" --version || true
      - name: Install additional security tools
        run: |
          # Install jq if not present
          if ! command -v jq &> /dev/null; then
            sudo apt-get update && sudo apt-get install -y jq
          fi
      - name: Set build timestamp
        id: timestamp
        run: echo "BUILD_TIMESTAMP=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_ENV
      - name: Setup environment files
        env:
          ENV_PRODUCTION: ${{ secrets.ENV_PRODUCTION }}
        run: |
          echo "$ENV_PRODUCTION" > .env.production
          cp .env.production .env
      - name: Set metadata variables
        run: |
          # Use quotes to properly handle values with special characters
          echo "IMAGE_DESCRIPTION=Capella Document Search is a web application that allows you to search for documents within Capella scopes and collections. Built with Bun, Svelte, and ElysiaJS." >> $GITHUB_ENV
          echo "IMAGE_LICENSE=MIT" >> $GITHUB_ENV
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: docker.io/zx8086/capella-document-search
          labels: |
            org.opencontainers.image.title=capella-document-search
            org.opencontainers.image.description=${{ env.IMAGE_DESCRIPTION }}
            org.opencontainers.image.created=${{ env.BUILD_TIMESTAMP }}
            org.opencontainers.image.version=${{ github.ref_name || '2.0.0' }}
            org.opencontainers.image.revision=${{ github.sha }}
            org.opencontainers.image.authors=Simon Owusu <[email protected]>
            org.opencontainers.image.vendor=Siobytes
            org.opencontainers.image.licenses=${{ env.IMAGE_LICENSE }}
            org.opencontainers.image.url=https://github.com/zx8086/capella-document-search
            org.opencontainers.image.source=https://github.com/zx8086/capella-document-search
            org.opencontainers.image.documentation=https://github.com/zx8086/capella-document-search/README.md
            org.opencontainers.image.base.name=oven/bun:canary-alpine
            org.opencontainers.image.source.repository=github.com/zx8086/capella-document-search
            org.opencontainers.image.source.branch=${{ github.ref_name || 'master' }}
            org.opencontainers.image.source.commit=${{ github.sha }}
            com.capellaql.maintainer=Simon Owusu <[email protected]>
            com.capellaql.release-date=${{ env.BUILD_TIMESTAMP }}
            com.capellaql.version.is-production=true
            org.opencontainers.image.ref.name=${{ github.ref_name || 'master' }}
            org.opencontainers.image.version.semver=${{ github.ref_name || '2.0.0' }}
            org.opencontainers.image.version.major=2
            org.opencontainers.image.version.minor=0
            org.opencontainers.image.version.patch=0
          tags: |
            type=raw,value=latest,enable={{is_default_branch}}
            type=raw,value=buildcache,enable=true
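      # The tags block above only produces "latest" (default branch) and "buildcache".
      # If per-commit or release tags are wanted as well, a possible addition using
      # standard docker/metadata-action tag rules (semver only fires on tag pushes;
      # this is an optional sketch, not something the workflow currently relies on):
      #
      #   type=sha,prefix=sha-
      #   type=semver,pattern={{version}}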
      # Platform and runtime setup
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
        with:
          platforms: linux/amd64,linux/arm64
      - name: Set up Bun
        uses: oven-sh/setup-bun@v1
        with:
          bun-version: 'latest'
      - name: Cache Bun dependencies
        uses: actions/cache@v3
        with:
          path: |
            ~/.bun/install/cache
            node_modules
          key: ${{ runner.os }}-bun-${{ hashFiles('**/bun.lockb', '**/package.json') }}
          restore-keys: |
            ${{ runner.os }}-bun-
      # Security scanning section
      - name: Run Snyk code scan
        continue-on-error: true
        env:
          SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
        run: |
          bun install
          bun run snyk test --file=package.json --sarif-file-output=snyk.sarif --severity-threshold=high
          bun run snyk monitor --file=package.json
      - name: Upload Snyk code scan results
        if: hashFiles('snyk.sarif') != ''
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: snyk.sarif
          category: snyk-code
      # Docker build setup
      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: cloud
          endpoint: "zx8086/cldbuild"
          install: true
      - name: Install dependencies and generate lockfile
        run: |
          bun install
          # Verify lockfile exists
          if [ -f "bun.lockb" ]; then
            echo "Lockfile generated successfully"
          else
            echo "Warning: Lockfile not generated"
          fi
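      # For stricter, reproducible CI installs, bun supports a frozen-lockfile mode
      # that fails instead of rewriting bun.lockb. A possible variant of the install
      # above (optional; not what this workflow currently does) would be:
      #
      #   bun install --frozen-lockfile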
      - name: Build and push Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          platforms: linux/amd64,linux/arm64
          cache-from: type=registry,ref=docker.io/zx8086/capella-document-search:buildcache
          cache-to: type=registry,ref=docker.io/zx8086/capella-document-search:buildcache,mode=max
          build-args: |
            NODE_ENV=production
            BUILD_VERSION=${{ github.ref_name || '0.0.1' }}
            COMMIT_HASH=${{ github.sha }}
            BUILD_DATE=${{ env.BUILD_TIMESTAMP }}
          outputs: ${{ github.event_name == 'pull_request' && 'type=cacheonly' || 'type=registry' }}
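      # The attestation and SBOM checks further down depend on provenance/SBOM data
      # being attached at build time. docker/build-push-action exposes inputs for
      # opting in explicitly; a possible addition to the step above (assumption:
      # the cloud builder honours these inputs) would be:
      #
      #   provenance: mode=max
      #   sbom: true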
      - name: Verify image
        if: github.event_name != 'pull_request'
        run: |
          # Wait for image to be available
          sleep 15
          # Verify the image and its architectures
          echo "Verifying image manifest..."
          docker buildx imagetools inspect docker.io/zx8086/capella-document-search:latest
      - name: Validate image metadata
        if: github.event_name != 'pull_request'
        run: |
          IMAGE_REF="docker.io/zx8086/capella-document-search:latest"
          echo "Validating image metadata..."
          docker buildx imagetools inspect ${IMAGE_REF} --format '{{json .}}' | jq .
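      # A possible extra check that lists each platform's digest from the raw OCI
      # index (sketch only; mirrors the --raw + jq usage in the attestation step below):
      #
      #   docker buildx imagetools inspect "${IMAGE_REF}" --raw \
      #     | jq -r '.manifests[] | "\(.platform.os)/\(.platform.architecture) \(.digest)"'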
      - name: Verify image attestations
        if: github.event_name != 'pull_request'
        continue-on-error: true
        run: |
          # Set results directory name first
          TIMESTAMP=$(date +%Y%m%d_%H%M%S)
          RESULTS_DIR="attestation-results-${TIMESTAMP}"
          echo "ATTESTATION_RESULTS_DIR=${RESULTS_DIR}" >> $GITHUB_ENV
          mkdir -p "${RESULTS_DIR}"
          # Get image digest and reference
          echo "Getting image digest..."
          IMAGE_REF="docker.io/zx8086/capella-document-search:latest"
          IMAGE_DIGEST=$(docker buildx imagetools inspect "${IMAGE_REF}" --raw | jq -r '.manifests[0].digest')
          IMAGE_WITH_DIGEST="${IMAGE_REF}@${IMAGE_DIGEST}"
          echo "Image reference: ${IMAGE_REF}"
          echo "Image digest: ${IMAGE_DIGEST}"
          # Additional verification methods
          echo "Attempting direct verification..."
          cosign verify-attestation \
            --certificate-identity-regexp=".*" \
            --certificate-oidc-issuer="https://token.actions.githubusercontent.com" \
            "${IMAGE_WITH_DIGEST}" > "${RESULTS_DIR}/direct-attestation.json" 2>/dev/null || true
          echo "Attempting SLSA verification..."
          cosign verify-attestation \
            --type slsaprovenance \
            --certificate-identity-regexp=".*" \
            --certificate-oidc-issuer="https://token.actions.githubusercontent.com" \
            "${IMAGE_WITH_DIGEST}" > "${RESULTS_DIR}/slsa-attestation.json" 2>/dev/null || true
          echo "Attempting predicate-less verification..."
          cosign verify-attestation \
            --certificate-identity-regexp=".*" \
            --certificate-oidc-issuer="https://token.actions.githubusercontent.com" \
            "${IMAGE_WITH_DIGEST}" > "${RESULTS_DIR}/predicate-attestation.json" 2>/dev/null || true
          # Download all attestations for analysis
          echo "Downloading attestations..."
          cosign download attestation "${IMAGE_WITH_DIGEST}" > "${RESULTS_DIR}/all-attestations.json" 2>/dev/null || true
          # Extract from manifest and save all data
          echo "Extracting from manifest..."
          docker buildx imagetools inspect "${IMAGE_WITH_DIGEST}" --raw > "${RESULTS_DIR}/manifest.json"
          jq '.manifests[].annotations | select(."org.opencontainers.image.attestations" != null)' \
            "${RESULTS_DIR}/manifest.json" > "${RESULTS_DIR}/manifest-attestations.json" 2>/dev/null || true
          # Check results
          VERIFICATION_SUCCESS=false
          for file in "${RESULTS_DIR}"/*.json; do
            if [ -s "$file" ] && jq -e . >/dev/null 2>&1 < "$file"; then
              VERIFICATION_SUCCESS=true
              echo "✅ Found valid data in $(basename "$file")"
            fi
          done
          # Generate comprehensive report
          {
            echo "### 📝 Attestation Verification Report"
            echo "- Image: ${IMAGE_REF}"
            echo "- Digest: ${IMAGE_DIGEST}"
            echo "- Timestamp: $(date -u)"
            echo ""
            echo "#### Verification Results:"
            for file in "${RESULTS_DIR}"/*.json; do
              if [ -s "$file" ]; then
                basename=$(basename "$file")
                size=$(wc -c < "$file")
                echo "- File: ${basename}"
                echo "  - Size: ${size} bytes"
                if jq -e . >/dev/null 2>&1 < "$file"; then
                  echo "  - Status: ✅ Valid JSON"
                  echo "  - Content Preview:"
                  jq -r 'try .predicateType // "No predicate type"' "$file" | head -n 1
                else
                  echo "  - Status: ⚠️ Invalid JSON"
                fi
              fi
            done
            echo ""
            echo "#### Overall Status:"
            if [ "$VERIFICATION_SUCCESS" = true ]; then
              echo "✅ Successfully found attestation data"
            else
              echo "⚠️ No attestations could be verified"
            fi
          } > "${RESULTS_DIR}/verification-report.md"
          # Record overall verification result for later steps
          echo "VERIFICATION_SUCCESS=${VERIFICATION_SUCCESS}" >> $GITHUB_ENV
      - name: Upload attestation results
        if: github.event_name != 'pull_request'
        uses: actions/upload-artifact@v4
        with:
          name: attestation-results
          path: |
            ${{ env.ATTESTATION_RESULTS_DIR }}/*.json
            ${{ env.ATTESTATION_RESULTS_DIR }}/*.md
          retention-days: 90
      - name: Verify SBOM
        if: github.event_name != 'pull_request'
        run: |
          mkdir -p sbom-output
          syft docker.io/zx8086/capella-document-search:latest \
            -o json=sbom-output/syft-sbom.json \
            -o spdx-json=sbom-output/spdx-sbom.json \
            -o cyclonedx-json=sbom-output/cyclonedx-sbom.json
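      # The SBOM files above could also feed a vulnerability scan without re-pulling
      # the image. A possible sketch (assumes anchore/grype is installed on the runner,
      # which this workflow does not currently do):
      #
      #   grype sbom:./sbom-output/syft-sbom.json --fail-on high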
      - name: Upload SBOM Files
        if: github.event_name != 'pull_request'
        uses: actions/upload-artifact@v4
        with:
          name: sbom-files
          path: sbom-output/
          retention-days: 90
      # Testing section
      - name: Test container
        if: github.event_name != 'pull_request'
        run: |
          # Get the runner's architecture
          RUNNER_ARCH=$(uname -m)
          # Stop existing containers using port 3000
          docker ps -q --filter publish=3000 | xargs -r docker stop
          docker rm -f capella-search-test 2>/dev/null || true
          # Run container (will automatically use correct architecture)
          docker run -d \
            --name capella-search-test \
            -p 3000:3000 \
            --env-file .env \
            -e NODE_ENV=production \
            -e BUILD_VERSION=${{ github.ref_name || '0.0.1' }} \
            -e COMMIT_HASH=${{ github.sha }} \
            -e BUILD_DATE=${{ env.BUILD_TIMESTAMP }} \
            -e ENABLE_FILE_LOGGING=false \
            -e LOG_MAX_FILES=7d \
            -e LOG_MAX_SIZE=10m \
            -e LOG_LEVEL=info \
            zx8086/capella-document-search:latest
          # Add delay and better error handling
          sleep 15
          if ! docker ps | grep -q capella-search-test; then
            echo "Container failed to start. Showing logs:"
            docker logs capella-search-test
            exit 1
          fi
          # Capture metrics
          CONTAINER_STATUS=$(docker inspect --format='{{.State.Status}}' capella-search-test)
          echo "CONTAINER_STATUS=${CONTAINER_STATUS}" >> $GITHUB_ENV
          echo "CONTAINER_START_TIME=$(docker inspect --format='{{.State.StartedAt}}' capella-search-test)" >> $GITHUB_ENV
      # Container security scanning
      - name: Run Snyk container scan
        continue-on-error: true
        env:
          SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
        run: |
          # Install Snyk as a project dependency
          bun add snyk
          # Run Snyk container test
          echo "Running Snyk scan..."
          bun run snyk container test docker.io/zx8086/capella-document-search:latest \
            --file=Dockerfile \
            --severity-threshold=high \
            --sarif-file-output=snyk-docker.sarif \
            --json > snyk-report.json || echo "Vulnerabilities found but continuing..."
          if [[ "${{ github.ref }}" == "refs/heads/master" ]]; then
            bun run snyk container monitor docker.io/zx8086/capella-document-search:latest \
              --file=Dockerfile || echo "Monitor command failed but continuing..."
          fi
      - name: Upload Docker Snyk scan results
        if: always() && hashFiles('snyk-docker.sarif') != ''
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: snyk-docker.sarif
          category: snyk-docker
      # Build Summary
      - name: Docker Build Summary
        if: always()
        run: |
          {
            echo "### 🐳 Docker Build Summary"
            echo ""
            echo "#### 📊 Build Overview"
            echo "| Metric | Value |"
            echo "|--------|-------|"
            echo "| 🆔 Build ID | \`${GITHUB_SHA::7}\` |"
            echo "| 📦 Image | capella-document-search |"
            echo "| ⚡ Status | ${CONTAINER_STATUS:-N/A} |"
            # Container Status
            if [ "${CONTAINER_STATUS:-}" = "running" ]; then
              STATS=$(docker stats capella-search-test --no-stream --format "{{.CPUPerc}},{{.MemUsage}},{{.NetIO}},{{.BlockIO}}")
              echo "| 🔄 Status | ✅ Running |"
              echo "| 🕒 Start Time | ${CONTAINER_START_TIME} |"
            else
              echo "| 🔄 Status | ⚠️ Not Running |"
            fi
          } >> $GITHUB_STEP_SUMMARY
      # Cleanup
      - name: Cleanup
        if: always()
        run: |
          rm -f .env .env.production
          rm -f snyk.sarif snyk-docker.sarif
          docker container rm -f capella-search-test || true
          docker image prune -f
          docker volume prune -f
          docker builder prune -a -f
          rm -rf node_modules
          rm -rf security-artifacts
          rm -rf /usr/local/bin/syft
          rm -rf ~/.sigstore
          echo "Cleanup completed"
      - name: Verify metadata
        continue-on-error: true
        shell: bash
        run: |
          echo "Verifying metadata output..."
          # Store metadata JSON in a file to avoid shell interpretation issues
          echo '${{ steps.meta.outputs.json }}' > metadata.json
          if [ -s metadata.json ]; then
            echo "Processing metadata..."
            jq -r '.labels | keys[]' metadata.json 2>/dev/null || echo "No labels found"
          else
            echo "⚠️ Warning: No metadata JSON output found"
          fi
          echo "Metadata verification completed"