Document provisioning requirements, enable prettier formatting of mar… #161

Workflow file for this run

name: Build and Test
on:
push:
branches:
- '**'
pull_request:
branches:
- main
workflow_dispatch: {}
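# Triggers: every branch push, pull requests targeting main, and manual runs via workflow_dispatch.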
# Deployment requirements:
# - secrets.DOCKERHUB_TOKEN is a Personal Access Token for hub.docker.com with Read & Write access
# - secrets.ECR_PUBLIC_AWS_ACCESS_KEY_ID & secrets.ECR_PUBLIC_AWS_SECRET_ACCESS_KEY are for an AWS IAM User with the policy:
# {
# "Version": "2012-10-17",
# "Statement": [
# {
# "Effect": "Allow",
# "Action": [
# "ecr-public:InitiateLayerUpload",
# "ecr-public:UploadLayerPart",
# "ecr-public:PutImage",
# "ecr-public:CompleteLayerUpload",
# "ecr-public:BatchCheckLayerAvailability",
# "ecr-public:DescribeImages",
# "ecr-public:BatchDeleteImage"
# ],
# "Resource": "arn:aws:ecr-public::<aws-account-here>:repository/refacto"
# },
# {
# "Effect": "Allow",
# "Action": [
# "sts:GetServiceBearerToken",
# "ecr-public:GetAuthorizationToken"
# ],
# "Resource": "*"
# }
# ]
# }
# (DescribeImages & BatchDeleteImage are required for auto-pruning old images. The rest are needed by docker/build-push-action)
jobs:
build_and_test:
runs-on: ubuntu-latest
timeout-minutes: 20
steps:
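# the version is a UTC timestamp (e.g. 20250101-120000), exposed as a job output for the release jobs below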
- name: Determine version
id: version
run: |
set -e;
VERSION="$(date -u '+%Y%m%d-%H%M%S')";
echo "version=$VERSION" >> "$GITHUB_OUTPUT";
echo "Version $VERSION";
- name: Checkout
uses: actions/checkout@v6
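# npm caching is keyed on all three lockfiles (backend, frontend, e2e)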
- name: Install Node
uses: actions/setup-node@v6
with:
node-version: '24'
cache: npm
cache-dependency-path: |
backend/package-lock.json
frontend/package-lock.json
e2e/package-lock.json
- name: Build and Test
run: PARALLEL_BUILD=false PARALLEL_E2E=false EXPLICIT_WAIT_TIMEOUT=20000 TEST_TIMEOUT=60000 npm test
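# the flags above disable parallel build/e2e runs and raise test timeouts, presumably to suit the shared CI runners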
- name: Bundle
run: |
cd build;
rm -r node_modules;
tar -czf ../build.tar.gz .;
- name: Upload Bundle
uses: actions/upload-artifact@v5
with:
name: refacto
retention-days: 1
if-no-files-found: error
path: build.tar.gz
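# this bundle (node_modules stripped above) feeds the smoke test and release jobs below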
outputs:
version: ${{ steps.version.outputs.version }}
smoke_test:
needs:
- build_and_test
runs-on: ubuntu-latest
timeout-minutes: 10
strategy:
fail-fast: false
matrix:
include:
- { node: '20' }
- { node: '22' }
- { node: '24' }
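# presumably the Node.js major versions the published bundle is expected to support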
steps:
- name: Install Node
uses: actions/setup-node@v6
with:
node-version: ${{ matrix.node }}
- name: Download Bundle
uses: actions/download-artifact@v6
with:
name: refacto
- name: Unpack
run: tar -xf build.tar.gz && rm build.tar.gz
- name: Smoke Test
run: |
set -e;
npm install --omit=dev;
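# start the bundled server in the background, wait for its readiness message,
# check the index page, then send SIGINT and expect a clean shutdown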
./index.js > output.log 2>&1 & APP_PID="$!";
while [ ! -f output.log ] || ! grep 'Available at' < output.log > /dev/null 2>&1; do
if ! ps -p "$APP_PID" > /dev/null; then
APP_EXIT_CODE="$(wait "$APP_PID" > /dev/null; echo "$?")";
cat output.log;
echo "Application failed to launch (exit code: $APP_EXIT_CODE).";
false;
fi;
sleep 0.1;
done;
wget localhost:5000 -O test-index.html;
if ! grep '<title>Refacto</title>' < test-index.html > /dev/null; then
cat output.log;
echo "Unexpected main page response" >&2;
cat test-index.html;
false;
fi;
kill -2 "$APP_PID";
wait "$APP_PID";
if ! grep 'Shutdown complete' < output.log > /dev/null; then
cat output.log;
echo "Application failed to shut down" >&2;
false;
fi;
create_docker_release:
needs:
- build_and_test
- smoke_test
runs-on: ubuntu-latest
timeout-minutes: 10
if: ${{ github.ref == 'refs/heads/main' && github.head_ref == null }}
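# publish only for direct pushes to main (the head_ref check excludes pull request runs)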
steps:
- name: Download Bundle
uses: actions/download-artifact@v6
with:
name: refacto
- name: Unpack
run: tar -xf build.tar.gz && rm build.tar.gz
- name: Authenticate with Docker Hub
uses: docker/login-action@v3
with:
registry: docker.io
username: refacto
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Authenticate with AWS
uses: aws-actions/configure-aws-credentials@v5
with:
aws-access-key-id: ${{ secrets.ECR_PUBLIC_AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.ECR_PUBLIC_AWS_SECRET_ACCESS_KEY }}
aws-region: us-east-1 # ECR Public is only available via the N. Virginia region
- name: Authenticate with ECR Public
uses: docker/login-action@v3
with:
registry: public.ecr.aws
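# no username/password: docker/login-action detects the ECR Public registry and is expected to reuse the AWS credentials configured above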
- name: Set up QEMU for Cross-Architecture Builds
uses: docker/setup-qemu-action@v3
with:
platforms: arm64
cache-image: false # this step is quick anyway, and the cache-saving step seems to fail consistently
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build and Push Docker Image
uses: docker/build-push-action@v6
env:
DOCKER_BUILD_RECORD_UPLOAD: false
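# skip uploading the build record artifact for each run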
with:
context: .
github-token: '-'
platforms: linux/amd64,linux/arm64
cache-from: type=gha,timeout=3m
cache-to: type=gha,timeout=3m
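# BuildKit layer cache lives in the GitHub Actions cache service; the timeout presumably keeps slow cache transfers from stalling the build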
pull: true
push: true
tags: |
docker.io/refacto/refacto:${{ needs.build_and_test.outputs.version }}
docker.io/refacto/refacto:latest
public.ecr.aws/w4z9z1e2/refacto:${{ needs.build_and_test.outputs.version }}
public.ecr.aws/w4z9z1e2/refacto:latest
labels: |
org.opencontainers.image.title=Refacto
org.opencontainers.image.description=The Refacto app for running online Retrospectives
org.opencontainers.image.version=${{ needs.build_and_test.outputs.version }}
org.opencontainers.image.url=https://retro.davidje13.com/
org.opencontainers.image.source=${{ github.repositoryUrl }}
org.opencontainers.image.licenses=GPL-3.0-or-later
org.opencontainers.image.base.name=docker.io/node:24-alpine
- name: Prune old ECR images
# work around lack of support for lifecycle policies in ECR Public (see https://github.com/aws/containers-roadmap/issues/1268)
run: |
set -e;
cat >task.mjs <<"EOF"
import { json } from 'node:stream/consumers';
import { spawnSync } from 'node:child_process';
const untaggedThresholdTime = Date.now() - 1000 * 60 * 60 * 24; // 1 day
const repositorySizeLimitBytes = 20 * 1024 * 1024 * 1024; // 20GB (ECR Public free tier max across all repositories is 50GB)
const aws = process.argv[2];
const input = await json(process.stdin);
const indexItems = input.imageDetails.filter((item) => item.imageManifestMediaType.includes('image.index.v1'));
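// only multi-arch image index manifests are considered; per-architecture child
// manifests are never deleted directly by this script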
console.log(`total index items: ${indexItems.length}`);
for (const item of indexItems) {
item.imagePushedTimestamp = Date.parse(item.imagePushedAt);
}
indexItems.sort((a, b) => b.imagePushedTimestamp - a.imagePushedTimestamp); // sort newest first
let totalSize = 0;
const toDelete = [];
for (const item of indexItems) {
if (!item.imageTags?.length && item.imagePushedTimestamp < untaggedThresholdTime) {
// untagged image index: obvious candidate for deletion
console.log(`untagged image: ${item.imageDigest}`);
toDelete.push(item.imageDigest);
continue;
}
totalSize += item.imageSizeInBytes;
if (item.imageTags?.includes('latest')) {
// never remove image tagged as 'latest'
continue;
}
if (totalSize > repositorySizeLimitBytes) {
// max repository size exceeded: prune oldest images
console.log(`old image: ${item.imageDigest}`);
toDelete.push(item.imageDigest);
}
}
while (toDelete.length > 0) {
const batch = toDelete.splice(0, 100);
console.log('deleting batch:', batch);
const batchFilter = batch.map((digest) => `imageDigest=${digest}`).join(',');
const result = spawnSync(
aws,
['ecr-public', 'batch-delete-image', '--repository-name', 'refacto', '--image-ids', batchFilter],
{ stdio: ['ignore', 'inherit', 'inherit'] },
);
if (result.status !== 0) {
console.log(`batch delete failed (status ${result.status}, signal ${result.signal}, error ${result.error})`);
process.exit(1);
}
}
EOF
aws ecr-public describe-images --repository-name refacto --output json | node task.mjs "$(which aws)";
create_github_release:
needs:
- build_and_test
- smoke_test
runs-on: ubuntu-latest
timeout-minutes: 10
if: ${{ github.ref == 'refs/heads/main' && github.head_ref == null }}
permissions:
contents: write
steps:
- name: Download Bundle
uses: actions/download-artifact@v6
with:
name: refacto
- name: Create GitHub Release
env:
API_BASE: 'https://api.github.com/repos/${{ github.repository }}'
COMMIT: ${{ github.sha }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
VERSION: ${{ needs.build_and_test.outputs.version }}
run: |
set -e;
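# create a release for this commit via the GitHub REST API, then upload the
# bundle as an asset using the upload_url template returned by the API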
wget -S \
--header='Accept: application/vnd.github.v3+json' \
--header="Authorization: token $GITHUB_TOKEN" \
--post-data="$(jq -n --arg n "$VERSION" --arg c "$COMMIT" '{tag_name: $n, target_commitish: $c}')" \
"$API_BASE/releases" -O release.json;
cat release.json;
echo "Uploading bundle...";
UPLOAD_URL="$(jq -r '.upload_url' < release.json | sed 's/{[^}]*}//')";
wget -S \
--header='Accept: application/vnd.github.v3+json' \
--header="Authorization: token $GITHUB_TOKEN" \
--header='Content-Type: application/gzip' \
--post-file='build.tar.gz' \
"$UPLOAD_URL?name=build.tar.gz" -O release-file.json;
cat release-file.json;
echo "Done.";