# Heavyweight type checkers (#21890)
# Workflow-level configuration: triggers, permissions, and shared environment.
name: Backend Code Checks
on:
  pull_request:
  merge_group:
    types: [checks_requested]
  push:
    branches:
      - "main"
      - "release-**"
# Minimal permissions: publish check results and post/update PR comments.
permissions:
  checks: write
  pull-requests: write
env:
  # Local tag for the image built once in the Build job and shared via artifact.
  IMAGE: ethyca/fides:local
  # NOTE(review): confirm this exact Python patch release is available on
  # actions/setup-python runners.
  DEFAULT_PYTHON_VERSION: "3.13.11"
  # Docker auth with read-only permissions.
  DOCKER_USER: ${{ secrets.DOCKER_USER }}
  DOCKER_RO_TOKEN: ${{ secrets.DOCKER_RO_TOKEN }}
  SAAS_OP_SERVICE_ACCOUNT_TOKEN: ${{ secrets.SAAS_OP_SERVICE_ACCOUNT_TOKEN }}
  SAAS_SECRETS_OP_VAULT_ID: ${{ secrets.SAAS_SECRETS_OP_VAULT_ID }}
jobs:
  ###############
  ## Prechecks ##
  ###############
  # Gate job: detects whether the change touches backend-relevant files.
  # Every downstream job keys off its `has_backend_changes` output.
  Check-Backend-Changes:
    runs-on: ubuntu-latest
    outputs:
      has_backend_changes: ${{ steps.filter.outputs.backend }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Check for backend file changes
        uses: dorny/paths-filter@v3
        id: filter
        with:
          list-files: shell
          # NOTE(review): the '**/*.pxl' pattern below looks unusual — confirm
          # it is not a typo for '**/*.pyx' (Cython) or another extension.
          filters: |
            backend:
              - '**/*.py'
              - '**/*.pxl'
              - '**/*requirements.txt'
              - 'Dockerfile'
              - 'Makefile'
              - 'pyproject.toml'
              - 'setup.cfg'
              - 'noxfile.py'
              - 'data/**'
              - '.github/workflows/backend_checks.yml'
      - name: Log changed files
        if: steps.filter.outputs.backend == 'true'
        run: echo "${{ steps.filter.outputs.backend_files }}"
Collect-Tests:
needs: Check-Backend-Changes
if: needs.Check-Backend-Changes.outputs.has_backend_changes == 'true'
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set Up Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
cache: "pip"
- name: Install Nox
run: pip install nox>=2022
- name: Cache Nox virtual environment
uses: actions/cache@v4
with:
path: .nox/
key: ${{ runner.os }}-nox-${{ github.job }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('noxfile.py') }}-${{ hashFiles('noxfiles/**.py') }}-${{ hashFiles('pyproject.toml') }}
restore-keys: |
${{ runner.os }}-nox-${{ github.job }}-${{ env.DEFAULT_PYTHON_VERSION }}
- name: Run Static Check
run: nox -s collect_tests
Validate-API-Types:
needs: Check-Backend-Changes
if: needs.Check-Backend-Changes.outputs.has_backend_changes == 'true'
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set Up Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
cache: "pip"
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: 20
- name: Install TypeScript dependencies
run: |
npm install -g typescript ts-node
npm install --no-save @types/node
- name: Install Python dependencies
run: |
pip install --upgrade pip
pip install nox>=2022
- name: Install fides dependencies (for schema generation)
run: |
pip install -e .
pip install -r requirements.txt
      - name: Generate OpenAPI schema (without starting server)
        run: |
          python scripts/generate_openapi_schema.py openapi.json
          echo "✅ OpenAPI schema generated"
        env:
          # Dummy security values: only needed so the app config loads during
          # schema generation — no real server is started here.
          FIDES__TEST_MODE: "true"
          FIDES__SECURITY__APP_ENCRYPTION_KEY: "OLMkv91j8DHiDAULnK5Lxx3kSCov30b3"
          FIDES__SECURITY__OAUTH_ROOT_CLIENT_ID: "fidesadmin"
          FIDES__SECURITY__OAUTH_ROOT_CLIENT_SECRET: "fidesadminsecret"
      # Both validation steps use continue-on-error so the job never blocks a
      # PR; results are surfaced through the logs, artifacts, and the sticky
      # PR comment posted by the step below.
      - name: Validate Admin UI types
        run: |
          echo "🔍 Validating Admin UI TypeScript types..."
          ts-node --project scripts/tsconfig.json scripts/validate_api_types.ts \
            openapi.json \
            clients/admin-ui/src/types/api \
            2>&1 | tee admin-ui-validation.log
        continue-on-error: true
        id: validate-admin-ui
      - name: Validate Privacy Center types
        run: |
          echo "🔍 Validating Privacy Center TypeScript types..."
          ts-node --project scripts/tsconfig.json scripts/validate_api_types.ts \
            openapi.json \
            clients/privacy-center/types/api \
            2>&1 | tee privacy-center-validation.log
        continue-on-error: true
        id: validate-privacy-center
      - name: Upload validation logs
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: api-type-validation-logs
          path: |
            admin-ui-validation.log
            privacy-center-validation.log
            openapi.json
          retention-days: 7
      # Posts a single "sticky" PR comment (identified by COMMENT_MARKER)
      # summarizing validation results parsed from the tee'd log files, and
      # updates it on subsequent runs instead of posting duplicates.
      - name: Comment PR with results
        if: github.event_name == 'pull_request' && always()
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            // Unique marker to identify our comment
            const COMMENT_MARKER = '<!-- api-type-validation-comment -->';
            // NOTE(review): the two *Display variables below are never used
            // later in this script — candidates for removal.
            const adminUiLogDisplay = fs.existsSync('admin-ui-validation.log')
              ? fs.readFileSync('admin-ui-validation.log', 'utf8')
              : 'No validation log found';
            const privacyCenterLogDisplay = fs.existsSync('privacy-center-validation.log')
              ? fs.readFileSync('privacy-center-validation.log', 'utf8')
              : 'No validation log found';
            // Parse validation results from logs (don't rely on step outcome)
            const adminUiLog = fs.existsSync('admin-ui-validation.log')
              ? fs.readFileSync('admin-ui-validation.log', 'utf8')
              : '';
            const privacyCenterLog = fs.existsSync('privacy-center-validation.log')
              ? fs.readFileSync('privacy-center-validation.log', 'utf8')
              : '';
            // Debug: Log what we found
            console.log('Admin UI log exists:', fs.existsSync('admin-ui-validation.log'));
            console.log('Admin UI log length:', adminUiLog.length);
            console.log('Privacy Center log exists:', fs.existsSync('privacy-center-validation.log'));
            console.log('Privacy Center log length:', privacyCenterLog.length);
            // Show first 500 chars of each log for debugging
            if (adminUiLog.length > 0) {
              console.log('Admin UI log preview:', adminUiLog.substring(0, 500));
            }
            if (privacyCenterLog.length > 0) {
              console.log('Privacy Center log preview:', privacyCenterLog.substring(0, 500));
            }
            // Count errors and warnings from logs.
            // These regexes match the validator's "Errors: N" / "Warnings: N"
            // summary lines; missing fields are counted by tag occurrences.
            const countErrors = (log) => {
              const match = log.match(/Errors:\s+(\d+)/);
              if (match) console.log('Found errors:', match[1]);
              return match ? parseInt(match[1]) : 0;
            };
            const countWarnings = (log) => {
              const match = log.match(/Warnings:\s+(\d+)/);
              if (match) console.log('Found warnings:', match[1]);
              return match ? parseInt(match[1]) : 0;
            };
            const countMissingFields = (log) => {
              const matches = log.match(/\[missing_field\]/g);
              return matches ? matches.length : 0;
            };
            console.log('=== Parsing Admin UI Log ===');
            const adminUiErrors = countErrors(adminUiLog);
            const adminUiWarnings = countWarnings(adminUiLog);
            console.log('=== Parsing Privacy Center Log ===');
            const privacyCenterErrors = countErrors(privacyCenterLog);
            const privacyCenterWarnings = countWarnings(privacyCenterLog);
            const totalErrors = adminUiErrors + privacyCenterErrors;
            const totalWarnings = adminUiWarnings + privacyCenterWarnings;
            const totalMissingFields = countMissingFields(adminUiLog) + countMissingFields(privacyCenterLog);
            console.log('=== Final Counts ===');
            console.log('Total errors:', totalErrors);
            console.log('Total warnings:', totalWarnings);
            console.log('Total missing fields:', totalMissingFields);
            const adminUiHasIssues = adminUiErrors > 0 || adminUiWarnings > 0;
            const privacyCenterHasIssues = privacyCenterErrors > 0 || privacyCenterWarnings > 0;
            const hasAnyIssues = totalErrors > 0 || totalWarnings > 0;
            console.log('Has any issues:', hasAnyIssues);
            // Build comment body
            let comment = COMMENT_MARKER + '\n## 🔍 API Type Validation Results\n\n';
            if (!hasAnyIssues) {
              comment += '✅ All TypeScript types match the backend API schemas!\n\n';
            } else {
              comment += '⚠️ Type mismatches detected (non-blocking check)\n\n';
              // Add prominent warning if many missing fields
              if (totalMissingFields > 10) {
                comment += '> [!WARNING]\n';
                comment += `> **${totalMissingFields} missing fields detected!** This usually means TypeScript types are out of sync.\n`;
                comment += '> **Most likely fix:** Regenerate the TypeScript types (see below).\n\n';
              }
              comment += '### 🔧 To fix these issues:\n';
              comment += '```bash\n';
              comment += 'cd clients\n';
              comment += 'npm run openapi:generate\n';
              comment += 'git add -A\n';
              comment += 'git commit -m "chore: regenerate TypeScript types from OpenAPI schema"\n';
              comment += '```\n\n';
              comment += '⚠️ **Important:** Review the changes carefully before committing!\n';
              comment += '- Check for breaking changes in existing types\n';
              comment += '- Verify manual overrides are preserved\n';
              comment += '- Test affected frontend components\n\n';
              comment += `📎 [View full logs in artifacts](${context.payload.repository.html_url}/actions/runs/${context.runId})\n\n`;
            }
            // Add brief status and link to artifacts
            comment += '### Validation Status:\n\n';
            if (adminUiHasIssues) {
              comment += `- **Admin UI**: ⚠️ ${adminUiErrors} error(s), ${adminUiWarnings} warning(s)\n`;
            } else {
              comment += `- **Admin UI**: ✅ Passed\n`;
            }
            if (privacyCenterHasIssues) {
              comment += `- **Privacy Center**: ⚠️ ${privacyCenterErrors} error(s), ${privacyCenterWarnings} warning(s)\n\n`;
            } else {
              comment += `- **Privacy Center**: ✅ Passed\n\n`;
            }
            comment += `📎 **[View full validation logs](${context.payload.repository.html_url}/actions/runs/${context.runId})**\n`;
            comment += '> Click "Summary" → Download "api-type-validation-logs" artifact\n\n';
            // Repeat fix instructions at the bottom for easy access
            if (hasAnyIssues) {
              comment += '---\n\n';
              comment += '### 🔧 Quick Fix:\n';
              comment += '```bash\n';
              comment += 'cd clients\n';
              comment += 'npm run openapi:generate\n';
              comment += 'git add -A\n';
              comment += 'git commit -m "chore: regenerate TypeScript types"\n';
              comment += '```\n';
            }
            // Find existing comment (marker match makes the comment "sticky")
            const { data: comments } = await github.rest.issues.listComments({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
            });
            const existingComment = comments.find(c => c.body?.includes(COMMENT_MARKER));
            if (existingComment) {
              // Update existing comment
              await github.rest.issues.updateComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                comment_id: existingComment.id,
                body: comment
              });
              console.log('Updated existing comment');
            } else {
              // Create new comment
              await github.rest.issues.createComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.issue.number,
                body: comment
              });
              console.log('Created new comment');
            }
- name: Generate validation summary
if: always()
run: |
echo "## 🔍 API Type Validation Summary" >> $GITHUB_STEP_SUMMARY
admin_ui_failed=false
privacy_center_failed=false
if [ "${{ steps.validate-admin-ui.outcome }}" = "failure" ]; then
admin_ui_failed=true
fi
if [ "${{ steps.validate-privacy-center.outcome }}" = "failure" ]; then
privacy_center_failed=true
fi
if [ "$admin_ui_failed" = true ] || [ "$privacy_center_failed" = true ]; then
# Count missing fields
missing_count=$(grep -c "\[missing_field\]" admin-ui-validation.log privacy-center-validation.log 2>/dev/null || echo "0")
echo "⚠️ **Type mismatches detected** (non-blocking)" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
if [ "$missing_count" -gt 10 ]; then
echo "### ⚠️ $missing_count missing fields detected!" >> $GITHUB_STEP_SUMMARY
echo "This usually means TypeScript types are out of sync with the backend." >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
fi
echo "### 🔧 Quick Fix:" >> $GITHUB_STEP_SUMMARY
echo "\`\`\`bash" >> $GITHUB_STEP_SUMMARY
echo "cd clients" >> $GITHUB_STEP_SUMMARY
echo "npm run openapi:generate" >> $GITHUB_STEP_SUMMARY
echo "git add -A" >> $GITHUB_STEP_SUMMARY
echo "git commit -m \"chore: regenerate TypeScript types from OpenAPI schema\"" >> $GITHUB_STEP_SUMMARY
echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "⚠️ **Review changes carefully before committing!**" >> $GITHUB_STEP_SUMMARY
echo "- Check for breaking changes" >> $GITHUB_STEP_SUMMARY
echo "- Verify manual overrides are preserved" >> $GITHUB_STEP_SUMMARY
echo "- Test affected components" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "📎 See artifacts for full validation logs" >> $GITHUB_STEP_SUMMARY
# Set output for status check
echo "::warning title=API Type Validation::$missing_count type mismatches detected. Run 'npm run openapi:generate' in clients/ to fix."
else
echo "✅ **All TypeScript types match the backend API schemas!**" >> $GITHUB_STEP_SUMMARY
echo "::notice title=API Type Validation::All types are in sync"
fi
  # Builds the production Docker image once and shares it with all test jobs
  # as a short-lived artifact (loaded via `docker load` downstream).
  Build:
    needs: [Check-Backend-Changes, Collect-Tests, Validate-API-Types]
    if: needs.Check-Backend-Changes.outputs.has_backend_changes == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v3
      - name: Build container
        uses: docker/build-push-action@v6
        with:
          builder: ${{ steps.buildx.outputs.name }}
          context: .
          build-args: PYTHON_VERSION=${{ env.DEFAULT_PYTHON_VERSION }}
          target: prod
          # Export to a tarball instead of pushing; downstream jobs docker-load it.
          outputs: type=docker,dest=/tmp/python-${{ env.DEFAULT_PYTHON_VERSION }}.tar
          push: false
          tags: ${{ env.IMAGE }}
          # Use the GitHub Actions layer cache to speed up rebuilds.
          cache-from: type=gha
          cache-to: type=gha,mode=max
      - name: Upload container
        uses: actions/upload-artifact@v4
        with:
          name: python-${{ env.DEFAULT_PYTHON_VERSION }}
          path: /tmp/python-${{ env.DEFAULT_PYTHON_VERSION }}.tar
          # Only needed for the duration of this workflow run.
          retention-days: 1
##################
## Performance ##
##################
Performance-Checks:
needs: [Check-Backend-Changes, Check-Container-Startup]
if: needs.Check-Backend-Changes.outputs.has_backend_changes == 'true'
runs-on: ubuntu-latest
continue-on-error: true
steps:
- name: Download container
uses: actions/download-artifact@v4
with:
name: python-${{ env.DEFAULT_PYTHON_VERSION }}
path: /tmp/
- name: Load image
run: docker load --input /tmp/python-${{ env.DEFAULT_PYTHON_VERSION }}.tar
- name: Checkout
uses: actions/checkout@v4
- name: Set Up Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
cache: "pip"
- name: Install Rust/Cargo
run: curl -y https://sh.rustup.rs -sSf | sh
- name: Install Drill
run: cargo install drill
- name: Install Nox
run: pip install nox>=2022
- name: Cache Nox virtual environment
uses: actions/cache@v4
with:
path: .nox/
key: ${{ runner.os }}-nox-${{ github.job }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('noxfile.py') }}-${{ hashFiles('noxfiles/**.py') }}-${{ hashFiles('pyproject.toml') }}
restore-keys: |
${{ runner.os }}-nox-${{ github.job }}-${{ env.DEFAULT_PYTHON_VERSION }}-
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ env.DOCKER_USER }}
password: ${{ env.DOCKER_RO_TOKEN }}
- name: Run Performance Tests
run: nox -s performance_tests
#################
## Misc Checks ##
#################
Check-Container-Startup:
needs: [Check-Backend-Changes, Build]
if: needs.Check-Backend-Changes.outputs.has_backend_changes == 'true'
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- name: Pull Docker images in background
run: |
docker pull postgres:16 > /dev/null 2>&1 &
docker pull redis:8.0-alpine > /dev/null 2>&1 &
echo "Docker pull initiated in background."
shell: bash
- name: Download container
uses: actions/download-artifact@v4
with:
name: python-${{ env.DEFAULT_PYTHON_VERSION }}
path: /tmp/
- name: Load image
run: docker load --input /tmp/python-${{ env.DEFAULT_PYTHON_VERSION }}.tar
- name: Checkout
uses: actions/checkout@v4
- name: Set Up Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
cache: "pip"
- name: Install Nox
run: pip install nox>=2022
- name: Cache Nox virtual environment
uses: actions/cache@v4
with:
path: .nox/
key: ${{ runner.os }}-nox-${{ github.job }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('noxfile.py') }}-${{ hashFiles('noxfiles/**.py') }}-${{ hashFiles('pyproject.toml') }}
restore-keys: |
${{ runner.os }}-nox-${{ github.job }}-${{ env.DEFAULT_PYTHON_VERSION }}-
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ env.DOCKER_USER }}
password: ${{ env.DOCKER_RO_TOKEN }}
- name: Run Fides webserver startup check
run: nox -s check_container_startup
- name: Run Celery worker startup check
run: nox -s check_worker_startup
Migration-Checks:
needs: [Check-Backend-Changes, Check-Container-Startup]
if: needs.Check-Backend-Changes.outputs.has_backend_changes == 'true'
strategy:
matrix:
test_selection:
- "check_migrations"
- "check_migration_downgrade"
runs-on: ubuntu-latest
timeout-minutes: 15
continue-on-error: false
steps:
- name: Download container
uses: actions/download-artifact@v4
with:
name: python-${{ env.DEFAULT_PYTHON_VERSION }}
path: /tmp/
- name: Load image
run: docker load --input /tmp/python-${{ env.DEFAULT_PYTHON_VERSION }}.tar
- name: Checkout
uses: actions/checkout@v4
- name: Set Up Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
cache: "pip"
- name: Install Nox
run: pip install nox>=2022
- name: Cache Nox virtual environment
uses: actions/cache@v4
with:
path: .nox/
key: ${{ runner.os }}-nox-${{ github.job }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('noxfile.py') }}-${{ hashFiles('noxfiles/**.py') }}-${{ hashFiles('pyproject.toml') }}
restore-keys: |
${{ runner.os }}-nox-${{ github.job }}-${{ env.DEFAULT_PYTHON_VERSION }}-
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ env.DOCKER_USER }}
password: ${{ env.DOCKER_RO_TOKEN }}
- name: Run migration test
run: nox -s "${{ matrix.test_selection }}"
Misc-Tests:
needs: [Check-Backend-Changes, Check-Container-Startup]
if: needs.Check-Backend-Changes.outputs.has_backend_changes == 'true'
strategy:
matrix:
test_selection:
- "check_fides_annotations"
- "fides_db_scan"
- "docs_check"
- "minimal_config_startup"
runs-on: ubuntu-latest
timeout-minutes: 15
continue-on-error: true
steps:
- name: Download container
uses: actions/download-artifact@v4
with:
name: python-${{ env.DEFAULT_PYTHON_VERSION }}
path: /tmp/
- name: Load image
run: docker load --input /tmp/python-${{ env.DEFAULT_PYTHON_VERSION }}.tar
- name: Checkout
uses: actions/checkout@v4
- name: Set Up Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
cache: "pip"
- name: Install Nox
run: pip install nox>=2022
- name: Cache Nox virtual environment
uses: actions/cache@v4
with:
path: .nox/
key: ${{ runner.os }}-nox-${{ github.job }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('noxfile.py') }}-${{ hashFiles('noxfiles/**.py') }}-${{ hashFiles('pyproject.toml') }}
restore-keys: |
${{ runner.os }}-nox-${{ github.job }}-${{ env.DEFAULT_PYTHON_VERSION }}-qq
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ env.DOCKER_USER }}
password: ${{ env.DOCKER_RO_TOKEN }}
- name: Run test suite
run: nox -s "${{ matrix.test_selection }}"
- name: Publish Test Report
uses: mikepenz/action-junit-report@v5
if: success() || failure() # always run even if the previous step fails
with:
report_paths: '**/test_report.xml'
################
## Safe Tests ##
################
Safe-Tests:
needs: [Check-Backend-Changes, Check-Container-Startup]
if: needs.Check-Backend-Changes.outputs.has_backend_changes == 'true'
strategy:
fail-fast: false
matrix:
test_selection:
- "ctl-not-external"
- "ops-unit-api"
- "ops-unit-non-api"
- "ops-integration"
- "api"
- "lib"
- "misc-unit"
- "misc-integration"
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- name: Pull Docker images in background
run: |
docker pull postgres:16 > /dev/null 2>&1 &
docker pull redis:8.0-alpine > /dev/null 2>&1 &
echo "Docker pull initiated in background."
shell: bash
- name: Download container
uses: actions/download-artifact@v4
with:
name: python-${{ env.DEFAULT_PYTHON_VERSION }}
path: /tmp/
- name: Load image
run: docker load --input /tmp/python-${{ env.DEFAULT_PYTHON_VERSION }}.tar
- name: Checkout
uses: actions/checkout@v4
- name: Set Up Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
cache: "pip"
- name: Install Nox
run: pip install nox>=2022
- name: Cache Nox virtual environment
uses: actions/cache@v4
with:
path: .nox/
key: ${{ runner.os }}-nox-${{ github.job }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ matrix.test_selection }}-${{ hashFiles('noxfile.py') }}-${{ hashFiles('noxfiles/**.py') }}-${{ hashFiles('pyproject.toml') }}
restore-keys: |
${{ runner.os }}-nox-${{ github.job }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ matrix.test_selection }}
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ env.DOCKER_USER }}
password: ${{ env.DOCKER_RO_TOKEN }}
- name: Run test suite
run: nox -s "pytest(${{ matrix.test_selection }})"
- name: Publish Test Report
uses: mikepenz/action-junit-report@v5
if: success() || failure() # always run even if the previous step fails
with:
report_paths: '**/test_report.xml'
- name: Upload coverage
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
fail_ci_if_error: false
##################
## Unsafe Tests ##
##################
# NOTE: Matrixes aren't used here due to the danger of race conditions for external resources
Pytest-Ctl-External:
needs: [Check-Backend-Changes, Check-Container-Startup]
if: needs.Check-Backend-Changes.outputs.has_backend_changes == 'true' && (contains(github.event.pull_request.labels.*.name, 'run unsafe ci checks') || github.event_name == 'push' || github.event_name == 'merge_group')
strategy:
max-parallel: 1 # This prevents collisions in shared external resources
runs-on: ubuntu-latest
timeout-minutes: 20
steps:
- name: Download container
uses: actions/download-artifact@v4
with:
name: python-${{ env.DEFAULT_PYTHON_VERSION }}
path: /tmp/
- name: Load image
run: docker load --input /tmp/python-${{ env.DEFAULT_PYTHON_VERSION }}.tar
- name: Checkout
uses: actions/checkout@v4
- name: Set Up Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
cache: "pip"
- name: Install Nox
run: pip install nox>=2022
- name: Cache Nox virtual environment
uses: actions/cache@v4
with:
path: .nox/
key: ${{ runner.os }}-nox-${{ github.job }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('noxfile.py') }}-${{ hashFiles('noxfiles/**.py') }}-${{ hashFiles('pyproject.toml') }}
restore-keys: |
${{ runner.os }}-nox-${{ github.job }}-${{ env.DEFAULT_PYTHON_VERSION }}-
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ env.DOCKER_USER }}
password: ${{ env.DOCKER_RO_TOKEN }}
      # Resolves external-service credentials from 1Password at run time and
      # exports them as environment variables for the external test suite.
      - name: Load secrets from 1Password
        uses: 1password/load-secrets-action@v2
        with:
          # Export loaded secrets as environment variables
          export-env: true
        env:
          OP_SERVICE_ACCOUNT_TOKEN: ${{ secrets.OP_SERVICE_ACCOUNT_TOKEN }}
          # Secrets to pull from 1Password
          AWS_ACCESS_KEY_ID: op://github-actions/ctl/AWS_ACCESS_KEY_ID
          AWS_DEFAULT_REGION: op://github-actions/ctl/AWS_DEFAULT_REGION
          AWS_SECRET_ACCESS_KEY: op://github-actions/ctl/AWS_SECRET_ACCESS_KEY
          DYNAMODB_ACCESS_KEY_ID: op://github-actions/dynamodb/DYNAMODB_ACCESS_KEY_ID
          DYNAMODB_ACCESS_KEY: op://github-actions/dynamodb/DYNAMODB_ACCESS_KEY
          DYNAMODB_REGION: op://github-actions/dynamodb/DYNAMODB_REGION
          OKTA_CLIENT_ID: op://github-actions/okta/OKTA_CLIENT_ID
          OKTA_PRIVATE_KEY: op://github-actions/okta/OKTA_PRIVATE_KEY
          REDSHIFT_FIDESCTL_PASSWORD: op://github-actions/ctl/REDSHIFT_FIDESCTL_PASSWORD
          SNOWFLAKE_FIDESCTL_PASSWORD: op://github-actions/ctl/SNOWFLAKE_FIDESCTL_PASSWORD
      - name: Run external test suite
        run: nox -s "pytest(ctl-external)"
        env:
          # BigQuery credentials come straight from GitHub secrets, not 1Password.
          BIGQUERY_CONFIG: ${{ secrets.BIGQUERY_CONFIG }}
External-Datastores:
needs: [Check-Backend-Changes, Check-Container-Startup]
if: needs.Check-Backend-Changes.outputs.has_backend_changes == 'true' && (contains(github.event.pull_request.labels.*.name, 'run unsafe ci checks') || github.event_name == 'push' || github.event_name == 'merge_group')
strategy:
max-parallel: 1 # This prevents collisions in shared external resources
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- name: Download container
uses: actions/download-artifact@v4
with:
name: python-${{ env.DEFAULT_PYTHON_VERSION }}
path: /tmp/
- name: Load image
run: docker load --input /tmp/python-${{ env.DEFAULT_PYTHON_VERSION }}.tar
- name: Checkout
uses: actions/checkout@v4
- name: Set Up Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
cache: "pip"
- name: Install Nox
run: pip install nox>=2022
- name: Cache Nox virtual environment
uses: actions/cache@v4
with:
path: .nox/
key: ${{ runner.os }}-nox-${{ github.job }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('noxfile.py') }}-${{ hashFiles('noxfiles/**.py') }}-${{ hashFiles('pyproject.toml') }}
restore-keys: |
${{ runner.os }}-nox-${{ github.job }}-${{ env.DEFAULT_PYTHON_VERSION }}-
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ env.DOCKER_USER }}
password: ${{ env.DOCKER_RO_TOKEN }}
      # Resolves every external-datastore credential from 1Password at run
      # time; op:// references are vault/item/field paths.
      - name: Load secrets from 1Password
        uses: 1password/load-secrets-action@v2
        with:
          # Export loaded secrets as environment variables
          export-env: true
        env:
          OP_SERVICE_ACCOUNT_TOKEN: ${{ secrets.OP_SERVICE_ACCOUNT_TOKEN }}
          # Secrets to pull from 1Password
          BIGQUERY_DATASET: op://github-actions/bigquery/BIGQUERY_DATASET
          BIGQUERY_KEYFILE_CREDS: op://github-actions/bigquery/BIGQUERY_KEYFILE_CREDS
          BIGQUERY_ENTERPRISE_DATASET: op://github-actions/bigquery-enterprise/BIGQUERY_ENTERPRISE_DATASET
          BIGQUERY_ENTERPRISE_KEYFILE_CREDS: op://github-actions/bigquery-enterprise/BIGQUERY_ENTERPRISE_KEYFILE_CREDS
          DYNAMODB_ACCESS_KEY_ID: op://github-actions/dynamodb/DYNAMODB_ACCESS_KEY_ID
          DYNAMODB_ACCESS_KEY: op://github-actions/dynamodb/DYNAMODB_ACCESS_KEY
          DYNAMODB_ASSUME_ROLE_ARN: op://github-actions/dynamodb/DYNAMODB_ASSUME_ROLE_ARN
          DYNAMODB_REGION: op://github-actions/dynamodb/DYNAMODB_REGION
          GOOGLE_CLOUD_SQL_MYSQL_DATABASE_NAME: op://github-actions/gcp-mysql/GOOGLE_CLOUD_SQL_MYSQL_DATABASE_NAME
          GOOGLE_CLOUD_SQL_MYSQL_DB_IAM_USER: op://github-actions/gcp-mysql/GOOGLE_CLOUD_SQL_MYSQL_DB_IAM_USER
          GOOGLE_CLOUD_SQL_MYSQL_INSTANCE_CONNECTION_NAME: op://github-actions/gcp-mysql/GOOGLE_CLOUD_SQL_MYSQL_INSTANCE_CONNECTION_NAME
          GOOGLE_CLOUD_SQL_MYSQL_KEYFILE_CREDS: op://github-actions/gcp-mysql/GOOGLE_CLOUD_SQL_MYSQL_KEYFILE_CREDS
          GOOGLE_CLOUD_SQL_POSTGRES_DATABASE_NAME: op://github-actions/gcp-postgres/GOOGLE_CLOUD_SQL_POSTGRES_DATABASE_NAME
          GOOGLE_CLOUD_SQL_POSTGRES_DATABASE_SCHEMA_NAME: op://github-actions/gcp-postgres/GOOGLE_CLOUD_SQL_POSTGRES_DATABASE_SCHEMA_NAME
          GOOGLE_CLOUD_SQL_POSTGRES_DB_IAM_USER: op://github-actions/gcp-postgres/GOOGLE_CLOUD_SQL_POSTGRES_DB_IAM_USER
          GOOGLE_CLOUD_SQL_POSTGRES_INSTANCE_CONNECTION_NAME: op://github-actions/gcp-postgres/GOOGLE_CLOUD_SQL_POSTGRES_INSTANCE_CONNECTION_NAME
          GOOGLE_CLOUD_SQL_POSTGRES_KEYFILE_CREDS: op://github-actions/gcp-postgres/GOOGLE_CLOUD_SQL_POSTGRES_KEYFILE_CREDS
          OKTA_CLIENT_ID: op://github-actions/okta/OKTA_CLIENT_ID
          OKTA_ORG_URL: op://github-actions/okta/OKTA_ORG_URL
          OKTA_PRIVATE_KEY: op://github-actions/okta/OKTA_PRIVATE_KEY
          RDS_MYSQL_AWS_ACCESS_KEY_ID: op://github-actions/rds-mysql/RDS_MYSQL_AWS_ACCESS_KEY_ID
          RDS_MYSQL_AWS_SECRET_ACCESS_KEY: op://github-actions/rds-mysql/RDS_MYSQL_AWS_SECRET_ACCESS_KEY
          RDS_MYSQL_DB_INSTANCE: op://github-actions/rds-mysql/RDS_MYSQL_DB_INSTANCE
          RDS_MYSQL_DB_NAME: op://github-actions/rds-mysql/RDS_MYSQL_DB_NAME
          RDS_MYSQL_DB_USERNAME: op://github-actions/rds-mysql/RDS_MYSQL_DB_USERNAME
          RDS_MYSQL_REGION: op://github-actions/rds-mysql/RDS_MYSQL_REGION
          RDS_POSTGRES_AWS_ACCESS_KEY_ID: op://github-actions/rds-postgres/RDS_POSTGRES_AWS_ACCESS_KEY_ID
          RDS_POSTGRES_AWS_SECRET_ACCESS_KEY: op://github-actions/rds-postgres/RDS_POSTGRES_AWS_SECRET_ACCESS_KEY
          RDS_POSTGRES_DB_USERNAME: op://github-actions/rds-postgres/RDS_POSTGRES_DB_USERNAME
          RDS_POSTGRES_REGION: op://github-actions/rds-postgres/RDS_POSTGRES_REGION
          REDSHIFT_TEST_DATABASE: op://github-actions/redshift/REDSHIFT_TEST_DATABASE
          REDSHIFT_TEST_DB_SCHEMA: op://github-actions/redshift/REDSHIFT_TEST_DB_SCHEMA
          REDSHIFT_TEST_HOST: op://github-actions/redshift/REDSHIFT_TEST_HOST
          REDSHIFT_TEST_PASSWORD: op://github-actions/redshift/REDSHIFT_TEST_PASSWORD
          REDSHIFT_TEST_PORT: op://github-actions/redshift/REDSHIFT_TEST_PORT
          REDSHIFT_TEST_USER: op://github-actions/redshift/REDSHIFT_TEST_USER
          SNOWFLAKE_TEST_ACCOUNT_IDENTIFIER: op://github-actions/snowflake/SNOWFLAKE_TEST_ACCOUNT_IDENTIFIER
          SNOWFLAKE_TEST_DATABASE_NAME: op://github-actions/snowflake/SNOWFLAKE_TEST_DATABASE_NAME
          SNOWFLAKE_TEST_PASSWORD: op://github-actions/snowflake/SNOWFLAKE_TEST_PASSWORD
          SNOWFLAKE_TEST_PRIVATE_KEY_PASSPHRASE: op://github-actions/snowflake/SNOWFLAKE_TEST_PRIVATE_KEY_PASSPHRASE
          SNOWFLAKE_TEST_PRIVATE_KEY: op://github-actions/snowflake/SNOWFLAKE_TEST_PRIVATE_KEY
          SNOWFLAKE_TEST_SCHEMA_NAME: op://github-actions/snowflake/SNOWFLAKE_TEST_SCHEMA_NAME
          SNOWFLAKE_TEST_USER_LOGIN_NAME: op://github-actions/snowflake/SNOWFLAKE_TEST_USER_LOGIN_NAME
          SNOWFLAKE_TEST_WAREHOUSE_NAME: op://github-actions/snowflake/SNOWFLAKE_TEST_WAREHOUSE_NAME
          S3_AWS_ACCESS_KEY_ID: op://github-actions/s3/S3_AWS_ACCESS_KEY_ID
          S3_AWS_SECRET_ACCESS_KEY: op://github-actions/s3/S3_AWS_SECRET_ACCESS_KEY
          MONGODB_ATLAS_HOST: op://github-actions/mongodb-atlas/MONGODB_ATLAS_HOST
          MONGODB_ATLAS_USERNAME: op://github-actions/mongodb-atlas/MONGODB_ATLAS_USERNAME
          MONGODB_ATLAS_PASSWORD: op://github-actions/mongodb-atlas/MONGODB_ATLAS_PASSWORD
          MONGODB_ATLAS_DEFAULT_AUTH_DB: op://github-actions/mongodb-atlas/MONGODB_ATLAS_DEFAULT_AUTH_DB
          MONGODB_ATLAS_USE_SRV: op://github-actions/mongodb-atlas/MONGODB_ATLAS_USE_SRV
          MONGODB_ATLAS_SSL_ENABLED: op://github-actions/mongodb-atlas/MONGODB_ATLAS_SSL_ENABLED
      - name: Integration Tests (Misc)
        run: nox -s "pytest(misc-integration-external)"
      - name: Integration Tests (External)
        run: nox -s "pytest(ops-external-datastores)"
External-SaaS-Connectors:
needs: [Check-Backend-Changes, Check-Container-Startup]
if: needs.Check-Backend-Changes.outputs.has_backend_changes == 'true' && (contains(github.event.pull_request.labels.*.name, 'run unsafe ci checks') || github.event_name == 'push' || github.event_name == 'merge_group')
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
contents: read
id-token: write
strategy:
max-parallel: 1 # This prevents collisions in shared external resources
steps:
- name: Download container
uses: actions/download-artifact@v4
with:
name: python-${{ env.DEFAULT_PYTHON_VERSION }}
path: /tmp/
- name: Load image
run: docker load --input /tmp/python-${{ env.DEFAULT_PYTHON_VERSION }}.tar
- name: Checkout
uses: actions/checkout@v4
- name: Set Up Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.DEFAULT_PYTHON_VERSION }}
cache: "pip"
- name: Install Nox
run: pip install nox>=2022
- name: Cache Nox virtual environment
uses: actions/cache@v4
with:
path: .nox/
key: ${{ runner.os }}-nox-${{ github.job }}-${{ env.DEFAULT_PYTHON_VERSION }}-${{ hashFiles('noxfile.py') }}-${{ hashFiles('noxfiles/**.py') }}-${{ hashFiles('pyproject.toml') }}
restore-keys: |
${{ runner.os }}-nox-${{ github.job }}-${{ env.DEFAULT_PYTHON_VERSION }}-
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ env.DOCKER_USER }}
password: ${{ env.DOCKER_RO_TOKEN }}
- name: SaaS Connector Tests
env:
SAAS_OP_SERVICE_ACCOUNT_TOKEN: ${{ secrets.SAAS_OP_SERVICE_ACCOUNT_TOKEN }}
SAAS_SECRETS_OP_VAULT_ID: ${{ secrets.SAAS_SECRETS_OP_VAULT_ID }}
run: nox -s "pytest(ops-saas)"
  # Summary job for branch protection - passes if all jobs pass or are skipped
  Backend-Checks-Summary:
    runs-on: ubuntu-latest
    if: always()
    needs:
      - Collect-Tests
      - Validate-API-Types
      - Build
      - Performance-Checks
      - Check-Container-Startup
      - Migration-Checks
      - Misc-Tests
      - Safe-Tests
    # Unsafe tests are optional, so we don't include them in the summary
    steps:
      - name: Check job results
        run: |
          # Echo every result first so the log shows the full picture even
          # when the job fails partway through the checks below.
          echo "Collect-Tests: ${{ needs.Collect-Tests.result }}"
          echo "Validate-API-Types: ${{ needs.Validate-API-Types.result }}"
          echo "Build: ${{ needs.Build.result }}"
          echo "Performance-Checks: ${{ needs.Performance-Checks.result }}"
          echo "Check-Container-Startup: ${{ needs.Check-Container-Startup.result }}"
          echo "Migration-Checks: ${{ needs.Migration-Checks.result }}"
          echo "Misc-Tests: ${{ needs.Misc-Tests.result }}"
          echo "Safe-Tests: ${{ needs.Safe-Tests.result }}"
          # Fail only if jobs failed (not if skipped)
          if [ "${{ needs.Collect-Tests.result }}" == "failure" ] || \
             [ "${{ needs.Build.result }}" == "failure" ] || \
             [ "${{ needs.Check-Container-Startup.result }}" == "failure" ] || \
             [ "${{ needs.Migration-Checks.result }}" == "failure" ] || \
             [ "${{ needs.Misc-Tests.result }}" == "failure" ] || \
             [ "${{ needs.Safe-Tests.result }}" == "failure" ]; then
            echo "❌ One or more required jobs failed"
            exit 1
          fi
          # Check for cancelled jobs (treat as failure)
          if [ "${{ needs.Collect-Tests.result }}" == "cancelled" ] || \
             [ "${{ needs.Build.result }}" == "cancelled" ] || \
             [ "${{ needs.Check-Container-Startup.result }}" == "cancelled" ] || \
             [ "${{ needs.Migration-Checks.result }}" == "cancelled" ] || \
             [ "${{ needs.Misc-Tests.result }}" == "cancelled" ] || \
             [ "${{ needs.Safe-Tests.result }}" == "cancelled" ]; then
            echo "❌ One or more required jobs were cancelled"
            exit 1
          fi
          # Performance checks can fail without blocking (continue-on-error: true)
          # NOTE(review): with job-level continue-on-error the reported result
          # may already be normalized to success — confirm this branch can fire.
          if [ "${{ needs.Performance-Checks.result }}" == "failure" ]; then
            echo "⚠️ Performance checks failed but not blocking"
          fi
          # API type validation can fail without blocking (informational check)
          if [ "${{ needs.Validate-API-Types.result }}" == "failure" ]; then
            echo "⚠️ API type validation found mismatches but not blocking"
          fi
          echo "✅ All required backend checks passed or were skipped"