# docs(release-notes): call out Docker runc CVEs (#18)
name: Algolia Reindex
on:
push:
branches:
- main
paths:
- docs/**
- i18n/**
- src/**
- static/**
- docusaurus.config.ts
- sidebars.js
- sidebar-semver-sort.js
workflow_dispatch:
workflow_call:
secrets:
ALGOLIA_CRAWLER_USER_ID:
required: true
ALGOLIA_CRAWLER_API_KEY:
required: true
concurrency:
group: algolia-reindex
cancel-in-progress: false
jobs:
algolia-reindex:
name: Reindex Algolia Search
runs-on: ubuntu-latest
env:
ALGOLIA_APP_ID: ${{ vars.ALGOLIA_APP_ID || 'JUYLFQHE7W' }}
ALGOLIA_CRAWLER_NAME: ${{ vars.ALGOLIA_CRAWLER_NAME || 'unraid' }}
ALGOLIA_REINDEX_DELAY_SECONDS: ${{ vars.ALGOLIA_REINDEX_DELAY_SECONDS || '300' }}
steps:
- name: Wait for docs deployment to propagate
if: github.event_name == 'push'
run: |
set -euo pipefail
echo "Waiting ${ALGOLIA_REINDEX_DELAY_SECONDS}s before reindexing ${ALGOLIA_CRAWLER_NAME}."
sleep "${ALGOLIA_REINDEX_DELAY_SECONDS}"
- name: Resolve crawler id
id: resolve
env:
ALGOLIA_CRAWLER_USER_ID: ${{ secrets.ALGOLIA_CRAWLER_USER_ID }}
ALGOLIA_CRAWLER_API_KEY: ${{ secrets.ALGOLIA_CRAWLER_API_KEY }}
run: |
set -euo pipefail
response="$(
curl --silent --show-error --fail \
--user "${ALGOLIA_CRAWLER_USER_ID}:${ALGOLIA_CRAWLER_API_KEY}" \
"https://crawler.algolia.com/api/1/crawlers?name=${ALGOLIA_CRAWLER_NAME}&itemsPerPage=100"
)"
crawler_id="$(
jq -er \
'.items[0].id' \
<<<"${response}"
)"
details="$(
curl --silent --show-error --fail \
--user "${ALGOLIA_CRAWLER_USER_ID}:${ALGOLIA_CRAWLER_API_KEY}" \
"https://crawler.algolia.com/api/1/crawlers/${crawler_id}"
)"
crawler_status="$(
jq -er \
'if .blocked then "blocked"
elif .reindexing then "reindexing"
elif .running then "running"
else "paused"
end' \
<<<"${details}"
)"
echo "crawler_id=${crawler_id}" >> "${GITHUB_OUTPUT}"
echo "crawler_status=${crawler_status}" >> "${GITHUB_OUTPUT}"
echo "Resolved crawler ${ALGOLIA_CRAWLER_NAME} (${crawler_id}) with current status ${crawler_status}."
- name: Trigger crawler reindex
id: reindex
env:
ALGOLIA_CRAWLER_USER_ID: ${{ secrets.ALGOLIA_CRAWLER_USER_ID }}
ALGOLIA_CRAWLER_API_KEY: ${{ secrets.ALGOLIA_CRAWLER_API_KEY }}
run: |
set -euo pipefail
response="$(
curl --silent --show-error --fail \
--user "${ALGOLIA_CRAWLER_USER_ID}:${ALGOLIA_CRAWLER_API_KEY}" \
--request POST \
--header "content-type: application/json" \
"https://crawler.algolia.com/api/1/crawlers/${{ steps.resolve.outputs.crawler_id }}/reindex"
)"
task_id="$(
jq -er \
'.taskId' \
<<<"${response}"
)"
echo "task_id=${task_id}" >> "${GITHUB_OUTPUT}"
echo "Queued Algolia reindex task ${task_id} for crawler ${ALGOLIA_CRAWLER_NAME}."
- name: Confirm crawler entered reindexing state
env:
ALGOLIA_CRAWLER_USER_ID: ${{ secrets.ALGOLIA_CRAWLER_USER_ID }}
ALGOLIA_CRAWLER_API_KEY: ${{ secrets.ALGOLIA_CRAWLER_API_KEY }}
run: |
set -euo pipefail
for attempt in 1 2 3 4 5; do
response="$(
curl --silent --show-error --fail \
--user "${ALGOLIA_CRAWLER_USER_ID}:${ALGOLIA_CRAWLER_API_KEY}" \
"https://crawler.algolia.com/api/1/crawlers/${{ steps.resolve.outputs.crawler_id }}"
)"
reindexing="$(
jq -er \
'.reindexing' \
<<<"${response}"
)"
if [ "${reindexing}" = "true" ]; then
status="$(
jq -er \
'if .blocked then "blocked"
elif .reindexing then "reindexing"
elif .running then "running"
else "paused"
end' \
<<<"${response}"
)"
echo "Crawler ${ALGOLIA_CRAWLER_NAME} is now ${status}."
exit 0
fi
sleep 5
done
echo "Crawler ${ALGOLIA_CRAWLER_NAME} did not report reindexing=true after the reindex request." >&2
exit 1