From ccf8230ff582552467a7a6efede1dca1247277ad Mon Sep 17 00:00:00 2001 From: spwoodcock Date: Fri, 25 Jul 2025 18:29:09 +0100 Subject: [PATCH] feat: remove pglite, rely on svelte store + electric sync --- compose.yaml | 1 - src/Dockerfile.ui.debug | 7 - src/Dockerfile.ui.prod | 5 - src/mapper/package.json | 5 +- src/mapper/pnpm-lock.yaml | 40 -- src/mapper/src/lib/api/fetch.ts | 63 +-- src/mapper/src/lib/api/offline.ts | 115 ------ .../components/dialog-entities-actions.svelte | 6 +- .../src/lib/components/forms/wrapper.svelte | 7 +- .../components/map/flatgeobuf-layer.svelte | 1 - src/mapper/src/lib/components/map/main.svelte | 8 +- .../src/lib/components/offline/index.svelte | 6 +- .../components/offline/offline-data.svelte | 172 --------- src/mapper/src/lib/db/api-submissions.ts | 120 ------ src/mapper/src/lib/db/entities.ts | 87 ----- src/mapper/src/lib/db/events.ts | 81 ---- src/mapper/src/lib/db/helpers.ts | 38 -- src/mapper/src/lib/db/init.js | 71 ---- src/mapper/src/lib/db/pglite.ts | 188 --------- src/mapper/src/lib/db/projects.ts | 63 --- src/mapper/src/lib/types.ts | 2 +- src/mapper/src/lib/utils/dev-reset.ts | 15 +- src/mapper/src/routes/+layout.svelte | 25 +- src/mapper/src/routes/+layout.ts | 3 - src/mapper/src/routes/+page.svelte | 23 +- src/mapper/src/routes/+page.ts | 5 +- .../routes/project/[projectId]/+page.svelte | 83 +--- .../src/routes/project/[projectId]/+page.ts | 4 +- src/mapper/src/store/common.svelte.ts | 6 - src/mapper/src/store/entities.svelte.ts | 358 ++++++------------ src/mapper/src/store/projects.svelte.ts | 27 +- src/mapper/src/store/tasks.svelte.ts | 134 +++---- src/mapper/static/config.json | 1 - src/mapper/tests/offline.test.ts | 214 ----------- src/mapper/vite.config.ts | 7 - 35 files changed, 210 insertions(+), 1781 deletions(-) delete mode 100644 src/mapper/src/lib/api/offline.ts delete mode 100644 src/mapper/src/lib/components/offline/offline-data.svelte delete mode 100644 src/mapper/src/lib/db/api-submissions.ts delete mode 100644 src/mapper/src/lib/db/entities.ts delete mode 100644 src/mapper/src/lib/db/helpers.ts delete mode 100644 src/mapper/src/lib/db/init.js delete mode 100644 src/mapper/src/lib/db/pglite.ts delete mode 100644 src/mapper/src/lib/db/projects.ts delete mode 100644 src/mapper/tests/offline.test.ts diff --git a/compose.yaml b/compose.yaml index 39bf71fd1f..73a198a08b 100644 --- a/compose.yaml +++ b/compose.yaml @@ -211,7 +211,6 @@ services: additional_contexts: - code=src/mapper dockerfile: Dockerfile.ui.debug - target: init-db volumes: - ./src/mapper/messages:/app/messages - ./src/mapper/project.inlang:/app/project.inlang diff --git a/src/Dockerfile.ui.debug b/src/Dockerfile.ui.debug index 19e3824437..5c09c0fd07 100755 --- a/src/Dockerfile.ui.debug +++ b/src/Dockerfile.ui.debug @@ -16,10 +16,3 @@ ENV PATH="$PNPM_HOME:$PATH" RUN corepack enable && corepack install RUN pnpm install CMD ["pnpm", "run", "dev"] - - -# Initialising PGLite db and creating tar dump (used during tests) -FROM build AS init-db -COPY --from=code ./src/lib/db/init.js ./src/lib/db/init.js -RUN pnpm run init-db -RUN cp pgdata.tar.gz /migrations/init/ diff --git a/src/Dockerfile.ui.prod b/src/Dockerfile.ui.prod index adc185948d..38e1c174c8 100644 --- a/src/Dockerfile.ui.prod +++ b/src/Dockerfile.ui.prod @@ -25,11 +25,6 @@ COPY mapper/package.json mapper/pnpm-lock.yaml ./ RUN corepack enable && corepack install RUN pnpm install COPY mapper/ . 
-# # Code no longer used for bootstrapping db from tar -# # Build the bootstrap db -# RUN pnpm run init-db -# # Copy to required location for build -# RUN cp pgdata.tar.gz /migrations/init/ RUN pnpm run build diff --git a/src/mapper/package.json b/src/mapper/package.json index 2a3483c879..ba33c4ffc3 100644 --- a/src/mapper/package.json +++ b/src/mapper/package.json @@ -16,8 +16,7 @@ "format": "prettier --plugin prettier-plugin-svelte --write .", "test:integration": "playwright test", "test:unit": "vitest", - "cleanup": "pnpm dlx knip", - "init-db": "node src/lib/db/init.js" + "cleanup": "pnpm dlx knip" }, "devDependencies": { "@inlang/paraglide-js": "^2.0.13", @@ -49,8 +48,6 @@ }, "dependencies": { "@electric-sql/client": "1.0.5", - "@electric-sql/pglite": "^0.3.4", - "@electric-sql/pglite-sync": "^0.3.7", "@hotosm/ui": "0.2.0-b5", "@maplibre/maplibre-gl-directions": "^0.7.1", "@shoelace-style/shoelace": "2.17.1", diff --git a/src/mapper/pnpm-lock.yaml b/src/mapper/pnpm-lock.yaml index 24e563adee..e7ab1cc646 100644 --- a/src/mapper/pnpm-lock.yaml +++ b/src/mapper/pnpm-lock.yaml @@ -11,12 +11,6 @@ importers: '@electric-sql/client': specifier: 1.0.5 version: 1.0.5 - '@electric-sql/pglite': - specifier: ^0.3.4 - version: 0.3.4 - '@electric-sql/pglite-sync': - specifier: ^0.3.7 - version: 0.3.7(@electric-sql/pglite@0.3.4) '@hotosm/ui': specifier: 0.2.0-b5 version: 0.2.0-b5(@types/react@18.3.3) @@ -695,25 +689,9 @@ packages: resolution: {integrity: sha512-WyOx8cJQ+FQus4Mm4uPIZA64gbk3Wxh0so5Lcii0aJifqwoVOlfFtorjLE0Hen4OYyHZMXDWqMmaQemBhgxFRQ==} engines: {node: '>=14'} - '@electric-sql/client@1.0.0': - resolution: {integrity: sha512-kGiVbBIlMqc/CeJpWZuLjxNkm0836NWxeMtIWH2w5IUK8pUL13hyxg3ZkR7+FlTGhpKuZRiCP5nPOH9D6wbhPw==} - '@electric-sql/client@1.0.5': resolution: {integrity: sha512-DO7dvfCbZU6k33vr3ymBCXER6kPpoBODoRBru7oI16B4/ZXlxhMBpsmzmd8p9dQrPICCpQm6bBkNI6qI3oUAIQ==} - '@electric-sql/experimental@1.0.0': - resolution: {integrity: sha512-wOKZyph3cvJ4J5fDwqDpR7nilPOHEFwAtbAM5X/PhV3JZCSzmkbosaQRWsxbHQ80MJv7gNFP960+TyqGAjqouQ==} - peerDependencies: - '@electric-sql/client': 1.0.0 - - '@electric-sql/pglite-sync@0.3.7': - resolution: {integrity: sha512-sMlt3Y338VdhRUtbP3Vm/O6h6O9uqsSxwE52lHfOgRmeUv1zAymqgaeHzDztzpcEs2svfQbnKXetI/97mqqbEA==} - peerDependencies: - '@electric-sql/pglite': 0.3.4 - - '@electric-sql/pglite@0.3.4': - resolution: {integrity: sha512-h5hoL2GuxcWN8Q3+jtesIRem14iIvAZVEsTeUF6eO9RiUb6ar73QVIEW9t+Ud58iXAcAE2dFMtWqw3W2Oo4LDw==} - '@esbuild/aix-ppc64@0.25.5': resolution: {integrity: sha512-9o3TMmpmftaCMepOdA5k/yDw8SfInyzWWTjYTFCX3kPSDJMROQTb8jg+h9Cnwnmm1vOzvxN7gIfB5V2ewpjtGA==} engines: {node: '>=18'} @@ -4688,30 +4666,12 @@ snapshots: '@ctrl/tinycolor@4.1.0': {} - '@electric-sql/client@1.0.0': - optionalDependencies: - '@rollup/rollup-darwin-arm64': 4.44.1 - '@electric-sql/client@1.0.5': dependencies: '@microsoft/fetch-event-source': 2.0.1 optionalDependencies: '@rollup/rollup-darwin-arm64': 4.28.0 - '@electric-sql/experimental@1.0.0(@electric-sql/client@1.0.0)': - dependencies: - '@electric-sql/client': 1.0.0 - optionalDependencies: - '@rollup/rollup-darwin-arm64': 4.44.1 - - '@electric-sql/pglite-sync@0.3.7(@electric-sql/pglite@0.3.4)': - dependencies: - '@electric-sql/client': 1.0.0 - '@electric-sql/experimental': 1.0.0(@electric-sql/client@1.0.0) - '@electric-sql/pglite': 0.3.4 - - '@electric-sql/pglite@0.3.4': {} - '@esbuild/aix-ppc64@0.25.5': optional: true diff --git a/src/mapper/src/lib/api/fetch.ts b/src/mapper/src/lib/api/fetch.ts index 9b9987aff7..1ab2659fb4 100644 --- 
a/src/mapper/src/lib/api/fetch.ts +++ b/src/mapper/src/lib/api/fetch.ts @@ -1,7 +1,3 @@ -import type { PGlite } from '@electric-sql/pglite'; -import type { DbApiSubmissionType } from '$lib/types.ts'; -import { DbApiSubmission } from '$lib/db/api-submissions.ts'; - const API_URL = import.meta.env.VITE_API_URL; const DEFAULT_CACHE_NAME = 'c488ea01-8c52-4a18-a93e-934bc77f1eb8'; @@ -66,61 +62,4 @@ async function fetchFormMediBlobUrls(projectId: number): Promise<{ [filename: st return formMediaBlobs; } -function decodeBase64File(base64: string, name: string, type: string): File { - const byteString = atob(base64.split(',')[1]); - const arrayBuffer = new Uint8Array(byteString.length); - for (let i = 0; i < byteString.length; i++) { - arrayBuffer[i] = byteString.charCodeAt(i); - } - const blob = new Blob([arrayBuffer], { type }); - return new File([blob], name, { type }); -} - -async function getSubmissionFetchOptions(row: DbApiSubmissionType): Promise { - if (row.content_type === 'application/json') { - return { - method: row.method, - body: JSON.stringify(row.payload), - headers: { - 'Content-Type': 'application/json', - }, - credentials: 'include', - }; - } - - if (row.content_type === 'multipart/form-data') { - const form = new FormData(); - form.append('submission_xml', row.payload.form.submission_xml); - - for (const f of row.payload.form.submission_files) { - const file = decodeBase64File(f.base64, f.name, f.type); - form.append('submission_files', file); - } - - return { - method: row.method, - body: form, - credentials: 'include', - }; - } - - throw new Error(`Unsupported content_type: ${row.content_type}`); -} - -async function trySendingSubmission(db: PGlite, row: DbApiSubmissionType): Promise { - try { - const options = await getSubmissionFetchOptions(row); - const res = await fetch(row.url, options); - - if (!res.ok) throw new Error(`HTTP ${res.status}`); - - await DbApiSubmission.update(db, row.id, 'RECEIVED'); - return true; - } catch (err) { - console.error('Offline send failed:', err); - await DbApiSubmission.update(db, row.id, 'FAILED', String(err)); - return false; - } -} - -export { fetchCachedBlobUrl, fetchBlobUrl, fetchFormMediBlobUrls, trySendingSubmission }; +export { fetchCachedBlobUrl, fetchBlobUrl, fetchFormMediBlobUrls }; diff --git a/src/mapper/src/lib/api/offline.ts b/src/mapper/src/lib/api/offline.ts deleted file mode 100644 index a0945499d8..0000000000 --- a/src/mapper/src/lib/api/offline.ts +++ /dev/null @@ -1,115 +0,0 @@ -import type { PGlite } from '@electric-sql/pglite'; - -import { DbApiSubmission } from '$lib/db/api-submissions.ts'; -import { trySendingSubmission } from '$lib/api/fetch.ts'; -import { getCommonStore, getAlertStore } from '$store/common.svelte.ts'; -import { getLoginStore } from '$store/login.svelte'; - -const commonStore = getCommonStore(); -const alertStore = getAlertStore(); -const loginStore = getLoginStore(); - -function wait(ms: number) { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -/** - * Iterate and attempt to send all pending API submissions from the local database. - * - * This function is triggered in two scenarios: - * - Automatically when coming back online (debounced via $effect) - * - Manually via the sync button in the map UI - * - * Each submission is sent sequentially. If the API acknowledges receipt (status becomes 'RECEIVED'), - * it is considered a success. Failures are logged but not retried immediately (they remain in 'PENDING'). 
- * - * Syncing status is tracked via `commonStore.offlineDataIsSyncing` and alerts are shown after each attempt. - */ -async function iterateAndSendOfflineSubmissions(db: PGlite): Promise { - if (!db) return false; - - // Count remaining pending submissions - const total = await DbApiSubmission.count(db); - if (total === 0) return true; // Nothing to be done - - const queuedSubmissions = await DbApiSubmission.allQueued(db); - const hasUserSub = queuedSubmissions?.some((row) => !!row?.user_sub); - const authDetails = loginStore.getAuthDetails; - - // if user made offline submissions while logged in and not logged in during online sync, prompt them to log in before before uploading submissions - if (hasUserSub && !authDetails) { - alertStore.setAlert({ - message: 'You must be logged in to send offline data.', - variant: 'danger', - }); - loginStore.toggleLoginModal(true); - return false; - } - - commonStore.setOfflineDataIsSyncing(true); - alertStore.setAlert({ - message: `Found ${total} offline submissions.`, - variant: 'default', - }); - - let sent = 0; - let failed = 0; - let processed = 0; // How many attempts made (success + fail) - - // Use `while (true)` for continued iteration, as we manually break loop below - // It's safer than `while (processed < total)` in this dynamic context - while (true) { - const row = await DbApiSubmission.next(db); - if (!row) break; // No more pending entries - - const success = await trySendingSubmission(db, row); - - if (success) { - sent++; - // Commented, as this is bad ux if multiple send in quick succession - // alertStore.setAlert({ - // message: `Successfully sent offline data ${sent}/${total} to API`, - // variant: 'success' - // }); - await DbApiSubmission.deleteById(db, row.id); - commonStore.setOfflineSyncPercentComplete((sent / total) * 100); - // Wait 1 second until next API call - await wait(1000); - } else { - alertStore.setAlert({ - message: `Failed to send offline data ${sent + 1}/${total} to API — stopping sync.`, - variant: 'danger', - }); - failed++; - await DbApiSubmission.moveToFailedTable(db, row.id); - } - } - - commonStore.setOfflineSyncPercentComplete(null); - commonStore.setOfflineDataIsSyncing(false); - - if (sent + failed === total) { - const remainingSubmissions = await DbApiSubmission.count(db); - if (remainingSubmissions !== 0) { - alertStore.setAlert({ - message: `Offline sync incomplete: ${remainingSubmissions} remaining to send.`, - variant: 'warning', - }); - return false; - } - - alertStore.setAlert({ - message: `Finished sending offline data.`, - variant: 'success', - }); - return true; - } else { - alertStore.setAlert({ - message: `Offline sync incomplete: ${sent}/${total} submissions sent.`, - variant: 'warning', - }); - return false; - } -} - -export { trySendingSubmission, iterateAndSendOfflineSubmissions }; diff --git a/src/mapper/src/lib/components/dialog-entities-actions.svelte b/src/mapper/src/lib/components/dialog-entities-actions.svelte index 3f71ecfebf..3275660822 100644 --- a/src/mapper/src/lib/components/dialog-entities-actions.svelte +++ b/src/mapper/src/lib/components/dialog-entities-actions.svelte @@ -1,6 +1,5 @@ - -
[extraction damage: the hunks for dialog-entities-actions.svelte, forms/wrapper.svelte, map/flatgeobuf-layer.svelte, map/main.svelte, offline/index.svelte, and most of the deleted offline/offline-data.svelte were stripped as markup. Recoverable template of the deleted offline-data.svelte: queued and failed offline submission counts, a "download CSV" button for each, a per-submission detail list (ID, Type, Queued At, Last Attempt At, Retry Count, Error), and a "No queued submissions found." empty state.]
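With PGLite removed, reads are expected to come from Electric shape subscriptions feeding Svelte stores directly, per the commit subject. A minimal sketch of that pattern, assuming the ShapeStream/Shape API of @electric-sql/client 1.0.x as pinned in the lockfile above; the endpoint URL, table name, and logging are illustrative placeholders, not this project's actual code:

	import { ShapeStream, Shape } from '@electric-sql/client';

	// Hypothetical sync of the odk_entities table over Electric's HTTP API.
	// The shape endpoint URL is a placeholder.
	const stream = new ShapeStream({
		url: 'https://electric.example.com/v1/shape',
		params: { table: 'odk_entities' },
	});

	// Shape materialises the change stream into the latest full row set.
	const shape = new Shape(stream);

	// Subscribers receive the up-to-date rows on every change; writing these
	// straight into a Svelte store removes the need for a local Postgres.
	shape.subscribe(({ rows }) => {
		console.log(`synced ${rows.length} entity rows`);
	});

The durable offline queue deleted below (api_submissions / api_failures) has no direct equivalent in this pattern; pending writes appear to move to in-memory store state instead.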
diff --git a/src/mapper/src/lib/db/api-submissions.ts b/src/mapper/src/lib/db/api-submissions.ts deleted file mode 100644 index c35d00f639..0000000000 --- a/src/mapper/src/lib/db/api-submissions.ts +++ /dev/null @@ -1,120 +0,0 @@ -import type { PGlite } from '@electric-sql/pglite'; -import type { DbApiSubmissionType } from '$lib/types.ts'; - -async function create( - db: PGlite, - data: { - url: string; - user_sub?: string | undefined; - method: DbApiSubmissionType['method']; - content_type?: DbApiSubmissionType['content_type']; - payload?: any; - headers?: Record | null; - }, -): Promise { - if (!db) return; - - const { - url, - user_sub = null, // default - method, - content_type = 'application/json', // default - payload = null, - headers = null, - } = data; - - const result = await db.query( - `INSERT INTO api_submissions - (url, user_sub, method, content_type, payload, headers, status, retry_count, error, queued_at) - VALUES - ($1, $2, $3, $4, $5, $6, 'PENDING', 0, NULL, now()) - RETURNING *`, - [url, user_sub, method, content_type, payload, headers], - ); - - return result.rows.at(-1) as DbApiSubmissionType | undefined; -} - -async function count(db: PGlite): Promise { - if (!db) return 0; - - const dbData = await db.query(`SELECT COUNT(*) as count FROM api_submissions WHERE status = 'PENDING'`); - const row = dbData.rows.at(-1) as { count: number } | undefined; - return row?.count ?? 0; -} - -async function next(db: PGlite): Promise { - if (!db) return; - - // NOTE we allow 2 retries when calling the API - const dbData = await db.query( - `SELECT * FROM api_submissions - WHERE status = 'PENDING' OR (status = 'FAILED' AND retry_count < 2) - ORDER BY queued_at ASC - LIMIT 1`, - ); - return dbData.rows.at(-1) as DbApiSubmissionType | undefined; -} - -async function update( - db: PGlite, - id: number, - status: 'RECEIVED' | 'PENDING' | 'FAILED', - error: string | null = null, -): Promise { - if (!db) return; - - await db.query( - `UPDATE api_submissions - SET - status = $1, - error = $2, - last_attempt_at = now(), - success_at = CASE WHEN $3 = 'RECEIVED' THEN now() ELSE NULL END, - retry_count = CASE WHEN $4 = 'FAILED' THEN retry_count + 1 ELSE retry_count END - WHERE id = $5`, - // We have to send status as multiple vars, else type inference throws errors - [status, error, status, status, id], - ); -} - -async function deleteById(db: PGlite, id: number): Promise { - if (!db) return; - await db.query(`DELETE FROM api_submissions WHERE id = $1;`, [id]); -} - -async function allQueued(db: PGlite): Promise { - if (!db) return null; - - const query = await db.query(`SELECT * FROM api_submissions;`); - return (query.rows as DbApiSubmissionType[]) ?? []; -} - -async function moveToFailedTable(db: PGlite, id: number): Promise { - if (!db) return; - await db.query( - `INSERT INTO api_failures - SELECT * FROM api_submissions - WHERE id = $1;`, - [id], - ); - await deleteById(db, id); -} - -async function allFailed(db: PGlite): Promise { - if (!db) return null; - - const query = await db.query(`SELECT * FROM api_failures;`); - return (query.rows as DbApiSubmissionType[]) ?? 
[]; -} - -export const DbApiSubmission = { - allQueued, - create, - update, - count, - next, - deleteById, - moveToFailedTable, - allFailed, -}; diff --git a/src/mapper/src/lib/db/entities.ts b/src/mapper/src/lib/db/entities.ts deleted file mode 100644 index aaa270bccf..0000000000 --- a/src/mapper/src/lib/db/entities.ts +++ /dev/null @@ -1,87 +0,0 @@ -import type { PGlite } from '@electric-sql/pglite'; -import type { Feature } from 'geojson'; - -import type { DbEntityType } from '$lib/types'; -import { applyDataToTableWithCsvCopy } from '$lib/db/helpers'; -import { javarosaToGeojsonGeom } from '$lib/odk/javarosa'; - -async function update(db: PGlite, entity: Partial) { - await db.query( - `UPDATE odk_entities - SET status = $2, submission_ids = $3 - WHERE entity_id = $1`, - [entity.entity_id, entity.status, entity.submission_ids], - ); -} - -async function create(db: PGlite, entity: DbEntityType) { - // Note if we are creating a single entity in the local db, it will always include - // the geometry field as it is a newgeom or badgeom record. - await db.query( - `INSERT INTO odk_entities ( - entity_id, - status, - project_id, - task_id, - submission_ids, - osm_id, - geometry, - created_by - ) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8)`, - [ - entity.entity_id, - entity.status, - entity.project_id, - entity.task_id, - entity.submission_ids, - entity.osm_id, - entity.geometry, - entity.created_by, - ], - ); -} - -// An optimised insert for multiple geom records in bulk -async function bulkCreate(db: PGlite, entities: DbEntityType[]) { - // The entities are already in Record format, however we ensure all undefined or empty strings are set to null for insert - const dataObj = entities.map((entity) => ({ - entity_id: entity.entity_id, - status: entity.status, - project_id: entity.project_id, - task_id: entity.task_id, - osm_id: entity.osm_id, - submission_ids: entity.submission_ids === '' ? null : entity.submission_ids, - geometry: entity.geometry === '' ? 
null : entity.geometry, - created_by: entity.created_by, - })); - - await applyDataToTableWithCsvCopy(db, 'odk_entities', dataObj); -} - -// Convert a DbEntity entry to a GeoJSON Feature -function toGeojsonFeature(entity: DbEntityType): Feature | null { - const geometry = javarosaToGeojsonGeom(entity.geometry); - if (!geometry) return null; - - return { - type: 'Feature', - geometry, - properties: { - entity_id: entity.entity_id, - status: entity.status, - project_id: entity.project_id, - task_id: entity.task_id, - osm_id: entity.osm_id, - submission_ids: entity.submission_ids, - created_by: entity.created_by, - }, - }; -} - -export const DbEntity = { - create, - update, - bulkCreate, - toGeojsonFeature, -}; diff --git a/src/mapper/src/lib/db/events.ts b/src/mapper/src/lib/db/events.ts index 751be76c6d..555934e49b 100644 --- a/src/mapper/src/lib/db/events.ts +++ b/src/mapper/src/lib/db/events.ts @@ -34,19 +34,6 @@ async function add_event( const response = await resp.json(); return response; - - // // Uncomment this for local first approach - // await db.task_events.create({ - // data: { - // event_id: crypto.randomUUID(), - // project_id: projectId, - // task_id: taskId, - // event: action, - // comment: comment, - // created_at: new Date().toISOString(), - // user_id: userId, - // }, - // }); } export async function mapTask(/* db, */ projectId: number, taskId: number): Promise { @@ -64,71 +51,3 @@ export async function resetTask(/* db, */ projectId: number, taskId: number): Pr export async function commentTask(/* db, */ projectId: number, taskId: number, comment: string): Promise { await add_event(/* db, */ projectId, taskId, 'COMMENT', comment); } - -// async function finishTask(db, projectId: number, taskId: number, userId: number): Promise { -// // const query = ` -// // WITH last AS ( -// // SELECT * -// // FROM task_events -// // WHERE project_id = ? AND task_id = ? -// // ORDER BY aid DESC -// // LIMIT 1 -// // ), -// // locked AS ( -// // SELECT * -// // FROM last -// // WHERE user_id = ? AND action = 'LOCKED_FOR_MAPPING' -// // ) -// // INSERT INTO task_events ( -// // event_id, project_id, task_id, action, -// // comment, created_at, user_id -// // ) -// // SELECT -// // ?, -- event_id -// // ?, -- project_id -// // ?, -- task_id -// // 'UNLOCKED_TO_VALIDATE', -// // 'Note: Mapping finished', -// // ? -// // user_id -// // FROM last -// // WHERE user_id = ? 
-// // RETURNING project_id, task_id, user_id; -// // ` - -// // const newEvent: NewEvent = db.rawQuery({ -// // sql: query, -// // bindParams: [ -// // projectId, -// // taskId, -// // userId, -// // // -// // genUUID(), -// // projectId, -// // taskId, -// // new Date().toISOString(), -// // // -// // userId, -// // ] -// // }) - -// // assert(newEvent.project_id === projectId); -// // assert(newEvent.task_id === taskId); -// // assert(newEvent.user_id === userId); - -// await add_event(db, projectId, taskId, userId, 'UNLOCKED_TO_VALIDATE'); -// } - -// async function validateTask(db, projectId: number, taskId: number, userId: number): Promise { -// await add_event(db, projectId, taskId, userId, 'LOCKED_FOR_VALIDATION'); -// } - -// async function goodTask(db, projectId: number, taskId: number, userId: number): Promise { -// await add_event(db, projectId, taskId, userId, 'VALIDATED'); -// } - -// async function commentTask(db, projectId: number, taskId: number, userId: number, comment: string): Promise { -// await add_event(db, projectId, taskId, userId, 'COMMENT', comment); -// } - -// export { mapTask, finishTask, validateTask, goodTask, commentTask }; diff --git a/src/mapper/src/lib/db/helpers.ts b/src/mapper/src/lib/db/helpers.ts deleted file mode 100644 index 4ce38a51e7..0000000000 --- a/src/mapper/src/lib/db/helpers.ts +++ /dev/null @@ -1,38 +0,0 @@ -import type { PGliteInterface } from '@electric-sql/pglite'; - -// For bulk import of rows into PGLite using COPY FROM csv blob functionality -// approach taken from https://github.com/electric-sql/pglite repo. -export async function applyDataToTableWithCsvCopy(db: PGliteInterface, table: string, data: Record[]) { - // Get column names from the first message - const columns = Object.keys(data[0]); - - // Create CSV data - const csvData = data - .map((message) => { - return columns - .map((column) => { - const value = message[column]; - // Escape double quotes and wrap in quotes if necessary - if (typeof value === 'string' && (value.includes(',') || value.includes('"') || value.includes('\n'))) { - return `"${value.replace(/"/g, '""')}"`; - } - return value === null ? 
'\\N' : value; - }) - .join(','); - }) - .join('\n'); - const csvBlob = new Blob([csvData], { type: 'text/csv' }); - - // Perform COPY FROM - await db.query( - ` - COPY "public"."${table}" (${columns.map((c) => `"${c}"`).join(', ')}) - FROM '/dev/blob' - WITH (FORMAT csv, NULL '\\N') - `, - [], - { - blob: csvBlob, - }, - ); -} diff --git a/src/mapper/src/lib/db/init.js b/src/mapper/src/lib/db/init.js deleted file mode 100644 index 210481997c..0000000000 --- a/src/mapper/src/lib/db/init.js +++ /dev/null @@ -1,71 +0,0 @@ -// Script to generate a PGLite db dump -// Creates db 'fmtm' & user 'fmtm' -// Applies required migrations -// NOTE this is a plain js file as it's ran directly by NodeJS -// NOTE (we use this during tests) - -import path from 'path'; -import fs from 'fs'; -import { PGlite } from '@electric-sql/pglite'; - -const MIGRATIONS_PATH = path.resolve('/migrations/init/'); -const MIGRATION_FILENAMES = [ - 'shared/1-enums.sql', - 'shared/2-tables.sql', - 'shared/3-constraints.sql', - 'shared/4-indexes.sql', - 'frontend-only/schema.sql', -]; - -async function createFreshDbDump() { - // Step 1: create a main db and create 'fmtm' db and user - const bootstrap = new PGlite({ - // debug: 1 - }); - await bootstrap.query(` - CREATE USER fmtm WITH PASSWORD 'fmtm'; - `); - await bootstrap.query(` - CREATE DATABASE fmtm OWNER fmtm; - `); - await bootstrap.query(` - ALTER SCHEMA public OWNER TO fmtm; - `); - - // Step 2: dump the bootstrap db (which contains created fmtm db and user) - const tempFile = await bootstrap.dumpDataDir(); - const tempFilePath = tempFile.name; - fs.writeFileSync(tempFilePath, Buffer.from(await tempFile.arrayBuffer())); - await bootstrap.close(); - - // Step 3: load it and connect to 'fmtm' as 'fmtm' user - const db = new PGlite({ - // debug: 1 - database: 'fmtm', - username: 'fmtm', - loadDataDir: tempFile, - }); - - // Remove the temporary bootstrap file - fs.rmSync(tempFilePath); - - // Step 4: Apply all migrations - for (const file of MIGRATION_FILENAMES) { - const sql = fs.readFileSync(path.join(MIGRATIONS_PATH, file), 'utf-8'); - console.log(`Running ${file}...`); - await db.exec(sql); - } - - // Step 5: Dump final database - const finalDump = await db.dumpDataDir(); - const finalDumpPath = finalDump.name; - fs.writeFileSync(finalDumpPath, Buffer.from(await finalDump.arrayBuffer())); - await db.close(); - - console.log(`✅ DB dump written to ${finalDumpPath}`); -} - -createFreshDbDump().catch((err) => { - console.error('❌ Failed to init DB:', err); - process.exit(1); -}); diff --git a/src/mapper/src/lib/db/pglite.ts b/src/mapper/src/lib/db/pglite.ts deleted file mode 100644 index 3e0b45edbe..0000000000 --- a/src/mapper/src/lib/db/pglite.ts +++ /dev/null @@ -1,188 +0,0 @@ -import { PGlite } from '@electric-sql/pglite'; -import { electricSync } from '@electric-sql/pglite-sync'; - -// Here we use IndexedDB with relaxedDurability to improve performance -// https://pglite.dev/docs/filesystems#indexeddb-fs -// https://pglite.dev/benchmarks -// Unfortunately the OPFS filesystem does not work well with Safari yet, -// and also adds the complication of needing a service worker init. 
-const DB_URL = 'idb://fieldtm'; - -import enums from '$migrations/init/shared/1-enums.sql?raw'; -import tables from '$migrations/init/shared/2-tables.sql?raw'; -import constraints from '$migrations/init/shared/3-constraints.sql?raw'; -import indexes from '$migrations/init/shared/4-indexes.sql?raw'; -import frontendOnlySchema from '$migrations/init/frontend-only/schema.sql?raw'; -const migrationFiles = import.meta.glob('$migrations/*.sql', { - query: '?raw', - import: 'default', - eager: true, -}); - -// To prevent loading the PGLite database twice, we wrap the -// initDb function in a top-level singleton that guarantees -// initDb is only run once per session, no matter how many -// times the +layout.ts load() function is called -let dbPromise: ReturnType | null = null; -export function getDbOnce(): Promise { - if (!dbPromise) { - dbPromise = getDb(); - } - return dbPromise; -} - -// NOTE here I had the idea to create the db structure in advance, -// NOTE export the blob as part of the build, then import at every -// NOTE start. The problem is the output is about 4MB gzipped, so -// NOTE this isn't any better than attempting bootstrap each time. -export async function loadDbFromDump(dbUrl: string = DB_URL, dbDumpData: string | Blob | Uint8Array): Promise { - console.warn('DB not initialized, loading from tarball...'); - // Can be from a URL in browser environment - // e.g. import dbDumpUrl from '$migrations/init/pgdata.tar.gz?url'; - - let dbDumpBlob: Blob; - - if (typeof window !== 'undefined' && typeof fetch === 'function' && typeof dbDumpData === 'string') { - // Browser environment - const response = await fetch(dbDumpData); - dbDumpBlob = await response.blob(); - } else { - // Node.js environment (Vitest, etc.) - dbDumpBlob = dbDumpData instanceof Blob ? dbDumpData : new Blob([dbDumpData], { type: 'application/x-gzip' }); - } - - return new PGlite(dbUrl, { - // debug: 1, - username: 'fmtm', - database: 'fmtm', - loadDataDir: dbDumpBlob, - relaxedDurability: true, - extensions: { - electric: electricSync(), - }, - }); -} - -// Try to open existing DB and test schema, else initialise schema from scratch. -// The tradeoff is slower performance on, first load but then better performance -// every time after. -const getDb = async (): Promise => { - if (dbPromise) { - return dbPromise; // Return the existing promise if already in progress - } - - dbPromise = (async () => { - try { - // Here we use IndexedDB with relaxedDurability to improve performance - // https://pglite.dev/docs/filesystems#indexeddb-fs - // https://pglite.dev/benchmarks - // Unfortunately the OPFS filesystem does not work well with Safari yet, - // and also adds the complication of needing a service worker init. - const db = new PGlite(DB_URL, { - username: 'fmtm', - database: 'fmtm', - relaxedDurability: true, - extensions: { - electric: electricSync(), - }, - // debug: 2 // show postgres logs for easier debugging - }); - - const tableCheck = await db.query(` - SELECT 1 FROM information_schema.tables - WHERE table_name = 'task_events'; - `); - - if (tableCheck.rows.length !== 1) { - const success = await cleanupIndexedDb('fieldtm'); - if (!success) { - console.warn('Failed to clear IndexedDB, attempting init anyway'); - } - throw new Error('Database schema is not initialised yet. 
Re-creating.'); - } - - await applyMigrations(db); - - return db; - } catch (e) { - // return loadDbFromDump(); - return initDb(); - } - })(); - - return dbPromise; -}; - -async function applyMigrations(db: PGlite): Promise { - const sorted = Object.entries(migrationFiles) - .sort(([a], [b]) => a.localeCompare(b)) - .map(([, sql]) => sql); - - for (const sql of sorted) { - await db.exec(sql); - } -} - -async function cleanupIndexedDb(dbName: string): Promise { - return new Promise((resolve) => { - const DBDeleteRequest = indexedDB.deleteDatabase(dbName); - DBDeleteRequest.onerror = () => { - console.error('The IndexedDB database could not be cleared!'); - resolve(false); - }; - DBDeleteRequest.onsuccess = () => { - console.log('Deleted existing database without valid schema.'); - resolve(true); - }; - }); -} - -const initDb = async (): Promise => { - // By default PGLite uses postgres user and database - // We need to bootstrap by creating fmtm user and database - // Then reconnect to the new db as the user - console.warn('Database not initialized, creating schema...'); - - const boostrapDb = new PGlite(DB_URL); - await boostrapDb.query(` - DO $$ - BEGIN - IF NOT EXISTS ( - SELECT * FROM pg_user where usename = 'fmtm' - ) THEN - CREATE USER fmtm WITH PASSWORD 'fmtm'; - -- Required permission for copying from CSV - GRANT pg_read_server_files TO fmtm; - END IF; - END $$; - `); - // Check if database exists (cannot use a DO block here) - const res = await boostrapDb.query(` - SELECT 1 FROM pg_database WHERE datname = 'fmtm'; - `); - if (res.rows.length === 0) { - await boostrapDb.query(`CREATE DATABASE fmtm OWNER fmtm;`); - } - - const finalDb = new PGlite(DB_URL, { - username: 'fmtm', - database: 'fmtm', - relaxedDurability: true, - extensions: { - electric: electricSync(), - }, - // debug: 2, - }); - - await finalDb.exec(` - ${enums} - ${tables} - ${constraints} - ${indexes} - ${frontendOnlySchema} - `); - - await applyMigrations(finalDb); - - return finalDb; -}; diff --git a/src/mapper/src/lib/db/projects.ts b/src/mapper/src/lib/db/projects.ts deleted file mode 100644 index c4b5c845b8..0000000000 --- a/src/mapper/src/lib/db/projects.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { PGlite } from '@electric-sql/pglite'; -import type { DbProjectType } from '$lib/types'; -import { DB_PROJECT_COLUMNS } from '$lib/types'; - -async function all(db: PGlite): Promise { - if (!db) return null; - - const query = await db.query(`SELECT * FROM projects;`); - return (query.rows as DbProjectType[]) ?? 
[]; -} - -async function one(db: PGlite, projectId: string): Promise { - if (!db) return; - - const response = await db.query(`SELECT * FROM projects WHERE id = $1;`, [projectId]); - return response.rows.at(-1) as DbProjectType | undefined; -} - -async function upsert(db: PGlite, projectData: Partial): Promise { - if (!db || !projectData || !projectData.id) return; - - // Filter keys to only include those present in the actual DB schema - const columns = Object.keys(projectData).filter((key) => DB_PROJECT_COLUMNS.has(key)); - if (columns.length === 0) { - console.warn('Project upsert skipped: no valid columns provided'); - return; - } - - const placeholders = columns.map((_, i) => `$${i + 1}`).join(', '); - const values = columns.map((key) => projectData[key as keyof DbProjectType]); - - const updateClause = columns - .filter((col) => col !== 'id') // Don't update ID - .map((col) => `${col} = excluded.${col}`) - .join(', '); - - const sql = ` - INSERT INTO projects (${columns.join(', ')}) - VALUES (${placeholders}) - ON CONFLICT(id) - ${updateClause ? `DO UPDATE SET ${updateClause}` : `DO NOTHING`}; - `; - - await db.query(sql, values); -} - -// This isn't super efficient, but as we only insert 12 at a time, it's not terrible -// We want to do this instead of DELETE then bulk COPY, as we don't want to lose the -// project data already loaded if the user went to a project details page. -async function bulkUpsert(db: PGlite, projects: Partial[]): Promise { - if (!db || !projects.length) return; - - for (const project of projects) { - await DbProject.upsert(db, project); - } -} - -export const DbProject = { - one, - all, - upsert, - bulkUpsert, -}; diff --git a/src/mapper/src/lib/types.ts b/src/mapper/src/lib/types.ts index 8d328b3149..003fde0e7a 100644 --- a/src/mapper/src/lib/types.ts +++ b/src/mapper/src/lib/types.ts @@ -215,7 +215,7 @@ export type DbEntityType = { status: entityStatusOptions; project_id: number; task_id: number; - osm_id: number; + osm_id: bigint; submission_ids: string; geometry: string | null; created_by: string | null; diff --git a/src/mapper/src/lib/utils/dev-reset.ts b/src/mapper/src/lib/utils/dev-reset.ts index 8a4524c1be..0b99bb0aaf 100644 --- a/src/mapper/src/lib/utils/dev-reset.ts +++ b/src/mapper/src/lib/utils/dev-reset.ts @@ -19,20 +19,7 @@ export async function clearAllDevCaches() { await Promise.all(cacheNames.map((name) => caches.delete(name))); } - // 3. Clear all IndexedDB databases - if (indexedDB?.databases) { - const dbs = await indexedDB.databases(); - await Promise.all( - dbs.map((db) => { - if (db.name) { - console.log(`Deleting IndexedDB: ${db.name}`); - return indexedDB.deleteDatabase(db.name); - } - }), - ); - } - - // 4. Clear OPFS storage + // 3. 
Clear OPFS storage await clearAllOPFS(); // Set key to prevent clearing multiple times per session diff --git a/src/mapper/src/routes/+layout.svelte b/src/mapper/src/routes/+layout.svelte index c25f7c53e6..4f7eeda845 100644 --- a/src/mapper/src/routes/+layout.svelte +++ b/src/mapper/src/routes/+layout.svelte @@ -2,28 +2,23 @@ import '$styles/page.css'; import '@hotosm/ui/dist/hotosm-ui'; - import { onMount, onDestroy } from 'svelte'; + import { onMount } from 'svelte'; import { online } from 'svelte/reactivity/window'; import { error } from '@sveltejs/kit'; import type { PageProps } from './$types'; - import { pwaInfo } from 'virtual:pwa-info'; - import type { RegisterSWOptions } from 'vite-plugin-pwa/types'; - import { getCommonStore, getAlertStore } from '$store/common.svelte.ts'; + import { getCommonStore } from '$store/common.svelte.ts'; import { getLoginStore } from '$store/login.svelte.ts'; import { refreshCookies, getUserDetailsFromApi } from '$lib/api/login'; import Toast from '$lib/components/toast.svelte'; import Header from '$lib/components/header.svelte'; - import { m } from '$translations/messages.js'; let { data, children }: PageProps = $props(); const commonStore = getCommonStore(); const loginStore = getLoginStore(); - const alertStore = getAlertStore(); commonStore.setConfig(data.config); - let dbPromise = data.dbPromise; let lastOnlineStatus: boolean | null = $state(null); let loginDebounce: ReturnType | null = $state(null); @@ -84,24 +79,10 @@ document.head.appendChild(linkElement); } }); - - onDestroy(async() => { - const db = await dbPromise(); - db.close() - });
-	{#await dbPromise}
-		[loading spinner markup stripped in extraction]
-	{:then db}
-		{@render children?.({ data, db })}
-	{:catch error}
-		Error loading PGLite: {error.message}
-	{/await}
+	{@render children?.({ data })}
diff --git a/src/mapper/src/routes/+layout.ts b/src/mapper/src/routes/+layout.ts index 793f1e33f6..cd380c7104 100644 --- a/src/mapper/src/routes/+layout.ts +++ b/src/mapper/src/routes/+layout.ts @@ -1,7 +1,6 @@ import 'virtual:uno.css'; import type { LayoutLoad } from './$types'; -import { getDbOnce } from '$lib/db/pglite'; import { clearAllDevCaches } from '$lib/utils/dev-reset'; // NOTE we can't prerender as we are using dynamic routing [projectId] @@ -10,7 +9,6 @@ export const ssr = false; export const load: LayoutLoad = async ({ fetch }) => { let config; - const dbPromise = getDbOnce(); // Don't await here to allow loading spinner in layout.svelte try { const s3Response = await fetch(`${import.meta.env.VITE_S3_URL}/fmtm-data/frontend/config.json`); @@ -31,7 +29,6 @@ export const load: LayoutLoad = async ({ fetch }) => { } return { - dbPromise, config, }; }; diff --git a/src/mapper/src/routes/+page.svelte b/src/mapper/src/routes/+page.svelte index 7fdfd57037..29e60438c4 100644 --- a/src/mapper/src/routes/+page.svelte +++ b/src/mapper/src/routes/+page.svelte @@ -1,25 +1,14 @@
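The remaining hunks (entities.svelte.ts, projects.svelte.ts, and tasks.svelte.ts in the stat summary above) rewrite the stores to hold synced rows directly. A minimal sketch of what a rune-based store of that kind can look like, assuming Svelte 5 runes in a .svelte.ts module and following the getXStore() accessor convention visible in the imports above; names and fields are illustrative, not the actual store code:

	import type { DbEntityType } from '$lib/types.ts';

	// Hypothetical module-level rune state: synced rows live here instead of
	// being mirrored into a local PGLite database first.
	let entityList = $state<DbEntityType[]>([]);

	function getEntitiesStore() {
		return {
			get entities() {
				return entityList;
			},
			// Called from a shape subscription with the latest row set.
			setEntities(rows: DbEntityType[]) {
				entityList = rows;
			},
		};
	}

	export { getEntitiesStore };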