diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 00000000..8baad660 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,36 @@ +name: Gradle CI + +on: + push: + branches: [main] + pull_request: + branches: [main] + +jobs: + build: + runs-on: macos-latest + + steps: + - uses: actions/checkout@v6 + + - name: Set up JDK 17 + uses: actions/setup-java@v5 + with: + java-version: "17" + distribution: "temurin" + cache: gradle + + - name: Setup Gradle + uses: gradle/actions/setup-gradle@v5 + + - name: Clean project + run: ./gradlew clean + + - name: Run tests + run: ./gradlew allTests --stacktrace --continue + + - name: Test Summary + uses: test-summary/action@v2 + with: + paths: "**/build/test-results/**/TEST-*.xml" + if: always() diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..a0685db8 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,101 @@ +name: Publish Shared XCFramework + +on: + push: + tags: + - "v*" + workflow_dispatch: + inputs: + ref: + description: "Git ref (branch or tag) to build from" + required: true + default: main + +permissions: + contents: write + +env: + XCF_NAME: Shared + XCF_GRADLE_TASK: umbrella:assembleSharedXCFramework + XCF_OUTPUT_DIR: umbrella/build/XCFrameworks/release + +jobs: + build-and-release: + runs-on: macos-latest + + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + ref: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.ref || github.ref }} + + - name: Set up JDK 17 + uses: actions/setup-java@v5 + with: + java-version: "17" + distribution: "temurin" + cache: gradle + + - name: Setup Gradle + uses: gradle/actions/setup-gradle@v5 + + - name: Build XCFramework + run: ./gradlew $XCF_GRADLE_TASK --stacktrace + + - name: Package XCFramework + id: package + shell: bash + env: + EVENT_NAME: ${{ github.event_name }} + run: | + set -euo pipefail + + read_property_version() { + local prop + if [[ ! 
-f gradle.properties ]]; then + echo "gradle.properties not found" >&2 + return 1 + fi + prop=$(awk -F'=' '/^version[[:space:]]*=/ {gsub(/^[[:space:]]+|[[:space:]]+$/, "", $2); gsub(/^"|"$/, "", $2); print $2; exit}' gradle.properties) + if [[ -z "$prop" ]]; then + echo "version property missing in gradle.properties" >&2 + return 1 + fi + printf '%s' "$prop" + } + + property_version=$(read_property_version) + + if [[ "$EVENT_NAME" == "workflow_dispatch" ]]; then + version="$property_version" + tag_name="v${version}" + else + tag_name="${GITHUB_REF_NAME}" + version="$property_version" + if [[ "$tag_name" != "v${version}" ]]; then + echo "Tag ${tag_name} does not match gradle.properties version ${version}" >&2 + exit 1 + fi + fi + + archive_name="${XCF_NAME}-${version}.xcframework.zip" + output_dir="$XCF_OUTPUT_DIR" + cd "$output_dir" + rm -f "${XCF_NAME}.xcframework.zip" "$archive_name" + zip -r "$archive_name" "${XCF_NAME}.xcframework" + + echo "archive-path=$output_dir/$archive_name" >> "$GITHUB_OUTPUT" + echo "version=$version" >> "$GITHUB_OUTPUT" + echo "tag-name=$tag_name" >> "$GITHUB_OUTPUT" + + - name: Publish Release + uses: softprops/action-gh-release@v2 + with: + tag_name: ${{ steps.package.outputs.tag-name }} + name: Shared XCFramework ${{ steps.package.outputs.version }} + draft: false + prerelease: ${{ contains(steps.package.outputs.version, '-') }} + make_latest: ${{ !contains(steps.package.outputs.version, '-') }} + files: ${{ steps.package.outputs.archive-path }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index 1080af78..c4d2f087 100644 --- a/.gitignore +++ b/.gitignore @@ -20,3 +20,35 @@ build # Android local.properties + +# Xcode +# +# gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore + +## User settings +xcuserdata/ + +## Obj-C/Swift specific +*.hmap + +## App packaging +*.ipa +*.dSYM.zip +*.dSYM + +# Swift Package Manager +# +# Add this line if you want to avoid checking in source code from Swift Package Manager dependencies. +# Packages/ +# Package.pins +# Package.resolved +# *.xcodeproj +# +# Xcode automatically generates this directory with a .xcworkspacedata file and xcuserdata +# hence it is not needed unless you have added a package configuration file to your project +# .swiftpm + +.build/ + +# macOS +.DS_Store diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 00000000..c13b25b8 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,149 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +## Overview + +The mobile-sync project (internally named "quransync") is a Kotlin Multiplatform Mobile (KMM) library for synchronizing Quran.com user data across Android and iOS. It implements bidirectional synchronization with conflict resolution for page bookmarks. 
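
At the consumer level, sync is driven through the `SynchronizationClient` interface (see the Synchronization Architecture and Scheduling System sections below). A minimal sketch of the two trigger points a host app forwards to the client; the wrapper functions and the way the client instance is obtained are illustrative only:

```kotlin
// Sketch only: the host app forwards lifecycle and data events to the sync client.
fun onAppLaunched(client: SynchronizationClient) {
    client.applicationStarted()   // scheduled with the APP_START delay (~30s)
}

fun onBookmarkChanged(client: SynchronizationClient) {
    client.localDataUpdated()     // scheduled with the LOCAL_DATA_MODIFIED delay (~5s)
}
```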
+ +## Development Commands + +### Building and Testing +```bash +# Clean and build entire project +./gradlew clean build + +# Run all tests across all modules +./gradlew allTests + +# Run tests for specific module +./gradlew :syncengine:test +./gradlew :persistence:test + +# Build iOS framework +./gradlew :umbrella:assembleXCFramework + +# Run Android demo +./gradlew :demo:android:installDebug +``` + +### Module-Specific Commands +```bash +# Test individual components +./gradlew :syncengine:testDebugUnitTest +./gradlew :syncengine:testReleaseUnitTest + +# Check for dependency updates +./gradlew dependencyUpdates +``` + +## Architecture Overview + +The project follows a layered architecture with clear separation of concerns: + +### Module Dependencies +``` +mutations-definitions (foundational) + ↑ + ┌────┴────┬────────────┐ + │ │ │ +persistence syncengine │ + └────┬────┴────────┐ │ + │ │ │ + sync-pipelines │ │ + ↑ │ │ + │ │ │ + umbrella demo:android +``` + +### Core Modules + +**mutations-definitions**: Foundation module defining `Mutation`, `LocalModelMutation`, and `RemoteModelMutation` types used across the entire sync system. + +**persistence**: SQLDelight-based data layer with `PageBookmarksRepository` and change tracking. Handles cross-platform database operations and mutation state persistence. + +**syncengine**: Pure business logic module containing `SynchronizationClient`, `PageBookmarksSynchronizationExecutor`, conflict resolution system, network layer (Ktor), and scheduling system. Contains no external dependencies in core executor logic. + +**sync-pipelines**: Integration layer with `SyncEnginePipeline` that bridges syncengine and persistence. Provides the main high-level API via `RepositoryDataFetcher` and `ResultReceiver`. + +**umbrella**: iOS framework packaging module that exports all public APIs as "Shared" framework. + +**demo:android**: Sample Android app demonstrating library usage. + +### Key Architectural Patterns + +- **Dependency Inversion**: syncengine defines interfaces implemented by persistence +- **Adapter Pattern**: sync-pipelines acts as adapter between layers +- **Pure Business Logic**: Core sync logic has no external dependencies +- **Conflict Resolution Pipeline**: Sophisticated preprocessing and conflict handling + +## Technology Stack + +- **Language**: Kotlin Multiplatform (targeting iOS and Android) +- **Database**: SQLDelight for cross-platform SQL operations +- **Networking**: Ktor HTTP client with platform-specific implementations +- **Serialization**: kotlinx.serialization +- **Async**: kotlinx.coroutines +- **Testing**: kotlin.test with kotlinx.coroutines.test +- **Build**: Gradle with Kotlin DSL +- **iOS Distribution**: XCFramework via umbrella module + +## Development Guidelines + +### Module Boundaries +- Keep syncengine pure (no external persistence dependencies) +- Use sync-pipelines for integration between layers +- Maintain clear interfaces between modules + +### Testing Strategy +- syncengine has comprehensive unit tests with timing-sensitive scheduling tests +- Use `kotlinx.coroutines.test` for coroutine testing +- Test timing with tolerance values (typically 100ms tolerance) + +### Synchronization Architecture +The sync system implements a sophisticated bidirectional flow: + +1. **Local Changes**: `PageBookmarksRepository` tracks mutations +2. **Sync Trigger**: Events fire through `SynchronizationClient` +3. 
**Pipeline Execution**: `PageBookmarksSynchronizationExecutor` orchestrates: + - Fetch local/remote mutations + - Conflict detection and resolution + - Push/pull data exchange +4. **Result Persistence**: Coordinated by sync-pipelines + +### Scheduling System +The recently added `Scheduler` in syncengine manages sync timing with: +- `APP_START` trigger (30s delay) +- `LOCAL_DATA_MODIFIED` trigger (5s delay) +- `IMMEDIATE` trigger (0ms delay) +- Exponential backoff retry logic (200ms base, 2.5x multiplier, max 5 retries) +- State machine tracking scheduler lifecycle + +**Error Handling**: The Scheduler uses exception-based error handling: +- Task functions should throw exceptions to indicate failure +- Scheduler catches exceptions and applies retry logic automatically +- After maximum retries, the final exception is reported to the failure callback +- Success is indicated by task function completing without throwing + +### Platform-Specific Considerations +- HTTP clients: OkHttp (Android), Darwin (iOS) +- Database drivers: Android SQLite, iOS native +- Framework packaging: XCFramework for iOS consumption + +## Common Development Tasks + +### Adding New Mutation Types +1. Define in mutations-definitions module +2. Update persistence layer with SQL schema changes +3. Add syncengine business logic +4. Wire through sync-pipelines + +### Extending Sync Logic +- Add new preprocessing steps to `LocalMutationsPreprocessor`/`RemoteMutationsPreprocessor` +- Extend conflict resolution in `ConflictDetector`/`ConflictResolver` +- Update `PageBookmarksSynchronizationExecutor` pipeline + +### Testing Sync Behavior +- Use test timings in `SchedulerTest.kt` as reference +- Mock network layer for integration tests +- Test conflict scenarios with controlled data states \ No newline at end of file diff --git a/SYNCENGINE_ARCHITECTURE.md b/SYNCENGINE_ARCHITECTURE.md new file mode 100644 index 00000000..63dcd18a --- /dev/null +++ b/SYNCENGINE_ARCHITECTURE.md @@ -0,0 +1,203 @@ +# SyncEngine Architecture & Conflict Resolution + +## Overview + +The SyncEngine package provides a robust synchronization system for page bookmarks across multiple devices. It handles bidirectional synchronization between local and remote data stores, with sophisticated conflict detection and resolution mechanisms. + +## Core Components + +### 1. SynchronizationClient Interface + +The main entry point for synchronization operations: + +```kotlin +interface SynchronizationClient { + fun localDataUpdated() // Triggered when local data changes + fun applicationStarted() // Triggered when app starts +} +``` + +**Key Features:** +- Provides a simple interface for triggering sync operations +- Handles authentication and network communication +- Orchestrates the complete synchronization pipeline + +### 2. PageBookmarksSynchronizationExecutor + +The core business logic executor that contains no external dependencies. This should ease testing the whole synchronization pipeline with unit tests. + +```kotlin +class PageBookmarksSynchronizationExecutor { + suspend fun executePipeline( + fetchLocal: suspend () -> PipelineInitData, + fetchRemote: suspend (Long) -> FetchedRemoteData, + checkLocalExistence: suspend (List) -> Map, + pushLocal: suspend (List>, Long) -> PushResultData + ): PipelineResult +} +``` + +**Pipeline Steps:** +1. **Initialize**: Fetch local mutations and last modification date +2. **Preprocess Local**: Validate and transform local mutations +3. **Fetch Remote**: Get remote mutations since last sync +4. 
**Preprocess Remote**: Filter and transform remote mutations +5. **Detect Conflicts**: Identify conflicting mutations +6. **Resolve Conflicts**: Apply conflict resolution rules +7. **Push Local**: Send non-conflicting local mutations +8. **Combine Results**: Merge all remote mutations for persistence + +## Conflict Resolution System + +### Conflict Detection + +The `ConflictDetector` identifies conflicts between local and remote mutations: + +#### Conflict Types + +1. **Page-Level Conflicts**: Multiple mutations for the same page +2. **Resource-Level Conflicts**: Mutations for the same resource ID +3. **Cross-Reference Conflicts**: Local mutations referencing remote resources + +### Conflict Resolution + +The `ConflictResolver` applies business rules to resolve detected conflicts: + +#### Resolution Rules + +1. **Illogical Scenarios** (throw exceptions): + - Local creation vs Remote deletion + - Local deletion vs Remote creation + +2. **Same Operation Conflicts**: + - Both sides created → Accept remote + - Both sides deleted → Accept remote + +3. **Mixed Operation Conflicts**: + - Local deletion + Remote creation → Accept remote, push local creation + - Other combinations → Accept remote + +## Data Flow + +### Synchronization Pipeline + +``` +┌─────────────────┐ ┌──────────────────┐ +│ Local Data │ │ Remote Server │ +└─────────────────┘ └──────────────────┘ + │ │ + ▼ ▼ +┌─────────────────┐ ┌──────────────────┐ +│ 1. Fetch Local │ │ 2. Fetch Remote │ +│ Mutations │ │ Mutations │ +└─────────────────┘ └──────────────────┘ + │ │ + ▼ ▼ +┌─────────────────┐ ┌──────────────────┐ +│ 3. Preprocess │ │ 4. Preprocess │ +│ Local Data │ │ Remote Data │ +└─────────────────┘ └──────────────────┘ + │ │ + └───────────────────────┘ + │ + ▼ + ┌─────────────────────────┐ + │ 5. Detect Conflicts │ + │ (Input: Preprocessed │ + │ Local + Remote) │ + └─────────────────────────┘ + │ + ▼ + ┌─────────────────────────┐ + │ 6. Resolve Conflicts │ + └─────────────────────────┘ + │ + ▼ + ┌─────────────────────────┐ + │ 7. Push Non-Conflicting │ + │ Local Mutations │ + └─────────────────────────┘ + │ + ▼ + ┌─────────────────────────┐ + │ 8. Combine & Return │ + │ (Non-conflicting │ + │ Remote + Resolved │ + │ Remote + Pushed │ + │ Remote) │ + └─────────────────────────┘ +``` + +### Mutation Types + +The system handles three types of mutations: + +1. **CREATE**: New resource creation +2. **UPDATE**: Existing resource modification +3. **DELETE**: Resource deletion + +**Important Note**: UPDATE mutations are converted to CREATE mutations during preprocessing for page bookmarks, as they're not expected for them. 
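
For orientation, here is a minimal sketch of how these mutation types might be modelled in the `mutations-definitions` module. The exact definitions are not reproduced in this document; the field names below (`model`, `remoteID`) and the enum cases are inferred from the preprocessing snippets that follow and should be treated as assumptions:

```kotlin
// Sketch only; the real definitions live in the mutations-definitions module.
enum class Mutation { CREATED, MODIFIED, DELETED }

// A change recorded locally; remoteID stays null until the resource has been pushed.
data class LocalModelMutation<Model>(
    val model: Model,
    val remoteID: String?,
    val mutation: Mutation
)

// A change reported by the server, keyed by its remote identifier.
data class RemoteModelMutation<Model>(
    val model: Model,
    val remoteID: String,
    val mutation: Mutation
)
```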
+ +## Preprocessing Logic + +### Local Mutations Preprocessor + +Validates local mutations and ensures logical consistency: + +```kotlin +fun preprocess(localMutations: List>): List> { + // Convert MODIFIED to CREATED + val transformedModifiedMutations = modifiedMutations.map { + it.copy(mutation = Mutation.CREATED) + } + + // Validate logical constraints + // - Max 2 mutations per page + // - Max 1 deletion per page + // - Max 1 creation per page + // - Deletions must have remote IDs + + return processedMutations +} +``` + +### Remote Mutations Preprocessor + +Filters and transforms remote mutations: + +```kotlin +suspend fun preprocess(remoteMutations: List>): List> { + // Filter DELETE mutations for non-existent local resources + val filteredDeletedMutations = deletedMutations.filter { + checkLocalExistence(listOf(it.remoteID))[it.remoteID] ?: false + } + + // Convert MODIFIED to CREATED + val transformedModifiedMutations = modifiedMutations.map { + it.copy(mutation = Mutation.CREATED) + } + + return createdMutations + filteredDeletedMutations + transformedModifiedMutations +} +``` + +## Network Layer + +### Request/Response Pattern + +The network layer uses a clean request/response pattern. + +## Configuration and Dependencies + +### Required Dependencies + +The sync engine requires three main dependencies to function: + +- **LocalDataFetcher**: Provides access to local mutations and existence checks +- **ResultNotifier**: Handles sync success/failure callbacks with results +- **LocalModificationDateFetcher**: Tracks the last local modification timestamp +- **AuthenticationDataFetcher**: Supplies authentication headers for API requests + +### Environment Configuration + +The sync engine needs the remote server endpoint URL to establish network communication. diff --git a/auth/build.gradle.kts b/auth/build.gradle.kts new file mode 100644 index 00000000..a71f35b4 --- /dev/null +++ b/auth/build.gradle.kts @@ -0,0 +1,81 @@ +import org.jetbrains.kotlin.gradle.dsl.JvmTarget + +plugins { + alias(libs.plugins.kotlin.multiplatform) + alias(libs.plugins.android.library) + alias(libs.plugins.vanniktech.maven.publish) +} + +kotlin { + iosX64() + iosArm64() + iosSimulatorArm64() + + + androidTarget { + publishLibraryVariants("release") + compilerOptions { + jvmTarget.set(JvmTarget.JVM_17) + } + } + + sourceSets { + commonMain.dependencies { + implementation(libs.kotlinx.coroutines.core) + implementation(libs.oidc.appsupport) + implementation(libs.ktor.client.core) + implementation(libs.ktor.client.content.negotiation) + implementation(libs.ktor.serialization.json) + implementation(libs.sha2) + } + + commonTest.dependencies { + implementation(libs.kotlin.test) + implementation(libs.kotlinx.coroutines.test) + } + + androidMain.dependencies { + implementation(libs.ktor.client.okhttp) + } + + // No explicit iOS dependencies needed for oidc-appsupport unless specific override + // But we need a ktor engine for iOS if we pass a client. 
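        // appleMain below is an intermediate source set shared by the three iOS targets
        // declared above; it carries the Darwin Ktor engine so Apple builds have an HTTP engine.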
+ val appleMain by creating { + dependsOn(commonMain.get()) + dependencies { + implementation(libs.ktor.client.darwin) + } + } + + iosX64Main.get().dependsOn(appleMain) + iosArm64Main.get().dependsOn(appleMain) + iosSimulatorArm64Main.get().dependsOn(appleMain) + } +} + +android { + namespace = "com.quran.shared.auth" + compileSdk = libs.versions.android.compile.sdk.get().toInt() + + defaultConfig { + minSdk = libs.versions.android.min.sdk.get().toInt() + } + + compileOptions { + sourceCompatibility = JavaVersion.valueOf("VERSION_${libs.versions.android.java.version.get()}") + targetCompatibility = JavaVersion.valueOf("VERSION_${libs.versions.android.java.version.get()}") + } +} + +mavenPublishing { + publishToMavenCentral() + signAllPublications() + coordinates(libs.versions.project.group.get(), "auth", libs.versions.project.version.get()) + + pom { + name = "Quran.com Auth Layer" + description = "A library for authentication with Quran.com" + inceptionYear = libs.versions.project.inception.year.get() + url = libs.versions.project.url.get() + } +} diff --git a/auth/src/androidMain/AndroidManifest.xml b/auth/src/androidMain/AndroidManifest.xml new file mode 100644 index 00000000..8bdb7e14 --- /dev/null +++ b/auth/src/androidMain/AndroidManifest.xml @@ -0,0 +1,4 @@ + + + + diff --git a/auth/src/commonMain/kotlin/com/quran/shared/auth/AuthenticationManager.kt b/auth/src/commonMain/kotlin/com/quran/shared/auth/AuthenticationManager.kt new file mode 100644 index 00000000..7505bf86 --- /dev/null +++ b/auth/src/commonMain/kotlin/com/quran/shared/auth/AuthenticationManager.kt @@ -0,0 +1,341 @@ +package com.quran.shared.auth + +import io.ktor.client.HttpClient +import io.ktor.client.call.body +import io.ktor.client.plugins.contentnegotiation.ContentNegotiation +import io.ktor.client.request.post +import io.ktor.client.request.setBody +import io.ktor.client.statement.HttpResponse +import io.ktor.http.ContentType +import io.ktor.http.contentType +import io.ktor.serialization.kotlinx.json.json +import io.ktor.util.encodeBase64 +import io.ktor.util.generateNonce +import kotlinx.serialization.json.Json +import kotlinx.serialization.json.jsonObject +import kotlinx.serialization.json.jsonPrimitive +import org.kotlincrypto.hash.sha2.SHA256 +import org.publicvalue.multiplatform.oidc.OpenIdConnectClient +import org.publicvalue.multiplatform.oidc.types.CodeChallengeMethod +import kotlin.time.Clock + +/** + * Manages OAuth authentication with Quran.com using Quran Foundation's OAuth2 endpoints. + * + * Implements OAuth 2.0 with PKCE (RFC 7636) for secure public client authentication. + * Platform-agnostic implementation for Kotlin Multiplatform. 
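 *
 * Illustrative usage (a sketch; `codeVerifier`, `state`, `code`, and `openInBrowser` are
 * app-supplied values/helpers obtained outside this class):
 * ```kotlin
 * val auth = AuthenticationManager(usePreProduction = true)
 * val url = auth.buildAuthorizationUrl(codeVerifier = codeVerifier, state = state)
 * openInBrowser(url)                            // user signs in; the redirect carries code + state
 * val tokens = auth.exchangeCodeForToken(code, codeVerifier)   // suspend call
 * ```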
+ * + * Reference: https://api-docs.quran.foundation/docs/category/oauth2_apis + */ +class AuthenticationManager( + private val usePreProduction: Boolean = true +) { + // OAuth endpoints from Quran Foundation + private val baseUrl = if (usePreProduction) { + "https://prelive-oauth2.quran.foundation" + } else { + "https://oauth2.quran.foundation" + } + + private val authorizationEndpoint = "$baseUrl/oauth2/auth" + private val tokenEndpoint = "$baseUrl/oauth2/token" + private val revokeEndpoint = "$baseUrl/oauth2/revoke" + + // OAuth application credentials + private val clientId = "YOUR_CLIENT_ID_HERE" + private val clientSecret = null + // Mobile redirect URI - must match the deep link scheme configured in the app + private val redirectUri = "com.quran.oauth://callback" + + // Scopes requested from OAuth server + private val requestedScopes = listOf( + "openid", + "offline_access", + "content" + ) + + // HTTP client for making token requests + private val httpClient = HttpClient { + install(ContentNegotiation) { + json(Json { + ignoreUnknownKeys = true + isLenient = true + }) + } + } + + // Internal OIDC client - not exposed to consumers + private val oidcClient = OpenIdConnectClient( + block = { + endpoints { + authorizationEndpoint = this@AuthenticationManager.authorizationEndpoint + tokenEndpoint = this@AuthenticationManager.tokenEndpoint + } + + clientId = this@AuthenticationManager.clientId + clientSecret = this@AuthenticationManager.clientSecret // PKCE doesn't use client secret for public clients + scope = requestedScopes.joinToString(" ") + redirectUri = this@AuthenticationManager.redirectUri + codeChallengeMethod = CodeChallengeMethod.S256 + } + ) + + /** + * Builds the OAuth2 authorization URL for initiating login flow. + * + * Implements PKCE (Proof Key for Code Exchange) for enhanced security: + * - Generates code_challenge from code_verifier + * - Includes state parameter to prevent CSRF attacks + * + * @param codeVerifier PKCE code verifier (must be stored for token exchange) + * @param state Random state value for CSRF protection (must be validated in redirect) + * @return Authorization URL to open in browser + */ + fun buildAuthorizationUrl( + codeVerifier: String, + state: String + ): String { + // Calculate code challenge (SHA-256 hash of code verifier) + val codeChallenge = calculateSHA256(codeVerifier) + + // Build authorization URL with PKCE parameters + val params = mapOf( + "client_id" to clientId, + "redirect_uri" to redirectUri, + "response_type" to "code", + "scope" to requestedScopes.joinToString(" "), + "state" to state, + "nonce" to generateNonce(), + "code_challenge" to codeChallenge, + "code_challenge_method" to "S256" + ) + + val queryString = params.entries.joinToString("&") { (key, value) -> + "$key=${urlEncode(value)}" + } + + return "$authorizationEndpoint?$queryString" + } + + /** + * Exchanges OAuth2 authorization code for access and refresh tokens. + * + * Implements PKCE token exchange by including the code_verifier with the request. 
+ * + * @param code Authorization code from OAuth redirect + * @param codeVerifier PKCE code verifier (must match the one used for authorization) + * @return TokenResponse containing access_token and optional refresh_token + * @throws IllegalArgumentException if code or verifier is empty + * @throws Exception if token exchange fails + */ + suspend fun exchangeCodeForToken( + code: String, + codeVerifier: String + ): TokenResponse { + require(code.isNotEmpty()) { "Authorization code cannot be empty" } + require(codeVerifier.isNotEmpty()) { "Code verifier cannot be empty" } + + return try { + // Make POST request to token endpoint + val response: HttpResponse = httpClient.post(tokenEndpoint) { + contentType(ContentType.Application.FormUrlEncoded) + // Send form-encoded body with token request parameters + val body = "grant_type=authorization_code" + + "&code=$code" + + "&client_id=$clientId" + + "&redirect_uri=${urlEncode(redirectUri)}" + + "&code_verifier=$codeVerifier" + setBody(body) + } + + // Parse response as JSON + val responseBody: String = response.body() + val jsonObject = Json.parseToJsonElement(responseBody).jsonObject + + // Check for error response + if (jsonObject.containsKey("error")) { + val error = jsonObject["error"]?.jsonPrimitive?.content ?: "Unknown error" + val errorDescription = jsonObject["error_description"]?.jsonPrimitive?.content + ?: "No description provided" + throw Exception("Token exchange failed: $error - $errorDescription") + } + + // Extract tokens from successful response + val accessToken = jsonObject["access_token"]?.jsonPrimitive?.content + ?: throw Exception("No access_token in response") + + val refreshToken = jsonObject["refresh_token"]?.jsonPrimitive?.content + + val expiresIn = jsonObject["expires_in"]?.jsonPrimitive?.content?.toLongOrNull() + ?: 3600L + + TokenResponse( + accessToken = accessToken, + refreshToken = refreshToken, + expiresIn = expiresIn, + tokenType = jsonObject["token_type"]?.jsonPrimitive?.content ?: "Bearer" + ) + } catch (e: Exception) { + throw Exception("Failed to exchange authorization code for token: ${e.message}", e) + } + } + + /** + * Refreshes an expired access token using a refresh token. 
+ * + * @param refreshToken Previously obtained refresh token + * @return New TokenResponse with updated access_token + * @throws IllegalArgumentException if refresh token is empty + * @throws Exception if token refresh fails + */ + suspend fun refreshToken(refreshToken: String): TokenResponse { + require(refreshToken.isNotEmpty()) { "Refresh token cannot be empty" } + + return try { + // Make POST request to token endpoint for refresh + val response: HttpResponse = httpClient.post(tokenEndpoint) { + contentType(ContentType.Application.FormUrlEncoded) + val body = "grant_type=refresh_token" + + "&refresh_token=$refreshToken" + + "&client_id=$clientId" + setBody(body) + } + + // Parse response + val responseBody: String = response.body() + val jsonObject = Json.parseToJsonElement(responseBody).jsonObject + + // Check for error + if (jsonObject.containsKey("error")) { + val error = jsonObject["error"]?.jsonPrimitive?.content ?: "Unknown error" + throw Exception("Token refresh failed: $error") + } + + // Extract new tokens + val accessToken = jsonObject["access_token"]?.jsonPrimitive?.content + ?: throw Exception("No access_token in refresh response") + + val newRefreshToken = jsonObject["refresh_token"]?.jsonPrimitive?.content + ?: refreshToken // Reuse old refresh token if not provided + + val expiresIn = jsonObject["expires_in"]?.jsonPrimitive?.content?.toLongOrNull() + ?: 3600L + + TokenResponse( + accessToken = accessToken, + refreshToken = newRefreshToken, + expiresIn = expiresIn, + tokenType = jsonObject["token_type"]?.jsonPrimitive?.content ?: "Bearer" + ) + } catch (e: Exception) { + throw Exception("Failed to refresh token: ${e.message}", e) + } + } + + /** + * Revokes an access or refresh token (logs out user). + * + * @param token The token to revoke (access_token or refresh_token) + * @param tokenTypeHint Optional hint: "access_token" or "refresh_token" + * @return true if revocation succeeded, false otherwise + */ + suspend fun revokeToken( + token: String, + tokenTypeHint: String? = null + ): Boolean { + return try { + require(token.isNotEmpty()) { "Token cannot be empty" } + + val response: HttpResponse = httpClient.post(revokeEndpoint) { + contentType(ContentType.Application.FormUrlEncoded) + val body = buildString { + append("token=$token") + append("&client_id=$clientId") + if (tokenTypeHint != null) { + append("&token_type_hint=$tokenTypeHint") + } + } + setBody(body) + } + + // HTTP 200 means success, 204 means already revoked + response.status.value in listOf(200, 204) + } catch (e: Exception) { + false + } + } + + /** + * Validates if a token is still valid (not expired). + * + * @param expirationTime Token expiration timestamp (milliseconds since epoch) + * @return true if token is still valid, false if expired + */ + fun isTokenValid(expirationTime: Long): Boolean { + // Add 60 second buffer to refresh before actual expiration + return currentTimeMillis() < (expirationTime - 60_000) + } + + // ========================= Helper Methods ========================= + + /** + * Calculates SHA-256 hash of input string and returns Base64URL encoded result. + * Platform-agnostic implementation using platform-specific functions. + * + * @param input String to hash + * @return Base64URL encoded SHA-256 hash (without padding) + */ + private fun calculateSHA256(input: String): String { + val bytes = input.encodeToByteArray() + val digest = sha256(bytes) + return base64UrlEncode(digest) + } + + /** + * URL encodes a string for use in query parameters. 
+ * Platform-agnostic implementation using pure Kotlin. + * + * @param value String to encode + * @return URL-encoded string + */ + private fun urlEncode(value: String): String { + return value.toCharArray().map { char -> + when (char) { + in 'A'..'Z', in 'a'..'z', in '0'..'9', '-', '.', '_', '~' -> char.toString() + ' ' -> "%20" + else -> { + val bytes = char.toString().encodeToByteArray() + bytes.joinToString("") { "%${it.toUByte().toString(16).padStart(2, '0').uppercase()}" } + } + } + }.joinToString("") + } +} + +/** + * Response from OAuth2 token endpoint. + * + * Reference: https://api-docs.quran.foundation/docs/category/oauth2_apis + */ +data class TokenResponse( + val accessToken: String, + val refreshToken: String?, + val expiresIn: Long, + val tokenType: String = "Bearer" +) + +fun sha256(data: ByteArray): ByteArray { + val digest = SHA256() + digest.update(data) + return digest.digest() +} +fun base64UrlEncode(data: ByteArray): String { + val base64 = data.encodeBase64() + return base64.replace("+", "-") + .replace("/", "_") + .replace("=", "") +} + +fun currentTimeMillis(): Long = Clock.System.now().toEpochMilliseconds() + + diff --git a/build.gradle.kts b/build.gradle.kts index 46ed7d19..d9ff7b6f 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -6,7 +6,30 @@ plugins { alias(libs.plugins.kotlin.multiplatform) apply false alias(libs.plugins.sqldelight) apply false alias(libs.plugins.vanniktech.maven.publish) apply false + alias(libs.plugins.ksp) apply false + alias(libs.plugins.native.coroutines) apply false } group = "com.quran.shared" -version = "0.0.1-SNAPSHOT" \ No newline at end of file +version = providers.gradleProperty("version").orElse("0.0.1-SNAPSHOT").get() + +// Configure test logging - show details only for failures +allprojects { + tasks.withType { + testLogging { + // Only show output for failed tests, or when requested via --info + events("failed") + exceptionFormat = org.gradle.api.tasks.testing.logging.TestExceptionFormat.FULL + showStandardStreams = false + showCauses = true + showExceptions = true + showStackTraces = true + + // Show more details when --info flag is used + if (project.gradle.startParameter.logLevel == LogLevel.INFO) { + events("started", "passed", "skipped", "failed", "standard_out", "standard_error") + showStandardStreams = true + } + } + } +} diff --git a/demo/android/build.gradle.kts b/demo/android/build.gradle.kts index b2bcf098..9e25c662 100644 --- a/demo/android/build.gradle.kts +++ b/demo/android/build.gradle.kts @@ -1,3 +1,5 @@ +import org.jetbrains.kotlin.gradle.dsl.JvmTarget + plugins { alias(libs.plugins.android.application) alias(libs.plugins.kotlin.android) @@ -5,12 +7,13 @@ plugins { } android { - compileSdk = 35 + compileSdk = 36 namespace = "com.quran.shared.demo.android" defaultConfig { - minSdk = 21 - targetSdk = 35 + minSdk = 23 + targetSdk = 36 + manifestPlaceholders["oidcRedirectScheme"] = "com.quran.oauth" } compileOptions { @@ -18,8 +21,10 @@ android { targetCompatibility = JavaVersion.VERSION_17 } - kotlinOptions { - jvmTarget = "17" + kotlin { + compilerOptions { + jvmTarget.set(JvmTarget.JVM_17) + } } buildFeatures.compose = true @@ -28,7 +33,14 @@ android { dependencies { implementation(projects.syncengine) implementation(projects.persistence) + implementation(projects.auth) + + // Android Framework & Lifecycle + implementation("androidx.appcompat:appcompat:1.7.0") + implementation("androidx.lifecycle:lifecycle-viewmodel:2.8.4") + implementation("androidx.lifecycle:lifecycle-runtime-ktx:2.8.4") + // Compose 
implementation(platform(libs.compose.bom)) implementation(libs.compose.ui) implementation(libs.compose.ui.tooling.preview) @@ -36,5 +48,8 @@ dependencies { implementation(libs.compose.foundation) implementation(libs.compose.material3) implementation(libs.compose.runtime) + implementation("androidx.lifecycle:lifecycle-viewmodel-compose:2.8.4") + implementation("androidx.activity:activity-compose:1.9.2") + debugImplementation(libs.compose.ui.tooling) } \ No newline at end of file diff --git a/demo/android/src/main/AndroidManifest.xml b/demo/android/src/main/AndroidManifest.xml index 35076d77..6b8cbb5c 100644 --- a/demo/android/src/main/AndroidManifest.xml +++ b/demo/android/src/main/AndroidManifest.xml @@ -1,6 +1,8 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/demo/android/src/main/kotlin/com/quran/shared/demo/android/MainActivity.kt b/demo/android/src/main/kotlin/com/quran/shared/demo/android/MainActivity.kt index 3418f7d3..482897cf 100644 --- a/demo/android/src/main/kotlin/com/quran/shared/demo/android/MainActivity.kt +++ b/demo/android/src/main/kotlin/com/quran/shared/demo/android/MainActivity.kt @@ -1,6 +1,72 @@ package com.quran.shared.demo.android -import android.app.Activity +import android.content.Intent +import android.os.Bundle +import androidx.activity.ComponentActivity +import androidx.activity.compose.setContent +import androidx.lifecycle.lifecycleScope +import com.quran.shared.demo.android.ui.auth.AuthScreen +import com.quran.shared.demo.android.ui.auth.AuthViewModel +import kotlinx.coroutines.launch -class MainActivity : Activity() { +/** + * Main activity for the Android demo app. + * + * Responsibilities: + * - Display the authentication screen + * - Handle OAuth redirect callbacks from the system browser + * - Manage activity lifecycle and deep linking + * + * When user completes OAuth flow in browser, the app is opened via deep link + * with a redirect URI containing the authorization code. This activity captures + * that redirect and passes it to the AuthViewModel for token exchange. + */ +class MainActivity : ComponentActivity() { + private val authViewModel: AuthViewModel by lazy { AuthViewModel(application) } + + override fun onCreate(savedInstanceState: Bundle?) { + super.onCreate(savedInstanceState) + + setContent { + AuthScreen( + viewModel = authViewModel, + onAuthenticationSuccess = { + // Navigate to main app or home screen + // For now, just log success + println("Authentication successful!") + } + ) + } + + // Handle OAuth redirect if this activity was opened via deep link + handleOAuthRedirect(intent) + } + + override fun onNewIntent(intent: Intent) { + super.onNewIntent(intent) + // Handle OAuth redirect when app is already running + handleOAuthRedirect(intent) + } + + /** + * Extracts OAuth redirect URI from intent and notifies ViewModel. + * + * The AndroidManifest.xml is configured to handle intents with the scheme + * "com.quran.oauth" and host "callback", which matches the redirectUri in + * AuthenticationManager. 
+ * + * Example redirect URI: com.quran.oauth://callback?code=AUTH_CODE&state=STATE + */ + private fun handleOAuthRedirect(intent: Intent) { + val action = intent.action + val uri = intent.data + + if (action == Intent.ACTION_VIEW && uri != null) { + val redirectUri = uri.toString() + + lifecycleScope.launch { + authViewModel.handleOAuthRedirect(redirectUri) + } + } + } } \ No newline at end of file diff --git a/demo/android/src/main/kotlin/com/quran/shared/demo/android/ui/auth/AuthScreen.kt b/demo/android/src/main/kotlin/com/quran/shared/demo/android/ui/auth/AuthScreen.kt new file mode 100644 index 00000000..acf37b23 --- /dev/null +++ b/demo/android/src/main/kotlin/com/quran/shared/demo/android/ui/auth/AuthScreen.kt @@ -0,0 +1,246 @@ +package com.quran.shared.demo.android.ui.auth + +import androidx.compose.foundation.layout.* +import androidx.compose.material3.* +import androidx.compose.runtime.* +import androidx.compose.ui.Alignment +import androidx.compose.ui.Modifier +import androidx.compose.ui.platform.LocalContext +import androidx.compose.ui.text.style.TextAlign +import androidx.compose.ui.unit.dp +import androidx.lifecycle.viewmodel.compose.viewModel + +/** + * Authentication screen for the Android demo app. + * + * Displays: + * - Login button to initiate OAuth flow + * - Loading state during authentication + * - Success message after successful login + * - Error messages for failed authentication + * + * Follows Jetpack Compose best practices with state management via ViewModel. + */ +@Composable +fun AuthScreen( + viewModel: AuthViewModel = viewModel(), + onAuthenticationSuccess: () -> Unit = {} +) { + val authState by viewModel.authState.collectAsState() + val error by viewModel.error.collectAsState() + val context = LocalContext.current + + LaunchedEffect(authState) { + if (authState is AuthState.Success) { + onAuthenticationSuccess() + } + } + + Box( + modifier = Modifier + .fillMaxSize() + .padding(16.dp), + contentAlignment = Alignment.Center + ) { + Column( + horizontalAlignment = Alignment.CenterHorizontally, + modifier = Modifier + .fillMaxWidth() + .wrapContentHeight() + ) { + // Title + Text( + text = "Quran.com Sync", + style = MaterialTheme.typography.headlineLarge, + textAlign = TextAlign.Center, + modifier = Modifier.padding(bottom = 8.dp) + ) + + Text( + text = "Sign in with Quran Foundation", + style = MaterialTheme.typography.bodyLarge, + textAlign = TextAlign.Center, + modifier = Modifier.padding(bottom = 32.dp), + color = MaterialTheme.colorScheme.onSurfaceVariant + ) + + // Content based on auth state + when (authState) { + is AuthState.Idle -> { + LoginButtonContent( + onLoginClick = { + viewModel.login(context as android.app.Activity) + } + ) + } + is AuthState.Loading -> { + LoadingContent() + } + is AuthState.Success -> { + SuccessContent() + } + is AuthState.Error -> { + ErrorContent( + error = error, + onRetry = { + viewModel.login(context as android.app.Activity) + }, + onDismiss = { + viewModel.clearError() + } + ) + } + } + } + } +} + +@Composable +private fun LoginButtonContent(onLoginClick: () -> Unit) { + Button( + onClick = onLoginClick, + modifier = Modifier + .fillMaxWidth() + .height(50.dp), + shape = MaterialTheme.shapes.medium + ) { + Text( + text = "Sign in with OAuth", + style = MaterialTheme.typography.labelLarge + ) + } + + Spacer(modifier = Modifier.height(16.dp)) + + Text( + text = "You will be redirected to Quran Foundation to securely sign in.", + style = MaterialTheme.typography.bodySmall, + textAlign = TextAlign.Center, + color = 
MaterialTheme.colorScheme.onSurfaceVariant + ) +} + +@Composable +private fun LoadingContent() { + Column( + horizontalAlignment = Alignment.CenterHorizontally, + modifier = Modifier.fillMaxWidth() + ) { + CircularProgressIndicator( + modifier = Modifier.size(48.dp) + ) + + Spacer(modifier = Modifier.height(16.dp)) + + Text( + text = "Signing in...", + style = MaterialTheme.typography.bodyLarge, + textAlign = TextAlign.Center + ) + } +} + +@Composable +private fun SuccessContent() { + Column( + horizontalAlignment = Alignment.CenterHorizontally, + modifier = Modifier.fillMaxWidth() + ) { + Text( + text = "✓", + style = MaterialTheme.typography.displayLarge, + color = MaterialTheme.colorScheme.primary, + textAlign = TextAlign.Center + ) + + Spacer(modifier = Modifier.height(16.dp)) + + Text( + text = "Successfully signed in!", + style = MaterialTheme.typography.bodyLarge, + textAlign = TextAlign.Center + ) + + Spacer(modifier = Modifier.height(8.dp)) + + Text( + text = "Your session is now active.", + style = MaterialTheme.typography.bodySmall, + textAlign = TextAlign.Center, + color = MaterialTheme.colorScheme.onSurfaceVariant + ) + } +} + +@Composable +private fun ErrorContent( + error: String?, + onRetry: () -> Unit, + onDismiss: () -> Unit +) { + Column( + horizontalAlignment = Alignment.CenterHorizontally, + modifier = Modifier + .fillMaxWidth() + .padding(16.dp) + ) { + Text( + text = "✗", + style = MaterialTheme.typography.displayLarge, + color = MaterialTheme.colorScheme.error, + textAlign = TextAlign.Center + ) + + Spacer(modifier = Modifier.height(16.dp)) + + Text( + text = "Authentication Failed", + style = MaterialTheme.typography.bodyLarge, + textAlign = TextAlign.Center + ) + + if (error != null) { + Spacer(modifier = Modifier.height(8.dp)) + + Surface( + modifier = Modifier + .fillMaxWidth() + .padding(8.dp), + color = MaterialTheme.colorScheme.errorContainer, + shape = MaterialTheme.shapes.small + ) { + Text( + text = error, + style = MaterialTheme.typography.bodySmall, + color = MaterialTheme.colorScheme.onErrorContainer, + modifier = Modifier.padding(12.dp), + textAlign = TextAlign.Center + ) + } + } + + Spacer(modifier = Modifier.height(16.dp)) + + Row( + modifier = Modifier + .fillMaxWidth() + .padding(horizontal = 8.dp), + horizontalArrangement = Arrangement.spacedBy(8.dp) + ) { + OutlinedButton( + onClick = onDismiss, + modifier = Modifier.weight(1f) + ) { + Text("Dismiss") + } + + Button( + onClick = onRetry, + modifier = Modifier.weight(1f) + ) { + Text("Retry") + } + } + } +} + diff --git a/demo/android/src/main/kotlin/com/quran/shared/demo/android/ui/auth/AuthViewModel.kt b/demo/android/src/main/kotlin/com/quran/shared/demo/android/ui/auth/AuthViewModel.kt new file mode 100644 index 00000000..ba23b43a --- /dev/null +++ b/demo/android/src/main/kotlin/com/quran/shared/demo/android/ui/auth/AuthViewModel.kt @@ -0,0 +1,344 @@ +package com.quran.shared.demo.android.ui.auth + +import android.app.Activity +import android.app.Application +import android.content.Context +import android.content.Intent +import android.content.SharedPreferences +import android.net.Uri +import androidx.lifecycle.AndroidViewModel +import androidx.lifecycle.viewModelScope +import com.quran.shared.auth.AuthenticationManager +import kotlinx.coroutines.flow.MutableStateFlow +import kotlinx.coroutines.flow.StateFlow +import kotlinx.coroutines.launch +import kotlin.random.Random +import androidx.core.content.edit + +/** + * ViewModel that manages authentication state and OAuth flow for Android. 
+ * + * Implements OAuth 2.0 with PKCE (RFC 7636) for secure mobile authentication. + * Handles: + * - PKCE code generation (code_verifier and code_challenge) + * - Browser launch for user authorization + * - OAuth redirect callback processing + * - Token storage in encrypted SharedPreferences + * - Token refresh and validation + * + * Follows CLAUDE.md architecture by separating UI state (ViewModel) from + * business logic (AuthenticationManager). + */ +class AuthViewModel(application: Application) : AndroidViewModel(application) { + private val authManager = AuthenticationManager() + + // TODO: check if to use shared storage instead of SharedPrefs + private val prefs: SharedPreferences = application.getSharedPreferences("oauth", Context.MODE_PRIVATE) + + private val _authState = MutableStateFlow(AuthState.Idle) + val authState: StateFlow = _authState + + private val _error = MutableStateFlow(null) + val error: StateFlow = _error + + + /** + * Initiates OAuth login flow. + * + * Process: + * 1. Generate PKCE code verifier (cryptographically random 128 characters) + * 2. Create code challenge (SHA-256 hash + base64 encoding) + * 3. Generate state parameter (CSRF protection) + * 4. Store verifier and state for later validation + * 5. Build authorization URL + * 6. Launch system browser + * + * @param activity Activity to launch browser from + */ + fun login(activity: Activity) { + viewModelScope.launch { + try { + _authState.value = AuthState.Loading + _error.value = null + + // Generate PKCE code verifier (43-128 characters, unreserved characters) + val codeVerifier = generateCodeVerifier() + + // Generate state for CSRF protection + val state = generateRandomState() + + // Build authorization URL with PKCE + val authUrl = authManager.buildAuthorizationUrl( + codeVerifier = codeVerifier, + state = state + ) + // TODO: remove logging from production + println("DEBUG: Authorization URL: $authUrl") + + // Store verifier and state for callback + storeOAuthState(codeVerifier, state) + + // Launch browser with authorization URL + launchBrowser(activity, authUrl) + + } catch (e: Exception) { + _error.value = e.message ?: "Authentication failed" + _authState.value = AuthState.Error(e) + // TODO: remove logging from production + println("DEBUG: OAuth error: ${e.message}") + e.printStackTrace() + } + } + } + + /** + * Handles OAuth redirect callback from system browser. + * + * Called from MainActivity when app is opened via deep link: + * com.quran.oauth://callback?code=AUTH_CODE&state=STATE_VALUE + * + * Process: + * 1. Parse redirect URI + * 2. Check for error in redirect + * 3. Validate state parameter (CSRF protection) + * 4. Extract authorization code + * 5. Exchange code for tokens using AuthenticationManager + * 6. Store tokens securely + * 7. 
Update UI state + * + * @param redirectUri The redirect URI from OAuth provider + */ + fun handleOAuthRedirect(redirectUri: String) { + viewModelScope.launch { + try { + _authState.value = AuthState.Loading + _error.value = null + + // Parse redirect URI + val uri = Uri.parse(redirectUri) + + // Check for error in redirect + val errorParam = uri.getQueryParameter("error") + if (errorParam != null) { + val errorDescription = uri.getQueryParameter("error_description") + ?: "Unknown error" + throw Exception("OAuth Error: $errorParam - $errorDescription") + } + + // Extract authorization code and state + val authCode = uri.getQueryParameter("code") + ?: throw Exception("No authorization code in redirect") + + val returnedState = uri.getQueryParameter("state") + ?: throw Exception("No state parameter in redirect") + + // Validate state (CSRF protection) + val storedState = retrieveStoredState() + if (storedState != returnedState) { + throw Exception("State parameter mismatch - possible CSRF attack") + } + + // Retrieve stored code verifier + val codeVerifier = retrieveStoredCodeVerifier() + ?: throw Exception("Code verifier not found - invalid state") + + // Exchange authorization code for tokens + val tokenResponse = authManager.exchangeCodeForToken( + code = authCode, + codeVerifier = codeVerifier + ) + + // Store tokens securely + storeTokens(tokenResponse) + + // Clear stored OAuth state + clearOAuthState() + + _authState.value = AuthState.Success + + } catch (e: Exception) { + _error.value = e.message ?: "Failed to handle OAuth redirect" + _authState.value = AuthState.Error(e) + } + } + } + + /** + * Refreshes the access token if expired. + * + * Should be called before making API requests to ensure valid token. + * + * @return true if refresh succeeded, false otherwise + */ + suspend fun refreshAccessTokenIfNeeded(): Boolean { + return try { + val refreshToken = retrieveStoredRefreshToken() ?: return false + val expirationTime = retrieveTokenExpiration() + + // Check if token is expired (with 60 second buffer) + if (!authManager.isTokenValid(expirationTime)) { + val newTokenResponse = authManager.refreshToken(refreshToken) + storeTokens(newTokenResponse) + true + } else { + true + } + } catch (e: Exception) { + false + } + } + + /** + * Logs out user by revoking tokens and clearing storage. + */ + fun logout() { + viewModelScope.launch { + try { + val accessToken = retrieveStoredAccessToken() + if (accessToken != null) { + authManager.revokeToken(accessToken, "access_token") + } + + val refreshToken = retrieveStoredRefreshToken() + if (refreshToken != null) { + authManager.revokeToken(refreshToken, "refresh_token") + } + + clearAllTokens() + _authState.value = AuthState.Idle + _error.value = null + + } catch (e: Exception) { + _error.value = "Logout failed: ${e.message}" + } + } + } + + fun clearError() { + _error.value = null + } + + // ========================= Helper Methods ========================= + + /** + * Generates a cryptographically random PKCE code verifier. + * + * RFC 7636: 43-128 characters from unreserved characters + * [A-Z] [a-z] [0-9] - . _ ~ + */ + private fun generateCodeVerifier(): String { + val charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~" + return (1..128).map { charset[Random.nextInt(charset.length)] }.joinToString("") + } + + /** + * Generates a random state parameter for CSRF protection. 
+ */ + private fun generateRandomState(): String { + val charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" + return (1..32).map { charset[Random.nextInt(charset.length)] }.joinToString("") + } + + /** + * Launches system browser with OAuth authorization URL. + */ + private fun launchBrowser(activity: Activity, authUrl: String) { + val intent = Intent(Intent.ACTION_VIEW, Uri.parse(authUrl)) + activity.startActivity(intent) + } + + /** + * Stores OAuth state (code_verifier, state) for callback validation. + */ + private fun storeOAuthState(codeVerifier: String, state: String) { + prefs.edit { + putString("code_verifier", codeVerifier) + .putString("state", state) + } + } + + /** + * Clears OAuth state after successful authentication. + */ + private fun clearOAuthState() { + prefs.edit { + remove("code_verifier") + .remove("state") + } + } + + /** + * Stores tokens securely in SharedPreferences. + * + * In production, use EncryptedSharedPreferences or Android Keystore. + */ + private fun storeTokens(tokenResponse: com.quran.shared.auth.TokenResponse) { + val expirationTime = System.currentTimeMillis() + (tokenResponse.expiresIn * 1000) + prefs.edit { + putString("access_token", tokenResponse.accessToken) + .putString("refresh_token", tokenResponse.refreshToken) + .putLong("token_expiration", expirationTime) + .putLong("token_retrieved_at", System.currentTimeMillis()) + } + } + + /** + * Retrieves stored access token. + */ + fun retrieveStoredAccessToken(): String? { + return prefs.getString("access_token", null) + } + + /** + * Retrieves stored refresh token. + */ + private fun retrieveStoredRefreshToken(): String? { + return prefs.getString("refresh_token", null) + } + + /** + * Retrieves stored code verifier for token exchange. + */ + private fun retrieveStoredCodeVerifier(): String? { + return prefs.getString("code_verifier", null) + } + + /** + * Retrieves stored state parameter for validation. + */ + private fun retrieveStoredState(): String? { + return prefs.getString("state", null) + } + + /** + * Retrieves token expiration time. + */ + private fun retrieveTokenExpiration(): Long { + return prefs.getLong("token_expiration", 0) + } + + /** + * Clears all stored tokens. 
+ */ + private fun clearAllTokens() { + prefs.edit { + remove("access_token") + .remove("refresh_token") + .remove("token_expiration") + .remove("token_retrieved_at") + .remove("code_verifier") + .remove("state") + } + } +} + +/** + * Sealed class representing authentication state + */ +sealed class AuthState { + object Idle : AuthState() + object Loading : AuthState() + object Success : AuthState() + data class Error(val exception: Exception) : AuthState() +} + diff --git a/demo/apple/QuranSyncDemo/QuranSyncDemo.xcodeproj/project.pbxproj b/demo/apple/QuranSyncDemo/QuranSyncDemo.xcodeproj/project.pbxproj index 6f9423ea..5b6fb9e7 100644 --- a/demo/apple/QuranSyncDemo/QuranSyncDemo.xcodeproj/project.pbxproj +++ b/demo/apple/QuranSyncDemo/QuranSyncDemo.xcodeproj/project.pbxproj @@ -6,6 +6,10 @@ objectVersion = 77; objects = { +/* Begin PBXBuildFile section */ + C163F6BE2EAA42E6001AD126 /* KMPNativeCoroutinesAsync in Frameworks */ = {isa = PBXBuildFile; productRef = C163F6BD2EAA42E6001AD126 /* KMPNativeCoroutinesAsync */; }; +/* End PBXBuildFile section */ + /* Begin PBXContainerItemProxy section */ C1D8E3602DDA5A530076D793 /* PBXContainerItemProxy */ = { isa = PBXContainerItemProxy; @@ -52,6 +56,7 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + C163F6BE2EAA42E6001AD126 /* KMPNativeCoroutinesAsync in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -113,6 +118,7 @@ ); name = QuranSyncDemo; packageProductDependencies = ( + C163F6BD2EAA42E6001AD126 /* KMPNativeCoroutinesAsync */, ); productName = QuranSyncDemo; productReference = C1D8E3522DDA5A520076D793 /* QuranSyncDemo.app */; @@ -172,7 +178,7 @@ attributes = { BuildIndependentTargetsInParallel = 1; LastSwiftUpdateCheck = 1630; - LastUpgradeCheck = 1630; + LastUpgradeCheck = 2600; TargetAttributes = { C1D8E3512DDA5A520076D793 = { CreatedOnToolsVersion = 16.3; @@ -196,6 +202,9 @@ ); mainGroup = C1D8E3492DDA5A520076D793; minimizedProjectReferenceProxies = 1; + packageReferences = ( + C163F6BC2EAA42E6001AD126 /* XCRemoteSwiftPackageReference "KMP-NativeCoroutines" */, + ); preferredProjectObjectVersion = 77; productRefGroup = C1D8E3532DDA5A520076D793 /* Products */; projectDirPath = ""; @@ -248,7 +257,7 @@ ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; - shellScript = "cd \"$SRCROOT/../../..\"\n./gradlew :umbrella:embedAndSignAppleFrameworkForXcode\n"; + shellScript = "if [ \"YES\" = \"$OVERRIDE_KOTLIN_BUILD_IDE_SUPPORTED\" ]; then\n echo \"Skipping Gradle build task invocation due to OVERRIDE_KOTLIN_BUILD_IDE_SUPPORTED environment variable set to \\\"YES\\\"\"\n exit 0\nfi\ncd \"$SRCROOT/../../..\"\n./gradlew :umbrella:embedAndSignAppleFrameworkForXcode"; }; /* End PBXShellScriptBuildPhase section */ @@ -349,6 +358,7 @@ MTL_FAST_MATH = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; + STRING_CATALOG_GENERATE_SYMBOLS = YES; SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)"; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; }; @@ -406,6 +416,7 @@ MTL_ENABLE_DEBUG_INFO = NO; MTL_FAST_MATH = YES; SDKROOT = iphoneos; + STRING_CATALOG_GENERATE_SYMBOLS = YES; SWIFT_COMPILATION_MODE = wholemodule; VALIDATE_PRODUCT = YES; }; @@ -583,6 +594,25 @@ defaultConfigurationName = Release; }; /* End XCConfigurationList section */ + +/* Begin XCRemoteSwiftPackageReference section */ + C163F6BC2EAA42E6001AD126 /* XCRemoteSwiftPackageReference "KMP-NativeCoroutines" */ = { + isa = XCRemoteSwiftPackageReference; + repositoryURL = "https://github.com/rickclephas/KMP-NativeCoroutines.git"; + requirement = { + 
branch = master; + kind = branch; + }; + }; +/* End XCRemoteSwiftPackageReference section */ + +/* Begin XCSwiftPackageProductDependency section */ + C163F6BD2EAA42E6001AD126 /* KMPNativeCoroutinesAsync */ = { + isa = XCSwiftPackageProductDependency; + package = C163F6BC2EAA42E6001AD126 /* XCRemoteSwiftPackageReference "KMP-NativeCoroutines" */; + productName = KMPNativeCoroutinesAsync; + }; +/* End XCSwiftPackageProductDependency section */ }; rootObject = C1D8E34A2DDA5A520076D793 /* Project object */; } diff --git a/demo/apple/QuranSyncDemo/QuranSyncDemo.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved b/demo/apple/QuranSyncDemo/QuranSyncDemo.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved new file mode 100644 index 00000000..ac2d26d3 --- /dev/null +++ b/demo/apple/QuranSyncDemo/QuranSyncDemo.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved @@ -0,0 +1,24 @@ +{ + "originHash" : "609133b310e9c4579a1d75e9696445b51bd516493282d44ac82bc290103e37c2", + "pins" : [ + { + "identity" : "kmp-nativecoroutines", + "kind" : "remoteSourceControl", + "location" : "https://github.com/rickclephas/KMP-NativeCoroutines.git", + "state" : { + "branch" : "master", + "revision" : "e4b66be72f3e904b8fa096ac255486c81bacce5f" + } + }, + { + "identity" : "rxswift", + "kind" : "remoteSourceControl", + "location" : "https://github.com/ReactiveX/RxSwift.git", + "state" : { + "revision" : "5dd1907d64f0d36f158f61a466bab75067224893", + "version" : "6.9.0" + } + } + ], + "version" : 3 +} diff --git a/demo/apple/QuranSyncDemo/QuranSyncDemo.xcodeproj/project.xcworkspace/xcuserdata/ahmedre.xcuserdatad/UserInterfaceState.xcuserstate b/demo/apple/QuranSyncDemo/QuranSyncDemo.xcodeproj/project.xcworkspace/xcuserdata/ahmedre.xcuserdatad/UserInterfaceState.xcuserstate deleted file mode 100644 index cae8c38a..00000000 Binary files a/demo/apple/QuranSyncDemo/QuranSyncDemo.xcodeproj/project.xcworkspace/xcuserdata/ahmedre.xcuserdatad/UserInterfaceState.xcuserstate and /dev/null differ diff --git a/demo/apple/QuranSyncDemo/QuranSyncDemo/AuthView.swift b/demo/apple/QuranSyncDemo/QuranSyncDemo/AuthView.swift new file mode 100644 index 00000000..2c04b38c --- /dev/null +++ b/demo/apple/QuranSyncDemo/QuranSyncDemo/AuthView.swift @@ -0,0 +1,151 @@ +import SwiftUI +import AuthenticationServices + +/** + * Authentication screen for iOS demo app. + * + * Displays: + * - Login button to initiate OAuth flow + * - Loading state during authentication + * - Success message after successful login + * - Error messages for failed authentication + * + * Uses SwiftUI with MVVM pattern for state management. 
+ */ +struct AuthView: View { + @StateObject private var viewModel = AuthViewModel() + var onAuthenticationSuccess: () -> Void = {} + + var body: some View { + ZStack { + // Background + Color(.systemBackground) + .ignoresSafeArea() + + VStack(spacing: 24) { + // Header + VStack(spacing: 8) { + Text("Quran.com Sync") + .font(.largeTitle) + .fontWeight(.bold) + + Text("Sign in with Quran Foundation") + .font(.body) + .foregroundColor(.secondary) + } + .padding(.bottom, 32) + + // Content based on auth state + Group { + switch viewModel.authState { + case .idle: + loginButtonContent + case .loading: + loadingContent + case .success: + successContent + case .error: + errorContent + } + } + + Spacer() + } + .padding(.horizontal, 16) + .padding(.vertical, 32) + } + .onChange(of: viewModel.authState) { _, newState in + if case .success = newState { + onAuthenticationSuccess() + } + } + } + + // MARK: - Content Views + + private var loginButtonContent: some View { + VStack(spacing: 16) { + Button(action: { viewModel.login() }) { + Text("Sign in with OAuth") + .font(.headline) + .foregroundColor(.white) + .frame(maxWidth: .infinity) + .frame(height: 50) + .background(Color.accentColor) + .cornerRadius(8) + } + + Text("You will be redirected to Quran Foundation to securely sign in.") + .font(.caption) + .foregroundColor(.secondary) + .multilineTextAlignment(.center) + } + } + + private var loadingContent: some View { + VStack(spacing: 16) { + ProgressView() + .scaleEffect(1.5, anchor: .center) + .padding() + + Text("Signing in...") + .font(.body) + } + } + + private var successContent: some View { + VStack(spacing: 16) { + Image(systemName: "checkmark.circle.fill") + .font(.system(size: 60)) + .foregroundColor(.green) + + Text("Successfully signed in!") + .font(.headline) + + Text("Your session is now active.") + .font(.caption) + .foregroundColor(.secondary) + } + .padding() + } + + private var errorContent: some View { + VStack(spacing: 16) { + Image(systemName: "xmark.circle.fill") + .font(.system(size: 60)) + .foregroundColor(.red) + + Text("Authentication Failed") + .font(.headline) + + if let error = viewModel.error { + Text(error) + .font(.caption) + .foregroundColor(.red) + .padding(12) + .background(Color(.systemRed).opacity(0.1)) + .cornerRadius(8) + .multilineTextAlignment(.center) + } + + HStack(spacing: 12) { + Button("Dismiss") { + viewModel.clearError() + } + .buttonStyle(.bordered) + + Button("Retry") { + viewModel.login() + } + .buttonStyle(.borderedProminent) + } + } + .padding() + } +} + +// MARK: - Preview +#Preview { + AuthView() +} + diff --git a/demo/apple/QuranSyncDemo/QuranSyncDemo/AuthViewModel.swift b/demo/apple/QuranSyncDemo/QuranSyncDemo/AuthViewModel.swift new file mode 100644 index 00000000..8f8fc423 --- /dev/null +++ b/demo/apple/QuranSyncDemo/QuranSyncDemo/AuthViewModel.swift @@ -0,0 +1,533 @@ +import Foundation +import Combine +import AuthenticationServices +import Security +import CryptoKit +import Shared + +/** + * ViewModel for managing OAuth authentication on iOS. + * + * Implements OAuth 2.0 with PKCE (RFC 7636) for secure mobile authentication. + * Handles: + * - PKCE code generation (code_verifier and code_challenge) + * - ASWebAuthenticationSession for secure OAuth consent screen + * - OAuth redirect callback processing + * - Token storage in iOS Keychain + * - Token refresh and validation + * + * Uses Combine framework for reactive state management and SwiftUI integration. 
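+ *
+ * PKCE at a glance (illustrative sketch; the real challenge derivation is assumed to live
+ * behind AuthenticationManager.buildAuthorizationUrl):
+ *
+ *     let verifier = generateCodeVerifier()                  // 43-128 unreserved characters
+ *     let digest = SHA256.hash(data: Data(verifier.utf8))    // CryptoKit SHA-256
+ *     let challenge = Data(digest).base64EncodedString()     // then base64url: "+" -> "-",
+ *                                                            // "/" -> "_", strip "=" padding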
+ * Follows CLAUDE.md architecture by separating UI state (ObservableObject) from + * business logic (AuthenticationManager). + */ +class AuthViewModel: NSObject, ObservableObject, ASWebAuthenticationPresentationContextProviding { + @Published var authState: AuthState = .idle + @Published var error: String? + + private let authManager = AuthenticationManager(usePreProduction: true) + private var cancellables = Set() + + override init() { + super.init() + } + + /** + * Initiates OAuth login flow on iOS. + * + * Process: + * 1. Generate PKCE code verifier (cryptographically random 128 characters) + * 2. Create code challenge (SHA-256 hash + base64 encoding) + * 3. Generate state parameter (CSRF protection) + * 4. Store verifier and state for later validation + * 5. Build authorization URL + * 6. Launch ASWebAuthenticationSession for OAuth consent + * 7. Handle redirect callback + * + * ASWebAuthenticationSession provides: + * - Secure OAuth consent screen + * - Session cookie sharing with Safari + * - Automatic redirect handling + */ + func login() { + DispatchQueue.main.async { + self.authState = .loading + self.error = nil + } + + do { + // Generate PKCE code verifier (43-128 characters, unreserved characters) + let codeVerifier = generateCodeVerifier() + + // Generate state for CSRF protection + let state = generateRandomState() + + // Build authorization URL with PKCE + let authUrl = authManager.buildAuthorizationUrl( + codeVerifier: codeVerifier, + state: state + ) + + guard let url = URL(string: authUrl) else { + throw NSError(domain: "AuthViewModel", code: -1, userInfo: [ + NSLocalizedDescriptionKey: "Invalid authorization URL" + ]) + } + + // Store verifier and state for callback validation + storeOAuthState(codeVerifier: codeVerifier, state: state) + + // Create ASWebAuthenticationSession for secure OAuth + let session = ASWebAuthenticationSession( + url: url, + callbackURLScheme: "com.quran.oauth" + ) { [weak self] callbackUrl, error in + self?.handleAuthenticationCallback(url: callbackUrl, error: error) + } + + // Set presentation context provider + session.presentationContextProvider = self + + // Start the authentication session + if session.start() { + // Session started successfully + } else { + DispatchQueue.main.async { + self.error = "Failed to start authentication session" + self.authState = .error + } + } + + } catch { + DispatchQueue.main.async { + self.error = error.localizedDescription + self.authState = .error + } + } + } + + /** + * Handles OAuth redirect callback from ASWebAuthenticationSession. + * + * Called when user completes OAuth authorization and is redirected back to app: + * com.quran.oauth://callback?code=AUTH_CODE&state=STATE_VALUE + * + * Process: + * 1. Parse redirect URL + * 2. Check for error in redirect + * 3. Validate state parameter (CSRF protection) + * 4. Extract authorization code + * 5. Exchange code for tokens using AuthenticationManager + * 6. Store tokens in Keychain + * 7. Update UI state + */ + private func handleAuthenticationCallback(url: URL?, error: Error?) { + DispatchQueue.main.async { + self.authState = .loading + self.error = nil + } + + // Handle ASWebAuthenticationSession errors + if let error = error { + if let authError = error as? 
ASWebAuthenticationSessionError { + if authError.code == .canceledLogin { + DispatchQueue.main.async { + self.authState = .idle + self.error = nil + } + } else { + DispatchQueue.main.async { + self.error = "Authentication failed: \(authError.localizedDescription)" + self.authState = .error + } + } + } else { + DispatchQueue.main.async { + self.error = error.localizedDescription + self.authState = .error + } + } + return + } + + guard let callbackUrl = url else { + DispatchQueue.main.async { + self.error = "No callback URL received" + self.authState = .error + } + return + } + + self.handleOAuthRedirect(url: callbackUrl) + } + + /** + * Handles OAuth redirect callback from deep link. + * + * Called when app is opened via deep link with authorization code. + * + * @param url The redirect URL containing authorization code or error + */ + func handleOAuthRedirect(url: URL) { + DispatchQueue.main.async { + self.authState = .loading + self.error = nil + } + + guard let components = URLComponents(url: url, resolvingAgainstBaseURL: false) else { + DispatchQueue.main.async { + self.error = "Invalid redirect URL" + self.authState = .error + } + return + } + + // Check for error parameter + if let errorParam = components.queryItems?.first(where: { $0.name == "error" })?.value { + let errorDescription = components.queryItems? + .first(where: { $0.name == "error_description" })?.value ?? "Unknown error" + DispatchQueue.main.async { + self.error = "OAuth Error: \(errorParam) - \(errorDescription)" + self.authState = .error + } + return + } + + // Extract authorization code + guard let authCode = components.queryItems? + .first(where: { $0.name == "code" })?.value else { + DispatchQueue.main.async { + self.error = "No authorization code in redirect" + self.authState = .error + } + return + } + + // Extract and validate state parameter + guard let returnedState = components.queryItems? + .first(where: { $0.name == "state" })?.value else { + DispatchQueue.main.async { + self.error = "No state parameter in redirect" + self.authState = .error + } + return + } + + // Validate state (CSRF protection) + let storedState = retrieveStoredState() + guard storedState == returnedState else { + DispatchQueue.main.async { + self.error = "State parameter mismatch - possible CSRF attack" + self.authState = .error + } + return + } + + // Retrieve stored code verifier + guard let codeVerifier = retrieveStoredCodeVerifier() else { + DispatchQueue.main.async { + self.error = "Code verifier not found - invalid state" + self.authState = .error + } + return + } + + // Exchange code for tokens on background thread + Task { + do { + // Exchange authorization code for tokens + let tokenResponse = try await self.authManager.exchangeCodeForToken( + code: authCode, + codeVerifier: codeVerifier + ) + + // Store tokens in Keychain + try self.storeTokensInKeychain(tokenResponse) + + let runner = await MainActor.run { + // Clear stored OAuth state + self.clearOAuthState() + self.authState = .success + } + } catch { + await MainActor.run { + self.error = error.localizedDescription + self.authState = .error + } + } + } + } + + /** + * Refreshes the access token if expired. + * + * Should be called before making API requests to ensure valid token. 
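+ *
+ * Hypothetical call site (illustration only):
+ *
+ *     if await viewModel.refreshAccessTokenIfNeeded(),
+ *        let token = viewModel.getAccessToken() {
+ *         // attach the token as an "Authorization: Bearer" header on the request
+ *     }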
+     */
+    func refreshAccessTokenIfNeeded() async -> Bool {
+        do {
+            guard let refreshToken = try retrieveTokenFromKeychain(key: "refreshToken") else {
+                return false
+            }
+
+            let expirationTime = UserDefaults.standard.double(forKey: "tokenExpirationTime")
+
+            // Check if token is expired (with 60 second buffer)
+            if !authManager.isTokenValid(expirationTime: Int64(expirationTime)) {
+                let newTokenResponse = try await authManager.refreshToken(refreshToken: refreshToken)
+                try storeTokensInKeychain(newTokenResponse)
+                return true
+            } else {
+                return true
+            }
+        } catch {
+            return false
+        }
+    }
+
+    /**
+     * Logs out user by revoking tokens and clearing storage.
+     */
+    func logout() {
+        Task {
+            do {
+                if let accessToken = try self.retrieveTokenFromKeychain(key: "accessToken") {
+                    _ = try await self.authManager.revokeToken(token: accessToken, tokenTypeHint: "access_token")
+                }
+
+                if let refreshToken = try self.retrieveTokenFromKeychain(key: "refreshToken") {
+                    _ = try await self.authManager.revokeToken(token: refreshToken, tokenTypeHint: "refresh_token")
+                }
+
+                await MainActor.run {
+                    self.clearAllTokens()
+                    self.authState = .idle
+                    self.error = nil
+                }
+            } catch {
+                await MainActor.run {
+                    self.error = "Logout failed: \(error.localizedDescription)"
+                }
+            }
+        }
+    }
+
+    /**
+     * Retrieves stored access token.
+     */
+    func getAccessToken() -> String? {
+        try? retrieveTokenFromKeychain(key: "accessToken")
+    }
+
+    func clearError() {
+        error = nil
+    }
+
+    // ========================= Helper Methods =========================
+
+    /**
+     * Generates a cryptographically random PKCE code verifier.
+     *
+     * RFC 7636: 43-128 characters from unreserved characters
+     * [A-Z] [a-z] [0-9] - . _ ~
+     */
+    private func generateCodeVerifier() -> String {
+        let charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~"
+        var result = ""
+        for _ in 0..<128 {
+            let randomIndex = Int.random(in: 0..<charset.count)
+            result.append(charset[charset.index(charset.startIndex, offsetBy: randomIndex)])
+        }
+        return result
+    }
+
+    /**
+     * Generates a random state parameter for CSRF protection.
+     */
+    private func generateRandomState() -> String {
+        let charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
+        var result = ""
+        for _ in 0..<32 {
+            let randomIndex = Int.random(in: 0..<charset.count)
+            result.append(charset[charset.index(charset.startIndex, offsetBy: randomIndex)])
+        }
+        return result
+    }
+
+    /**
+     * Stores the PKCE code verifier and state parameter for validation of the redirect callback.
+     */
+    private func storeOAuthState(codeVerifier: String, state: String) {
+        UserDefaults.standard.set(codeVerifier, forKey: "oauth_code_verifier")
+        UserDefaults.standard.set(state, forKey: "oauth_state")
+    }
+
+    /**
+     * Clears the stored PKCE code verifier and state parameter.
+     */
+    private func clearOAuthState() {
+        UserDefaults.standard.removeObject(forKey: "oauth_code_verifier")
+        UserDefaults.standard.removeObject(forKey: "oauth_state")
+    }
+
+    /**
+     * Retrieves stored code verifier for the token exchange.
+     */
+    private func retrieveStoredCodeVerifier() -> String? {
+        UserDefaults.standard.string(forKey: "oauth_code_verifier")
+    }
+
+    /**
+     * Retrieves stored state parameter for validation.
+     */
+    private func retrieveStoredState() -> String? {
+        UserDefaults.standard.string(forKey: "oauth_state")
+    }
+
+    /**
+     * Stores tokens securely in iOS Keychain.
+     *
+     * Keychain provides encrypted storage for sensitive data.
+     */
+    private func storeTokensInKeychain(_ tokenResponse: Shared.TokenResponse) throws {
+        let expirationTime = Date().timeIntervalSince1970 + Double(tokenResponse.expiresIn)
+
+        // Store access token
+        try storeTokenInKeychain(token: tokenResponse.accessToken, key: "accessToken")
+
+        // Store refresh token if provided
+        if let refreshToken = tokenResponse.refreshToken {
+            try storeTokenInKeychain(token: refreshToken, key: "refreshToken")
+        }
+
+        // Store expiration time in UserDefaults
+        UserDefaults.standard.set(expirationTime, forKey: "tokenExpirationTime")
+    }
+
+    /**
+     * Stores a single token in Keychain.
+     */
+    private func storeTokenInKeychain(token: String, key: String) throws {
+        let data = token.data(using: .utf8)!
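+        // Note: Keychain offers no single "upsert" call, so any existing item for this key is
+        // deleted first and the new value is then added; SecItemDelete simply returns
+        // errSecItemNotFound when there is nothing to remove.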
+ + // First, try to delete existing value + var deleteQuery: [String: Any] = [ + kSecClass as String: kSecClassGenericPassword, + kSecAttrAccount as String: key + ] + SecItemDelete(deleteQuery as CFDictionary) + + // Add new value + var addQuery: [String: Any] = [ + kSecClass as String: kSecClassGenericPassword, + kSecAttrAccount as String: key, + kSecValueData as String: data, + kSecAttrAccessible as String: kSecAttrAccessibleWhenUnlockedThisDeviceOnly + ] + + let status = SecItemAdd(addQuery as CFDictionary, nil) + guard status == errSecSuccess else { + throw NSError(domain: "Keychain", code: Int(status), userInfo: [ + NSLocalizedDescriptionKey: "Failed to store token in Keychain" + ]) + } + } + + /** + * Retrieves a token from Keychain. + */ + private func retrieveTokenFromKeychain(key: String) throws -> String? { + var query: [String: Any] = [ + kSecClass as String: kSecClassGenericPassword, + kSecAttrAccount as String: key, + kSecReturnData as String: true + ] + + var result: AnyObject? + let status = SecItemCopyMatching(query as CFDictionary, &result) + + guard status == errSecSuccess else { + if status == errSecItemNotFound { + return nil + } + throw NSError(domain: "Keychain", code: Int(status), userInfo: [ + NSLocalizedDescriptionKey: "Failed to retrieve token from Keychain" + ]) + } + + guard let data = result as? Data else { + return nil + } + + return String(data: data, encoding: .utf8) + } + + /** + * Clears all stored tokens from Keychain and UserDefaults. + */ + private func clearAllTokens() { + let keys = ["accessToken", "refreshToken", "oauth_code_verifier", "oauth_state", "tokenExpirationTime"] + + for key in keys { + var query: [String: Any] = [ + kSecClass as String: kSecClassGenericPassword, + kSecAttrAccount as String: key + ] + SecItemDelete(query as CFDictionary) + UserDefaults.standard.removeObject(forKey: key) + } + } + + // MARK: - ASWebAuthenticationPresentationContextProviding + + /** + * Provides the presentation anchor for ASWebAuthenticationSession. + * + * Required by ASWebAuthenticationPresentationContextProviding protocol. + */ + func presentationAnchor(for session: ASWebAuthenticationSession) -> ASPresentationAnchor { + return ASPresentationAnchor() + } +} + +/** + * Represents the authentication state machine. + */ +enum AuthState { + case idle + case loading + case success + case error +} + +// MARK: - Helper Functions + +/** + * Generates a cryptographically random PKCE code verifier. + */ +private func generateCodeVerifier() -> String { + let charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~" + var result = "" + for _ in 0..<128 { + let randomIndex = Int.random(in: 0.. String { + let charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" + var result = "" + for _ in 0..<32 { + let randomIndex = Int.random(in: 0.. 
String { - return "Sura \(bookmark.sura), Ayah \(bookmark.ayah)" + func formatBookmark(_ bookmark: BookmarkItem) -> String { + "Page \(bookmark.page)" } // Format the timestamp - func formatTimestamp(_ timestamp: Int64) -> String { - let date = Date(timeIntervalSince1970: TimeInterval(timestamp)) + func formatTimestamp(_ date: Date) -> String { let formatter = DateFormatter() formatter.dateStyle = .medium formatter.timeStyle = .short return formatter.string(from: date) } + + private static func mapToItem(_ bookmark: Bookmark.PageBookmark) -> BookmarkItem { + let identifier = bookmark.localId + let lastUpdatedDate = bookmark.lastUpdated + return BookmarkItem(id: identifier, page: Int(bookmark.page), lastUpdated: lastUpdatedDate) + } } diff --git a/demo/apple/QuranSyncDemo/QuranSyncDemo/ContentView.swift b/demo/apple/QuranSyncDemo/QuranSyncDemo/ContentView.swift index 322713c6..83c10140 100644 --- a/demo/apple/QuranSyncDemo/QuranSyncDemo/ContentView.swift +++ b/demo/apple/QuranSyncDemo/QuranSyncDemo/ContentView.swift @@ -27,7 +27,7 @@ struct ContentView: View { Text(viewModel.formatBookmark(bookmark)) .font(.headline) - Text("Added: \(viewModel.formatTimestamp(bookmark.last_updated))") + Text("Updated: \(viewModel.formatTimestamp(bookmark.lastUpdated))") .font(.caption) .foregroundColor(.secondary) } @@ -37,7 +37,13 @@ struct ContentView: View { } Button(action: { - viewModel.addRandomBookmark() + Task { + do { + try await viewModel.addRandomBookmark() + } catch { + print("Failed to add random bookmark: \(error)") + } + } }) { Label("Add Random Bookmark", systemImage: "bookmark.fill") .frame(maxWidth: .infinity) @@ -46,13 +52,9 @@ struct ContentView: View { .padding() } .navigationTitle("Quran Bookmarks") - .toolbar { - Button(action: { - viewModel.loadBookmarks() - }) { - Image(systemName: "arrow.clockwise") - } - } + } + .task { + await viewModel.observeBookmarks() } } } diff --git a/demo/apple/QuranSyncDemo/QuranSyncDemo/DatabaseManager.swift b/demo/apple/QuranSyncDemo/QuranSyncDemo/DatabaseManager.swift index a046002b..437cfd33 100644 --- a/demo/apple/QuranSyncDemo/QuranSyncDemo/DatabaseManager.swift +++ b/demo/apple/QuranSyncDemo/QuranSyncDemo/DatabaseManager.swift @@ -6,37 +6,33 @@ // import Foundation +import KMPNativeCoroutinesAsync import Shared class DatabaseManager { static let shared = DatabaseManager() - private let database: QuranDatabase + private let bookmarksRepository: BookmarksRepository private init() { let driverFactory = DriverFactory() - self.database = DriverFactoryKt.makeDatabase(driverFactory: driverFactory) + self.bookmarksRepository = BookmarksRepositoryFactory.shared.createRepository(driverFactory: driverFactory) } - // Provides access to bookmark queries - var bookmarkQueries: BookmarksQueries { - return database.bookmarksQueries + func bookmarksSequence() -> any AsyncSequence<[Bookmark.PageBookmark], Error> { + return asyncSequence(for: bookmarksRepository.getBookmarksFlow()).map { bookmarks in + bookmarks.compactMap { $0 as? Bookmark.PageBookmark } + } } - // Get all bookmarks - func getAllBookmarks() -> [Bookmarks] { - return bookmarkQueries.getBookmarks().executeAsList() + // Add a bookmark for a given page using async/await bridge. 
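+    // KMPNativeCoroutinesAsync's asyncFunction(for:) bridges the repository's Kotlin suspend
+    // functions into Swift async calls; Kotlin exceptions surface here as thrown Swift errors.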
+ func addPageBookmark(page: Int) async throws { + try await asyncFunction(for: bookmarksRepository.addBookmark(page: Int32(page))) } - // Add a new bookmark - func addBookmark(sura: Int64, ayah: Int64) { - bookmarkQueries.addBookmark(sura: sura, ayah: ayah) - } - - // Add a random bookmark - func addRandomBookmark() { - let randomSura = Int64.random(in: 1...114) - let randomAyah = Int64.random(in: 1...286) // Max ayah count is 286 in Quran - addBookmark(sura: randomSura, ayah: randomAyah) + // Add a random bookmark using async/await bridge. + func addRandomBookmark() async throws { + let randomPage = Int.random(in: 1...604) + try await addPageBookmark(page: randomPage) } } diff --git a/demo/apple/QuranSyncDemo/QuranSyncDemo/QuranSyncDemoApp.swift b/demo/apple/QuranSyncDemo/QuranSyncDemo/QuranSyncDemoApp.swift index 2e22c7a2..0cf00bc9 100644 --- a/demo/apple/QuranSyncDemo/QuranSyncDemo/QuranSyncDemoApp.swift +++ b/demo/apple/QuranSyncDemo/QuranSyncDemo/QuranSyncDemoApp.swift @@ -1,24 +1,112 @@ -// -// QuranSyncDemoApp.swift -// QuranSyncDemo -// -// Created by Ahmed El-Helw on 5/18/25. -// - import SwiftUI -import Shared +import UIKit + +/** + * App delegate that handles OAuth deep link redirects on iOS. + * + * When the user completes OAuth flow in the browser, the system opens the app + * via a deep link with the authorization code. This delegate captures that + * redirect and notifies the AuthViewModel. + */ +class AppDelegate: NSObject, UIApplicationDelegate { + + /** + * Called when the app is opened with a URL (deep link). + * + * This handles OAuth redirects from the system browser. + * The redirect URL format: quran-sync://oauth/callback?code=AUTH_CODE&state=STATE + */ + func application( + _ app: UIApplication, + open url: URL, + options: [UIApplication.OpenURLOptionsKey: Any] = [:] + ) -> Bool { + // Handle OAuth redirect + if url.scheme == "com.quran.oauth" && url.host == "callback" { + // Notify the app about the OAuth callback + NotificationCenter.default.post( + name: NSNotification.Name("OAuthRedirect"), + object: nil, + userInfo: ["url": url] + ) + return true + } + + return false + } + + /** + * Called when the app is opened via scene activation (iOS 13+). + */ + func application( + _ application: UIApplication, + configurationForConnecting connectingSceneSession: UISceneSession, + options: UIScene.ConnectionOptions + ) -> UISceneConfiguration { + let configuration = UISceneConfiguration( + name: nil, + sessionRole: connectingSceneSession.role + ) + + // Handle URL context from scene options + if let urlContext = options.urlContexts.first { + NotificationCenter.default.post( + name: NSNotification.Name("OAuthRedirect"), + object: nil, + userInfo: ["url": urlContext.url] + ) + } + return configuration + } +} + +/** + * Main App entry point for the iOS demo. + * + * Sets up the scene, authentication flow, and handles deep link redirects. 
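+ *
+ * OAuth redirects arrive through AppDelegate, which rebroadcasts them as an "OAuthRedirect"
+ * notification; the scene observes that notification via onReceive and forwards the URL to
+ * AuthViewModel.handleOAuthRedirect(url:).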
+ */ @main struct QuranSyncDemoApp: App { - // Initialize the database manager when the app starts - init() { - // Access the shared instance to ensure it's initialized on app startup - let _ = DatabaseManager.shared - } - - var body: some Scene { - WindowGroup { - ContentView() + @UIApplicationDelegateAdaptor(AppDelegate.self) var appDelegate + @StateObject private var authViewModel = AuthViewModel() + @State private var isAuthenticating = false + + var body: some Scene { + WindowGroup { + ZStack { + // Main auth screen + AuthView(onAuthenticationSuccess: { + isAuthenticating = true + }) + .environmentObject(authViewModel) + + // Overlay for showing authentication success + if isAuthenticating { + Color.black.opacity(0.4) + .ignoresSafeArea() + + VStack { + Text("Ready to Sync!") + .font(.headline) + .foregroundColor(.white) + } + .padding() + .background(Color.green.opacity(0.8)) + .cornerRadius(12) + .padding() + } + } + .onReceive( + NotificationCenter.default.publisher( + for: NSNotification.Name("OAuthRedirect") + ) + ) { notification in + if let url = notification.userInfo?["url"] as? URL { + authViewModel.handleOAuthRedirect(url: url) + } + } + } } - } } + diff --git a/gradle.properties b/gradle.properties index 30f06b7e..a8a63b48 100644 --- a/gradle.properties +++ b/gradle.properties @@ -6,8 +6,11 @@ org.gradle.caching=true # Kotlin kotlin.code.style=official +kotlin.mpp.applyDefaultHierarchyTemplate=false # Android android.useAndroidX=true android.nonTransitiveRClass=true +# versioning +version=0.0.2 diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index fc797cab..86a2b96b 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -1,19 +1,63 @@ [versions] -agp = "8.10.0" -kotlin = "2.1.21" +agp = "8.13.2" +kotlin = "2.3.0" +kotlinx-serialization = "1.9.0" +kotlinx-datetime = "0.7.1" +ksp = "2.3.4" -maven-publish = "0.32.0" +kermit = "2.0.8" +native-coroutines = "1.0.0" -compose = "2025.05.00" +maven-publish = "0.35.0" + +compose = "2025.12.01" coroutines = "1.10.2" -sqldelight = "2.1.0" +sha2 = "0.8.0" +sqldelight = "2.2.1" +ktor = "3.3.3" + +oidc = "0.16.2" + +# Android configuration +android-compile-sdk = "36" +android-min-sdk = "23" +android-java-version = "17" + +# Project metadata +project-group = "com.quran" +project-version = "1.0.0" +project-inception-year = "2025" +project-url = "https://github.com/quran/mobile-data" [libraries] kotlinx-coroutines-core = { module = "org.jetbrains.kotlinx:kotlinx-coroutines-core", version.ref = "coroutines" } +kotlinx-coroutines-test = { module = "org.jetbrains.kotlinx:kotlinx-coroutines-test", version.ref = "coroutines" } + +kotlin-test = { module = "org.jetbrains.kotlin:kotlin-test", version.ref = "kotlin" } +kotlin-test-junit = { module = "org.jetbrains.kotlin:kotlin-test-junit", version.ref = "kotlin" } + +kermit = { module = "co.touchlab:kermit", version.ref = "kermit" } +sha2 = { module = "org.kotlincrypto.hash:sha2", version.ref = "sha2" } +sqldelight-jdbc-driver = { module = "app.cash.sqldelight:jdbc-driver", version.ref = "sqldelight" } sqldelight-extensions = { module = "app.cash.sqldelight:coroutines-extensions", version.ref = "sqldelight" } sqldelight-android-driver = { module = "app.cash.sqldelight:android-driver", version.ref = "sqldelight" } sqldelight-native-driver = { module = "app.cash.sqldelight:native-driver", version.ref = "sqldelight" } +sqldelight-sqlite-driver = { module = "app.cash.sqldelight:sqlite-driver", version.ref = "sqldelight" } + +# Ktor dependencies +ktor-client-core = { 
module = "io.ktor:ktor-client-core", version.ref = "ktor" } +ktor-client-content-negotiation = { module = "io.ktor:ktor-client-content-negotiation", version.ref = "ktor" } +ktor-client-logging = { module = "io.ktor:ktor-client-logging", version.ref = "ktor" } +ktor-client-okhttp = { module = "io.ktor:ktor-client-okhttp", version.ref = "ktor" } +ktor-client-darwin = { module = "io.ktor:ktor-client-darwin", version.ref = "ktor" } +ktor-serialization-json = { module = "io.ktor:ktor-serialization-kotlinx-json", version.ref = "ktor" } + +# Kotlin Serialization +kotlinx-serialization-json = { module = "org.jetbrains.kotlinx:kotlinx-serialization-json", version.ref = "kotlinx-serialization" } + +# Kotlin DateTime +kotlinx-datetime = { module = "org.jetbrains.kotlinx:kotlinx-datetime", version.ref = "kotlinx-datetime" } compose-bom = { module = "androidx.compose:compose-bom", version.ref = "compose" } compose-foundation = { module = "androidx.compose.foundation:foundation" } @@ -24,11 +68,17 @@ compose-ui = { module = "androidx.compose.ui:ui" } compose-ui-tooling = { module = "androidx.compose.ui:ui-tooling" } compose-ui-tooling-preview = { module = "androidx.compose.ui:ui-tooling-preview" } +oidc-appsupport = { module = "io.github.kalinjul.kotlin.multiplatform:oidc-appsupport", version.ref = "oidc" } + + [plugins] android-library = { id = "com.android.library", version.ref = "agp" } android-application = { id = "com.android.application", version.ref = "agp" } kotlin-android = { id = "org.jetbrains.kotlin.android", version.ref = "kotlin" } kotlin-compose = { id = "org.jetbrains.kotlin.plugin.compose", version.ref = "kotlin" } kotlin-multiplatform = { id = "org.jetbrains.kotlin.multiplatform", version.ref = "kotlin" } +kotlin-serialization = { id = "org.jetbrains.kotlin.plugin.serialization", version.ref = "kotlin" } sqldelight = { id = "app.cash.sqldelight", version.ref = "sqldelight" } vanniktech-maven-publish = { id = "com.vanniktech.maven.publish", version.ref = "maven-publish" } +ksp = { id = "com.google.devtools.ksp", version.ref = "ksp" } +native-coroutines = { id = "com.rickclephas.kmp.nativecoroutines", version.ref = "native-coroutines" } diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 1b33c55b..f8e1ee31 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index ca025c83..23449a2b 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.14-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-9.2.1-bin.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/gradlew b/gradlew index 23d15a93..adff685a 100755 --- a/gradlew +++ b/gradlew @@ -1,7 +1,7 @@ #!/bin/sh # -# Copyright © 2015-2021 the original authors. +# Copyright © 2015 the original authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -114,7 +114,6 @@ case "$( uname )" in #( NONSTOP* ) nonstop=true ;; esac -CLASSPATH="\\\"\\\"" # Determine the Java command to use to start the JVM. 
@@ -172,7 +171,6 @@ fi # For Cygwin or MSYS, switch paths to Windows format before running java if "$cygwin" || "$msys" ; then APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) - CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) JAVACMD=$( cygpath --unix "$JAVACMD" ) @@ -212,7 +210,6 @@ DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' set -- \ "-Dorg.gradle.appname=$APP_BASE_NAME" \ - -classpath "$CLASSPATH" \ -jar "$APP_HOME/gradle/wrapper/gradle-wrapper.jar" \ "$@" diff --git a/gradlew.bat b/gradlew.bat index db3a6ac2..c4bdd3ab 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -70,11 +70,10 @@ goto fail :execute @rem Setup the command line -set CLASSPATH= @rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" -jar "%APP_HOME%\gradle\wrapper\gradle-wrapper.jar" %* +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -jar "%APP_HOME%\gradle\wrapper\gradle-wrapper.jar" %* :end @rem End local scope for the variables with windows NT shell diff --git a/mutations-definitions/build.gradle.kts b/mutations-definitions/build.gradle.kts new file mode 100644 index 00000000..582517a3 --- /dev/null +++ b/mutations-definitions/build.gradle.kts @@ -0,0 +1,46 @@ +plugins { + alias(libs.plugins.kotlin.multiplatform) + alias(libs.plugins.vanniktech.maven.publish) +} + +kotlin { + iosX64() + iosArm64() + iosSimulatorArm64() + + macosArm64() + + jvm() + + sourceSets { + commonMain.dependencies { + implementation(libs.kotlinx.coroutines.core) + } + + commonTest.dependencies { + implementation(libs.kotlin.test) + } + } + + // don't show warnings for expect/actual classes + targets.configureEach { + compilations.configureEach { + compileTaskProvider.get().compilerOptions { + freeCompilerArgs.add("-Xexpect-actual-classes") + } + } + } +} + +mavenPublishing { + signAllPublications() + coordinates(libs.versions.project.group.get(), "mutations-definitions", libs.versions.project.version.get()) + + pom { + name = "Quran.com Mutations Definitions" + description = "Type declarations that can be used for mutations tracking." 
+ inceptionYear = "2025" + url = "https://github.com/quran/mobile-sync" + } +} + diff --git a/mutations-definitions/src/commonMain/kotlin/com/quran/shared/mutations/MutationType.kt b/mutations-definitions/src/commonMain/kotlin/com/quran/shared/mutations/MutationType.kt new file mode 100644 index 00000000..bdc81f2f --- /dev/null +++ b/mutations-definitions/src/commonMain/kotlin/com/quran/shared/mutations/MutationType.kt @@ -0,0 +1,11 @@ +package com.quran.shared.mutations + +enum class Mutation { + CREATED, + DELETED, + MODIFIED +} + +class LocalModelMutation(val model: Model, val remoteID: String?, val localID: String, val mutation: Mutation) + +class RemoteModelMutation(val model: Model, val remoteID: String, val mutation: Mutation) \ No newline at end of file diff --git a/persistence/build.gradle.kts b/persistence/build.gradle.kts index a100714d..082cec26 100644 --- a/persistence/build.gradle.kts +++ b/persistence/build.gradle.kts @@ -1,4 +1,3 @@ -import com.vanniktech.maven.publish.SonatypeHost import org.jetbrains.kotlin.gradle.dsl.JvmTarget plugins { @@ -6,6 +5,8 @@ plugins { alias(libs.plugins.android.library) alias(libs.plugins.sqldelight) alias(libs.plugins.vanniktech.maven.publish) + alias(libs.plugins.ksp) + alias(libs.plugins.native.coroutines) } kotlin { @@ -13,6 +14,7 @@ kotlin { iosArm64() iosSimulatorArm64() + jvm() androidTarget { publishLibraryVariants("release") compilerOptions { @@ -24,15 +26,44 @@ kotlin { commonMain.dependencies { implementation(libs.kotlinx.coroutines.core) implementation(libs.sqldelight.extensions) + implementation(libs.kermit) + implementation(libs.kotlinx.datetime) + api(projects.mutationsDefinitions) + } + + commonTest.dependencies { + implementation(libs.kotlin.test) + implementation(libs.kotlinx.coroutines.test) + } + + jvmMain.dependencies { + implementation(libs.sqldelight.sqlite.driver) } androidMain.dependencies { implementation(libs.sqldelight.android.driver) } - nativeMain.dependencies { - implementation(libs.sqldelight.native.driver) + androidUnitTest.dependencies { + implementation(libs.sqldelight.sqlite.driver) + implementation(libs.sqldelight.jdbc.driver) + } + + val appleMain by creating { + dependsOn(commonMain.get()) + dependencies { + implementation(libs.sqldelight.native.driver) + } } + + iosX64Main.get().dependsOn(appleMain) + iosArm64Main.get().dependsOn(appleMain) + iosSimulatorArm64Main.get().dependsOn(appleMain) + } + + sourceSets.all { + languageSettings.optIn("kotlin.experimental.ExperimentalObjCName") + languageSettings.optIn("kotlin.time.ExperimentalTime") } // don't show warnings for expect/actual classes @@ -47,15 +78,21 @@ kotlin { android { namespace = "com.quran.shared.persistence" - compileSdk = 35 + compileSdk = libs.versions.android.compile.sdk.get().toInt() defaultConfig { - minSdk = 21 + minSdk = libs.versions.android.min.sdk.get().toInt() } compileOptions { - sourceCompatibility = JavaVersion.VERSION_17 - targetCompatibility = JavaVersion.VERSION_17 + sourceCompatibility = JavaVersion.valueOf("VERSION_${libs.versions.android.java.version.get()}") + targetCompatibility = JavaVersion.valueOf("VERSION_${libs.versions.android.java.version.get()}") + } + + testOptions { + unitTests { + isIncludeAndroidResources = true + } } } @@ -69,14 +106,14 @@ sqldelight { } mavenPublishing { - publishToMavenCentral(SonatypeHost.CENTRAL_PORTAL) + publishToMavenCentral() signAllPublications() - coordinates(group.toString(), "persistence", version.toString()) + coordinates(libs.versions.project.group.get(), "persistence", 
libs.versions.project.version.get()) pom { name = "Quran.com Persistence Layer" - description = "A library for sharing data between iOS and Android mobile apps" - inceptionYear = "2025" - url = "https://github.com/quran/mobile-data" + description = "A library for storing user data for a Quran.com reading app, with the capability of tracking changes." + inceptionYear = libs.versions.project.inception.year.get() + url = libs.versions.project.url.get() } } diff --git a/persistence/src/androidMain/kotlin/com/quran/shared/persistence/util/PlatformDateTime.android.kt b/persistence/src/androidMain/kotlin/com/quran/shared/persistence/util/PlatformDateTime.android.kt new file mode 100644 index 00000000..ba4d6095 --- /dev/null +++ b/persistence/src/androidMain/kotlin/com/quran/shared/persistence/util/PlatformDateTime.android.kt @@ -0,0 +1,7 @@ +package com.quran.shared.persistence.util + +import kotlin.time.Instant + +actual typealias PlatformDateTime = Instant +actual fun PlatformDateTime.fromPlatform(): Instant = this +actual fun PlatformDateTime.toPlatform(): PlatformDateTime = this \ No newline at end of file diff --git a/persistence/src/androidUnitTest/kotlin/com/quran/shared/persistence/TestDatabaseDriver.android.kt b/persistence/src/androidUnitTest/kotlin/com/quran/shared/persistence/TestDatabaseDriver.android.kt new file mode 100644 index 00000000..6c31ca8d --- /dev/null +++ b/persistence/src/androidUnitTest/kotlin/com/quran/shared/persistence/TestDatabaseDriver.android.kt @@ -0,0 +1,12 @@ +package com.quran.shared.persistence + +import app.cash.sqldelight.db.SqlDriver +import app.cash.sqldelight.driver.jdbc.sqlite.JdbcSqliteDriver + +actual class TestDatabaseDriver { + actual fun createDriver(): SqlDriver { + val driver = JdbcSqliteDriver(JdbcSqliteDriver.IN_MEMORY) + QuranDatabase.Schema.create(driver) + return driver + } +} \ No newline at end of file diff --git a/persistence/src/nativeMain/kotlin/com/quran/shared/persistence/DriverFactory.native.kt b/persistence/src/appleMain/kotlin/com/quran/shared/persistence/DriverFactory.native.kt similarity index 100% rename from persistence/src/nativeMain/kotlin/com/quran/shared/persistence/DriverFactory.native.kt rename to persistence/src/appleMain/kotlin/com/quran/shared/persistence/DriverFactory.native.kt diff --git a/persistence/src/appleMain/kotlin/com/quran/shared/persistence/util/PlatformDateTime.native.kt b/persistence/src/appleMain/kotlin/com/quran/shared/persistence/util/PlatformDateTime.native.kt new file mode 100644 index 00000000..c441dfdf --- /dev/null +++ b/persistence/src/appleMain/kotlin/com/quran/shared/persistence/util/PlatformDateTime.native.kt @@ -0,0 +1,10 @@ +package com.quran.shared.persistence.util + +import kotlinx.datetime.toKotlinInstant +import kotlinx.datetime.toNSDate +import platform.Foundation.NSDate +import kotlin.time.Instant + +actual typealias PlatformDateTime = NSDate +actual fun PlatformDateTime.fromPlatform(): Instant = toKotlinInstant() +actual fun Instant.toPlatform(): PlatformDateTime = toNSDate() \ No newline at end of file diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/DriverFactory.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/DriverFactory.kt index 558341cf..6eab3b8e 100644 --- a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/DriverFactory.kt +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/DriverFactory.kt @@ -6,7 +6,7 @@ expect class DriverFactory { fun makeDriver(): SqlDriver } -fun makeDatabase(driverFactory: 
DriverFactory): QuranDatabase { +internal fun makeDatabase(driverFactory: DriverFactory): QuranDatabase { val driver = driverFactory.makeDriver() return QuranDatabase(driver) } \ No newline at end of file diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/input/BookmarkMigration.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/input/BookmarkMigration.kt new file mode 100644 index 00000000..41b8fe9d --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/input/BookmarkMigration.kt @@ -0,0 +1,6 @@ +package com.quran.shared.persistence.input + +sealed class BookmarkMigration { + data class Page(val page: Int) : BookmarkMigration() + data class Ayah(val sura: Int, val ayah: Int) : BookmarkMigration() +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/input/CollectionMigration.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/input/CollectionMigration.kt new file mode 100644 index 00000000..6a376af8 --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/input/CollectionMigration.kt @@ -0,0 +1,3 @@ +package com.quran.shared.persistence.input + +data class CollectionMigration(val name: String) diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/input/RemoteBookmark.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/input/RemoteBookmark.kt new file mode 100644 index 00000000..2f7a86db --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/input/RemoteBookmark.kt @@ -0,0 +1,12 @@ +package com.quran.shared.persistence.input + +import com.quran.shared.persistence.util.PlatformDateTime + +sealed class RemoteBookmark { + data class Page(val page: Int, val lastUpdated: PlatformDateTime) : RemoteBookmark() + data class Ayah( + val sura: Int, + val ayah: Int, + val lastUpdated: PlatformDateTime + ) : RemoteBookmark() +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/input/RemoteCollection.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/input/RemoteCollection.kt new file mode 100644 index 00000000..2ce6b61d --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/input/RemoteCollection.kt @@ -0,0 +1,8 @@ +package com.quran.shared.persistence.input + +import com.quran.shared.persistence.util.PlatformDateTime + +data class RemoteCollection( + val name: String?, + val lastUpdated: PlatformDateTime +) diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/input/RemoteCollectionBookmark.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/input/RemoteCollectionBookmark.kt new file mode 100644 index 00000000..5d8ff3a8 --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/input/RemoteCollectionBookmark.kt @@ -0,0 +1,24 @@ +package com.quran.shared.persistence.input + +import com.quran.shared.persistence.util.PlatformDateTime + +sealed class RemoteCollectionBookmark { + abstract val collectionId: String + abstract val lastUpdated: PlatformDateTime + abstract val bookmarkId: String? + + data class Page( + override val collectionId: String, + val page: Int, + override val lastUpdated: PlatformDateTime, + override val bookmarkId: String? = null + ) : RemoteCollectionBookmark() + + data class Ayah( + override val collectionId: String, + val sura: Int, + val ayah: Int, + override val lastUpdated: PlatformDateTime, + override val bookmarkId: String? 
= null + ) : RemoteCollectionBookmark() +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/input/RemoteNote.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/input/RemoteNote.kt new file mode 100644 index 00000000..1061ae95 --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/input/RemoteNote.kt @@ -0,0 +1,10 @@ +package com.quran.shared.persistence.input + +import com.quran.shared.persistence.util.PlatformDateTime + +data class RemoteNote( + val body: String?, + val startAyahId: Long?, + val endAyahId: Long?, + val lastUpdated: PlatformDateTime +) diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/model/Bookmark.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/model/Bookmark.kt new file mode 100644 index 00000000..11c8535b --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/model/Bookmark.kt @@ -0,0 +1,20 @@ +package com.quran.shared.persistence.model + +import com.quran.shared.persistence.util.PlatformDateTime + +sealed class Bookmark { + abstract val localId: String + + data class PageBookmark( + val page: Int, + val lastUpdated: PlatformDateTime, + override val localId: String + ) : Bookmark() + + data class AyahBookmark( + val sura: Int, + val ayah: Int, + val lastUpdated: PlatformDateTime, + override val localId: String + ) : Bookmark() +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/model/Collection.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/model/Collection.kt new file mode 100644 index 00000000..be37c778 --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/model/Collection.kt @@ -0,0 +1,9 @@ +package com.quran.shared.persistence.model + +import com.quran.shared.persistence.util.PlatformDateTime + +data class Collection( + val name: String, + val lastUpdated: PlatformDateTime, + val localId: String +) diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/model/CollectionBookmark.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/model/CollectionBookmark.kt new file mode 100644 index 00000000..9456aca0 --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/model/CollectionBookmark.kt @@ -0,0 +1,30 @@ +package com.quran.shared.persistence.model + +import com.quran.shared.persistence.util.PlatformDateTime + +sealed class CollectionBookmark { + abstract val collectionLocalId: String + abstract val collectionRemoteId: String? 
+ abstract val bookmarkLocalId: String + abstract val lastUpdated: PlatformDateTime + abstract val localId: String + + data class PageBookmark( + override val collectionLocalId: String, + override val collectionRemoteId: String?, + override val bookmarkLocalId: String, + val page: Int, + override val lastUpdated: PlatformDateTime, + override val localId: String + ) : CollectionBookmark() + + data class AyahBookmark( + override val collectionLocalId: String, + override val collectionRemoteId: String?, + override val bookmarkLocalId: String, + val sura: Int, + val ayah: Int, + override val lastUpdated: PlatformDateTime, + override val localId: String + ) : CollectionBookmark() +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/model/DatabaseTypes.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/model/DatabaseTypes.kt new file mode 100644 index 00000000..a89583a0 --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/model/DatabaseTypes.kt @@ -0,0 +1,13 @@ +package com.quran.shared.persistence.model + +import com.quran.shared.persistence.Ayah_bookmark +import com.quran.shared.persistence.Bookmark_collection +import com.quran.shared.persistence.Collection +import com.quran.shared.persistence.Note +import com.quran.shared.persistence.Page_bookmark + +internal typealias DatabasePageBookmark = Page_bookmark +internal typealias DatabaseAyahBookmark = Ayah_bookmark +internal typealias DatabaseCollection = Collection +internal typealias DatabaseBookmarkCollection = Bookmark_collection +internal typealias DatabaseNote = Note diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/model/Note.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/model/Note.kt new file mode 100644 index 00000000..b97f77d4 --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/model/Note.kt @@ -0,0 +1,11 @@ +package com.quran.shared.persistence.model + +import com.quran.shared.persistence.util.PlatformDateTime + +data class Note( + val body: String, + val startAyahId: Long, + val endAyahId: Long, + val lastUpdated: PlatformDateTime, + val localId: String +) diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/bookmark/BookmarksRepositoryFactory.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/bookmark/BookmarksRepositoryFactory.kt new file mode 100644 index 00000000..7c631da6 --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/bookmark/BookmarksRepositoryFactory.kt @@ -0,0 +1,38 @@ +package com.quran.shared.persistence.repository.bookmark + +import com.quran.shared.persistence.DriverFactory +import com.quran.shared.persistence.makeDatabase +import com.quran.shared.persistence.repository.bookmark.repository.BookmarksRepository +import com.quran.shared.persistence.repository.bookmark.repository.BookmarksRepositoryImpl +import com.quran.shared.persistence.repository.bookmark.repository.BookmarksSynchronizationRepository + +/** + * Factory for creating BookmarksRepository instances. + * This factory hides the details of database creation and provides a clean interface + * for obtaining repository instances. + */ +object BookmarksRepositoryFactory { + /** + * Creates a new instance of BookmarksRepository. + * The repository is backed by a SQLite database created using the provided driver factory. 
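+ * Note that each call constructs a fresh driver and database instance, so callers will
+ * typically want to retain and reuse a single repository rather than recreating it per use.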
+ * + * @param driverFactory The driver factory to use for database creation + * @return BookmarksRepository A new repository instance + */ + fun createRepository(driverFactory: DriverFactory): BookmarksRepository { + val database = makeDatabase(driverFactory) + return BookmarksRepositoryImpl(database) + } + + /** + * Creates a new instance of BookmarksSynchronizationRepository. + * This repository provides synchronization-specific operations for bookmarks. + * + * @param driverFactory The driver factory to use for database creation + * @return BookmarksSynchronizationRepository A new synchronization repository instance + */ + fun createSynchronizationRepository(driverFactory: DriverFactory): BookmarksSynchronizationRepository { + val database = makeDatabase(driverFactory) + return BookmarksRepositoryImpl(database) + } +} \ No newline at end of file diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/bookmark/extension/BookmarkQueriesExtensions.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/bookmark/extension/BookmarkQueriesExtensions.kt new file mode 100644 index 00000000..fdacb016 --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/bookmark/extension/BookmarkQueriesExtensions.kt @@ -0,0 +1,45 @@ +@file:OptIn(ExperimentalTime::class) + +package com.quran.shared.persistence.repository.bookmark.extension + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.persistence.model.Bookmark +import com.quran.shared.persistence.model.DatabaseAyahBookmark +import com.quran.shared.persistence.model.DatabasePageBookmark +import com.quran.shared.persistence.util.toPlatform +import kotlin.time.ExperimentalTime +import kotlin.time.Instant + +internal fun DatabasePageBookmark.toBookmark(): Bookmark.PageBookmark { + return Bookmark.PageBookmark( + page = page.toInt(), + lastUpdated = Instant.fromEpochMilliseconds(created_at).toPlatform(), + localId = local_id.toString() + ) +} + +internal fun DatabasePageBookmark.toBookmarkMutation(): LocalModelMutation = + LocalModelMutation( + mutation = if (deleted == 0L) Mutation.CREATED else Mutation.DELETED, + model = toBookmark(), + remoteID = remote_id, + localID = local_id.toString() + ) + +internal fun DatabaseAyahBookmark.toBookmark(): Bookmark.AyahBookmark { + return Bookmark.AyahBookmark( + sura = sura.toInt(), + ayah = ayah.toInt(), + lastUpdated = Instant.fromEpochMilliseconds(modified_at).toPlatform(), + localId = local_id.toString() + ) +} + +internal fun DatabaseAyahBookmark.toBookmarkMutation(): LocalModelMutation = + LocalModelMutation( + mutation = if (deleted == 0L) Mutation.CREATED else Mutation.DELETED, + model = toBookmark(), + remoteID = remote_id, + localID = local_id.toString() + ) \ No newline at end of file diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/bookmark/repository/BookmarksRepository.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/bookmark/repository/BookmarksRepository.kt new file mode 100644 index 00000000..5dcc63e1 --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/bookmark/repository/BookmarksRepository.kt @@ -0,0 +1,65 @@ +package com.quran.shared.persistence.repository.bookmark.repository + +import com.quran.shared.persistence.model.Bookmark +import com.quran.shared.persistence.input.BookmarkMigration +import 
com.rickclephas.kmp.nativecoroutines.NativeCoroutines +import kotlinx.coroutines.flow.Flow + +interface BookmarksRepository { + /** + * Fetch and returns all bookmarks. + * + * @return List the current list of bookmarks + */ + @NativeCoroutines + suspend fun getAllBookmarks(): List + + /** + * Returns a flow of all bookmarks for observation. + */ + @NativeCoroutines + fun getBookmarksFlow(): Flow> + + /** + * Add a bookmark for a specific page. + * + * @return the [Bookmark.PageBookmark] + */ + @NativeCoroutines + suspend fun addBookmark(page: Int): Bookmark.PageBookmark + + /** + * Add a bookmark for a given sura and ayah. + * + * @return the [Bookmark.AyahBookmark] + */ + @NativeCoroutines + suspend fun addBookmark(sura: Int, ayah: Int): Bookmark + + /** + * Delete a bookmark for a specific page. + * + * @return a boolean denoting success + */ + @NativeCoroutines + suspend fun deleteBookmark(page: Int): Boolean + + /** + * Delete a bookmark for a sura and ayah. + * + * @return a boolean denoting success + */ + @NativeCoroutines + suspend fun deleteBookmark(sura: Int, ayah: Int): Boolean + + /** + * Migrates existing bookmarks to the new storage format. + * This method should only be called once during app initialization, after + * bookmarks are added and before any changes by the user are handled. + * + * @param bookmarks List of bookmarks to migrate + * @throws IllegalStateException if either bookmarks or mutations tables are not empty + */ + @NativeCoroutines + suspend fun migrateBookmarks(bookmarks: List) +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/bookmark/repository/BookmarksRepositoryImpl.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/bookmark/repository/BookmarksRepositoryImpl.kt new file mode 100644 index 00000000..20792a4b --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/bookmark/repository/BookmarksRepositoryImpl.kt @@ -0,0 +1,269 @@ +package com.quran.shared.persistence.repository.bookmark.repository + +import co.touchlab.kermit.Logger +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.persistence.QuranDatabase +import com.quran.shared.persistence.input.BookmarkMigration +import com.quran.shared.persistence.input.RemoteBookmark +import com.quran.shared.persistence.model.Bookmark +import com.quran.shared.persistence.repository.bookmark.extension.toBookmark +import com.quran.shared.persistence.repository.bookmark.extension.toBookmarkMutation +import com.quran.shared.persistence.util.fromPlatform +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.IO +import kotlinx.coroutines.flow.Flow +import kotlinx.coroutines.flow.combine +import kotlinx.coroutines.withContext +import app.cash.sqldelight.coroutines.asFlow +import app.cash.sqldelight.coroutines.mapToList + +class BookmarksRepositoryImpl( + private val database: QuranDatabase +) : BookmarksRepository, BookmarksSynchronizationRepository { + + private val logger = Logger.withTag("PageBookmarksRepository") + private val pageBookmarkQueries = lazy { database.page_bookmarksQueries } + private val ayahBookmarkQueries = lazy { database.ayah_bookmarksQueries } + + override suspend fun getAllBookmarks(): List { + return withContext(Dispatchers.IO) { + val pageBookmarks = pageBookmarkQueries.value.getBookmarks() + .executeAsList() + .map { it.toBookmark() } + val ayahBookmarks = 
ayahBookmarkQueries.value.getBookmarks() + .executeAsList() + .map { it.toBookmark() } + + // TODO - sort options - ex sort by location, by date added (default) + sortBookmarks(pageBookmarks + ayahBookmarks) + } + } + + override fun getBookmarksFlow(): Flow> { + val pageBookmarksFlow = pageBookmarkQueries.value.getBookmarks() + .asFlow() + .mapToList(Dispatchers.IO) + + val ayahBookmarksFlow = ayahBookmarkQueries.value.getBookmarks() + .asFlow() + .mapToList(Dispatchers.IO) + + return combine(pageBookmarksFlow, ayahBookmarksFlow) { pageList, ayahList -> + val pageBookmarks = pageList.map { it.toBookmark() } + val ayahBookmarks = ayahList.map { it.toBookmark() } + sortBookmarks(pageBookmarks + ayahBookmarks) + } + } + + private fun sortBookmarks(bookmarks: List): List { + return bookmarks.sortedByDescending { bookmark -> + when (bookmark) { + is Bookmark.AyahBookmark -> bookmark.lastUpdated.fromPlatform().toEpochMilliseconds() + is Bookmark.PageBookmark -> bookmark.lastUpdated.fromPlatform().toEpochMilliseconds() + } + } + } + + override suspend fun addBookmark(page: Int): Bookmark.PageBookmark { + logger.i { "Adding page bookmark for page $page" } + return withContext(Dispatchers.IO) { + pageBookmarkQueries.value.addNewBookmark(page.toLong()) + val record = pageBookmarkQueries.value.getBookmarkForPage(page.toLong()) + .executeAsOneOrNull() + requireNotNull(record) { "Expected page bookmark for page $page after insert." } + record.toBookmark() + } + } + + override suspend fun addBookmark(sura: Int, ayah: Int): Bookmark.AyahBookmark { + logger.i { "Adding ayah bookmark for $sura:$ayah" } + return withContext(Dispatchers.IO) { + val ayahId = getAyahId(sura, ayah) + ayahBookmarkQueries.value.addNewBookmark(ayahId.toLong(), sura.toLong(), ayah.toLong()) + val record = ayahBookmarkQueries.value.getBookmarkForAyah(sura.toLong(), ayah.toLong()) + .executeAsOneOrNull() + requireNotNull(record) { "Expected ayah bookmark for $sura:$ayah after insert." } + record.toBookmark() + } + } + + override suspend fun deleteBookmark(page: Int): Boolean { + logger.i { "Deleting page bookmark for page $page" } + withContext(Dispatchers.IO) { + pageBookmarkQueries.value.deleteBookmark(page.toLong()) + } + return true + } + + override suspend fun deleteBookmark(sura: Int, ayah: Int): Boolean { + logger.i { "Deleting page bookmark for $sura:$ayah" } + withContext(Dispatchers.IO) { + ayahBookmarkQueries.value.deleteBookmark(sura.toLong(), ayah.toLong()) + } + return true + } + + override suspend fun migrateBookmarks(bookmarks: List) { + withContext(Dispatchers.IO) { + // Check if the bookmarks table is empty + val existingBookmarks = pageBookmarkQueries.value.getBookmarks().executeAsList() + if (existingBookmarks.isNotEmpty()) { + throw IllegalStateException("Cannot migrate bookmarks: bookmarks table is not empty. 
Found ${existingBookmarks.size} bookmarks.") + } + + database.transaction { + bookmarks.forEach { bookmark -> + when (bookmark) { + is BookmarkMigration.Ayah -> { + val ayahId = getAyahId(bookmark.sura, bookmark.ayah) + ayahBookmarkQueries.value.addNewBookmark( + ayahId.toLong(), + bookmark.sura.toLong(), + bookmark.ayah.toLong() + ) + } + is BookmarkMigration.Page -> + pageBookmarkQueries.value.addNewBookmark(bookmark.page.toLong()) + } + } + } + } + } + + override suspend fun fetchMutatedBookmarks(): List> { + return withContext(Dispatchers.IO) { + val pageMutations = pageBookmarkQueries.value.getUnsyncedBookmarks() + .executeAsList() + .map { it.toBookmarkMutation() } + val ayahMutations = ayahBookmarkQueries.value.getUnsyncedBookmarks() + .executeAsList() + .map { it.toBookmarkMutation() } + pageMutations + ayahMutations + } + } + + override suspend fun applyRemoteChanges( + updatesToPersist: List>, + localMutationsToClear: List> + ) { + logger.i { "Applying remote changes with ${updatesToPersist.size} updates to persist and ${localMutationsToClear.size} local mutations to clear" } + return withContext(Dispatchers.IO) { + database.transaction { + val committedCreationKeys = updatesToPersist + .filter { it.mutation == Mutation.CREATED } + .map { it.model.key() } + .toSet() + + // Clear local mutations + // TODO: Should check that passed local IDs are valid + localMutationsToClear.forEach { local -> + when (local.mutation) { + Mutation.DELETED -> clearLocalMutation(local) + Mutation.CREATED, Mutation.MODIFIED -> { + val localKey = local.model.key() + if (!committedCreationKeys.contains(localKey)) { + clearLocalMutation(local) + } + } + } + } + + // Apply remote updates + updatesToPersist.forEach { remote -> + when (remote.mutation) { + Mutation.CREATED -> applyRemoteBookmarkAddition(remote) + Mutation.DELETED -> applyRemoteBookmarkDeletion(remote) + Mutation.MODIFIED -> { + throw RuntimeException("Unexpected MODIFIED remote modification for page bookmarks.") + } + } + } + } + } + } + + private fun applyRemoteBookmarkAddition(remote: RemoteModelMutation) { + when (val model = remote.model) { + is RemoteBookmark.Ayah -> { + val ayahId = getAyahId(model.sura, model.ayah) + val updatedAt = model.lastUpdated.fromPlatform().toEpochMilliseconds() + ayahBookmarkQueries.value.persistRemoteBookmark( + remote_id = remote.remoteID, + ayah_id = ayahId.toLong(), + sura = model.sura.toLong(), + ayah = model.ayah.toLong(), + created_at = updatedAt, + modified_at = updatedAt + ) + } + is RemoteBookmark.Page -> + pageBookmarkQueries.value.persistRemoteBookmark( + remote_id = remote.remoteID, + page = model.page.toLong(), + created_at = model.lastUpdated.fromPlatform().toEpochMilliseconds(), + modified_at = model.lastUpdated.fromPlatform().toEpochMilliseconds() + ) + } + } + + private fun clearLocalMutation(local: LocalModelMutation) { + when (local.model) { + is Bookmark.AyahBookmark -> + ayahBookmarkQueries.value.clearLocalMutationFor(id = local.localID.toLong()) + is Bookmark.PageBookmark -> + pageBookmarkQueries.value.clearLocalMutationFor(id = local.localID.toLong()) + } + } + + private fun applyRemoteBookmarkDeletion(remote: RemoteModelMutation) { + when (remote.model) { + is RemoteBookmark.Ayah -> + ayahBookmarkQueries.value.hardDeleteBookmarkFor(remoteID = remote.remoteID) + is RemoteBookmark.Page -> + pageBookmarkQueries.value.hardDeleteBookmarkFor(remoteID = remote.remoteID) + } + } + + private fun getAyahId(sura: Int, ayah: Int): Int { + // TODO - fix this + return 1 + } + + override suspend fun 
remoteResourcesExist(remoteIDs: List): Map { + if (remoteIDs.isEmpty()) { + return emptyMap() + } + + return withContext(Dispatchers.IO) { + val pageIDs = pageBookmarkQueries.value.checkRemoteIDsExistence(remoteIDs) + .executeAsList() + .map { it.remote_id } + val ayahIDs = ayahBookmarkQueries.value.checkRemoteIDsExistence(remoteIDs) + .executeAsList() + .map { it.remote_id } + val existentIDs = (pageIDs + ayahIDs).toSet() + + remoteIDs.map { Pair(it, existentIDs.contains(it)) } + .associateBy { it.first } + .mapValues { it.value.second } + } + } +} + +private data class BookmarkKey(val type: String, val first: Int, val second: Int?) + +private fun Bookmark.key(): BookmarkKey { + return when (this) { + is Bookmark.PageBookmark -> BookmarkKey("PAGE", page, null) + is Bookmark.AyahBookmark -> BookmarkKey("AYAH", sura, ayah) + } +} + +private fun RemoteBookmark.key(): BookmarkKey { + return when (this) { + is RemoteBookmark.Page -> BookmarkKey("PAGE", page, null) + is RemoteBookmark.Ayah -> BookmarkKey("AYAH", sura, ayah) + } +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/bookmark/repository/BookmarksSynchronizationRepository.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/bookmark/repository/BookmarksSynchronizationRepository.kt new file mode 100644 index 00000000..414d3f20 --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/bookmark/repository/BookmarksSynchronizationRepository.kt @@ -0,0 +1,32 @@ +package com.quran.shared.persistence.repository.bookmark.repository + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.persistence.input.RemoteBookmark +import com.quran.shared.persistence.model.Bookmark + +interface BookmarksSynchronizationRepository { + /** + * Returns a list of bookmarks that have been mutated locally (created or deleted) + * and need to be synchronized with the remote server. + */ + suspend fun fetchMutatedBookmarks(): List> + + /** + * Persists the remote state of bookmarks after a successful synchronization operation. + * This method should be called after the remote server has confirmed the changes. + * + * @param updatesToPersist List of remote bookmark inputs with their remote IDs and mutation + * states to be persisted. These must have a remoteID setup. + * @param localMutationsToClear List of local mutations to be cleared. An item of this list + * denotes either a mutation that was committed remotely, or a mutation that overridden. If it + * was committed, a counterpart is expected in `updatesToPersists` to persist it as a remote + * bookmark. These must be input from the list returned by `fetchMutatedBookmarks`. 
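+     *
+     * A minimal sketch of the intended call sequence, assuming a hypothetical `SyncClient`
+     * that pushes the local mutations and returns the mutations committed by the server
+     * (the client and its `pushBookmarks` call are not part of this module):
+     * ```
+     * suspend fun syncBookmarks(syncRepository: BookmarksSynchronizationRepository, syncClient: SyncClient) {
+     *     val localMutations = syncRepository.fetchMutatedBookmarks()
+     *     // hypothetical network call; returns List<RemoteModelMutation<RemoteBookmark>>
+     *     val committed = syncClient.pushBookmarks(localMutations)
+     *     syncRepository.applyRemoteChanges(
+     *         updatesToPersist = committed,
+     *         localMutationsToClear = localMutations
+     *     )
+     * }
+     * ```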
+ */ + suspend fun applyRemoteChanges( + updatesToPersist: List>, + localMutationsToClear: List> + ) + + suspend fun remoteResourcesExist(remoteIDs: List): Map +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collection/CollectionsRepositoryFactory.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collection/CollectionsRepositoryFactory.kt new file mode 100644 index 00000000..05759d67 --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collection/CollectionsRepositoryFactory.kt @@ -0,0 +1,36 @@ +package com.quran.shared.persistence.repository.collection + +import com.quran.shared.persistence.DriverFactory +import com.quran.shared.persistence.makeDatabase +import com.quran.shared.persistence.repository.collection.repository.CollectionsRepository +import com.quran.shared.persistence.repository.collection.repository.CollectionsRepositoryImpl +import com.quran.shared.persistence.repository.collection.repository.CollectionsSynchronizationRepository + +/** + * Factory for creating CollectionsRepository instances. + * This factory hides the details of database creation and provides a clean interface + * for obtaining repository instances. + */ +object CollectionsRepositoryFactory { + /** + * Creates a new instance of CollectionsRepository. + * + * @param driverFactory The driver factory to use for database creation + * @return CollectionsRepository A new repository instance + */ + fun createRepository(driverFactory: DriverFactory): CollectionsRepository { + val database = makeDatabase(driverFactory) + return CollectionsRepositoryImpl(database) + } + + /** + * Creates a new instance of CollectionsSynchronizationRepository. + * + * @param driverFactory The driver factory to use for database creation + * @return CollectionsSynchronizationRepository A new synchronization repository instance + */ + fun createSynchronizationRepository(driverFactory: DriverFactory): CollectionsSynchronizationRepository { + val database = makeDatabase(driverFactory) + return CollectionsRepositoryImpl(database) + } +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collection/extension/CollectionQueriesExtensions.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collection/extension/CollectionQueriesExtensions.kt new file mode 100644 index 00000000..eae8b240 --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collection/extension/CollectionQueriesExtensions.kt @@ -0,0 +1,34 @@ +@file:OptIn(ExperimentalTime::class) + +package com.quran.shared.persistence.repository.collection.extension + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.persistence.model.DatabaseCollection +import com.quran.shared.persistence.model.Collection as PersistenceCollection +import com.quran.shared.persistence.util.toPlatform +import kotlin.time.ExperimentalTime +import kotlin.time.Instant + +internal fun DatabaseCollection.toCollection(): PersistenceCollection { + return PersistenceCollection( + name = name, + lastUpdated = Instant.fromEpochMilliseconds(modified_at).toPlatform(), + localId = local_id.toString() + ) +} + +internal fun DatabaseCollection.toCollectionMutation(): LocalModelMutation { + val mutation = when { + deleted == 1L -> Mutation.DELETED + remote_id == null -> Mutation.CREATED + else -> Mutation.MODIFIED + } + + return LocalModelMutation( 
+ mutation = mutation, + model = toCollection(), + remoteID = remote_id, + localID = local_id.toString() + ) +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collection/repository/CollectionsRepository.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collection/repository/CollectionsRepository.kt new file mode 100644 index 00000000..3d990d18 --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collection/repository/CollectionsRepository.kt @@ -0,0 +1,27 @@ +package com.quran.shared.persistence.repository.collection.repository + +import com.quran.shared.persistence.model.Collection + +interface CollectionsRepository { + /** + * Fetch and returns all collections. + * + * @return List the current list of collections + */ + suspend fun getAllCollections(): List + + /** + * Add a collection with the provided name. + */ + suspend fun addCollection(name: String): Collection + + /** + * Update the name of a collection identified by its local ID. + */ + suspend fun updateCollection(localId: String, name: String): Collection + + /** + * Delete a collection identified by its local ID. + */ + suspend fun deleteCollection(localId: String): Boolean +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collection/repository/CollectionsRepositoryImpl.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collection/repository/CollectionsRepositoryImpl.kt new file mode 100644 index 00000000..c1067488 --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collection/repository/CollectionsRepositoryImpl.kt @@ -0,0 +1,151 @@ +package com.quran.shared.persistence.repository.collection.repository + +import co.touchlab.kermit.Logger +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.persistence.QuranDatabase +import com.quran.shared.persistence.input.RemoteCollection +import com.quran.shared.persistence.model.Collection +import com.quran.shared.persistence.repository.collection.extension.toCollection +import com.quran.shared.persistence.repository.collection.extension.toCollectionMutation +import com.quran.shared.persistence.util.fromPlatform +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.IO +import kotlinx.coroutines.withContext + +class CollectionsRepositoryImpl( + private val database: QuranDatabase +) : CollectionsRepository, CollectionsSynchronizationRepository { + + private val logger = Logger.withTag("CollectionsRepository") + private val collectionQueries = lazy { database.collectionsQueries } + + override suspend fun getAllCollections(): List { + return withContext(Dispatchers.IO) { + collectionQueries.value.getCollections() + .executeAsList() + .map { it.toCollection() } + } + } + + override suspend fun addCollection(name: String): Collection { + logger.i { "Adding collection with name=$name" } + return withContext(Dispatchers.IO) { + collectionQueries.value.addNewCollection(name) + val record = collectionQueries.value.getCollectionByName(name) + .executeAsOneOrNull() + requireNotNull(record) { "Expected collection for name=$name after insert." 
} + record.toCollection() + } + } + + override suspend fun updateCollection(localId: String, name: String): Collection { + logger.i { "Updating collection localId=$localId with name=$name" } + return withContext(Dispatchers.IO) { + collectionQueries.value.updateCollectionName(name = name, id = localId.toLong()) + val record = collectionQueries.value.getCollectionByLocalId(localId.toLong()) + .executeAsOneOrNull() + requireNotNull(record) { "Expected collection localId=$localId after update." } + record.toCollection() + } + } + + override suspend fun deleteCollection(localId: String): Boolean { + logger.i { "Deleting collection localId=$localId" } + withContext(Dispatchers.IO) { + collectionQueries.value.deleteCollection(id = localId.toLong()) + } + return true + } + + override suspend fun fetchMutatedCollections(): List> { + return withContext(Dispatchers.IO) { + collectionQueries.value.getUnsyncedCollections() + .executeAsList() + .map { it.toCollectionMutation() } + } + } + + override suspend fun applyRemoteChanges( + updatesToPersist: List>, + localMutationsToClear: List> + ) { + logger.i { + "Applying collection remote changes with ${updatesToPersist.size} updates " + + "and clearing ${localMutationsToClear.size} local mutations" + } + return withContext(Dispatchers.IO) { + database.transaction { + localMutationsToClear.forEach { local -> + collectionQueries.value.clearLocalMutationFor(id = local.localID.toLong()) + } + + updatesToPersist.forEach { remote -> + when (remote.mutation) { + Mutation.CREATED, Mutation.MODIFIED -> applyRemoteCollectionUpsert(remote) + Mutation.DELETED -> applyRemoteCollectionDeletion(remote) + } + } + } + } + } + + private fun applyRemoteCollectionUpsert(remote: RemoteModelMutation) { + val name = remote.model.name + if (name.isNullOrEmpty()) { + logger.w { "Skipping remote collection mutation without name: remoteId=${remote.remoteID}" } + return + } + + val updatedAt = remote.model.lastUpdated.fromPlatform().toEpochMilliseconds() + val existingByRemote = collectionQueries.value.getCollectionByRemoteId(remote.remoteID) + .executeAsOneOrNull() + + if (existingByRemote != null) { + collectionQueries.value.updateRemoteCollection( + remote_id = remote.remoteID, + name = name, + modified_at = updatedAt + ) + return + } + + val existingByName = collectionQueries.value.getCollectionByName(name) + .executeAsOneOrNull() + if (existingByName != null) { + collectionQueries.value.updateRemoteCollectionByLocalId( + local_id = existingByName.local_id, + remote_id = remote.remoteID, + name = name, + modified_at = updatedAt + ) + } else { + collectionQueries.value.persistRemoteCollection( + remote_id = remote.remoteID, + name = name, + created_at = updatedAt, + modified_at = updatedAt + ) + } + } + + private fun applyRemoteCollectionDeletion(remote: RemoteModelMutation) { + collectionQueries.value.deleteRemoteCollection(remote_id = remote.remoteID) + } + + override suspend fun remoteResourcesExist(remoteIDs: List): Map { + if (remoteIDs.isEmpty()) { + return emptyMap() + } + + return withContext(Dispatchers.IO) { + val existentIDs = collectionQueries.value.checkRemoteIDsExistence(remoteIDs) + .executeAsList() + .map { it.remote_id } + .toSet() + + remoteIDs.associateWith { existentIDs.contains(it) } + } + } +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collection/repository/CollectionsSynchronizationRepository.kt 
b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collection/repository/CollectionsSynchronizationRepository.kt new file mode 100644 index 00000000..1faddfaf --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collection/repository/CollectionsSynchronizationRepository.kt @@ -0,0 +1,32 @@ +package com.quran.shared.persistence.repository.collection.repository + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.persistence.input.RemoteCollection +import com.quran.shared.persistence.model.Collection + +interface CollectionsSynchronizationRepository { + /** + * Returns a list of collections that have been mutated locally (created, modified, or deleted) + * and need to be synchronized with the remote server. + */ + suspend fun fetchMutatedCollections(): List> + + /** + * Persists the remote state of collections after a successful synchronization operation. + * This method should be called after the remote server has confirmed the changes. + * + * @param updatesToPersist List of remote collection inputs with their remote IDs and mutation + * states to be persisted. These must have a remoteID setup. + * @param localMutationsToClear List of local mutations to be cleared. An item of this list + * denotes either a mutation that was committed remotely, or a mutation that overridden. If it + * was committed, a counterpart is expected in `updatesToPersist` to persist it as a remote + * collection. These must be input from the list returned by `fetchMutatedCollections`. + */ + suspend fun applyRemoteChanges( + updatesToPersist: List>, + localMutationsToClear: List> + ) + + suspend fun remoteResourcesExist(remoteIDs: List): Map +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collectionbookmark/CollectionBookmarksRepositoryFactory.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collectionbookmark/CollectionBookmarksRepositoryFactory.kt new file mode 100644 index 00000000..69c5613f --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collectionbookmark/CollectionBookmarksRepositoryFactory.kt @@ -0,0 +1,28 @@ +package com.quran.shared.persistence.repository.collectionbookmark + +import com.quran.shared.persistence.DriverFactory +import com.quran.shared.persistence.makeDatabase +import com.quran.shared.persistence.repository.collectionbookmark.repository.CollectionBookmarksRepository +import com.quran.shared.persistence.repository.collectionbookmark.repository.CollectionBookmarksRepositoryImpl +import com.quran.shared.persistence.repository.collectionbookmark.repository.CollectionBookmarksSynchronizationRepository + +/** + * Factory for creating CollectionBookmarksRepository instances. + */ +object CollectionBookmarksRepositoryFactory { + /** + * Creates a new instance of CollectionBookmarksRepository. + */ + fun createRepository(driverFactory: DriverFactory): CollectionBookmarksRepository { + val database = makeDatabase(driverFactory) + return CollectionBookmarksRepositoryImpl(database) + } + + /** + * Creates a new instance of CollectionBookmarksSynchronizationRepository. 
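+     *
+     * A minimal usage sketch; the `driverFactory` instance is assumed to come from the
+     * platform-specific entry point (for example, the Android or iOS host app):
+     * ```
+     * val repository = CollectionBookmarksRepositoryFactory.createRepository(driverFactory)
+     * val syncRepository =
+     *     CollectionBookmarksRepositoryFactory.createSynchronizationRepository(driverFactory)
+     * ```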
+ */ + fun createSynchronizationRepository(driverFactory: DriverFactory): CollectionBookmarksSynchronizationRepository { + val database = makeDatabase(driverFactory) + return CollectionBookmarksRepositoryImpl(database) + } +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collectionbookmark/repository/CollectionBookmarksRepository.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collectionbookmark/repository/CollectionBookmarksRepository.kt new file mode 100644 index 00000000..afcc5b27 --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collectionbookmark/repository/CollectionBookmarksRepository.kt @@ -0,0 +1,21 @@ +package com.quran.shared.persistence.repository.collectionbookmark.repository + +import com.quran.shared.persistence.model.Bookmark +import com.quran.shared.persistence.model.CollectionBookmark + +interface CollectionBookmarksRepository { + /** + * Returns all bookmarks linked to a collection. + */ + suspend fun getBookmarksForCollection(collectionLocalId: String): List + + /** + * Adds a bookmark to a collection locally. + */ + suspend fun addBookmarkToCollection(collectionLocalId: String, bookmark: Bookmark): CollectionBookmark + + /** + * Removes a bookmark from a collection locally. + */ + suspend fun removeBookmarkFromCollection(collectionLocalId: String, bookmark: Bookmark): Boolean +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collectionbookmark/repository/CollectionBookmarksRepositoryImpl.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collectionbookmark/repository/CollectionBookmarksRepositoryImpl.kt new file mode 100644 index 00000000..f23bb4f8 --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collectionbookmark/repository/CollectionBookmarksRepositoryImpl.kt @@ -0,0 +1,342 @@ +package com.quran.shared.persistence.repository.collectionbookmark.repository + +import co.touchlab.kermit.Logger +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.persistence.QuranDatabase +import com.quran.shared.persistence.input.RemoteCollectionBookmark +import com.quran.shared.persistence.model.Bookmark +import com.quran.shared.persistence.model.CollectionBookmark +import com.quran.shared.persistence.model.DatabaseBookmarkCollection +import com.quran.shared.persistence.util.fromPlatform +import com.quran.shared.persistence.util.toPlatform +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.IO +import kotlinx.coroutines.withContext +import kotlin.time.Instant + +class CollectionBookmarksRepositoryImpl( + private val database: QuranDatabase +) : CollectionBookmarksRepository, CollectionBookmarksSynchronizationRepository { + + private val logger = Logger.withTag("CollectionBookmarksRepository") + private val bookmarkCollectionQueries = lazy { database.bookmark_collectionsQueries } + private val pageBookmarkQueries = lazy { database.page_bookmarksQueries } + private val ayahBookmarkQueries = lazy { database.ayah_bookmarksQueries } + private val collectionQueries = lazy { database.collectionsQueries } + + override suspend fun getBookmarksForCollection(collectionLocalId: String): List { + return withContext(Dispatchers.IO) { + bookmarkCollectionQueries.value + .getCollectionBookmarksForCollectionWithDetails(collection_local_id = 
collectionLocalId.toLong()) + .executeAsList() + .mapNotNull { record -> + toCollectionBookmark( + bookmarkType = record.bookmark_type, + bookmarkLocalId = record.bookmark_local_id, + page = record.page, + sura = record.sura, + ayah = record.ayah, + collectionLocalId = record.collection_local_id, + collectionRemoteId = record.collection_remote_id, + modifiedAt = record.modified_at, + localId = record.local_id, + logMissingBookmark = false + ) + } + } + } + + override suspend fun addBookmarkToCollection(collectionLocalId: String, bookmark: Bookmark): CollectionBookmark { + return withContext(Dispatchers.IO) { + val bookmarkType = bookmark.toCollectionBookmarkType() + bookmarkCollectionQueries.value.addBookmarkToCollection( + bookmark_local_id = bookmark.localId, + bookmark_type = bookmarkType, + collection_local_id = collectionLocalId.toLong() + ) + val record = bookmarkCollectionQueries.value + .getCollectionBookmarkFor(bookmark.localId, collectionLocalId.toLong()) + .executeAsOneOrNull() + requireNotNull(record) { + "Expected collection bookmark for collection=$collectionLocalId and bookmark=${bookmark.localId}." + } + val collection = collectionQueries.value + .getCollectionByLocalId(collectionLocalId.toLong()) + .executeAsOneOrNull() + record.toCollectionBookmark( + collectionRemoteId = collection?.remote_id, + bookmark = bookmark + ) + } + } + + override suspend fun removeBookmarkFromCollection(collectionLocalId: String, bookmark: Bookmark): Boolean { + return withContext(Dispatchers.IO) { + bookmarkCollectionQueries.value.deleteBookmarkFromCollection( + bookmark_local_id = bookmark.localId, + collection_local_id = collectionLocalId.toLong() + ) + true + } + } + + override suspend fun fetchMutatedCollectionBookmarks(): List> { + return withContext(Dispatchers.IO) { + bookmarkCollectionQueries.value.getUnsyncedCollectionBookmarksWithDetails() + .executeAsList() + .mapNotNull { record -> + val collectionRemoteId = record.collection_remote_id + if (collectionRemoteId.isNullOrEmpty()) { + logger.w { "Skipping collection bookmark without remote collection ID: localId=${record.local_id}" } + return@mapNotNull null + } + val mutation = if (record.deleted == 1L) Mutation.DELETED else Mutation.CREATED + val collectionBookmark = toCollectionBookmark( + bookmarkType = record.bookmark_type, + bookmarkLocalId = record.bookmark_local_id, + page = record.page, + sura = record.sura, + ayah = record.ayah, + collectionLocalId = record.collection_local_id, + collectionRemoteId = collectionRemoteId, + modifiedAt = record.modified_at, + localId = record.local_id, + logMissingBookmark = true + ) ?: return@mapNotNull null + LocalModelMutation( + mutation = mutation, + model = collectionBookmark, + remoteID = record.remote_id, + localID = record.local_id.toString() + ) + } + } + } + + override suspend fun applyRemoteChanges( + updatesToPersist: List>, + localMutationsToClear: List> + ) { + logger.i { + "Applying remote collection bookmark changes with " + + "${updatesToPersist.size} updates to persist and ${localMutationsToClear.size} local mutations to clear" + } + return withContext(Dispatchers.IO) { + database.transaction { + localMutationsToClear.forEach { local -> + if (local.mutation == Mutation.DELETED) { + bookmarkCollectionQueries.value.clearLocalMutationFor(id = local.localID.toLong()) + } + } + + updatesToPersist.forEach { remote -> + when (remote.mutation) { + Mutation.CREATED -> applyRemoteCollectionBookmarkUpsert(remote) + Mutation.DELETED -> applyRemoteCollectionBookmarkDeletion(remote) + 
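+                        // Collection-bookmark links are only ever created or deleted, both
+                        // locally and remotely; a MODIFIED mutation from the server is
+                        // unexpected and is rejected below.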
Mutation.MODIFIED -> + throw RuntimeException("Unexpected MODIFIED remote modification for collection bookmarks.") + } + } + } + } + } + + private fun applyRemoteCollectionBookmarkUpsert(remote: RemoteModelMutation) { + val collection = collectionQueries.value + .getCollectionByRemoteId(remote.model.collectionId) + .executeAsOneOrNull() + if (collection == null) { + logger.w { "Skipping remote collection bookmark without local collection: remoteId=${remote.model.collectionId}" } + return + } + val bookmarkLocalId = resolveBookmarkLocalId(remote.model, createIfMissing = true) + if (bookmarkLocalId == null) { + logger.w { "Skipping remote collection bookmark without local bookmark: remoteId=${remote.remoteID}" } + return + } + val (bookmarkType, updatedAt) = remote.model.toBookmarkTypeWithTimestamp() + bookmarkCollectionQueries.value.persistRemoteBookmarkCollection( + remote_id = remote.remoteID, + bookmark_local_id = bookmarkLocalId.toString(), + bookmark_type = bookmarkType, + collection_local_id = collection.local_id, + created_at = updatedAt, + modified_at = updatedAt + ) + } + + private fun applyRemoteCollectionBookmarkDeletion(remote: RemoteModelMutation) { + val collection = collectionQueries.value + .getCollectionByRemoteId(remote.model.collectionId) + .executeAsOneOrNull() + val bookmarkLocalId = resolveBookmarkLocalId(remote.model, createIfMissing = false) + if (collection != null && bookmarkLocalId != null) { + bookmarkCollectionQueries.value.deleteRemoteBookmarkCollection( + bookmark_local_id = bookmarkLocalId.toString(), + collection_local_id = collection.local_id + ) + return + } + bookmarkCollectionQueries.value.deleteRemoteBookmarkCollectionByRemoteId(remote_id = remote.remoteID) + } + + private fun resolveBookmarkLocalId( + bookmark: RemoteCollectionBookmark, + createIfMissing: Boolean + ): Long? { + return when (bookmark) { + is RemoteCollectionBookmark.Page -> { + val page = bookmark.page.toLong() + if (createIfMissing) { + pageBookmarkQueries.value.insertBookmarkIfMissing(page) + } + pageBookmarkQueries.value.getBookmarkForPage(page) + .executeAsOneOrNull() + ?.local_id + } + is RemoteCollectionBookmark.Ayah -> { + val sura = bookmark.sura.toLong() + val ayah = bookmark.ayah.toLong() + if (createIfMissing) { + val ayahId = getAyahId(bookmark.sura, bookmark.ayah) + ayahBookmarkQueries.value.insertBookmarkIfMissing( + ayah_id = ayahId.toLong(), + sura = sura, + ayah = ayah + ) + } + ayahBookmarkQueries.value.getBookmarkForAyah(sura, ayah) + .executeAsOneOrNull() + ?.local_id + } + } + } + + private fun toCollectionBookmark( + bookmarkType: String, + bookmarkLocalId: String, + page: Long?, + sura: Long?, + ayah: Long?, + collectionLocalId: Long, + collectionRemoteId: String?, + modifiedAt: Long, + localId: Long, + logMissingBookmark: Boolean + ): CollectionBookmark? 
{ + if (bookmarkLocalId.toLongOrNull() == null) { + logger.w { "Skipping collection bookmark with non-numeric bookmark id: $bookmarkLocalId" } + return null + } + val updatedAt = Instant.fromEpochMilliseconds(modifiedAt).toPlatform() + return when (bookmarkType.uppercase()) { + "PAGE" -> { + val pageValue = page?.toInt() + if (pageValue == null) { + if (logMissingBookmark) { + logger.w { "Skipping collection bookmark without local bookmark: localId=$localId" } + } + null + } else { + CollectionBookmark.PageBookmark( + collectionLocalId = collectionLocalId.toString(), + collectionRemoteId = collectionRemoteId, + bookmarkLocalId = bookmarkLocalId, + page = pageValue, + lastUpdated = updatedAt, + localId = localId.toString() + ) + } + } + "AYAH" -> { + val suraValue = sura?.toInt() + val ayahValue = ayah?.toInt() + if (suraValue == null || ayahValue == null) { + if (logMissingBookmark) { + logger.w { "Skipping collection bookmark without local bookmark: localId=$localId" } + } + null + } else { + CollectionBookmark.AyahBookmark( + collectionLocalId = collectionLocalId.toString(), + collectionRemoteId = collectionRemoteId, + bookmarkLocalId = bookmarkLocalId, + sura = suraValue, + ayah = ayahValue, + lastUpdated = updatedAt, + localId = localId.toString() + ) + } + } + else -> null + } + } + + private fun DatabaseBookmarkCollection.toCollectionBookmark( + collectionRemoteId: String?, + bookmark: Bookmark + ): CollectionBookmark { + val updatedAt = Instant.fromEpochMilliseconds(modified_at).toPlatform() + return when (bookmark) { + is Bookmark.PageBookmark -> + CollectionBookmark.PageBookmark( + collectionLocalId = collection_local_id.toString(), + collectionRemoteId = collectionRemoteId, + bookmarkLocalId = bookmark.localId, + page = bookmark.page, + lastUpdated = updatedAt, + localId = local_id.toString() + ) + is Bookmark.AyahBookmark -> + CollectionBookmark.AyahBookmark( + collectionLocalId = collection_local_id.toString(), + collectionRemoteId = collectionRemoteId, + bookmarkLocalId = bookmark.localId, + sura = bookmark.sura, + ayah = bookmark.ayah, + lastUpdated = updatedAt, + localId = local_id.toString() + ) + } + } + + private fun RemoteCollectionBookmark.toBookmarkTypeWithTimestamp(): Pair { + val updatedAt = lastUpdated.fromPlatform().toEpochMilliseconds() + return when (this) { + is RemoteCollectionBookmark.Page -> "PAGE" to updatedAt + is RemoteCollectionBookmark.Ayah -> "AYAH" to updatedAt + } + } + + private fun Bookmark.toCollectionBookmarkType(): String { + return when (this) { + is Bookmark.PageBookmark -> "PAGE" + is Bookmark.AyahBookmark -> "AYAH" + } + } + + private fun getAyahId(sura: Int, ayah: Int): Int { + // TODO - fix this + return 1 + } + + override suspend fun remoteResourcesExist(remoteIDs: List): Map { + if (remoteIDs.isEmpty()) { + return emptyMap() + } + + return withContext(Dispatchers.IO) { + val existentIDs = bookmarkCollectionQueries.value + .checkRemoteIDsExistence(remoteIDs) + .executeAsList() + .map { it.remote_id } + .toSet() + + remoteIDs.map { Pair(it, existentIDs.contains(it)) } + .associateBy { it.first } + .mapValues { it.value.second } + } + } +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collectionbookmark/repository/CollectionBookmarksSynchronizationRepository.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collectionbookmark/repository/CollectionBookmarksSynchronizationRepository.kt new file mode 100644 index 00000000..57b424cb --- /dev/null +++ 
b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/collectionbookmark/repository/CollectionBookmarksSynchronizationRepository.kt @@ -0,0 +1,29 @@ +package com.quran.shared.persistence.repository.collectionbookmark.repository + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.persistence.input.RemoteCollectionBookmark +import com.quran.shared.persistence.model.CollectionBookmark + +interface CollectionBookmarksSynchronizationRepository { + /** + * Returns a list of collection bookmarks that have been mutated locally + * and need to be synchronized with the remote server. + */ + suspend fun fetchMutatedCollectionBookmarks(): List> + + /** + * Persists the remote state of collection bookmarks after a successful synchronization operation. + * + * @param updatesToPersist List of remote collection bookmarks with their remote IDs and mutation + * states to be persisted. These must have a remoteID setup. + * @param localMutationsToClear List of local mutations to be cleared. An item of this list + * denotes either a mutation that was committed remotely, or a mutation that was overridden. + */ + suspend fun applyRemoteChanges( + updatesToPersist: List>, + localMutationsToClear: List> + ) + + suspend fun remoteResourcesExist(remoteIDs: List): Map +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/note/extension/NoteQueriesExtensions.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/note/extension/NoteQueriesExtensions.kt new file mode 100644 index 00000000..a7836e9b --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/note/extension/NoteQueriesExtensions.kt @@ -0,0 +1,41 @@ +@file:OptIn(ExperimentalTime::class) + +package com.quran.shared.persistence.repository.note.extension + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.persistence.model.DatabaseNote +import com.quran.shared.persistence.model.Note +import com.quran.shared.persistence.util.toPlatform +import kotlin.time.ExperimentalTime +import kotlin.time.Instant + +internal fun DatabaseNote.toNote(): Note { + val normalizedModifiedAt = normalizeEpochMillis(modified_at) + return Note( + body = note, + startAyahId = start_ayah_id, + endAyahId = end_ayah_id, + lastUpdated = Instant.fromEpochMilliseconds(normalizedModifiedAt).toPlatform(), + localId = local_id.toString() + ) +} + +internal fun DatabaseNote.toNoteMutation(): LocalModelMutation { + val mutation = when { + deleted == 1L -> Mutation.DELETED + remote_id == null -> Mutation.CREATED + else -> Mutation.MODIFIED + } + + return LocalModelMutation( + mutation = mutation, + model = toNote(), + remoteID = remote_id, + localID = local_id.toString() + ) +} + +private fun normalizeEpochMillis(value: Long): Long { + return if (value < 1_000_000_000_000L) value * 1000 else value +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/note/repository/NotesRepository.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/note/repository/NotesRepository.kt new file mode 100644 index 00000000..213b077d --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/note/repository/NotesRepository.kt @@ -0,0 +1,25 @@ +package com.quran.shared.persistence.repository.note.repository + +import 
com.quran.shared.persistence.model.Note + +interface NotesRepository { + /** + * Fetch and returns all notes. + */ + suspend fun getAllNotes(): List + + /** + * Add a note locally. + */ + suspend fun addNote(body: String, startAyahId: Long, endAyahId: Long): Note + + /** + * Update a note by its local ID. + */ + suspend fun updateNote(localId: String, body: String, startAyahId: Long, endAyahId: Long): Note + + /** + * Delete a note by its local ID. + */ + suspend fun deleteNote(localId: String): Boolean +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/note/repository/NotesRepositoryImpl.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/note/repository/NotesRepositoryImpl.kt new file mode 100644 index 00000000..f721bfac --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/note/repository/NotesRepositoryImpl.kt @@ -0,0 +1,142 @@ +package com.quran.shared.persistence.repository.note.repository + +import co.touchlab.kermit.Logger +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.persistence.QuranDatabase +import com.quran.shared.persistence.input.RemoteNote +import com.quran.shared.persistence.model.Note +import com.quran.shared.persistence.repository.note.extension.toNote +import com.quran.shared.persistence.repository.note.extension.toNoteMutation +import com.quran.shared.persistence.util.fromPlatform +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.IO +import kotlinx.coroutines.withContext + +class NotesRepositoryImpl( + private val database: QuranDatabase +) : NotesRepository, NotesSynchronizationRepository { + + private val logger = Logger.withTag("NotesRepository") + private val notesQueries = lazy { database.notesQueries } + + override suspend fun getAllNotes(): List { + return withContext(Dispatchers.IO) { + notesQueries.value.getNotes() + .executeAsList() + .map { it.toNote() } + } + } + + override suspend fun addNote(body: String, startAyahId: Long, endAyahId: Long): Note { + logger.i { "Adding note for range=$startAyahId-$endAyahId" } + return withContext(Dispatchers.IO) { + notesQueries.value.addNewNote( + note = body, + start_ayah_id = startAyahId, + end_ayah_id = endAyahId + ) + val record = notesQueries.value.getLastInsertedNote() + .executeAsOneOrNull() + requireNotNull(record) { "Expected note after insert." } + record.toNote() + } + } + + override suspend fun updateNote(localId: String, body: String, startAyahId: Long, endAyahId: Long): Note { + logger.i { "Updating note localId=$localId" } + return withContext(Dispatchers.IO) { + notesQueries.value.updateNote( + note = body, + start_ayah_id = startAyahId, + end_ayah_id = endAyahId, + id = localId.toLong() + ) + val record = notesQueries.value.getNoteByLocalId(localId.toLong()) + .executeAsOneOrNull() + requireNotNull(record) { "Expected note localId=$localId after update." 
} + record.toNote() + } + } + + override suspend fun deleteNote(localId: String): Boolean { + logger.i { "Deleting note localId=$localId" } + withContext(Dispatchers.IO) { + notesQueries.value.deleteNote(id = localId.toLong()) + } + return true + } + + override suspend fun fetchMutatedNotes(lastModified: Long): List> { + return withContext(Dispatchers.IO) { + notesQueries.value.getUnsyncedNotes(last_modified = lastModified) + .executeAsList() + .map { it.toNoteMutation() } + } + } + + override suspend fun applyRemoteChanges( + updatesToPersist: List>, + localMutationsToClear: List> + ) { + logger.i { + "Applying note remote changes with ${updatesToPersist.size} updates " + + "and clearing ${localMutationsToClear.size} local mutations" + } + return withContext(Dispatchers.IO) { + database.transaction { + localMutationsToClear.forEach { local -> + notesQueries.value.clearLocalMutationFor(id = local.localID.toLong()) + } + + updatesToPersist.forEach { remote -> + when (remote.mutation) { + Mutation.CREATED, Mutation.MODIFIED -> applyRemoteNoteUpsert(remote) + Mutation.DELETED -> applyRemoteNoteDeletion(remote) + } + } + } + } + } + + private fun applyRemoteNoteUpsert(remote: RemoteModelMutation) { + val model = remote.model + val body = model.body + val startAyahId = model.startAyahId + val endAyahId = model.endAyahId + if (body.isNullOrEmpty() || startAyahId == null || endAyahId == null) { + logger.w { "Skipping remote note mutation without body or ranges: remoteId=${remote.remoteID}" } + return + } + + val updatedAt = model.lastUpdated.fromPlatform().toEpochMilliseconds() + notesQueries.value.persistRemoteNote( + remote_id = remote.remoteID, + note = body, + start_ayah_id = startAyahId, + end_ayah_id = endAyahId, + created_at = updatedAt, + modified_at = updatedAt + ) + } + + private fun applyRemoteNoteDeletion(remote: RemoteModelMutation) { + notesQueries.value.deleteRemoteNote(remote_id = remote.remoteID) + } + + override suspend fun remoteResourcesExist(remoteIDs: List): Map { + if (remoteIDs.isEmpty()) { + return emptyMap() + } + + return withContext(Dispatchers.IO) { + val existentIDs = notesQueries.value.checkRemoteIDsExistence(remoteIDs) + .executeAsList() + .map { it.remote_id } + .toSet() + + remoteIDs.associateWith { existentIDs.contains(it) } + } + } +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/note/repository/NotesSynchronizationRepository.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/note/repository/NotesSynchronizationRepository.kt new file mode 100644 index 00000000..5e67cab7 --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/repository/note/repository/NotesSynchronizationRepository.kt @@ -0,0 +1,23 @@ +package com.quran.shared.persistence.repository.note.repository + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.persistence.input.RemoteNote +import com.quran.shared.persistence.model.Note + +interface NotesSynchronizationRepository { + /** + * Returns a list of notes that have been mutated locally and need to be synchronized. + */ + suspend fun fetchMutatedNotes(lastModified: Long): List> + + /** + * Persists the remote state of notes after a successful synchronization operation. 
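+     *
+     * Unlike bookmarks, notes can also be modified in place, so `fetchMutatedNotes` takes a
+     * timestamp (presumably the time of the last successful sync) and also returns notes whose
+     * `modified_at` is newer. A minimal sketch, where `lastSyncedAt`, `syncClient`, and
+     * `pushNotes` are hypothetical and supplied by the caller:
+     * ```
+     * val localMutations = syncRepository.fetchMutatedNotes(lastModified = lastSyncedAt)
+     * val committed = syncClient.pushNotes(localMutations) // List<RemoteModelMutation<RemoteNote>>
+     * syncRepository.applyRemoteChanges(
+     *     updatesToPersist = committed,
+     *     localMutationsToClear = localMutations
+     * )
+     * ```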
+ */ + suspend fun applyRemoteChanges( + updatesToPersist: List>, + localMutationsToClear: List> + ) + + suspend fun remoteResourcesExist(remoteIDs: List): Map +} diff --git a/persistence/src/commonMain/kotlin/com/quran/shared/persistence/util/PlatformDateTime.kt b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/util/PlatformDateTime.kt new file mode 100644 index 00000000..760e96ee --- /dev/null +++ b/persistence/src/commonMain/kotlin/com/quran/shared/persistence/util/PlatformDateTime.kt @@ -0,0 +1,7 @@ +package com.quran.shared.persistence.util + +import kotlin.time.Instant + +expect class PlatformDateTime +expect fun PlatformDateTime.fromPlatform(): Instant +expect fun Instant.toPlatform(): PlatformDateTime \ No newline at end of file diff --git a/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/ayah_bookmarks.sq b/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/ayah_bookmarks.sq new file mode 100644 index 00000000..9b8da3b5 --- /dev/null +++ b/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/ayah_bookmarks.sq @@ -0,0 +1,81 @@ +CREATE TABLE IF NOT EXISTS ayah_bookmark( + local_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + remote_id TEXT, + ayah_id INTEGER NOT NULL, + sura INTEGER NOT NULL, + ayah INTEGER NOT NULL, + created_at INTEGER DEFAULT (CAST(strftime('%s', 'now') AS INTEGER) * 1000) NOT NULL, + modified_at INTEGER DEFAULT (CAST(strftime('%s', 'now') AS INTEGER) * 1000) NOT NULL, + -- Ensure deleted is either 0 or 1 + deleted INTEGER NOT NULL DEFAULT 0, + CHECK (deleted IN (0, 1)), + UNIQUE(sura, ayah) +); + +CREATE INDEX IF NOT EXISTS ayah_bookmark_remote_id_idx ON ayah_bookmark(remote_id); + +getBookmarks: + SELECT * FROM ayah_bookmark WHERE deleted = 0 ORDER BY created_at DESC; + +getBookmarkForAyah: + SELECT * FROM ayah_bookmark WHERE sura = ? AND ayah = ? LIMIT 1; + +getBookmarkByLocalId: + SELECT * FROM ayah_bookmark WHERE local_id = ? 
LIMIT 1; + +addNewBookmark { + INSERT OR IGNORE INTO ayah_bookmark (remote_id, ayah_id, sura, ayah, deleted) + VALUES (NULL, :ayah_id, :sura, :ayah, 0); + UPDATE ayah_bookmark + SET deleted = 0, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE sura = :sura AND ayah = :ayah; +} + +insertBookmarkIfMissing { + INSERT OR IGNORE INTO ayah_bookmark (remote_id, ayah_id, sura, ayah, deleted) + VALUES (NULL, :ayah_id, :sura, :ayah, 0); + UPDATE ayah_bookmark + SET deleted = 0, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE sura = :sura AND ayah = :ayah AND deleted = 1; +} + +getUnsyncedBookmarks: + SELECT * FROM ayah_bookmark WHERE remote_id IS NULL OR deleted = 1 ORDER BY created_at DESC; + +persistRemoteBookmark { + INSERT OR IGNORE INTO ayah_bookmark (remote_id, ayah_id, sura, ayah, created_at, modified_at, deleted) + VALUES (:remote_id, :ayah_id, :sura, :ayah, :created_at, :modified_at, 0); + UPDATE ayah_bookmark + SET remote_id = :remote_id, + ayah_id = :ayah_id, + created_at = :created_at, + modified_at = :modified_at, + deleted = 0 + WHERE sura = :sura AND ayah = :ayah; +} + +hardDeleteBookmarkFor { + DELETE FROM ayah_bookmark WHERE remote_id=:remoteID; +} + +clearLocalMutationFor { + DELETE FROM ayah_bookmark WHERE remote_id IS NULL AND local_id = :id; + UPDATE ayah_bookmark + SET deleted = 0, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE local_id = :id; +} + +checkRemoteIDsExistence: + SELECT remote_id FROM ayah_bookmark WHERE remote_id IN :queried_ids; + +-- Removes the record of a local bookmark or marks it as deleted if it's remote. +deleteBookmark { + DELETE FROM ayah_bookmark WHERE sura=:sura AND ayah=:ayah AND remote_id IS NULL; + UPDATE ayah_bookmark + SET deleted = 1, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE sura=:sura AND ayah=:ayah AND remote_id IS NOT NULL; +} diff --git a/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/bookmark_collections.sq b/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/bookmark_collections.sq new file mode 100644 index 00000000..b4ec3444 --- /dev/null +++ b/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/bookmark_collections.sq @@ -0,0 +1,186 @@ +CREATE TABLE IF NOT EXISTS bookmark_collection( + local_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + remote_id TEXT, + bookmark_local_id TEXT NOT NULL, + bookmark_type TEXT NOT NULL, + collection_local_id INTEGER NOT NULL, + created_at INTEGER DEFAULT (CAST(strftime('%s', 'now') AS INTEGER) * 1000) NOT NULL, + modified_at INTEGER DEFAULT (CAST(strftime('%s', 'now') AS INTEGER) * 1000) NOT NULL, + deleted INTEGER NOT NULL DEFAULT 0, + CHECK (deleted IN (0, 1)), + UNIQUE(bookmark_local_id, collection_local_id), + CHECK (bookmark_type IN ('PAGE','AYAH')), + FOREIGN KEY(collection_local_id) REFERENCES collection(local_id) +); + +CREATE INDEX IF NOT EXISTS bookmark_collection_remote_id_idx ON bookmark_collection(remote_id); + +getCollectionBookmarks: + SELECT * FROM bookmark_collection WHERE deleted = 0 ORDER BY created_at DESC; + +getCollectionBookmarksForCollection: + SELECT * FROM bookmark_collection + WHERE collection_local_id = ? 
AND deleted = 0 + ORDER BY created_at DESC; + +getCollectionBookmarksWithDetails: + SELECT + bc.local_id, + bc.remote_id, + bc.bookmark_local_id, + bc.bookmark_type, + bc.collection_local_id, + bc.created_at, + bc.modified_at, + bc.deleted, + c.remote_id AS collection_remote_id, + c.name AS collection_name, + pb.page AS page, + ab.sura AS sura, + ab.ayah AS ayah + FROM bookmark_collection bc + JOIN collection c ON c.local_id = bc.collection_local_id + LEFT JOIN page_bookmark pb + ON pb.local_id = CAST(bc.bookmark_local_id AS INTEGER) + AND bc.bookmark_type = 'PAGE' + LEFT JOIN ayah_bookmark ab + ON ab.local_id = CAST(bc.bookmark_local_id AS INTEGER) + AND bc.bookmark_type = 'AYAH' + WHERE bc.deleted = 0 + ORDER BY bc.created_at DESC; + +getCollectionBookmarksForCollectionWithDetails: + SELECT + bc.local_id, + bc.remote_id, + bc.bookmark_local_id, + bc.bookmark_type, + bc.collection_local_id, + bc.created_at, + bc.modified_at, + bc.deleted, + c.remote_id AS collection_remote_id, + c.name AS collection_name, + pb.page AS page, + ab.sura AS sura, + ab.ayah AS ayah + FROM bookmark_collection bc + JOIN collection c ON c.local_id = bc.collection_local_id + LEFT JOIN page_bookmark pb + ON pb.local_id = CAST(bc.bookmark_local_id AS INTEGER) + AND bc.bookmark_type = 'PAGE' + LEFT JOIN ayah_bookmark ab + ON ab.local_id = CAST(bc.bookmark_local_id AS INTEGER) + AND bc.bookmark_type = 'AYAH' + WHERE bc.collection_local_id = :collection_local_id + AND bc.deleted = 0 + ORDER BY bc.created_at DESC; + +getUnsyncedCollectionBookmarksWithDetails: + SELECT + bc.local_id, + bc.remote_id, + bc.bookmark_local_id, + bc.bookmark_type, + bc.collection_local_id, + bc.created_at, + bc.modified_at, + bc.deleted, + c.remote_id AS collection_remote_id, + c.name AS collection_name, + pb.page AS page, + ab.sura AS sura, + ab.ayah AS ayah + FROM bookmark_collection bc + JOIN collection c ON c.local_id = bc.collection_local_id + LEFT JOIN page_bookmark pb + ON pb.local_id = CAST(bc.bookmark_local_id AS INTEGER) + AND bc.bookmark_type = 'PAGE' + LEFT JOIN ayah_bookmark ab + ON ab.local_id = CAST(bc.bookmark_local_id AS INTEGER) + AND bc.bookmark_type = 'AYAH' + WHERE bc.remote_id IS NULL OR bc.deleted = 1 + ORDER BY bc.created_at DESC; + +getCollectionBookmarkFor: + SELECT * FROM bookmark_collection + WHERE bookmark_local_id = ? AND collection_local_id = ? 
LIMIT 1; + +addBookmarkToCollection { + INSERT OR IGNORE INTO bookmark_collection ( + remote_id, + bookmark_local_id, + bookmark_type, + collection_local_id, + deleted + ) + VALUES (NULL, :bookmark_local_id, :bookmark_type, :collection_local_id, 0); + UPDATE bookmark_collection + SET deleted = 0, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE bookmark_local_id = :bookmark_local_id AND collection_local_id = :collection_local_id; +} + +deleteBookmarkFromCollection { + DELETE FROM bookmark_collection + WHERE bookmark_local_id = :bookmark_local_id + AND collection_local_id = :collection_local_id + AND remote_id IS NULL; + UPDATE bookmark_collection + SET deleted = 1, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE bookmark_local_id = :bookmark_local_id + AND collection_local_id = :collection_local_id + AND remote_id IS NOT NULL; +} + +getUnsyncedCollectionBookmarks: + SELECT * FROM bookmark_collection + WHERE remote_id IS NULL OR deleted = 1 + ORDER BY created_at DESC; + +persistRemoteBookmarkCollection { + INSERT OR IGNORE INTO bookmark_collection ( + remote_id, + bookmark_local_id, + bookmark_type, + collection_local_id, + created_at, + modified_at, + deleted + ) + VALUES ( + :remote_id, + :bookmark_local_id, + :bookmark_type, + :collection_local_id, + :created_at, + :modified_at, + 0 + ); + UPDATE bookmark_collection + SET remote_id = :remote_id, + bookmark_type = :bookmark_type, + deleted = 0, + modified_at = :modified_at + WHERE bookmark_local_id = :bookmark_local_id AND collection_local_id = :collection_local_id; +} + +deleteRemoteBookmarkCollectionByRemoteId: + DELETE FROM bookmark_collection WHERE remote_id = :remote_id; + +deleteRemoteBookmarkCollection { + DELETE FROM bookmark_collection + WHERE bookmark_local_id = :bookmark_local_id AND collection_local_id = :collection_local_id; +} + +clearLocalMutationFor { + DELETE FROM bookmark_collection WHERE remote_id IS NULL AND local_id = :id; + UPDATE bookmark_collection + SET deleted = 0, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE local_id = :id; +} + +checkRemoteIDsExistence: + SELECT remote_id FROM bookmark_collection WHERE remote_id IN :queried_ids; diff --git a/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/bookmarks.sq b/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/bookmarks.sq deleted file mode 100644 index e8c81329..00000000 --- a/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/bookmarks.sq +++ /dev/null @@ -1,12 +0,0 @@ -CREATE TABLE bookmarks( - id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - sura INTEGER NOT NULL, - ayah INTEGER NOT NULL, - last_updated INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL -); - -getBookmarks: - SELECT * FROM bookmarks; - -addBookmark: - INSERT INTO bookmarks (sura, ayah) VALUES (?, ?); \ No newline at end of file diff --git a/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/collections.sq b/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/collections.sq new file mode 100644 index 00000000..61d55187 --- /dev/null +++ b/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/collections.sq @@ -0,0 +1,93 @@ +CREATE TABLE IF NOT EXISTS collection( + local_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + remote_id TEXT, + name TEXT NOT NULL UNIQUE, + created_at INTEGER DEFAULT (CAST(strftime('%s', 'now') AS INTEGER) * 1000) NOT NULL, + modified_at INTEGER DEFAULT (CAST(strftime('%s', 'now') AS INTEGER) * 1000) NOT NULL, + 
deleted INTEGER NOT NULL DEFAULT 0, + dirty INTEGER NOT NULL DEFAULT 0, + CHECK (deleted IN (0, 1)), + CHECK (dirty IN (0, 1)) +); + +CREATE INDEX IF NOT EXISTS collection_remote_id_idx ON collection(remote_id); + +getCollections: + SELECT * FROM collection WHERE deleted = 0 ORDER BY created_at DESC; + +getCollectionByName: + SELECT * FROM collection WHERE name = ? LIMIT 1; + +getCollectionByLocalId: + SELECT * FROM collection WHERE local_id = ? LIMIT 1; + +getCollectionByRemoteId: + SELECT * FROM collection WHERE remote_id = ? LIMIT 1; + +addNewCollection { + INSERT OR IGNORE INTO collection (remote_id, name, deleted, dirty) + VALUES (NULL, :name, 0, 0); + UPDATE collection + SET deleted = 0, + dirty = 0, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE name = :name; +} + +updateCollectionName: + UPDATE collection + SET name = :name, + dirty = CASE WHEN remote_id IS NULL THEN dirty ELSE 1 END, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE local_id = :id; + +deleteCollection { + DELETE FROM bookmark_collection WHERE collection_local_id = :id; + DELETE FROM collection WHERE local_id = :id AND remote_id IS NULL; + UPDATE collection + SET deleted = 1, + dirty = 1, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE local_id = :id AND remote_id IS NOT NULL; +} + +getUnsyncedCollections: + SELECT * FROM collection WHERE remote_id IS NULL OR dirty = 1 ORDER BY created_at DESC; + +persistRemoteCollection: + INSERT INTO collection (remote_id, name, created_at, modified_at, deleted, dirty) + VALUES (:remote_id, :name, :created_at, :modified_at, 0, 0); + +updateRemoteCollection: + UPDATE collection + SET name = :name, + modified_at = :modified_at, + deleted = 0, + dirty = 0 + WHERE remote_id = :remote_id; + +updateRemoteCollectionByLocalId: + UPDATE collection + SET remote_id = :remote_id, + name = :name, + modified_at = :modified_at, + deleted = 0, + dirty = 0 + WHERE local_id = :local_id; + +deleteRemoteCollection { + DELETE FROM bookmark_collection WHERE collection_local_id IN ( + SELECT local_id FROM collection WHERE remote_id = :remote_id + ); + DELETE FROM collection WHERE remote_id = :remote_id; +} + +clearLocalMutationFor: + UPDATE collection + SET deleted = 0, + dirty = 0, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE local_id = :id; + +checkRemoteIDsExistence: + SELECT remote_id FROM collection WHERE remote_id IN :queried_ids; diff --git a/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/notes.sq b/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/notes.sq new file mode 100644 index 00000000..c7208cde --- /dev/null +++ b/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/notes.sq @@ -0,0 +1,80 @@ +CREATE TABLE IF NOT EXISTS note( + local_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + remote_id TEXT, + note TEXT NOT NULL, + start_ayah_id INTEGER NOT NULL, + end_ayah_id INTEGER NOT NULL, + created_at INTEGER DEFAULT (CAST(strftime('%s', 'now') AS INTEGER) * 1000) NOT NULL, + modified_at INTEGER DEFAULT (CAST(strftime('%s', 'now') AS INTEGER) * 1000) NOT NULL, + deleted INTEGER NOT NULL DEFAULT 0, + -- Ensure deleted is either 0 or 1 + CHECK (deleted IN (0, 1)) +); + +CREATE INDEX IF NOT EXISTS note_start_ayah_idx ON note(start_ayah_id); +CREATE INDEX IF NOT EXISTS note_end_ayah_idx ON note(end_ayah_id); +CREATE INDEX IF NOT EXISTS note_remote_id_idx ON note(remote_id); + +getNotes: + SELECT * FROM note WHERE deleted = 0 ORDER BY created_at DESC; + 
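+-- Note on timestamps: created_at/modified_at above are stored as epoch milliseconds;
+-- strftime('%s', 'now') yields seconds, hence the CAST(...) * 1000 in the column defaults
+-- (for example, 1700000000 seconds becomes 1700000000000 milliseconds).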
+getNoteByLocalId: + SELECT * FROM note WHERE local_id = ? LIMIT 1; + +getLastInsertedNote: + SELECT * FROM note WHERE local_id = last_insert_rowid() LIMIT 1; + +addNewNote: + INSERT INTO note (remote_id, note, start_ayah_id, end_ayah_id, deleted) + VALUES (NULL, :note, :start_ayah_id, :end_ayah_id, 0); + +updateNote: + UPDATE note + SET note = :note, + start_ayah_id = :start_ayah_id, + end_ayah_id = :end_ayah_id, + deleted = 0, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE local_id = :id; + +deleteNote { + DELETE FROM note WHERE local_id = :id AND remote_id IS NULL; + UPDATE note + SET deleted = 1, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE local_id = :id AND remote_id IS NOT NULL; +} + +getUnsyncedNotes: + SELECT * FROM note + WHERE remote_id IS NULL + OR deleted = 1 + OR modified_at > :last_modified + ORDER BY created_at DESC; + +persistRemoteNote { + INSERT OR IGNORE INTO note (remote_id, note, start_ayah_id, end_ayah_id, created_at, modified_at, deleted) + VALUES (:remote_id, :note, :start_ayah_id, :end_ayah_id, :created_at, :modified_at, 0); + UPDATE note + SET note = :note, + start_ayah_id = :start_ayah_id, + end_ayah_id = :end_ayah_id, + modified_at = :modified_at, + deleted = 0 + WHERE remote_id = :remote_id; +} + +deleteRemoteNote: + DELETE FROM note WHERE remote_id = :remote_id; + +clearLocalMutationFor { + DELETE FROM note WHERE remote_id IS NULL AND local_id = :id; + DELETE FROM note WHERE local_id = :id AND deleted = 1; + UPDATE note + SET deleted = 0, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE local_id = :id AND deleted = 0; +} + +checkRemoteIDsExistence: + SELECT remote_id FROM note WHERE remote_id IN :queried_ids; diff --git a/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/page_bookmarks.sq b/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/page_bookmarks.sq new file mode 100644 index 00000000..65689913 --- /dev/null +++ b/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/page_bookmarks.sq @@ -0,0 +1,108 @@ +CREATE TABLE IF NOT EXISTS page_bookmark( + local_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + remote_id TEXT, + page INTEGER NOT NULL UNIQUE, + created_at INTEGER DEFAULT (CAST(strftime('%s', 'now') AS INTEGER) * 1000) NOT NULL, + modified_at INTEGER DEFAULT (CAST(strftime('%s', 'now') AS INTEGER) * 1000) NOT NULL, + deleted INTEGER NOT NULL DEFAULT 0, + -- Ensure deleted is either 0 or 1 + CHECK (deleted IN (0, 1)) +); + +CREATE INDEX IF NOT EXISTS page_bookmark_remote_id_idx ON page_bookmark(remote_id); + +getBookmarks: + SELECT * FROM page_bookmark WHERE deleted = 0 ORDER BY created_at DESC; + +getBookmarkForPage: + SELECT * FROM page_bookmark WHERE page = ? LIMIT 1; + +getBookmarkByLocalId: + SELECT * FROM page_bookmark WHERE local_id = ? 
LIMIT 1; + +createRemoteBookmark: + INSERT INTO page_bookmark (remote_id, page, deleted) + VALUES (?, ?, 0); + +persistRemoteBookmark { + INSERT OR IGNORE INTO page_bookmark (remote_id, page, created_at, deleted) + VALUES (:remote_id, :page, :created_at, 0); + UPDATE page_bookmark + SET remote_id = :remote_id, + created_at = :created_at, + modified_at = :modified_at, + deleted = 0 + WHERE page = :page; +} + +addNewBookmark { + INSERT OR IGNORE INTO page_bookmark (remote_id, page, deleted) + VALUES (NULL, :page, 0); + UPDATE page_bookmark + SET deleted = 0, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE page = :page; +} + +insertBookmarkIfMissing { + INSERT OR IGNORE INTO page_bookmark (remote_id, page, deleted) + VALUES (NULL, :page, 0); + UPDATE page_bookmark + SET deleted = 0, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE page = :page AND deleted = 1; +} + +-- TODO: Rename +getUnsyncedBookmarks: + SELECT * FROM page_bookmark WHERE remote_id IS NULL OR deleted = 1 ORDER BY created_at DESC; + +setDeleted: + UPDATE page_bookmark + SET deleted = 1, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE local_id = ?; + +resetDeleted: + UPDATE page_bookmark + SET deleted = 0, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE local_id = ?; + +-- Removes the record of a local bookmark or marks it as deleted if it's remote. +deleteBookmark { + DELETE FROM page_bookmark WHERE page=:page AND remote_id IS NULL; + UPDATE page_bookmark + SET deleted = 1, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE page=:page AND remote_id IS NOT NULL; +} + +hardDeleteBookmarkFor { + DELETE FROM page_bookmark WHERE remote_id=:remoteID; +} + +deleteByRemoteID: + DELETE FROM page_bookmark WHERE remote_id=? AND remote_id IS NOT NULL; + +clearLocalMutations { + DELETE FROM page_bookmark WHERE remote_id IS NULL; + UPDATE page_bookmark + SET deleted = 0, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000; +} + +clearLocalMutationFor { + DELETE FROM page_bookmark WHERE remote_id IS NULL AND local_id = :id; + UPDATE page_bookmark + SET deleted = 0, + modified_at = CAST(strftime('%s', 'now') AS INTEGER) * 1000 + WHERE local_id = :id; +} + +-- Returns all records for the given page, whether it's deleted or not. 
+getAllRecordsFor: + SELECT * FROM page_bookmark WHERE page = ?; + +checkRemoteIDsExistence: + SELECT remote_id FROM page_bookmark WHERE remote_id IN :queried_ids; diff --git a/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/recent_pages.sq b/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/recent_pages.sq new file mode 100644 index 00000000..298bf14f --- /dev/null +++ b/persistence/src/commonMain/sqldelight/com/quran/shared/persistence/recent_pages.sq @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS recent_page( + local_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + remote_id TEXT, + page INTEGER NOT NULL UNIQUE, + created_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL, + modified_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL, + deleted INTEGER NOT NULL DEFAULT 0, + CHECK (deleted IN (0, 1)) +); \ No newline at end of file diff --git a/persistence/src/commonTest/kotlin/com/quran/shared/persistence/TestDatabaseDriver.kt b/persistence/src/commonTest/kotlin/com/quran/shared/persistence/TestDatabaseDriver.kt new file mode 100644 index 00000000..3b296f45 --- /dev/null +++ b/persistence/src/commonTest/kotlin/com/quran/shared/persistence/TestDatabaseDriver.kt @@ -0,0 +1,7 @@ +package com.quran.shared.persistence + +import app.cash.sqldelight.db.SqlDriver + +expect class TestDatabaseDriver() { + fun createDriver(): SqlDriver +} \ No newline at end of file diff --git a/persistence/src/commonTest/kotlin/com/quran/shared/persistence/repository/BookmarksRepositoryTest.kt b/persistence/src/commonTest/kotlin/com/quran/shared/persistence/repository/BookmarksRepositoryTest.kt new file mode 100644 index 00000000..62ea7693 --- /dev/null +++ b/persistence/src/commonTest/kotlin/com/quran/shared/persistence/repository/BookmarksRepositoryTest.kt @@ -0,0 +1,619 @@ +@file:OptIn(kotlin.time.ExperimentalTime::class) +package com.quran.shared.persistence.repository + +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.persistence.Page_bookmarksQueries +import com.quran.shared.persistence.QuranDatabase +import com.quran.shared.persistence.TestDatabaseDriver +import com.quran.shared.persistence.input.BookmarkMigration +import com.quran.shared.persistence.input.RemoteBookmark +import com.quran.shared.persistence.model.Bookmark +import com.quran.shared.persistence.repository.bookmark.repository.BookmarksRepository +import com.quran.shared.persistence.repository.bookmark.repository.BookmarksRepositoryImpl +import com.quran.shared.persistence.repository.bookmark.repository.BookmarksSynchronizationRepository +import com.quran.shared.persistence.util.toPlatform +import kotlinx.coroutines.test.runTest +import kotlin.test.BeforeTest +import kotlin.test.Test +import kotlin.test.assertEquals +import kotlin.test.assertFails +import kotlin.test.assertNotNull +import kotlin.test.assertNull +import kotlin.test.assertTrue +import kotlin.time.Instant + +class BookmarksRepositoryTest { + private lateinit var database: QuranDatabase + private lateinit var repository: BookmarksRepository + private lateinit var syncRepository: BookmarksSynchronizationRepository + + private lateinit var pageBookmarksQueries: Page_bookmarksQueries + + @BeforeTest + fun setup() { + database = createInMemoryDatabase() + repository = BookmarksRepositoryImpl(database) + syncRepository = repository as BookmarksSynchronizationRepository + pageBookmarksQueries = database.page_bookmarksQueries + } + + @Test + fun `getAllBookmarks returns empty list 
when no bookmarks exist`() = runTest { + val bookmarks = repository.getAllBookmarks() + assertTrue(bookmarks.isEmpty(), "Expected empty list when no bookmarks exist") + } + + @Test + fun `getAllBookmarks returns bookmarks`() = runTest { + pageBookmarksQueries.addNewBookmark(11) + pageBookmarksQueries.createRemoteBookmark("rem_id_1", 50) + pageBookmarksQueries.addNewBookmark(60) + + val bookmarks = repository.getAllBookmarks() + assertEquals(3, bookmarks.size, "Expected 3 bookmarks") + assertEquals(bookmarks.requirePageBookmarks().map { it.page }.toSet(), setOf(11, 50, 60)) + } + + @Test + fun `getAllBookmarks excludes deleted bookmarks`() = runTest { + pageBookmarksQueries.createRemoteBookmark("rem_id_1", 11) + pageBookmarksQueries.createRemoteBookmark("rem_id_2", 50) + // Mark one as deleted + pageBookmarksQueries.setDeleted(1L) + + val bookmarks = repository.getAllBookmarks() + assertEquals(1, bookmarks.size, "Expected only non-deleted bookmarks") + assertEquals(50, bookmarks[0].requirePageBookmark().page) + } + + @Test + fun `adding bookmarks on an empty list`() = runTest { + repository.addBookmark(10) + var bookmarks = pageBookmarksQueries.getBookmarks().executeAsList() + assertEquals(1, bookmarks.size) + assertEquals(10L, bookmarks[0].page) + assertNull(bookmarks[0].remote_id, "Locally added bookmarks should not have remote IDs (not synced yet)") + + repository.addBookmark(20) + repository.addBookmark(30) + bookmarks = pageBookmarksQueries.getBookmarks().executeAsList() + assertEquals(3, bookmarks.size) + assertEquals(listOf(10L, 20L, 30L), bookmarks.map { it.page }) + + // Verify all locally added bookmarks don't have remote IDs (not synced yet) + bookmarks.forEach { bookmark -> + assertNull(bookmark.remote_id, "Locally added bookmarks should not have remote IDs (not synced yet)") + } + } + + @Test + fun `adding should not duplicate bookmarks`() = runTest { + // Add initial page bookmark + repository.addBookmark(12) + + // Try to add the same page bookmark again + repository.addBookmark(12) + + // Verify only one bookmark exists + var bookmarks = repository.getAllBookmarks() + assertEquals(1, bookmarks.size, "Should only have one bookmark") + assertEquals(12, bookmarks[0].requirePageBookmark().page, "Should only have page 12") + + // Test with remote bookmarks + pageBookmarksQueries.createRemoteBookmark("rem_id_1", 105) + repository.addBookmark(105) + + bookmarks = repository.getAllBookmarks() + assertEquals(2, bookmarks.size, "Should only have one bookmark") + assertEquals(setOf(12, 105), bookmarks.requirePageBookmarks().map { it.page }.toSet(), "Expected bookmarked pages") + } + + @Test + fun `deleting local bookmarks removes them from the database`() = runTest { + repository.addBookmark(12) + repository.addBookmark(13) + repository.addBookmark(14) + + // Delete a page bookmark + repository.deleteBookmark(12) + var bookmarks = repository.getAllBookmarks() + assertEquals(2, bookmarks.size, "Should have two bookmarks after deleting page bookmark") + assertTrue(bookmarks.requirePageBookmarks().none { it.page == 12 }, "Page bookmark should be deleted") + + // Delete another page bookmark + repository.deleteBookmark(13) + bookmarks = repository.getAllBookmarks() + assertEquals(1, bookmarks.size, "Should have one bookmark after deleting second page bookmark") + assertTrue(bookmarks.requirePageBookmarks().none { it.page == 13 }, "Expected page 13 bookmark to be deleted") + + // Try to delete non-existent bookmarks + repository.deleteBookmark(999) // Non-existent page + + // Verify state 
hasn't changed + bookmarks = repository.getAllBookmarks() + assertEquals(1, bookmarks.size) + assertEquals(14, bookmarks[0].requirePageBookmark().page, "Other bookmarks should be returned") + + // Verify that no un-synced bookmark records are returned for deleted bookmarks + val unSyncedRecords = pageBookmarksQueries.getUnsyncedBookmarks().executeAsList() + assertEquals(1, unSyncedRecords.count(), "Only one is expected now") + assertEquals(setOf(14L), unSyncedRecords.map { it.page }.toSet()) + } + + @Test + fun `deleting remote bookmarks`() = runTest { + pageBookmarksQueries.createRemoteBookmark("rem_id_1", 10) + pageBookmarksQueries.createRemoteBookmark("rem_id_2", 15) + pageBookmarksQueries.createRemoteBookmark("rem_id_3", 20) + + repository.deleteBookmark(10) + repository.deleteBookmark(20) + + val bookmarks = repository.getAllBookmarks() + assertEquals(1, bookmarks.size) + assertEquals(15, bookmarks[0].requirePageBookmark().page) + + // Delete again + repository.deleteBookmark(10) + + val allBookmarks = pageBookmarksQueries.getBookmarks().executeAsList() + assertEquals(1, allBookmarks.size, "Should only have one non-deleted bookmark") + + val deletedBookmarks = pageBookmarksQueries.getAllRecordsFor(10L).executeAsList() + assertEquals(1, deletedBookmarks.size, "Should have one deleted bookmark for page 10") + assertEquals(1L, deletedBookmarks[0].deleted, "Bookmark should be marked as deleted") + } + + @Test + fun `adding a bookmark after deleting a remote bookmark like it`() = runTest { + // Add a remote bookmark and mark it as deleted + pageBookmarksQueries.createRemoteBookmark("rem_id_1", 15) + pageBookmarksQueries.setDeleted(1L) + + // Add another bookmark + repository.addBookmark(25) + + // Try to add a bookmark at the same location as the deleted one + repository.addBookmark(15) + + val bookmarks = repository.getAllBookmarks() + assertEquals(2, bookmarks.size) + assertEquals(listOf(15, 25), bookmarks.requirePageBookmarks().map { it.page }.sorted()) + + val allRecordsPage15 = pageBookmarksQueries.getAllRecordsFor(15L).executeAsList() + assertEquals(1, allRecordsPage15.count(), "Should only have one record for page 15") + assertEquals(0L, allRecordsPage15[0].deleted, "Re-adding should restore delete flag to false") + } + + @Test + fun `fetchMutatedBookmarks returns all mutated bookmarks`() = runTest { + val emptyResult = syncRepository.fetchMutatedBookmarks() + pageBookmarksQueries.createRemoteBookmark("rem-id-1", 10L) + assertTrue(emptyResult.isEmpty(), "Expected to return nothing when no mutations have been added.") + + repository.addBookmark(1) + repository.addBookmark(2) + + repository.deleteBookmark(10) + + val result = syncRepository.fetchMutatedBookmarks() + + assertEquals(3, result.size) + assertTrue(result.any { it.model.requirePageBookmark().page == 1 && it.mutation == Mutation.CREATED }) + assertTrue(result.any { it.model.requirePageBookmark().page == 2 && it.mutation == Mutation.CREATED }) + assertTrue(result.any { it.model.requirePageBookmark().page == 10 && it.mutation == Mutation.DELETED }) + + // Assert that all returned mutations have non-null local IDs and match the database + result.forEach { mutation -> + val pageBookmark = mutation.model.requirePageBookmark() + assertNotNull(mutation.localID, "Local ID should not be null for mutation on page ${pageBookmark.page}") + + // Get the corresponding database record and verify the local ID matches + val dbRecords = pageBookmarksQueries.getAllRecordsFor(pageBookmark.page.toLong()).executeAsList() + val matchingRecord = 
dbRecords.find { it.local_id.toString() == mutation.localID } + assertNotNull(matchingRecord, "Should find database record with matching local ID ${mutation.localID} for page ${pageBookmark.page}") + } + } + + @Test + fun `migrateBookmarks succeeds when table is empty`() = runTest { + val bookmarks = listOf( + BookmarkMigration.Page(page = 1), + BookmarkMigration.Page(page = 2) + ) + + repository.migrateBookmarks(bookmarks) + + val migratedBookmarks = pageBookmarksQueries.getBookmarks().executeAsList() + assertEquals(2, migratedBookmarks.size) + + val pageBookmark1 = migratedBookmarks.find { it.page == 1L } + assertEquals(0L, pageBookmark1?.deleted, "Should not be marked as deleted") + assertNull(pageBookmark1?.remote_id, "Should not have remote ID") + + val pageBookmark2 = migratedBookmarks.find { it.page == 2L } + assertEquals(0L, pageBookmark2?.deleted, "Should not be marked as deleted") + assertNull(pageBookmark2?.remote_id, "Should not have remote ID") + } + + @Test + fun `migrateBookmarks fails when table is not empty`() = runTest { + val bookmarks = listOf( + BookmarkMigration.Page(page = 1) + ) + + pageBookmarksQueries.createRemoteBookmark("existing-1", 1L) + assertFails("Should fail if table is not empty") { + repository.migrateBookmarks(bookmarks) + } + } + + @Test + fun `migrateBookmarks succeeds with any bookmarks`() = runTest { + val bookmarks = listOf( + BookmarkMigration.Page(page = 1) + ) + repository.migrateBookmarks(bookmarks) + + val migratedBookmarks = pageBookmarksQueries.getBookmarks().executeAsList() + assertEquals(1, migratedBookmarks.size) + assertEquals(1L, migratedBookmarks[0].page) + } + + @Test + fun `getAllBookmarks reflects mutations as they occur`() = runTest { + // Initial state should be empty + assertTrue(repository.getAllBookmarks().isEmpty(), "Initial state should be empty") + + // Add a page bookmark + repository.addBookmark(1) + var bookmarks = repository.getAllBookmarks() + assertEquals(1, bookmarks.size, "Should have one bookmark after adding") + assertEquals(1, bookmarks[0].requirePageBookmark().page, "Should be page 1") + + // Add another page bookmark + repository.addBookmark(2) + bookmarks = repository.getAllBookmarks() + assertEquals(2, bookmarks.size, "Should have two bookmarks after adding second page") + assertTrue(bookmarks.requirePageBookmarks().any { it.page == 1 }, "Should have page bookmark 1") + assertTrue(bookmarks.requirePageBookmarks().any { it.page == 2 }, "Should have page bookmark 2") + + // Delete the first page bookmark + repository.deleteBookmark(1) + bookmarks = repository.getAllBookmarks() + assertEquals(1, bookmarks.size, "Should have one bookmark after deletion") + assertEquals(2, bookmarks[0].requirePageBookmark().page, "Should only have page bookmark 2") + } + + @Test + fun `applyRemoteChanges committing all local mutations and nothing else`() = runTest { + // Setup: Create local mutations + repository.addBookmark(10) // Local creation + repository.addBookmark(20) // Local creation + + // Create remote bookmark first, then delete it to create a deletion mutation + pageBookmarksQueries.createRemoteBookmark("remote-30", 30L) + repository.deleteBookmark(30) // Local deletion of remote bookmark + + // Get the local mutations to clear + val localMutations = syncRepository.fetchMutatedBookmarks() + assertEquals(3, localMutations.size) + + // Action: Apply remote changes - commit the local mutations + val updatesToPersist: List<RemoteModelMutation<RemoteBookmark>> = listOf( + RemoteModelMutation( + model = RemoteBookmark.Page( + page = 10, + lastUpdated =
Instant.fromEpochMilliseconds(1000).toPlatform() + ), + remoteID = "remote-10", + mutation = Mutation.CREATED + ), + RemoteModelMutation( + model = RemoteBookmark.Page( + page = 20, + lastUpdated = Instant.fromEpochMilliseconds(1001).toPlatform() + ), + remoteID = "remote-20", + mutation = Mutation.CREATED + ), + RemoteModelMutation( + model = RemoteBookmark.Page( + page = 30, + lastUpdated = Instant.fromEpochMilliseconds(1002).toPlatform() + ), + remoteID = "remote-30", + mutation = Mutation.DELETED + ) + ) + + syncRepository.applyRemoteChanges(updatesToPersist, localMutations) + + // Assert: Final state + val finalBookmarks = repository.getAllBookmarks() + assertEquals(2, finalBookmarks.size, "Should have 2 bookmarks after sync") + assertEquals(setOf(10, 20), finalBookmarks.requirePageBookmarks().map { it.page }.toSet()) + + // Verify no remaining mutations + val remainingMutations = syncRepository.fetchMutatedBookmarks() + assertEquals(0, remainingMutations.size, "Should have no remaining mutations") + + // Verify remote IDs are set correctly + val dbBookmarks = pageBookmarksQueries.getBookmarks().executeAsList() + assertEquals(setOf("remote-10", "remote-20"), dbBookmarks.map { it.remote_id }.toSet()) + } + + @Test + fun `applyRemoteChanges overriding all local and committing only some of them`() = runTest { + // Setup: Create local mutations + repository.addBookmark(10) // Will be committed + repository.addBookmark(20) // Will be ignored + + // Create remote bookmarks first, then delete them to create deletion mutations + pageBookmarksQueries.createRemoteBookmark("remote-30", 30L) + pageBookmarksQueries.createRemoteBookmark("remote-40", 40L) + repository.deleteBookmark(30) // Will be committed + repository.deleteBookmark(40) // Will be ignored + + // Get the local mutations to clear + val localMutations = syncRepository.fetchMutatedBookmarks() + assertEquals(4, localMutations.size) + + // Action: Apply remote changes - mix of committed and overridden + val updatesToPersist = listOf<RemoteModelMutation<RemoteBookmark>>( + // Committed mutations (local state matches remote) + RemoteModelMutation( + model = RemoteBookmark.Page( + page = 10, + lastUpdated = Instant.fromEpochMilliseconds(1000).toPlatform() + ), + remoteID = "remote-10", + mutation = Mutation.CREATED + ), + RemoteModelMutation( + model = RemoteBookmark.Page( + page = 30, + lastUpdated = Instant.fromEpochMilliseconds(1001).toPlatform() + ), + remoteID = "remote-30", + mutation = Mutation.DELETED + ) + ) + + syncRepository.applyRemoteChanges(updatesToPersist, localMutations) + + // Assert: Final state + val finalBookmarks = repository.getAllBookmarks() + assertEquals(2, finalBookmarks.size, "Should have 2 bookmarks after sync") + assertEquals(setOf(10, 40), finalBookmarks.requirePageBookmarks().map { it.page }.toSet()) + + // Verify no remaining mutations + val remainingMutations = syncRepository.fetchMutatedBookmarks() + assertEquals(0, remainingMutations.size, "Should have no remaining mutations") + + // Verify remote IDs are set correctly + val dbBookmarks = pageBookmarksQueries.getBookmarks().executeAsList() + assertEquals(setOf("remote-10", "remote-40"), dbBookmarks.map { it.remote_id }.toSet()) + } + + @Test + fun `applyRemoteChanges with new remote mutations not in local mutations`() = runTest { + // Setup: Create some local mutations + repository.addBookmark(10) + + // Create remote bookmark first, then delete it to create a deletion mutation + pageBookmarksQueries.createRemoteBookmark("remote-20", 20L) + repository.deleteBookmark(20) + + // Create the
remote bookmark that will be deleted by the new remote mutation + pageBookmarksQueries.createRemoteBookmark("remote-40", 40L) + + val localMutations = syncRepository.fetchMutatedBookmarks() + assertEquals(2, localMutations.size) + + // Action: Apply remote changes including new mutations not in local list + val updatesToPersist = listOf<RemoteModelMutation<RemoteBookmark>>( + RemoteModelMutation( + model = RemoteBookmark.Page( + page = 10, + lastUpdated = Instant.fromEpochMilliseconds(1000).toPlatform() + ), + remoteID = "remote-10", + mutation = Mutation.CREATED + ), + RemoteModelMutation( + model = RemoteBookmark.Page( + page = 20, + lastUpdated = Instant.fromEpochMilliseconds(1001).toPlatform() + ), + remoteID = "remote-20", + mutation = Mutation.DELETED + ), + // New remote mutations not in local mutations + RemoteModelMutation( + model = RemoteBookmark.Page( + page = 30, + lastUpdated = Instant.fromEpochMilliseconds(1002).toPlatform() + ), + remoteID = "remote-30", + mutation = Mutation.CREATED + ), + RemoteModelMutation( + model = RemoteBookmark.Page( + page = 40, + lastUpdated = Instant.fromEpochMilliseconds(1003).toPlatform() + ), + remoteID = "remote-40", + mutation = Mutation.DELETED + ) + ) + + syncRepository.applyRemoteChanges(updatesToPersist, localMutations) + + // Assert: Final state includes new remote mutations + val finalBookmarks = repository.getAllBookmarks() + assertEquals(2, finalBookmarks.size, "Should have 2 bookmarks after sync") + assertEquals(setOf(10, 30), finalBookmarks.requirePageBookmarks().map { it.page }.toSet()) + + // Verify no remaining mutations + val remainingMutations = syncRepository.fetchMutatedBookmarks() + assertEquals(0, remainingMutations.size, "Should have no remaining mutations") + + // Verify remote IDs are set correctly + val dbBookmarks = pageBookmarksQueries.getBookmarks().executeAsList() + assertEquals(setOf("remote-10", "remote-30"), dbBookmarks.map { it.remote_id }.toSet()) + } + + @Test + fun `applyRemoteChanges with empty lists`() = runTest { + // Setup: Create some local mutations + repository.addBookmark(10) + repository.addBookmark(20) + + val localMutations = syncRepository.fetchMutatedBookmarks() + assertEquals(2, localMutations.size) + + // Action: Apply empty remote changes + syncRepository.applyRemoteChanges(emptyList(), localMutations) + + // Assert: Local mutations are cleared but no new bookmarks added + val finalBookmarks = repository.getAllBookmarks() + assertEquals(0, finalBookmarks.size, "Should have no bookmarks after clearing local mutations") + + // Verify no remaining mutations + val remainingMutations = syncRepository.fetchMutatedBookmarks() + assertEquals(0, remainingMutations.size, "Should have no remaining mutations") + } + + @Test + fun `applyRemoteChanges preserves existing remote bookmarks not in updates`() = runTest { + // Setup: Create existing remote bookmarks + pageBookmarksQueries.createRemoteBookmark("remote-10", 10L) + pageBookmarksQueries.createRemoteBookmark("remote-20", 20L) + pageBookmarksQueries.createRemoteBookmark("remote-30", 30L) + + // Create some local mutations + repository.addBookmark(40) + + // Delete existing remote bookmark to create a deletion mutation + repository.deleteBookmark(20) + + val localMutations = syncRepository.fetchMutatedBookmarks() + assertEquals(2, localMutations.size) + + // Action: Apply remote changes for local mutations only + val updatesToPersist = listOf<RemoteModelMutation<RemoteBookmark>>( + RemoteModelMutation( + model = RemoteBookmark.Page( + page = 40, + lastUpdated = Instant.fromEpochMilliseconds(1000).toPlatform() + ), +
remoteID = "remote-40", + mutation = Mutation.CREATED + ), + RemoteModelMutation( + model = RemoteBookmark.Page( + page = 20, + lastUpdated = Instant.fromEpochMilliseconds(1001).toPlatform() + ), + remoteID = "remote-20", + mutation = Mutation.DELETED + ) + ) + + syncRepository.applyRemoteChanges(updatesToPersist, localMutations) + + // Assert: Final state preserves existing remote bookmarks + val finalBookmarks = repository.getAllBookmarks() + assertEquals(3, finalBookmarks.size, "Should have 3 bookmarks after sync") + assertEquals(setOf(10, 30, 40), finalBookmarks.requirePageBookmarks().map { it.page }.toSet()) + + // Verify no remaining mutations + val remainingMutations = syncRepository.fetchMutatedBookmarks() + assertEquals(0, remainingMutations.size, "Should have no remaining mutations") + + // Verify existing remote bookmarks are preserved + val dbBookmarks = pageBookmarksQueries.getBookmarks().executeAsList() + assertEquals(setOf("remote-10", "remote-30", "remote-40"), dbBookmarks.map { it.remote_id }.toSet()) + } + + @Test + fun `PageBookmark localId is properly populated from database`() = runTest { + // Add a bookmark and verify localId is set + repository.addBookmark(10) + + val bookmarks = repository.getAllBookmarks() + assertEquals(1, bookmarks.size) + + val bookmark = bookmarks[0].requirePageBookmark() + assertEquals(10, bookmark.page) + assertNotNull(bookmark.localId, "localId should not be null") + assertTrue(bookmark.localId.isNotEmpty(), "localId should not be empty") + + // Verify the localId matches the database local_id + val dbBookmarks = pageBookmarksQueries.getBookmarks().executeAsList() + assertEquals(1, dbBookmarks.size) + assertEquals(dbBookmarks[0].local_id.toString(), bookmark.localId) + } + + @Test + fun `test remoteResourcesExist returns correct existence map`() = runTest { + // Arrange + // Add some remote bookmarks + repository.addBookmark(1) // This will be local + repository.addBookmark(2) // This will be local + + // Simulate remote bookmarks by directly persisting them + // Note: In a real scenario, these would come from applyRemoteChanges + val remoteBookmark1: RemoteModelMutation = RemoteModelMutation( + model = RemoteBookmark.Page(3, Instant.fromEpochMilliseconds(1000).toPlatform()), + remoteID = "remote-1", + mutation = Mutation.CREATED + ) + val remoteBookmark2: RemoteModelMutation = RemoteModelMutation( + model = RemoteBookmark.Page(4, Instant.fromEpochMilliseconds(1000).toPlatform()), + remoteID = "remote-2", + mutation = Mutation.CREATED + ) + + syncRepository.applyRemoteChanges(listOf(remoteBookmark1, remoteBookmark2), emptyList()) + + // Act & Assert - Test with existing and non-existing remote IDs + val existenceMap = syncRepository.remoteResourcesExist(listOf("remote-1", "remote-2", "non-existent")) + assertEquals(3, existenceMap.size, "Should return existence for all requested remote IDs") + assertEquals(existenceMap["remote-1"], true, "remote-1 should exist") + assertEquals(existenceMap["remote-2"], true, "remote-2 should exist") + assertEquals(existenceMap["non-existent"], false, "non-existent should not exist") + + // Test with empty list + val emptyExistenceMap = syncRepository.remoteResourcesExist(emptyList()) + assertTrue(emptyExistenceMap.isEmpty(), "Should return empty map for empty input") + + // Test with only non-existent remote IDs + val nonExistentExistenceMap = syncRepository.remoteResourcesExist(listOf("non-existent-1", "non-existent-2")) + assertEquals(2, nonExistentExistenceMap.size, "Should return existence for all 
requested remote IDs") + assertEquals(nonExistentExistenceMap["non-existent-1"], false, "non-existent-1 should not exist") + assertEquals(nonExistentExistenceMap["non-existent-2"], false, "non-existent-2 should not exist") + } + + private fun Bookmark.requirePageBookmark(): Bookmark.PageBookmark { + assertTrue(this is Bookmark.PageBookmark, "Expected PageBookmark but was ${this::class.simpleName}") + return this as Bookmark.PageBookmark + } + + private fun List.requirePageBookmarks(): List { + assertTrue(all { it is Bookmark.PageBookmark }, "Expected only page bookmarks") + return map { it as Bookmark.PageBookmark } + } + + private fun createInMemoryDatabase(): QuranDatabase { + // Create in-memory database using platform-specific driver + // Due to differences to how schema is handled between iOS and + // Android target, schema creation is delegated to the driver factory's + // actual implementations. + return QuranDatabase( + TestDatabaseDriver().createDriver() + ) + } +} diff --git a/persistence/src/jvmMain/kotlin/com/quran/shared/persistence/DriverFactory.jvm.kt b/persistence/src/jvmMain/kotlin/com/quran/shared/persistence/DriverFactory.jvm.kt new file mode 100644 index 00000000..fc6e74ca --- /dev/null +++ b/persistence/src/jvmMain/kotlin/com/quran/shared/persistence/DriverFactory.jvm.kt @@ -0,0 +1,11 @@ +package com.quran.shared.persistence + +import app.cash.sqldelight.db.SqlDriver +import app.cash.sqldelight.driver.jdbc.sqlite.JdbcSqliteDriver +import java.util.* + +actual class DriverFactory { + actual fun makeDriver(): SqlDriver { + return JdbcSqliteDriver("jdbc:sqlite:quran.db", Properties(), QuranDatabase.Schema) + } +} \ No newline at end of file diff --git a/persistence/src/jvmMain/kotlin/com/quran/shared/persistence/util/PlatformDateTime.jvm.kt b/persistence/src/jvmMain/kotlin/com/quran/shared/persistence/util/PlatformDateTime.jvm.kt new file mode 100644 index 00000000..ba4d6095 --- /dev/null +++ b/persistence/src/jvmMain/kotlin/com/quran/shared/persistence/util/PlatformDateTime.jvm.kt @@ -0,0 +1,7 @@ +package com.quran.shared.persistence.util + +import kotlin.time.Instant + +actual typealias PlatformDateTime = Instant +actual fun PlatformDateTime.fromPlatform(): Instant = this +actual fun PlatformDateTime.toPlatform(): PlatformDateTime = this \ No newline at end of file diff --git a/persistence/src/jvmTest/kotlin/com/quran/shared/persistence/TestDatabaseDriver.jvm.kt b/persistence/src/jvmTest/kotlin/com/quran/shared/persistence/TestDatabaseDriver.jvm.kt new file mode 100644 index 00000000..6c31ca8d --- /dev/null +++ b/persistence/src/jvmTest/kotlin/com/quran/shared/persistence/TestDatabaseDriver.jvm.kt @@ -0,0 +1,12 @@ +package com.quran.shared.persistence + +import app.cash.sqldelight.db.SqlDriver +import app.cash.sqldelight.driver.jdbc.sqlite.JdbcSqliteDriver + +actual class TestDatabaseDriver { + actual fun createDriver(): SqlDriver { + val driver = JdbcSqliteDriver(JdbcSqliteDriver.IN_MEMORY) + QuranDatabase.Schema.create(driver) + return driver + } +} \ No newline at end of file diff --git a/persistence/src/nativeTest/kotlin/com/quran/shared/persistence/TestDatabaseDriver.kt b/persistence/src/nativeTest/kotlin/com/quran/shared/persistence/TestDatabaseDriver.kt new file mode 100644 index 00000000..b05e42a6 --- /dev/null +++ b/persistence/src/nativeTest/kotlin/com/quran/shared/persistence/TestDatabaseDriver.kt @@ -0,0 +1,12 @@ +package com.quran.shared.persistence + +import app.cash.sqldelight.db.SqlDriver +import app.cash.sqldelight.driver.native.NativeSqliteDriver 
+import kotlin.random.Random + +actual class TestDatabaseDriver { + actual fun createDriver(): SqlDriver { + val uniqueId = Random.nextInt() + return NativeSqliteDriver(QuranDatabase.Schema, "test_${uniqueId}.db") + } +} \ No newline at end of file diff --git a/renovate.json b/renovate.json new file mode 100644 index 00000000..5db72dd6 --- /dev/null +++ b/renovate.json @@ -0,0 +1,6 @@ +{ + "$schema": "https://docs.renovatebot.com/renovate-schema.json", + "extends": [ + "config:recommended" + ] +} diff --git a/settings.gradle.kts b/settings.gradle.kts index daeb85a4..edce2d09 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -21,3 +21,6 @@ include(":syncengine") include(":persistence") include(":umbrella") include(":demo:android") +include(":mutations-definitions") +include(":sync-pipelines") +include(":auth") \ No newline at end of file diff --git a/sync-pipelines/build.gradle.kts b/sync-pipelines/build.gradle.kts new file mode 100644 index 00000000..d64c1c00 --- /dev/null +++ b/sync-pipelines/build.gradle.kts @@ -0,0 +1,78 @@ +import org.jetbrains.kotlin.gradle.dsl.JvmTarget + +plugins { + alias(libs.plugins.kotlin.multiplatform) + alias(libs.plugins.android.library) + alias(libs.plugins.vanniktech.maven.publish) +} + +kotlin { + iosX64() + iosArm64() + iosSimulatorArm64() + + + jvm() + androidTarget { + publishLibraryVariants("release") + compilerOptions { + jvmTarget.set(JvmTarget.JVM_17) + } + } + + sourceSets { + + commonMain.dependencies { + implementation(libs.kotlinx.coroutines.core) + implementation(libs.kotlinx.datetime) + implementation(libs.kermit) + api(projects.syncengine) + api(projects.persistence) + api(projects.mutationsDefinitions) + } + + commonTest.dependencies { + implementation(libs.kotlin.test) + } + } + + sourceSets.all { + languageSettings.optIn("kotlin.time.ExperimentalTime") + } + + // don't show warnings for expect/actual classes + targets.configureEach { + compilations.configureEach { + compileTaskProvider.get().compilerOptions { + freeCompilerArgs.add("-Xexpect-actual-classes") + } + } + } +} + +android { + namespace = "com.quran.shared.sync.pipelines" + compileSdk = libs.versions.android.compile.sdk.get().toInt() + + defaultConfig { + minSdk = libs.versions.android.min.sdk.get().toInt() + } + + compileOptions { + sourceCompatibility = JavaVersion.valueOf("VERSION_${libs.versions.android.java.version.get()}") + targetCompatibility = JavaVersion.valueOf("VERSION_${libs.versions.android.java.version.get()}") + } +} + +mavenPublishing { + signAllPublications() + coordinates(libs.versions.project.group.get(), "sync-pipelines", libs.versions.project.version.get()) + + pom { + name = "Quran.com Sync Integration-Pipeline" + description = "A library for integrating syncengine and persistence" + inceptionYear = "2025" + url = "https://github.com/quran/mobile-sync/" + } +} + diff --git a/sync-pipelines/src/commonMain/kotlin/com/quran/shared/pipeline/SyncEnginePipeline.kt b/sync-pipelines/src/commonMain/kotlin/com/quran/shared/pipeline/SyncEnginePipeline.kt new file mode 100644 index 00000000..cd37ffba --- /dev/null +++ b/sync-pipelines/src/commonMain/kotlin/com/quran/shared/pipeline/SyncEnginePipeline.kt @@ -0,0 +1,601 @@ +package com.quran.shared.pipeline + +import co.touchlab.kermit.Logger +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.persistence.input.RemoteBookmark +import 
com.quran.shared.persistence.input.RemoteCollectionBookmark +import com.quran.shared.persistence.input.RemoteCollection +import com.quran.shared.persistence.input.RemoteNote +import com.quran.shared.persistence.model.Bookmark +import com.quran.shared.persistence.model.CollectionBookmark +import com.quran.shared.persistence.model.Note +import com.quran.shared.persistence.model.Collection as PersistenceCollection +import com.quran.shared.persistence.repository.bookmark.repository.BookmarksSynchronizationRepository +import com.quran.shared.persistence.repository.collectionbookmark.repository.CollectionBookmarksSynchronizationRepository +import com.quran.shared.persistence.repository.collection.repository.CollectionsSynchronizationRepository +import com.quran.shared.persistence.repository.note.repository.NotesSynchronizationRepository +import com.quran.shared.persistence.util.fromPlatform +import com.quran.shared.persistence.util.toPlatform +import com.quran.shared.syncengine.AuthenticationDataFetcher +import com.quran.shared.syncengine.LocalDataFetcher +import com.quran.shared.syncengine.LocalModificationDateFetcher +import com.quran.shared.syncengine.BookmarksSynchronizationConfigurations +import com.quran.shared.syncengine.CollectionBookmarksSynchronizationConfigurations +import com.quran.shared.syncengine.CollectionsSynchronizationConfigurations +import com.quran.shared.syncengine.NotesSynchronizationConfigurations +import com.quran.shared.syncengine.ResultNotifier +import com.quran.shared.syncengine.SynchronizationClient +import com.quran.shared.syncengine.SynchronizationClientBuilder +import com.quran.shared.syncengine.SynchronizationEnvironment +import com.quran.shared.syncengine.model.NoteAyah +import com.quran.shared.syncengine.model.NoteRange +import com.quran.shared.syncengine.model.SyncBookmark +import com.quran.shared.syncengine.model.SyncCollectionBookmark +import com.quran.shared.syncengine.model.SyncCollection +import com.quran.shared.syncengine.model.SyncNote + +interface SyncEngineCallback { + fun synchronizationDone(newLastModificationDate: Long) + fun encounteredError(errorMsg: String) +} + +public class SyncEnginePipeline( + val bookmarksRepository: BookmarksSynchronizationRepository, + val collectionsRepository: CollectionsSynchronizationRepository, + val collectionBookmarksRepository: CollectionBookmarksSynchronizationRepository? = null, + val notesRepository: NotesSynchronizationRepository? 
= null +) { + private lateinit var syncClient: SynchronizationClient + + fun setup( + environment: SynchronizationEnvironment, + localModificationDateFetcher: LocalModificationDateFetcher, + authenticationDataFetcher: AuthenticationDataFetcher, + callback: SyncEngineCallback + ): SynchronizationClient { + + val bookmarksConf = BookmarksSynchronizationConfigurations( + localModificationDateFetcher = localModificationDateFetcher, + resultNotifier = ResultReceiver(bookmarksRepository, callback), + localDataFetcher = RepositoryDataFetcher(bookmarksRepository) + ) + val collectionsConf = CollectionsSynchronizationConfigurations( + localModificationDateFetcher = localModificationDateFetcher, + resultNotifier = CollectionsResultReceiver(collectionsRepository, callback), + localDataFetcher = CollectionsRepositoryDataFetcher(collectionsRepository) + ) + val collectionBookmarksConf = collectionBookmarksRepository?.let { repository -> + CollectionBookmarksSynchronizationConfigurations( + localModificationDateFetcher = localModificationDateFetcher, + resultNotifier = CollectionBookmarksResultReceiver(repository, callback), + localDataFetcher = CollectionBookmarksRepositoryDataFetcher(repository) + ) + } + val notesConf = notesRepository?.let { repository -> + NotesSynchronizationConfigurations( + localModificationDateFetcher = localModificationDateFetcher, + resultNotifier = NotesResultReceiver(repository, callback), + localDataFetcher = NotesRepositoryDataFetcher(repository) + ) + } + val syncClient = SynchronizationClientBuilder.build( + environment = environment, + authFetcher = authenticationDataFetcher, + bookmarksConfigurations = bookmarksConf, + collectionsConfigurations = collectionsConf, + collectionBookmarksConfigurations = collectionBookmarksConf, + notesConfigurations = notesConf + ) + + this.syncClient = syncClient + + return syncClient + } + + fun startListening() { + // TODO: + } +} + +private class RepositoryDataFetcher(val bookmarksRepository: BookmarksSynchronizationRepository): LocalDataFetcher<SyncBookmark> { + + override suspend fun fetchLocalMutations(lastModified: Long): List<LocalModelMutation<SyncBookmark>> { + return bookmarksRepository.fetchMutatedBookmarks().map { repoMutation -> + LocalModelMutation( + model = repoMutation.model.toSyncEngine(), + remoteID = repoMutation.remoteID, + localID = repoMutation.localID, + mutation = repoMutation.mutation + ) + } + } + + override suspend fun checkLocalExistence(remoteIDs: List<String>): Map<String, Boolean> { + return bookmarksRepository.remoteResourcesExist(remoteIDs) + } +} + +private class CollectionsRepositoryDataFetcher( + val collectionsRepository: CollectionsSynchronizationRepository +) : LocalDataFetcher<SyncCollection> { + + override suspend fun fetchLocalMutations(lastModified: Long): List<LocalModelMutation<SyncCollection>> { + return collectionsRepository.fetchMutatedCollections().map { repoMutation -> + LocalModelMutation( + model = repoMutation.model.toSyncEngine(), + remoteID = repoMutation.remoteID, + localID = repoMutation.localID, + mutation = repoMutation.mutation + ) + } + } + + override suspend fun checkLocalExistence(remoteIDs: List<String>): Map<String, Boolean> { + return collectionsRepository.remoteResourcesExist(remoteIDs) + } +} + +private class CollectionBookmarksRepositoryDataFetcher( + val collectionBookmarksRepository: CollectionBookmarksSynchronizationRepository +) : LocalDataFetcher<SyncCollectionBookmark> { + + override suspend fun fetchLocalMutations(lastModified: Long): List<LocalModelMutation<SyncCollectionBookmark>> { + return collectionBookmarksRepository.fetchMutatedCollectionBookmarks().map { repoMutation -> + LocalModelMutation( + model = repoMutation.model.toSyncEngine(), + remoteID =
repoMutation.remoteID, + localID = repoMutation.localID, + mutation = repoMutation.mutation + ) + } + } + + override suspend fun checkLocalExistence(remoteIDs: List<String>): Map<String, Boolean> { + return collectionBookmarksRepository.remoteResourcesExist(remoteIDs) + } +} + +private class NotesRepositoryDataFetcher( + val notesRepository: NotesSynchronizationRepository +) : LocalDataFetcher<SyncNote> { + private val logger = Logger.withTag("NotesRepositoryDataFetcher") + + override suspend fun fetchLocalMutations(lastModified: Long): List<LocalModelMutation<SyncNote>> { + return notesRepository.fetchMutatedNotes(lastModified).mapNotNull { repoMutation -> + val syncNote = repoMutation.model.toSyncEngine() + if (syncNote == null) { + logger.w { "Skipping note mutation with invalid ayah range: localId=${repoMutation.localID}" } + null + } else { + LocalModelMutation( + model = syncNote, + remoteID = repoMutation.remoteID, + localID = repoMutation.localID, + mutation = repoMutation.mutation + ) + } + } + } + + override suspend fun checkLocalExistence(remoteIDs: List<String>): Map<String, Boolean> { + return notesRepository.remoteResourcesExist(remoteIDs) + } +} + +private class ResultReceiver( + val repository: BookmarksSynchronizationRepository, + val callback: SyncEngineCallback): ResultNotifier<SyncBookmark> { + + override suspend fun didFail(message: String) { + callback.encounteredError(message) + } + + override suspend fun didSucceed( + newToken: Long, + newRemoteMutations: List<RemoteModelMutation<SyncBookmark>>, + processedLocalMutations: List<LocalModelMutation<SyncBookmark>> + ) { + val mappedRemotes = newRemoteMutations.map { remoteMutation -> + RemoteModelMutation( + model = remoteMutation.model.toRemoteInput(), + remoteID = remoteMutation.remoteID, + mutation = remoteMutation.mutation + ) + } + val mappedLocals = processedLocalMutations.map { localMutation -> + LocalModelMutation( + model = localMutation.model.toPersistence(), + localID = localMutation.localID, + remoteID = localMutation.remoteID, + mutation = localMutation.mutation + ) + } + + Logger.i { "Persisting ${mappedRemotes.count()} remote updates, and clearing ${mappedLocals.count()} local updates." } + + repository.applyRemoteChanges(mappedRemotes, mappedLocals) + callback.synchronizationDone(newToken) + } +} + +private class CollectionsResultReceiver( + val repository: CollectionsSynchronizationRepository, + val callback: SyncEngineCallback +) : ResultNotifier<SyncCollection> { + + override suspend fun didFail(message: String) { + callback.encounteredError(message) + } + + override suspend fun didSucceed( + newToken: Long, + newRemoteMutations: List<RemoteModelMutation<SyncCollection>>, + processedLocalMutations: List<LocalModelMutation<SyncCollection>> + ) { + val mappedRemotes = newRemoteMutations.map { remoteMutation -> + RemoteModelMutation( + model = remoteMutation.model.toRemoteInput(), + remoteID = remoteMutation.remoteID, + mutation = remoteMutation.mutation + ) + } + val mappedLocals = processedLocalMutations.map { localMutation -> + LocalModelMutation( + model = localMutation.model.toPersistence(), + localID = localMutation.localID, + remoteID = localMutation.remoteID, + mutation = localMutation.mutation + ) + } + + Logger.i { + "Persisting ${mappedRemotes.count()} collection remote updates, " + + "and clearing ${mappedLocals.count()} local updates."
+ } + + repository.applyRemoteChanges(mappedRemotes, mappedLocals) + callback.synchronizationDone(newToken) + } +} + +private class CollectionBookmarksResultReceiver( + val repository: CollectionBookmarksSynchronizationRepository, + val callback: SyncEngineCallback +) : ResultNotifier<SyncCollectionBookmark> { + + override suspend fun didFail(message: String) { + callback.encounteredError(message) + } + + override suspend fun didSucceed( + newToken: Long, + newRemoteMutations: List<RemoteModelMutation<SyncCollectionBookmark>>, + processedLocalMutations: List<LocalModelMutation<SyncCollectionBookmark>> + ) { + val mappedRemotes = newRemoteMutations.map { remoteMutation -> + RemoteModelMutation( + model = remoteMutation.model.toRemoteInput(), + remoteID = remoteMutation.remoteID, + mutation = remoteMutation.mutation + ) + } + val mappedLocals = processedLocalMutations.map { localMutation -> + LocalModelMutation( + model = localMutation.model.toPersistence(localMutation.localID), + localID = localMutation.localID, + remoteID = localMutation.remoteID, + mutation = localMutation.mutation + ) + } + + Logger.i { + "Persisting ${mappedRemotes.count()} collection bookmark remote updates, " + + "and clearing ${mappedLocals.count()} local updates." + } + + repository.applyRemoteChanges(mappedRemotes, mappedLocals) + callback.synchronizationDone(newToken) + } +} + +private class NotesResultReceiver( + val repository: NotesSynchronizationRepository, + val callback: SyncEngineCallback +) : ResultNotifier<SyncNote> { + private val logger = Logger.withTag("NotesResultReceiver") + + override suspend fun didFail(message: String) { + callback.encounteredError(message) + } + + override suspend fun didSucceed( + newToken: Long, + newRemoteMutations: List<RemoteModelMutation<SyncNote>>, + processedLocalMutations: List<LocalModelMutation<SyncNote>> + ) { + val mappedRemotes = newRemoteMutations.mapNotNull { remoteMutation -> + val remoteNote = when (remoteMutation.mutation) { + Mutation.DELETED -> RemoteNote( + body = null, + startAyahId = null, + endAyahId = null, + lastUpdated = remoteMutation.model.lastModified.toPlatform() + ) + Mutation.CREATED, Mutation.MODIFIED -> remoteMutation.model.toRemoteInput() + } + + if (remoteNote == null) { + logger.w { "Skipping remote note mutation without valid ranges: remoteId=${remoteMutation.remoteID}" } + null + } else { + RemoteModelMutation( + model = remoteNote, + remoteID = remoteMutation.remoteID, + mutation = remoteMutation.mutation + ) + } + } + + val mappedLocals = processedLocalMutations.mapNotNull { localMutation -> + val persistenceNote = localMutation.model.toPersistence(localMutation.localID) + if (persistenceNote == null) { + logger.w { "Skipping local note mutation without valid ranges: localId=${localMutation.localID}" } + null + } else { + LocalModelMutation( + model = persistenceNote, + localID = localMutation.localID, + remoteID = localMutation.remoteID, + mutation = localMutation.mutation + ) + } + } + + Logger.i { + "Persisting ${mappedRemotes.count()} note remote updates, " + + "and clearing ${mappedLocals.count()} local updates."
+ } + + repository.applyRemoteChanges(mappedRemotes, mappedLocals) + callback.synchronizationDone(newToken) + } +} + +private fun Bookmark.toSyncEngine(): SyncBookmark { + return when (this) { + is Bookmark.PageBookmark -> { + SyncBookmark.PageBookmark( + page = this.page, + id = this.localId, + lastModified = this.lastUpdated.fromPlatform() + ) + } + is Bookmark.AyahBookmark -> { + SyncBookmark.AyahBookmark( + id = this.localId, + sura = this.sura, + ayah = this.ayah, + lastModified = this.lastUpdated.fromPlatform() + ) + } + } +} + +private fun SyncBookmark.toPersistence(): Bookmark { + return when (this) { + is SyncBookmark.PageBookmark -> + Bookmark.PageBookmark( + page = this.page, + lastUpdated = this.lastModified.toPlatform(), + localId = this.id + ) + is SyncBookmark.AyahBookmark -> + Bookmark.AyahBookmark( + sura = this.sura, + ayah = this.ayah, + lastUpdated = this.lastModified.toPlatform(), + localId = this.id + ) + } +} + +private fun PersistenceCollection.toSyncEngine(): SyncCollection { + return SyncCollection( + id = this.localId, + name = this.name, + lastModified = this.lastUpdated.fromPlatform() + ) +} + +private fun SyncCollection.toPersistence(): PersistenceCollection { + return PersistenceCollection( + name = requireNotNull(this.name) { "Transforming a collection without a name." }, + lastUpdated = this.lastModified.toPlatform(), + localId = this.id + ) +} + +private fun SyncBookmark.toRemoteInput(): RemoteBookmark { + return when (this) { + is SyncBookmark.PageBookmark -> + RemoteBookmark.Page( + page = this.page, + lastUpdated = this.lastModified.toPlatform() + ) + is SyncBookmark.AyahBookmark -> + RemoteBookmark.Ayah( + sura = this.sura, + ayah = this.ayah, + lastUpdated = this.lastModified.toPlatform() + ) + } +} + +private fun SyncCollection.toRemoteInput(): RemoteCollection { + return RemoteCollection( + name = this.name, + lastUpdated = this.lastModified.toPlatform() + ) +} + +private fun CollectionBookmark.toSyncEngine(): SyncCollectionBookmark { + val collectionId = requireNotNull(collectionRemoteId) { "Collection remote ID is required for sync." 
} + return when (this) { + is CollectionBookmark.PageBookmark -> + SyncCollectionBookmark.PageBookmark( + collectionId = collectionId, + page = this.page, + lastModified = this.lastUpdated.fromPlatform() + ) + is CollectionBookmark.AyahBookmark -> + SyncCollectionBookmark.AyahBookmark( + collectionId = collectionId, + sura = this.sura, + ayah = this.ayah, + lastModified = this.lastUpdated.fromPlatform() + ) + } +} + +private fun SyncCollectionBookmark.toPersistence(localId: String): CollectionBookmark { + val updatedAt = lastModified.toPlatform() + return when (this) { + is SyncCollectionBookmark.PageBookmark -> + CollectionBookmark.PageBookmark( + collectionLocalId = "", + collectionRemoteId = collectionId, + bookmarkLocalId = "", + page = page, + lastUpdated = updatedAt, + localId = localId + ) + is SyncCollectionBookmark.AyahBookmark -> + CollectionBookmark.AyahBookmark( + collectionLocalId = "", + collectionRemoteId = collectionId, + bookmarkLocalId = "", + sura = sura, + ayah = ayah, + lastUpdated = updatedAt, + localId = localId + ) + } +} + +private fun SyncCollectionBookmark.toRemoteInput(): RemoteCollectionBookmark { + val updatedAt = lastModified.toPlatform() + return when (this) { + is SyncCollectionBookmark.PageBookmark -> + RemoteCollectionBookmark.Page( + collectionId = collectionId, + page = page, + lastUpdated = updatedAt, + bookmarkId = bookmarkId + ) + is SyncCollectionBookmark.AyahBookmark -> + RemoteCollectionBookmark.Ayah( + collectionId = collectionId, + sura = sura, + ayah = ayah, + lastUpdated = updatedAt, + bookmarkId = bookmarkId + ) + } +} + +private fun Note.toSyncEngine(): SyncNote? { + val start = ayahIdToSuraAyah(startAyahId) ?: return null + val end = ayahIdToSuraAyah(endAyahId) ?: return null + return SyncNote( + id = localId, + body = body, + ranges = listOf(NoteRange(start = start, end = end)), + lastModified = lastUpdated.fromPlatform() + ) +} + +private fun SyncNote.toPersistence(localId: String): Note? { + val range = primaryRangeOrNull() ?: return null + val startId = suraAyahToAyahId(range.start.sura, range.start.ayah) ?: return null + val endId = suraAyahToAyahId(range.end.sura, range.end.ayah) ?: return null + val noteBody = requireNotNull(body) { "Transforming a note without a body." } + return Note( + body = noteBody, + startAyahId = startId, + endAyahId = endId, + lastUpdated = lastModified.toPlatform(), + localId = localId + ) +} + +private fun SyncNote.toRemoteInput(): RemoteNote? { + val range = primaryRangeOrNull() ?: return null + val startId = suraAyahToAyahId(range.start.sura, range.start.ayah) ?: return null + val endId = suraAyahToAyahId(range.end.sura, range.end.ayah) ?: return null + return RemoteNote( + body = body, + startAyahId = startId, + endAyahId = endId, + lastUpdated = lastModified.toPlatform() + ) +} + +private val notesRangeLogger = Logger.withTag("NotesRangeMapper") + +private fun SyncNote.primaryRangeOrNull(): NoteRange? { + if (ranges.isEmpty()) { + return null + } + if (ranges.size > 1) { + notesRangeLogger.w { "Note contains ${ranges.size} ranges; only the first will be synced. 
noteId=$id" } + } + return ranges.first() +} + +private val suraAyahCounts = intArrayOf( + 7, 286, 200, 176, 120, 165, 206, 75, 129, 109, 123, 111, 43, 52, 99, 128, 111, 110, 98, 135, + 112, 78, 118, 64, 77, 227, 93, 88, 69, 60, 34, 30, 73, 54, 45, 83, 182, 88, 75, 85, 54, 53, + 89, 59, 37, 35, 38, 29, 18, 45, 60, 49, 62, 55, 78, 96, 29, 22, 24, 13, 14, 11, 11, 18, 12, + 12, 30, 52, 52, 44, 28, 28, 20, 56, 40, 31, 50, 40, 46, 42, 29, 19, 36, 25, 22, 17, 19, 26, + 30, 20, 15, 21, 11, 8, 8, 19, 5, 8, 8, 11, 11, 8, 3, 9, 5, 4, 7, 3, 6, 3, 5, 4, 5, 6 +) + +private val suraAyahOffsets: IntArray = run { + val offsets = IntArray(suraAyahCounts.size + 1) + var total = 0 + for (index in suraAyahCounts.indices) { + offsets[index] = total + total += suraAyahCounts[index] + } + offsets[suraAyahCounts.size] = total + offsets +} + +private fun suraAyahToAyahId(sura: Int, ayah: Int): Long? { + if (sura !in 1..suraAyahCounts.size) { + return null + } + val count = suraAyahCounts[sura - 1] + if (ayah !in 1..count) { + return null + } + val offset = suraAyahOffsets[sura - 1] + return (offset + ayah).toLong() +} + +private fun ayahIdToSuraAyah(ayahId: Long): NoteAyah? { + if (ayahId <= 0) { + return null + } + var remaining = ayahId.toInt() + for (index in suraAyahCounts.indices) { + val count = suraAyahCounts[index] + if (remaining <= count) { + return NoteAyah(sura = index + 1, ayah = remaining) + } + remaining -= count + } + return null +} diff --git a/syncengine/build.gradle.kts b/syncengine/build.gradle.kts index 9f019bff..13538bf9 100644 --- a/syncengine/build.gradle.kts +++ b/syncengine/build.gradle.kts @@ -1,38 +1,70 @@ -import com.vanniktech.maven.publish.SonatypeHost - plugins { - alias(libs.plugins.kotlin.multiplatform) - alias(libs.plugins.vanniktech.maven.publish) + alias(libs.plugins.kotlin.multiplatform) + alias(libs.plugins.kotlin.serialization) + alias(libs.plugins.vanniktech.maven.publish) } kotlin { - jvm() - iosX64() - iosArm64() - iosSimulatorArm64() - - sourceSets { - val commonMain by getting { - dependencies { - } - } - - val commonTest by getting { - dependencies { - } - } - } + iosX64() + iosArm64() + iosSimulatorArm64() + + jvm() + + sourceSets { + + commonMain.dependencies { + implementation(libs.kotlinx.coroutines.core) + implementation(libs.kotlinx.serialization.json) + implementation(libs.kotlinx.datetime) + implementation(libs.ktor.client.core) + implementation(libs.ktor.client.content.negotiation) + implementation(libs.ktor.client.logging) + implementation(libs.ktor.serialization.json) + implementation(libs.kermit) + api(projects.mutationsDefinitions) + } + + jvmMain.dependencies { + implementation(libs.ktor.client.okhttp) + } + + val appleMain by creating { + dependsOn(commonMain.get()) + dependencies { + implementation(libs.ktor.client.darwin) + } + } + + iosX64Main.get().dependsOn(appleMain) + iosArm64Main.get().dependsOn(appleMain) + iosSimulatorArm64Main.get().dependsOn(appleMain) + + commonTest.dependencies { + implementation(libs.kotlin.test) + implementation(libs.kotlinx.coroutines.test) + } + } + + // don't show warnings for expect/actual classes + targets.configureEach { + compilations.configureEach { + compileTaskProvider.get().compilerOptions { + freeCompilerArgs.add("-Xexpect-actual-classes") + } + } + } } mavenPublishing { - publishToMavenCentral(SonatypeHost.CENTRAL_PORTAL) - signAllPublications() - coordinates(group.toString(), "syncengine", version.toString()) - - pom { - name = "Quran.com Sync Engine" - description = "A library for synchronizing data with 
Quran.com" - inceptionYear = "2025" - url = "https://github.com/quran/syncengine" - } + publishToMavenCentral() + signAllPublications() + coordinates(libs.versions.project.group.get(), "syncengine", libs.versions.project.version.get()) + + pom { + name = "Quran.com Sync Engine" + description = "A library for synchronizing data with Quran.com" + inceptionYear = "2025" + url = "https://github.com/quran/mobile-sync/" + } } diff --git a/syncengine/src/appleMain/kotlin/com/quran/shared/syncengine/network/HttpClientFactory.apple.kt b/syncengine/src/appleMain/kotlin/com/quran/shared/syncengine/network/HttpClientFactory.apple.kt new file mode 100644 index 00000000..896819f4 --- /dev/null +++ b/syncengine/src/appleMain/kotlin/com/quran/shared/syncengine/network/HttpClientFactory.apple.kt @@ -0,0 +1,21 @@ +package com.quran.shared.syncengine.network + +import io.ktor.client.HttpClient +import io.ktor.client.engine.darwin.Darwin +import io.ktor.client.plugins.contentnegotiation.ContentNegotiation +import io.ktor.client.plugins.logging.LogLevel +import io.ktor.client.plugins.logging.Logging +import io.ktor.serialization.kotlinx.json.json + +actual object HttpClientFactory { + actual fun createHttpClient(): HttpClient { + return HttpClient(Darwin) { + install(ContentNegotiation) { + json() + } + install(Logging) { + level = LogLevel.INFO + } + } + } +} \ No newline at end of file diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/BookmarksSyncAdapter.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/BookmarksSyncAdapter.kt new file mode 100644 index 00000000..967b17ad --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/BookmarksSyncAdapter.kt @@ -0,0 +1,271 @@ +package com.quran.shared.syncengine + +import co.touchlab.kermit.Logger +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.conflict.ConflictDetector +import com.quran.shared.syncengine.conflict.ConflictResolutionResult +import com.quran.shared.syncengine.conflict.ConflictResolver +import com.quran.shared.syncengine.conflict.ConflictDetectionResult +import com.quran.shared.syncengine.conflict.ResourceConflict +import com.quran.shared.syncengine.model.SyncBookmark +import com.quran.shared.syncengine.preprocessing.BookmarksLocalMutationsPreprocessor +import com.quran.shared.syncengine.preprocessing.BookmarksRemoteMutationsPreprocessor +import kotlinx.serialization.json.JsonObject +import kotlinx.serialization.json.buildJsonObject +import kotlinx.serialization.json.contentOrNull +import kotlinx.serialization.json.intOrNull +import kotlinx.serialization.json.jsonPrimitive +import kotlinx.serialization.json.put +import kotlin.time.Instant + +internal class BookmarksSyncAdapter( + private val configurations: BookmarksSynchronizationConfigurations +) : SyncResourceAdapter { + + override val resourceName: String = "BOOKMARK" + override val localModificationDateFetcher: LocalModificationDateFetcher = + configurations.localModificationDateFetcher + + private val logger = Logger.withTag("BookmarksSyncAdapter") + + override suspend fun buildPlan( + lastModificationDate: Long, + remoteMutations: List + ): ResourceSyncPlan { + val localMutations = configurations.localDataFetcher.fetchLocalMutations(lastModificationDate) + logger.i { + "Local data fetched for $resourceName: " + + "lastModificationDate=$lastModificationDate, 
localMutations=${localMutations.size}" + } + val preprocessedLocal = preprocessLocalMutations(localMutations) + logger.d { + "Local mutations preprocessed for $resourceName: " + + "${localMutations.size} -> ${preprocessedLocal.size}" + } + + val parsedRemote = parseRemoteMutations(remoteMutations) + val preprocessedRemote = preprocessRemoteMutations(parsedRemote) + logger.d { + "Remote mutations preprocessed for $resourceName: " + + "${parsedRemote.size} -> ${preprocessedRemote.size}" + } + + val conflictDetection = detectConflicts(preprocessedRemote, preprocessedLocal) + logger.d { + "Conflict detection for $resourceName: " + + "conflicts=${conflictDetection.conflicts.size}, " + + "nonConflictingLocal=${conflictDetection.nonConflictingLocalMutations.size}, " + + "nonConflictingRemote=${conflictDetection.nonConflictingRemoteMutations.size}" + } + + val conflictResolution = resolveConflicts(conflictDetection.conflicts) + logger.d { + "Conflict resolution for $resourceName: " + + "persist=${conflictResolution.mutationsToPersist.size}, " + + "push=${conflictResolution.mutationsToPush.size}" + } + + val mutationsToPush = conflictDetection.nonConflictingLocalMutations + conflictResolution.mutationsToPush + val mutationsToPersist = conflictDetection.nonConflictingRemoteMutations + conflictResolution.mutationsToPersist + + return BookmarksResourceSyncPlan( + localMutationsToClear = preprocessedLocal, + remoteMutationsToPersist = mutationsToPersist, + localMutationsToPush = mutationsToPush + ) + } + + override suspend fun didFail(message: String) { + configurations.resultNotifier.didFail(message) + } + + private fun parseRemoteMutations( + mutations: List + ): List> { + return mutations.mapNotNull { mutation -> + if (!mutation.resource.equals(resourceName, ignoreCase = true)) { + return@mapNotNull null + } + val resourceId = mutation.resourceId + if (resourceId == null) { + logger.w { "Skipping bookmark mutation without resourceId" } + return@mapNotNull null + } + val bookmark = mutation.toSyncBookmark(logger) ?: return@mapNotNull null + RemoteModelMutation( + model = bookmark, + remoteID = resourceId, + mutation = mutation.mutation + ) + } + } + + private fun toSyncMutation(localMutation: LocalModelMutation): SyncMutation { + return SyncMutation( + resource = resourceName, + resourceId = localMutation.remoteID, + mutation = localMutation.mutation, + data = if (localMutation.mutation == Mutation.DELETED) null else localMutation.model.toResourceData(), + timestamp = null + ) + } + + private fun preprocessLocalMutations( + mutations: List> + ): List> { + val preprocessor = BookmarksLocalMutationsPreprocessor() + return preprocessor.preprocess(mutations) + } + + private suspend fun preprocessRemoteMutations( + mutations: List> + ): List> { + val preprocessor = BookmarksRemoteMutationsPreprocessor { remoteIds -> + configurations.localDataFetcher.checkLocalExistence(remoteIds) + } + return preprocessor.preprocess(mutations) + } + + private fun detectConflicts( + remote: List>, + local: List> + ): ConflictDetectionResult { + val conflictDetector = ConflictDetector(remote, local) + return conflictDetector.getConflicts() + } + + private fun resolveConflicts( + conflicts: List> + ): ConflictResolutionResult { + val resolver = ConflictResolver(conflicts) + return resolver.resolve() + } + + private fun mapPushedMutations( + localMutations: List>, + pushedMutations: List + ): List> { + if (localMutations.size != pushedMutations.size) { + val message = "Mismatched pushed mutation counts for $resourceName: " + 
+ "local=${localMutations.size}, remote=${pushedMutations.size}" + logger.e { message } + throw IllegalStateException(message) + } + + return localMutations.mapIndexed { index, localMutation -> + val pushedMutation = pushedMutations[index] + if (!pushedMutation.resource.equals(resourceName, ignoreCase = true)) { + val message = "Unexpected pushed mutation resource=${pushedMutation.resource} for $resourceName" + logger.e { message } + throw IllegalStateException(message) + } + val remoteId = pushedMutation.resourceId + if (remoteId == null) { + val message = "Missing resourceId for pushed mutation at index=$index for $resourceName" + logger.e { message } + throw IllegalStateException(message) + } + if (pushedMutation.mutation != localMutation.mutation) { + logger.w { + "Mutation type mismatch at index=$index for $resourceName: " + + "local=${localMutation.mutation}, remote=${pushedMutation.mutation}" + } + } + + RemoteModelMutation( + model = localMutation.model, + remoteID = remoteId, + mutation = pushedMutation.mutation + ) + } + } + + private inner class BookmarksResourceSyncPlan( + private val localMutationsToClear: List>, + private val remoteMutationsToPersist: List>, + private val localMutationsToPush: List> + ) : ResourceSyncPlan { + override val resourceName: String = this@BookmarksSyncAdapter.resourceName + + override fun mutationsToPush(): List { + return localMutationsToPush.map { toSyncMutation(it) } + } + + override suspend fun complete(newToken: Long, pushedMutations: List) { + val mappedPushed = mapPushedMutations(localMutationsToPush, pushedMutations) + val preprocessedPushed = preprocessRemoteMutations(mappedPushed) + val finalRemoteMutations = remoteMutationsToPersist + preprocessedPushed + configurations.resultNotifier.didSucceed( + newToken, + finalRemoteMutations, + localMutationsToClear + ) + } + } +} + +private fun SyncMutation.toSyncBookmark(logger: Logger): SyncBookmark? { + val data = data ?: return null + val id = resourceId ?: return null + val normalizedType = data.stringOrNull("bookmarkType") ?: data.stringOrNull("type") + val lastModified = Instant.fromEpochMilliseconds(timestamp ?: 0) + return when (normalizedType?.lowercase()) { + "page" -> { + val page = data.intOrNull("key") + if (page == null) { + logger.w { "Skipping bookmark mutation without page key: resourceId=$resourceId" } + null + } else { + SyncBookmark.PageBookmark( + id = id, + page = page, + lastModified = lastModified + ) + } + } + "ayah" -> { + val sura = data.intOrNull("key") + val ayah = data.intOrNull("verseNumber") + if (sura != null && ayah != null) { + SyncBookmark.AyahBookmark( + id = id, + sura = sura, + ayah = ayah, + lastModified = lastModified + ) + } else { + null + } + } + else -> { + logger.w { "Skipping bookmark mutation with unsupported type=$normalizedType: resourceId=$resourceId" } + null + } + } +} + +private fun SyncBookmark.toResourceData(): JsonObject { + return when (this) { + is SyncBookmark.PageBookmark -> + buildJsonObject { + put("type", "page") + put("key", page) + put("mushaf", 1) + } + is SyncBookmark.AyahBookmark -> + buildJsonObject { + put("type", "ayah") + put("key", sura) + put("verseNumber", ayah) + put("mushaf", 1) + } + } +} + +private fun JsonObject.stringOrNull(key: String): String? = + this[key]?.jsonPrimitive?.contentOrNull + +private fun JsonObject.intOrNull(key: String): Int? 
= + this[key]?.jsonPrimitive?.intOrNull diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/BookmarksSynchronizationExecutor.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/BookmarksSynchronizationExecutor.kt new file mode 100644 index 00000000..dd647190 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/BookmarksSynchronizationExecutor.kt @@ -0,0 +1,138 @@ +package com.quran.shared.syncengine + +import co.touchlab.kermit.Logger +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.conflict.ConflictDetectionResult +import com.quran.shared.syncengine.conflict.ConflictDetector +import com.quran.shared.syncengine.conflict.ConflictResolutionResult +import com.quran.shared.syncengine.conflict.ConflictResolver +import com.quran.shared.syncengine.conflict.ResourceConflict +import com.quran.shared.syncengine.model.SyncBookmark +import com.quran.shared.syncengine.preprocessing.BookmarksLocalMutationsPreprocessor +import com.quran.shared.syncengine.preprocessing.BookmarksRemoteMutationsPreprocessor + +/** + * Pure business logic executor for bookmark synchronization operations. + * Contains no external dependencies and is fully testable. + */ +class BookmarksSynchronizationExecutor { + + private val logger = Logger.withTag("SynchronizationExecutor") + + // Pipeline Step Data Classes + data class PipelineInitData( + val lastModificationDate: Long, + val localMutations: List> + ) + + data class FetchedRemoteData( + val remoteMutations: List>, + val lastModificationDate: Long + ) + + data class PushResultData( + val pushedMutations: List>, + val lastModificationDate: Long + ) + + data class PipelineResult( + val lastModificationDate: Long, + val remoteMutations: List>, + val localMutations: List> + ) + + /** + * Executes the complete synchronization pipeline. + * + * @param fetchLocal Function to fetch local data (last modification date and local mutations) + * @param fetchRemote Function to fetch remote mutations + * @param checkLocalExistence Function to check if remote resources exist locally + * @param pushLocal Function to push local mutations + * @return PipelineResult containing the final state + */ + suspend fun executePipeline( + fetchLocal: suspend () -> PipelineInitData, + fetchRemote: suspend (Long) -> FetchedRemoteData, + checkLocalExistence: suspend (List) -> Map, + pushLocal: suspend (List>, Long) -> PushResultData + ): PipelineResult { + logger.i { "Starting synchronization execution for bookmarks." 
} + + val pipelineData = fetchLocal() + logger.i { "Initialized with lastModificationDate=${pipelineData.lastModificationDate}, localMutations=${pipelineData.localMutations.size}" } + + val preprocessedLocalMutations = preprocessLocalMutations(pipelineData.localMutations) + logger.d { "Local mutations preprocessed: ${pipelineData.localMutations.size} -> ${preprocessedLocalMutations.size}" } + + val fetchedData = fetchRemote(pipelineData.lastModificationDate) + logger.d { "Remote mutations fetched: ${fetchedData.remoteMutations.size}, new lastModificationDate=${fetchedData.lastModificationDate}" } + + val preprocessedRemoteMutations = preprocessRemoteMutations(fetchedData.remoteMutations, checkLocalExistence) + logger.d { "Remote mutations preprocessed: ${fetchedData.remoteMutations.size} -> ${preprocessedRemoteMutations.size}" } + + val conflictDetectionResult = detectConflicts(preprocessedRemoteMutations, preprocessedLocalMutations) + logger.d { "Conflict detection completed: ${conflictDetectionResult.conflicts.size} conflicts found, ${conflictDetectionResult.nonConflictingLocalMutations.size} non-conflicting local mutations, ${conflictDetectionResult.nonConflictingRemoteMutations.size} non-conflicting remote mutations" } + + val conflictResolutionResult = resolveConflicts(conflictDetectionResult.conflicts) + logger.d { "Conflict resolution completed: ${conflictResolutionResult.mutationsToPersist.size} mutations to persist, ${conflictResolutionResult.mutationsToPush.size} mutations to push" } + + val mutationsToPush = conflictDetectionResult.nonConflictingLocalMutations + conflictResolutionResult.mutationsToPush + logger.i { "Pushing ${mutationsToPush.size} local mutations to server" } + + val pushResult = pushLocal(mutationsToPush, fetchedData.lastModificationDate) + logger.d { "Push completed: received ${pushResult.pushedMutations.size} pushed remote mutations, new lastModificationDate=${pushResult.lastModificationDate}" } + + val preprocessedPushedMutations = preprocessRemoteMutations(pushResult.pushedMutations, checkLocalExistence) + logger.d { "Pushed mutations preprocessed: ${pushResult.pushedMutations.size} -> ${preprocessedPushedMutations.size}" } + + val finalRemoteMutations = combineRemoteMutations( + conflictDetectionResult.nonConflictingRemoteMutations, + conflictResolutionResult.mutationsToPersist, + preprocessedPushedMutations + ) + + logger.i { "Synchronization completed successfully: ${finalRemoteMutations.size} remote mutations to persist, ${preprocessedLocalMutations.size} local mutations to clear" } + return PipelineResult( + lastModificationDate = pushResult.lastModificationDate, + remoteMutations = finalRemoteMutations, + localMutations = preprocessedLocalMutations + ) + } + + private fun preprocessLocalMutations( + localMutations: List> + ): List> { + val preprocessor = BookmarksLocalMutationsPreprocessor() + return preprocessor.preprocess(localMutations) + } + + private suspend fun preprocessRemoteMutations( + remoteMutations: List>, + checkLocalExistence: suspend (List) -> Map + ): List> { + val preprocessor = BookmarksRemoteMutationsPreprocessor(checkLocalExistence) + return preprocessor.preprocess(remoteMutations) + } + + private fun detectConflicts( + remoteMutations: List>, + localMutations: List> + ): ConflictDetectionResult { + val conflictDetector = ConflictDetector(remoteMutations, localMutations) + return conflictDetector.getConflicts() + } + + private fun resolveConflicts(conflicts: List>): ConflictResolutionResult { + val conflictResolver = 
ConflictResolver(conflicts) + return conflictResolver.resolve() + } + + private fun combineRemoteMutations( + otherRemoteMutations: List>, + mutationsToPersist: List>, + pushedMutations: List> + ): List> { + return otherRemoteMutations + mutationsToPersist + pushedMutations + } +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/CollectionBookmarksSyncAdapter.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/CollectionBookmarksSyncAdapter.kt new file mode 100644 index 00000000..a187fc37 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/CollectionBookmarksSyncAdapter.kt @@ -0,0 +1,283 @@ +package com.quran.shared.syncengine + +import co.touchlab.kermit.Logger +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.conflict.CollectionBookmarksConflictDetector +import com.quran.shared.syncengine.conflict.CollectionBookmarksConflictResolver +import com.quran.shared.syncengine.conflict.ConflictDetectionResult +import com.quran.shared.syncengine.conflict.ConflictResolutionResult +import com.quran.shared.syncengine.conflict.ResourceConflict +import com.quran.shared.syncengine.model.SyncCollectionBookmark +import com.quran.shared.syncengine.preprocessing.CollectionBookmarksRemoteMutationsPreprocessor +import kotlinx.serialization.json.JsonObject +import kotlinx.serialization.json.buildJsonObject +import kotlinx.serialization.json.contentOrNull +import kotlinx.serialization.json.intOrNull +import kotlinx.serialization.json.jsonPrimitive +import kotlinx.serialization.json.put +import kotlin.time.Instant + +internal class CollectionBookmarksSyncAdapter( + private val configurations: CollectionBookmarksSynchronizationConfigurations +) : SyncResourceAdapter { + + override val resourceName: String = "COLLECTION_BOOKMARK" + override val localModificationDateFetcher: LocalModificationDateFetcher = + configurations.localModificationDateFetcher + + private val logger = Logger.withTag("CollectionBookmarksSyncAdapter") + + override suspend fun buildPlan( + lastModificationDate: Long, + remoteMutations: List + ): ResourceSyncPlan { + val localMutations = configurations.localDataFetcher.fetchLocalMutations(lastModificationDate) + logger.i { + "Local data fetched for $resourceName: " + + "lastModificationDate=$lastModificationDate, localMutations=${localMutations.size}" + } + + val parsedRemote = parseRemoteMutations(remoteMutations) + val preprocessedRemote = preprocessRemoteMutations(parsedRemote) + logger.d { + "Remote mutations preprocessed for $resourceName: " + + "${parsedRemote.size} -> ${preprocessedRemote.size}" + } + + val conflictDetection = detectConflicts(preprocessedRemote, localMutations) + logger.d { + "Conflict detection for $resourceName: " + + "conflicts=${conflictDetection.conflicts.size}, " + + "nonConflictingLocal=${conflictDetection.nonConflictingLocalMutations.size}, " + + "nonConflictingRemote=${conflictDetection.nonConflictingRemoteMutations.size}" + } + + val conflictResolution = resolveConflicts(conflictDetection.conflicts) + logger.d { + "Conflict resolution for $resourceName: " + + "persist=${conflictResolution.mutationsToPersist.size}, " + + "push=${conflictResolution.mutationsToPush.size}" + } + + val mutationsToPush = conflictDetection.nonConflictingLocalMutations + conflictResolution.mutationsToPush + val mutationsToPersist = conflictDetection.nonConflictingRemoteMutations + 
conflictResolution.mutationsToPersist + + return CollectionBookmarksResourceSyncPlan( + localMutationsToClear = localMutations, + remoteMutationsToPersist = mutationsToPersist, + localMutationsToPush = mutationsToPush + ) + } + + override suspend fun didFail(message: String) { + configurations.resultNotifier.didFail(message) + } + + private fun parseRemoteMutations( + mutations: List + ): List> { + return mutations.mapNotNull { mutation -> + if (!mutation.resource.equals(resourceName, ignoreCase = true)) { + return@mapNotNull null + } + val resourceId = mutation.resourceId + if (resourceId == null) { + logger.w { "Skipping collection bookmark mutation without resourceId" } + return@mapNotNull null + } + val collectionBookmark = mutation.toSyncCollectionBookmark(logger) ?: return@mapNotNull null + RemoteModelMutation( + model = collectionBookmark, + remoteID = resourceId, + mutation = mutation.mutation + ) + } + } + + private fun toSyncMutation(localMutation: LocalModelMutation): SyncMutation { + return SyncMutation( + resource = resourceName, + resourceId = localMutation.remoteID, + mutation = localMutation.mutation, + data = localMutation.model.toResourceData(), + timestamp = null + ) + } + + private suspend fun preprocessRemoteMutations( + mutations: List> + ): List> { + val preprocessor = CollectionBookmarksRemoteMutationsPreprocessor { remoteIds -> + configurations.localDataFetcher.checkLocalExistence(remoteIds) + } + return preprocessor.preprocess(mutations) + } + + private fun detectConflicts( + remote: List>, + local: List> + ): ConflictDetectionResult { + val conflictDetector = CollectionBookmarksConflictDetector(remote, local) + return conflictDetector.getConflicts() + } + + private fun resolveConflicts( + conflicts: List> + ): ConflictResolutionResult { + val resolver = CollectionBookmarksConflictResolver(conflicts) + return resolver.resolve() + } + + private fun mapPushedMutations( + localMutations: List>, + pushedMutations: List + ): List> { + if (localMutations.size != pushedMutations.size) { + val message = "Mismatched pushed mutation counts for $resourceName: " + + "local=${localMutations.size}, remote=${pushedMutations.size}" + logger.e { message } + throw IllegalStateException(message) + } + + return localMutations.mapIndexed { index, localMutation -> + val pushedMutation = pushedMutations[index] + if (!pushedMutation.resource.equals(resourceName, ignoreCase = true)) { + val message = "Unexpected pushed mutation resource=${pushedMutation.resource} for $resourceName" + logger.e { message } + throw IllegalStateException(message) + } + val remoteId = pushedMutation.resourceId + if (remoteId == null) { + val message = "Missing resourceId for pushed mutation at index=$index for $resourceName" + logger.e { message } + throw IllegalStateException(message) + } + if (pushedMutation.mutation != localMutation.mutation) { + logger.w { + "Mutation type mismatch at index=$index for $resourceName: " + + "local=${localMutation.mutation}, remote=${pushedMutation.mutation}" + } + } + + RemoteModelMutation( + model = localMutation.model, + remoteID = remoteId, + mutation = pushedMutation.mutation + ) + } + } + + private inner class CollectionBookmarksResourceSyncPlan( + private val localMutationsToClear: List>, + private val remoteMutationsToPersist: List>, + private val localMutationsToPush: List> + ) : ResourceSyncPlan { + override val resourceName: String = this@CollectionBookmarksSyncAdapter.resourceName + + override fun mutationsToPush(): List { + return localMutationsToPush.map { 
toSyncMutation(it) } + } + + override suspend fun complete(newToken: Long, pushedMutations: List) { + val mappedPushed = mapPushedMutations(localMutationsToPush, pushedMutations) + val preprocessedPushed = preprocessRemoteMutations(mappedPushed) + val finalRemoteMutations = remoteMutationsToPersist + preprocessedPushed + configurations.resultNotifier.didSucceed( + newToken, + finalRemoteMutations, + localMutationsToClear + ) + } + } +} + +private fun SyncMutation.toSyncCollectionBookmark(logger: Logger): SyncCollectionBookmark? { + val data = data ?: return null + val collectionId = data.stringOrNull("collectionId") + if (collectionId.isNullOrEmpty()) { + logger.w { "Skipping collection bookmark mutation without collectionId: resourceId=$resourceId" } + return null + } + val normalizedType = data.stringOrNull("bookmarkType") ?: data.stringOrNull("type") + val lastModified = Instant.fromEpochMilliseconds(timestamp ?: 0) + val bookmarkId = data.stringOrNull("bookmarkId") + ?: data.stringOrNull("bookmark_id") + ?: parseBookmarkId(resourceId, collectionId) + return when (normalizedType?.lowercase()) { + "page" -> { + val page = data.intOrNull("key") + if (page == null) { + logger.w { "Skipping collection bookmark mutation without page key: resourceId=$resourceId" } + null + } else { + SyncCollectionBookmark.PageBookmark( + collectionId = collectionId, + page = page, + lastModified = lastModified, + bookmarkId = bookmarkId + ) + } + } + "ayah" -> { + val sura = data.intOrNull("key") + val ayah = data.intOrNull("verseNumber") + if (sura != null && ayah != null) { + SyncCollectionBookmark.AyahBookmark( + collectionId = collectionId, + sura = sura, + ayah = ayah, + lastModified = lastModified, + bookmarkId = bookmarkId + ) + } else { + null + } + } + else -> { + logger.w { "Skipping collection bookmark mutation with unsupported type=$normalizedType: resourceId=$resourceId" } + null + } + } +} + +private fun SyncCollectionBookmark.toResourceData(): JsonObject { + return when (this) { + is SyncCollectionBookmark.PageBookmark -> + buildJsonObject { + put("collectionId", collectionId) + put("type", "page") + put("key", page) + put("mushaf", 1) + bookmarkId?.let { put("bookmarkId", it) } + } + is SyncCollectionBookmark.AyahBookmark -> + buildJsonObject { + put("collectionId", collectionId) + put("type", "ayah") + put("key", sura) + put("verseNumber", ayah) + put("mushaf", 1) + bookmarkId?.let { put("bookmarkId", it) } + } + } +} + +private fun parseBookmarkId(resourceId: String?, collectionId: String): String? { + if (resourceId.isNullOrEmpty()) { + return null + } + val prefix = "$collectionId-" + return if (resourceId.startsWith(prefix)) { + resourceId.removePrefix(prefix) + } else { + null + } +} + +private fun JsonObject.stringOrNull(key: String): String? = + this[key]?.jsonPrimitive?.contentOrNull + +private fun JsonObject.intOrNull(key: String): Int? 
= + this[key]?.jsonPrimitive?.intOrNull diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/CollectionsSyncAdapter.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/CollectionsSyncAdapter.kt new file mode 100644 index 00000000..bd593f3b --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/CollectionsSyncAdapter.kt @@ -0,0 +1,242 @@ +package com.quran.shared.syncengine + +import co.touchlab.kermit.Logger +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.conflict.CollectionsConflictDetector +import com.quran.shared.syncengine.conflict.CollectionsConflictResolver +import com.quran.shared.syncengine.conflict.ConflictDetectionResult +import com.quran.shared.syncengine.conflict.ConflictResolutionResult +import com.quran.shared.syncengine.conflict.ResourceConflict +import com.quran.shared.syncengine.model.SyncCollection +import com.quran.shared.syncengine.preprocessing.CollectionsLocalMutationsPreprocessor +import com.quran.shared.syncengine.preprocessing.CollectionsRemoteMutationsPreprocessor +import kotlinx.serialization.json.JsonObject +import kotlinx.serialization.json.buildJsonObject +import kotlinx.serialization.json.contentOrNull +import kotlinx.serialization.json.jsonPrimitive +import kotlinx.serialization.json.put +import kotlin.time.Instant + +internal class CollectionsSyncAdapter( + private val configurations: CollectionsSynchronizationConfigurations +) : SyncResourceAdapter { + + override val resourceName: String = "COLLECTION" + override val localModificationDateFetcher: LocalModificationDateFetcher = + configurations.localModificationDateFetcher + + private val logger = Logger.withTag("CollectionsSyncAdapter") + + override suspend fun buildPlan( + lastModificationDate: Long, + remoteMutations: List + ): ResourceSyncPlan { + val localMutations = configurations.localDataFetcher.fetchLocalMutations(lastModificationDate) + logger.i { + "Local data fetched for $resourceName: " + + "lastModificationDate=$lastModificationDate, localMutations=${localMutations.size}" + } + val preprocessedLocal = preprocessLocalMutations(localMutations) + logger.d { + "Local mutations preprocessed for $resourceName: " + + "${localMutations.size} -> ${preprocessedLocal.size}" + } + + val parsedRemote = parseRemoteMutations(remoteMutations) + val preprocessedRemote = preprocessRemoteMutations(parsedRemote) + logger.d { + "Remote mutations preprocessed for $resourceName: " + + "${parsedRemote.size} -> ${preprocessedRemote.size}" + } + + val conflictDetection = detectConflicts(preprocessedRemote, preprocessedLocal) + logger.d { + "Conflict detection for $resourceName: " + + "conflicts=${conflictDetection.conflicts.size}, " + + "nonConflictingLocal=${conflictDetection.nonConflictingLocalMutations.size}, " + + "nonConflictingRemote=${conflictDetection.nonConflictingRemoteMutations.size}" + } + + val conflictResolution = resolveConflicts(conflictDetection.conflicts) + logger.d { + "Conflict resolution for $resourceName: " + + "persist=${conflictResolution.mutationsToPersist.size}, " + + "push=${conflictResolution.mutationsToPush.size}" + } + + val mutationsToPush = conflictDetection.nonConflictingLocalMutations + conflictResolution.mutationsToPush + val mutationsToPersist = conflictDetection.nonConflictingRemoteMutations + conflictResolution.mutationsToPersist + + return CollectionsResourceSyncPlan( + 
localMutationsToClear = preprocessedLocal, + remoteMutationsToPersist = mutationsToPersist, + localMutationsToPush = mutationsToPush + ) + } + + override suspend fun didFail(message: String) { + configurations.resultNotifier.didFail(message) + } + + private fun parseRemoteMutations( + mutations: List + ): List> { + return mutations.mapNotNull { mutation -> + if (!mutation.resource.equals(resourceName, ignoreCase = true)) { + return@mapNotNull null + } + val resourceId = mutation.resourceId + if (resourceId == null) { + logger.w { "Skipping collection mutation without resourceId" } + return@mapNotNull null + } + val collection = mutation.toSyncCollection(logger) ?: return@mapNotNull null + RemoteModelMutation( + model = collection, + remoteID = resourceId, + mutation = mutation.mutation + ) + } + } + + private fun toSyncMutation(localMutation: LocalModelMutation): SyncMutation { + return SyncMutation( + resource = resourceName, + resourceId = localMutation.remoteID, + mutation = localMutation.mutation, + data = if (localMutation.mutation == Mutation.DELETED) null else localMutation.model.toResourceData(), + timestamp = null + ) + } + + private fun preprocessLocalMutations( + mutations: List> + ): List> { + val preprocessor = CollectionsLocalMutationsPreprocessor() + return preprocessor.preprocess(mutations) + } + + private suspend fun preprocessRemoteMutations( + mutations: List> + ): List> { + val preprocessor = CollectionsRemoteMutationsPreprocessor { remoteIds -> + configurations.localDataFetcher.checkLocalExistence(remoteIds) + } + return preprocessor.preprocess(mutations) + } + + private fun detectConflicts( + remote: List>, + local: List> + ): ConflictDetectionResult { + val conflictDetector = CollectionsConflictDetector(remote, local) + return conflictDetector.getConflicts() + } + + private fun resolveConflicts( + conflicts: List> + ): ConflictResolutionResult { + val resolver = CollectionsConflictResolver(conflicts) + return resolver.resolve() + } + + private fun mapPushedMutations( + localMutations: List>, + pushedMutations: List + ): List> { + if (localMutations.size != pushedMutations.size) { + val message = "Mismatched pushed mutation counts for $resourceName: " + + "local=${localMutations.size}, remote=${pushedMutations.size}" + logger.e { message } + throw IllegalStateException(message) + } + + return localMutations.mapIndexed { index, localMutation -> + val pushedMutation = pushedMutations[index] + if (!pushedMutation.resource.equals(resourceName, ignoreCase = true)) { + val message = "Unexpected pushed mutation resource=${pushedMutation.resource} for $resourceName" + logger.e { message } + throw IllegalStateException(message) + } + val remoteId = pushedMutation.resourceId + if (remoteId == null) { + val message = "Missing resourceId for pushed mutation at index=$index for $resourceName" + logger.e { message } + throw IllegalStateException(message) + } + if (pushedMutation.mutation != localMutation.mutation) { + logger.w { + "Mutation type mismatch at index=$index for $resourceName: " + + "local=${localMutation.mutation}, remote=${pushedMutation.mutation}" + } + } + + RemoteModelMutation( + model = localMutation.model, + remoteID = remoteId, + mutation = pushedMutation.mutation + ) + } + } + + private inner class CollectionsResourceSyncPlan( + private val localMutationsToClear: List>, + private val remoteMutationsToPersist: List>, + private val localMutationsToPush: List> + ) : ResourceSyncPlan { + override val resourceName: String = this@CollectionsSyncAdapter.resourceName + 
+ override fun mutationsToPush(): List { + return localMutationsToPush.map { toSyncMutation(it) } + } + + override suspend fun complete(newToken: Long, pushedMutations: List) { + val mappedPushed = mapPushedMutations(localMutationsToPush, pushedMutations) + val preprocessedPushed = preprocessRemoteMutations(mappedPushed) + val finalRemoteMutations = remoteMutationsToPersist + preprocessedPushed + configurations.resultNotifier.didSucceed( + newToken, + finalRemoteMutations, + localMutationsToClear + ) + } + } +} + +private fun SyncMutation.toSyncCollection(logger: Logger): SyncCollection? { + val id = resourceId ?: return null + val lastModified = Instant.fromEpochMilliseconds(timestamp ?: 0) + return when (mutation) { + Mutation.DELETED -> SyncCollection( + id = id, + name = null, + lastModified = lastModified + ) + Mutation.CREATED, Mutation.MODIFIED -> { + val data = data ?: return null + val name = data.stringOrNull("name") + if (name.isNullOrEmpty()) { + logger.w { "Skipping collection mutation without name: resourceId=$resourceId" } + null + } else { + SyncCollection( + id = id, + name = name, + lastModified = lastModified + ) + } + } + } +} + +private fun SyncCollection.toResourceData(): JsonObject { + val collectionName = requireNotNull(name) { "Collection name is required for resource data." } + return buildJsonObject { + put("name", collectionName) + } +} + +private fun JsonObject.stringOrNull(key: String): String? = + this[key]?.jsonPrimitive?.contentOrNull diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/NotesSyncAdapter.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/NotesSyncAdapter.kt new file mode 100644 index 00000000..536fc91b --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/NotesSyncAdapter.kt @@ -0,0 +1,283 @@ +package com.quran.shared.syncengine + +import co.touchlab.kermit.Logger +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.conflict.ConflictDetectionResult +import com.quran.shared.syncengine.conflict.ConflictResolutionResult +import com.quran.shared.syncengine.conflict.NotesConflictDetector +import com.quran.shared.syncengine.conflict.NotesConflictResolver +import com.quran.shared.syncengine.conflict.ResourceConflict +import com.quran.shared.syncengine.model.SyncNote +import com.quran.shared.syncengine.model.parseNoteRange +import com.quran.shared.syncengine.model.toRangeString +import com.quran.shared.syncengine.preprocessing.NotesLocalMutationsPreprocessor +import com.quran.shared.syncengine.preprocessing.NotesRemoteMutationsPreprocessor +import kotlinx.serialization.json.JsonObject +import kotlinx.serialization.json.add +import kotlinx.serialization.json.buildJsonArray +import kotlinx.serialization.json.buildJsonObject +import kotlinx.serialization.json.contentOrNull +import kotlinx.serialization.json.jsonArray +import kotlinx.serialization.json.jsonPrimitive +import kotlinx.serialization.json.put +import kotlin.time.Instant + +internal class NotesSyncAdapter( + private val configurations: NotesSynchronizationConfigurations +) : SyncResourceAdapter { + + override val resourceName: String = "NOTE" + override val localModificationDateFetcher: LocalModificationDateFetcher = + configurations.localModificationDateFetcher + + private val logger = Logger.withTag("NotesSyncAdapter") + + override suspend fun buildPlan( + lastModificationDate: Long, + 
remoteMutations: List + ): ResourceSyncPlan { + val localMutations = configurations.localDataFetcher.fetchLocalMutations(lastModificationDate) + logger.i { + "Local data fetched for $resourceName: " + + "lastModificationDate=$lastModificationDate, localMutations=${localMutations.size}" + } + val preprocessedLocal = preprocessLocalMutations(localMutations) + logger.d { + "Local mutations preprocessed for $resourceName: " + + "${localMutations.size} -> ${preprocessedLocal.size}" + } + + val parsedRemote = parseRemoteMutations(remoteMutations) + val preprocessedRemote = preprocessRemoteMutations(parsedRemote) + logger.d { + "Remote mutations preprocessed for $resourceName: " + + "${parsedRemote.size} -> ${preprocessedRemote.size}" + } + + val conflictDetection = detectConflicts(preprocessedRemote, preprocessedLocal) + logger.d { + "Conflict detection for $resourceName: " + + "conflicts=${conflictDetection.conflicts.size}, " + + "nonConflictingLocal=${conflictDetection.nonConflictingLocalMutations.size}, " + + "nonConflictingRemote=${conflictDetection.nonConflictingRemoteMutations.size}" + } + + val conflictResolution = resolveConflicts(conflictDetection.conflicts) + logger.d { + "Conflict resolution for $resourceName: " + + "persist=${conflictResolution.mutationsToPersist.size}, " + + "push=${conflictResolution.mutationsToPush.size}" + } + + val mutationsToPush = conflictDetection.nonConflictingLocalMutations + conflictResolution.mutationsToPush + val mutationsToPersist = conflictDetection.nonConflictingRemoteMutations + conflictResolution.mutationsToPersist + + return NotesResourceSyncPlan( + localMutationsToClear = preprocessedLocal, + remoteMutationsToPersist = mutationsToPersist, + localMutationsToPush = mutationsToPush + ) + } + + override suspend fun didFail(message: String) { + configurations.resultNotifier.didFail(message) + } + + private fun parseRemoteMutations( + mutations: List + ): List> { + return mutations.mapNotNull { mutation -> + if (!mutation.resource.equals(resourceName, ignoreCase = true)) { + return@mapNotNull null + } + val resourceId = mutation.resourceId + if (resourceId == null) { + logger.w { "Skipping note mutation without resourceId" } + return@mapNotNull null + } + val note = mutation.toSyncNote(logger) ?: return@mapNotNull null + RemoteModelMutation( + model = note, + remoteID = resourceId, + mutation = mutation.mutation + ) + } + } + + private fun toSyncMutation(localMutation: LocalModelMutation): SyncMutation { + return SyncMutation( + resource = resourceName, + resourceId = localMutation.remoteID, + mutation = localMutation.mutation, + data = if (localMutation.mutation == Mutation.DELETED) null else localMutation.model.toResourceData(), + timestamp = null + ) + } + + private fun preprocessLocalMutations( + mutations: List> + ): List> { + val preprocessor = NotesLocalMutationsPreprocessor() + return preprocessor.preprocess(mutations) + } + + private suspend fun preprocessRemoteMutations( + mutations: List> + ): List> { + val preprocessor = NotesRemoteMutationsPreprocessor { remoteIds -> + configurations.localDataFetcher.checkLocalExistence(remoteIds) + } + return preprocessor.preprocess(mutations) + } + + private fun detectConflicts( + remote: List>, + local: List> + ): ConflictDetectionResult { + val conflictDetector = NotesConflictDetector(remote, local) + return conflictDetector.getConflicts() + } + + private fun resolveConflicts( + conflicts: List> + ): ConflictResolutionResult { + val resolver = NotesConflictResolver(conflicts) + return resolver.resolve() 
+ } + + private fun mapPushedMutations( + localMutations: List>, + pushedMutations: List + ): List> { + if (localMutations.size != pushedMutations.size) { + val message = "Mismatched pushed mutation counts for $resourceName: " + + "local=${localMutations.size}, remote=${pushedMutations.size}" + logger.e { message } + throw IllegalStateException(message) + } + + return localMutations.mapIndexed { index, localMutation -> + val pushedMutation = pushedMutations[index] + if (!pushedMutation.resource.equals(resourceName, ignoreCase = true)) { + val message = "Unexpected pushed mutation resource=${pushedMutation.resource} for $resourceName" + logger.e { message } + throw IllegalStateException(message) + } + val remoteId = pushedMutation.resourceId + if (remoteId == null) { + val message = "Missing resourceId for pushed mutation at index=$index for $resourceName" + logger.e { message } + throw IllegalStateException(message) + } + if (pushedMutation.mutation != localMutation.mutation) { + logger.w { + "Mutation type mismatch at index=$index for $resourceName: " + + "local=${localMutation.mutation}, remote=${pushedMutation.mutation}" + } + } + + RemoteModelMutation( + model = localMutation.model, + remoteID = remoteId, + mutation = pushedMutation.mutation + ) + } + } + + private inner class NotesResourceSyncPlan( + private val localMutationsToClear: List>, + private val remoteMutationsToPersist: List>, + private val localMutationsToPush: List> + ) : ResourceSyncPlan { + override val resourceName: String = this@NotesSyncAdapter.resourceName + + override fun mutationsToPush(): List { + return localMutationsToPush.map { toSyncMutation(it) } + } + + override suspend fun complete(newToken: Long, pushedMutations: List) { + val mappedPushed = mapPushedMutations(localMutationsToPush, pushedMutations) + val preprocessedPushed = preprocessRemoteMutations(mappedPushed) + val finalRemoteMutations = remoteMutationsToPersist + preprocessedPushed + configurations.resultNotifier.didSucceed( + newToken, + finalRemoteMutations, + localMutationsToClear + ) + } + } +} + +private fun SyncMutation.toSyncNote(logger: Logger): SyncNote? { + val id = resourceId ?: return null + val lastModified = Instant.fromEpochMilliseconds(timestamp ?: 0) + if (mutation == Mutation.DELETED) { + return SyncNote( + id = id, + body = null, + ranges = emptyList(), + lastModified = lastModified + ) + } + + val payload = data + if (payload == null) { + logger.w { "Skipping note mutation without data: resourceId=$resourceId" } + return null + } + + val body = payload.stringOrNull("body") + if (body.isNullOrEmpty()) { + logger.w { "Skipping note mutation without body: resourceId=$resourceId" } + return null + } + + val rangeStrings = payload.stringListOrNull("ranges") + if (rangeStrings.isNullOrEmpty()) { + logger.w { "Skipping note mutation without ranges: resourceId=$resourceId" } + return null + } + + val parsedRanges = rangeStrings.mapNotNull { range -> + val parsed = parseNoteRange(range) + if (parsed == null) { + logger.w { "Skipping invalid note range=$range: resourceId=$resourceId" } + } + parsed + } + + if (parsedRanges.isEmpty()) { + logger.w { "Skipping note mutation without parsable ranges: resourceId=$resourceId" } + return null + } + + return SyncNote( + id = id, + body = body, + ranges = parsedRanges, + lastModified = lastModified + ) +} + +private fun SyncNote.toResourceData(): JsonObject { + val noteBody = requireNotNull(body) { "Note body is required for resource data." 
} + require(ranges.isNotEmpty()) { "Note ranges are required for resource data." } + return buildJsonObject { + put("body", noteBody) + put("ranges", buildJsonArray { + ranges.forEach { range -> + add(range.toRangeString()) + } + }) + } +} + +private fun JsonObject.stringOrNull(key: String): String? = + this[key]?.jsonPrimitive?.contentOrNull + +private fun JsonObject.stringListOrNull(key: String): List? { + val jsonArray = this[key]?.jsonArray ?: return null + return jsonArray.mapNotNull { element -> element.jsonPrimitive.contentOrNull } +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/SyncMutation.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/SyncMutation.kt new file mode 100644 index 00000000..97bbb706 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/SyncMutation.kt @@ -0,0 +1,12 @@ +package com.quran.shared.syncengine + +import com.quran.shared.mutations.Mutation +import kotlinx.serialization.json.JsonObject + +data class SyncMutation( + val resource: String, + val resourceId: String?, + val mutation: Mutation, + val data: JsonObject?, + val timestamp: Long? +) diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/SyncResourceAdapter.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/SyncResourceAdapter.kt new file mode 100644 index 00000000..0c4be560 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/SyncResourceAdapter.kt @@ -0,0 +1,21 @@ +package com.quran.shared.syncengine + +interface SyncResourceAdapter { + val resourceName: String + val localModificationDateFetcher: LocalModificationDateFetcher + + suspend fun buildPlan( + lastModificationDate: Long, + remoteMutations: List + ): ResourceSyncPlan + + suspend fun didFail(message: String) +} + +interface ResourceSyncPlan { + val resourceName: String + + fun mutationsToPush(): List + + suspend fun complete(newToken: Long, pushedMutations: List) +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/SynchronizationClient.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/SynchronizationClient.kt new file mode 100644 index 00000000..00f65cff --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/SynchronizationClient.kt @@ -0,0 +1,105 @@ +@file:OptIn(kotlin.time.ExperimentalTime::class) + +package com.quran.shared.syncengine + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.model.SyncBookmark +import com.quran.shared.syncengine.model.SyncCollectionBookmark +import com.quran.shared.syncengine.model.SyncCollection +import com.quran.shared.syncengine.model.SyncNote +import com.quran.shared.syncengine.network.HttpClientFactory +import io.ktor.client.HttpClient + +interface LocalDataFetcher { + /** + * Fetches local mutations that have occurred since the given timestamp (epoch milliseconds). + */ + suspend fun fetchLocalMutations(lastModified: Long): List> + + /** + * Checks if the given remote IDs exist locally. 
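+ *
+ * The sync adapters pass this check through to their remote-mutation preprocessors
+ * (e.g. BookmarksRemoteMutationsPreprocessor), which receive it as a lambda.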
+ * @param remoteIDs List of remote IDs to check + * @return Map of remote ID to boolean indicating if it exists locally + */ + suspend fun checkLocalExistence(remoteIDs: List): Map +} + +interface ResultNotifier { + suspend fun didSucceed( + newToken: Long, + newRemoteMutations: List>, + processedLocalMutations: List> + ) + + suspend fun didFail(message: String) +} + +interface LocalModificationDateFetcher { + /** + * Returns the last local modification timestamp in epoch milliseconds. + */ + suspend fun localLastModificationDate(): Long? +} + +// This will be duplicated per each model type (or generalized), as defined by the BE. +class BookmarksSynchronizationConfigurations( + // Probably, add configurations to select bookmark types to process. + val localDataFetcher: LocalDataFetcher, + val resultNotifier: ResultNotifier, + val localModificationDateFetcher: LocalModificationDateFetcher +) + +class CollectionsSynchronizationConfigurations( + val localDataFetcher: LocalDataFetcher, + val resultNotifier: ResultNotifier, + val localModificationDateFetcher: LocalModificationDateFetcher +) + +class CollectionBookmarksSynchronizationConfigurations( + val localDataFetcher: LocalDataFetcher, + val resultNotifier: ResultNotifier, + val localModificationDateFetcher: LocalModificationDateFetcher +) + +class NotesSynchronizationConfigurations( + val localDataFetcher: LocalDataFetcher, + val resultNotifier: ResultNotifier, + val localModificationDateFetcher: LocalModificationDateFetcher +) + +interface AuthenticationDataFetcher { + suspend fun fetchAuthenticationHeaders(): Map +} + +interface SynchronizationClient { + fun localDataUpdated() + fun applicationStarted() +} + +data class SynchronizationEnvironment(val endPointURL: String) + +object SynchronizationClientBuilder { + fun build( + environment: SynchronizationEnvironment, + authFetcher: AuthenticationDataFetcher, + bookmarksConfigurations: BookmarksSynchronizationConfigurations, + collectionsConfigurations: CollectionsSynchronizationConfigurations? = null, + collectionBookmarksConfigurations: CollectionBookmarksSynchronizationConfigurations? = null, + notesConfigurations: NotesSynchronizationConfigurations? = null, + httpClient: HttpClient? 
= null + ): SynchronizationClient { + val adapters = buildList { + add(BookmarksSyncAdapter(bookmarksConfigurations)) + collectionsConfigurations?.let { add(CollectionsSyncAdapter(it)) } + collectionBookmarksConfigurations?.let { add(CollectionBookmarksSyncAdapter(it)) } + notesConfigurations?.let { add(NotesSyncAdapter(it)) } + } + return SynchronizationClientImpl( + environment, + httpClient ?: HttpClientFactory.createHttpClient(), + adapters, + authFetcher + ) + } +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/SynchronizationClientImpl.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/SynchronizationClientImpl.kt new file mode 100644 index 00000000..601271eb --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/SynchronizationClientImpl.kt @@ -0,0 +1,110 @@ +package com.quran.shared.syncengine + +import co.touchlab.kermit.Logger +import com.quran.shared.syncengine.network.GetMutationsRequest +import com.quran.shared.syncengine.network.MutationsResponse +import com.quran.shared.syncengine.network.PostMutationsRequest +import com.quran.shared.syncengine.scheduling.Scheduler +import com.quran.shared.syncengine.scheduling.Trigger +import com.quran.shared.syncengine.scheduling.createScheduler +import io.ktor.client.HttpClient + +internal class SynchronizationClientImpl( + private val environment: SynchronizationEnvironment, + private val httpClient: HttpClient, + private val resourceAdapters: List, + private val authenticationDataFetcher: AuthenticationDataFetcher): SynchronizationClient { + + private val logger = Logger.withTag("SynchronizationClient") + + private val scheduler: Scheduler = createScheduler( + taskFunction = ::startSyncOperation, + reachedMaximumFailureRetries = { exception -> + val message = "Sync operation failed after maximum retries: ${exception.message}" + logger.e(exception) { message } + resourceAdapters.forEach { adapter -> + adapter.didFail(message) + } + } + ) + + override fun localDataUpdated() { + logger.i { "Local data updated, triggering scheduler" } + scheduler.invoke(Trigger.LOCAL_DATA_MODIFIED) + } + + override fun applicationStarted() { + logger.i { "Application started, triggering scheduler" } + scheduler.invoke(Trigger.APP_REFRESH) + } + + private suspend fun startSyncOperation() { + if (resourceAdapters.isEmpty()) { + logger.w { "No sync resources configured, skipping sync operation" } + return + } + + logger.i { "Starting sync operation for ${resourceAdapters.size} resource(s)" } + + val authHeaders = getAuthHeaders() + // Assume a shared sync token across resources. 
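+ // Concretely, the first adapter's stored modification date seeds the remote fetch below,
+ // and each plan later receives the same pushResponse.lastModificationDate via complete(),
+ // so every configured resource is handed the same token to persist.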
+ val lastModificationDate = resourceAdapters.first() + .localModificationDateFetcher + .localLastModificationDate() ?: 0L + + val resources = resourceAdapters.map { it.resourceName }.distinct() + val remoteResponse = fetchRemoteMutations(lastModificationDate, authHeaders, resources) + + val plans = resourceAdapters.map { adapter -> + adapter.buildPlan(lastModificationDate, remoteResponse.mutations) + } + + val mutationsToPush = plans.flatMap { it.mutationsToPush() } + val pushResponse = pushMutations(mutationsToPush, remoteResponse.lastModificationDate, authHeaders) + + val pushedMutationsByResource = pushResponse.mutations.groupBy { it.resource.uppercase() } + plans.forEach { plan -> + val pushedForResource = pushedMutationsByResource[plan.resourceName.uppercase()].orEmpty() + plan.complete(pushResponse.lastModificationDate, pushedForResource) + } + } + + private suspend fun pushMutations( + mutations: List, + lastModificationDate: Long, + authHeaders: Map + ): MutationsResponse { + if (mutations.isEmpty()) { + logger.d { "No local mutations to push, skipping network request" } + return MutationsResponse(lastModificationDate, listOf()) + } + + logger.i { "Pushing ${mutations.size} local mutations" } + val url = environment.endPointURL + val request = PostMutationsRequest(httpClient, url) + val response = request.postMutations(mutations, lastModificationDate, authHeaders) + logger.i { "Successfully pushed mutations: received ${response.mutations.size} pushed remote mutations" } + return response + } + + private suspend fun getAuthHeaders(): Map { + logger.d { "Fetching fresh authentication headers from external source" } + val headers = authenticationDataFetcher.fetchAuthenticationHeaders() + logger.d { "Authentication headers fetched: ${headers.size} headers" } + return headers + } + + private suspend fun fetchRemoteMutations( + lastModificationDate: Long, + authHeaders: Map, + resources: List + ): MutationsResponse { + logger.d { + "Fetching remote modifications from ${environment.endPointURL} with " + + "lastModificationDate=$lastModificationDate, resources=${resources.joinToString(",")}" + } + val url = environment.endPointURL + val request = GetMutationsRequest(httpClient, url) + return request.getMutations(lastModificationDate, authHeaders, resources) + } +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/CollectionBookmarksConflictDetector.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/CollectionBookmarksConflictDetector.kt new file mode 100644 index 00000000..6ddf6fab --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/CollectionBookmarksConflictDetector.kt @@ -0,0 +1,73 @@ +package com.quran.shared.syncengine.conflict + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.model.SyncCollectionBookmark +import com.quran.shared.syncengine.model.SyncCollectionBookmarkKey +import com.quran.shared.syncengine.model.conflictKey + +/** + * Detects conflicts between local and remote mutations for collection bookmarks. + * + * A conflict is detected whenever a set of local and remote mutations reference the same + * collection-bookmark key, or when a remote mutation matches a local remote ID. 
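+ * Mutations on either side that take part in no conflict are returned untouched in the
+ * non-conflicting lists of the resulting [ConflictDetectionResult].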
+ */ +class CollectionBookmarksConflictDetector( + private val remoteMutations: List>, + private val localMutations: List> +) { + + fun getConflicts(): ConflictDetectionResult { + val remoteMutationsByKey = remoteMutations + .map { mutation -> mutation.model.conflictKey().let { key -> key to mutation } } + .groupBy({ it.first }, { it.second }) + val remoteMutationsByRemoteID = remoteMutations.associateBy { it.remoteID } + + val resourceConflicts = localMutations + .groupBy { mutation -> mutation.model.conflictKey() } + .mapNotNull { (bookmarkKey, localMutations) -> + val conflictingRemoteMutations = findConflictingRemoteMutations( + bookmarkKey, + localMutations, + remoteMutationsByKey, + remoteMutationsByRemoteID + ) + if (conflictingRemoteMutations.isNotEmpty()) { + ResourceConflict( + localMutations = localMutations, + remoteMutations = conflictingRemoteMutations + ) + } else { + null + } + } + + val conflictingRemoteIDs = resourceConflicts + .flatMap { it.remoteMutations } + .map { it.remoteID } + .toSet() + + val conflictingLocalIDs = resourceConflicts + .flatMap { it.localMutations } + .map { it.localID } + .toSet() + + return ConflictDetectionResult( + conflicts = resourceConflicts, + nonConflictingRemoteMutations = remoteMutations.filterNot { conflictingRemoteIDs.contains(it.remoteID) }, + nonConflictingLocalMutations = localMutations.filterNot { conflictingLocalIDs.contains(it.localID) } + ) + } + + private fun findConflictingRemoteMutations( + bookmarkKey: SyncCollectionBookmarkKey, + localMutations: List>, + remoteMutationsByKey: Map>>, + remoteMutationsByRemoteID: Map> + ): List> { + val remoteMutationsById = localMutations.mapNotNull { it.remoteID } + .mapNotNull { remoteMutationsByRemoteID[it] } + + return (remoteMutationsByKey[bookmarkKey].orEmpty() + remoteMutationsById).distinct() + } +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/CollectionBookmarksConflictResolver.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/CollectionBookmarksConflictResolver.kt new file mode 100644 index 00000000..d6a07440 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/CollectionBookmarksConflictResolver.kt @@ -0,0 +1,26 @@ +package com.quran.shared.syncengine.conflict + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.syncengine.model.SyncCollectionBookmark + +/** + * Resolves conflicts between local and remote mutations for collection bookmarks. + * + * The current strategy prefers remote mutations whenever a conflict is detected. 
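+ * Conflicting local mutations are therefore never pushed: [resolve] persists the remote side of
+ * each conflict and returns an empty push list, so the adapter only clears the local entries.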
+ */ +class CollectionBookmarksConflictResolver( + private val conflicts: List> +) { + + fun resolve(): ConflictResolutionResult { + if (conflicts.isEmpty()) { + return ConflictResolutionResult(listOf(), listOf()) + } + + val mutationsToPersist = conflicts.flatMap { it.remoteMutations } + return ConflictResolutionResult( + mutationsToPersist = mutationsToPersist, + mutationsToPush = emptyList>() + ) + } +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/CollectionsConflictDetector.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/CollectionsConflictDetector.kt new file mode 100644 index 00000000..3827ccd1 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/CollectionsConflictDetector.kt @@ -0,0 +1,77 @@ +package com.quran.shared.syncengine.conflict + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.model.SyncCollection +import com.quran.shared.syncengine.model.SyncCollectionKey +import com.quran.shared.syncengine.model.conflictKeyOrNull + +/** + * Detects conflicts between local and remote mutations for collections. + * + * A conflict is detected whenever a set of local and remote mutations reference the same + * collection key, or when a remote mutation matches a local remote ID. + */ +class CollectionsConflictDetector( + private val remoteMutations: List>, + private val localMutations: List> +) { + + fun getConflicts(): ConflictDetectionResult { + val remoteMutationsByKey = remoteMutations + .mapNotNull { mutation -> + mutation.model.conflictKeyOrNull()?.let { key -> key to mutation } + } + .groupBy({ it.first }, { it.second }) + val remoteMutationsByRemoteID = remoteMutations.associateBy { it.remoteID } + + val resourceConflicts = localMutations + .groupBy { mutation -> + mutation.model.conflictKeyOrNull() ?: SyncCollectionKey.LocalId(mutation.localID) + } + .mapNotNull { (collectionKey, localMutations) -> + val conflictingRemoteMutations = findConflictingRemoteMutations( + collectionKey, + localMutations, + remoteMutationsByKey, + remoteMutationsByRemoteID + ) + if (conflictingRemoteMutations.isNotEmpty()) { + ResourceConflict( + localMutations = localMutations, + remoteMutations = conflictingRemoteMutations + ) + } else { + null + } + } + + val conflictingRemoteIDs = resourceConflicts + .flatMap { it.remoteMutations } + .map { it.remoteID } + .toSet() + + val conflictingLocalIDs = resourceConflicts + .flatMap { it.localMutations } + .map { it.localID } + .toSet() + + return ConflictDetectionResult( + conflicts = resourceConflicts, + nonConflictingRemoteMutations = remoteMutations.filterNot { conflictingRemoteIDs.contains(it.remoteID) }, + nonConflictingLocalMutations = localMutations.filterNot { conflictingLocalIDs.contains(it.localID) } + ) + } + + private fun findConflictingRemoteMutations( + collectionKey: SyncCollectionKey, + localMutations: List>, + remoteMutationsByKey: Map>>, + remoteMutationsByRemoteID: Map> + ): List> { + val remoteMutationsById = localMutations.mapNotNull { it.remoteID } + .mapNotNull { remoteMutationsByRemoteID[it] } + + return (remoteMutationsByKey[collectionKey].orEmpty() + remoteMutationsById).distinct() + } +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/CollectionsConflictResolver.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/CollectionsConflictResolver.kt new file mode 100644 index 
00000000..117dd30a --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/CollectionsConflictResolver.kt @@ -0,0 +1,27 @@ +package com.quran.shared.syncengine.conflict + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.model.SyncCollection + +/** + * Resolves conflicts between local and remote mutations for collections. + * + * The current strategy prefers remote mutations whenever a conflict is detected. + */ +class CollectionsConflictResolver( + private val conflicts: List> +) { + + fun resolve(): ConflictResolutionResult { + if (conflicts.isEmpty()) { + return ConflictResolutionResult(listOf(), listOf()) + } + + val mutationsToPersist = conflicts.flatMap { it.remoteMutations } + return ConflictResolutionResult( + mutationsToPersist = mutationsToPersist, + mutationsToPush = emptyList>() + ) + } +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/ConflictDetector.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/ConflictDetector.kt new file mode 100644 index 00000000..3c2377f7 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/ConflictDetector.kt @@ -0,0 +1,120 @@ +package com.quran.shared.syncengine.conflict + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.model.SyncBookmark +import com.quran.shared.syncengine.model.SyncBookmarkKey +import com.quran.shared.syncengine.model.conflictKey +import com.quran.shared.syncengine.model.conflictKeyOrNull + +/** + * + * @param conflicts Groups of mutations that have conflicts + * @param nonConflictingRemoteMutations Remote mutations that don't conflict with any local mutations + * @param nonConflictingLocalMutations Local mutations that don't conflict with any remote mutations + */ +data class ConflictDetectionResult( + val conflicts: List>, + val nonConflictingRemoteMutations: List>, + val nonConflictingLocalMutations: List> +) + +/** + * Detects conflicts between local and remote mutations for bookmarks. + * + * A conflict is detected whenever a set of local and remote mutations reference the same bookmark + * key, or when a remote deletion is missing resource data but matches a local remote ID. + * + * The detector groups conflicts by bookmark key and provides separate lists for non-conflicting mutations. + */ +class ConflictDetector( + private val remoteMutations: List>, + private val localMutations: List> +) { + + fun getConflicts(): ConflictDetectionResult { + val remoteMutationsByKey = remoteMutations + .map { mutation -> + mutation.model.conflictKeyOrNull().let { key -> key to mutation } + } + .groupBy({ it.first }, { it.second }) + val remoteMutationsByRemoteID = remoteMutations.associateBy { it.remoteID } + + val resourceConflicts = buildResourceConflicts(remoteMutationsByKey, remoteMutationsByRemoteID) + val conflictingIDs = extractConflictingIDs(resourceConflicts) + + return buildResult(resourceConflicts, conflictingIDs) + } + + /** + * Builds resource conflicts by analyzing local mutations and finding corresponding remote conflicts. 
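A minimal sketch of how the three result buckets are filled for bookmarks (hypothetical data; for page bookmarks only the page number determines the conflict key):

val remoteSamePage = RemoteModelMutation(
    model = SyncBookmark.PageBookmark(id = "r-12", page = 12, lastModified = Instant.fromEpochMilliseconds(2_000)),
    remoteID = "r-12",
    mutation = Mutation.CREATED
)
val remoteOtherPage = RemoteModelMutation(
    model = SyncBookmark.PageBookmark(id = "r-40", page = 40, lastModified = Instant.fromEpochMilliseconds(2_000)),
    remoteID = "r-40",
    mutation = Mutation.CREATED
)
val localSamePage = LocalModelMutation(
    model = SyncBookmark.PageBookmark(id = "l-12", page = 12, lastModified = Instant.fromEpochMilliseconds(1_000)),
    remoteID = null,
    localID = "l-12",
    mutation = Mutation.CREATED
)
val detection = ConflictDetector(listOf(remoteSamePage, remoteOtherPage), listOf(localSamePage)).getConflicts()
// detection.conflicts: one group pairing localSamePage with remoteSamePage (same page key)
// detection.nonConflictingRemoteMutations: [remoteOtherPage]
// detection.nonConflictingLocalMutations: empty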
+ */ + private fun buildResourceConflicts( + remoteMutationsByKey: Map>>, + remoteMutationsByRemoteID: Map> + ): List> { + return localMutations + .groupBy { it.model.conflictKey() } + .mapNotNull { (bookmarkKey, localMutations) -> + val conflictingRemoteMutations = findConflictingRemoteMutations( + bookmarkKey, + localMutations, + remoteMutationsByKey, + remoteMutationsByRemoteID + ) + + if (conflictingRemoteMutations.isNotEmpty()) { + ResourceConflict( + localMutations = localMutations, + remoteMutations = conflictingRemoteMutations + ) + } else null + } + } + + private fun findConflictingRemoteMutations( + bookmarkKey: SyncBookmarkKey, + localMutations: List>, + remoteMutationsByKey: Map>>, + remoteMutationsByRemoteID: Map> + ): List> { + val remoteMutationsById = localMutations.mapNotNull { it.remoteID } + .mapNotNull { remoteMutationsByRemoteID[it] } + + return (remoteMutationsByKey[bookmarkKey].orEmpty() + remoteMutationsById) + .distinct() + } + + private fun extractConflictingIDs(resourceConflicts: List>): Pair, Set> { + val conflictingRemoteIDs = resourceConflicts + .flatMap { it.remoteMutations } + .map { it.remoteID } + .toSet() + + val conflictingLocalIDs = resourceConflicts + .flatMap { it.localMutations } + .map { it.localID } + .toSet() + + return Pair(conflictingRemoteIDs, conflictingLocalIDs) + } + + private fun buildResult( + resourceConflicts: List>, + conflictingIDs: Pair, Set> + ): ConflictDetectionResult { + val (conflictingRemoteIDs, conflictingLocalIDs) = conflictingIDs + + val nonConflictingRemoteMutations = remoteMutations + .filterNot { conflictingRemoteIDs.contains(it.remoteID) } + + val nonConflictingLocalMutations = localMutations + .filterNot { conflictingLocalIDs.contains(it.localID) } + + return ConflictDetectionResult( + conflicts = resourceConflicts, + nonConflictingRemoteMutations = nonConflictingRemoteMutations, + nonConflictingLocalMutations = nonConflictingLocalMutations + ) + } +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/ConflictResolver.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/ConflictResolver.kt new file mode 100644 index 00000000..dfba701c --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/ConflictResolver.kt @@ -0,0 +1,178 @@ +package com.quran.shared.syncengine.conflict + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.model.SyncBookmark + +// region: Result +data class ConflictResolutionResult( + val mutationsToPersist: List>, + val mutationsToPush: List> +) + +private fun ConflictResolutionResult.mergeWith(other: ConflictResolutionResult): ConflictResolutionResult { + return ConflictResolutionResult( + mutationsToPersist = this.mutationsToPersist + other.mutationsToPersist, + mutationsToPush = this.mutationsToPush + other.mutationsToPush + ) +} +// endregion: + +/** + * Resolves conflicts between local and remote mutations for bookmarks. + * + * Analyzes conflict groups and determines which mutations should be persisted locally + * and which should be pushed to the remote server. + * + * Note: Illogical scenarios (e.g., local creation vs remote deletion) will raise + * [IllegalArgumentException] as they indicate the two sides were not in sync. + */ +class ConflictResolver(private val conflicts: List>) { + + /** + * Resolves all conflicts and returns the mutations to persist and push. 
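Continuing the bookmark sketch above, resolution is a reduce over the per-conflict results; the expected outcome for that hypothetical data is shown in the comments:

val resolved = ConflictResolver(detection.conflicts).resolve()
// Both sides created a bookmark for the same page, so the remote copy wins:
// resolved.mutationsToPersist == [remoteSamePage] and resolved.mutationsToPush is empty.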
+ * + * @return [ConflictResolutionResult] containing mutations to persist locally and push remotely + * @throws IllegalArgumentException when illogical conflict scenarios are detected + */ + fun resolve(): ConflictResolutionResult { + return if (conflicts.isNotEmpty()) { + conflicts.map { processConflict(it) } + .reduce { one, other -> one.mergeWith(other) } + } else { + ConflictResolutionResult(listOf(), listOf()) + } + } + + private fun processConflict(resourceConflict: ResourceConflict): ConflictResolutionResult { + // Illogical scenarios + if (resourceConflict.mustHave(Mutation.CREATED, MutationSide.LOCAL) + .and(Mutation.DELETED, MutationSide.REMOTE) + .only()) { + throw IllegalArgumentException( + "Illogical scenario detected: Local creation conflicts with remote deletion. " + + "This indicates the two sides were not in sync. " + + "Local mutations: ${resourceConflict.localMutations.map { "${it.mutation}(${it.localID})" }}, " + + "Remote mutations: ${resourceConflict.remoteMutations.map { "${it.mutation}(${it.remoteID})" }}" + ) + } + + if (resourceConflict.mustHave(Mutation.DELETED, MutationSide.LOCAL) + .and(Mutation.CREATED, MutationSide.REMOTE) + .only()) { + throw IllegalArgumentException( + "Illogical scenario detected: Local deletion conflicts with remote creation. " + + "This indicates the two sides were not in sync. " + + "Local mutations: ${resourceConflict.localMutations.map { "${it.mutation}(${it.localID})" }}, " + + "Remote mutations: ${resourceConflict.remoteMutations.map { "${it.mutation}(${it.remoteID})" }}" + ) + } + + // Handling conflicts + if (resourceConflict.mustHave(Mutation.CREATED, MutationSide.BOTH).only() || + resourceConflict.mustHave(Mutation.DELETED, MutationSide.BOTH).only() || + resourceConflict.mustHave(Mutation.DELETED, MutationSide.BOTH) + .and(Mutation.CREATED, MutationSide.REMOTE) + .only() || + resourceConflict.mustHave(Mutation.CREATED, MutationSide.BOTH) + .and(Mutation.DELETED, MutationSide.BOTH) + .only()) { + return ConflictResolutionResult( + mutationsToPush = listOf(), + mutationsToPersist = resourceConflict.remoteMutations + ) + } + else if (resourceConflict.mustHave(Mutation.DELETED, MutationSide.BOTH) + .and(Mutation.CREATED, MutationSide.LOCAL) + .only()) { + return ConflictResolutionResult( + mutationsToPush = resourceConflict.localMutations.filter { it.mutation == Mutation.CREATED }, + mutationsToPersist = resourceConflict.remoteMutations + ) + } + else { + // This shouldn't happen if ConflictDetector is working correctly + // Throw an error instead of returning empty result as fallback + throw IllegalArgumentException( + "Unexpected conflict scenario detected. " + + "Local mutations: ${resourceConflict.localMutations.map { "${it.mutation}(${it.localID})" }}, " + + "Remote mutations: ${resourceConflict.remoteMutations.map { "${it.mutation}(${it.remoteID})" }}" + ) + } + } +} + +// region: ConflictPredicate + +/** + * A DSL that makes checking for specific conflicts clearer. 
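For illustration, one of the checks in processConflict above reads as follows (a worked reading of the existing logic, not new behaviour):

// resourceConflict.mustHave(Mutation.DELETED, MutationSide.BOTH)
//     .and(Mutation.CREATED, MutationSide.LOCAL)
//     .only()
// means: the group contains exactly one remote DELETED, one local DELETED and one local CREATED
// mutation, and nothing else. Each successful check removes the matched mutation from the
// remaining group, and only() requires that remainder to be empty on both sides.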
+ */ +private class ConflictPredicate( + private val hasFailed: Boolean, + private val remainingConflicts: ResourceConflict +) { + + fun and(mutation: Mutation, side: MutationSide): ConflictPredicate { + if (hasFailed) { + return this + } + return remainingConflicts.mustHave(mutation, side) + } + + fun only(): Boolean = !hasFailed + && remainingConflicts.remoteMutations.isEmpty() + && remainingConflicts.localMutations.isEmpty() +} + +private enum class MutationSide { + REMOTE, LOCAL, BOTH +} + +/** + * Checks if this conflict group contains a specific mutation on the specified side. + * + * @return A predicate that can be chained with additional checks + */ +private fun ResourceConflict.mustHave( + mutation: Mutation, + side: MutationSide +): ConflictPredicate { + return when (side) { + MutationSide.REMOTE -> checkRemoteSide(mutation) + MutationSide.LOCAL -> checkLocalSide(mutation) + MutationSide.BOTH -> checkBothSides(mutation) + } +} + +private fun ResourceConflict.checkRemoteSide(mutation: Mutation): ConflictPredicate { + val matchingRemoteMutation = remoteMutations.firstOrNull { it.mutation == mutation } + return if (matchingRemoteMutation == null) { + ConflictPredicate(hasFailed = true, remainingConflicts = this) + } else { + val remainingGroup = ResourceConflict( + remoteMutations = remoteMutations.minus(matchingRemoteMutation), + localMutations = localMutations + ) + ConflictPredicate(hasFailed = false, remainingConflicts = remainingGroup) + } +} + +private fun ResourceConflict.checkLocalSide(mutation: Mutation): ConflictPredicate { + val matchingLocalMutation = localMutations.firstOrNull { it.mutation == mutation } + return if (matchingLocalMutation == null) { + ConflictPredicate(hasFailed = true, remainingConflicts = this) + } else { + val remainingGroup = ResourceConflict( + remoteMutations = remoteMutations, + localMutations = localMutations.minus(matchingLocalMutation) + ) + ConflictPredicate(hasFailed = false, remainingConflicts = remainingGroup) + } +} + +private fun ResourceConflict.checkBothSides(mutation: Mutation): ConflictPredicate { + return checkRemoteSide(mutation).and(mutation, MutationSide.LOCAL) +} + +// endregion: diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/NotesConflictDetector.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/NotesConflictDetector.kt new file mode 100644 index 00000000..c2994970 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/NotesConflictDetector.kt @@ -0,0 +1,51 @@ +package com.quran.shared.syncengine.conflict + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.model.SyncNote + +/** + * Detects conflicts between local and remote mutations for notes. + * + * A conflict is detected whenever local and remote mutations reference the same remote ID. + */ +class NotesConflictDetector( + private val remoteMutations: List>, + private val localMutations: List> +) { + + fun getConflicts(): ConflictDetectionResult { + val remoteMutationsByRemoteId = remoteMutations.groupBy { it.remoteID } + val localMutationsByRemoteId = localMutations + .filter { it.remoteID != null } + .groupBy { it.remoteID!! 
} + + val resourceConflicts = localMutationsByRemoteId.mapNotNull { (remoteId, locals) -> + val remotes = remoteMutationsByRemoteId[remoteId].orEmpty() + if (remotes.isEmpty()) { + null + } else { + ResourceConflict( + localMutations = locals, + remoteMutations = remotes + ) + } + } + + val conflictingRemoteIds = resourceConflicts + .flatMap { it.remoteMutations } + .map { it.remoteID } + .toSet() + + val conflictingLocalIds = resourceConflicts + .flatMap { it.localMutations } + .map { it.localID } + .toSet() + + return ConflictDetectionResult( + conflicts = resourceConflicts, + nonConflictingRemoteMutations = remoteMutations.filterNot { conflictingRemoteIds.contains(it.remoteID) }, + nonConflictingLocalMutations = localMutations.filterNot { conflictingLocalIds.contains(it.localID) } + ) + } +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/NotesConflictResolver.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/NotesConflictResolver.kt new file mode 100644 index 00000000..20228863 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/NotesConflictResolver.kt @@ -0,0 +1,26 @@ +package com.quran.shared.syncengine.conflict + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.syncengine.model.SyncNote + +/** + * Resolves conflicts between local and remote mutations for notes. + * + * The current strategy prefers remote mutations whenever a conflict is detected. + */ +class NotesConflictResolver( + private val conflicts: List> +) { + + fun resolve(): ConflictResolutionResult { + if (conflicts.isEmpty()) { + return ConflictResolutionResult(listOf(), listOf()) + } + + val mutationsToPersist = conflicts.flatMap { it.remoteMutations } + return ConflictResolutionResult( + mutationsToPersist = mutationsToPersist, + mutationsToPush = emptyList>() + ) + } +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/ResourceConflict.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/ResourceConflict.kt new file mode 100644 index 00000000..0d6f3ef0 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/conflict/ResourceConflict.kt @@ -0,0 +1,12 @@ +package com.quran.shared.syncengine.conflict + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.RemoteModelMutation + +/** + * Represents a group of conflicting local and remote mutations for the same resource. 
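One property of the notes detector above worth spelling out (hypothetical mutation): matching is by remote ID only, so a note created locally and never uploaded cannot conflict.

val draftNote = LocalModelMutation(
    model = SyncNote(id = "n-1", body = "draft", ranges = emptyList(), lastModified = Instant.fromEpochMilliseconds(1_000)),
    remoteID = null, // never synced, so no remote mutation can reference it
    localID = "n-1",
    mutation = Mutation.CREATED
)
// NotesConflictDetector(remoteMutations, listOf(draftNote)).getConflicts()
// always places draftNote in nonConflictingLocalMutations.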
+ */ +data class ResourceConflict( + val localMutations: List>, + val remoteMutations: List> +) \ No newline at end of file diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/model/SyncBookmark.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/model/SyncBookmark.kt new file mode 100644 index 00000000..1d1d9e6e --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/model/SyncBookmark.kt @@ -0,0 +1,36 @@ +package com.quran.shared.syncengine.model + +import kotlin.time.Instant + +sealed class SyncBookmark { + data class PageBookmark(val id: String, val page: Int, val lastModified: Instant) : + SyncBookmark() + + data class AyahBookmark( + val id: String, + val sura: Int, + val ayah: Int, + val lastModified: Instant + ) : SyncBookmark() +} + +internal sealed class SyncBookmarkKey { + data class Page(val page: Int) : SyncBookmarkKey() { + override fun toString(): String = "page=$page" + } + + data class Ayah(val sura: Int, val ayah: Int) : SyncBookmarkKey() { + override fun toString(): String = "sura=$sura, ayah=$ayah" + } +} + +internal fun SyncBookmark.conflictKeyOrNull(): SyncBookmarkKey { + return when (this) { + is SyncBookmark.PageBookmark -> SyncBookmarkKey.Page(page) + is SyncBookmark.AyahBookmark -> SyncBookmarkKey.Ayah(sura, ayah) + } +} + +internal fun SyncBookmark.conflictKey(): SyncBookmarkKey { + return conflictKeyOrNull() +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/model/SyncCollection.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/model/SyncCollection.kt new file mode 100644 index 00000000..69639b25 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/model/SyncCollection.kt @@ -0,0 +1,23 @@ +package com.quran.shared.syncengine.model + +import kotlin.time.Instant + +data class SyncCollection( + val id: String, + val name: String?, + val lastModified: Instant +) + +internal sealed class SyncCollectionKey { + data class Name(val name: String) : SyncCollectionKey() { + override fun toString(): String = "name=$name" + } + + data class LocalId(val localId: String) : SyncCollectionKey() { + override fun toString(): String = "localId=$localId" + } +} + +internal fun SyncCollection.conflictKeyOrNull(): SyncCollectionKey? { + return name?.let { SyncCollectionKey.Name(it) } +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/model/SyncCollectionBookmark.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/model/SyncCollectionBookmark.kt new file mode 100644 index 00000000..5fcc8b4f --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/model/SyncCollectionBookmark.kt @@ -0,0 +1,40 @@ +package com.quran.shared.syncengine.model + +import kotlin.time.Instant + +sealed class SyncCollectionBookmark { + abstract val collectionId: String + abstract val lastModified: Instant + + data class PageBookmark( + override val collectionId: String, + val page: Int, + override val lastModified: Instant, + val bookmarkId: String? = null + ) : SyncCollectionBookmark() + + data class AyahBookmark( + override val collectionId: String, + val sura: Int, + val ayah: Int, + override val lastModified: Instant, + val bookmarkId: String? 
= null + ) : SyncCollectionBookmark() +} + +internal sealed class SyncCollectionBookmarkKey { + data class Page(val collectionId: String, val page: Int) : SyncCollectionBookmarkKey() { + override fun toString(): String = "collection=$collectionId, page=$page" + } + + data class Ayah(val collectionId: String, val sura: Int, val ayah: Int) : SyncCollectionBookmarkKey() { + override fun toString(): String = "collection=$collectionId, sura=$sura, ayah=$ayah" + } +} + +internal fun SyncCollectionBookmark.conflictKey(): SyncCollectionBookmarkKey { + return when (this) { + is SyncCollectionBookmark.PageBookmark -> SyncCollectionBookmarkKey.Page(collectionId, page) + is SyncCollectionBookmark.AyahBookmark -> SyncCollectionBookmarkKey.Ayah(collectionId, sura, ayah) + } +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/model/SyncNote.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/model/SyncNote.kt new file mode 100644 index 00000000..4b908fe6 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/model/SyncNote.kt @@ -0,0 +1,52 @@ +package com.quran.shared.syncengine.model + +import kotlin.time.Instant + +data class SyncNote( + val id: String, + val body: String?, + val ranges: List, + val lastModified: Instant +) + +data class NoteAyah( + val sura: Int, + val ayah: Int +) + +data class NoteRange( + val start: NoteAyah, + val end: NoteAyah +) + +internal fun NoteRange.toRangeString(): String { + return "${start.sura}:${start.ayah}-${end.sura}:${end.ayah}" +} + +internal fun parseNoteRange(value: String): NoteRange? { + val trimmed = value.trim() + if (trimmed.isEmpty()) { + return null + } + + val parts = trimmed.split("-", limit = 2) + val start = parseNoteAyah(parts.first()) ?: return null + val end = if (parts.size > 1) { + parseNoteAyah(parts[1]) ?: return null + } else { + start + } + + return NoteRange(start = start, end = end) +} + +private fun parseNoteAyah(value: String): NoteAyah? { + val trimmed = value.trim() + val pieces = trimmed.split(":", limit = 2) + if (pieces.size != 2) { + return null + } + val sura = pieces[0].toIntOrNull() ?: return null + val ayah = pieces[1].toIntOrNull() ?: return null + return NoteAyah(sura = sura, ayah = ayah) +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/network/GetMutationsRequest.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/network/GetMutationsRequest.kt new file mode 100644 index 00000000..d828b8f4 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/network/GetMutationsRequest.kt @@ -0,0 +1,110 @@ +@file:OptIn(kotlin.time.ExperimentalTime::class) +package com.quran.shared.syncengine.network + +import co.touchlab.kermit.Logger +import com.quran.shared.syncengine.SyncMutation +import io.ktor.client.HttpClient +import io.ktor.client.call.body +import io.ktor.client.request.get +import io.ktor.client.request.headers +import io.ktor.client.request.parameter +import io.ktor.http.ContentType +import io.ktor.http.contentType +import io.ktor.http.isSuccess +import kotlinx.serialization.Serializable +import kotlinx.serialization.json.JsonObject + +class GetMutationsRequest( + private val httpClient: HttpClient, + private val url: String +) { + private val logger = Logger.withTag("GetMutationsRequest") + + // region: JSON Mapping. 
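    // For orientation, the serializable classes below model a response of roughly this shape.
    // Field names come from the classes themselves; the concrete values are illustrative only,
    // and "data" is an opaque, resource-specific JsonObject:
    //
    //   {
    //     "success": true,
    //     "data": {
    //       "lastMutationAt": 1723456789,
    //       "mutations": [
    //         { "resource": "BOOKMARK", "resourceId": "abc123", "type": "CREATE",
    //           "data": { ... }, "timestamp": 1723456700 }
    //       ]
    //     }
    //   }
    //
    // "type" is one of CREATE / UPDATE / DELETE and is mapped to Mutation by asMutation().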
+ @Serializable + private data class ApiResponse( + val success: Boolean, + val data: ApiResponseData + ) + + @Serializable + private data class ApiResponseData( + val lastMutationAt: Long, + val mutations: List + ) + + @Serializable + private data class ApiMutation( + val resource: String, + val resourceId: String, + val type: String, + val data: JsonObject? = null, + val timestamp: Long + ) + + @Serializable + private data class ErrorResponse( + val message: String, + val type: String, + val success: Boolean + ) + // endregion + + suspend fun getMutations( + lastModificationDate: Long, + authHeaders: Map, + resources: List = emptyList() + ): MutationsResponse { + val httpResponse = httpClient.get("$url/auth/v1/sync") { + headers { + authHeaders.forEach { (key, value) -> + append(key, value) + } + contentType(ContentType.Application.Json) + } + parameter("mutationsSince", lastModificationDate) + if (resources.isNotEmpty()) { + parameter("resources", resources.joinToString(",")) + } + } + + logger.d { "HTTP response status: ${httpResponse.status}" } + if (!httpResponse.status.isSuccess()) { + httpResponse.processError(logger) { + httpResponse.body().message + } + } + + val apiResponse: ApiResponse = httpResponse.body() + if (!apiResponse.success) { + logger.e { "Server returned success=false in response body" } + logger.e { "Response data: lastMutationAt=${apiResponse.data.lastMutationAt}, mutations count=${apiResponse.data.mutations.size}" } + throw RuntimeException("Server returned success=false in response body") + } + + logger.i { "Received response: success=${apiResponse.success}" } + logger.d { "Response data: lastMutationAt=${apiResponse.data.lastMutationAt}, mutations count=${apiResponse.data.mutations.size}" } + + return apiResponse.data.toMutationsResponse() + } + + private fun ApiResponseData.toMutationsResponse(): MutationsResponse { + val mutations = mutations.map { apiMutation -> + val mutation = apiMutation.type.asMutation(logger) + SyncMutation( + resource = apiMutation.resource, + resourceId = apiMutation.resourceId, + mutation = mutation, + data = apiMutation.data, + timestamp = apiMutation.timestamp + ) + } + + val result = MutationsResponse( + lastModificationDate = lastMutationAt, + mutations = mutations + ) + + return result + } +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/network/HttpClientFactory.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/network/HttpClientFactory.kt new file mode 100644 index 00000000..34a6f547 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/network/HttpClientFactory.kt @@ -0,0 +1,7 @@ +package com.quran.shared.syncengine.network + +import io.ktor.client.HttpClient + +expect object HttpClientFactory { + fun createHttpClient(): HttpClient +} \ No newline at end of file diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/network/MutationsResponse.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/network/MutationsResponse.kt new file mode 100644 index 00000000..a95b8331 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/network/MutationsResponse.kt @@ -0,0 +1,8 @@ +package com.quran.shared.syncengine.network + +import com.quran.shared.syncengine.SyncMutation + +data class MutationsResponse( + val lastModificationDate: Long, + val mutations: List +) diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/network/NetworkUtil.kt 
b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/network/NetworkUtil.kt new file mode 100644 index 00000000..309fb574 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/network/NetworkUtil.kt @@ -0,0 +1,32 @@ +package com.quran.shared.syncengine.network + +import co.touchlab.kermit.Logger +import com.quran.shared.mutations.Mutation +import io.ktor.client.statement.HttpResponse +import io.ktor.client.statement.bodyAsText + +internal fun String.asMutation(logger: Logger): Mutation { + return when (this) { + "CREATE" -> Mutation.CREATED + "UPDATE" -> Mutation.MODIFIED + "DELETE" -> Mutation.DELETED + else -> { + logger.e { "Unknown mutation type: $this" } + throw IllegalArgumentException("Unknown mutation type: $this") + } + } +} + +internal suspend fun HttpResponse.processError(logger: Logger, errorMessageExtractor: suspend () -> String? = { null }) { + val errorBody = bodyAsText() + logger.e { "HTTP error response: status=${status}, body=$errorBody" } + + val parsedMessage = try { + errorMessageExtractor() + } catch (e: Exception) { + logger.w { "Failed to parse error response, using raw body: ${e.message}" } + null + } + + throw SyncNetworkException(status, errorBody, parsedMessage) +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/network/PostMutationsRequest.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/network/PostMutationsRequest.kt new file mode 100644 index 00000000..04c80040 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/network/PostMutationsRequest.kt @@ -0,0 +1,144 @@ +@file:OptIn(kotlin.time.ExperimentalTime::class) +package com.quran.shared.syncengine.network + +import co.touchlab.kermit.Logger +import com.quran.shared.mutations.Mutation +import com.quran.shared.syncengine.SyncMutation +import io.ktor.client.HttpClient +import io.ktor.client.call.body +import io.ktor.client.request.headers +import io.ktor.client.request.parameter +import io.ktor.client.request.post +import io.ktor.client.request.setBody +import io.ktor.http.ContentType +import io.ktor.http.contentType +import io.ktor.http.isSuccess +import kotlinx.serialization.Serializable +import kotlinx.serialization.json.JsonObject + +class PostMutationsRequest( + private val httpClient: HttpClient, + private val url: String +) { + private val logger = Logger.withTag("PostMutationsRequestClient") + + // region: JSON Mapping. + @Serializable + private data class PostMutationsRequestData( + val mutations: List + ) + + @Serializable + private data class PostMutationRequestData( + val type: String, + val resource: String, + val resourceId: String?, + val data: JsonObject? + ) + + @Serializable + private data class PostMutationsResponse( + val success: Boolean, + val data: PostMutationsResponseData + ) + + @Serializable + private data class PostMutationsResponseData( + val lastMutationAt: Long, + val mutations: List + ) + + @Serializable + private data class PostMutationResponse( + val type: String, + val resource: String, + val data: JsonObject?, + val resourceId: String, + val timestamp: Long? 
= null + ) + + @Serializable + private data class ErrorResponse( + val message: String, + val type: String, + val success: Boolean + ) + // endregion + + suspend fun postMutations( + mutations: List, + lastModificationDate: Long, + authHeaders: Map + ): MutationsResponse { + logger.i { "Starting POST mutations request to $url" } + + val requestBody = PostMutationsRequestData( + mutations = mutations.map { localMutation -> + PostMutationRequestData( + type = localMutation.mutation.toRequestType(), + resource = localMutation.resource, + resourceId = localMutation.resourceId, + data = localMutation.data + ) + } + ) + + val httpResponse = httpClient.post("$url/auth/v1/sync") { + headers { + authHeaders.forEach { (key, value) -> + append(key, value) + } + contentType(ContentType.Application.Json) + } + parameter("lastMutationAt", lastModificationDate) + setBody(requestBody) + } + + logger.d { "HTTP response status: ${httpResponse.status}" } + + if (!httpResponse.status.isSuccess()) { + httpResponse.processError(logger) { + httpResponse.body().message + } + } + + val response: PostMutationsResponse = httpResponse.body() + + logger.i { "Received response: success=${response.success}" } + + val result = response.data.toMutationsResponse() + logger.i { "lastModificationDate=${result.lastModificationDate}, mutations count=${result.mutations.size}" } + + return result + } + + private fun PostMutationsResponseData.toMutationsResponse(): MutationsResponse { + val logger = Logger.withTag("PostMutationsResponseConverter") + + val mutations = mutations.map { postMutation -> + val mutation = postMutation.type.asMutation(logger) + SyncMutation( + resource = postMutation.resource, + resourceId = postMutation.resourceId, + mutation = mutation, + data = postMutation.data, + timestamp = postMutation.timestamp + ) + } + + val result = MutationsResponse( + lastModificationDate = lastMutationAt, + mutations = mutations + ) + + return result + } +} + +private fun Mutation.toRequestType(): String { + return when (this) { + Mutation.CREATED -> "CREATE" + Mutation.DELETED -> "DELETE" + Mutation.MODIFIED -> "UPDATE" + } +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/network/SyncNetworkException.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/network/SyncNetworkException.kt new file mode 100644 index 00000000..b5f0f3db --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/network/SyncNetworkException.kt @@ -0,0 +1,9 @@ +package com.quran.shared.syncengine.network + +import io.ktor.http.HttpStatusCode + +class SyncNetworkException( + status: HttpStatusCode, + rawBody: String, + parsedMessage: String? 
+) : Exception("HTTP request failed with status ${status.value}: ${parsedMessage ?: rawBody}") diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/BookmarksLocalMutationsPreprocessor.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/BookmarksLocalMutationsPreprocessor.kt new file mode 100644 index 00000000..96e3ff82 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/BookmarksLocalMutationsPreprocessor.kt @@ -0,0 +1,109 @@ +package com.quran.shared.syncengine.preprocessing + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.syncengine.model.SyncBookmark +import com.quran.shared.syncengine.model.SyncBookmarkKey +import com.quran.shared.syncengine.model.conflictKey + +class BookmarksLocalMutationsPreprocessor { + + /** + * Preprocesses local mutations and throws an error if illogical scenarios are detected. + * + * Converts MODIFIED mutations to CREATED mutations. + * + * @param localMutations List of local mutations to preprocess + * @return List of local mutations if no illogical scenarios are detected + * @throws IllegalArgumentException if illogical scenarios are detected + */ + fun preprocess(localMutations: List>): List> { + // Combine all mutations + val allMutations = localMutations.map { it.mapModified() } + + // Group mutations by bookmark key + val mutationsByKey = allMutations.groupBy { mutation -> + mutation.model.conflictKey() + } + + val processedMutations = mutationsByKey.flatMap { (bookmarkKey, mutations) -> + processBookmarkMutations(bookmarkKey, mutations) + } + + return processedMutations + } + + private fun processBookmarkMutations( + bookmarkKey: SyncBookmarkKey, + mutations: List> + ): List> { + // Check for too many mutations + if (mutations.size > 2) { + throw IllegalArgumentException( + "Illogical scenario detected: Bookmark $bookmarkKey has ${mutations.size} mutations, " + + "which exceeds logical limit of 2. Make sure to properly merge and aggregate" + + "the local mutations to reflect the final propert state of the data" + + "Mutations: ${mutations.map { "${it.mutation}(${it.localID})" }}" + ) + } + + // Check for multiple deletions + val deletions = mutations.filter { it.mutation == Mutation.DELETED } + if (deletions.size > 1) { + throw IllegalArgumentException( + "Illogical scenario detected: Bookmark $bookmarkKey has ${deletions.size} deletions, " + + "which is not allowed. Mutations: ${mutations.map { "${it.mutation}(${it.localID})" }}" + ) + } + + // Check that deletions have remote IDs + deletions.forEach { deletion -> + if (deletion.remoteID == null) { + throw IllegalArgumentException( + "Illogical scenario detected: Bookmark $bookmarkKey has deletion without remote ID, " + + "which is not allowed. Deletion must reference an existing remote resource. " + + "Mutation: ${deletion.mutation}(${deletion.localID})" + ) + } + } + + // Check for multiple creations + val creations = mutations.filter { it.mutation == Mutation.CREATED } + if (creations.size > 1) { + throw IllegalArgumentException( + "Illogical scenario detected: Bookmark $bookmarkKey has ${creations.size} creations, " + + "which is not allowed. 
Mutations: ${mutations.map { "${it.mutation}(${it.localID})" }}" + ) + } + + // Handle creation followed by deletion (always invalid since deletions must have remote IDs) + if (mutations.size == 2) { + val first = mutations[0] + val second = mutations[1] + + if (first.mutation == Mutation.CREATED && second.mutation == Mutation.DELETED) { + // Invalid scenario - creation followed by deletion always indicates two bookmarks on same page + throw IllegalArgumentException( + "Illogical scenario detected: Bookmark $bookmarkKey has creation followed by deletion, " + + "indicating there were two bookmarks with the same key. " + + "Mutations: ${mutations.map { "${it.mutation}(${it.localID})" }}" + ) + } + } + + // All other scenarios are valid + return mutations + } +} + +private fun LocalModelMutation.mapModified(): LocalModelMutation = + when (this.mutation) { + Mutation.MODIFIED -> + LocalModelMutation( + model = this.model, + remoteID = this.remoteID, + localID = this.localID, + mutation = Mutation.CREATED + ) + Mutation.DELETED, Mutation.CREATED -> this + } diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/BookmarksRemoteMutationsPreprocessor.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/BookmarksRemoteMutationsPreprocessor.kt new file mode 100644 index 00000000..e4486257 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/BookmarksRemoteMutationsPreprocessor.kt @@ -0,0 +1,38 @@ +package com.quran.shared.syncengine.preprocessing + +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.model.SyncBookmark + +class BookmarksRemoteMutationsPreprocessor( + private val checkLocalExistence: suspend (List) -> Map +) { + + /** + * Preprocesses remote mutations to filter out DELETE mutations for resources that don't exist + * locally and convert ALL MODIFIED mutations to CREATED mutations. 
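As a rough illustration (hypothetical remote IDs; the existence callback is assumed to take the remote IDs and return a presence map, matching how it is used below), suppose "r-1" exists locally and "r-2" does not:

// remote DELETED of "r-1"  -> kept (there is something local to delete)
// remote DELETED of "r-2"  -> dropped (nothing exists locally to delete)
// remote MODIFIED of "r-3" -> rewritten as CREATED before conflict detection
val preprocessor = BookmarksRemoteMutationsPreprocessor { ids -> ids.associateWith { it == "r-1" } }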
+ * + * @param remoteMutations List of remote mutations to preprocess + * @return Filtered and transformed list of remote mutations + */ + suspend fun preprocess(remoteMutations: List>): List> { + val remoteIDsToCheck = remoteMutations.filter { it.mutation == Mutation.DELETED } + .map { it.remoteID } + val existenceMap = if (remoteIDsToCheck.isNotEmpty()) checkLocalExistence(remoteIDsToCheck) else emptyMap() + + return remoteMutations + .filter { it.mutation != Mutation.DELETED || existenceMap[it.remoteID] == true } + .map { it.mapModified() } + } +} + +private fun RemoteModelMutation.mapModified(): RemoteModelMutation = + when (this.mutation) { + Mutation.MODIFIED -> + RemoteModelMutation( + model = this.model, + remoteID = this.remoteID, + mutation = Mutation.CREATED + ) + Mutation.DELETED, Mutation.CREATED -> this + } diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/CollectionBookmarksRemoteMutationsPreprocessor.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/CollectionBookmarksRemoteMutationsPreprocessor.kt new file mode 100644 index 00000000..17c4cec5 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/CollectionBookmarksRemoteMutationsPreprocessor.kt @@ -0,0 +1,37 @@ +package com.quran.shared.syncengine.preprocessing + +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.model.SyncCollectionBookmark + +class CollectionBookmarksRemoteMutationsPreprocessor( + private val checkLocalExistence: suspend (List) -> Map +) { + + /** + * Preprocesses remote mutations to filter out DELETE mutations for resources that don't exist + * locally and convert ALL MODIFIED mutations to CREATED mutations. + */ + suspend fun preprocess( + remoteMutations: List> + ): List> { + val remoteIDsToCheck = remoteMutations.filter { it.mutation == Mutation.DELETED } + .map { it.remoteID } + val existenceMap = if (remoteIDsToCheck.isNotEmpty()) checkLocalExistence(remoteIDsToCheck) else emptyMap() + + return remoteMutations + .filter { it.mutation != Mutation.DELETED || existenceMap[it.remoteID] == true } + .map { it.mapModified() } + } +} + +private fun RemoteModelMutation.mapModified(): RemoteModelMutation = + when (this.mutation) { + Mutation.MODIFIED -> + RemoteModelMutation( + model = this.model, + remoteID = this.remoteID, + mutation = Mutation.CREATED + ) + Mutation.DELETED, Mutation.CREATED -> this + } diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/CollectionsLocalMutationsPreprocessor.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/CollectionsLocalMutationsPreprocessor.kt new file mode 100644 index 00000000..af2e2a34 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/CollectionsLocalMutationsPreprocessor.kt @@ -0,0 +1,28 @@ +package com.quran.shared.syncengine.preprocessing + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.syncengine.model.SyncCollection + +class CollectionsLocalMutationsPreprocessor { + + /** + * Validates local mutations for collections. + * + * Ensures deletions always reference a remote ID. 
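A one-line consequence of this rule (hypothetical mutation):

// LocalModelMutation(model = collection, remoteID = null, localID = "c-9", mutation = Mutation.DELETED)
//   -> preprocess(...) throws IllegalArgumentException: deletions must reference an existing remote resource.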
+ */ + fun preprocess( + localMutations: List> + ): List> { + localMutations.forEach { mutation -> + if (mutation.mutation == Mutation.DELETED && mutation.remoteID == null) { + throw IllegalArgumentException( + "Collection deletion without remote ID is not allowed. " + + "Mutation: ${mutation.mutation}(${mutation.localID})" + ) + } + } + + return localMutations + } +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/CollectionsRemoteMutationsPreprocessor.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/CollectionsRemoteMutationsPreprocessor.kt new file mode 100644 index 00000000..66ab5dc7 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/CollectionsRemoteMutationsPreprocessor.kt @@ -0,0 +1,29 @@ +package com.quran.shared.syncengine.preprocessing + +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.model.SyncCollection + +class CollectionsRemoteMutationsPreprocessor( + private val checkLocalExistence: suspend (List) -> Map +) { + + /** + * Preprocesses remote mutations to filter out DELETE mutations for resources that don't exist + * locally. + */ + suspend fun preprocess( + remoteMutations: List> + ): List> { + val remoteIDsToCheck = remoteMutations.filter { it.mutation == Mutation.DELETED } + .map { it.remoteID } + val existenceMap = if (remoteIDsToCheck.isNotEmpty()) { + checkLocalExistence(remoteIDsToCheck) + } else { + emptyMap() + } + + return remoteMutations + .filter { it.mutation != Mutation.DELETED || existenceMap[it.remoteID] == true } + } +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/NotesLocalMutationsPreprocessor.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/NotesLocalMutationsPreprocessor.kt new file mode 100644 index 00000000..1fc993ca --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/NotesLocalMutationsPreprocessor.kt @@ -0,0 +1,28 @@ +package com.quran.shared.syncengine.preprocessing + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.syncengine.model.SyncNote + +class NotesLocalMutationsPreprocessor { + + /** + * Validates local mutations for notes. + * + * Ensures deletions always reference a remote ID. + */ + fun preprocess( + localMutations: List> + ): List> { + localMutations.forEach { mutation -> + if (mutation.mutation == Mutation.DELETED && mutation.remoteID == null) { + throw IllegalArgumentException( + "Note deletion without remote ID is not allowed. 
" + + "Mutation: ${mutation.mutation}(${mutation.localID})" + ) + } + } + + return localMutations + } +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/NotesRemoteMutationsPreprocessor.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/NotesRemoteMutationsPreprocessor.kt new file mode 100644 index 00000000..7c481c08 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/preprocessing/NotesRemoteMutationsPreprocessor.kt @@ -0,0 +1,29 @@ +package com.quran.shared.syncengine.preprocessing + +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.model.SyncNote + +class NotesRemoteMutationsPreprocessor( + private val checkLocalExistence: suspend (List) -> Map +) { + + /** + * Preprocesses remote mutations to filter out DELETE mutations for resources that don't exist + * locally. + */ + suspend fun preprocess( + remoteMutations: List> + ): List> { + val remoteIDsToCheck = remoteMutations.filter { it.mutation == Mutation.DELETED } + .map { it.remoteID } + val existenceMap = if (remoteIDsToCheck.isNotEmpty()) { + checkLocalExistence(remoteIDsToCheck) + } else { + emptyMap() + } + + return remoteMutations + .filter { it.mutation != Mutation.DELETED || existenceMap[it.remoteID] == true } + } +} diff --git a/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/scheduling/Scheduler.kt b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/scheduling/Scheduler.kt new file mode 100644 index 00000000..d801bbc5 --- /dev/null +++ b/syncengine/src/commonMain/kotlin/com/quran/shared/syncengine/scheduling/Scheduler.kt @@ -0,0 +1,308 @@ +package com.quran.shared.syncengine.scheduling + +import co.touchlab.kermit.Logger +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.Job +import kotlinx.coroutines.SupervisorJob +import kotlinx.coroutines.delay +import kotlinx.coroutines.launch +import kotlinx.coroutines.sync.Mutex +import kotlinx.coroutines.sync.withLock +import kotlin.math.pow +import kotlin.time.Clock +import kotlin.time.Duration +import kotlin.time.Duration.Companion.milliseconds +import kotlin.time.Duration.Companion.minutes +import kotlin.time.Duration.Companion.seconds +import kotlin.time.ExperimentalTime +import kotlin.time.Instant + +enum class Trigger { + APP_REFRESH, + LOCAL_DATA_MODIFIED, + IMMEDIATE +} + +data class SchedulerTimings( + val standardInterval: Duration, + val appRefreshInterval: Duration, + val localDataModifiedInterval: Duration, + val failureRetryingConfig: FailureRetryingConfig +) + +data class FailureRetryingConfig( + val baseDelay: Duration, + val multiplier: Double, + val maximumRetries: Int +) + +private val DefaultTimings = SchedulerTimings( + standardInterval = 30.minutes, + appRefreshInterval = 30.seconds, + localDataModifiedInterval = 5.seconds, + failureRetryingConfig = FailureRetryingConfig(baseDelay = 200.milliseconds, multiplier = 2.5, maximumRetries = 5) +) + +private sealed class SchedulerState { + /** Scheduler is operational, but nothing is scheduled.*/ + data object Idle: SchedulerState() + /** A non-triggered standard job has been scheduled. */ + data object StandardDelay: SchedulerState() + /** The job has been fired, and the scheduler is waiting on the taskFunction to return. */ + data class WaitingForReply(val original: SchedulerState): SchedulerState() + /** The task function's response is being processed. 
*/ + data class Replied(val original: SchedulerState): SchedulerState() + /** A job is currently scheduled due to the associated trigger. */ + data class Triggered(val trigger: Trigger): SchedulerState() + /** A job is currently scheduled to retry an earlier failed taskFunction execution. */ + data class Retrying(val original: SchedulerState, val retryNumber: Int): SchedulerState() + /** Scheduler has been stopped and will not accept any further triggers. */ + data object Stopped: SchedulerState() +} + +/** + * A scheduler that manages the execution of a task function with configurable timing and retry logic. + * + * ## Error Handling + * The taskFunction should throw an exception to indicate failure. The scheduler will: + * - Catch exceptions and retry according to the configured retry timings + * - After maximum retries are exhausted, report the final exception to reachedMaximumFailureRetries + * - Continue normal operation if taskFunction completes without throwing + * + * @param timings Configuration for scheduling intervals and retry behavior + * @param taskFunction The task to execute. Should throw an exception on failure. + * @param reachedMaximumFailureRetries Called when max retries are exhausted with the final exception + */ +@OptIn(ExperimentalTime::class) +class Scheduler( + val timings: SchedulerTimings, + val taskFunction: suspend () -> Unit, + val reachedMaximumFailureRetries: suspend (Exception) -> Unit, +) { + private val scope: CoroutineScope = CoroutineScope(Dispatchers.Default + SupervisorJob()) + private val logger = Logger.withTag("Scheduler") + + private var mutex = Mutex() + + // region: Synchronized internal state + private var bufferedTrigger: Trigger? = null + private var state: SchedulerState = SchedulerState.Idle + private var expectedExecutionTime: Long? = null + private var currentJob: Job? = null + //endregion: + + // region: Public + fun invoke(trigger: Trigger) { + scope.launch { + processInvokedTrigger(trigger) + } + } + + // Entry point. Starts a critical section + fun stop() { + scope.launch { + executeStop() + } + } + // endregion: + + // region: Internal-state entry points + + // Entry point: starts a critical section + private suspend fun processInvokedTrigger(trigger: Trigger) { + mutex.withLock { + when (state) { + SchedulerState.Idle, SchedulerState.StandardDelay, is SchedulerState.Triggered -> { + logger.d { "Trigger invoked: $trigger" } + executeTrigger(trigger) + } + + is SchedulerState.WaitingForReply, is SchedulerState.Replied -> { + buffer(trigger) + } + + is SchedulerState.Retrying -> {} + + SchedulerState.Stopped -> { + logger.d { "Ignoring trigger $trigger: scheduler has been stopped" } + } + } + } + } + + // Entry point: starts a critical section + private suspend fun executeStop() { + mutex.withLock { + logger.i { "Stopping scheduler, cancelling current job" } + resetAllState(SchedulerState.Stopped) + } + } + + // Entry point. 
Starts a critical section + private suspend fun timeTaskFunctionCall(timeMS: Long, startingState: SchedulerState) { + delay(timeMS) + + logger.d { "Starting scheduled job execution" } + mutex.withLock { + state = SchedulerState.WaitingForReply(original = startingState) + } + + logger.d { "Executing task function, state: WaitingForReply" } + try { + taskFunction() + + mutex.withLock { + state = SchedulerState.Replied(original = startingState) + logger.i { "Task completed successfully" } + processSuccess() + } + + } catch (e: Exception) { + mutex.withLock { + state = SchedulerState.Replied(original = startingState) + logger.e { "Task failed with exception: ${e.message}, processing failure logic" } + scheduleForFailure(e) + } + } + } + // endregion: + + // region: Internal-state manipulators. Critical-section bound + + // Critical-section bound + private fun buffer(trigger: Trigger) { + when(trigger) { + Trigger.APP_REFRESH, Trigger.IMMEDIATE -> { + logger.d { "Ignoring redundant trigger: $trigger. Job is already being processed" } + return + } + Trigger.LOCAL_DATA_MODIFIED -> { + logger.d { "Buffering trigger: $trigger after job is done." } + bufferedTrigger = trigger + } + } + } + + // Critical-section bound + private fun executeTrigger(trigger: Trigger) { + when (trigger) { + Trigger.APP_REFRESH -> schedule(timings.appRefreshInterval, SchedulerState.Triggered(Trigger.APP_REFRESH)) + Trigger.LOCAL_DATA_MODIFIED -> schedule(timings.localDataModifiedInterval, SchedulerState.Triggered(Trigger.LOCAL_DATA_MODIFIED)) + Trigger.IMMEDIATE -> schedule(Duration.ZERO, SchedulerState.Triggered(Trigger.IMMEDIATE)) + } + } + + // Critical-section bound + private fun schedule(time: Duration, newState: SchedulerState) { + val currentTime = Clock.System.now().toEpochMilliseconds() + val firingTime = currentTime + time.inWholeMilliseconds + if (state.currentlyScheduled() && firingTime >= (expectedExecutionTime ?: 0)) { + logger.d { "Ignored schedule request: new firing time $firingTime >= current expected time $expectedExecutionTime" } + return + } + expectedExecutionTime = firingTime + + currentJob?.cancel() + + this.state = newState + logger.d { "Scheduling task in $time. Expected firing time: ${Instant.fromEpochMilliseconds(firingTime)}, State: $newState" } + currentJob = scope.launch { + // This will start a critical section. + timeTaskFunctionCall(time.inWholeMilliseconds, newState) + } + } + + // Critical-section bound + private fun scheduleDefault() { + logger.i { "Scheduling default task with standard interval: ${timings.standardInterval}" } + schedule(timings.standardInterval, SchedulerState.StandardDelay) + } + + // Critical section bound + private fun processSuccess() { + bufferedTrigger?.let { + bufferedTrigger = null + logger.d { "Executing buffered trigger: $it" } + executeTrigger(it) + } ?: also { scheduleDefault() } + } + + // Critical-section bound + private fun scheduleForFailure(exception: Exception) { + // If there's a failure, then the whole process will be restarted again, so no need to keep + // the buffered trigger. + bufferedTrigger = null + val state = this.state + if (state is SchedulerState.Replied) { + val count = state.original.getRetryCount() + logger.d { "Task failed, processing retry logic. 
Original state: ${state.original}, current retry count: $count" } + if (count < timings.failureRetryingConfig.maximumRetries) { + val nextCount = count + 1 + val nextTime = (timings.failureRetryingConfig.baseDelay * timings.failureRetryingConfig.multiplier.pow(count)) + logger.d { "Scheduling retry $nextCount/${timings.failureRetryingConfig.maximumRetries} in $nextTime" } + schedule(nextTime, SchedulerState.Retrying(original = state.original.originalState(), + retryNumber = nextCount)) + } + else { + logger.i { "Maximum retries (${timings.failureRetryingConfig.maximumRetries}) reached, reporting failure and stopping scheduler" } + reportFailureAndReset(exception) + } + } + } + + // Critical-section bound + private fun reportFailureAndReset(exception: Exception) { + scope.launch { + reachedMaximumFailureRetries(exception) + } + resetAllState(SchedulerState.Idle) + } + + // Critical-section bound + private fun resetAllState(newState: SchedulerState) { + this.state = newState + this.currentJob?.cancel() + this.currentJob = null + this.bufferedTrigger = null + this.expectedExecutionTime = null + } + // endregion: +} + +private fun SchedulerState.getRetryCount(): Int = when (this) { + is SchedulerState.Retrying -> this.retryNumber + else -> 0 +} + +private fun SchedulerState.originalState(): SchedulerState = when(this) { + is SchedulerState.Retrying -> original + is SchedulerState.Replied -> original + is SchedulerState.WaitingForReply -> original + is SchedulerState.Triggered, SchedulerState.Idle, SchedulerState.StandardDelay, SchedulerState.Stopped -> this +} + +private fun SchedulerState.currentlyScheduled(): Boolean = + when (this) { + SchedulerState.Idle, is SchedulerState.Replied, SchedulerState.Stopped -> false + is SchedulerState.WaitingForReply -> false + SchedulerState.StandardDelay, is SchedulerState.Triggered, is SchedulerState.Retrying -> true + } + +/** + * Factory function to create a Scheduler with default timings. + * + * @param taskFunction The task to execute. Should throw an exception on failure. 
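With the DefaultTimings defined above, the retry delays work out to a simple geometric backoff. The task body and failure callback below are hypothetical placeholders:

// delay before retry n+1 = baseDelay * multiplier^n, with n starting at 0:
// 200 ms, 500 ms, 1.25 s, ~3.1 s, ~7.8 s; after the fifth failed retry,
// reachedMaximumFailureRetries receives the final exception and the scheduler resets to Idle.
val scheduler = createScheduler(
    taskFunction = { /* run one synchronization pass; throw to signal failure */ },
    reachedMaximumFailureRetries = { e -> println("sync gave up: ${e.message}") }
)
scheduler.invoke(Trigger.APP_REFRESH)         // schedules a run roughly 30 s out
scheduler.invoke(Trigger.LOCAL_DATA_MODIFIED) // pulls the same run in to about 5 s from now
// scheduler.stop() cancels anything pending and ignores further triggers.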
+ * @param reachedMaximumFailureRetries Called when max retries are exhausted with the final exception + * @return A new Scheduler instance configured with default timings + */ +fun createScheduler( + taskFunction: suspend () -> Unit, + reachedMaximumFailureRetries: suspend (Exception) -> Unit +): Scheduler { + return Scheduler( + timings = DefaultTimings, + taskFunction = taskFunction, + reachedMaximumFailureRetries = reachedMaximumFailureRetries + ) +} \ No newline at end of file diff --git a/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/BookmarksSyncAdapterTest.kt b/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/BookmarksSyncAdapterTest.kt new file mode 100644 index 00000000..81c48ff7 --- /dev/null +++ b/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/BookmarksSyncAdapterTest.kt @@ -0,0 +1,95 @@ +@file:OptIn(kotlin.time.ExperimentalTime::class) + +package com.quran.shared.syncengine + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.model.SyncBookmark +import kotlinx.coroutines.test.runTest +import kotlin.test.Test +import kotlin.test.assertEquals +import kotlin.test.assertNotNull +import kotlin.test.fail +import kotlin.time.Instant + +class BookmarksSyncAdapterTest { + + @Test + fun `complete maps pushed mutations by order and uses local models`() = runTest { + val localMutation = LocalModelMutation( + model = SyncBookmark.PageBookmark( + id = "local-1", + page = 12, + lastModified = Instant.fromEpochMilliseconds(1000) + ), + remoteID = null, + localID = "local-1", + mutation = Mutation.CREATED + ) + + val localDataFetcher = object : LocalDataFetcher { + override suspend fun fetchLocalMutations(lastModified: Long): List> = + listOf(localMutation) + + override suspend fun checkLocalExistence(remoteIDs: List): Map = + remoteIDs.associateWith { true } + } + + var capturedRemote: List>? = null + var capturedLocal: List>? = null + + val resultNotifier = object : ResultNotifier { + override suspend fun didSucceed( + newToken: Long, + newRemoteMutations: List>, + processedLocalMutations: List> + ) { + capturedRemote = newRemoteMutations + capturedLocal = processedLocalMutations + } + + override suspend fun didFail(message: String) { + fail("didFail called: $message") + } + } + + val localModificationDateFetcher = object : LocalModificationDateFetcher { + override suspend fun localLastModificationDate(): Long? 
= 0L + } + + val adapter = BookmarksSyncAdapter( + BookmarksSynchronizationConfigurations( + localDataFetcher = localDataFetcher, + resultNotifier = resultNotifier, + localModificationDateFetcher = localModificationDateFetcher + ) + ) + + val plan = adapter.buildPlan( + lastModificationDate = 0L, + remoteMutations = emptyList() + ) + + val pushedMutations = listOf( + SyncMutation( + resource = "BOOKMARK", + resourceId = "remote-123", + mutation = Mutation.CREATED, + data = null, + timestamp = null + ) + ) + + plan.complete(newToken = 5L, pushedMutations = pushedMutations) + + val remote = assertNotNull(capturedRemote) + assertEquals(1, remote.size) + assertEquals("remote-123", remote[0].remoteID) + assertEquals("local-1", (remote[0].model as SyncBookmark.PageBookmark).id) + + val local = assertNotNull(capturedLocal) + assertEquals(1, local.size) + assertEquals("local-1", local[0].localID) + } +} diff --git a/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/BookmarksSynchronizationExecutorTest.kt b/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/BookmarksSynchronizationExecutorTest.kt new file mode 100644 index 00000000..28203963 --- /dev/null +++ b/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/BookmarksSynchronizationExecutorTest.kt @@ -0,0 +1,314 @@ +@file:OptIn(kotlin.time.ExperimentalTime::class) +package com.quran.shared.syncengine + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.model.SyncBookmark +import com.quran.shared.syncengine.model.SyncBookmark.PageBookmark +import kotlinx.coroutines.test.runTest +import kotlin.test.Test +import kotlin.test.assertEquals +import kotlin.test.assertFailsWith +import kotlin.test.assertNotNull +import kotlin.test.assertTrue +import kotlin.time.Instant + +class BookmarksSynchronizationExecutorTest { + + private val pipeline = BookmarksSynchronizationExecutor() + + @Test + fun `test successful synchronization with no conflicts`() = runTest { + // Given: Remote and local mutations on different pages (no conflicts) + val remoteMutations = listOf( + RemoteModelMutation( + model = PageBookmark(id = "remote1", page = 10, lastModified = Instant.fromEpochMilliseconds(1000)), + remoteID = "remote1", + mutation = Mutation.CREATED + ), + RemoteModelMutation( + model = PageBookmark(id = "remote2", page = 20, lastModified = Instant.fromEpochMilliseconds(1001)), + remoteID = "remote2", + mutation = Mutation.MODIFIED + ), + RemoteModelMutation( + model = PageBookmark(id = "remote3", page = 30, lastModified = Instant.fromEpochMilliseconds(1002)), + remoteID = "remote3", + mutation = Mutation.DELETED + ) + ) + + val localMutations = listOf( + LocalModelMutation( + model = PageBookmark(id = "local1", page = 15, lastModified = Instant.fromEpochMilliseconds(1002)), + remoteID = null, + localID = "local1", + mutation = Mutation.CREATED + ), + LocalModelMutation( + model = PageBookmark(id = "local2", page = 25, lastModified = Instant.fromEpochMilliseconds(1003)), + remoteID = "remote2", + localID = "local2", + mutation = Mutation.MODIFIED + ) + ) + + val lastModificationDate = 500L + val updatedModificationDate = 1500L + + // When: Execute pipeline + val result = pipeline.executePipeline( + fetchLocal = { + BookmarksSynchronizationExecutor.PipelineInitData(lastModificationDate, localMutations) + }, + fetchRemote = { _ -> + BookmarksSynchronizationExecutor.FetchedRemoteData(remoteMutations, 
updatedModificationDate) + }, + checkLocalExistence = { remoteIDs -> + // Mock existence check - all remote IDs exist + remoteIDs.associateWith { true } + }, + pushLocal = { _, _ -> + // Mock push that returns empty response + BookmarksSynchronizationExecutor.PushResultData(emptyList(), updatedModificationDate) + } + ) + + // Then: Verify results + assertNotNull(result) + assertEquals(updatedModificationDate, result.lastModificationDate) + // After preprocessing, MODIFIED mutations are converted to CREATED, so we expect 2 local mutations + assertEquals(2, result.localMutations.size) // local1, local2 (MODIFIED converted to CREATED) + + // Should have 3 remote mutations (the original ones, no conflicts) + assertEquals(3, result.remoteMutations.size) + assertTrue(result.remoteMutations.any { it.remoteID == "remote1" && it.mutation == Mutation.CREATED }) + assertTrue(result.remoteMutations.any { it.remoteID == "remote2" && it.mutation == Mutation.CREATED }) + assertTrue(result.remoteMutations.any { it.remoteID == "remote3" && it.mutation == Mutation.DELETED }) + } + + @Test + fun `test multiple conflicts detection`() = runTest { + // Given: Multiple conflicts between remote and local mutations + val remoteMutations = listOf( + RemoteModelMutation( + model = PageBookmark(id = "remote1", page = 10, lastModified = Instant.fromEpochMilliseconds(1000)), + remoteID = "remote1", + mutation = Mutation.CREATED + ), + RemoteModelMutation( + model = PageBookmark(id = "remote2", page = 20, lastModified = Instant.fromEpochMilliseconds(1001)), + remoteID = "remote2", + mutation = Mutation.MODIFIED + ), + RemoteModelMutation( + model = PageBookmark(id = "remote3", page = 30, lastModified = Instant.fromEpochMilliseconds(1002)), + remoteID = "remote3", + mutation = Mutation.DELETED + ) + ) + + val localMutations = listOf( + // Conflict 1: Same page as remote1 + LocalModelMutation( + model = PageBookmark(id = "local1", page = 10, lastModified = Instant.fromEpochMilliseconds(1003)), + remoteID = null, + localID = "local1", + mutation = Mutation.CREATED + ), + // Conflict 2: Same page as remote2 + LocalModelMutation( + model = PageBookmark(id = "local2", page = 20, lastModified = Instant.fromEpochMilliseconds(1004)), + remoteID = null, + localID = "local2", + mutation = Mutation.MODIFIED + ), + // Conflict 3: Local deletion of remote3 + LocalModelMutation( + model = PageBookmark(id = "local3", page = 30, lastModified = Instant.fromEpochMilliseconds(1005)), + remoteID = "remote3", + localID = "local3", + mutation = Mutation.DELETED + ), + // No conflict + LocalModelMutation( + model = PageBookmark(id = "local4", page = 40, lastModified = Instant.fromEpochMilliseconds(1006)), + remoteID = null, + localID = "local4", + mutation = Mutation.CREATED + ) + ) + + val lastModificationDate = 500L + val updatedModificationDate = 1500L + + // When: Execute pipeline + val result = pipeline.executePipeline( + fetchLocal = { + BookmarksSynchronizationExecutor.PipelineInitData(lastModificationDate, localMutations) + }, + fetchRemote = { _ -> + BookmarksSynchronizationExecutor.FetchedRemoteData(remoteMutations, updatedModificationDate) + }, + checkLocalExistence = { remoteIDs -> + // Mock existence check - all remote IDs exist + remoteIDs.associateWith { true } + }, + pushLocal = { _, _ -> + // Mock push that returns empty response + BookmarksSynchronizationExecutor.PushResultData(emptyList(), updatedModificationDate) + } + ) + + // Then: Verify results + assertNotNull(result) + assertEquals(updatedModificationDate, 
result.lastModificationDate) + // After preprocessing, MODIFIED mutations are converted to CREATED, so we expect 4 local mutations + assertEquals(4, result.localMutations.size) // local1, local2 (MODIFIED converted to CREATED), local3, local4 + + // Should have remote mutations (conflicts are resolved) + assertTrue(result.remoteMutations.isNotEmpty(), "Should have remote mutations after conflict resolution") + } + + @Test + fun `test illogical scenario - too many mutations for same page`() = runTest { + val localMutations = listOf( + // 3 mutations for the same page + LocalModelMutation( + model = PageBookmark(id = "local1", page = 10, lastModified = Instant.fromEpochMilliseconds(1000)), + remoteID = null, + localID = "local1", + mutation = Mutation.CREATED + ), + LocalModelMutation( + model = PageBookmark(id = "local2", page = 10, lastModified = Instant.fromEpochMilliseconds(1001)), + remoteID = null, + localID = "local2", + mutation = Mutation.CREATED + ), + LocalModelMutation( + model = PageBookmark(id = "local3", page = 10, lastModified = Instant.fromEpochMilliseconds(1002)), + remoteID = "remote1", + localID = "local3", + mutation = Mutation.DELETED + ) + ) + + val lastModificationDate = 500L + val updatedModificationDate = 1500L + + // When & Then: Execute pipeline should throw exception + val exception = assertFailsWith { + pipeline.executePipeline( + fetchLocal = { + BookmarksSynchronizationExecutor.PipelineInitData(lastModificationDate, localMutations) + }, + fetchRemote = { _ -> + BookmarksSynchronizationExecutor.FetchedRemoteData(emptyList(), updatedModificationDate) + }, + checkLocalExistence = { remoteIDs -> + remoteIDs.associateWith { true } + }, + pushLocal = { _, _ -> + BookmarksSynchronizationExecutor.PushResultData(emptyList(), updatedModificationDate) + } + ) + } + + assertEquals( + exception.message?.contains("Illogical scenario detected"), + true, + "Error message should include illogical scenario details" + ) + assertEquals( + exception.message?.contains("Bookmark page=10 has 3 mutations"), + true, + "Error message should include page and mutation count details" + ) + } + + @Test + fun `test illogical scenario - deletion without remote ID`() = runTest { + val localMutations = listOf( + LocalModelMutation( + model = PageBookmark(id = "local1", page = 10, lastModified = Instant.fromEpochMilliseconds(1000)), + remoteID = null, // This should cause an error + localID = "local1", + mutation = Mutation.DELETED + ) + ) + + val lastModificationDate = 500L + val updatedModificationDate = 1500L + + // When & Then: Execute pipeline should throw exception + val exception = assertFailsWith { + pipeline.executePipeline( + fetchLocal = { + BookmarksSynchronizationExecutor.PipelineInitData(lastModificationDate, localMutations) + }, + fetchRemote = { _ -> + BookmarksSynchronizationExecutor.FetchedRemoteData(emptyList(), updatedModificationDate) + }, + checkLocalExistence = { remoteIDs -> + remoteIDs.associateWith { true } + }, + pushLocal = { _, _ -> + BookmarksSynchronizationExecutor.PushResultData(emptyList(), updatedModificationDate) + } + ) + } + + assertEquals( + exception.message?.contains("deletion without remote ID"), + true, + "Error message should include deletion without remote ID details" + ) + } + + @Test + fun `test pipeline filters out DELETE mutations for non-existent resources`() = runTest { + // Given: Remote mutations including DELETE for non-existent resource + val remoteMutations = listOf( + RemoteModelMutation( + model = PageBookmark(id = "remote1", page = 10, 
lastModified = Instant.fromEpochMilliseconds(1000)), + remoteID = "remote1", + mutation = Mutation.CREATED + ), + RemoteModelMutation( + model = PageBookmark(id = "remote2", page = 20, lastModified = Instant.fromEpochMilliseconds(1001)), + remoteID = "remote2", + mutation = Mutation.DELETED // This should be filtered out if it doesn't exist locally + ) + ) + + val localMutations = emptyList>() + val lastModificationDate = 500L + val updatedModificationDate = 1500L + + // When: Execute pipeline with existence check that says remote2 doesn't exist + val result = pipeline.executePipeline( + fetchLocal = { + BookmarksSynchronizationExecutor.PipelineInitData(lastModificationDate, localMutations) + }, + fetchRemote = { _ -> + BookmarksSynchronizationExecutor.FetchedRemoteData(remoteMutations, updatedModificationDate) + }, + checkLocalExistence = { remoteIDs -> + // Mock existence check - only remote1 exists, remote2 doesn't + mapOf("remote1" to true, "remote2" to false) + }, + pushLocal = { _, _ -> + BookmarksSynchronizationExecutor.PushResultData(emptyList(), updatedModificationDate) + } + ) + + // Then: Verify only the CREATED mutation remains (DELETE was filtered out) + assertNotNull(result) + assertEquals(1, result.remoteMutations.size) + val remainingMutation = result.remoteMutations.first() + assertEquals("remote1", remainingMutation.remoteID) + assertEquals(Mutation.CREATED, remainingMutation.mutation) + } +} diff --git a/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/SynchronizationClientIntegrationTest.kt b/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/SynchronizationClientIntegrationTest.kt new file mode 100644 index 00000000..26571123 --- /dev/null +++ b/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/SynchronizationClientIntegrationTest.kt @@ -0,0 +1,397 @@ +@file:OptIn(kotlin.time.ExperimentalTime::class) +package com.quran.shared.syncengine + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.model.SyncBookmark +import com.quran.shared.syncengine.model.SyncBookmark.PageBookmark +import io.ktor.client.HttpClient +import kotlinx.coroutines.CompletableDeferred +import kotlinx.coroutines.test.runTest +import kotlin.test.Ignore +import kotlin.test.Test +import kotlin.test.assertEquals +import kotlin.test.assertNotNull +import kotlin.test.assertTrue +import kotlin.time.Instant + +/** + * Integration tests for SynchronizationClient. + * + * These tests make real network calls to the testing API to verify end-to-end functionality. + * + * To run these tests: + * 1. Ensure you have valid authentication credentials (accessToken and clientId) + * 2. Be aware that tests will make actual network requests to https://apis-testing.quran.foundation + * 3. 
Tests verify the actual sync behavior against the real API + */ +class SynchronizationClientIntegrationTest { + + private val accessToken = "eyJhbGciOiJSUzI1NiIsImtpZCI6IjE0ZDQ1ZDI4LTY1ZDgtNDMyYi04Y2EzLTZmZjM5MjEyYWQ0YiIsInR5cCI6IkpXVCJ9.eyJhdWQiOltdLCJjbGllbnRfaWQiOiI5NTRlYjU0OS0zNTY2LTRmOWEtYjY1Zi1mYTYxYmY5YTllMzciLCJleHAiOjE3NTU3NDE1MTgsImV4dCI6e30sImlhdCI6MTc1NTczNzkxOCwiaXNzIjoiaHR0cHM6Ly90ZXN0aW5nLW9hdXRoMi5xdXJhbi5mb3VuZGF0aW9uIiwianRpIjoiMzAxM2E4OWQtZmJlZi00OTBjLWE1NTYtNzExOWQyZDcwN2RkIiwibmJmIjoxNzU1NzM3OTE4LCJzY3AiOlsib3BlbmlkIiwicHJvZmlsZSIsImJvb2ttYXJrIiwic3luYyJdLCJzdWIiOiJjZmY2ZGFhNy05NzZlLTRlYmMtOWViOC1iMTY4ZTlmNTNiYmIifQ.hDXuFBP9Tde4qtKYerk6WDdXpb8Se7LmHpJ8SquUpXSkGsXYRn8kCFWP9p5ZqeLJ8BPyhJMAF00byFpaa5XkTBy9OaUujiSOro-qQtT5DoGvJC2z-El7juYeD8-UjY88bLqW6VkfVRAPTlXweKZVqJrk5A_QGuWkCdH_-KiDo5tGYkhc8An2DI-FWoL7enwdAjhk5ctAJtCbUXUQfV00GTIaDKfcqvsrgAdicBEFDn8gM999XBE2Fh51CAqx1lrq6_YCux8jum24hLM5qHpViCquq8AkA0jp9PSKMW7BGNNEx1JK5aMxYZeXS2ANRUeEXSLGdhG1xJOnWyctIGOR9tOc2gAjz8OgfaLr7vexmBkeMz8z19hI7vZX9T8Dg63OX9nlSHDzNsL2pbpGSLn4bfIHVWpryh8g6BEZx2CQaPbcMfp3p8Bnj3VRPFAUSGl0csQ6EV8IzFuew8cu0KFKKqBmX3zcEj6LL63d9AsvTyXrIj4YNFBJGdt0-j3cTKdjcukn-QLUX1MyLgoEIO3V8e7UTsdAV-M1vTAYULm3ELIdl7Kf0-AOhIEiwHl5sLRyWaYHcRN76IJrg-4SMxMvWk7CiYJCZ6hSXr6l_2d4u7aby9xG9Vo765SxMqWuQrYnJDqU5U_UkU9PLCXdefp729MkT4OTN79SuQZnDkFlM4M" + private val clientId = "954eb549-3566-4f9a-b65f-fa61bf9a9e37" + private val baseUrl = "https://apis-testing.quran.foundation" + private val lastModificationDate: Long? = null + + private fun createEnvironment(): SynchronizationEnvironment { + return SynchronizationEnvironment(baseUrl) + } + + private fun createAuthenticationDataFetcher(): AuthenticationDataFetcher { + return object : AuthenticationDataFetcher { + override suspend fun fetchAuthenticationHeaders(): Map { + return mapOf( + "x-client-id" to clientId, + "x-auth-token" to accessToken + ) + } + } + } + + private fun createLocalModificationDateFetcher(lastModificationDate: Long?): LocalModificationDateFetcher { + return object : LocalModificationDateFetcher { + override suspend fun localLastModificationDate(): Long? { + return lastModificationDate + } + } + } + + private fun createLocalMutationsFetcher( + mutations: List>, + existingRemoteIDs: Set = emptySet() + ): LocalDataFetcher { + return object : LocalDataFetcher { + override suspend fun fetchLocalMutations(lastModified: Long): List> { + println("Mock fetcher called with lastModified: $lastModified") + return mutations + } + + override suspend fun checkLocalExistence(remoteIDs: List): Map { + // Mock implementation - return true only for IDs in the existingRemoteIDs set + return remoteIDs.associateWith { it in existingRemoteIDs } + } + } + } + + private fun createResultNotifier( + syncCompleted: CompletableDeferred, + expectedLocalMutationsCount: Int = 0, + expectedRemoteMutationsMinCount: Int = 0, + expectedProcessedPages: Set? = null, + expectOnlyCreationEvents: Boolean = false + ): ResultNotifier { + return object : ResultNotifier { + + override suspend fun didFail(message: String) { + println("Got a failure. $message") + syncCompleted.complete(Unit) + } + + override suspend fun didSucceed( + newToken: Long, + newRemoteMutations: List>, + processedLocalMutations: List> + ) { + // Verify the results + println("Got response. Last modification date: $newToken") + println("Got response. Remote mutations count: ${newRemoteMutations.count()}") + println("Got response. Processed local mutations count: ${processedLocalMutations.count()}") + println("Got response. 
Remote mutations IDs: ${newRemoteMutations.map { it.model.idOrThrow() }}") + println("Got response. Remote mutations pages: ${newRemoteMutations.map { it.model.pageOrThrow() }}") + println("Got response. Remote mutations types: ${newRemoteMutations.map { it.mutation }}") + println("Got response. Processed local mutations pages: ${processedLocalMutations.map { it.model.pageOrThrow() }}") + + assertTrue(newToken > 0L, "Should return a new timestamp") + assertEquals(expectedLocalMutationsCount, processedLocalMutations.size, "Should have expected processed local mutations") + assertTrue(newRemoteMutations.count() >= expectedRemoteMutationsMinCount, "Expect to return at least expected remote mutations") + + // Verify specific pages if provided + expectedProcessedPages?.let { expectedPages -> + val processedPages = processedLocalMutations.map { it.model.pageOrThrow() }.toSet() + assertEquals(expectedPages, processedPages, "Should have processed expected mutations") + } + + // Verify only CREATION events if specified + if (expectOnlyCreationEvents) { + println("Creation events: ${newRemoteMutations.count { it.mutation == Mutation.CREATED }}") + println("Modification events: ${newRemoteMutations.count { it.mutation == Mutation.MODIFIED }}") + println("Deletion events: ${newRemoteMutations.count { it.mutation == Mutation.DELETED }}") + val hasOnlyCreationEvents = newRemoteMutations.all { it.mutation == Mutation.CREATED } + assertTrue(hasOnlyCreationEvents, "Should only receive CREATION events for first-time sync") + + val hasNoDeleteEvents = newRemoteMutations.none { it.mutation == Mutation.DELETED } + assertTrue(hasNoDeleteEvents, "Should not receive any DELETE events for first-time sync") + + println("Verified: Only CREATION events received, no DELETE events") + } + + // Signal that sync is complete + syncCompleted.complete(Unit) + } + } + } + + private fun createBookmarksConfigurations( + localDataFetcher: LocalDataFetcher, + resultNotifier: ResultNotifier, + localModificationDateFetcher: LocalModificationDateFetcher + ): BookmarksSynchronizationConfigurations { + return BookmarksSynchronizationConfigurations( + localDataFetcher = localDataFetcher, + resultNotifier = resultNotifier, + localModificationDateFetcher = localModificationDateFetcher + ) + } + + private fun createSynchronizationClient( + environment: SynchronizationEnvironment, + authFetcher: AuthenticationDataFetcher, + bookmarksConfigurations: BookmarksSynchronizationConfigurations, + httpClient: HttpClient? 
= null + ): SynchronizationClient { + return SynchronizationClientBuilder.build( + environment = environment, + authFetcher = authFetcher, + bookmarksConfigurations = bookmarksConfigurations, + httpClient = httpClient + ) + } + + @Test + @Ignore + fun `test first time sync with no local changes and zero timestamp`() = runTest { + // Arrange + val syncCompleted = CompletableDeferred() + + val environment = createEnvironment() + val authFetcher = createAuthenticationDataFetcher() + val localMutationsFetcher = createLocalMutationsFetcher( + mutations = emptyList(), + existingRemoteIDs = emptySet() // No existing remote IDs for first-time sync + ) + val localModificationDateFetcher = createLocalModificationDateFetcher(null) // Zero timestamp for first time sync + val resultNotifier = createResultNotifier( + syncCompleted = syncCompleted, + expectedLocalMutationsCount = 0, + expectedRemoteMutationsMinCount = 1, + expectOnlyCreationEvents = true + ) + + val bookmarksConfigurations = createBookmarksConfigurations( + localDataFetcher = localMutationsFetcher, + resultNotifier = resultNotifier, + localModificationDateFetcher = localModificationDateFetcher + ) + + val synchronizationClient = createSynchronizationClient( + environment = environment, + authFetcher = authFetcher, + bookmarksConfigurations = bookmarksConfigurations + ) + + // Assert + assertNotNull(synchronizationClient, "SynchronizationClient should be created successfully") + + // Trigger the sync operation + println("Running the first-time sync integration test.") + synchronizationClient.applicationStarted() + + // Wait for the sync operation to complete + syncCompleted.await() + println("First-time sync operation completed successfully!") + } + + @Test + @Ignore + fun `test running without local changes and zero timestamp`() = runTest { + // Arrange + val syncCompleted = CompletableDeferred() + + val environment = createEnvironment() + val authFetcher = createAuthenticationDataFetcher() + val localMutationsFetcher = createLocalMutationsFetcher( + mutations = emptyList(), + existingRemoteIDs = emptySet() + ) + val localModificationDateFetcher = createLocalModificationDateFetcher(null) + val resultNotifier = createResultNotifier( + syncCompleted = syncCompleted, + expectedLocalMutationsCount = 0, + expectedRemoteMutationsMinCount = 1 + ) + + val bookmarksConfigurations = createBookmarksConfigurations( + localDataFetcher = localMutationsFetcher, + resultNotifier = resultNotifier, + localModificationDateFetcher = localModificationDateFetcher + ) + + val synchronizationClient = createSynchronizationClient( + environment = environment, + authFetcher = authFetcher, + bookmarksConfigurations = bookmarksConfigurations + ) + + // Assert + assertNotNull(synchronizationClient, "SynchronizationClient should be created successfully") + + // Trigger the sync operation + println("Running the integration test for synchronization.") + synchronizationClient.applicationStarted() + + // Wait for the sync operation to complete + syncCompleted.await() + println("Sync operation completed successfully!") + } + + @Test + @Ignore + fun `test running and pushing some local updates`() = runTest { + // Arrange + val syncCompleted = CompletableDeferred() + + // Create test data for local mutations: 2 creations and 1 deletion + val testLocalMutations = listOf( + LocalModelMutation( + model = PageBookmark(id = "local-2", page = 200, lastModified = Instant.fromEpochMilliseconds(1752350137423)), + remoteID = null, // No remote ID for local mutations + localID = 
"local-id-2", + mutation = Mutation.CREATED + ), +// LocalModelMutation( +// model = PageBookmark(id = "hvpyr0q863etejgc4l4dpmhj", page = 50, lastModified = Instant.fromEpochMilliseconds(1752350137423)), +// remoteID = "hvpyr0q863etejgc4l4dpmhj", // This was a remote bookmark that we're deleting +// localID = "local-id-3", +// mutation = Mutation.DELETED +// ), +// LocalModelMutation( +// model = PageBookmark(id = "t8sx6yrl55oft086mx5bygl5", page = 107, lastModified = Instant.fromEpochMilliseconds(1752350137423)), +// remoteID = "t8sx6yrl55oft086mx5bygl5", // This was a remote bookmark that we're deleting +// localID = "local-id-3", +// mutation = Mutation.DELETED +// ) + ) + + val environment = createEnvironment() + val authFetcher = createAuthenticationDataFetcher() + val localMutationsFetcher = createLocalMutationsFetcher( + mutations = testLocalMutations, + existingRemoteIDs = setOf("hvpyr0q863etejgc4l4dpmhj", "t8sx6yrl55oft086mx5bygl5") // Example: these IDs exist locally + ) + val localModificationDateFetcher = createLocalModificationDateFetcher(lastModificationDate) + val resultNotifier = createResultNotifier( + syncCompleted = syncCompleted, + expectedLocalMutationsCount = 3, + expectedRemoteMutationsMinCount = 3, + expectedProcessedPages = setOf(10, 20, 30) + ) + + val bookmarksConfigurations = createBookmarksConfigurations( + localDataFetcher = localMutationsFetcher, + resultNotifier = resultNotifier, + localModificationDateFetcher = localModificationDateFetcher + ) + + val synchronizationClient = createSynchronizationClient( + environment = environment, + authFetcher = authFetcher, + bookmarksConfigurations = bookmarksConfigurations + ) + + // Assert + assertNotNull(synchronizationClient, "SynchronizationClient should be created successfully") + + // Trigger the sync operation + println("Running the integration test for synchronization with local updates.") + synchronizationClient.applicationStarted() + + // Wait for the sync operation to complete + syncCompleted.await() + println("Sync operation with local updates completed successfully!") + } + + @Test + @Ignore + fun `test a couple of conflicts with expected deletions from BE as well`() = runTest { + + assertTrue( (lastModificationDate ?: 0) > 0, "The last modification date should be bigger than 0 for this test.") + // Arrange + val syncCompleted = CompletableDeferred() + + // Create test data for local mutations: 2 creations and 1 deletion + val testLocalMutations = listOf( + LocalModelMutation( + // For this to work, there needs to be an expected remote delete mutation for that remote ID. 
+ model = PageBookmark(id = "bnz3yxj9hqsepxtteov57bvt", page = 20, lastModified = Instant.fromEpochMilliseconds(1752350137423)), + remoteID = "bnz3yxj9hqsepxtteov57bvt", // To be filled + localID = "bnz3yxj9hqsepxtteov57bvt", + mutation = Mutation.DELETED + ), + LocalModelMutation( + model = PageBookmark(id = "local-2", page = 20, lastModified = Instant.fromEpochMilliseconds(1752350137493)), + remoteID = null, // No remote ID for local mutations + localID = "local-id-2", + mutation = Mutation.CREATED + ), +// LocalModelMutation( +// // TODO: Should clash with something on the BE +// model = PageBookmark(id = "local-2", page = 200, lastModified = Instant.fromEpochMilliseconds(1752350137423)), +// remoteID = null, // No remote ID for local mutations +// localID = "local-id-5", +// mutation = Mutation.CREATED +// ), + LocalModelMutation( + model = PageBookmark(id = "local-2", page = 600, lastModified = Instant.fromEpochMilliseconds(1752350137423)), + remoteID = null, // No remote ID for local mutations + localID = "non-clashing-local-id", + mutation = Mutation.CREATED + ), + ) + + val environment = createEnvironment() + val authFetcher = createAuthenticationDataFetcher() + val localMutationsFetcher = createLocalMutationsFetcher( + mutations = testLocalMutations, + existingRemoteIDs = setOf("chqcraq024hde90cwwxo14a0", "f5u2hbakgomknm828nsfltwk") + ) + val localModificationDateFetcher = createLocalModificationDateFetcher(lastModificationDate) + val resultNotifier = createResultNotifier( + syncCompleted = syncCompleted, + expectedLocalMutationsCount = 3, + expectedRemoteMutationsMinCount = 3, + expectedProcessedPages = setOf(10, 20, 30) + ) + + val bookmarksConfigurations = createBookmarksConfigurations( + localDataFetcher = localMutationsFetcher, + resultNotifier = resultNotifier, + localModificationDateFetcher = localModificationDateFetcher + ) + + val synchronizationClient = createSynchronizationClient( + environment = environment, + authFetcher = authFetcher, + bookmarksConfigurations = bookmarksConfigurations + ) + + // Assert + assertNotNull(synchronizationClient, "SynchronizationClient should be created successfully") + + // Trigger the sync operation + println("Running the integration test for synchronization with local updates.") + synchronizationClient.applicationStarted() + + // Wait for the sync operation to complete + syncCompleted.await() + println("Sync operation with local updates completed successfully!") + } +} + +private fun SyncBookmark.pageOrThrow(): Int = + (this as SyncBookmark.PageBookmark).page + +private fun SyncBookmark.idOrThrow(): String = + when (this) { + is SyncBookmark.PageBookmark -> id + is SyncBookmark.AyahBookmark -> id + } diff --git a/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/SynchronizationClientTest.kt b/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/SynchronizationClientTest.kt new file mode 100644 index 00000000..02c544cd --- /dev/null +++ b/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/SynchronizationClientTest.kt @@ -0,0 +1,14 @@ +package com.quran.shared.syncengine + +import kotlin.test.Test +import kotlin.test.assertTrue + +class SynchronizationClientTest { + + @Test + fun `test SynchronizationClient can be created`() { + // This is a basic test to ensure the SynchronizationClient can be instantiated + // The main business logic testing is done in PageBookmarksSynchronizationExecutorTest + assertTrue(true, "SynchronizationClient scaffolding is working") + } +} \ No newline at end of file diff --git 
a/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/conflict/ConflictDetectorTest.kt b/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/conflict/ConflictDetectorTest.kt new file mode 100644 index 00000000..c4193abe --- /dev/null +++ b/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/conflict/ConflictDetectorTest.kt @@ -0,0 +1,335 @@ +@file:OptIn(kotlin.time.ExperimentalTime::class) +package com.quran.shared.syncengine.conflict + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.model.SyncBookmark +import com.quran.shared.syncengine.model.SyncBookmark.PageBookmark +import kotlin.test.Test +import kotlin.test.assertEquals +import kotlin.time.Instant + +class ConflictDetectorTest { + + @Test + fun `getConflicts with empty lists should return empty result`() { + // Given + val conflictDetector = ConflictDetector(emptyList(), emptyList()) + + // When + val result = conflictDetector.getConflicts() + + // Then + assertEquals(0, result.conflicts.size, "Number of resource conflicts") + assertEquals(0, result.nonConflictingRemoteMutations.size, "Number of other remote mutations") + assertEquals(0, result.nonConflictingLocalMutations.size, "Number of other local mutations") + } + + @Test + fun `getConflicts with different pages should return no conflicts`() { + // Given + val remoteModelMutations = listOf( + RemoteModelMutation( + model = PageBookmark(id = "remote-1", page = 10, lastModified = Instant.fromEpochMilliseconds(1000)), + remoteID = "remote-1", + mutation = Mutation.CREATED + ) + ) + val localModelMutations = listOf( + LocalModelMutation( + model = PageBookmark(id = "local-1", page = 20, lastModified = Instant.fromEpochMilliseconds(1001)), + remoteID = null, + localID = "local-1", + mutation = Mutation.CREATED + ) + ) + val conflictDetector = ConflictDetector(remoteModelMutations, localModelMutations) + + // When + val result = conflictDetector.getConflicts() + + // Then + assertEquals(0, result.conflicts.size, "Number of resource conflicts") + assertEquals(1, result.nonConflictingRemoteMutations.size, "Number of other remote mutations") + assertEquals(1, result.nonConflictingLocalMutations.size, "Number of other local mutations") + } + + @Test + fun `getConflicts with creation events for same page should detect conflict`() { + // Given + val remoteModelMutations = listOf( + RemoteModelMutation( + model = PageBookmark(id = "remote-1", page = 10, lastModified = Instant.fromEpochMilliseconds(1000)), + remoteID = "remote-1", + mutation = Mutation.CREATED + ), + RemoteModelMutation( + model = PageBookmark(id = "remote-2", page = 20, lastModified = Instant.fromEpochMilliseconds(1001)), + remoteID = "remote-2", + mutation = Mutation.CREATED + ) + ) + val localModelMutations = listOf( + LocalModelMutation( + model = PageBookmark(id = "local-1", page = 10, lastModified = Instant.fromEpochMilliseconds(1003)), + remoteID = null, + localID = "local-1", + mutation = Mutation.CREATED + ), + LocalModelMutation( + model = PageBookmark(id = "local-2", page = 40, lastModified = Instant.fromEpochMilliseconds(1004)), + remoteID = null, + localID = "local-2", + mutation = Mutation.CREATED + ) + ) + val conflictDetector = ConflictDetector(remoteModelMutations, localModelMutations) + + // When + val result = conflictDetector.getConflicts() + + // Then + assertEquals(1, result.conflicts.size, "Number of resource conflicts") + val 
resourceConflict = result.conflicts.first() + assertEquals(1, resourceConflict.localMutations.size, "Number of local mutations in conflict") + assertEquals(1, resourceConflict.remoteMutations.size, "Number of remote mutations in conflict") + assertEquals(10, resourceConflict.localMutations.first().model.pageOrThrow(), "Page number of local mutation") + assertEquals(10, resourceConflict.remoteMutations.first().model.pageOrThrow(), "Page number of remote mutation") + + // Verify other mutations + assertEquals(1, result.nonConflictingRemoteMutations.size, "Number of other remote mutations") + assertEquals(1, result.nonConflictingLocalMutations.size, "Number of other local mutations") + } + + @Test + fun `getConflicts with multiple remote mutations for same page should group them together`() { + // Given + val remoteModelMutations = listOf( + RemoteModelMutation( + model = PageBookmark(id = "remote-1", page = 10, lastModified = Instant.fromEpochMilliseconds(1000)), + remoteID = "remote-1", + mutation = Mutation.DELETED + ), + RemoteModelMutation( + model = PageBookmark(id = "remote-2", page = 10, lastModified = Instant.fromEpochMilliseconds(1001)), + remoteID = "remote-2", + mutation = Mutation.CREATED + ), + RemoteModelMutation( + model = PageBookmark(id = "remote-3", page = 20, lastModified = Instant.fromEpochMilliseconds(1002)), + remoteID = "remote-3", + mutation = Mutation.CREATED + ) + ) + val localModelMutations = listOf( + LocalModelMutation( + model = PageBookmark(id = "local-1", page = 10, lastModified = Instant.fromEpochMilliseconds(200)), + remoteID = null, + localID = "local-1", + mutation = Mutation.CREATED + ) + ) + val conflictDetector = ConflictDetector(remoteModelMutations, localModelMutations) + + // When + val result = conflictDetector.getConflicts() + + // Then + assertEquals(1, result.conflicts.size, "Number of resource conflicts") + val resourceConflict = result.conflicts.first() + assertEquals(1, resourceConflict.localMutations.size, "Number of local mutations in conflict") + assertEquals(2, resourceConflict.remoteMutations.size, "Number of remote mutations in conflict") + assertEquals(10, resourceConflict.localMutations.first().model.pageOrThrow(), "Page number of local mutation") + + // Verify remote mutations are grouped correctly + val remoteIDs = resourceConflict.remoteMutations.map { it.remoteID }.toSet() + assertEquals(setOf("remote-1", "remote-2"), remoteIDs, "Remote IDs in conflict group") + + // Verify other mutations + assertEquals(1, result.nonConflictingRemoteMutations.size, "Number of other remote mutations") + assertEquals(0, result.nonConflictingLocalMutations.size, "Number of other local mutations") + } + + @Test + fun `getConflicts with delete and create events on both sides for same page should detect conflict`() { + // Given + val remoteModelMutations = listOf( + RemoteModelMutation( + model = PageBookmark(id = "remote-1", page = 10, lastModified = Instant.fromEpochMilliseconds(1000)), + remoteID = "remote-1", + mutation = Mutation.DELETED + ), + RemoteModelMutation( + model = PageBookmark(id = "remote-2", page = 10, lastModified = Instant.fromEpochMilliseconds(1001)), + remoteID = "remote-2", + mutation = Mutation.CREATED + ), + RemoteModelMutation( + model = PageBookmark(id = "remote-3", page = 20, lastModified = Instant.fromEpochMilliseconds(1002)), + remoteID = "remote-3", + mutation = Mutation.CREATED + ) + ) + val localModelMutations = listOf( + LocalModelMutation( + model = PageBookmark(id = "remote-1", page = 10, lastModified = 
Instant.fromEpochMilliseconds(1003)), + remoteID = "remote-1", + localID = "local-1", + mutation = Mutation.DELETED + ), + LocalModelMutation( + model = PageBookmark(id = "local-2", page = 10, lastModified = Instant.fromEpochMilliseconds(1004)), + remoteID = null, + localID = "local-2", + mutation = Mutation.CREATED + ), + LocalModelMutation( + model = PageBookmark(id = "local-3", page = 30, lastModified = Instant.fromEpochMilliseconds(1005)), + remoteID = null, + localID = "local-3", + mutation = Mutation.CREATED + ) + ) + val conflictDetector = ConflictDetector(remoteModelMutations, localModelMutations) + + // When + val result = conflictDetector.getConflicts() + + // Then + assertEquals(1, result.conflicts.size, "Number of resource conflicts") + val resourceConflict = result.conflicts.first() + assertEquals(2, resourceConflict.localMutations.size, "Number of local mutations in conflict") + assertEquals(2, resourceConflict.remoteMutations.size, "Number of remote mutations in conflict") + assertEquals(10, resourceConflict.localMutations.first().model.pageOrThrow(), "Page number of local mutation") + + // Verify other mutations + assertEquals(1, result.nonConflictingRemoteMutations.size, "Number of other remote mutations") + assertEquals(1, result.nonConflictingLocalMutations.size, "Number of other local mutations") + } + + @Test + fun `getConflicts with multiple local mutations for same page should group them together`() { + // Given + val remoteModelMutations = listOf( + RemoteModelMutation( + model = PageBookmark(id = "remote-1", page = 10, lastModified = Instant.fromEpochMilliseconds(1000)), + remoteID = "remote-1", + mutation = Mutation.CREATED + ), + RemoteModelMutation( + model = PageBookmark(id = "remote-2", page = 20, lastModified = Instant.fromEpochMilliseconds(1001)), + remoteID = "remote-2", + mutation = Mutation.CREATED + ) + ) + val localModelMutations = listOf( + LocalModelMutation( + model = PageBookmark(id = "local-1", page = 10, lastModified = Instant.fromEpochMilliseconds(1002)), + remoteID = "remote-1", + localID = "local-1", + mutation = Mutation.DELETED + ), + LocalModelMutation( + model = PageBookmark(id = "local-2", page = 10, lastModified = Instant.fromEpochMilliseconds(1003)), + remoteID = null, + localID = "local-2", + mutation = Mutation.CREATED + ), + LocalModelMutation( + model = PageBookmark(id = "local-3", page = 30, lastModified = Instant.fromEpochMilliseconds(1004)), + remoteID = null, + localID = "local-3", + mutation = Mutation.CREATED + ) + ) + val conflictDetector = ConflictDetector(remoteModelMutations, localModelMutations) + + // When + val result = conflictDetector.getConflicts() + + // Then + assertEquals(1, result.conflicts.size, "Number of resource conflicts") + val resourceConflict = result.conflicts.first() + assertEquals(2, resourceConflict.localMutations.size, "Number of local mutations in conflict") + assertEquals(1, resourceConflict.remoteMutations.size, "Number of remote mutations in conflict") + assertEquals(10, resourceConflict.localMutations.first().model.pageOrThrow(), "Page number of local mutation") + + // Verify local mutations are grouped correctly + val localIDs = resourceConflict.localMutations.map { it.localID }.toSet() + assertEquals(setOf("local-1", "local-2"), localIDs, "Local IDs in conflict group") + + // Verify other mutations + assertEquals(1, result.nonConflictingRemoteMutations.size, "Number of other remote mutations") + assertEquals(1, result.nonConflictingLocalMutations.size, "Number of other local mutations") + } + + 
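    // A sketch of the grouping rule these tests assert: mutations fall into the same conflict
    // group when they share a page number or a non-null remote ID, applied transitively. The
    // helper below is illustrative only (a hypothetical name and shape), not the ConflictDetector
    // implementation under test:
    private fun <T> groupBySharedPageOrRemoteId(
        items: List<T>,
        pageOf: (T) -> Int,
        remoteIdOf: (T) -> String?
    ): List<List<T>> {
        val groups = mutableListOf<MutableList<T>>()
        for (item in items) {
            // Every existing group this item links to, either by page or by matching remote ID.
            val linked = groups.filter { group ->
                group.any { other ->
                    pageOf(other) == pageOf(item) ||
                        (remoteIdOf(other) != null && remoteIdOf(other) == remoteIdOf(item))
                }
            }
            // Merge all linked groups with the new item so the relation stays transitive.
            val merged = mutableListOf(item)
            linked.forEach { merged.addAll(it); groups.remove(it) }
            groups += merged
        }
        return groups
    }
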
@Test + fun `getConflicts with matching remote IDs should detect conflict even with zeroed model properties`() { + // Given + val remoteModelMutations = listOf( + RemoteModelMutation( + model = PageBookmark(id = "remote-1", page = 0, lastModified = Instant.fromEpochMilliseconds(0)), // Zeroed properties for DELETE + remoteID = "remote-1", + mutation = Mutation.DELETED + ), + RemoteModelMutation( + model = PageBookmark(id = "remote-2", page = 20, lastModified = Instant.fromEpochMilliseconds(1001)), + remoteID = "remote-2", + mutation = Mutation.CREATED + ) + ) + val localModelMutations = listOf( + LocalModelMutation( + model = PageBookmark(id = "local-1", page = 10, lastModified = Instant.fromEpochMilliseconds(1002)), // Original page info for local DELETE + remoteID = "remote-1", // Same remote ID as remote mutation + localID = "local-1", + mutation = Mutation.DELETED + ), + LocalModelMutation( + model = PageBookmark(id = "local-2", page = 30, lastModified = Instant.fromEpochMilliseconds(1002)), + remoteID = null, + localID = "local-2", + mutation = Mutation.CREATED + ) + ) + val conflictDetector = ConflictDetector(remoteModelMutations, localModelMutations) + + // When + val result = conflictDetector.getConflicts() + + // Then + assertEquals(1, result.conflicts.size, "Number of resource conflicts") + val resourceConflict = result.conflicts.first() + assertEquals(1, resourceConflict.localMutations.size, "Number of local mutations in conflict") + assertEquals(1, resourceConflict.remoteMutations.size, "Number of remote mutations in conflict") + + // Verify the conflict is based on remote ID, not page + val localMutation = resourceConflict.localMutations.first() + val remoteMutation = resourceConflict.remoteMutations.first() + + assertEquals("remote-1", localMutation.remoteID, "Remote ID of local mutation") + assertEquals("remote-1", remoteMutation.remoteID, "Remote ID of remote mutation") + assertEquals(Mutation.DELETED, localMutation.mutation, "Mutation type of local mutation") + assertEquals(Mutation.DELETED, remoteMutation.mutation, "Mutation type of remote mutation") + + // Verify that the local mutation retains original page info while remote mutation has zeroed properties + assertEquals(10, localMutation.model.pageOrThrow(), "Page number of local mutation") + assertEquals(Instant.fromEpochMilliseconds(1002), localMutation.model.lastModifiedOrThrow(), "Last modified of local mutation") + assertEquals(0, remoteMutation.model.pageOrThrow(), "Page number of remote mutation") + assertEquals(Instant.fromEpochMilliseconds(0), remoteMutation.model.lastModifiedOrThrow(), "Last modified of remote mutation") + + // Verify other mutations + assertEquals(1, result.nonConflictingRemoteMutations.size, "Number of other remote mutations") + assertEquals(1, result.nonConflictingLocalMutations.size, "Number of other local mutations") + } +} + +private fun SyncBookmark.pageOrThrow(): Int = + (this as SyncBookmark.PageBookmark).page + +private fun SyncBookmark.lastModifiedOrThrow(): Instant = + when (this) { + is SyncBookmark.PageBookmark -> lastModified + is SyncBookmark.AyahBookmark -> lastModified + } diff --git a/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/conflict/ConflictResolverTest.kt b/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/conflict/ConflictResolverTest.kt new file mode 100644 index 00000000..901a1e9c --- /dev/null +++ b/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/conflict/ConflictResolverTest.kt @@ -0,0 +1,349 @@ 
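The ConflictResolverTest cases that follow pin down the resolution policy as "remote wins": every remote mutation in a conflict group is persisted locally, the only local mutation that may still be pushed is a CREATE paired with a local DELETE whose remote counterpart was also deleted, and the two unreconcilable pairings (local create vs. remote delete, local delete vs. remote create) fail fast. A minimal sketch of that decision, assuming the mutation types used in these tests — the function name, the Pair return shape, and the error calls are placeholders, not the ConflictResolver API from this change:

fun <M> resolveConflictSketch(
    localMutations: List<LocalModelMutation<M>>,
    remoteMutations: List<RemoteModelMutation<M>>
): Pair<List<RemoteModelMutation<M>>, List<LocalModelMutation<M>>> {
    val localCreates = localMutations.filter { it.mutation == Mutation.CREATED }
    val localDeletes = localMutations.filter { it.mutation == Mutation.DELETED }
    val remoteCreates = remoteMutations.filter { it.mutation == Mutation.CREATED }
    val remoteDeletes = remoteMutations.filter { it.mutation == Mutation.DELETED }

    // Unreconcilable pairings fail fast, mirroring the error-path tests below.
    if (localCreates.isNotEmpty() && remoteDeletes.isNotEmpty() && localDeletes.isEmpty())
        error("Local creation conflicts with remote deletion")
    if (localDeletes.isNotEmpty() && remoteCreates.isNotEmpty() && remoteDeletes.isEmpty())
        error("Local deletion conflicts with remote creation")

    // Remote side always wins: persist everything remote, and push a local CREATE only when the
    // remote side deleted the bookmark without re-creating it.
    val toPush = if (remoteDeletes.isNotEmpty() && remoteCreates.isEmpty()) localCreates else emptyList()
    return remoteMutations to toPush
}
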
+@file:OptIn(kotlin.time.ExperimentalTime::class) +package com.quran.shared.syncengine.conflict + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.mutations.RemoteModelMutation +import com.quran.shared.syncengine.model.SyncBookmark +import com.quran.shared.syncengine.model.SyncBookmark.PageBookmark +import kotlin.test.Test +import kotlin.test.assertEquals +import kotlin.test.assertFailsWith +import kotlin.test.assertTrue +import kotlin.time.Instant + +class ConflictResolverTest { + + @Test + fun `resolve with empty conflict groups should return empty result`() { + // Given + val conflictResolver = ConflictResolver(emptyList()) + + // When + val result = conflictResolver.resolve() + + // Then + assertEquals(0, result.mutationsToPersist.size, "Number of mutations to persist") + assertEquals(0, result.mutationsToPush.size, "Number of mutations to push") + } + + @Test + fun `resolve with single page created locally and remotely should persist remote mutation`() { + // Given + val remoteMutation = RemoteModelMutation( + model = PageBookmark(id = "remote-1", page = 10, lastModified = Instant.fromEpochMilliseconds(1000)), + remoteID = "remote-1", + mutation = Mutation.CREATED + ) + val localMutation = LocalModelMutation( + model = PageBookmark(id = "local-1", page = 10, lastModified = Instant.fromEpochMilliseconds(1001)), + remoteID = null, + localID = "local-1", + mutation = Mutation.CREATED + ) + val resourceConflict = ResourceConflict( + localMutations = listOf(localMutation), + remoteMutations = listOf(remoteMutation) + ) + val conflictResolver = ConflictResolver(listOf(resourceConflict)) + + // When + val result = conflictResolver.resolve() + + // Then + assertEquals(1, result.mutationsToPersist.size, "Number of mutations to persist") + assertEquals(0, result.mutationsToPush.size, "Number of mutations to push") + assertEquals(remoteMutation, result.mutationsToPersist.first(), "Persisted mutation should be the remote mutation") + } + + @Test + fun `resolve with single resource deleted locally and remotely should persist remote mutation`() { + // Given + val remoteMutation = RemoteModelMutation( + model = PageBookmark(id = "remote-1", page = 10, lastModified = Instant.fromEpochMilliseconds(1000)), + remoteID = "remote-1", + mutation = Mutation.DELETED + ) + val localMutation = LocalModelMutation( + model = PageBookmark(id = "local-1", page = 10, lastModified = Instant.fromEpochMilliseconds(1001)), + remoteID = "remote-1", + localID = "local-1", + mutation = Mutation.DELETED + ) + val resourceConflict = ResourceConflict( + localMutations = listOf(localMutation), + remoteMutations = listOf(remoteMutation) + ) + val conflictResolver = ConflictResolver(listOf(resourceConflict)) + + // When + val result = conflictResolver.resolve() + + // Then + assertEquals(1, result.mutationsToPersist.size, "Number of mutations to persist") + assertEquals(0, result.mutationsToPush.size, "Number of mutations to push") + assertEquals(remoteMutation, result.mutationsToPersist.first(), "Persisted mutation should be the remote mutation") + } + + @Test + fun `resolve with remote delete and create vs local delete should persist both remote mutations`() { + // Given + val remoteDeleteMutation = RemoteModelMutation( + model = PageBookmark(id = "remote-1", page = 0, lastModified = Instant.fromEpochMilliseconds(1000)), + remoteID = "remote-1", + mutation = Mutation.DELETED + ) + val remoteCreateMutation = RemoteModelMutation( + model = PageBookmark(id 
= "remote-2", page = 10, lastModified = Instant.fromEpochMilliseconds(1001)), + remoteID = "remote-2", + mutation = Mutation.CREATED + ) + val localDeleteMutation = LocalModelMutation( + model = PageBookmark(id = "local-1", page = 10, lastModified = Instant.fromEpochMilliseconds(1002)), + remoteID = "remote-1", + localID = "local-1", + mutation = Mutation.DELETED + ) + val resourceConflict = ResourceConflict( + localMutations = listOf(localDeleteMutation), + remoteMutations = listOf(remoteDeleteMutation, remoteCreateMutation) + ) + val conflictResolver = ConflictResolver(listOf(resourceConflict)) + + // When + val result = conflictResolver.resolve() + + // Then + assertEquals(2, result.mutationsToPersist.size, "Number of mutations to persist") + assertEquals(0, result.mutationsToPush.size, "Number of mutations to push") + assertEquals(remoteDeleteMutation, result.mutationsToPersist[0], "First persisted mutation should be the remote delete") + assertEquals(remoteCreateMutation, result.mutationsToPersist[1], "Second persisted mutation should be the remote create") + } + + @Test + fun `resolve with remote delete vs local delete and create should persist remote delete and push local create`() { + // Given + val remoteDeleteMutation = RemoteModelMutation( + model = PageBookmark(id = "remote-1", page = 0, lastModified = Instant.fromEpochMilliseconds(1000)), + remoteID = "remote-1", + mutation = Mutation.DELETED + ) + val localDeleteMutation = LocalModelMutation( + model = PageBookmark(id = "local-1", page = 10, lastModified = Instant.fromEpochMilliseconds(1001)), + remoteID = "remote-1", + localID = "local-1", + mutation = Mutation.DELETED + ) + val localCreateMutation = LocalModelMutation( + model = PageBookmark(id = "local-2", page = 10, lastModified = Instant.fromEpochMilliseconds(1002)), + remoteID = null, + localID = "local-2", + mutation = Mutation.CREATED + ) + val resourceConflict = ResourceConflict( + localMutations = listOf(localDeleteMutation, localCreateMutation), + remoteMutations = listOf(remoteDeleteMutation) + ) + val conflictResolver = ConflictResolver(listOf(resourceConflict)) + + // When + val result = conflictResolver.resolve() + + // Then + assertEquals(1, result.mutationsToPersist.size, "Number of mutations to persist") + assertEquals(1, result.mutationsToPush.size, "Number of mutations to push") + assertEquals(remoteDeleteMutation, result.mutationsToPersist.first(), "Persisted mutation should be the remote delete") + assertEquals(localCreateMutation, result.mutationsToPush.first(), "Pushed mutation should be the local create") + } + + @Test + fun `resolve with delete and create on both sides should persist remote mutations only`() { + // Given + val remoteDeleteMutation = RemoteModelMutation( + model = PageBookmark(id = "remote-1", page = 0, lastModified = Instant.fromEpochMilliseconds(1000)), + remoteID = "remote-1", + mutation = Mutation.DELETED + ) + val remoteCreateMutation = RemoteModelMutation( + model = PageBookmark(id = "remote-2", page = 10, lastModified = Instant.fromEpochMilliseconds(1001)), + remoteID = "remote-2", + mutation = Mutation.CREATED + ) + val localDeleteMutation = LocalModelMutation( + model = PageBookmark(id = "local-1", page = 10, lastModified = Instant.fromEpochMilliseconds(1002)), + remoteID = "remote-1", + localID = "local-1", + mutation = Mutation.DELETED + ) + val localCreateMutation = LocalModelMutation( + model = PageBookmark(id = "local-2", page = 10, lastModified = Instant.fromEpochMilliseconds(1003)), + remoteID = null, + localID = 
"local-2", + mutation = Mutation.CREATED + ) + val resourceConflict = ResourceConflict( + localMutations = listOf(localDeleteMutation, localCreateMutation), + remoteMutations = listOf(remoteDeleteMutation, remoteCreateMutation) + ) + val conflictResolver = ConflictResolver(listOf(resourceConflict)) + + // When + val result = conflictResolver.resolve() + + // Then + assertEquals(2, result.mutationsToPersist.size, "Number of mutations to persist") + assertEquals(0, result.mutationsToPush.size, "Number of mutations to push") + assertEquals(remoteDeleteMutation, result.mutationsToPersist[0], "First persisted mutation should be the remote delete") + assertEquals(remoteCreateMutation, result.mutationsToPersist[1], "Second persisted mutation should be the remote create") + } + + @Test + fun `resolve with multiple conflict groups should handle each group independently`() { + // Given - First conflict group: CREATE vs CREATE on page 10 + val remoteCreate1 = RemoteModelMutation( + model = PageBookmark(id = "remote-1", page = 10, lastModified = Instant.fromEpochMilliseconds(1000)), + remoteID = "remote-1", + mutation = Mutation.CREATED + ) + val localCreate1 = LocalModelMutation( + model = PageBookmark(id = "local-1", page = 10, lastModified = Instant.fromEpochMilliseconds(1001)), + remoteID = null, + localID = "local-1", + mutation = Mutation.CREATED + ) + val resourceConflict1 = ResourceConflict( + localMutations = listOf(localCreate1), + remoteMutations = listOf(remoteCreate1) + ) + + // Given - Second conflict group: DELETE vs DELETE on page 20 + val remoteDelete2 = RemoteModelMutation( + model = PageBookmark(id = "remote-2", page = 0, lastModified = Instant.fromEpochMilliseconds(1002)), + remoteID = "remote-2", + mutation = Mutation.DELETED + ) + val localDelete2 = LocalModelMutation( + model = PageBookmark(id = "local-2", page = 20, lastModified = Instant.fromEpochMilliseconds(1003)), + remoteID = "remote-2", + localID = "local-2", + mutation = Mutation.DELETED + ) + val resourceConflict2 = ResourceConflict( + localMutations = listOf(localDelete2), + remoteMutations = listOf(remoteDelete2) + ) + + // Given - Third conflict group: Remote delete+create vs local delete on page 30 + val remoteDelete3 = RemoteModelMutation( + model = PageBookmark(id = "remote-3", page = 0, lastModified = Instant.fromEpochMilliseconds(1004)), + remoteID = "remote-3", + mutation = Mutation.DELETED + ) + val remoteCreate3 = RemoteModelMutation( + model = PageBookmark(id = "remote-4", page = 30, lastModified = Instant.fromEpochMilliseconds(1005)), + remoteID = "remote-4", + mutation = Mutation.CREATED + ) + val localDelete3 = LocalModelMutation( + model = PageBookmark(id = "local-3", page = 30, lastModified = Instant.fromEpochMilliseconds(1006)), + remoteID = "remote-3", + localID = "local-3", + mutation = Mutation.DELETED + ) + val resourceConflict3 = ResourceConflict( + localMutations = listOf(localDelete3), + remoteMutations = listOf(remoteDelete3, remoteCreate3) + ) + + val conflictResolver = ConflictResolver(listOf(resourceConflict1, resourceConflict2, resourceConflict3)) + + // When + val result = conflictResolver.resolve() + + // Then + assertEquals(4, result.mutationsToPersist.size, "Number of mutations to persist") + assertEquals(0, result.mutationsToPush.size, "Number of mutations to push") + + // Verify all remote mutations are persisted + val persistedRemoteIDs = result.mutationsToPersist.map { it.remoteID }.toSet() + assertEquals(setOf("remote-1", "remote-2", "remote-3", "remote-4"), persistedRemoteIDs, 
"All remote mutations should be persisted") + } + + @Test + fun `resolve with local creation vs remote deletion should throw error`() { + // Given + val remoteDeleteMutation = RemoteModelMutation( + model = PageBookmark(id = "remote-1", page = 0, lastModified = Instant.fromEpochMilliseconds(1000)), + remoteID = "remote-1", + mutation = Mutation.DELETED + ) + val localCreateMutation = LocalModelMutation( + model = PageBookmark(id = "local-1", page = 10, lastModified = Instant.fromEpochMilliseconds(1001)), + remoteID = null, + localID = "local-1", + mutation = Mutation.CREATED + ) + val resourceConflict = ResourceConflict( + localMutations = listOf(localCreateMutation), + remoteMutations = listOf(remoteDeleteMutation) + ) + val conflictResolver = ConflictResolver(listOf(resourceConflict)) + + // When & Then + val exception = assertFailsWith { + conflictResolver.resolve() + } + + assertTrue( + exception.message?.contains("Local creation conflicts with remote deletion") == true, + "Error message should mention local creation vs remote deletion conflict" + ) + assertTrue( + exception.message?.contains("CREATED(local-1)") == true, + "Error message should include local mutation details" + ) + assertTrue( + exception.message?.contains("DELETED(remote-1)") == true, + "Error message should include remote mutation details" + ) + } + + @Test + fun `resolve with local deletion vs remote creation should throw error`() { + // Given + val remoteCreateMutation = RemoteModelMutation( + model = PageBookmark(id = "remote-1", page = 10, lastModified = Instant.fromEpochMilliseconds(1000)), + remoteID = "remote-1", + mutation = Mutation.CREATED + ) + val localDeleteMutation = LocalModelMutation( + model = PageBookmark(id = "local-1", page = 0, lastModified = Instant.fromEpochMilliseconds(1001)), + remoteID = "remote-1", + localID = "local-1", + mutation = Mutation.DELETED + ) + val resourceConflict = ResourceConflict( + localMutations = listOf(localDeleteMutation), + remoteMutations = listOf(remoteCreateMutation) + ) + val conflictResolver = ConflictResolver(listOf(resourceConflict)) + + // When & Then + val exception = assertFailsWith { + conflictResolver.resolve() + } + + assertTrue( + exception.message?.contains("Local deletion conflicts with remote creation") == true, + "Error message should mention local deletion vs remote creation conflict" + ) + assertTrue( + exception.message?.contains("DELETED(local-1)") == true, + "Error message should include local mutation details" + ) + assertTrue( + exception.message?.contains("CREATED(remote-1)") == true, + "Error message should include remote mutation details" + ) + } +} \ No newline at end of file diff --git a/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/preprocessing/BookmarksLocalMutationsPreprocessorTest.kt b/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/preprocessing/BookmarksLocalMutationsPreprocessorTest.kt new file mode 100644 index 00000000..79595cfb --- /dev/null +++ b/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/preprocessing/BookmarksLocalMutationsPreprocessorTest.kt @@ -0,0 +1,292 @@ +@file:OptIn(kotlin.time.ExperimentalTime::class) +package com.quran.shared.syncengine.preprocessing + +import com.quran.shared.mutations.LocalModelMutation +import com.quran.shared.mutations.Mutation +import com.quran.shared.syncengine.model.SyncBookmark +import com.quran.shared.syncengine.model.SyncBookmark.PageBookmark +import kotlin.test.Test +import kotlin.test.assertEquals +import kotlin.test.assertFailsWith +import 
kotlin.test.assertTrue +import kotlin.time.Instant + +class BookmarksLocalMutationsPreprocessorTest { + + private val preprocessor = BookmarksLocalMutationsPreprocessor() + + @Test + fun `should return empty list when no mutations provided`() { + val result = preprocessor.preprocess(emptyList()) + assertTrue(result.isEmpty()) + } + + @Test + fun `should return single mutation unchanged`() { + val mutation = createLocalMutation(1, Mutation.CREATED) + val result = preprocessor.preprocess(listOf(mutation)) + + assertEquals(1, result.size) + assertEquals(mutation, result[0]) + } + + @Test + fun `should return two mutations for same page unchanged`() { + val mutation1 = createLocalMutation(1, Mutation.CREATED) + val mutation2 = createLocalMutation(1, Mutation.MODIFIED) + + // This should now throw an error because after conversion we have 2 CREATED mutations + val exception = assertFailsWith { + preprocessor.preprocess(listOf(mutation1, mutation2)) + } + + assertTrue(exception.message?.contains("Bookmark page=1 has 2 creations") == true) + assertTrue(exception.message?.contains("which is not allowed") == true) + } + + @Test + fun `should throw error when more than two mutations for same page`() { + val mutation1 = createLocalMutation(1, Mutation.CREATED) + val mutation2 = createLocalMutation(1, Mutation.MODIFIED) // This will be converted to CREATED + val mutation3 = createLocalMutation(1, Mutation.DELETED) + val mutation4 = createLocalMutation(1, Mutation.CREATED) + + val exception = assertFailsWith { + preprocessor.preprocess(listOf(mutation1, mutation2, mutation3, mutation4)) + } + + assertTrue(exception.message?.contains("Bookmark page=1 has 4 mutations") == true) // After conversion, there are 4 mutations + assertTrue(exception.message?.contains("exceeds logical limit of 2") == true) + } + + @Test + fun `should handle multiple pages with valid mutation counts`() { + // Page 1: 2 mutations (valid) - MODIFIED will be converted to CREATED, causing error + val page1Mutation1 = createLocalMutation(1, Mutation.CREATED) + val page1Mutation2 = createLocalMutation(1, Mutation.MODIFIED) // This will be converted to CREATED + + // Page 2: 1 mutation (valid) + val page2Mutation = createLocalMutation(2, Mutation.CREATED) + + // Page 3: 2 mutations (valid) - MODIFIED will be converted to CREATED, causing error + val page3Mutation1 = createLocalMutation(3, Mutation.CREATED) + val page3Mutation2 = createLocalMutation(3, Mutation.MODIFIED) // This will be converted to CREATED + + val allMutations = listOf( + page1Mutation1, page1Mutation2, + page2Mutation, + page3Mutation1, page3Mutation2 + ) + + // This should throw an error because pages 1 and 3 will have multiple CREATED mutations + val exception = assertFailsWith { + preprocessor.preprocess(allMutations) + } + + assertTrue(exception.message?.contains("Bookmark page=1 has 2 creations") == true || + exception.message?.contains("Bookmark page=3 has 2 creations") == true) + } + + @Test + fun `should throw error when any page has more than two mutations`() { + val page1Mutation1 = createLocalMutation(1, Mutation.CREATED) + val page1Mutation2 = createLocalMutation(1, Mutation.MODIFIED) // This will be converted to CREATED + val page1Mutation3 = createLocalMutationWithRemoteID(1, Mutation.DELETED, "remote1") // This makes page 1 have 3 mutations after conversion + val page1Mutation4 = createLocalMutation(1, Mutation.CREATED) // This makes page 1 have 4 mutations after conversion + + val page2Mutation1 = createLocalMutation(2, Mutation.CREATED) + val 
page2Mutation2 = createLocalMutation(2, Mutation.MODIFIED) // This will be converted to CREATED + + val allMutations = listOf( + page1Mutation1, page1Mutation2, page1Mutation3, page1Mutation4, + page2Mutation1, page2Mutation2 + ) + + val exception = assertFailsWith { + preprocessor.preprocess(allMutations) + } + + assertTrue(exception.message?.contains("Bookmark page=1 has 4 mutations") == true) + assertTrue(exception.message?.contains("exceeds logical limit of 2") == true) + } + + @Test + fun `should handle mutations for different pages independently when all are valid`() { + val page1Mutation1 = createLocalMutation(1, Mutation.CREATED) + val page1Mutation2 = createLocalMutation(1, Mutation.MODIFIED) // This will be converted to CREATED + + val page2Mutation1 = createLocalMutation(2, Mutation.CREATED) + val page2Mutation2 = createLocalMutation(2, Mutation.MODIFIED) // This will be converted to CREATED + + val allMutations = listOf( + page1Mutation1, page1Mutation2, + page2Mutation1, page2Mutation2 + ) + + // This should throw an error because both pages will have multiple CREATED mutations + val exception = assertFailsWith { + preprocessor.preprocess(allMutations) + } + + assertTrue(exception.message?.contains("Bookmark page=1 has 2 creations") == true || + exception.message?.contains("Bookmark page=2 has 2 creations") == true) + } + + @Test + fun `should throw error when there are two deletions for same page`() { + val deletion1 = createLocalMutationWithRemoteID(1, Mutation.DELETED, "remote1") + val deletion2 = createLocalMutationWithRemoteID(1, Mutation.DELETED, "remote2") + + val exception = assertFailsWith { + preprocessor.preprocess(listOf(deletion1, deletion2)) + } + + assertTrue(exception.message?.contains("Bookmark page=1 has 2 deletions") == true) + assertTrue(exception.message?.contains("which is not allowed") == true) + } + + @Test + fun `should throw error when there are two creations for same page`() { + val creation1 = createLocalMutation(1, Mutation.CREATED) + val creation2 = createLocalMutation(1, Mutation.CREATED) + + val exception = assertFailsWith { + preprocessor.preprocess(listOf(creation1, creation2)) + } + + assertTrue(exception.message?.contains("Bookmark page=1 has 2 creations") == true) + assertTrue(exception.message?.contains("which is not allowed") == true) + } + + @Test + fun `should throw error when creation followed by deletion`() { + val creation = createLocalMutation(1, Mutation.CREATED) + val deletion = createLocalMutationWithRemoteID(1, Mutation.DELETED, "remote123") + + val exception = assertFailsWith { + preprocessor.preprocess(listOf(creation, deletion)) + } + + assertTrue(exception.message?.contains("creation followed by deletion") == true) + assertTrue(exception.message?.contains("two bookmarks with the same key") == true) + } + + @Test + fun `should throw error when deletion has null remoteID`() { + val deletion = createLocalMutationWithRemoteID(1, Mutation.DELETED, null) + + val exception = assertFailsWith { + preprocessor.preprocess(listOf(deletion)) + } + + assertTrue(exception.message?.contains("deletion without remote ID") == true) + assertTrue(exception.message?.contains("must reference an existing remote resource") == true) + } + + @Test + fun `should not allow modified followed by creation`() { + val deletion = createLocalMutationWithRemoteID(1, Mutation.MODIFIED, "remote123") + val creation = createLocalMutation(1, Mutation.CREATED) + + // This should throw an error because after conversion we have two creations + assertFailsWith { + 
preprocessor.preprocess(listOf(deletion, creation)) + } + } + + @Test + fun `should allow single creation`() { + val creation = createLocalMutation(1, Mutation.CREATED) + + val result = preprocessor.preprocess(listOf(creation)) + + assertEquals(1, result.size) + assertEquals(creation, result[0]) + } + + @Test + fun `should allow single deletion`() { + val deletion = createLocalMutationWithRemoteID(1, Mutation.DELETED, "remote123") + + val result = preprocessor.preprocess(listOf(deletion)) + + assertEquals(1, result.size) + assertEquals(deletion, result[0]) + } + + @Test + fun `should allow creation and modification`() { + val creation = createLocalMutation(1, Mutation.CREATED) + val modification = createLocalMutation(1, Mutation.MODIFIED) + + // This should throw an error because after conversion we have 2 CREATED mutations + val exception = assertFailsWith { + preprocessor.preprocess(listOf(creation, modification)) + } + + assertTrue(exception.message?.contains("Bookmark page=1 has 2 creations") == true) + assertTrue(exception.message?.contains("which is not allowed") == true) + } + + @Test + fun `should allow deletion and modification`() { + val deletion = createLocalMutationWithRemoteID(1, Mutation.DELETED, "remote123") + val modification = createLocalMutation(1, Mutation.MODIFIED) + + val result = preprocessor.preprocess(listOf(deletion, modification)) + + assertEquals(2, result.size) // MODIFIED mutation is converted to CREATED + assertEquals(deletion, result[0]) + assertTrue(result.any { it.localID == modification.localID && it.mutation == Mutation.CREATED }) // MODIFIED converted to CREATED + } + + @Test + fun `should maintain order of input relative to same logical resource`() { + // Create mutations for different pages in a specific order + val mutation1 = createLocalMutation(1, Mutation.CREATED) + val mutation2 = createLocalMutation(2, Mutation.MODIFIED) // Will be converted to CREATED + val mutation3 = createLocalMutationWithRemoteID(3, Mutation.DELETED, "remote3") + val mutation4 = createLocalMutation(4, Mutation.CREATED) + val mutation5 = createLocalMutationWithRemoteID(5, Mutation.DELETED, "remote5") + + val inputMutations = listOf(mutation1, mutation2, mutation3, mutation4, mutation5) + + val result = preprocessor.preprocess(inputMutations) + + // All mutations should be kept since they're for different pages + assertEquals(5, result.size, "Should keep all mutations for different pages") + + // Check that the relative order is maintained + assertEquals(inputMutations.map { it.localID}, result.map { it.localID}, "Order should be maintained in output") + } + + private fun createLocalMutation(page: Int, mutation: Mutation): LocalModelMutation { + val timestamp = Instant.fromEpochMilliseconds(1000L + page * 100L) + val model = PageBookmark( + id = "local_${page}_${timestamp.toEpochMilliseconds()}", + page = page, + lastModified = timestamp + ) + return LocalModelMutation( + model = model, + remoteID = null, + localID = "local_${page}_${timestamp}", + mutation = mutation + ) + } + + private fun createLocalMutationWithRemoteID(page: Int, mutation: Mutation, remoteID: String?): LocalModelMutation { + val timestamp = Instant.fromEpochMilliseconds(1000L + page * 100L) + val model = PageBookmark( + id = "local_${page}_${timestamp.toEpochMilliseconds()}", + page = page, + lastModified = timestamp + ) + return LocalModelMutation( + model = model, + remoteID = remoteID, + localID = "local_${page}_${timestamp}", + mutation = mutation + ) + } +} diff --git 
a/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/preprocessing/BookmarksRemoteMutationsPreprocessorTest.kt b/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/preprocessing/BookmarksRemoteMutationsPreprocessorTest.kt
new file mode 100644
index 00000000..b8be5776
--- /dev/null
+++ b/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/preprocessing/BookmarksRemoteMutationsPreprocessorTest.kt
@@ -0,0 +1,357 @@
+@file:OptIn(kotlin.time.ExperimentalTime::class)
+package com.quran.shared.syncengine.preprocessing
+
+import com.quran.shared.mutations.Mutation
+import com.quran.shared.mutations.RemoteModelMutation
+import com.quran.shared.syncengine.model.SyncBookmark
+import com.quran.shared.syncengine.model.SyncBookmark.PageBookmark
+import kotlinx.coroutines.test.runTest
+import kotlin.test.Test
+import kotlin.test.assertEquals
+import kotlin.test.assertTrue
+import kotlin.time.Instant
+
+class BookmarksRemoteMutationsPreprocessorTest {
+
+    @Test
+    fun `test preprocess with empty mutations list`() = runTest {
+        // Arrange
+        val checkLocalExistence = createMockExistenceChecker(emptySet())
+        val preprocessor = BookmarksRemoteMutationsPreprocessor(checkLocalExistence)
+        val remoteMutations = emptyList<RemoteModelMutation<PageBookmark>>()
+
+        // Act
+        val result = preprocessor.preprocess(remoteMutations)
+
+        // Assert
+        assertTrue(result.isEmpty(), "Should return empty list for empty input")
+    }
+
+    @Test
+    fun `test preprocess with only CREATED mutations`() = runTest {
+        // Arrange
+        val checkLocalExistence = createMockExistenceChecker(emptySet())
+        val preprocessor = BookmarksRemoteMutationsPreprocessor(checkLocalExistence)
+        val remoteMutations = listOf(
+            RemoteModelMutation(
+                model = PageBookmark("new-1", 10, Instant.fromEpochMilliseconds(1000)),
+                remoteID = "new-1",
+                mutation = Mutation.CREATED
+            ),
+            RemoteModelMutation(
+                model = PageBookmark("new-2", 20, Instant.fromEpochMilliseconds(1000)),
+                remoteID = "new-2",
+                mutation = Mutation.CREATED
+            )
+        )
+
+        // Act
+        val result = preprocessor.preprocess(remoteMutations)
+
+        // Assert
+        assertEquals(2, result.size, "Should keep all CREATED mutations")
+        assertEquals("new-1", result[0].remoteID)
+        assertEquals("new-2", result[1].remoteID)
+    }
+
+    @Test
+    fun `test preprocess filters out DELETE mutations for non-existent resources`() = runTest {
+        // Arrange
+        val existingRemoteIDs = setOf("existing-1")
+        val checkLocalExistence = createMockExistenceChecker(existingRemoteIDs)
+        val preprocessor = BookmarksRemoteMutationsPreprocessor(checkLocalExistence)
+        val remoteMutations = listOf(
+            RemoteModelMutation(
+                model = PageBookmark("existing-1", 10, Instant.fromEpochMilliseconds(1000)),
+                remoteID = "existing-1",
+                mutation = Mutation.DELETED
+            ),
+            RemoteModelMutation(
+                model = PageBookmark("non-existent-1", 20, Instant.fromEpochMilliseconds(1000)),
+                remoteID = "non-existent-1",
+                mutation = Mutation.DELETED
+            )
+        )
+
+        // Act
+        val result = preprocessor.preprocess(remoteMutations)
+
+        // Assert
+        assertEquals(1, result.size, "Should filter out DELETE mutation for non-existent resource")
+        assertEquals("existing-1", result[0].remoteID, "Should keep DELETE mutation for existing resource")
+    }
+
+    @Test
+    fun `test preprocess keeps MODIFIED mutations for non-existent resources`() = runTest {
+        // Arrange
+        val existingRemoteIDs = setOf("existing-1")
+        val checkLocalExistence = createMockExistenceChecker(existingRemoteIDs)
+        val preprocessor = BookmarksRemoteMutationsPreprocessor(checkLocalExistence)
+        val remoteMutations = listOf(
+
RemoteModelMutation( + model = PageBookmark("existing-1", 10, Instant.fromEpochMilliseconds(1000)), + remoteID = "existing-1", + mutation = Mutation.MODIFIED + ), + RemoteModelMutation( + model = PageBookmark("non-existent-1", 20, Instant.fromEpochMilliseconds(1000)), + remoteID = "non-existent-1", + mutation = Mutation.MODIFIED + ) + ) + + // Act + val result = preprocessor.preprocess(remoteMutations) + + // Assert + assertEquals(2, result.size, "Should keep ALL MODIFIED mutations and convert them to CREATED") + result.forEach { mutation -> + assertEquals(Mutation.CREATED, mutation.mutation, "All MODIFIED mutations should be converted to CREATED") + } + val resultRemoteIDs = result.map { it.remoteID }.toSet() + val expectedRemoteIDs = setOf("existing-1", "non-existent-1") + assertEquals(expectedRemoteIDs, resultRemoteIDs, "Should have all remote IDs") + } + + @Test + fun `test preprocess converts ALL MODIFIED mutations to CREATED mutations`() = runTest { + // Arrange + val existingRemoteIDs = setOf("existing-1", "existing-2") + val checkLocalExistence = createMockExistenceChecker(existingRemoteIDs) + val preprocessor = BookmarksRemoteMutationsPreprocessor(checkLocalExistence) + val remoteMutations = listOf( + RemoteModelMutation( + model = PageBookmark("existing-1", 10, Instant.fromEpochMilliseconds(1000)), + remoteID = "existing-1", + mutation = Mutation.MODIFIED + ), + RemoteModelMutation( + model = PageBookmark("existing-2", 20, Instant.fromEpochMilliseconds(1000)), + remoteID = "existing-2", + mutation = Mutation.MODIFIED + ) + ) + + // Act + val result = preprocessor.preprocess(remoteMutations) + + // Assert + assertEquals(2, result.size, "Should keep all MODIFIED mutations and convert them to CREATED") + result.forEach { mutation -> + assertEquals(Mutation.CREATED, mutation.mutation, "All MODIFIED mutations should be converted to CREATED") + } + val resultRemoteIDs = result.map { it.remoteID }.toSet() + val expectedRemoteIDs = setOf("existing-1", "existing-2") + assertEquals(expectedRemoteIDs, resultRemoteIDs, "Should have expected remote IDs") + } + + @Test + fun `test preprocess keeps CREATED mutations regardless of local existence`() = runTest { + // Arrange + val existingRemoteIDs = setOf("existing-1") + val checkLocalExistence = createMockExistenceChecker(existingRemoteIDs) + val preprocessor = BookmarksRemoteMutationsPreprocessor(checkLocalExistence) + val remoteMutations = listOf( + RemoteModelMutation( + model = PageBookmark("new-1", 10, Instant.fromEpochMilliseconds(1000)), + remoteID = "new-1", + mutation = Mutation.CREATED + ), + RemoteModelMutation( + model = PageBookmark("new-2", 20, Instant.fromEpochMilliseconds(1000)), + remoteID = "new-2", + mutation = Mutation.CREATED + ) + ) + + // Act + val result = preprocessor.preprocess(remoteMutations) + + // Assert + assertEquals(2, result.size, "Should keep all CREATED mutations regardless of local existence") + assertEquals("new-1", result[0].remoteID) + assertEquals("new-2", result[1].remoteID) + } + + @Test + fun `test preprocess with mixed mutation types`() = runTest { + // Arrange + val existingRemoteIDs = setOf("existing-1", "existing-2") + val checkLocalExistence = createMockExistenceChecker(existingRemoteIDs) + val preprocessor = BookmarksRemoteMutationsPreprocessor(checkLocalExistence) + val remoteMutations = listOf( + // CREATED mutations (should be kept) + RemoteModelMutation( + model = PageBookmark("new-1", 10, Instant.fromEpochMilliseconds(1000)), + remoteID = "new-1", + mutation = Mutation.CREATED + ), + // 
DELETE mutations (should be filtered based on existence) + RemoteModelMutation( + model = PageBookmark("existing-1", 20, Instant.fromEpochMilliseconds(1000)), + remoteID = "existing-1", + mutation = Mutation.DELETED + ), + RemoteModelMutation( + model = PageBookmark("non-existent-1", 30, Instant.fromEpochMilliseconds(1000)), + remoteID = "non-existent-1", + mutation = Mutation.DELETED + ), + // MODIFIED mutations (should ALL be converted to CREATED, regardless of existence) + RemoteModelMutation( + model = PageBookmark("existing-2", 40, Instant.fromEpochMilliseconds(1000)), + remoteID = "existing-2", + mutation = Mutation.MODIFIED + ), + RemoteModelMutation( + model = PageBookmark("non-existent-2", 50, Instant.fromEpochMilliseconds(1000)), + remoteID = "non-existent-2", + mutation = Mutation.MODIFIED + ) + ) + + // Act + val result = preprocessor.preprocess(remoteMutations) + + // Assert + assertEquals(4, result.size, "Should keep CREATED mutations, existing DELETE mutations, and ALL MODIFIED mutations") + + val resultRemoteIDs = result.map { it.remoteID }.toSet() + val expectedRemoteIDs = setOf("new-1", "existing-1", "existing-2", "non-existent-2") + assertEquals(expectedRemoteIDs, resultRemoteIDs, "Should have expected remote IDs") + + // Check mutation types + val createdMutation = result.find { it.remoteID == "new-1" } + val deletedMutation = result.find { it.remoteID == "existing-1" } + val existingModifiedMutation = result.find { it.remoteID == "existing-2" } + val nonExistentModifiedMutation = result.find { it.remoteID == "non-existent-2" } + + assertEquals(Mutation.CREATED, createdMutation?.mutation, "CREATED mutation should remain CREATED") + assertEquals(Mutation.DELETED, deletedMutation?.mutation, "DELETE mutation should remain DELETED") + assertEquals(Mutation.CREATED, existingModifiedMutation?.mutation, "MODIFIED mutation should be converted to CREATED") + assertEquals(Mutation.CREATED, nonExistentModifiedMutation?.mutation, "MODIFIED mutation for non-existent resource should also be converted to CREATED") + } + + @Test + fun `test preprocess when local data fetcher returns empty existence map`() = runTest { + // Arrange + val checkLocalExistence = createMockExistenceChecker(emptySet()) + val preprocessor = BookmarksRemoteMutationsPreprocessor(checkLocalExistence) + val remoteMutations = listOf( + RemoteModelMutation( + model = PageBookmark("any-id", 10, Instant.fromEpochMilliseconds(1000)), + remoteID = "any-id", + mutation = Mutation.DELETED + ), + RemoteModelMutation( + model = PageBookmark("any-id-2", 20, Instant.fromEpochMilliseconds(1000)), + remoteID = "any-id-2", + mutation = Mutation.MODIFIED + ) + ) + + // Act + val result = preprocessor.preprocess(remoteMutations) + + // Assert + assertEquals(1, result.size, "Should filter out DELETE mutations but keep MODIFIED mutations converted to CREATED") + assertEquals("any-id-2", result[0].remoteID, "Should keep the MODIFIED mutation") + assertEquals(Mutation.CREATED, result[0].mutation, "MODIFIED mutation should be converted to CREATED") + } + + @Test + fun `test preprocess preserves mutation order`() = runTest { + // Arrange + val existingRemoteIDs = setOf("existing-1", "existing-2") + val checkLocalExistence = createMockExistenceChecker(existingRemoteIDs) + val preprocessor = BookmarksRemoteMutationsPreprocessor(checkLocalExistence) + val remoteMutations = listOf( + RemoteModelMutation( + model = PageBookmark("existing-1", 10, Instant.fromEpochMilliseconds(1000)), + remoteID = "existing-1", + mutation = Mutation.DELETED + 
            ),
+            RemoteModelMutation(
+                model = PageBookmark("new-1", 20, Instant.fromEpochMilliseconds(1000)),
+                remoteID = "new-1",
+                mutation = Mutation.CREATED
+            ),
+            RemoteModelMutation(
+                model = PageBookmark("existing-2", 30, Instant.fromEpochMilliseconds(1000)),
+                remoteID = "existing-2",
+                mutation = Mutation.MODIFIED
+            )
+        )
+
+        // Act
+        val result = preprocessor.preprocess(remoteMutations)
+
+        // Assert
+        assertEquals(3, result.size, "Should keep all mutations")
+        val resultRemoteIDs = result.map { it.remoteID }
+        assertTrue(resultRemoteIDs.contains("existing-1"), "Should contain existing-1")
+        assertTrue(resultRemoteIDs.contains("new-1"), "Should contain new-1")
+        assertTrue(resultRemoteIDs.contains("existing-2"), "Should contain existing-2")
+
+        // Check that MODIFIED mutation was converted to CREATED
+        val modifiedMutation = result.find { it.remoteID == "existing-2" }
+        assertEquals(Mutation.CREATED, modifiedMutation?.mutation, "MODIFIED mutation should be converted to CREATED")
+    }
+
+    @Test
+    fun `test preprocess should maintain order of input relative to same logical resource`() = runTest {
+        // Arrange
+        val existingRemoteIDs = setOf("existing-1", "existing-3")
+        val checkLocalExistence = createMockExistenceChecker(existingRemoteIDs)
+        val preprocessor = BookmarksRemoteMutationsPreprocessor(checkLocalExistence)
+
+        // Create mutations in a specific order
+        val remoteMutations = listOf(
+            RemoteModelMutation(
+                model = PageBookmark("non-existent-1", 30, Instant.fromEpochMilliseconds(1000)),
+                remoteID = "non-existent-1",
+                mutation = Mutation.DELETED
+            ),
+            RemoteModelMutation(
+                model = PageBookmark("new-1", 20, Instant.fromEpochMilliseconds(1000)),
+                remoteID = "new-1",
+                mutation = Mutation.CREATED
+            ),
+            RemoteModelMutation(
+                model = PageBookmark("existing-1", 10, Instant.fromEpochMilliseconds(1000)),
+                remoteID = "existing-1",
+                mutation = Mutation.DELETED
+            ),
+            RemoteModelMutation(
+                model = PageBookmark("existing-3", 40, Instant.fromEpochMilliseconds(1000)),
+                remoteID = "existing-3",
+                mutation = Mutation.MODIFIED
+            ),
+            RemoteModelMutation(
+                model = PageBookmark("new-2", 50, Instant.fromEpochMilliseconds(1000)),
+                remoteID = "new-2",
+                mutation = Mutation.CREATED
+            )
+        )
+
+        // Act
+        val result = preprocessor.preprocess(remoteMutations)
+
+        // Assert
+        // Should keep: existing-1 (DELETE), new-1 (CREATED), existing-3 (MODIFIED->CREATED), new-2 (CREATED)
+        // Should filter out: non-existent-1 (DELETE)
+        assertEquals(4, result.size, "Should keep 4 mutations and filter out 1")
+
+        // Check that the relative order is maintained for kept mutations
+        assertEquals(
+            listOf("new-1", "existing-1", "existing-3", "new-2"),
+            result.map { it.remoteID },
+            "Output should maintain same order")
+    }
+
+    private fun createMockExistenceChecker(existingRemoteIDs: Set<String>): suspend (List<String>) -> Map<String, Boolean> {
+        return { remoteIDs ->
+            remoteIDs.associateWith { it in existingRemoteIDs }
+        }
+    }
+}
\ No newline at end of file
diff --git a/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/scheduling/SchedulerTest.kt b/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/scheduling/SchedulerTest.kt
new file mode 100644
index 00000000..748ab621
--- /dev/null
+++ b/syncengine/src/commonTest/kotlin/com/quran/shared/syncengine/scheduling/SchedulerTest.kt
@@ -0,0 +1,435 @@
+package com.quran.shared.syncengine.scheduling
+
+import kotlinx.coroutines.CompletableDeferred
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.delay
+import kotlinx.coroutines.test.TestScope
+import
kotlinx.coroutines.test.runTest +import kotlinx.coroutines.withContext +import kotlinx.coroutines.withTimeout +import kotlin.test.Test +import kotlin.test.assertEquals +import kotlin.test.assertTrue +import kotlin.time.Clock +import kotlin.time.Duration.Companion.milliseconds +import kotlin.time.Duration.Companion.seconds +import kotlin.time.ExperimentalTime + +class SchedulerTest { + + companion object { + private val TIMING_TOLERANCE = 300.milliseconds + private val DEFAULT_TIMEOUT = 6.seconds + + private val STANDARD_TEST_TIMINGS = SchedulerTimings( + appRefreshInterval = 900.milliseconds, + standardInterval = 1350.milliseconds, + localDataModifiedInterval = 450.milliseconds, + failureRetryingConfig = FailureRetryingConfig(baseDelay = 450.milliseconds, multiplier = 2.5, maximumRetries = 3) + ) + + private val OVERLAP_TEST_TIMINGS = SchedulerTimings( + appRefreshInterval = 900.milliseconds, + standardInterval = 2250.milliseconds, + localDataModifiedInterval = 450.milliseconds, + failureRetryingConfig = FailureRetryingConfig(baseDelay = 450.milliseconds, multiplier = 2.5, maximumRetries = 3) + ) + + private val SINGLE_RETRY_TIMINGS = SchedulerTimings( + appRefreshInterval = 900.milliseconds, + standardInterval = 1350.milliseconds, + localDataModifiedInterval = 600.milliseconds, + failureRetryingConfig = FailureRetryingConfig(baseDelay = 300.milliseconds, multiplier = 2.5, maximumRetries = 1) + ) + + @OptIn(ExperimentalTime::class) + private fun currentTimeMs(): Long = Clock.System.now().toEpochMilliseconds() + } + + private fun runTimeoutTest(testBody: suspend TestScope.() -> Unit) = runTest { + withContext(Dispatchers.Default.limitedParallelism(1)) { + withTimeout(DEFAULT_TIMEOUT) { + testBody() + } + } + } + + + @Test + fun `basic trigger timing and standard interval scheduling`() = runTimeoutTest { + val timings = STANDARD_TEST_TIMINGS + var taskCompleted = CompletableDeferred() + var count = 0 + + val scheduler = Scheduler(timings, { + count++ + taskCompleted.complete(currentTimeMs()) + }, { _ -> }) + + val timeBeforeCall = currentTimeMs() + scheduler.invoke(Trigger.APP_REFRESH) + + val firstCallTime = taskCompleted.await() + val firstDelay = (firstCallTime - timeBeforeCall).milliseconds + assertTimingWithinTolerance(firstDelay, timings.appRefreshInterval, "First call timing") + + taskCompleted = CompletableDeferred() + val secondCallTime = taskCompleted.await() + val totalTime = (secondCallTime - timeBeforeCall).milliseconds + val expectedTotalTime = timings.appRefreshInterval + timings.standardInterval + assertTimingWithinTolerance(totalTime, expectedTotalTime, "Standard interval scheduling") + + assertEquals(2, count, "Should be called twice") + scheduler.stop() + } + + @Test + fun `LOCAL_DATA_MODIFIED trigger should use faster interval than APP_START`() = runTimeoutTest { + val timings = STANDARD_TEST_TIMINGS + val taskCompleted = CompletableDeferred() + var callCount = 0 + + val scheduler = Scheduler(timings, { + callCount++ + taskCompleted.complete(currentTimeMs()) + }, { _ -> }) + + scheduler.invoke(Trigger.APP_REFRESH) + + delay(100.milliseconds) + val timeBeforeDataModifiedTrigger = currentTimeMs() + scheduler.invoke(Trigger.LOCAL_DATA_MODIFIED) + + val timeAfterCall = taskCompleted.await() + + val actualDelay = (timeAfterCall - timeBeforeDataModifiedTrigger).milliseconds + val expectedDelay = timings.localDataModifiedInterval + + assertTimingWithinTolerance( + actualDelay, + expectedDelay, + "Task should be called with LOCAL_DATA_MODIFIED timing" + ) + + assertEquals(1, 
callCount, "Should be called once") + scheduler.stop() + } + + @Test + fun `LOCAL_DATA_MODIFIED as first trigger should schedule with localDataModifiedInterval then standard interval`() = runTimeoutTest { + val timings = STANDARD_TEST_TIMINGS + var taskCompleted = CompletableDeferred() + var callCount = 0 + + val scheduler = Scheduler(timings, { + callCount++ + taskCompleted.complete(currentTimeMs()) + }, { _ -> }) + + val timeBeforeFirstTrigger = currentTimeMs() + scheduler.invoke(Trigger.LOCAL_DATA_MODIFIED) + + val firstCallTime = taskCompleted.await() + val firstCallDelay = (firstCallTime - timeBeforeFirstTrigger).milliseconds + val expectedFirstCallDelay = timings.localDataModifiedInterval + + assertTimingWithinTolerance( + firstCallDelay, + expectedFirstCallDelay, + "First call should use LOCAL_DATA_MODIFIED timing" + ) + assertEquals(1, callCount, "Should be called once for first call") + + taskCompleted = CompletableDeferred() + val timeBeforeSecondCall = currentTimeMs() + val secondCallTime = taskCompleted.await() + val secondCallDelay = (secondCallTime - timeBeforeSecondCall).milliseconds + val expectedSecondCallDelay = timings.standardInterval + + assertTimingWithinTolerance( + secondCallDelay, + expectedSecondCallDelay, + "Second call should use standard interval timing" + ) + assertEquals(2, callCount, "Should be called twice total") + + scheduler.stop() + } + + @Test + fun `trigger during standard delay should cancel and reschedule since it should fire quicker`() = runTimeoutTest { + val timings = OVERLAP_TEST_TIMINGS + var taskCompleted = CompletableDeferred() + var callCount = 0 + + val scheduler = Scheduler(timings, { + callCount++ + taskCompleted.complete(currentTimeMs()) + }, { _ -> }) + + val timeBeforeAppStartTrigger = currentTimeMs() + scheduler.invoke(Trigger.APP_REFRESH) + + val firstCallTime = taskCompleted.await() + val firstCallDelay = (firstCallTime - timeBeforeAppStartTrigger).milliseconds + val expectedFirstCallDelay = timings.appRefreshInterval + + assertTimingWithinTolerance( + firstCallDelay, + expectedFirstCallDelay, + "First call should use APP_START timing" + ) + assertEquals(1, callCount, "Should be called once for first call") + + delay(50.milliseconds) + val timeBeforeLocalDataTrigger = currentTimeMs() + scheduler.invoke(Trigger.LOCAL_DATA_MODIFIED) + + taskCompleted = CompletableDeferred() + val secondCallTime = taskCompleted.await() + val secondCallDelay = (secondCallTime - timeBeforeLocalDataTrigger).milliseconds + val expectedSecondCallDelay = timings.localDataModifiedInterval + + assertTimingWithinTolerance( + secondCallDelay, + expectedSecondCallDelay, + "Second call should use LOCAL_DATA_MODIFIED timing, not standard interval timing" + ) + assertEquals(2, callCount, "Should be called twice total") + + scheduler.stop() + } + + @Test + fun `IMMEDIATE trigger should fire immediately`() = runTimeoutTest { + val timings = STANDARD_TEST_TIMINGS + val taskCompleted = CompletableDeferred() + + val scheduler = Scheduler(timings, { + taskCompleted.complete(currentTimeMs()) + }, { _ -> }) + + val timeBeforeCall = currentTimeMs() + scheduler.invoke(Trigger.IMMEDIATE) + val callTime = taskCompleted.await() + + assertTrue( + (callTime - timeBeforeCall).milliseconds < TIMING_TOLERANCE, + "IMMEDIATE trigger should fire immediately" + ) + scheduler.stop() + } + + @Test + fun `task function failures should retry with exponential backoff until maximum retries reached`() = runTimeoutTest { + val timings = STANDARD_TEST_TIMINGS + var callCount = 0 + var 
maxRetriesError: Exception? = null + val maxRetriesDeferred = CompletableDeferred() + + val scheduler = Scheduler(timings, { + callCount++ + throw Exception("Test failure") + }, { error -> + maxRetriesError = error + maxRetriesDeferred.complete(Unit) + }) + + val timeBeforeTrigger = currentTimeMs() + scheduler.invoke(Trigger.IMMEDIATE) + + maxRetriesDeferred.await() + + assertEquals(timings.failureRetryingConfig.maximumRetries + 1, callCount, + "Should be called maximum retries + 1 times (initial + retries)") + assertEquals("Test failure", maxRetriesError?.message, "Exception should be passed to max retries callback") + + val totalTime = (currentTimeMs() - timeBeforeTrigger).milliseconds + val expectedMinTime = timings.failureRetryingConfig.baseDelay * + (1 + timings.failureRetryingConfig.multiplier + + timings.failureRetryingConfig.multiplier * timings.failureRetryingConfig.multiplier) + + assertTrue(totalTime >= expectedMinTime, + "Total time should account for retry delays. Expected at least $expectedMinTime, got $totalTime") + + scheduler.stop() + } + + @Test + fun `after task function failures standard interval scheduling should not occur`() = runTimeoutTest { + val timings = SINGLE_RETRY_TIMINGS + var callCount = 0 + val maxRetriesDeferred = CompletableDeferred() + + val scheduler = Scheduler(timings, { + callCount++ + throw Exception("Test failure") + }, { _ -> + maxRetriesDeferred.complete(Unit) + }) + + scheduler.invoke(Trigger.IMMEDIATE) + + maxRetriesDeferred.await() + + delay(timings.standardInterval + 50.milliseconds) + + assertEquals(timings.failureRetryingConfig.maximumRetries + 1, callCount, + "Call count should remain at maximum retries + 1, no additional calls should be scheduled") + + scheduler.stop() + } + + @Test + fun `after maximum retries reached applying any trigger should fire normally`() = runTimeoutTest { + val timings = SINGLE_RETRY_TIMINGS + var callCount = 0 + var shouldSucceed = false + val maxRetriesDeferred = CompletableDeferred() + val newTaskDeferred = CompletableDeferred() + + val scheduler = Scheduler(timings, { + callCount++ + if (shouldSucceed) { + newTaskDeferred.complete(Unit) + } else { + throw Exception("Test failure") + } + }, { _ -> + maxRetriesDeferred.complete(Unit) + }) + + scheduler.invoke(Trigger.IMMEDIATE) + + maxRetriesDeferred.await() + + assertEquals(timings.failureRetryingConfig.maximumRetries + 1, callCount, + "Should be called maximum retries + 1 times before max retries reached") + + shouldSucceed = true + val timeBeforeNewTrigger = currentTimeMs() + scheduler.invoke(Trigger.LOCAL_DATA_MODIFIED) + + newTaskDeferred.await() + + val timeAfterNewCall = currentTimeMs() + val newCallDelay = (timeAfterNewCall - timeBeforeNewTrigger).milliseconds + val expectedDelay = timings.localDataModifiedInterval + + assertEquals(timings.failureRetryingConfig.maximumRetries + 2, callCount, + "Should be called one more time after applying new trigger") + + assertTimingWithinTolerance( + newCallDelay, + expectedDelay, + "New trigger should use LOCAL_DATA_MODIFIED timing after maximum retries reached" + ) + + scheduler.stop() + } + + @Test + fun `triggers during job execution are handled correctly`() = runTimeoutTest { + val timings = STANDARD_TEST_TIMINGS + val taskStarted = CompletableDeferred() + val taskCanProceed = CompletableDeferred() + val secondTaskCompleted = CompletableDeferred() + var callCount = 0 + + val scheduler = Scheduler(timings, { + callCount++ + if (callCount == 1) { + taskStarted.complete(Unit) + taskCanProceed.await() + } else if 
(callCount == 2) { + secondTaskCompleted.complete(currentTimeMs()) + } + }, { _ -> }) + + scheduler.invoke(Trigger.APP_REFRESH) + taskStarted.await() + + val timeBeforeTrigger = currentTimeMs() + scheduler.invoke(Trigger.LOCAL_DATA_MODIFIED) + taskCanProceed.complete(Unit) + + val secondTaskTime = secondTaskCompleted.await() + val actualDelay = (secondTaskTime - timeBeforeTrigger).milliseconds + + assertTimingWithinTolerance( + actualDelay, + timings.localDataModifiedInterval, + "Buffered LOCAL_DATA_MODIFIED should be used" + ) + + assertEquals(2, callCount) + scheduler.stop() + } + + + @Test + fun `triggers during failed jobs should be ignored`() = runTimeoutTest { + val timings = SINGLE_RETRY_TIMINGS + val taskCanProceed = CompletableDeferred() + val maxRetriesReached = CompletableDeferred() + var callCount = 0 + + val scheduler = Scheduler(timings, { + callCount++ + if (callCount == 1) { + taskCanProceed.await() + } + throw Exception("Simulated failure") + }, { _ -> + maxRetriesReached.complete(Unit) + }) + + scheduler.invoke(Trigger.APP_REFRESH) + delay(50.milliseconds) + scheduler.invoke(Trigger.LOCAL_DATA_MODIFIED) + taskCanProceed.complete(Unit) + maxRetriesReached.await() + + val callCountAfterFailure = callCount + delay(timings.localDataModifiedInterval + TIMING_TOLERANCE) + + assertEquals(callCountAfterFailure, callCount, "No additional tasks should be scheduled") + scheduler.stop() + } + + + @Test + fun `triggers should be ignored after scheduler is stopped`() = runTimeoutTest { + val timings = STANDARD_TEST_TIMINGS + val taskCompleted = CompletableDeferred() + var callCount = 0 + + val scheduler = Scheduler(timings, { + callCount++ + taskCompleted.complete(Unit) + }, { _ -> }) + + scheduler.stop() + + // Give it a moment to process the stop + delay(50.milliseconds) + + scheduler.invoke(Trigger.APP_REFRESH) + scheduler.invoke(Trigger.LOCAL_DATA_MODIFIED) + scheduler.invoke(Trigger.IMMEDIATE) + + delay(timings.appRefreshInterval + 200.milliseconds) + + assertEquals(0, callCount, "Task should not be called after scheduler is stopped") + + assertTrue(taskCompleted.isCompleted.not(), "Task completion should not be triggered after stop") + } + + private fun assertTimingWithinTolerance(actual: kotlin.time.Duration, expected: kotlin.time.Duration, message: String) { + val difference = kotlin.math.abs((actual - expected).inWholeMilliseconds) + assertTrue( + difference <= TIMING_TOLERANCE.inWholeMilliseconds, + "$message. 
Expected: $expected ± $TIMING_TOLERANCE, Actual: $actual (difference: ${difference}ms)" + ) + } +} diff --git a/syncengine/src/jvmMain/kotlin/com/quran/shared/syncengine/network/HttpClientFactory.jvm.kt b/syncengine/src/jvmMain/kotlin/com/quran/shared/syncengine/network/HttpClientFactory.jvm.kt new file mode 100644 index 00000000..3904fdd5 --- /dev/null +++ b/syncengine/src/jvmMain/kotlin/com/quran/shared/syncengine/network/HttpClientFactory.jvm.kt @@ -0,0 +1,21 @@ +package com.quran.shared.syncengine.network + +import io.ktor.client.HttpClient +import io.ktor.client.engine.okhttp.OkHttp +import io.ktor.client.plugins.contentnegotiation.ContentNegotiation +import io.ktor.client.plugins.logging.LogLevel +import io.ktor.client.plugins.logging.Logging +import io.ktor.serialization.kotlinx.json.json + +actual object HttpClientFactory { + actual fun createHttpClient(): HttpClient { + return HttpClient(OkHttp) { + install(ContentNegotiation) { + json() + } + install(Logging) { + level = LogLevel.INFO + } + } + } +} \ No newline at end of file diff --git a/umbrella/build.gradle.kts b/umbrella/build.gradle.kts index d9be1e62..313d4b05 100644 --- a/umbrella/build.gradle.kts +++ b/umbrella/build.gradle.kts @@ -1,4 +1,4 @@ -import com.vanniktech.maven.publish.SonatypeHost +import org.jetbrains.kotlin.gradle.plugin.mpp.apple.XCFramework plugins { alias(libs.plugins.kotlin.multiplatform) @@ -6,6 +6,9 @@ plugins { } kotlin { + val xcfName = "Shared" + val xcf = XCFramework(xcfName) + listOf( iosX64(), iosArm64(), @@ -17,6 +20,11 @@ kotlin { export(projects.syncengine) export(projects.persistence) + export(projects.syncPipelines) + export(projects.auth) + + binaryOption("bundleId", "com.quran.sync.$xcfName") + xcf.add(this) } } @@ -24,12 +32,14 @@ kotlin { commonMain.dependencies { api(projects.syncengine) api(projects.persistence) + api(projects.syncPipelines) + api(projects.auth) } } } mavenPublishing { - publishToMavenCentral(SonatypeHost.CENTRAL_PORTAL) + publishToMavenCentral() signAllPublications() coordinates(group.toString(), "syncengine", version.toString()) @@ -37,6 +47,6 @@ mavenPublishing { name = "Quran.com Umbrella Framework" description = "An umbrella framework for Quran.com Persistence and SyncEngine" inceptionYear = "2025" - url = "https://github.com/quran/syncengine" + url = "https://github.com/quran/mobile-sync" } }