
Commit 221c177

Merge remote-tracking branch 'origin/dev-r85-v2-pull' into dev-backend-libs-upgrade
2 parents: 9f2e121 + 612ed7d

7 files changed: 178 additions & 37 deletions

LICENSES/CLA-signed-list.md

Lines changed: 1 addition & 0 deletions
@@ -21,3 +21,4 @@ C/ My company has custom contribution contract with Lutra Consulting Ltd. or I a
 * luxusko, 25th August 2023
 * jozef-budac, 30th January 2024
 * fernandinand, 13th March 2025
+* xkello, 27th January 2025

server/mergin/sync/models.py

Lines changed: 6 additions & 3 deletions
@@ -1735,9 +1735,12 @@ def clear(self):
         """Clean up pending upload.
         Uploaded files and table records are removed, and another upload can start.
         """
-        move_to_tmp(self.upload_dir, self.id)
-        db.session.delete(self)
-        db.session.commit()
+        try:
+            move_to_tmp(self.upload_dir, self.id)
+            db.session.delete(self)
+            db.session.commit()
+        except Exception:
+            logging.exception(f"Failed to clear upload.")

     def process_chunks(
         self, use_shared_chunk_dir: bool

server/mergin/sync/public_api_v2_controller.py

Lines changed: 22 additions & 10 deletions
@@ -16,6 +16,7 @@
 from flask_login import current_user
 from marshmallow import ValidationError
 from sqlalchemy.exc import IntegrityError
+from sqlalchemy.orm.exc import ObjectDeletedError

 from ..app import db
 from ..auth import auth_required
@@ -59,7 +60,7 @@
 )
 from .tasks import remove_transaction_chunks
 from .workspace import WorkspaceRole
-from ..utils import parse_order_params
+from ..utils import parse_order_params, get_schema_fields_map


 @auth_required
@@ -328,6 +329,7 @@ def create_project_version(id):
         upload.clear()
         return DataSyncError(failed_files=errors).response(422)

+    upload_deleted = False
     try:
         pv = ProjectVersion(
             project,
@@ -356,15 +358,20 @@
         remove_transaction_chunks.delay(chunks_ids)

         logging.info(
-            f"Push finished for project: {project.id}, project version: {v_next_version}, upload id: {upload.id}."
+            f"Push finished for project: {project.id}, project version: {v_next_version}."
         )
         project_version_created.send(pv)
         push_finished.send(pv)
-    except (psycopg2.Error, FileNotFoundError, IntegrityError) as err:
+    except (
+        psycopg2.Error,
+        FileNotFoundError,
+        IntegrityError,
+        ObjectDeletedError,
+    ) as err:
         db.session.rollback()
+        upload_deleted = isinstance(err, ObjectDeletedError)
         logging.exception(
-            f"Failed to finish push for project: {project.id}, project version: {v_next_version}, "
-            f"upload id: {upload.id}.: {str(err)}"
+            f"Failed to finish push for project: {project.id}, project version: {v_next_version}: {str(err)}"
         )
         if (
             os.path.exists(version_dir)
@@ -386,8 +393,9 @@ def create_project_version(id):
             move_to_tmp(version_dir)
         raise
     finally:
-        # remove artifacts
-        upload.clear()
+        # remove artifacts only if upload object is still valid
+        if not upload_deleted:
+            upload.clear()

     result = ProjectSchemaV2().dump(project)
     result["files"] = ProjectFileSchema(
@@ -497,11 +505,15 @@ def list_workspace_projects(workspace_id, page, per_page, order_params=None, q=N
         projects = projects.filter(Project.name.ilike(f"%{q}%"))

     if order_params:
-        order_by_params = parse_order_params(Project, order_params)
+        schema_map = get_schema_fields_map(ProjectSchemaV2)
+        order_by_params = parse_order_params(
+            Project, order_params, field_map=schema_map
+        )
         projects = projects.order_by(*order_by_params)

-    result = projects.paginate(page=page, per_page=per_page).items
-    total = projects.paginate(page=page, per_page=per_page).total
+    pagination = projects.paginate(page=page, per_page=per_page)
+    result = pagination.items
+    total = pagination.total

     data = ProjectSchemaV2(many=True).dump(result)
     return jsonify(projects=data, count=total, page=page, per_page=per_page), 200
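
Note on the new upload_deleted flag in create_project_version(): ObjectDeletedError means a concurrent request already removed the pending upload row, so calling upload.clear() again in the finally block would raise a secondary exception and turn the intended 422 into a 500. A minimal stand-alone sketch of that race follows; PendingUpload and the two sessions are hypothetical stand-ins (in-memory SQLite rather than the project's Upload model and PostgreSQL), not code from this commit.

# Stand-alone sketch of the race guarded above; all names here are hypothetical.
from sqlalchemy import create_engine, Column, Integer
from sqlalchemy.orm import declarative_base, Session
from sqlalchemy.orm.exc import ObjectDeletedError

Base = declarative_base()

class PendingUpload(Base):
    __tablename__ = "pending_upload"
    id = Column(Integer, primary_key=True)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as s1, Session(engine) as s2:
    upload = PendingUpload(id=1)
    s1.add(upload)
    s1.commit()  # attributes on `upload` are now expired

    # a concurrent request removes the row underneath the first session
    s2.query(PendingUpload).filter_by(id=1).delete()
    s2.commit()

    upload_deleted = False
    try:
        _ = upload.id  # refresh of the expired object fails -> ObjectDeletedError
    except ObjectDeletedError:
        s1.rollback()
        upload_deleted = True
    finally:
        # mirrors the controller: skip cleanup when the ORM object is gone
        if not upload_deleted:
            s1.delete(upload)
            s1.commit()
    print("upload row already deleted:", upload_deleted)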

server/mergin/tests/test_public_api_v2.py

Lines changed: 52 additions & 0 deletions
@@ -46,6 +46,7 @@
 from ..sync.files import DeltaChange, PushChangeType
 from ..sync.utils import Checkpoint, is_versioned_file
 from sqlalchemy.exc import IntegrityError
+from sqlalchemy.orm.exc import ObjectDeletedError
 import pytest
 from datetime import datetime, timedelta, timezone
 import json
@@ -953,6 +954,46 @@ def test_create_version_failures(client):
     assert response.status_code == 409


+def test_create_version_object_deleted_error(client):
+    """Test that ObjectDeletedError during push returns 422 without secondary exception"""
+    project = Project.query.filter_by(
+        workspace_id=test_workspace_id, name=test_project
+    ).first()
+
+    data = {
+        "version": "v1",
+        "changes": {
+            "added": [],
+            "removed": [
+                file_info(test_project_dir, "base.gpkg"),
+            ],
+            "updated": [],
+        },
+    }
+
+    # Create a real ObjectDeletedError by using internal SQLAlchemy state
+    def raise_object_deleted(*args, **kwargs):
+        # Create a minimal state-like object that ObjectDeletedError can use
+        class FakeState:
+            class_ = Upload
+
+            def obj(self):
+                return None
+
+        raise ObjectDeletedError(FakeState())
+
+    with patch.object(
+        ProjectVersion,
+        "__init__",
+        side_effect=raise_object_deleted,
+    ):
+        response = client.post(f"v2/projects/{project.id}/versions", json=data)
+
+    # Should return 422 UploadError, not 500 from secondary exception
+    assert response.status_code == 422
+    assert response.json["code"] == UploadError.code
+
+
 def test_upload_chunk(client):
     """Test pushing a chunk to a project"""
     project = Project.query.filter_by(
@@ -1225,6 +1266,17 @@ def test_list_workspace_projects(client):
         url + f"?page={page}&per_page={per_page}&q=1&order_params=created DESC"
     )
     assert response.json["projects"][0]["name"] == "project_10"
+    # using field name instead of column names for sorting
+    p4 = Project.query.filter(Project.name == project_name).first()
+    p4.disk_usage = 1234567
+    db.session.commit()
+    response = client.get(url + f"?page=1&per_page=10&order_params=size DESC")
+    resp_data = json.loads(response.data)
+    assert resp_data["projects"][0]["name"] == project_name
+
+    # invalid order param
+    response = client.get(url + f"?page=1&per_page=10&order_params=invalid DESC")
+    assert response.status_code == 200

     # no permissions to workspace
     user2 = add_user("user", "password")

server/mergin/tests/test_utils.py

Lines changed: 25 additions & 1 deletion
@@ -7,14 +7,15 @@
 import json
 import pytest
 from flask import url_for, current_app
+from marshmallow import Schema, fields
 from sqlalchemy import desc
 import os
 from unittest.mock import patch
 from pathvalidate import sanitize_filename
 from pygeodiff import GeoDiff
 from pathlib import PureWindowsPath

-from ..utils import save_diagnostic_log_file
+from ..utils import save_diagnostic_log_file, get_schema_fields_map

 from ..sync.utils import (
     is_reserved_word,
@@ -331,3 +332,26 @@ def test_checkpoint_utils():
     # dummy request
     versions = Checkpoint.get_checkpoints(2, 1)
     assert len(versions) == 0
+
+def test_get_schema_fields_map():
+    """Test that schema map correctly resolves DB attributes, keeps all fields, and ignores virtual fields."""
+
+    # dummy schema for testing
+    class TestSchema(Schema):
+        # standard field -> map 'name': 'name'
+        name = fields.String()
+        # aliased field -> map 'size': 'disk_usage'
+        size = fields.Integer(attribute="disk_usage")
+        # virtual fields -> skip
+        version = fields.Function(lambda obj: "v1")
+        role = fields.Method("get_role")
+        # excluded field - set to None in schema inheritance -> skip
+        hidden_field = None
+
+    schema_map = get_schema_fields_map(TestSchema)
+
+    expected_map = {
+        "name": "name",
+        "size": "disk_usage",
+    }
+    assert schema_map == expected_map

server/mergin/utils.py

Lines changed: 45 additions & 8 deletions
@@ -1,18 +1,20 @@
 # Copyright (C) Lutra Consulting Limited
 #
 # SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-MerginMaps-Commercial
+import logging
+
 import math
 from collections import namedtuple
 from datetime import datetime, timedelta, timezone
 from enum import Enum
 import os
 from flask import current_app
 from flask_sqlalchemy.model import Model
+from marshmallow import Schema, fields
 from pathvalidate import sanitize_filename
 from sqlalchemy import Column, JSON
 from sqlalchemy.sql.elements import UnaryExpression
-from typing import Optional
-
+from typing import Optional, Type

 OrderParam = namedtuple("OrderParam", "name direction")

@@ -33,7 +35,7 @@ def split_order_param(order_param: str) -> Optional[OrderParam]:


 def get_order_param(
-    cls: Model, order_param: OrderParam, json_sort: dict = None
+    cls: Model, order_param: OrderParam, json_sort: dict = None, field_map: dict = None
 ) -> Optional[UnaryExpression]:
     """Return order by clause parameter for SQL query

@@ -43,15 +45,22 @@
     :type order_param: OrderParam
     :param json_sort: type mapping for sort by json field, e.g. '{"storage": "int"}', defaults to None
     :type json_sort: dict
+    :param field_map: mapping for translating public field names to internal DB columns, e.g. '{"size": "disk_usage"}'
+    :type field_map: dict
     """
+    # translate field name to column name
+    db_column_name = order_param.name
+    if field_map and order_param.name in field_map:
+        db_column_name = field_map[order_param.name]
     # find candidate for nested json sort
-    if "." in order_param.name:
-        col, attr = order_param.name.split(".")
+    if "." in db_column_name:
+        col, attr = db_column_name.split(".")
     else:
-        col = order_param.name
+        col = db_column_name
         attr = None
     order_attr = cls.__table__.c.get(col, None)
     if not isinstance(order_attr, Column):
+        logging.warning("Ignoring invalid order parameter.")
         return
     # sort by key in JSON field
     if attr:
@@ -80,7 +89,9 @@
         return order_attr.desc()


-def parse_order_params(cls: Model, order_params: str, json_sort: dict = None):
+def parse_order_params(
+    cls: Model, order_params: str, json_sort: dict = None, field_map: dict = None
+) -> list[UnaryExpression]:
     """Convert order parameters in query string to list of order by clauses.

     :param cls: Db model class
@@ -89,6 +100,8 @@ def parse_order_params(cls: Model, order_params: str, json_sort: dict = None):
     :type order_params: str
     :param json_sort: type mapping for sort by json field, e.g. '{"storage": "int"}', defaults to None
     :type json_sort: dict
+    :param field_map: mapping response fields to database column names, e.g. '{"size": "disk_usage"}'
+    :type field_map: dict

     :rtype: List[Column]
     """
@@ -97,7 +110,7 @@
         order_param = split_order_param(p)
         if not order_param:
             continue
-        order_attr = get_order_param(cls, order_param, json_sort)
+        order_attr = get_order_param(cls, order_param, json_sort, field_map)
         if order_attr is not None:
             order_by_params.append(order_attr)
     return order_by_params
@@ -135,3 +148,27 @@ def save_diagnostic_log_file(app: str, username: str, body: bytes) -> str:
         f.write(content)

     return file_name
+
+
+def get_schema_fields_map(schema: Type[Schema]) -> dict:
+    """
+    Creates a mapping of schema field names to corresponding DB columns.
+    This allows sorting by the API field name (e.g. 'size') while
+    actually sorting by the database column (e.g. 'disk_usage').
+    """
+    mapping = {}
+    for name, field in schema._declared_fields.items():
+        # some fields could have been overridden with None to be excluded
+        if not field:
+            continue
+        # skip virtual fields as DB cannot sort by them
+        if isinstance(
+            field, (fields.Function, fields.Method, fields.Nested, fields.List)
+        ):
+            continue
+        if field.attribute:
+            mapping[name] = field.attribute
+        # keep the map complete
+        else:
+            mapping[name] = name
+    return mapping
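
A short usage sketch of the new get_schema_fields_map() and field_map support, mirroring the list_workspace_projects change in public_api_v2_controller.py. It assumes the server package is importable and a Flask application context is active; the import path of ProjectSchemaV2 is an assumption, as it is not shown in this diff.

# Usage sketch only, not code from this commit; ProjectSchemaV2 import path assumed.
from mergin.utils import get_schema_fields_map, parse_order_params
from mergin.sync.models import Project
from mergin.sync.schemas import ProjectSchemaV2  # module name assumed

# e.g. {"name": "name", "size": "disk_usage", ...}; Function/Method/Nested/List
# fields and fields overridden with None are skipped
field_map = get_schema_fields_map(ProjectSchemaV2)

# clients sort by the public field name "size"; the query orders by disk_usage,
# and an unknown field name is logged as a warning and ignored
order_by = parse_order_params(Project, "size DESC", field_map=field_map)
pagination = Project.query.order_by(*order_by).paginate(page=1, per_page=10)
data = ProjectSchemaV2(many=True).dump(pagination.items)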
