From ada3cc147e752ea7065e268c54fc7caf8707435c Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 14 Aug 2025 14:39:52 +0200 Subject: [PATCH 001/111] add celery-library as api-server dependency --- services/api-server/requirements/_base.in | 1 + services/api-server/requirements/_base.txt | 266 ++++++++++++++++++++- services/api-server/requirements/_test.txt | 6 +- 3 files changed, 270 insertions(+), 3 deletions(-) diff --git a/services/api-server/requirements/_base.in b/services/api-server/requirements/_base.in index 6cad895115c..58825ffc491 100644 --- a/services/api-server/requirements/_base.in +++ b/services/api-server/requirements/_base.in @@ -7,6 +7,7 @@ --constraint ./constraints.txt # intra-repo required dependencies +--requirement ../../../packages/celery-library/requirements/_base.in --requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in diff --git a/services/api-server/requirements/_base.txt b/services/api-server/requirements/_base.txt index b93d186b0dd..2733185971e 100644 --- a/services/api-server/requirements/_base.txt +++ b/services/api-server/requirements/_base.txt @@ -1,22 +1,27 @@ aio-pika==9.5.3 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in aiocache==0.12.3 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r 
requirements/../../../packages/simcore-sdk/requirements/_base.in aiodebug==2.3.0 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in aiodocker==0.24.0 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in aiofiles==24.1.0 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in @@ -25,6 +30,18 @@ aiohappyeyeballs==2.6.1 # via aiohttp aiohttp==3.12.12 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -63,6 +80,8 @@ alembic==1.14.0 # via # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in +amqp==5.3.1 + # via kombu annotated-types==0.7.0 # via pydantic anyio==4.7.0 @@ -74,6 +93,9 @@ anyio==4.7.0 # watchfiles arrow==1.3.0 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in @@ -88,8 +110,24 @@ attrs==24.2.0 # via # aiohttp # jsonschema +billiard==4.2.1 + # via celery +celery==5.5.3 + # via -r requirements/../../../packages/celery-library/requirements/_base.in certifi==2024.8.30 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -127,11 +165,33 @@ charset-normalizer==3.4.0 # via requests click==8.1.7 # via + # celery + # click-didyoumean + # click-plugins + # click-repl # rich-toolkit # 
typer # uvicorn +click-didyoumean==0.3.1 + # via celery +click-plugins==1.1.1.2 + # via celery +click-repl==0.3.0 + # via celery cryptography==44.0.0 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -183,6 +243,7 @@ fastapi-pagination==0.12.32 # via -r requirements/_base.in faststream==0.5.33 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in flexcache==0.3 @@ -215,6 +276,18 @@ httptools==0.6.4 # via uvicorn httpx==0.27.2 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # 
-c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -259,6 +332,18 @@ importlib-metadata==8.5.0 # via opentelemetry-api jinja2==3.1.6 # via + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -291,12 +376,28 @@ jinja2==3.1.6 jsonschema==3.2.0 # via # -c requirements/./constraints.txt + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +kombu==5.5.4 + # via celery mako==1.3.10 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -340,6 +441,7 @@ multidict==6.1.0 # yarl opentelemetry-api==1.34.1 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -357,6 +459,7 @@ opentelemetry-api==1.34.1 # opentelemetry-semantic-conventions opentelemetry-exporter-otlp==1.34.1 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in opentelemetry-exporter-otlp-proto-common==1.34.1 @@ -379,12 +482,14 @@ opentelemetry-instrumentation==0.55b1 # opentelemetry-instrumentation-requests opentelemetry-instrumentation-aio-pika==0.55b1 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-asgi==0.55b1 # via opentelemetry-instrumentation-fastapi opentelemetry-instrumentation-asyncpg==0.55b1 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-fastapi==0.55b1 @@ -393,14 +498,17 @@ opentelemetry-instrumentation-httpx==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in opentelemetry-instrumentation-logging==0.55b1 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-redis==0.55b1 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-requests==0.55b1 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in opentelemetry-proto==1.34.1 @@ -410,6 +518,7 @@ opentelemetry-proto==1.34.1 # opentelemetry-exporter-otlp-proto-http opentelemetry-sdk==1.34.1 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -432,6 +541,18 @@ opentelemetry-util-http==0.55b1 # opentelemetry-instrumentation-requests 
orjson==3.10.12 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -460,6 +581,14 @@ orjson==3.10.12 # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in @@ -483,6 +612,7 @@ packaging==24.2 # via # -r requirements/../../../packages/simcore-sdk/requirements/_base.in # -r requirements/_base.in + # kombu # opentelemetry-instrumentation pamqp==3.3.0 # via aiormq @@ -494,6 +624,8 @@ platformdirs==4.3.6 # via pint prometheus-client==0.21.1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +prompt-toolkit==3.0.51 + # via click-repl propcache==0.2.1 # via # aiohttp @@ -504,6 +636,7 @@ protobuf==5.29.5 # opentelemetry-proto psutil==6.1.0 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in psycopg2-binary==2.9.10 @@ -514,6 +647,18 @@ pycryptodome==3.21.0 # via stream-zip pydantic==2.11.7 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -542,6 +687,17 @@ pydantic==2.11.7 # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in @@ -579,6 +735,14 @@ pydantic-core==2.33.2 # via pydantic pydantic-extra-types==2.10.5 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r 
requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in @@ -599,6 +763,18 @@ pydantic-extra-types==2.10.5 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in pydantic-settings==2.7.0 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -627,6 +803,10 @@ pydantic-settings==2.7.0 # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -639,12 +819,15 @@ pygments==2.18.0 # via rich pyinstrument==5.0.0 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in pyrsistent==0.20.0 # via jsonschema python-dateutil==2.9.0.post0 - # via arrow + # via + # arrow + # celery python-dotenv==1.0.1 # via # pydantic-settings @@ -653,6 +836,18 @@ python-multipart==0.0.19 # via fastapi pyyaml==6.0.2 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -681,12 +876,25 @@ pyyaml==6.0.2 # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in # uvicorn redis==5.2.1 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -715,12 +923,16 @@ redis==5.2.1 # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in + # kombu requests==2.32.4 # via opentelemetry-exporter-otlp-proto-http rich==13.9.4 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -743,6 +955,18 @@ sniffio==1.3.1 # httpx sqlalchemy==1.4.54 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -777,6 +1001,18 @@ sqlalchemy==1.4.54 # alembic starlette==0.41.3 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -808,25 +1044,31 @@ starlette==0.41.3 # fastapi stream-zip==0.0.83 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in tenacity==9.0.0 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in # -r requirements/_base.in toolz==1.0.0 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r 
requirements/../../../packages/simcore-sdk/requirements/_base.in typer==0.15.1 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -859,8 +1101,22 @@ typing-extensions==4.14.1 # typing-inspection typing-inspection==0.4.1 # via pydantic +tzdata==2025.2 + # via kombu urllib3==2.5.0 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -896,8 +1152,15 @@ uvicorn==0.34.2 # fastapi-cli uvloop==0.21.0 # via uvicorn +vine==5.1.0 + # via + # amqp + # celery + # kombu watchfiles==1.0.0 # via uvicorn +wcwidth==0.2.13 + # via prompt-toolkit websockets==14.1 # via uvicorn wrapt==1.17.0 @@ -908,6 +1171,7 @@ wrapt==1.17.0 # opentelemetry-instrumentation-redis yarl==1.18.3 # via + # -r 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in diff --git a/services/api-server/requirements/_test.txt b/services/api-server/requirements/_test.txt index a3a2bc1690e..7b97932b9fe 100644 --- a/services/api-server/requirements/_test.txt +++ b/services/api-server/requirements/_test.txt @@ -390,8 +390,10 @@ typing-inspection==0.4.1 # via # -c requirements/_base.txt # pydantic -tzdata==2025.1 - # via faker +tzdata==2025.2 + # via + # -c requirements/_base.txt + # faker urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt From 1350f84a573c7073f91b3bdd5630a8d8852a6dbd Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 14 Aug 2025 15:23:18 +0200 Subject: [PATCH 002/111] initial setup of celery worker --- .../celery/__init__.py | 0 .../celery/_worker_tasks.py/tasks.py | 15 ++++++ .../celery/worker_main.py | 46 +++++++++++++++++++ .../core/settings.py | 9 ++++ 4 files changed, 70 insertions(+) create mode 100644 services/api-server/src/simcore_service_api_server/celery/__init__.py create mode 100644 services/api-server/src/simcore_service_api_server/celery/_worker_tasks.py/tasks.py create mode 100644 services/api-server/src/simcore_service_api_server/celery/worker_main.py diff --git a/services/api-server/src/simcore_service_api_server/celery/__init__.py b/services/api-server/src/simcore_service_api_server/celery/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/api-server/src/simcore_service_api_server/celery/_worker_tasks.py/tasks.py b/services/api-server/src/simcore_service_api_server/celery/_worker_tasks.py/tasks.py new file mode 100644 index 00000000000..bc8edc856e8 --- 
/dev/null +++ b/services/api-server/src/simcore_service_api_server/celery/_worker_tasks.py/tasks.py @@ -0,0 +1,15 @@ +import logging + +from celery import Celery # type: ignore[import-untyped] +from celery_library.types import register_celery_types, register_pydantic_types +from servicelib.logging_utils import log_context + +_logger = logging.getLogger(__name__) + + +def setup_worker_tasks(app: Celery) -> None: + register_celery_types() + register_pydantic_types() + + with log_context(_logger, logging.INFO, msg="worker task registration"): + pass diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_main.py b/services/api-server/src/simcore_service_api_server/celery/worker_main.py new file mode 100644 index 00000000000..7e3297b6c4c --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/celery/worker_main.py @@ -0,0 +1,46 @@ +"""Main application to be deployed in for example uvicorn.""" + +from functools import partial + +from celery.signals import worker_init, worker_shutdown # type: ignore[import-untyped] +from celery_library.common import create_app as create_celery_app +from celery_library.signals import ( + on_worker_init, + on_worker_shutdown, +) +from servicelib.fastapi.celery.app_server import FastAPIAppServer +from servicelib.logging_utils import setup_loggers + +from ..core.application import create_app +from ..core.settings import ApplicationSettings +from ._worker_tasks import setup_worker_tasks + +_settings = ApplicationSettings.create_from_envs() + +setup_loggers( + log_format_local_dev_enabled=_settings.API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=_settings.API_SERVER_LOG_FILTER_MAPPING, + tracing_settings=_settings.API_SERVER_TRACING, + log_base_level=_settings.log_level, + noisy_loggers=None, +) + + +assert _settings.API_SERVER_CELERY # nosec +app = create_celery_app(_settings.API_SERVER_CELERY) + +app_server = FastAPIAppServer(app=create_app(_settings)) + + +def worker_init_wrapper(sender, 
**_kwargs): + assert _settings.API_SERVER_CELERY # nosec + return partial(on_worker_init, app_server, _settings.API_SERVER_CELERY)( + sender, **_kwargs + ) + + +worker_init.connect(worker_init_wrapper) +worker_shutdown.connect(on_worker_shutdown) + + +setup_worker_tasks(app) diff --git a/services/api-server/src/simcore_service_api_server/core/settings.py b/services/api-server/src/simcore_service_api_server/core/settings.py index 59f6812b896..9b622a5ddc9 100644 --- a/services/api-server/src/simcore_service_api_server/core/settings.py +++ b/services/api-server/src/simcore_service_api_server/core/settings.py @@ -13,6 +13,7 @@ ) from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.base import BaseCustomSettings +from settings_library.celery import CelerySettings from settings_library.director_v2 import DirectorV2Settings from settings_library.postgres import PostgresSettings from settings_library.rabbit import RabbitSettings @@ -102,6 +103,10 @@ class ApplicationSettings(BasicSettings): # DOCKER BOOT SC_BOOT_MODE: BootModeEnum | None = None + API_SERVER_CELERY: Annotated[ + CelerySettings | None, Field(json_schema_extra={"auto_default_from_env": True}) + ] = None + API_SERVER_POSTGRES: Annotated[ PostgresSettings | None, Field(json_schema_extra={"auto_default_from_env": True}), @@ -142,6 +147,10 @@ class ApplicationSettings(BasicSettings): ), ] + API_SERVER_WORKER_MODE: Annotated[ + bool, Field(description="If True, the API server runs in worker mode") + ] = False + @cached_property def debug(self) -> bool: """If True, debug tracebacks should be returned on errors.""" From 8813fa5de15d052bbf0f67b786395faaab5de0b9 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 19 Aug 2025 14:10:17 +0200 Subject: [PATCH 003/111] fix openapi-specs make target in api-server --- services/api-server/.env-devel | 6 ++++++ services/api-server/Makefile | 4 +++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git 
a/services/api-server/.env-devel b/services/api-server/.env-devel index 29d4830d47f..a466dad4ee9 100644 --- a/services/api-server/.env-devel +++ b/services/api-server/.env-devel @@ -31,6 +31,12 @@ POSTGRES_HOST=127.0.0.1 # Enables debug SC_BOOT_MODE=debug +# rabbit settings +RABBIT_HOST=rabbit +RABBIT_PASSWORD=adminadmin +RABBIT_PORT=5672 +RABBIT_SECURE=false +RABBIT_USER=admin # webserver WEBSERVER_HOST=webserver diff --git a/services/api-server/Makefile b/services/api-server/Makefile index 4db8527326b..555c88f6ec3 100644 --- a/services/api-server/Makefile +++ b/services/api-server/Makefile @@ -28,7 +28,9 @@ reqs: ## compiles pip requirements (.in -> .txt) define _create_and_validate_openapi # generating openapi specs file under $< (NOTE: Skips DEV FEATURES since this OAS is the 'offically released'!) - @source .env; \ + set -o allexport; \ + source .env; \ + set +o allexport; \ export API_SERVER_DEV_FEATURES_ENABLED=$1; \ python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app_factory().openapi(), indent=2) )" > $@ From 4a822e58843a8da3c6827af7002dad0e4b89e903 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 19 Aug 2025 14:16:22 +0200 Subject: [PATCH 004/111] fix openapi specs generation in api-server --- services/api-server/.env-devel | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/services/api-server/.env-devel b/services/api-server/.env-devel index a466dad4ee9..a18401e3a5b 100644 --- a/services/api-server/.env-devel +++ b/services/api-server/.env-devel @@ -28,10 +28,7 @@ POSTGRES_PASSWORD=test POSTGRES_DB=test POSTGRES_HOST=127.0.0.1 -# Enables debug -SC_BOOT_MODE=debug - -# rabbit settings +# rabbit RABBIT_HOST=rabbit RABBIT_PASSWORD=adminadmin RABBIT_PORT=5672 From 22c6c2e1c43ca6c701ec05b7232115b5e36708fd Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 19 Aug 2025 14:19:09 +0200 Subject: [PATCH 005/111] name fix --- .../celery/{_worker_tasks.py => _worker_tasks}/tasks.py | 0 1 file changed, 
0 insertions(+), 0 deletions(-) rename services/api-server/src/simcore_service_api_server/celery/{_worker_tasks.py => _worker_tasks}/tasks.py (100%) diff --git a/services/api-server/src/simcore_service_api_server/celery/_worker_tasks.py/tasks.py b/services/api-server/src/simcore_service_api_server/celery/_worker_tasks/tasks.py similarity index 100% rename from services/api-server/src/simcore_service_api_server/celery/_worker_tasks.py/tasks.py rename to services/api-server/src/simcore_service_api_server/celery/_worker_tasks/tasks.py From b8325d881ed359bc6aad41b8e332da3a8676ed3a Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 19 Aug 2025 16:24:40 +0200 Subject: [PATCH 006/111] first attempt at definining run function task --- .../api/dependencies/webserver_http.py | 10 +- .../celery/_worker_tasks/_functions_tasks.py | 108 ++++++++++++++++++ .../celery/worker_main.py | 2 +- 3 files changed, 112 insertions(+), 8 deletions(-) create mode 100644 services/api-server/src/simcore_service_api_server/celery/_worker_tasks/_functions_tasks.py diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/webserver_http.py b/services/api-server/src/simcore_service_api_server/api/dependencies/webserver_http.py index 377356f22c0..df4325dc1ff 100644 --- a/services/api-server/src/simcore_service_api_server/api/dependencies/webserver_http.py +++ b/services/api-server/src/simcore_service_api_server/api/dependencies/webserver_http.py @@ -4,7 +4,6 @@ from common_library.json_serialization import json_dumps from cryptography.fernet import Fernet from fastapi import Depends, FastAPI, HTTPException, status -from fastapi.requests import Request from ..._constants import MSG_BACKEND_SERVICE_UNAVAILABLE from ...core.settings import ApplicationSettings, WebServerSettings @@ -29,19 +28,16 @@ def _get_settings( return settings -def _get_encrypt(request: Request) -> Fernet | None: - e: Fernet | None = getattr(request.app.state, "webserver_fernet", None) - return e - - 
def get_session_cookie( identity: Annotated[str, Depends(get_active_user_email)], settings: Annotated[WebServerSettings, Depends(_get_settings)], - fernet: Annotated[Fernet | None, Depends(_get_encrypt)], + app: Annotated[FastAPI, Depends(get_app)], ) -> dict: # Based on aiohttp_session and aiohttp_security # SEE services/web/server/tests/unit/with_dbs/test_login.py + fernet: Fernet | None = getattr(app.state, "webserver_fernet", None) + if fernet is None: raise HTTPException( status.HTTP_503_SERVICE_UNAVAILABLE, detail=MSG_BACKEND_SERVICE_UNAVAILABLE diff --git a/services/api-server/src/simcore_service_api_server/celery/_worker_tasks/_functions_tasks.py b/services/api-server/src/simcore_service_api_server/celery/_worker_tasks/_functions_tasks.py new file mode 100644 index 00000000000..4e0065ecf13 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/celery/_worker_tasks/_functions_tasks.py @@ -0,0 +1,108 @@ +from celery import Task +from celery_library.utils import get_app_server +from fastapi import FastAPI # type: ignore[import-untyped] +from models_library.functions import FunctionInputs, RegisteredFunction +from models_library.projects_nodes_io import NodeID +from servicelib.celery.models import TaskID + +from ...api.dependencies.authentication import Identity +from ...api.dependencies.rabbitmq import get_rabbitmq_rpc_client +from ...api.dependencies.services import ( + get_api_client, + get_catalog_service, + get_directorv2_service, + get_function_job_service, + get_job_service, + get_solver_service, + get_storage_service, + get_wb_api_rpc_client, + get_webserver_session, +) +from ...api.dependencies.webserver_http import get_session_cookie +from ...models.api_resources import JobLinks +from ...models.schemas.jobs import JobPricingSpecification +from ...services_http.director_v2 import DirectorV2Api +from ...services_http.storage import StorageApi + + +async def _assemble_function_job_service(app: FastAPI, identity: Identity): + # to avoid 
this show we could introduce a dependency injection + # system which is not linked to FastAPI (i.e. can be resolved manually). + # See also https://github.com/fastapi/fastapi/issues/1105#issuecomment-609919850 + settings = app.state.settings + assert settings.API_SERVER_WEBSERVER # nosec + session_cookie = get_session_cookie( + identity=identity.email, settings=settings.API_SERVER_WEBSERVER, app=app + ) + + rpc_client = get_rabbitmq_rpc_client(app=app) + web_server_rest_client = get_webserver_session( + app=app, session_cookies=session_cookie, identity=identity + ) + web_api_rpc_client = await get_wb_api_rpc_client(app=app) + director2_api = get_api_client(DirectorV2Api) + assert isinstance(director2_api, DirectorV2Api) + storage_api = get_api_client(StorageApi) + assert isinstance(storage_api, StorageApi) + catalog_service = get_catalog_service( + rpc_client=rpc_client, + user_id=identity.user_id, + product_name=identity.product_name, + ) + + storage_service = get_storage_service( + rpc_client=rpc_client, + user_id=identity.user_id, + product_name=identity.product_name, + ) + directorv2_service = get_directorv2_service(rpc_client=rpc_client) + + solver_service = get_solver_service( + catalog_service=catalog_service, + user_id=identity.user_id, + product_name=identity.product_name, + ) + + job_service = get_job_service( + web_rest_api=web_server_rest_client, + director2_api=director2_api, + storage_api=storage_api, + web_rpc_api=web_api_rpc_client, + storage_service=storage_service, + directorv2_service=directorv2_service, + user_id=identity.user_id, + product_name=identity.product_name, + solver_service=solver_service, + ) + + return get_function_job_service( + web_rpc_api=web_api_rpc_client, + job_service=job_service, + user_id=identity.user_id, + product_name=identity.product_name, + ) + + +async def run_function( + task: Task, + task_id: TaskID, + *, + identity: Identity, # user identity + function: RegisteredFunction, + function_inputs: FunctionInputs, + 
pricing_spec: JobPricingSpecification | None, + job_links: JobLinks, + x_simcore_parent_project_uuid: NodeID | None, + x_simcore_parent_node_id: NodeID | None, +): + app = get_app_server(task.app).app + function_job_service = await _assemble_function_job_service(app, identity) + + return await function_job_service.run_function( + function=function, + function_inputs=function_inputs, + pricing_spec=pricing_spec, + job_links=job_links, + x_simcore_parent_project_uuid=x_simcore_parent_project_uuid, + x_simcore_parent_node_id=x_simcore_parent_node_id, + ) diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_main.py b/services/api-server/src/simcore_service_api_server/celery/worker_main.py index 7e3297b6c4c..a941526a82e 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_main.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_main.py @@ -13,7 +13,7 @@ from ..core.application import create_app from ..core.settings import ApplicationSettings -from ._worker_tasks import setup_worker_tasks +from ._worker_tasks.tasks import setup_worker_tasks _settings = ApplicationSettings.create_from_envs() From 1b43976af68105212d77ae356a22c09db49d6f9f Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 19 Aug 2025 16:25:46 +0200 Subject: [PATCH 007/111] minor adjustments --- .../celery/_worker_tasks/_functions_tasks.py | 1 + 1 file changed, 1 insertion(+) diff --git a/services/api-server/src/simcore_service_api_server/celery/_worker_tasks/_functions_tasks.py b/services/api-server/src/simcore_service_api_server/celery/_worker_tasks/_functions_tasks.py index 4e0065ecf13..22308a87daa 100644 --- a/services/api-server/src/simcore_service_api_server/celery/_worker_tasks/_functions_tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/_worker_tasks/_functions_tasks.py @@ -95,6 +95,7 @@ async def run_function( x_simcore_parent_project_uuid: NodeID | None, x_simcore_parent_node_id: NodeID | None, ): + 
assert task_id # nosec app = get_app_server(task.app).app function_job_service = await _assemble_function_job_service(app, identity) From 25eaaaa27b4539a89f1223709794f3cc0dd46da0 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 09:11:01 +0200 Subject: [PATCH 008/111] improve docs and minor changes --- .../api-server/docs/api-server.drawio.svg | 483 +++++++++++------- .../celery/_worker_tasks/_functions_tasks.py | 40 +- 2 files changed, 307 insertions(+), 216 deletions(-) diff --git a/services/api-server/docs/api-server.drawio.svg b/services/api-server/docs/api-server.drawio.svg index 98f7dcfdfc6..43ad9b7f592 100644 --- a/services/api-server/docs/api-server.drawio.svg +++ b/services/api-server/docs/api-server.drawio.svg @@ -1,19 +1,19 @@ - + - + - + -
+
CONTROLLER @@ -21,20 +21,20 @@
- + CONTROLLER - + -
+
SERVICE @@ -42,20 +42,20 @@
- + SERVICE - + -
+
REPOSITORY @@ -63,20 +63,20 @@
- + REPOSITORY - + -
+
CLIENTS @@ -84,7 +84,7 @@
- + CLIENTS @@ -92,13 +92,13 @@ - + -
+
CONTROLLER @@ -106,20 +106,20 @@
- + CONTROLLER - + -
+
SERVICE @@ -127,20 +127,20 @@
- + SERVICE - + -
+
REPOSITORY @@ -148,20 +148,20 @@
- + REPOSITORY - + -
+
CLIENTS @@ -169,24 +169,24 @@
- + CLIENTS - - + + - + -
+
rest @@ -194,24 +194,24 @@
- + rest - - + + - + -
+
rpc @@ -219,24 +219,24 @@
- + rpc - - + + - + -
+
projects @@ -244,24 +244,24 @@
- + projects - - + + - + -
+
projects @@ -269,20 +269,20 @@
- + projects - + -
+
sa[asyngpg] @@ -290,7 +290,7 @@
- + sa[asyngpg] @@ -298,13 +298,13 @@ - + -
+
CONTROLLER @@ -312,20 +312,20 @@
- + CONTROLLER - + -
+
SERVICE @@ -333,20 +333,20 @@
- + SERVICE - + -
+
REPOSITORY @@ -354,20 +354,20 @@
- + REPOSITORY - + -
+
CLIENTS @@ -375,20 +375,20 @@
- + CLIENTS - + -
+
rest @@ -396,24 +396,24 @@
- + rest - - + + - + -
+
rpc @@ -421,24 +421,24 @@
- + rpc - - + + - + -
+
services @@ -446,24 +446,24 @@
- + services - - + + - + -
+
catalog_srv @@ -471,20 +471,20 @@
- + catalog_srv - + -
+
sa[asyncpg] @@ -492,28 +492,28 @@
- + sa[asyncpg] - - + + - - + + - + -
+
SolverService @@ -521,24 +521,24 @@
- + SolverService - - + + - + -
+
CatalogService @@ -546,21 +546,21 @@
- + CatalogService - - + + -
+
httpx @@ -568,20 +568,20 @@
- + httpx - + -
+
AuthSession @@ -589,28 +589,28 @@
- + AuthSession - - + + - - + + - + -
+
WbApiRpcClient @@ -618,24 +618,24 @@
- + WbApiRpcClient - - + + - + -
+
sa[asyncg] @@ -643,20 +643,20 @@
- + sa[asyncg] - + -
+
simcore_service_catalog @@ -664,20 +664,20 @@
- + simcore_ser... - + -
+
simcore_service_webserver @@ -685,45 +685,24 @@
- + simcore_ser... - + + - - - -
-
-
- simcore_service_api_server -
-
-
-
- - simcore_ser... - -
-
-
- - - - - - + -
+
/solvers @@ -731,24 +710,24 @@
- + /solvers - - + + - + -
+
ProgramsService @@ -756,24 +735,24 @@
- + ProgramsService - - + + - + -
+
/programs @@ -781,24 +760,24 @@
- + /programs - - + + - + -
+
StudyService @@ -806,24 +785,24 @@
- + StudyService - - + + - + -
+
/studies @@ -831,28 +810,28 @@
- + /studies - - + + - - + + - + -
+
JobService @@ -860,35 +839,35 @@
- + JobService - - + + - - + + - + - - + + - + -
+
ApiKeysRepository @@ -896,24 +875,24 @@
- + ApiKeysRepository - - + + - + -
+
UsersRepository @@ -921,24 +900,24 @@
- + UsersRepository - - + + - + -
+
RabbitMQ @@ -948,20 +927,20 @@
- + RabbitMQ... - + -
+
Dependencies go inwards @@ -969,18 +948,126 @@
- + Dependencies go inwards - - + + + + + + + + + + + + + +
+
+
+ JobService +
+
+
+
+ + JobService + +
+
+
+ + + + + + + + + + + + + + + + + + + +
+
+
+ /studies/jobs +
+
+
+
+ + /studies/j... + +
+
+
+ + + + + + + + + + + +
+
+
+ /solvers/jobs +
+
+
+
+ + /solvers/j... + +
+
+
+ + + + + + + + + + + +
+
+
+ /programs/jobs +
+
+
+
+ + /programs/... + +
+
- + + diff --git a/services/api-server/src/simcore_service_api_server/celery/_worker_tasks/_functions_tasks.py b/services/api-server/src/simcore_service_api_server/celery/_worker_tasks/_functions_tasks.py index 22308a87daa..63b5fd8fe6a 100644 --- a/services/api-server/src/simcore_service_api_server/celery/_worker_tasks/_functions_tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/_worker_tasks/_functions_tasks.py @@ -25,42 +25,44 @@ from ...services_http.storage import StorageApi -async def _assemble_function_job_service(app: FastAPI, identity: Identity): +async def _assemble_function_job_service(*, app: FastAPI, user_identity: Identity): # to avoid this show we could introduce a dependency injection # system which is not linked to FastAPI (i.e. can be resolved manually). - # See also https://github.com/fastapi/fastapi/issues/1105#issuecomment-609919850 + # One suggestion: https://github.com/ets-labs/python-dependency-injector, which is compatible + # with FastAPI's Depends. + # See also https://github.com/fastapi/fastapi/issues/1105#issuecomment-609919850. 
settings = app.state.settings assert settings.API_SERVER_WEBSERVER # nosec session_cookie = get_session_cookie( - identity=identity.email, settings=settings.API_SERVER_WEBSERVER, app=app + identity=user_identity.email, settings=settings.API_SERVER_WEBSERVER, app=app ) rpc_client = get_rabbitmq_rpc_client(app=app) web_server_rest_client = get_webserver_session( - app=app, session_cookies=session_cookie, identity=identity + app=app, session_cookies=session_cookie, identity=user_identity ) web_api_rpc_client = await get_wb_api_rpc_client(app=app) director2_api = get_api_client(DirectorV2Api) - assert isinstance(director2_api, DirectorV2Api) + assert isinstance(director2_api, DirectorV2Api) # nosec storage_api = get_api_client(StorageApi) - assert isinstance(storage_api, StorageApi) + assert isinstance(storage_api, StorageApi) # nosec catalog_service = get_catalog_service( rpc_client=rpc_client, - user_id=identity.user_id, - product_name=identity.product_name, + user_id=user_identity.user_id, + product_name=user_identity.product_name, ) storage_service = get_storage_service( rpc_client=rpc_client, - user_id=identity.user_id, - product_name=identity.product_name, + user_id=user_identity.user_id, + product_name=user_identity.product_name, ) directorv2_service = get_directorv2_service(rpc_client=rpc_client) solver_service = get_solver_service( catalog_service=catalog_service, - user_id=identity.user_id, - product_name=identity.product_name, + user_id=user_identity.user_id, + product_name=user_identity.product_name, ) job_service = get_job_service( @@ -70,16 +72,16 @@ async def _assemble_function_job_service(app: FastAPI, identity: Identity): web_rpc_api=web_api_rpc_client, storage_service=storage_service, directorv2_service=directorv2_service, - user_id=identity.user_id, - product_name=identity.product_name, + user_id=user_identity.user_id, + product_name=user_identity.product_name, solver_service=solver_service, ) return get_function_job_service( 
web_rpc_api=web_api_rpc_client, job_service=job_service, - user_id=identity.user_id, - product_name=identity.product_name, + user_id=user_identity.user_id, + product_name=user_identity.product_name, ) @@ -87,7 +89,7 @@ async def run_function( task: Task, task_id: TaskID, *, - identity: Identity, # user identity + user_identity: Identity, function: RegisteredFunction, function_inputs: FunctionInputs, pricing_spec: JobPricingSpecification | None, @@ -97,7 +99,9 @@ async def run_function( ): assert task_id # nosec app = get_app_server(task.app).app - function_job_service = await _assemble_function_job_service(app, identity) + function_job_service = await _assemble_function_job_service( + app=app, user_identity=user_identity + ) return await function_job_service.run_function( function=function, From ed6ed1e795d642a13a5a244f2d271e13ebad5451 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 09:23:09 +0200 Subject: [PATCH 009/111] cleanup and reset docs --- .../api-server/docs/api-server.drawio.svg | 483 +++++++----------- .../_service_studies.py | 27 - 2 files changed, 198 insertions(+), 312 deletions(-) delete mode 100644 services/api-server/src/simcore_service_api_server/_service_studies.py diff --git a/services/api-server/docs/api-server.drawio.svg b/services/api-server/docs/api-server.drawio.svg index 43ad9b7f592..98f7dcfdfc6 100644 --- a/services/api-server/docs/api-server.drawio.svg +++ b/services/api-server/docs/api-server.drawio.svg @@ -1,19 +1,19 @@ - + - + - + -
+
CONTROLLER @@ -21,20 +21,20 @@
- + CONTROLLER - + -
+
SERVICE @@ -42,20 +42,20 @@
- + SERVICE - + -
+
REPOSITORY @@ -63,20 +63,20 @@
- + REPOSITORY - + -
+
CLIENTS @@ -84,7 +84,7 @@
- + CLIENTS @@ -92,13 +92,13 @@ - + -
+
CONTROLLER @@ -106,20 +106,20 @@
- + CONTROLLER - + -
+
SERVICE @@ -127,20 +127,20 @@
- + SERVICE - + -
+
REPOSITORY @@ -148,20 +148,20 @@
- + REPOSITORY - + -
+
CLIENTS @@ -169,24 +169,24 @@
- + CLIENTS - - + + - + -
+
rest @@ -194,24 +194,24 @@
- + rest - - + + - + -
+
rpc @@ -219,24 +219,24 @@
- + rpc - - + + - + -
+
projects @@ -244,24 +244,24 @@
- + projects - - + + - + -
+
projects @@ -269,20 +269,20 @@
- + projects - + -
+
sa[asyngpg] @@ -290,7 +290,7 @@
- + sa[asyngpg] @@ -298,13 +298,13 @@ - + -
+
CONTROLLER @@ -312,20 +312,20 @@
- + CONTROLLER - + -
+
SERVICE @@ -333,20 +333,20 @@
- + SERVICE - + -
+
REPOSITORY @@ -354,20 +354,20 @@
- + REPOSITORY - + -
+
CLIENTS @@ -375,20 +375,20 @@
- + CLIENTS - + -
+
rest @@ -396,24 +396,24 @@
- + rest - - + + - + -
+
rpc @@ -421,24 +421,24 @@
- + rpc - - + + - + -
+
services @@ -446,24 +446,24 @@
- + services - - + + - + -
+
catalog_srv @@ -471,20 +471,20 @@
- + catalog_srv - + -
+
sa[asyncpg] @@ -492,28 +492,28 @@
- + sa[asyncpg] - - + + - - + + - + -
+
SolverService @@ -521,24 +521,24 @@
- + SolverService - - + + - + -
+
CatalogService @@ -546,21 +546,21 @@
- + CatalogService - - + + -
+
httpx @@ -568,20 +568,20 @@
- + httpx - + -
+
AuthSession @@ -589,28 +589,28 @@
- + AuthSession - - + + - - + + - + -
+
WbApiRpcClient @@ -618,24 +618,24 @@
- + WbApiRpcClient - - + + - + -
+
sa[asyncg] @@ -643,20 +643,20 @@
- + sa[asyncg] - + -
+
simcore_service_catalog @@ -664,20 +664,20 @@
- + simcore_ser... - + -
+
simcore_service_webserver @@ -685,24 +685,45 @@
- + simcore_ser... - - + - + + + +
+
+
+ simcore_service_api_server +
+
+
+
+ + simcore_ser... + +
+
+
+ + + + + + -
+
/solvers @@ -710,24 +731,24 @@
- + /solvers - - + + - + -
+
ProgramsService @@ -735,24 +756,24 @@
- + ProgramsService - - + + - + -
+
/programs @@ -760,24 +781,24 @@
- + /programs - - + + - + -
+
StudyService @@ -785,24 +806,24 @@
- + StudyService - - + + - + -
+
/studies @@ -810,28 +831,28 @@
- + /studies - - + + - - + + - + -
+
JobService @@ -839,35 +860,35 @@
- + JobService - - + + - - + + - + - - + + - + -
+
ApiKeysRepository @@ -875,24 +896,24 @@
- + ApiKeysRepository - - + + - + -
+
UsersRepository @@ -900,24 +921,24 @@
- + UsersRepository - - + + - + -
+
RabbitMQ @@ -927,20 +948,20 @@
- + RabbitMQ... - + -
+
Dependencies go inwards @@ -948,126 +969,18 @@
- + Dependencies go inwards - - - - - - - - - - - - - -
-
-
- JobService -
-
-
-
- - JobService - -
-
-
- - - - - - - - - - - - - - - - - - - -
-
-
- /studies/jobs -
-
-
-
- - /studies/j... - -
-
-
- - - - - - - - - - - -
-
-
- /solvers/jobs -
-
-
-
- - /solvers/j... - -
-
-
- - - - - - - - - - - -
-
-
- /programs/jobs -
-
-
-
- - /programs/... - -
-
+ +
- - + diff --git a/services/api-server/src/simcore_service_api_server/_service_studies.py b/services/api-server/src/simcore_service_api_server/_service_studies.py deleted file mode 100644 index 89fa5196e34..00000000000 --- a/services/api-server/src/simcore_service_api_server/_service_studies.py +++ /dev/null @@ -1,27 +0,0 @@ -from dataclasses import dataclass - -from models_library.products import ProductName -from models_library.rest_pagination import ( - MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE, -) -from models_library.users import UserID - -from ._service_jobs import JobService -from ._service_utils import check_user_product_consistency - -DEFAULT_PAGINATION_LIMIT = MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE - 1 - - -@dataclass(frozen=True, kw_only=True) -class StudyService: - job_service: JobService - user_id: UserID - product_name: ProductName - - def __post_init__(self): - check_user_product_consistency( - service_cls_name=self.__class__.__name__, - service_provider=self.job_service, - user_id=self.user_id, - product_name=self.product_name, - ) From 2f57d5efa02b8f286d3792526c6a23293c13b2c9 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 09:56:21 +0200 Subject: [PATCH 010/111] register task --- .../celery/_worker_tasks/tasks.py | 22 +++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/celery/_worker_tasks/tasks.py b/services/api-server/src/simcore_service_api_server/celery/_worker_tasks/tasks.py index bc8edc856e8..45cafcf45c7 100644 --- a/services/api-server/src/simcore_service_api_server/celery/_worker_tasks/tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/_worker_tasks/tasks.py @@ -1,15 +1,33 @@ import logging from celery import Celery # type: ignore[import-untyped] +from celery_library.task import register_task from celery_library.types import register_celery_types, register_pydantic_types +from models_library.functions import ( + 
RegisteredProjectFunction, + RegisteredPythonCodeFunction, + RegisteredSolverFunction, +) from servicelib.logging_utils import log_context +from ...api.dependencies.authentication import Identity +from ...models.api_resources import JobLinks +from ...models.schemas.jobs import JobPricingSpecification +from ._functions_tasks import run_function + _logger = logging.getLogger(__name__) def setup_worker_tasks(app: Celery) -> None: register_celery_types() - register_pydantic_types() + register_pydantic_types( + Identity, + RegisteredProjectFunction, + RegisteredPythonCodeFunction, + RegisteredSolverFunction, + JobPricingSpecification, + JobLinks, + ) with log_context(_logger, logging.INFO, msg="worker task registration"): - pass + register_task(app, run_function) From f4b4af88610e2859d68be000eadd8cf09fec637c Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 10:34:24 +0200 Subject: [PATCH 011/111] add celery client inside api-server --- services/api-server/requirements/dev.txt | 1 + .../api/dependencies/celery.py | 40 +++++++++++++++++++ .../core/application.py | 4 ++ 3 files changed, 45 insertions(+) create mode 100644 services/api-server/src/simcore_service_api_server/api/dependencies/celery.py diff --git a/services/api-server/requirements/dev.txt b/services/api-server/requirements/dev.txt index 5afc552d753..85f3f1c428e 100644 --- a/services/api-server/requirements/dev.txt +++ b/services/api-server/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/celery-library/ --editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/postgres-database diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py new file mode 100644 index 00000000000..14c04da29d4 --- /dev/null +++ 
b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py @@ -0,0 +1,40 @@ +from celery_library.common import create_app, create_task_manager +from celery_library.task_manager import CeleryTaskManager +from celery_library.types import register_celery_types, register_pydantic_types +from fastapi import FastAPI +from models_library.functions import ( + RegisteredProjectFunction, + RegisteredPythonCodeFunction, + RegisteredSolverFunction, +) +from settings_library.celery import CelerySettings + +from ...api.dependencies.authentication import Identity +from ...models.api_resources import JobLinks +from ...models.schemas.jobs import JobPricingSpecification + + +def setup_task_manager(app: FastAPI, celery_settings: CelerySettings) -> None: + async def on_startup() -> None: + app.state.task_manager = await create_task_manager( + create_app(celery_settings), celery_settings + ) + + register_celery_types() + register_pydantic_types( + Identity, + RegisteredProjectFunction, + RegisteredPythonCodeFunction, + RegisteredSolverFunction, + JobPricingSpecification, + JobLinks, + ) + + app.add_event_handler("startup", on_startup) + + +def get_task_manager_from_app(app: FastAPI) -> CeleryTaskManager: + assert hasattr(app.state, "task_manager") # nosec + task_manager = app.state.task_manager + assert isinstance(task_manager, CeleryTaskManager) # nosec + return task_manager diff --git a/services/api-server/src/simcore_service_api_server/core/application.py b/services/api-server/src/simcore_service_api_server/core/application.py index 33505c35c5f..ca030b39e88 100644 --- a/services/api-server/src/simcore_service_api_server/core/application.py +++ b/services/api-server/src/simcore_service_api_server/core/application.py @@ -13,6 +13,7 @@ from .. 
import exceptions from .._meta import API_VERSION, API_VTAG, APP_NAME +from ..api.dependencies.celery import setup_task_manager from ..api.root import create_router from ..api.routes.health import router as health_router from ..clients.postgres import setup_postgres @@ -88,6 +89,9 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: setup_rabbitmq(app) + if settings.API_SERVER_CELERY and not settings.API_SERVER_WORKER_MODE: + setup_task_manager(app, settings.API_SERVER_CELERY) + if app.state.settings.API_SERVER_PROMETHEUS_INSTRUMENTATION_ENABLED: setup_prometheus_instrumentation(app) From 8b3ed889cb070aa918d7b84967f2e9a5f314482b Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 10:34:45 +0200 Subject: [PATCH 012/111] ensure requirements are set --- services/api-server/requirements/ci.txt | 1 + services/api-server/requirements/prod.txt | 1 + 2 files changed, 2 insertions(+) diff --git a/services/api-server/requirements/ci.txt b/services/api-server/requirements/ci.txt index cc1799cee07..9d4fff8972a 100644 --- a/services/api-server/requirements/ci.txt +++ b/services/api-server/requirements/ci.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +simcore-celery-library @ ../../packages/celery-library/ simcore-common-library @ ../../packages/common-library simcore-models-library @ ../../packages/models-library simcore-postgres-database @ ../../packages/postgres-database/ diff --git a/services/api-server/requirements/prod.txt b/services/api-server/requirements/prod.txt index 9d4d747507e..9df71af1a63 100644 --- a/services/api-server/requirements/prod.txt +++ b/services/api-server/requirements/prod.txt @@ -10,6 +10,7 @@ --requirement _base.txt # installs this repo's packages +simcore-celery-library @ ../../packages/celery-library/ simcore-models-library @ ../../packages/models-library simcore-common-library @ ../../packages/common-library/ simcore-postgres-database @ ../../packages/postgres-database/ From 
94324cf9df35c8039a5969d2328f5977f58f07a1 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 10:50:53 +0200 Subject: [PATCH 013/111] cleanup fixtures --- .../api-server/tests/unit/celery/conftest.py | 83 +++++++++++++++++++ .../tests/unit/celery/test_functions.py | 5 ++ 2 files changed, 88 insertions(+) create mode 100644 services/api-server/tests/unit/celery/conftest.py create mode 100644 services/api-server/tests/unit/celery/test_functions.py diff --git a/services/api-server/tests/unit/celery/conftest.py b/services/api-server/tests/unit/celery/conftest.py new file mode 100644 index 00000000000..661518973b4 --- /dev/null +++ b/services/api-server/tests/unit/celery/conftest.py @@ -0,0 +1,83 @@ +from collections.abc import AsyncIterator, Callable +from functools import partial + +import pytest +from celery import Celery +from celery.contrib.testing.worker import TestWorkController, start_worker +from celery.signals import worker_init, worker_shutdown +from celery.worker.worker import WorkController +from celery_library.signals import on_worker_init, on_worker_shutdown +from pytest_simcore.helpers.monkeypatch_envs import delenvs_from_dict, setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict +from servicelib.fastapi.celery.app_server import FastAPIAppServer +from simcore_service_api_server.celery.worker_main import setup_worker_tasks +from simcore_service_api_server.core.application import create_app +from simcore_service_api_server.core.settings import ApplicationSettings + +pytest_plugins = [ + "pytest_simcore.rabbit_service", +] + + +@pytest.fixture +def app_environment( + monkeypatch: pytest.MonkeyPatch, + app_environment: EnvVarsDict, + rabbit_env_vars_dict: EnvVarsDict, +) -> EnvVarsDict: + # do not init other services + delenvs_from_dict(monkeypatch, ["API_SERVER_RABBITMQ"]) + return setenvs_from_dict( + monkeypatch, + { + **rabbit_env_vars_dict, + "API_SERVER_POSTGRES": "null", + 
"API_SERVER_HEALTH_CHECK_TASK_PERIOD_SECONDS": "3", + "API_SERVER_HEALTH_CHECK_TASK_TIMEOUT_SECONDS": "1", + }, + ) + + +@pytest.fixture +def register_celery_tasks() -> Callable[[Celery], None]: + """override if tasks are needed""" + + def _(celery_app: Celery) -> None: ... + + return _ + + +@pytest.fixture +async def with_storage_celery_worker( + app_environment: EnvVarsDict, + celery_app: Celery, + monkeypatch: pytest.MonkeyPatch, + register_celery_tasks: Callable[[Celery], None], +) -> AsyncIterator[TestWorkController]: + # Signals must be explicitily connected + monkeypatch.setenv("API_SERVER_WORKER_MODE", "true") + app_settings = ApplicationSettings.create_from_envs() + + app_server = FastAPIAppServer(app=create_app(app_settings)) + + def _on_worker_init_wrapper(sender: WorkController, **_kwargs): + assert app_settings.API_SERVER_CELERY # nosec + return partial(on_worker_init, app_server, app_settings.API_SERVER_CELERY)( + sender, **_kwargs + ) + + worker_init.connect(_on_worker_init_wrapper) + worker_shutdown.connect(on_worker_shutdown) + + setup_worker_tasks(celery_app) + register_celery_tasks(celery_app) + + with start_worker( + celery_app, + pool="threads", + concurrency=1, + loglevel="info", + perform_ping_check=False, + queues="default", + ) as worker: + yield worker diff --git a/services/api-server/tests/unit/celery/test_functions.py b/services/api-server/tests/unit/celery/test_functions.py new file mode 100644 index 00000000000..5be6bae3096 --- /dev/null +++ b/services/api-server/tests/unit/celery/test_functions.py @@ -0,0 +1,5 @@ +from celery.contrib.testing.worker import TestWorkController + + +async def test_with_fake_run_function(with_storage_celery_worker: TestWorkController): + pass From e279206d801a00394711d528a10959483f927dc2 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 11:12:27 +0200 Subject: [PATCH 014/111] insert fake run_function --- .../api-server/tests/unit/celery/conftest.py | 10 ++- 
.../tests/unit/celery/test_functions.py | 63 +++++++++++++++++++ 2 files changed, 72 insertions(+), 1 deletion(-) diff --git a/services/api-server/tests/unit/celery/conftest.py b/services/api-server/tests/unit/celery/conftest.py index 661518973b4..980c33b04c2 100644 --- a/services/api-server/tests/unit/celery/conftest.py +++ b/services/api-server/tests/unit/celery/conftest.py @@ -47,12 +47,19 @@ def _(celery_app: Celery) -> None: ... return _ +@pytest.fixture +def add_worker_tasks() -> bool: + "override to not add default worker tasks" + return True + + @pytest.fixture async def with_storage_celery_worker( app_environment: EnvVarsDict, celery_app: Celery, monkeypatch: pytest.MonkeyPatch, register_celery_tasks: Callable[[Celery], None], + add_worker_tasks: bool, ) -> AsyncIterator[TestWorkController]: # Signals must be explicitily connected monkeypatch.setenv("API_SERVER_WORKER_MODE", "true") @@ -69,7 +76,8 @@ def _on_worker_init_wrapper(sender: WorkController, **_kwargs): worker_init.connect(_on_worker_init_wrapper) worker_shutdown.connect(on_worker_shutdown) - setup_worker_tasks(celery_app) + if add_worker_tasks: + setup_worker_tasks(celery_app) register_celery_tasks(celery_app) with start_worker( diff --git a/services/api-server/tests/unit/celery/test_functions.py b/services/api-server/tests/unit/celery/test_functions.py index 5be6bae3096..9a362919def 100644 --- a/services/api-server/tests/unit/celery/test_functions.py +++ b/services/api-server/tests/unit/celery/test_functions.py @@ -1,5 +1,68 @@ +from collections.abc import Callable + +import pytest +from celery import Celery, Task from celery.contrib.testing.worker import TestWorkController +from celery_library.task import register_task +from faker import Faker +from models_library.functions import ( + FunctionClass, + FunctionID, + FunctionInputs, + FunctionJobID, + RegisteredFunction, + RegisteredProjectFunctionJob, +) +from models_library.projects import ProjectID +from servicelib.celery.models import TaskID 
+from simcore_service_api_server.api.dependencies.authentication import Identity +from simcore_service_api_server.celery._worker_tasks._functions_tasks import ( + run_function as run_function_task, +) +from simcore_service_api_server.models.api_resources import JobLinks +from simcore_service_api_server.models.schemas.jobs import ( + JobPricingSpecification, + NodeID, +) + +_faker = Faker() + + +def _register_fake_run_function_task() -> Callable[[Celery], None]: + + async def run_function( + task: Task, + task_id: TaskID, + *, + user_identity: Identity, + function: RegisteredFunction, + function_inputs: FunctionInputs, + pricing_spec: JobPricingSpecification | None, + job_links: JobLinks, + x_simcore_parent_project_uuid: NodeID | None, + x_simcore_parent_node_id: NodeID | None, + ): + return RegisteredProjectFunctionJob( + title=_faker.sentence(), + description=_faker.paragraph(), + function_uid=FunctionID(_faker.uuid4()), + inputs=function_inputs, + outputs=None, + function_class=FunctionClass.PROJECT, + uid=FunctionJobID(_faker.uuid4()), + created_at=_faker.date_time(), + project_job_id=ProjectID(_faker.uuid4()), + ) + + assert run_function_task.__name__ == run_function.__name__ + + def _(celery_app: Celery) -> None: + register_task(celery_app, run_function) + + return _ +@pytest.mark.parametrize("register_celery_tasks", [_register_fake_run_function_task]) +@pytest.mark.parametrize("add_worker_tasks", [False]) async def test_with_fake_run_function(with_storage_celery_worker: TestWorkController): pass From 8e49543ca739c24495e4f7b9a3916af10ce7a0a8 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 11:14:56 +0200 Subject: [PATCH 015/111] check mock --- services/api-server/tests/unit/celery/test_functions.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/services/api-server/tests/unit/celery/test_functions.py b/services/api-server/tests/unit/celery/test_functions.py index 9a362919def..02709d70110 100644 --- 
a/services/api-server/tests/unit/celery/test_functions.py +++ b/services/api-server/tests/unit/celery/test_functions.py @@ -1,3 +1,4 @@ +import inspect from collections.abc import Callable import pytest @@ -54,7 +55,11 @@ async def run_function( project_job_id=ProjectID(_faker.uuid4()), ) + # check our mock task is correct assert run_function_task.__name__ == run_function.__name__ + assert inspect.signature(run_function_task) == inspect.signature( + run_function + ), f"Signature mismatch: {inspect.signature(run_function_task)} != {inspect.signature(run_function)}" def _(celery_app: Celery) -> None: register_task(celery_app, run_function) From 80a7cca50b64079ffd85e948744f6ff508a8a44d Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 11:43:20 +0200 Subject: [PATCH 016/111] transform run function endpoint --- .../api/dependencies/celery.py | 4 ++ .../api/routes/functions_routes.py | 45 ++++++++++++++++--- .../api/routes/tasks.py | 7 ++- 3 files changed, 45 insertions(+), 11 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py index 14c04da29d4..2dce316f07c 100644 --- a/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py +++ b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py @@ -1,3 +1,5 @@ +from typing import Final + from celery_library.common import create_app, create_task_manager from celery_library.task_manager import CeleryTaskManager from celery_library.types import register_celery_types, register_pydantic_types @@ -13,6 +15,8 @@ from ...models.api_resources import JobLinks from ...models.schemas.jobs import JobPricingSpecification +ASYNC_JOB_CLIENT_NAME: Final[str] = "API_SERVER" + def setup_task_manager(app: FastAPI, celery_settings: CelerySettings) -> None: async def on_startup() -> None: diff --git 
a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index e1d05e129c3..2090a3f01d2 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -2,6 +2,7 @@ from collections.abc import Callable from typing import Annotated, Final, Literal +from celery_library.task_manager import CeleryTaskManager from fastapi import APIRouter, Depends, Header, Request, status from fastapi_pagination.api import create_page from fastapi_pagination.bases import AbstractPage @@ -16,19 +17,29 @@ RegisteredFunctionJob, RegisteredFunctionJobCollection, ) +from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobFilter from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.users import UserID +from servicelib.celery.models import TaskFilter, TaskMetadata from servicelib.fastapi.dependencies import get_reverse_url_mapper +from servicelib.long_running_tasks.models import TaskGet from ..._service_function_jobs import FunctionJobService from ..._service_functions import FunctionService +from ...celery._worker_tasks._functions_tasks import run_function from ...models.pagination import Page, PaginationParams from ...models.schemas.errors import ErrorGet from ...models.schemas.jobs import JobPricingSpecification from ...services_rpc.wb_api_server import WbApiRpcClient -from ..dependencies.authentication import get_current_user_id, get_product_name +from ..dependencies.authentication import ( + Identity, + get_current_identity, + get_current_user_id, + get_product_name, +) +from ..dependencies.celery import ASYNC_JOB_CLIENT_NAME, get_task_manager_from_app from ..dependencies.services import ( get_function_job_service, 
get_function_service, @@ -304,7 +315,7 @@ async def validate_function_inputs( @function_router.post( "/{function_id:uuid}:run", - response_model=RegisteredFunctionJob, + response_model=TaskGet, responses={**_COMMON_FUNCTION_ERROR_RESPONSES}, description=create_route_description( base="Run function", @@ -313,16 +324,15 @@ async def validate_function_inputs( ) async def run_function( # noqa: PLR0913 request: Request, + user_identity: Annotated[Identity, Depends(get_current_identity)], to_run_function: Annotated[RegisteredFunction, Depends(get_function)], url_for: Annotated[Callable, Depends(get_reverse_url_mapper)], function_inputs: FunctionInputs, function_service: Annotated[FunctionService, Depends(get_function_service)], - function_jobs_service: Annotated[ - FunctionJobService, Depends(get_function_job_service) - ], + task_manager: Annotated[CeleryTaskManager, Depends(get_task_manager_from_app)], x_simcore_parent_project_uuid: Annotated[ProjectID | Literal["null"], Header()], x_simcore_parent_node_id: Annotated[NodeID | Literal["null"], Header()], -) -> RegisteredFunctionJob: +) -> TaskGet: parent_project_uuid = ( x_simcore_parent_project_uuid if isinstance(x_simcore_parent_project_uuid, ProjectID) @@ -336,7 +346,20 @@ async def run_function( # noqa: PLR0913 pricing_spec = JobPricingSpecification.create_from_headers(request.headers) job_links = await function_service.get_function_job_links(to_run_function, url_for) - return await function_jobs_service.run_function( + job_filter = AsyncJobFilter( + user_id=user_identity.user_id, + product_name=user_identity.product_name, + client_name=ASYNC_JOB_CLIENT_NAME, + ) + task_filter = TaskFilter.model_validate(job_filter.model_dump()) + task_name = run_function.__name__ + + task_uuid = await task_manager.submit_task( + TaskMetadata( + name=task_name, + ), + task_filter=task_filter, + user_identity=user_identity, function=to_run_function, function_inputs=function_inputs, pricing_spec=pricing_spec, @@ -345,6 +368,14 @@ async 
def run_function( # noqa: PLR0913 x_simcore_parent_node_id=parent_node_id, ) + return TaskGet( + task_id=f"{task_uuid}", + task_name=task_name, + status_href=url_for("get_task_status", task_id=task_uuid), + result_href=url_for("get_task_result", task_id=task_uuid), + abort_href=url_for("cancel_task", task_id=task_uuid), + ) + @function_router.delete( "/{function_id:uuid}", diff --git a/services/api-server/src/simcore_service_api_server/api/routes/tasks.py b/services/api-server/src/simcore_service_api_server/api/routes/tasks.py index ff0f12f2d69..fb52bc26795 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/tasks.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/tasks.py @@ -1,5 +1,5 @@ import logging -from typing import Annotated, Any, Final +from typing import Annotated, Any from fastapi import APIRouter, Depends, FastAPI, status from models_library.api_schemas_long_running_tasks.base import TaskProgress @@ -20,21 +20,20 @@ from ...models.schemas.errors import ErrorGet from ...services_rpc.async_jobs import AsyncJobClient from ..dependencies.authentication import get_current_user_id, get_product_name +from ..dependencies.celery import ASYNC_JOB_CLIENT_NAME from ..dependencies.tasks import get_async_jobs_client from ._constants import ( FMSG_CHANGELOG_NEW_IN_VERSION, create_route_description, ) -_ASYNC_JOB_CLIENT_NAME: Final[str] = "API_SERVER" - router = APIRouter() _logger = logging.getLogger(__name__) def _get_job_filter(user_id: UserID, product_name: ProductName) -> AsyncJobFilter: return AsyncJobFilter( - user_id=user_id, product_name=product_name, client_name=_ASYNC_JOB_CLIENT_NAME + user_id=user_id, product_name=product_name, client_name=ASYNC_JOB_CLIENT_NAME ) From a06118f2c940783cde64bed0cf3fc388e1e911c9 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 13:13:37 +0200 Subject: [PATCH 017/111] add pytest-celery plugin --- services/api-server/requirements/_test.in | 1 + 
services/api-server/requirements/_test.txt | 64 ++++++++++++++++++++++ 2 files changed, 65 insertions(+) diff --git a/services/api-server/requirements/_test.in b/services/api-server/requirements/_test.in index 805e1f7a7af..c886d24ec33 100644 --- a/services/api-server/requirements/_test.in +++ b/services/api-server/requirements/_test.in @@ -23,6 +23,7 @@ pact-python pyinstrument pytest pytest-asyncio +pytest-celery pytest-cov pytest-docker pytest-mock diff --git a/services/api-server/requirements/_test.txt b/services/api-server/requirements/_test.txt index 7b97932b9fe..90c81d59f9c 100644 --- a/services/api-server/requirements/_test.txt +++ b/services/api-server/requirements/_test.txt @@ -17,6 +17,10 @@ alembic==1.14.0 # via # -c requirements/_base.txt # -r requirements/_test.in +amqp==5.3.1 + # via + # -c requirements/_base.txt + # kombu annotated-types==0.7.0 # via # -c requirements/_base.txt @@ -43,6 +47,10 @@ aws-sam-translator==1.55.0 # cfn-lint aws-xray-sdk==2.14.0 # via moto +billiard==4.2.1 + # via + # -c requirements/_base.txt + # celery boto3==1.38.1 # via # aws-sam-translator @@ -55,6 +63,10 @@ botocore==1.38.1 # s3transfer botocore-stubs==1.37.4 # via types-boto3 +celery==5.5.3 + # via + # -c requirements/_base.txt + # pytest-celery certifi==2024.8.30 # via # -c requirements/../../../requirements/constraints.txt @@ -79,9 +91,25 @@ click==8.1.7 # via # -c requirements/_base.txt # -r requirements/_test.in + # celery + # click-didyoumean + # click-plugins + # click-repl # flask # pact-python # uvicorn +click-didyoumean==0.3.1 + # via + # -c requirements/_base.txt + # celery +click-plugins==1.1.1.2 + # via + # -c requirements/_base.txt + # celery +click-repl==0.3.0 + # via + # -c requirements/_base.txt + # celery coverage==7.6.12 # via pytest-cov cryptography==44.0.0 @@ -91,10 +119,14 @@ cryptography==44.0.0 # moto # python-jose # sshpubkeys +debugpy==1.8.16 + # via pytest-celery docker==7.1.0 # via # -r requirements/_test.in # moto + # pytest-celery + # 
pytest-docker-tools ecdsa==0.19.0 # via # moto @@ -181,6 +213,10 @@ jsonschema==3.2.0 # openapi-spec-validator junit-xml==1.9 # via cfn-lint +kombu==5.5.4 + # via + # -c requirements/_base.txt + # celery mako==1.3.10 # via # -c requirements/../../../requirements/constraints.txt @@ -215,6 +251,7 @@ packaging==24.2 # via # -c requirements/_base.txt # aioresponses + # kombu # pytest pact-python==2.3.1 # via -r requirements/_test.in @@ -228,6 +265,10 @@ pluggy==1.5.0 # via # pytest # pytest-cov +prompt-toolkit==3.0.51 + # via + # -c requirements/_base.txt + # click-repl propcache==0.2.1 # via # -c requirements/_base.txt @@ -237,6 +278,7 @@ psutil==6.1.0 # via # -c requirements/_base.txt # pact-python + # pytest-celery pyasn1==0.4.8 # via # python-jose @@ -274,13 +316,18 @@ pytest==8.4.1 # pytest-asyncio # pytest-cov # pytest-docker + # pytest-docker-tools # pytest-mock pytest-asyncio==1.0.0 # via -r requirements/_test.in +pytest-celery==1.1.3 + # via -r requirements/_test.in pytest-cov==6.2.1 # via -r requirements/_test.in pytest-docker==3.2.3 # via -r requirements/_test.in +pytest-docker-tools==3.1.9 + # via pytest-celery pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 @@ -289,6 +336,7 @@ python-dateutil==2.9.0.post0 # via # -c requirements/_base.txt # botocore + # celery # moto python-jose==3.4.0 # via moto @@ -335,6 +383,7 @@ setuptools==80.9.0 # moto # openapi-spec-validator # pbr + # pytest-celery six==1.17.0 # via # -c requirements/_base.txt @@ -364,6 +413,10 @@ starlette==0.41.3 # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # fastapi +tenacity==9.0.0 + # via + # -c requirements/_base.txt + # pytest-celery types-aiofiles==24.1.0.20241221 # via -r requirements/_test.in types-awscrt==0.23.10 @@ -394,6 +447,7 @@ tzdata==2025.2 # via # -c requirements/_base.txt # faker + # kombu urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt @@ -406,6 +460,16 @@ uvicorn==0.34.2 # via # -c 
requirements/_base.txt # pact-python +vine==5.1.0 + # via + # -c requirements/_base.txt + # amqp + # celery + # kombu +wcwidth==0.2.13 + # via + # -c requirements/_base.txt + # prompt-toolkit werkzeug==2.1.2 # via # flask From f55363c93b25302e3c56ea1ed9734ec826c6d044 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 13:22:40 +0200 Subject: [PATCH 018/111] can run empty test --- .../api/routes/functions_routes.py | 3 +- .../celery/worker_main.py | 42 ++++++++++--------- .../api-server/tests/unit/celery/conftest.py | 20 +++++++-- .../tests/unit/celery/test_functions.py | 5 ++- services/api-server/tests/unit/conftest.py | 2 + 5 files changed, 46 insertions(+), 26 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index 2090a3f01d2..04e645deefb 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -2,7 +2,6 @@ from collections.abc import Callable from typing import Annotated, Final, Literal -from celery_library.task_manager import CeleryTaskManager from fastapi import APIRouter, Depends, Header, Request, status from fastapi_pagination.api import create_page from fastapi_pagination.bases import AbstractPage @@ -329,10 +328,10 @@ async def run_function( # noqa: PLR0913 url_for: Annotated[Callable, Depends(get_reverse_url_mapper)], function_inputs: FunctionInputs, function_service: Annotated[FunctionService, Depends(get_function_service)], - task_manager: Annotated[CeleryTaskManager, Depends(get_task_manager_from_app)], x_simcore_parent_project_uuid: Annotated[ProjectID | Literal["null"], Header()], x_simcore_parent_node_id: Annotated[NodeID | Literal["null"], Header()], ) -> TaskGet: + task_manager = get_task_manager_from_app(request.app) parent_project_uuid = ( 
x_simcore_parent_project_uuid if isinstance(x_simcore_parent_project_uuid, ProjectID) diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_main.py b/services/api-server/src/simcore_service_api_server/celery/worker_main.py index a941526a82e..5b566413fa2 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_main.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_main.py @@ -15,32 +15,34 @@ from ..core.settings import ApplicationSettings from ._worker_tasks.tasks import setup_worker_tasks -_settings = ApplicationSettings.create_from_envs() - -setup_loggers( - log_format_local_dev_enabled=_settings.API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=_settings.API_SERVER_LOG_FILTER_MAPPING, - tracing_settings=_settings.API_SERVER_TRACING, - log_base_level=_settings.log_level, - noisy_loggers=None, -) +def main(): + _settings = ApplicationSettings.create_from_envs() -assert _settings.API_SERVER_CELERY # nosec -app = create_celery_app(_settings.API_SERVER_CELERY) + setup_loggers( + log_format_local_dev_enabled=_settings.API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=_settings.API_SERVER_LOG_FILTER_MAPPING, + tracing_settings=_settings.API_SERVER_TRACING, + log_base_level=_settings.log_level, + noisy_loggers=None, + ) -app_server = FastAPIAppServer(app=create_app(_settings)) + assert _settings.API_SERVER_CELERY # nosec + app = create_celery_app(_settings.API_SERVER_CELERY) + app_server = FastAPIAppServer(app=create_app(_settings)) -def worker_init_wrapper(sender, **_kwargs): - assert _settings.API_SERVER_CELERY # nosec - return partial(on_worker_init, app_server, _settings.API_SERVER_CELERY)( - sender, **_kwargs - ) + def worker_init_wrapper(sender, **_kwargs): + assert _settings.API_SERVER_CELERY # nosec + return partial(on_worker_init, app_server, _settings.API_SERVER_CELERY)( + sender, **_kwargs + ) + worker_init.connect(worker_init_wrapper) + 
worker_shutdown.connect(on_worker_shutdown) -worker_init.connect(worker_init_wrapper) -worker_shutdown.connect(on_worker_shutdown) + setup_worker_tasks(app) -setup_worker_tasks(app) +if __name__ == "__main__": + main() diff --git a/services/api-server/tests/unit/celery/conftest.py b/services/api-server/tests/unit/celery/conftest.py index 980c33b04c2..be9d3d7ba89 100644 --- a/services/api-server/tests/unit/celery/conftest.py +++ b/services/api-server/tests/unit/celery/conftest.py @@ -1,5 +1,7 @@ +import datetime from collections.abc import AsyncIterator, Callable from functools import partial +from typing import Any import pytest from celery import Celery @@ -14,9 +16,21 @@ from simcore_service_api_server.core.application import create_app from simcore_service_api_server.core.settings import ApplicationSettings -pytest_plugins = [ - "pytest_simcore.rabbit_service", -] + +@pytest.fixture(scope="session") +def celery_config() -> dict[str, Any]: + return { + "broker_connection_retry_on_startup": True, + "broker_url": "memory://localhost//", + "result_backend": "cache+memory://localhost//", + "result_expires": datetime.timedelta(days=7), + "result_extended": True, + "pool": "threads", + "task_default_queue": "default", + "task_send_sent_event": True, + "task_track_started": True, + "worker_send_task_events": True, + } @pytest.fixture diff --git a/services/api-server/tests/unit/celery/test_functions.py b/services/api-server/tests/unit/celery/test_functions.py index 02709d70110..6b66ef7351c 100644 --- a/services/api-server/tests/unit/celery/test_functions.py +++ b/services/api-server/tests/unit/celery/test_functions.py @@ -26,6 +26,9 @@ NodeID, ) +pytest_simcore_core_services_selection = ["postgres", "rabbit"] +pytest_simcore_ops_services_selection = ["adminer"] + _faker = Faker() @@ -67,7 +70,7 @@ def _(celery_app: Celery) -> None: return _ -@pytest.mark.parametrize("register_celery_tasks", [_register_fake_run_function_task]) 
+@pytest.mark.parametrize("register_celery_tasks", [_register_fake_run_function_task()]) @pytest.mark.parametrize("add_worker_tasks", [False]) async def test_with_fake_run_function(with_storage_celery_worker: TestWorkController): pass diff --git a/services/api-server/tests/unit/conftest.py b/services/api-server/tests/unit/conftest.py index 51370bb7ce1..0aa940fe496 100644 --- a/services/api-server/tests/unit/conftest.py +++ b/services/api-server/tests/unit/conftest.py @@ -88,6 +88,8 @@ def app_environment( "API_SERVER_HEALTH_CHECK_TASK_PERIOD_SECONDS": "3", "API_SERVER_HEALTH_CHECK_TASK_TIMEOUT_SECONDS": "1", "API_SERVER_LOG_CHECK_TIMEOUT_SECONDS": "1", + "API_SERVER_CELERY": "null", + "API_SERVER_WORKER_MODE": "false", **backend_env_vars_overrides, }, ) From 8f847f60accf721670e6db1fc5b81ed79948da7d Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 14:09:10 +0200 Subject: [PATCH 019/111] minor changes --- .../src/settings_library/postgres.py | 20 ++++++++++--------- services/api-server/tests/conftest.py | 3 +++ services/api-server/tests/unit/conftest.py | 3 --- 3 files changed, 14 insertions(+), 12 deletions(-) diff --git a/packages/settings-library/src/settings_library/postgres.py b/packages/settings-library/src/settings_library/postgres.py index 325a3288414..90d456cbda0 100644 --- a/packages/settings-library/src/settings_library/postgres.py +++ b/packages/settings-library/src/settings_library/postgres.py @@ -1,5 +1,5 @@ from functools import cached_property -from typing import Annotated +from typing import Annotated, Self from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse from pydantic import ( @@ -7,8 +7,7 @@ Field, PostgresDsn, SecretStr, - ValidationInfo, - field_validator, + model_validator, ) from pydantic.config import JsonDict from pydantic_settings import SettingsConfigDict @@ -50,13 +49,15 @@ class PostgresSettings(BaseCustomSettings): ), ] = None - @field_validator("POSTGRES_MAXSIZE") - @classmethod - def 
_check_size(cls, v, info: ValidationInfo): - if info.data["POSTGRES_MINSIZE"] > v: - msg = f"assert POSTGRES_MINSIZE={info.data['POSTGRES_MINSIZE']} <= POSTGRES_MAXSIZE={v}" + @model_validator(mode="after") + def validate_postgres_sizes(self) -> Self: + if self.POSTGRES_MINSIZE > self.POSTGRES_MAXSIZE: + msg = ( + f"assert POSTGRES_MINSIZE={self.POSTGRES_MINSIZE} <= " + f"POSTGRES_MAXSIZE={self.POSTGRES_MAXSIZE}" + ) raise ValueError(msg) - return v + return self @cached_property def dsn(self) -> str: @@ -135,3 +136,4 @@ def _update_json_schema_extra(schema: JsonDict) -> None: ) model_config = SettingsConfigDict(json_schema_extra=_update_json_schema_extra) + model_config = SettingsConfigDict(json_schema_extra=_update_json_schema_extra) diff --git a/services/api-server/tests/conftest.py b/services/api-server/tests/conftest.py index 2fd59c2f626..8bdcaf870d5 100644 --- a/services/api-server/tests/conftest.py +++ b/services/api-server/tests/conftest.py @@ -71,6 +71,9 @@ def default_app_env_vars( env_vars["API_SERVER_DEV_FEATURES_ENABLED"] = "1" env_vars["API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED"] = "1" env_vars["API_SERVER_PROMETHEUS_INSTRUMENTATION_ENABLED"] = "0" + env_vars["POSTGRES_MINSIZE"] = "2" + env_vars["POSTGRES_MAXSIZE"] = "10" + env_vars["API_SERVER_CELERY"] = "null" return env_vars diff --git a/services/api-server/tests/unit/conftest.py b/services/api-server/tests/unit/conftest.py index 0aa940fe496..a7327b461eb 100644 --- a/services/api-server/tests/unit/conftest.py +++ b/services/api-server/tests/unit/conftest.py @@ -88,8 +88,6 @@ def app_environment( "API_SERVER_HEALTH_CHECK_TASK_PERIOD_SECONDS": "3", "API_SERVER_HEALTH_CHECK_TASK_TIMEOUT_SECONDS": "1", "API_SERVER_LOG_CHECK_TIMEOUT_SECONDS": "1", - "API_SERVER_CELERY": "null", - "API_SERVER_WORKER_MODE": "false", **backend_env_vars_overrides, }, ) @@ -116,7 +114,6 @@ def mock_missing_plugins(app_environment: EnvVarsDict, mocker: MockerFixture): "setup_prometheus_instrumentation", autospec=True, ) - 
return app_environment From 5a6d47e1fd14b71f1c0a8fa5b9ecb27617f6496f Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 14:24:02 +0200 Subject: [PATCH 020/111] test fix --- services/api-server/tests/conftest.py | 1 + 1 file changed, 1 insertion(+) diff --git a/services/api-server/tests/conftest.py b/services/api-server/tests/conftest.py index 8bdcaf870d5..ba83c5ffd5c 100644 --- a/services/api-server/tests/conftest.py +++ b/services/api-server/tests/conftest.py @@ -74,6 +74,7 @@ def default_app_env_vars( env_vars["POSTGRES_MINSIZE"] = "2" env_vars["POSTGRES_MAXSIZE"] = "10" env_vars["API_SERVER_CELERY"] = "null" + env_vars["API_SERVER_RABBITMQ"] = "null" return env_vars From 1d4df87dc5168b233e0d424532194ea450e39096 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 14:54:26 +0200 Subject: [PATCH 021/111] further improvements to test --- .../tests/unit/celery/test_functions.py | 36 +++++++++++++++++-- 1 file changed, 34 insertions(+), 2 deletions(-) diff --git a/services/api-server/tests/unit/celery/test_functions.py b/services/api-server/tests/unit/celery/test_functions.py index 6b66ef7351c..8c6b19286a4 100644 --- a/services/api-server/tests/unit/celery/test_functions.py +++ b/services/api-server/tests/unit/celery/test_functions.py @@ -6,17 +6,26 @@ from celery.contrib.testing.worker import TestWorkController from celery_library.task import register_task from faker import Faker +from fastapi import FastAPI +from httpx import AsyncClient, BasicAuth from models_library.functions import ( FunctionClass, FunctionID, FunctionInputs, FunctionJobID, RegisteredFunction, + RegisteredProjectFunction, RegisteredProjectFunctionJob, ) from models_library.projects import ProjectID from servicelib.celery.models import TaskID +from servicelib.common_headers import ( + X_SIMCORE_PARENT_NODE_ID, + X_SIMCORE_PARENT_PROJECT_UUID, +) +from simcore_service_api_server._meta import API_VTAG from simcore_service_api_server.api.dependencies.authentication 
import Identity +from simcore_service_api_server.api.routes.functions_routes import get_function from simcore_service_api_server.celery._worker_tasks._functions_tasks import ( run_function as run_function_task, ) @@ -72,5 +81,28 @@ def _(celery_app: Celery) -> None: @pytest.mark.parametrize("register_celery_tasks", [_register_fake_run_function_task()]) @pytest.mark.parametrize("add_worker_tasks", [False]) -async def test_with_fake_run_function(with_storage_celery_worker: TestWorkController): - pass +async def test_with_fake_run_function( + app: FastAPI, + client: AsyncClient, + auth: BasicAuth, + with_storage_celery_worker: TestWorkController, +): + + extra = RegisteredProjectFunction.model_config.get("json_schema_extra") + assert extra + app.dependency_overrides[get_function] = ( + lambda: RegisteredProjectFunction.model_validate(extra["examples"][0]) + ) + + headers = {} + headers[X_SIMCORE_PARENT_PROJECT_UUID] = "null" + headers[X_SIMCORE_PARENT_NODE_ID] = "null" + + response = await client.post( + f"/{API_VTAG}/functions/{_faker.uuid4()}:run", + auth=auth, + json={}, + headers=headers, + ) + + assert response.status_code == 200 From 33653cd496bf58247fe19e44727953d058739157 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 15:06:05 +0200 Subject: [PATCH 022/111] add fakeredis dependency --- services/api-server/requirements/_test.in | 1 + services/api-server/requirements/_test.txt | 9 +++++++++ 2 files changed, 10 insertions(+) diff --git a/services/api-server/requirements/_test.in b/services/api-server/requirements/_test.in index c886d24ec33..067ce3a7305 100644 --- a/services/api-server/requirements/_test.in +++ b/services/api-server/requirements/_test.in @@ -17,6 +17,7 @@ asgi_lifespan click docker faker +fakeredis jsonref moto[server] # mock out tests based on AWS-S3 pact-python diff --git a/services/api-server/requirements/_test.txt b/services/api-server/requirements/_test.txt index 90c81d59f9c..bfb36091f16 100644 --- 
a/services/api-server/requirements/_test.txt +++ b/services/api-server/requirements/_test.txt @@ -134,6 +134,8 @@ ecdsa==0.19.0 # sshpubkeys faker==36.1.1 # via -r requirements/_test.in +fakeredis==2.31.0 + # via -r requirements/_test.in fastapi==0.115.12 # via # -c requirements/_base.txt @@ -351,6 +353,11 @@ pyyaml==6.0.2 # moto # openapi-spec-validator # responses +redis==5.2.1 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # fakeredis referencing==0.35.1 # via # -c requirements/../../../requirements/constraints.txt @@ -398,6 +405,8 @@ sniffio==1.3.1 # anyio # asgi-lifespan # httpx +sortedcontainers==2.4.0 + # via fakeredis sqlalchemy==1.4.54 # via # -c requirements/../../../requirements/constraints.txt From c91abad2696025c59279f23d30db25f427f58531 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 15:33:33 +0200 Subject: [PATCH 023/111] further additions to tests --- .../api-server/tests/unit/celery/conftest.py | 29 +++++++++++++++++-- .../tests/unit/celery/test_functions.py | 8 +++-- 2 files changed, 32 insertions(+), 5 deletions(-) diff --git a/services/api-server/tests/unit/celery/conftest.py b/services/api-server/tests/unit/celery/conftest.py index be9d3d7ba89..353dfec4ff9 100644 --- a/services/api-server/tests/unit/celery/conftest.py +++ b/services/api-server/tests/unit/celery/conftest.py @@ -9,6 +9,8 @@ from celery.signals import worker_init, worker_shutdown from celery.worker.worker import WorkController from celery_library.signals import on_worker_init, on_worker_shutdown +from fakeredis.aioredis import FakeRedis +from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import delenvs_from_dict, setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.fastapi.celery.app_server import FastAPIAppServer @@ -33,15 +35,33 @@ def celery_config() -> dict[str, Any]: } +@pytest.fixture +async def mocked_redis_server(mocker: 
MockerFixture) -> None: + mock_redis = FakeRedis() + mocker.patch("redis.asyncio.from_url", return_value=mock_redis) + + +@pytest.fixture +def mock_celery_app(mocker: MockerFixture, celery_config: dict[str, Any]) -> Celery: + celery_app = Celery(**celery_config) + + for module in ("simcore_service_api_server.api.dependencies.celery.create_app",): + mocker.patch(module, return_value=celery_app) + + return celery_app + + @pytest.fixture def app_environment( + mock_celery_app: Celery, + mocked_redis_server: None, monkeypatch: pytest.MonkeyPatch, app_environment: EnvVarsDict, rabbit_env_vars_dict: EnvVarsDict, ) -> EnvVarsDict: # do not init other services - delenvs_from_dict(monkeypatch, ["API_SERVER_RABBITMQ"]) - return setenvs_from_dict( + delenvs_from_dict(monkeypatch, ["API_SERVER_RABBITMQ", "API_SERVER_CELERY"]) + env_vars_dict = setenvs_from_dict( monkeypatch, { **rabbit_env_vars_dict, @@ -51,6 +71,11 @@ def app_environment( }, ) + settings = ApplicationSettings.create_from_envs() + assert settings.API_SERVER_CELERY is not None + + return env_vars_dict + @pytest.fixture def register_celery_tasks() -> Callable[[Celery], None]: diff --git a/services/api-server/tests/unit/celery/test_functions.py b/services/api-server/tests/unit/celery/test_functions.py index 8c6b19286a4..67962a5ba76 100644 --- a/services/api-server/tests/unit/celery/test_functions.py +++ b/services/api-server/tests/unit/celery/test_functions.py @@ -88,10 +88,12 @@ async def test_with_fake_run_function( with_storage_celery_worker: TestWorkController, ): - extra = RegisteredProjectFunction.model_config.get("json_schema_extra") - assert extra app.dependency_overrides[get_function] = ( - lambda: RegisteredProjectFunction.model_validate(extra["examples"][0]) + lambda: RegisteredProjectFunction.model_validate( + RegisteredProjectFunction.model_config.get("json_schema_extra", {}).get( + "examples", [] + )[0] + ) ) headers = {} From 653dbbca6d50c4e73566d9efddc440de2eec34ed Mon Sep 17 00:00:00 2001 From: 
Mads Bisgaard Date: Wed, 20 Aug 2025 16:20:01 +0200 Subject: [PATCH 024/111] fix for logstreamer --- services/api-server/tests/unit/celery/conftest.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/services/api-server/tests/unit/celery/conftest.py b/services/api-server/tests/unit/celery/conftest.py index 353dfec4ff9..2305f371142 100644 --- a/services/api-server/tests/unit/celery/conftest.py +++ b/services/api-server/tests/unit/celery/conftest.py @@ -41,6 +41,15 @@ async def mocked_redis_server(mocker: MockerFixture) -> None: mocker.patch("redis.asyncio.from_url", return_value=mock_redis) +@pytest.fixture +async def mocked_log_streamer_setup(mocker: MockerFixture) -> MockerFixture: + # mock log streamer: He is looking for non-existent queues. Should be solved more elegantly + from simcore_service_api_server.services_http import rabbitmq + + mock_log_streamer = mocker.patch.object(rabbitmq, "LogDistributor", spec=True) + return mock_log_streamer + + @pytest.fixture def mock_celery_app(mocker: MockerFixture, celery_config: dict[str, Any]) -> Celery: celery_app = Celery(**celery_config) @@ -54,6 +63,7 @@ def mock_celery_app(mocker: MockerFixture, celery_config: dict[str, Any]) -> Cel @pytest.fixture def app_environment( mock_celery_app: Celery, + mocked_log_streamer_setup: MockerFixture, mocked_redis_server: None, monkeypatch: pytest.MonkeyPatch, app_environment: EnvVarsDict, From 676aa326ea4ab746a734f5ad1265f8d9cc2352e6 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 20:33:07 +0200 Subject: [PATCH 025/111] do full round trip in test --- .../src/models_library/functions.py | 32 +++++++++++- .../tests/unit/celery/test_functions.py | 51 +++++++++++++++++-- 2 files changed, 79 insertions(+), 4 deletions(-) diff --git a/packages/models-library/src/models_library/functions.py b/packages/models-library/src/models_library/functions.py index 6f1cbaf136f..340c4db53ad 100644 --- a/packages/models-library/src/models_library/functions.py +++ 
b/packages/models-library/src/models_library/functions.py @@ -113,7 +113,37 @@ class ProjectFunction(FunctionBase): class RegisteredProjectFunction(ProjectFunction, RegisteredFunctionBase): - pass + model_config = ConfigDict( + populate_by_name=True, + json_schema_extra={ + "examples": [ + { + "function_class": "PROJECT", + "title": "Example Project Function", + "description": "This is an example project function.", + "input_schema": { + "schema_content": { + "type": "object", + "properties": {"input1": {"type": "integer"}}, + }, + "schema_class": "application/schema+json", + }, + "output_schema": { + "schema_content": { + "type": "object", + "properties": {"output1": {"type": "string"}}, + }, + "schema_class": "application/schema+json", + }, + "default_inputs": None, + "project_id": "11111111-1111-1111-1111-111111111111", + "uid": "22222222-2222-2222-2222-222222222222", + "created_at": "2024-01-01T12:00:00", + "modified_at": "2024-01-02T12:00:00", + }, + ] + }, + ) SolverJobID: TypeAlias = UUID diff --git a/services/api-server/tests/unit/celery/test_functions.py b/services/api-server/tests/unit/celery/test_functions.py index 67962a5ba76..10990fe2dcb 100644 --- a/services/api-server/tests/unit/celery/test_functions.py +++ b/services/api-server/tests/unit/celery/test_functions.py @@ -6,8 +6,13 @@ from celery.contrib.testing.worker import TestWorkController from celery_library.task import register_task from faker import Faker -from fastapi import FastAPI +from fastapi import FastAPI, status from httpx import AsyncClient, BasicAuth +from models_library.api_schemas_long_running_tasks.tasks import ( + TaskGet, + TaskResult, + TaskStatus, +) from models_library.functions import ( FunctionClass, FunctionID, @@ -34,6 +39,12 @@ JobPricingSpecification, NodeID, ) +from tenacity import ( + AsyncRetrying, + retry_if_exception_type, + stop_after_delay, + wait_exponential, +) pytest_simcore_core_services_selection = ["postgres", "rabbit"] pytest_simcore_ops_services_selection = 
["adminer"] @@ -41,6 +52,32 @@ _faker = Faker() +async def poll_task_until_done( + client: AsyncClient, + auth: BasicAuth, + task_id: str, + timeout: float = 30.0, +) -> TaskResult: + + async for attempt in AsyncRetrying( + stop=stop_after_delay(timeout), + wait=wait_exponential(multiplier=0.5, min=0.5, max=2.0), + reraise=True, + retry=retry_if_exception_type(AssertionError), + ): + with attempt: + + response = await client.get(f"/{API_VTAG}/tasks/{task_id}", auth=auth) + response.raise_for_status() + status = TaskStatus.model_validate(response.json()) + assert status.done is True + + assert status.done is True + response = await client.get(f"/{API_VTAG}/tasks/{task_id}/result", auth=auth) + response.raise_for_status() + return TaskResult.model_validate(response.json()) + + def _register_fake_run_function_task() -> Callable[[Celery], None]: async def run_function( @@ -87,7 +124,6 @@ async def test_with_fake_run_function( auth: BasicAuth, with_storage_celery_worker: TestWorkController, ): - app.dependency_overrides[get_function] = ( lambda: RegisteredProjectFunction.model_validate( RegisteredProjectFunction.model_config.get("json_schema_extra", {}).get( @@ -107,4 +143,13 @@ async def test_with_fake_run_function( headers=headers, ) - assert response.status_code == 200 + assert response.status_code == status.HTTP_200_OK + task = TaskGet.model_validate(response.json()) + + # Poll until task completion and get result + result = await poll_task_until_done(client, auth, task.task_id) + + # Verify the result is a RegisteredProjectFunctionJob + assert result is not None + assert isinstance(result, dict) + # Add more specific assertions based on your expected result structure From 162c8ee3b9adf538065aa2aa04aa09858601d989 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 21:15:08 +0200 Subject: [PATCH 026/111] test passing --- .../api/routes/tasks.py | 120 ++++++++++++------ .../tests/unit/celery/test_functions.py | 8 +- 2 files changed, 87 insertions(+), 
41 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/tasks.py b/services/api-server/src/simcore_service_api_server/api/routes/tasks.py index fb52bc26795..031f6530f09 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/tasks.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/tasks.py @@ -1,7 +1,8 @@ import logging from typing import Annotated, Any -from fastapi import APIRouter, Depends, FastAPI, status +from celery.exceptions import CeleryError # type: ignore[import-untyped] +from fastapi import APIRouter, Depends, FastAPI, HTTPException, status from models_library.api_schemas_long_running_tasks.base import TaskProgress from models_library.api_schemas_long_running_tasks.tasks import ( TaskGet, @@ -14,14 +15,13 @@ ) from models_library.products import ProductName from models_library.users import UserID +from servicelib.celery.models import TaskFilter, TaskUUID from servicelib.fastapi.dependencies import get_app from ...models.schemas.base import ApiServerEnvelope from ...models.schemas.errors import ErrorGet -from ...services_rpc.async_jobs import AsyncJobClient from ..dependencies.authentication import get_current_user_id, get_product_name -from ..dependencies.celery import ASYNC_JOB_CLIENT_NAME -from ..dependencies.tasks import get_async_jobs_client +from ..dependencies.celery import ASYNC_JOB_CLIENT_NAME, get_task_manager_from_app from ._constants import ( FMSG_CHANGELOG_NEW_IN_VERSION, create_route_description, @@ -31,10 +31,11 @@ _logger = logging.getLogger(__name__) -def _get_job_filter(user_id: UserID, product_name: ProductName) -> AsyncJobFilter: - return AsyncJobFilter( +def _get_task_filter(user_id: UserID, product_name: ProductName) -> TaskFilter: + job_filter = AsyncJobFilter( user_id=user_id, product_name=product_name, client_name=ASYNC_JOB_CLIENT_NAME ) + return TaskFilter.model_validate(job_filter.model_dump()) _DEFAULT_TASK_STATUS_CODES: dict[int | str, dict[str, Any]] = 
{ @@ -61,26 +62,34 @@ async def list_tasks( app: Annotated[FastAPI, Depends(get_app)], user_id: Annotated[UserID, Depends(get_current_user_id)], product_name: Annotated[ProductName, Depends(get_product_name)], - async_jobs: Annotated[AsyncJobClient, Depends(get_async_jobs_client)], ): - user_async_jobs = await async_jobs.list_jobs( - job_filter=_get_job_filter(user_id, product_name), - filter_="", - ) + + task_manager = get_task_manager_from_app(app) + + try: + tasks = await task_manager.list_tasks( + task_filter=_get_task_filter(user_id, product_name), + ) + except CeleryError as exc: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Encountered issue when listing tasks", + ) from exc + app_router = app.router data = [ TaskGet( - task_id=f"{job.job_id}", - task_name=job.job_name, + task_id=f"{task.uuid}", + task_name=task.metadata.name, status_href=app_router.url_path_for( - "get_task_status", task_id=f"{job.job_id}" + "get_task_status", task_id=f"{task.uuid}" ), - abort_href=app_router.url_path_for("cancel_task", task_id=f"{job.job_id}"), + abort_href=app_router.url_path_for("cancel_task", task_id=f"{task.uuid}"), result_href=app_router.url_path_for( - "get_task_result", task_id=f"{job.job_id}" + "get_task_result", task_id=f"{task.uuid}" ), ) - for job in user_async_jobs + for task in tasks ] return ApiServerEnvelope(data=data) @@ -99,20 +108,29 @@ async def list_tasks( ) async def get_task_status( task_id: AsyncJobId, + app: Annotated[FastAPI, Depends(get_app)], user_id: Annotated[UserID, Depends(get_current_user_id)], product_name: Annotated[ProductName, Depends(get_product_name)], - async_jobs: Annotated[AsyncJobClient, Depends(get_async_jobs_client)], ): - async_job_rpc_status = await async_jobs.status( - job_id=task_id, - job_filter=_get_job_filter(user_id, product_name), - ) - _task_id = f"{async_job_rpc_status.job_id}" + task_manager = get_task_manager_from_app(app) + + try: + task_status = await 
task_manager.get_task_status( + task_filter=_get_task_filter(user_id, product_name), + task_uuid=TaskUUID(f"{task_id}"), + ) + except CeleryError as exc: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Encountered issue when getting task status", + ) from exc + return TaskStatus( task_progress=TaskProgress( - task_id=_task_id, percent=async_job_rpc_status.progress.percent_value + task_id=f"{task_status.task_uuid}", + percent=task_status.progress_report.percent_value, ), - done=async_job_rpc_status.done, + done=task_status.is_done, started=None, ) @@ -131,14 +149,22 @@ async def get_task_status( ) async def cancel_task( task_id: AsyncJobId, + app: Annotated[FastAPI, Depends(get_app)], user_id: Annotated[UserID, Depends(get_current_user_id)], product_name: Annotated[ProductName, Depends(get_product_name)], - async_jobs: Annotated[AsyncJobClient, Depends(get_async_jobs_client)], ): - await async_jobs.cancel( - job_id=task_id, - job_filter=_get_job_filter(user_id, product_name), - ) + task_manager = get_task_manager_from_app(app) + + try: + await task_manager.cancel_task( + task_filter=_get_task_filter(user_id, product_name), + task_uuid=TaskUUID(f"{task_id}"), + ) + except CeleryError as exc: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Encountered issue when cancelling task", + ) from exc @router.get( @@ -165,12 +191,34 @@ async def cancel_task( ) async def get_task_result( task_id: AsyncJobId, + app: Annotated[FastAPI, Depends(get_app)], user_id: Annotated[UserID, Depends(get_current_user_id)], product_name: Annotated[ProductName, Depends(get_product_name)], - async_jobs: Annotated[AsyncJobClient, Depends(get_async_jobs_client)], ): - async_job_rpc_result = await async_jobs.result( - job_id=task_id, - job_filter=_get_job_filter(user_id, product_name), - ) - return TaskResult(result=async_job_rpc_result.result, error=None) + task_manager = get_task_manager_from_app(app) + task_filter = 
_get_task_filter(user_id, product_name) + + try: + # First check if task exists and is done + task_status = await task_manager.get_task_status( + task_filter=task_filter, + task_uuid=TaskUUID(f"{task_id}"), + ) + + if not task_status.is_done: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Task result not available yet", + ) + + result = await task_manager.get_task_result( + task_filter=task_filter, + task_uuid=TaskUUID(f"{task_id}"), + ) + return TaskResult(result=result, error=None) + + except CeleryError as exc: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Encountered issue when getting task result", + ) from exc diff --git a/services/api-server/tests/unit/celery/test_functions.py b/services/api-server/tests/unit/celery/test_functions.py index 10990fe2dcb..1dc86ea56de 100644 --- a/services/api-server/tests/unit/celery/test_functions.py +++ b/services/api-server/tests/unit/celery/test_functions.py @@ -5,6 +5,7 @@ from celery import Celery, Task from celery.contrib.testing.worker import TestWorkController from celery_library.task import register_task +from celery_library.types import register_pydantic_types from faker import Faker from fastapi import FastAPI, status from httpx import AsyncClient, BasicAuth @@ -111,6 +112,7 @@ async def run_function( ), f"Signature mismatch: {inspect.signature(run_function_task)} != {inspect.signature(run_function)}" def _(celery_app: Celery) -> None: + register_pydantic_types(RegisteredProjectFunctionJob) register_task(celery_app, run_function) return _ @@ -148,8 +150,4 @@ async def test_with_fake_run_function( # Poll until task completion and get result result = await poll_task_until_done(client, auth, task.task_id) - - # Verify the result is a RegisteredProjectFunctionJob - assert result is not None - assert isinstance(result, dict) - # Add more specific assertions based on your expected result structure + 
RegisteredProjectFunctionJob.model_validate(result.result) From 18effae090d9fda2b8caffeec2d23821051d1b1f Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 21:48:59 +0200 Subject: [PATCH 027/111] start converting task tests --- .../api-server/tests/unit/celery/conftest.py | 2 +- .../tests/unit/celery/test_functions.py | 2 +- .../tests/unit/{ => celery}/test_tasks.py | 21 +++++++------------ 3 files changed, 10 insertions(+), 15 deletions(-) rename services/api-server/tests/unit/{ => celery}/test_tasks.py (92%) diff --git a/services/api-server/tests/unit/celery/conftest.py b/services/api-server/tests/unit/celery/conftest.py index 2305f371142..c4e0c1ab319 100644 --- a/services/api-server/tests/unit/celery/conftest.py +++ b/services/api-server/tests/unit/celery/conftest.py @@ -103,7 +103,7 @@ def add_worker_tasks() -> bool: @pytest.fixture -async def with_storage_celery_worker( +async def with_api_server_celery_worker( app_environment: EnvVarsDict, celery_app: Celery, monkeypatch: pytest.MonkeyPatch, diff --git a/services/api-server/tests/unit/celery/test_functions.py b/services/api-server/tests/unit/celery/test_functions.py index 1dc86ea56de..2e4ead95b0a 100644 --- a/services/api-server/tests/unit/celery/test_functions.py +++ b/services/api-server/tests/unit/celery/test_functions.py @@ -124,7 +124,7 @@ async def test_with_fake_run_function( app: FastAPI, client: AsyncClient, auth: BasicAuth, - with_storage_celery_worker: TestWorkController, + with_api_server_celery_worker: TestWorkController, ): app.dependency_overrides[get_function] = ( lambda: RegisteredProjectFunction.model_validate( diff --git a/services/api-server/tests/unit/test_tasks.py b/services/api-server/tests/unit/celery/test_tasks.py similarity index 92% rename from services/api-server/tests/unit/test_tasks.py rename to services/api-server/tests/unit/celery/test_tasks.py index 40f64eb31c4..f0063da2a7f 100644 --- a/services/api-server/tests/unit/test_tasks.py +++ 
b/services/api-server/tests/unit/celery/test_tasks.py @@ -4,6 +4,7 @@ from typing import Any import pytest +from celery.contrib.testing.worker import TestWorkController from faker import Faker from fastapi import status from httpx import AsyncClient, BasicAuth @@ -19,6 +20,9 @@ from pytest_simcore.helpers.async_jobs_server import AsyncJobSideEffects from simcore_service_api_server.models.schemas.base import ApiServerEnvelope +pytest_simcore_core_services_selection = ["postgres", "rabbit"] + + _faker = Faker() @@ -64,26 +68,17 @@ def mocked_async_jobs_rpc_api( @pytest.mark.parametrize( - "async_job_error, expected_status_code", - [ - (None, status.HTTP_200_OK), - ( - JobSchedulerError( - exc=Exception("A very rare exception raised by the scheduler") - ), - status.HTTP_500_INTERNAL_SERVER_ERROR, - ), - ], + "expected_status_code", + [status.HTTP_200_OK], ) -async def test_get_async_jobs( +async def test_get_celery_tasks( client: AsyncClient, - mocked_async_jobs_rpc_api: dict[str, MockType], auth: BasicAuth, expected_status_code: int, + with_api_server_celery_worker: TestWorkController, ): response = await client.get("/v0/tasks", auth=auth) - assert mocked_async_jobs_rpc_api["list_jobs"].called assert response.status_code == expected_status_code if response.status_code == status.HTTP_200_OK: From c297397ba4330c3de99008feff0a64f10887404a Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 20 Aug 2025 21:49:50 +0200 Subject: [PATCH 028/111] minor change --- .../tests/unit/celery/test_tasks.py | 46 +------------------ 1 file changed, 1 insertion(+), 45 deletions(-) diff --git a/services/api-server/tests/unit/celery/test_tasks.py b/services/api-server/tests/unit/celery/test_tasks.py index f0063da2a7f..8056622051a 100644 --- a/services/api-server/tests/unit/celery/test_tasks.py +++ b/services/api-server/tests/unit/celery/test_tasks.py @@ -1,7 +1,6 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument -from typing import Any import pytest from 
celery.contrib.testing.worker import TestWorkController @@ -10,14 +9,12 @@ from httpx import AsyncClient, BasicAuth from models_library.api_schemas_long_running_tasks.tasks import TaskGet, TaskStatus from models_library.api_schemas_rpc_async_jobs.exceptions import ( - BaseAsyncjobRpcError, JobAbortedError, JobError, JobNotDoneError, JobSchedulerError, ) -from pytest_mock import MockerFixture, MockType -from pytest_simcore.helpers.async_jobs_server import AsyncJobSideEffects +from pytest_mock import MockType from simcore_service_api_server.models.schemas.base import ApiServerEnvelope pytest_simcore_core_services_selection = ["postgres", "rabbit"] @@ -26,47 +23,6 @@ _faker = Faker() -@pytest.fixture -async def async_jobs_rpc_side_effects( - async_job_error: BaseAsyncjobRpcError | None, -) -> Any: - return AsyncJobSideEffects(exception=async_job_error) - - -@pytest.fixture -def mocked_async_jobs_rpc_api( - mocker: MockerFixture, - async_jobs_rpc_side_effects: Any, - mocked_app_dependencies: None, -) -> dict[str, MockType]: - """ - Mocks the catalog's simcore service RPC API for testing purposes. 
- """ - from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs - - mocks = {} - - # Get all callable methods from the side effects class that are not built-ins - side_effect_methods = [ - method_name - for method_name in dir(async_jobs_rpc_side_effects) - if not method_name.startswith("_") - and callable(getattr(async_jobs_rpc_side_effects, method_name)) - ] - - # Create mocks for each method in catalog_rpc that has a corresponding side effect - for method_name in side_effect_methods: - assert hasattr(async_jobs, method_name) - mocks[method_name] = mocker.patch.object( - async_jobs, - method_name, - autospec=True, - side_effect=getattr(async_jobs_rpc_side_effects, method_name), - ) - - return mocks - - @pytest.mark.parametrize( "expected_status_code", [status.HTTP_200_OK], From 068371555b767025bdc4081737c25106ba67fa02 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 21 Aug 2025 09:46:19 +0200 Subject: [PATCH 029/111] minor changes --- .../api/dependencies/celery.py | 2 +- .../api/routes/functions_routes.py | 4 ++-- .../src/simcore_service_api_server/api/routes/tasks.py | 10 +++++----- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py index 2dce316f07c..a311a4fd436 100644 --- a/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py +++ b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py @@ -37,7 +37,7 @@ async def on_startup() -> None: app.add_event_handler("startup", on_startup) -def get_task_manager_from_app(app: FastAPI) -> CeleryTaskManager: +def get_task_manager(app: FastAPI) -> CeleryTaskManager: assert hasattr(app.state, "task_manager") # nosec task_manager = app.state.task_manager assert isinstance(task_manager, CeleryTaskManager) # nosec diff --git 
a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index 04e645deefb..9943131eb93 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -38,7 +38,7 @@ get_current_user_id, get_product_name, ) -from ..dependencies.celery import ASYNC_JOB_CLIENT_NAME, get_task_manager_from_app +from ..dependencies.celery import ASYNC_JOB_CLIENT_NAME, get_task_manager from ..dependencies.services import ( get_function_job_service, get_function_service, @@ -331,7 +331,7 @@ async def run_function( # noqa: PLR0913 x_simcore_parent_project_uuid: Annotated[ProjectID | Literal["null"], Header()], x_simcore_parent_node_id: Annotated[NodeID | Literal["null"], Header()], ) -> TaskGet: - task_manager = get_task_manager_from_app(request.app) + task_manager = get_task_manager(request.app) parent_project_uuid = ( x_simcore_parent_project_uuid if isinstance(x_simcore_parent_project_uuid, ProjectID) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/tasks.py b/services/api-server/src/simcore_service_api_server/api/routes/tasks.py index 031f6530f09..3ddbc615655 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/tasks.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/tasks.py @@ -21,7 +21,7 @@ from ...models.schemas.base import ApiServerEnvelope from ...models.schemas.errors import ErrorGet from ..dependencies.authentication import get_current_user_id, get_product_name -from ..dependencies.celery import ASYNC_JOB_CLIENT_NAME, get_task_manager_from_app +from ..dependencies.celery import ASYNC_JOB_CLIENT_NAME, get_task_manager from ._constants import ( FMSG_CHANGELOG_NEW_IN_VERSION, create_route_description, @@ -64,7 +64,7 @@ async def list_tasks( product_name: Annotated[ProductName, 
Depends(get_product_name)], ): - task_manager = get_task_manager_from_app(app) + task_manager = get_task_manager(app) try: tasks = await task_manager.list_tasks( @@ -112,7 +112,7 @@ async def get_task_status( user_id: Annotated[UserID, Depends(get_current_user_id)], product_name: Annotated[ProductName, Depends(get_product_name)], ): - task_manager = get_task_manager_from_app(app) + task_manager = get_task_manager(app) try: task_status = await task_manager.get_task_status( @@ -153,7 +153,7 @@ async def cancel_task( user_id: Annotated[UserID, Depends(get_current_user_id)], product_name: Annotated[ProductName, Depends(get_product_name)], ): - task_manager = get_task_manager_from_app(app) + task_manager = get_task_manager(app) try: await task_manager.cancel_task( @@ -195,7 +195,7 @@ async def get_task_result( user_id: Annotated[UserID, Depends(get_current_user_id)], product_name: Annotated[ProductName, Depends(get_product_name)], ): - task_manager = get_task_manager_from_app(app) + task_manager = get_task_manager(app) task_filter = _get_task_filter(user_id, product_name) try: From 2021ac088c3d5a95e38098b5efc2659a54d206f9 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 21 Aug 2025 10:25:30 +0200 Subject: [PATCH 030/111] add examples to celery-library models and use them in mocks --- .../pytest_simcore/celery_library_mocks.py | 57 +++++++++++++++++++ .../src/servicelib/celery/models.py | 49 ++++++++++++++++ .../api/dependencies/celery.py | 3 +- .../tests/unit/celery/test_tasks.py | 22 ++++--- 4 files changed, 121 insertions(+), 10 deletions(-) create mode 100644 packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py diff --git a/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py b/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py new file mode 100644 index 00000000000..5c42a238706 --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py @@ -0,0 +1,57 @@ +import pytest +from faker import 
Faker +from pytest_mock import MockerFixture +from servicelib.celery.models import TaskStatus, TaskUUID +from servicelib.celery.task_manager import Task, TaskManager + +_faker = Faker() + + +@pytest.fixture +def mock_task_manager(mocker: MockerFixture) -> TaskManager: + """ + Returns a TaskManager mock with example return values for each method. + """ + mock = mocker.Mock(spec=TaskManager) + + # Example return values (replace with realistic objects as needed) + mock.submit_task.return_value = TaskUUID(_faker.uuid4()) + mock.cancel_task.return_value = None + mock.get_task_result.return_value = {"result": "example"} + status_extra = TaskStatus.model_config.get("json_schema_extra") + assert status_extra is not None + status_examples = status_extra.get("examples") + assert isinstance(status_examples, list) + assert len(status_examples) > 0 + mock.get_task_status.return_value = TaskStatus.model_validate(status_examples[0]) + list_extra = Task.model_config.get("json_schema_extra") + assert isinstance(list_extra, dict) + list_examples = list_extra.get("examples") + assert isinstance(list_examples, list) + assert len(list_examples) > 0 + mock.list_tasks.return_value = [ + Task.model_validate(example) for example in list_examples + ] + mock.set_task_progress.return_value = None + return mock + + +@pytest.fixture +def mock_task_manager_raising( + mocker: MockerFixture, task_manager_exception: Exception +) -> TaskManager: + """ + Returns a TaskManager mock where all methods raise the provided exception. 
+ """ + mock = mocker.Mock(spec=TaskManager) + + def _raise_exc(*args, **kwargs): + raise task_manager_exception + + mock.submit_task.side_effect = _raise_exc + mock.cancel_task.side_effect = _raise_exc + mock.get_task_result.side_effect = _raise_exc + mock.get_task_status.side_effect = _raise_exc + mock.list_tasks.side_effect = _raise_exc + mock.set_task_progress.side_effect = _raise_exc + return mock diff --git a/packages/service-library/src/servicelib/celery/models.py b/packages/service-library/src/servicelib/celery/models.py index 40756553377..2104dc53989 100644 --- a/packages/service-library/src/servicelib/celery/models.py +++ b/packages/service-library/src/servicelib/celery/models.py @@ -40,6 +40,37 @@ class Task(BaseModel): uuid: TaskUUID metadata: TaskMetadata + model_config = { + "json_schema_extra": { + "examples": [ + { + "uuid": "123e4567-e89b-12d3-a456-426614174000", + "metadata": { + "name": "task1", + "ephemeral": True, + "queue": "default", + }, + }, + { + "uuid": "223e4567-e89b-12d3-a456-426614174001", + "metadata": { + "name": "task2", + "ephemeral": False, + "queue": "cpu_bound", + }, + }, + { + "uuid": "323e4567-e89b-12d3-a456-426614174002", + "metadata": { + "name": "task3", + "ephemeral": True, + "queue": "default", + }, + }, + ] + } + } + _TASK_DONE = {TaskState.SUCCESS, TaskState.FAILURE, TaskState.ABORTED} @@ -72,6 +103,24 @@ class TaskStatus(BaseModel): task_state: TaskState progress_report: ProgressReport + model_config = { + "json_schema_extra": { + "examples": [ + { + "task_uuid": "123e4567-e89b-12d3-a456-426614174000", + "task_state": "SUCCESS", + "progress_report": { + "actual_value": 0.5, + "total": 1.0, + "attempts": 1, + "unit": "null", + "message": "Task not done yet", + }, + } + ] + } + } + @property def is_done(self) -> bool: return self.task_state in _TASK_DONE diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py 
b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py index a311a4fd436..e57f46a269f 100644 --- a/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py +++ b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py @@ -9,6 +9,7 @@ RegisteredPythonCodeFunction, RegisteredSolverFunction, ) +from servicelib.celery.task_manager import TaskManager from settings_library.celery import CelerySettings from ...api.dependencies.authentication import Identity @@ -37,7 +38,7 @@ async def on_startup() -> None: app.add_event_handler("startup", on_startup) -def get_task_manager(app: FastAPI) -> CeleryTaskManager: +def get_task_manager(app: FastAPI) -> TaskManager: assert hasattr(app.state, "task_manager") # nosec task_manager = app.state.task_manager assert isinstance(task_manager, CeleryTaskManager) # nosec diff --git a/services/api-server/tests/unit/celery/test_tasks.py b/services/api-server/tests/unit/celery/test_tasks.py index 8056622051a..93d3bef0c07 100644 --- a/services/api-server/tests/unit/celery/test_tasks.py +++ b/services/api-server/tests/unit/celery/test_tasks.py @@ -3,7 +3,6 @@ import pytest -from celery.contrib.testing.worker import TestWorkController from faker import Faker from fastapi import status from httpx import AsyncClient, BasicAuth @@ -18,7 +17,9 @@ from simcore_service_api_server.models.schemas.base import ApiServerEnvelope pytest_simcore_core_services_selection = ["postgres", "rabbit"] - +pytest_plugins = [ + "pytest_simcore.celery_library_mocks", +] _faker = Faker() @@ -31,7 +32,7 @@ async def test_get_celery_tasks( client: AsyncClient, auth: BasicAuth, expected_status_code: int, - with_api_server_celery_worker: TestWorkController, + mock_task_manager_raising, ): response = await client.get("/v0/tasks", auth=auth) @@ -39,12 +40,12 @@ async def test_get_celery_tasks( if response.status_code == status.HTTP_200_OK: result = 
ApiServerEnvelope[list[TaskGet]].model_validate_json(response.text) - assert len(result.data) > 0 - assert all(isinstance(task, TaskGet) for task in result.data) - task = result.data[0] - assert task.abort_href == f"/v0/tasks/{task.task_id}:cancel" - assert task.result_href == f"/v0/tasks/{task.task_id}/result" - assert task.status_href == f"/v0/tasks/{task.task_id}" + # assert len(result.data) > 0 + # assert all(isinstance(task, TaskGet) for task in result.data) + # task = result.data[0] + # assert task.abort_href == f"/v0/tasks/{task.task_id}:cancel" + # assert task.result_href == f"/v0/tasks/{task.task_id}/result" + # assert task.status_href == f"/v0/tasks/{task.task_id}" @pytest.mark.parametrize( @@ -62,6 +63,7 @@ async def test_get_celery_tasks( async def test_get_async_jobs_status( client: AsyncClient, mocked_async_jobs_rpc_api: dict[str, MockType], + async_job_error: Exception | None, auth: BasicAuth, expected_status_code: int, ): @@ -89,6 +91,7 @@ async def test_get_async_jobs_status( async def test_cancel_async_job( client: AsyncClient, mocked_async_jobs_rpc_api: dict[str, MockType], + async_job_error: Exception | None, auth: BasicAuth, expected_status_code: int, ): @@ -130,6 +133,7 @@ async def test_cancel_async_job( async def test_get_async_job_result( client: AsyncClient, mocked_async_jobs_rpc_api: dict[str, MockType], + async_job_error: Exception | None, auth: BasicAuth, expected_status_code: int, ): From 51e0b9521c576ea6aee84c98b1b09bd7763d4c6c Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 21 Aug 2025 11:15:08 +0200 Subject: [PATCH 031/111] first test passing --- .../pytest_simcore/celery_library_mocks.py | 38 +++++++++---------- .../src/servicelib/celery/models.py | 8 ++-- .../tests/unit/celery/test_tasks.py | 31 ++++++++++----- 3 files changed, 45 insertions(+), 32 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py b/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py index 
5c42a238706..402ec57b578 100644 --- a/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py +++ b/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py @@ -1,6 +1,6 @@ import pytest from faker import Faker -from pytest_mock import MockerFixture +from pytest_mock import MockerFixture, MockType from servicelib.celery.models import TaskStatus, TaskUUID from servicelib.celery.task_manager import Task, TaskManager @@ -8,7 +8,7 @@ @pytest.fixture -def mock_task_manager(mocker: MockerFixture) -> TaskManager: +def mock_task_manager_object(mocker: MockerFixture) -> MockType: """ Returns a TaskManager mock with example return values for each method. """ @@ -37,21 +37,19 @@ def mock_task_manager(mocker: MockerFixture) -> TaskManager: @pytest.fixture -def mock_task_manager_raising( - mocker: MockerFixture, task_manager_exception: Exception -) -> TaskManager: - """ - Returns a TaskManager mock where all methods raise the provided exception. - """ - mock = mocker.Mock(spec=TaskManager) - - def _raise_exc(*args, **kwargs): - raise task_manager_exception - - mock.submit_task.side_effect = _raise_exc - mock.cancel_task.side_effect = _raise_exc - mock.get_task_result.side_effect = _raise_exc - mock.get_task_status.side_effect = _raise_exc - mock.list_tasks.side_effect = _raise_exc - mock.set_task_progress.side_effect = _raise_exc - return mock +def mock_task_manager_raising_factory(mocker: MockerFixture): + def _factory(task_manager_exception: Exception) -> MockType: + mock = mocker.Mock(spec=TaskManager) + + def _raise_exc(*args, **kwargs): + raise task_manager_exception + + mock.submit_task.side_effect = _raise_exc + mock.cancel_task.side_effect = _raise_exc + mock.get_task_result.side_effect = _raise_exc + mock.get_task_status.side_effect = _raise_exc + mock.list_tasks.side_effect = _raise_exc + mock.set_task_progress.side_effect = _raise_exc + return mock + + return _factory diff --git a/packages/service-library/src/servicelib/celery/models.py 
b/packages/service-library/src/servicelib/celery/models.py index 2104dc53989..7a253781005 100644 --- a/packages/service-library/src/servicelib/celery/models.py +++ b/packages/service-library/src/servicelib/celery/models.py @@ -3,7 +3,7 @@ from typing import Annotated, Protocol, TypeAlias from uuid import UUID -from models_library.progress_bar import ProgressReport +from models_library.progress_bar import ProgressReport, ProgressStructuredMessage from pydantic import BaseModel, StringConstraints TaskID: TypeAlias = str @@ -113,8 +113,10 @@ class TaskStatus(BaseModel): "actual_value": 0.5, "total": 1.0, "attempts": 1, - "unit": "null", - "message": "Task not done yet", + "unit": "Byte", + "message": ProgressStructuredMessage.model_config[ + "json_schema_extra" + ]["examples"][0], }, } ] diff --git a/services/api-server/tests/unit/celery/test_tasks.py b/services/api-server/tests/unit/celery/test_tasks.py index 93d3bef0c07..f518ad05b59 100644 --- a/services/api-server/tests/unit/celery/test_tasks.py +++ b/services/api-server/tests/unit/celery/test_tasks.py @@ -13,7 +13,8 @@ JobNotDoneError, JobSchedulerError, ) -from pytest_mock import MockType +from pytest_mock import MockerFixture, MockType, mocker +from simcore_service_api_server.api.routes import tasks as task_routes from simcore_service_api_server.models.schemas.base import ApiServerEnvelope pytest_simcore_core_services_selection = ["postgres", "rabbit"] @@ -24,15 +25,27 @@ _faker = Faker() +@pytest.fixture +def mock_task_manager( + mocker: MockerFixture, mock_task_manager_object: MockType +) -> MockType: + + def _get_task_manager(app): + return mock_task_manager_object + + mocker.patch.object(task_routes, "get_task_manager", _get_task_manager) + return mock_task_manager_object + + @pytest.mark.parametrize( "expected_status_code", [status.HTTP_200_OK], ) -async def test_get_celery_tasks( +async def test_list_celery_tasks( + mock_task_manager: MockType, client: AsyncClient, auth: BasicAuth, expected_status_code: 
int, - mock_task_manager_raising, ): response = await client.get("/v0/tasks", auth=auth) @@ -40,12 +53,12 @@ async def test_get_celery_tasks( if response.status_code == status.HTTP_200_OK: result = ApiServerEnvelope[list[TaskGet]].model_validate_json(response.text) - # assert len(result.data) > 0 - # assert all(isinstance(task, TaskGet) for task in result.data) - # task = result.data[0] - # assert task.abort_href == f"/v0/tasks/{task.task_id}:cancel" - # assert task.result_href == f"/v0/tasks/{task.task_id}/result" - # assert task.status_href == f"/v0/tasks/{task.task_id}" + assert len(result.data) > 0 + assert all(isinstance(task, TaskGet) for task in result.data) + task = result.data[0] + assert task.abort_href == f"/v0/tasks/{task.task_id}:cancel" + assert task.result_href == f"/v0/tasks/{task.task_id}/result" + assert task.status_href == f"/v0/tasks/{task.task_id}" @pytest.mark.parametrize( From b471434c69bd7602f22e3eeb5c26b23122c51849 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 21 Aug 2025 11:28:04 +0200 Subject: [PATCH 032/111] further corrections to tests --- .../pytest_simcore/celery_library_mocks.py | 6 ++- .../tests/unit/celery/test_tasks.py | 47 +++++++++++++------ 2 files changed, 38 insertions(+), 15 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py b/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py index 402ec57b578..685ce26de77 100644 --- a/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py +++ b/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py @@ -1,3 +1,5 @@ +from collections.abc import Callable + import pytest from faker import Faker from pytest_mock import MockerFixture, MockType @@ -37,7 +39,9 @@ def mock_task_manager_object(mocker: MockerFixture) -> MockType: @pytest.fixture -def mock_task_manager_raising_factory(mocker: MockerFixture): +def mock_task_manager_raising_factory( + mocker: MockerFixture, +) -> Callable[[Exception], MockType]: 
def _factory(task_manager_exception: Exception) -> MockType: mock = mocker.Mock(spec=TaskManager) diff --git a/services/api-server/tests/unit/celery/test_tasks.py b/services/api-server/tests/unit/celery/test_tasks.py index f518ad05b59..f48f7950469 100644 --- a/services/api-server/tests/unit/celery/test_tasks.py +++ b/services/api-server/tests/unit/celery/test_tasks.py @@ -2,7 +2,11 @@ # pylint: disable=unused-argument +from collections.abc import Callable +from typing import Literal + import pytest +from celery.exceptions import CeleryError from faker import Faker from fastapi import status from httpx import AsyncClient, BasicAuth @@ -37,28 +41,43 @@ def _get_task_manager(app): return mock_task_manager_object -@pytest.mark.parametrize( - "expected_status_code", - [status.HTTP_200_OK], -) async def test_list_celery_tasks( mock_task_manager: MockType, client: AsyncClient, auth: BasicAuth, - expected_status_code: int, ): response = await client.get("/v0/tasks", auth=auth) - assert response.status_code == expected_status_code + assert response.status_code == status.HTTP_200_OK - if response.status_code == status.HTTP_200_OK: - result = ApiServerEnvelope[list[TaskGet]].model_validate_json(response.text) - assert len(result.data) > 0 - assert all(isinstance(task, TaskGet) for task in result.data) - task = result.data[0] - assert task.abort_href == f"/v0/tasks/{task.task_id}:cancel" - assert task.result_href == f"/v0/tasks/{task.task_id}/result" - assert task.status_href == f"/v0/tasks/{task.task_id}" + result = ApiServerEnvelope[list[TaskGet]].model_validate_json(response.text) + assert len(result.data) > 0 + assert all(isinstance(task, TaskGet) for task in result.data) + task = result.data[0] + assert task.abort_href == f"/v0/tasks/{task.task_id}:cancel" + assert task.result_href == f"/v0/tasks/{task.task_id}/result" + assert task.status_href == f"/v0/tasks/{task.task_id}" + + +@pytest.mark.parametrize( + "method, url, celery_exception, expected_status_code", + [ + 
("GET", "/v0/tasks", CeleryError(), status.HTTP_500_INTERNAL_SERVER_ERROR), + ], +) +async def test_celery_tasks_error_propagation( + mock_task_manager_raising_factory: Callable[[Exception], None], + client: AsyncClient, + auth: BasicAuth, + method: Literal["GET", "POST"], + url: str, + celery_exception: Exception, + expected_status_code: int, +): + mock_task_manager_raising_factory(celery_exception) + + response = await client.request(method=method, url=url, auth=auth) + assert response.status_code == expected_status_code @pytest.mark.parametrize( From 47e9fd1373ba1c1351d3df26de4771bf329b5dea Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 21 Aug 2025 11:34:37 +0200 Subject: [PATCH 033/111] add fixture --- .../api-server/tests/unit/celery/test_tasks.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/services/api-server/tests/unit/celery/test_tasks.py b/services/api-server/tests/unit/celery/test_tasks.py index f48f7950469..f4de511ec0c 100644 --- a/services/api-server/tests/unit/celery/test_tasks.py +++ b/services/api-server/tests/unit/celery/test_tasks.py @@ -41,6 +41,24 @@ def _get_task_manager(app): return mock_task_manager_object +@pytest.fixture +def mock_task_manager_raising_factory( + mocker: MockerFixture, + mock_task_manager_object_raising_factory: Callable[[Exception], MockType], +) -> Callable[[Exception], MockType]: + + def _(task_manager_exception: Exception): + mock = mock_task_manager_object_raising_factory(task_manager_exception) + + def _get_task_manager(app): + return mock + + mocker.patch.object(task_routes, "get_task_manager", _get_task_manager) + return mock + + return _ + + async def test_list_celery_tasks( mock_task_manager: MockType, client: AsyncClient, From 7d329f61f922c05449f271076dccaa774fdb4bc9 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 21 Aug 2025 11:41:26 +0200 Subject: [PATCH 034/111] further additions --- .../api-server/tests/unit/celery/test_tasks.py | 18 ++++++++++++++++++ 1 file changed, 
18 insertions(+) diff --git a/services/api-server/tests/unit/celery/test_tasks.py b/services/api-server/tests/unit/celery/test_tasks.py index f4de511ec0c..3479abcc538 100644 --- a/services/api-server/tests/unit/celery/test_tasks.py +++ b/services/api-server/tests/unit/celery/test_tasks.py @@ -81,6 +81,24 @@ async def test_list_celery_tasks( "method, url, celery_exception, expected_status_code", [ ("GET", "/v0/tasks", CeleryError(), status.HTTP_500_INTERNAL_SERVER_ERROR), + ( + "GET", + f"/v0/tasks/{_faker.uuid4()}", + CeleryError(), + status.HTTP_500_INTERNAL_SERVER_ERROR, + ), + ( + "POST", + f"/v0/tasks/{_faker.uuid4()}:cancel", + CeleryError(), + status.HTTP_500_INTERNAL_SERVER_ERROR, + ), + ( + "GET", + f"/v0/tasks/{_faker.uuid4()}/result", + CeleryError(), + status.HTTP_500_INTERNAL_SERVER_ERROR, + ), ], ) async def test_celery_tasks_error_propagation( From ca2c013d25212618f5c41f37b8f3db377c78dec4 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 21 Aug 2025 13:16:43 +0200 Subject: [PATCH 035/111] ensure all task tests pass --- .../tests/unit/celery/test_tasks.py | 207 +++++++----------- 1 file changed, 83 insertions(+), 124 deletions(-) diff --git a/services/api-server/tests/unit/celery/test_tasks.py b/services/api-server/tests/unit/celery/test_tasks.py index 3479abcc538..2ce35ea299c 100644 --- a/services/api-server/tests/unit/celery/test_tasks.py +++ b/services/api-server/tests/unit/celery/test_tasks.py @@ -2,7 +2,6 @@ # pylint: disable=unused-argument -from collections.abc import Callable from typing import Literal import pytest @@ -11,12 +10,6 @@ from fastapi import status from httpx import AsyncClient, BasicAuth from models_library.api_schemas_long_running_tasks.tasks import TaskGet, TaskStatus -from models_library.api_schemas_rpc_async_jobs.exceptions import ( - JobAbortedError, - JobError, - JobNotDoneError, - JobSchedulerError, -) from pytest_mock import MockerFixture, MockType, mocker from simcore_service_api_server.api.routes import tasks as 
task_routes from simcore_service_api_server.models.schemas.base import ApiServerEnvelope @@ -41,24 +34,6 @@ def _get_task_manager(app): return mock_task_manager_object -@pytest.fixture -def mock_task_manager_raising_factory( - mocker: MockerFixture, - mock_task_manager_object_raising_factory: Callable[[Exception], MockType], -) -> Callable[[Exception], MockType]: - - def _(task_manager_exception: Exception): - mock = mock_task_manager_object_raising_factory(task_manager_exception) - - def _get_task_manager(app): - return mock - - mocker.patch.object(task_routes, "get_task_manager", _get_task_manager) - return mock - - return _ - - async def test_list_celery_tasks( mock_task_manager: MockType, client: AsyncClient, @@ -66,6 +41,7 @@ async def test_list_celery_tasks( ): response = await client.get("/v0/tasks", auth=auth) + assert mock_task_manager.list_tasks.called assert response.status_code == status.HTTP_200_OK result = ApiServerEnvelope[list[TaskGet]].model_validate_json(response.text) @@ -77,136 +53,119 @@ async def test_list_celery_tasks( assert task.status_href == f"/v0/tasks/{task.task_id}" -@pytest.mark.parametrize( - "method, url, celery_exception, expected_status_code", - [ - ("GET", "/v0/tasks", CeleryError(), status.HTTP_500_INTERNAL_SERVER_ERROR), - ( - "GET", - f"/v0/tasks/{_faker.uuid4()}", - CeleryError(), - status.HTTP_500_INTERNAL_SERVER_ERROR, - ), - ( - "POST", - f"/v0/tasks/{_faker.uuid4()}:cancel", - CeleryError(), - status.HTTP_500_INTERNAL_SERVER_ERROR, - ), - ( - "GET", - f"/v0/tasks/{_faker.uuid4()}/result", - CeleryError(), - status.HTTP_500_INTERNAL_SERVER_ERROR, - ), - ], -) -async def test_celery_tasks_error_propagation( - mock_task_manager_raising_factory: Callable[[Exception], None], +async def test_get_async_jobs_status( + mock_task_manager: MockType, client: AsyncClient, auth: BasicAuth, - method: Literal["GET", "POST"], - url: str, - celery_exception: Exception, - expected_status_code: int, ): - 
mock_task_manager_raising_factory(celery_exception) - - response = await client.request(method=method, url=url, auth=auth) - assert response.status_code == expected_status_code + task_id = f"{_faker.uuid4()}" + response = await client.get(f"/v0/tasks/{task_id}", auth=auth) + assert mock_task_manager.get_task_status.called + assert response.status_code == status.HTTP_200_OK + TaskStatus.model_validate_json(response.text) -@pytest.mark.parametrize( - "async_job_error, expected_status_code", - [ - (None, status.HTTP_200_OK), - ( - JobSchedulerError( - exc=Exception("A very rare exception raised by the scheduler") - ), - status.HTTP_500_INTERNAL_SERVER_ERROR, - ), - ], -) -async def test_get_async_jobs_status( +async def test_cancel_async_job( + mock_task_manager: MockType, client: AsyncClient, - mocked_async_jobs_rpc_api: dict[str, MockType], - async_job_error: Exception | None, auth: BasicAuth, - expected_status_code: int, ): task_id = f"{_faker.uuid4()}" - response = await client.get(f"/v0/tasks/{task_id}", auth=auth) - assert mocked_async_jobs_rpc_api["status"].called - assert f"{mocked_async_jobs_rpc_api['status'].call_args[1]['job_id']}" == task_id - assert response.status_code == expected_status_code - if response.status_code == status.HTTP_200_OK: - TaskStatus.model_validate_json(response.text) + response = await client.post(f"/v0/tasks/{task_id}:cancel", auth=auth) + assert mock_task_manager.cancel_task.called + assert response.status_code == status.HTTP_204_NO_CONTENT -@pytest.mark.parametrize( - "async_job_error, expected_status_code", - [ - (None, status.HTTP_204_NO_CONTENT), - ( - JobSchedulerError( - exc=Exception("A very rare exception raised by the scheduler") - ), - status.HTTP_500_INTERNAL_SERVER_ERROR, - ), - ], -) -async def test_cancel_async_job( +async def test_get_result( + mock_task_manager: MockType, client: AsyncClient, - mocked_async_jobs_rpc_api: dict[str, MockType], - async_job_error: Exception | None, auth: BasicAuth, - 
expected_status_code: int, ): task_id = f"{_faker.uuid4()}" - response = await client.post(f"/v0/tasks/{task_id}:cancel", auth=auth) - assert mocked_async_jobs_rpc_api["cancel"].called - assert f"{mocked_async_jobs_rpc_api['cancel'].call_args[1]['job_id']}" == task_id - assert response.status_code == expected_status_code + response = await client.get(f"/v0/tasks/{task_id}/result", auth=auth) + assert response.status_code == status.HTTP_200_OK + assert mock_task_manager.get_task_result.called + assert f"{mock_task_manager.get_task_result.call_args[1]['task_uuid']}" == task_id @pytest.mark.parametrize( - "async_job_error, expected_status_code", + "method, url, list_tasks_return_value, get_task_status_return_value, cancel_task_return_value, expected_status_code", [ - (None, status.HTTP_200_OK), ( - JobError( - job_id=_faker.uuid4(), - exc_type=Exception, - exc_message="An exception from inside the async job", - ), + "GET", + "/v0/tasks", + CeleryError(), + None, + None, status.HTTP_500_INTERNAL_SERVER_ERROR, ), ( - JobNotDoneError(job_id=_faker.uuid4()), - status.HTTP_404_NOT_FOUND, - ), - ( - JobAbortedError(job_id=_faker.uuid4()), - status.HTTP_409_CONFLICT, + "GET", + f"/v0/tasks/{_faker.uuid4()}", + None, + CeleryError(), + None, + status.HTTP_500_INTERNAL_SERVER_ERROR, ), ( - JobSchedulerError( - exc=Exception("A very rare exception raised by the scheduler") - ), + "POST", + f"/v0/tasks/{_faker.uuid4()}:cancel", + None, + None, + CeleryError(), status.HTTP_500_INTERNAL_SERVER_ERROR, ), ], ) -async def test_get_async_job_result( +async def test_celery_error_propagation( + mock_task_manager: MockType, client: AsyncClient, - mocked_async_jobs_rpc_api: dict[str, MockType], - async_job_error: Exception | None, auth: BasicAuth, + method: Literal["GET", "POST"], + url: str, expected_status_code: int, ): - task_id = f"{_faker.uuid4()}" - response = await client.get(f"/v0/tasks/{task_id}/result", auth=auth) + response = await client.request(method=method, url=url, 
auth=auth) assert response.status_code == expected_status_code - assert mocked_async_jobs_rpc_api["result"].called - assert f"{mocked_async_jobs_rpc_api['result'].call_args[1]['job_id']}" == task_id + + +# @pytest.mark.parametrize( +# "async_job_error, expected_status_code", +# [ +# (None, status.HTTP_200_OK), +# ( +# JobError( +# job_id=_faker.uuid4(), +# exc_type=Exception, +# exc_message="An exception from inside the async job", +# ), +# status.HTTP_500_INTERNAL_SERVER_ERROR, +# ), +# ( +# JobNotDoneError(job_id=_faker.uuid4()), +# status.HTTP_404_NOT_FOUND, +# ), +# ( +# JobAbortedError(job_id=_faker.uuid4()), +# status.HTTP_409_CONFLICT, +# ), +# ( +# JobSchedulerError( +# exc=Exception("A very rare exception raised by the scheduler") +# ), +# status.HTTP_500_INTERNAL_SERVER_ERROR, +# ), +# ], +# ) +# async def test_get_async_job_result( +# client: AsyncClient, +# mocked_async_jobs_rpc_api: dict[str, MockType], +# async_job_error: Exception | None, +# auth: BasicAuth, +# expected_status_code: int, +# ): +# task_id = f"{_faker.uuid4()}" +# response = await client.get(f"/v0/tasks/{task_id}/result", auth=auth) +# assert response.status_code == expected_status_code +# assert mocked_async_jobs_rpc_api["result"].called +# assert f"{mocked_async_jobs_rpc_api['result'].call_args[1]['job_id']}" == task_id From 5824f2b5ee36133dd173b593486a4cb5be230eac Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 21 Aug 2025 14:44:00 +0200 Subject: [PATCH 036/111] finish task tests --- .../api/routes/tasks.py | 33 +++++- .../tests/unit/celery/test_tasks.py | 100 ++++++++++-------- 2 files changed, 84 insertions(+), 49 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/tasks.py b/services/api-server/src/simcore_service_api_server/api/routes/tasks.py index 3ddbc615655..62a526d91a8 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/tasks.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/tasks.py @@ 
-2,6 +2,7 @@ from typing import Annotated, Any from celery.exceptions import CeleryError # type: ignore[import-untyped] +from common_library.error_codes import create_error_code from fastapi import APIRouter, Depends, FastAPI, HTTPException, status from models_library.api_schemas_long_running_tasks.base import TaskProgress from models_library.api_schemas_long_running_tasks.tasks import ( @@ -15,8 +16,9 @@ ) from models_library.products import ProductName from models_library.users import UserID -from servicelib.celery.models import TaskFilter, TaskUUID +from servicelib.celery.models import TaskFilter, TaskState, TaskUUID from servicelib.fastapi.dependencies import get_app +from servicelib.logging_errors import create_troubleshootting_log_kwargs from ...models.schemas.base import ApiServerEnvelope from ...models.schemas.errors import ErrorGet @@ -199,7 +201,6 @@ async def get_task_result( task_filter = _get_task_filter(user_id, product_name) try: - # First check if task exists and is done task_status = await task_manager.get_task_status( task_filter=task_filter, task_uuid=TaskUUID(f"{task_id}"), @@ -210,15 +211,37 @@ async def get_task_result( status_code=status.HTTP_404_NOT_FOUND, detail="Task result not available yet", ) + if task_status.task_state == TaskState.ABORTED: + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail="Task was cancelled", + ) - result = await task_manager.get_task_result( + task_result = await task_manager.get_task_result( task_filter=task_filter, task_uuid=TaskUUID(f"{task_id}"), ) - return TaskResult(result=result, error=None) - except CeleryError as exc: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Encountered issue when getting task result", ) from exc + + if task_status.task_state == TaskState.FAILURE: + assert isinstance(task_result, Exception) + user_error_msg = f"The execution of task {task_id} failed" + support_id = create_error_code(task_result) + _logger.exception( + 
**create_troubleshootting_log_kwargs( + user_error_msg, + error=task_result, + error_code=support_id, + tip="Unexpected error in Celery", + ) + ) + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=user_error_msg, + ) + + return TaskResult(result=task_result, error=None) diff --git a/services/api-server/tests/unit/celery/test_tasks.py b/services/api-server/tests/unit/celery/test_tasks.py index 2ce35ea299c..4f4480c879c 100644 --- a/services/api-server/tests/unit/celery/test_tasks.py +++ b/services/api-server/tests/unit/celery/test_tasks.py @@ -9,8 +9,10 @@ from faker import Faker from fastapi import status from httpx import AsyncClient, BasicAuth -from models_library.api_schemas_long_running_tasks.tasks import TaskGet, TaskStatus +from models_library.api_schemas_long_running_tasks.tasks import TaskGet +from models_library.progress_bar import ProgressReport, ProgressStructuredMessage from pytest_mock import MockerFixture, MockType, mocker +from servicelib.celery.models import TaskState, TaskStatus, TaskUUID from simcore_service_api_server.api.routes import tasks as task_routes from simcore_service_api_server.models.schemas.base import ApiServerEnvelope @@ -89,7 +91,7 @@ async def test_get_result( @pytest.mark.parametrize( - "method, url, list_tasks_return_value, get_task_status_return_value, cancel_task_return_value, expected_status_code", + "method, url, list_tasks_return_value, get_task_status_return_value, cancel_task_return_value, get_task_result_return_value, expected_status_code", [ ( "GET", @@ -97,6 +99,7 @@ async def test_get_result( CeleryError(), None, None, + None, status.HTTP_500_INTERNAL_SERVER_ERROR, ), ( @@ -105,6 +108,7 @@ async def test_get_result( None, CeleryError(), None, + None, status.HTTP_500_INTERNAL_SERVER_ERROR, ), ( @@ -113,8 +117,58 @@ async def test_get_result( None, None, CeleryError(), + None, status.HTTP_500_INTERNAL_SERVER_ERROR, ), + ( + "GET", + f"/v0/tasks/{_faker.uuid4()}/result", + None, + 
CeleryError(), + None, + None, + status.HTTP_500_INTERNAL_SERVER_ERROR, + ), + ( + "GET", + f"/v0/tasks/{_faker.uuid4()}/result", + None, + TaskStatus( + task_uuid=TaskUUID("123e4567-e89b-12d3-a456-426614174000"), + task_state=TaskState.STARTED, + progress_report=ProgressReport( + actual_value=0.5, + total=1.0, + unit="Byte", + message=ProgressStructuredMessage.model_config["json_schema_extra"][ + "examples" + ][0], + ), + ), + None, + None, + status.HTTP_404_NOT_FOUND, + ), + ( + "GET", + f"/v0/tasks/{_faker.uuid4()}/result", + None, + TaskStatus( + task_uuid=TaskUUID("123e4567-e89b-12d3-a456-426614174000"), + task_state=TaskState.ABORTED, + progress_report=ProgressReport( + actual_value=0.5, + total=1.0, + unit="Byte", + message=ProgressStructuredMessage.model_config["json_schema_extra"][ + "examples" + ][0], + ), + ), + None, + None, + status.HTTP_409_CONFLICT, + ), ], ) async def test_celery_error_propagation( @@ -127,45 +181,3 @@ async def test_celery_error_propagation( ): response = await client.request(method=method, url=url, auth=auth) assert response.status_code == expected_status_code - - -# @pytest.mark.parametrize( -# "async_job_error, expected_status_code", -# [ -# (None, status.HTTP_200_OK), -# ( -# JobError( -# job_id=_faker.uuid4(), -# exc_type=Exception, -# exc_message="An exception from inside the async job", -# ), -# status.HTTP_500_INTERNAL_SERVER_ERROR, -# ), -# ( -# JobNotDoneError(job_id=_faker.uuid4()), -# status.HTTP_404_NOT_FOUND, -# ), -# ( -# JobAbortedError(job_id=_faker.uuid4()), -# status.HTTP_409_CONFLICT, -# ), -# ( -# JobSchedulerError( -# exc=Exception("A very rare exception raised by the scheduler") -# ), -# status.HTTP_500_INTERNAL_SERVER_ERROR, -# ), -# ], -# ) -# async def test_get_async_job_result( -# client: AsyncClient, -# mocked_async_jobs_rpc_api: dict[str, MockType], -# async_job_error: Exception | None, -# auth: BasicAuth, -# expected_status_code: int, -# ): -# task_id = f"{_faker.uuid4()}" -# response = await 
client.get(f"/v0/tasks/{task_id}/result", auth=auth) -# assert response.status_code == expected_status_code -# assert mocked_async_jobs_rpc_api["result"].called -# assert f"{mocked_async_jobs_rpc_api['result'].call_args[1]['job_id']}" == task_id From 77ac52041c3c24c57cb70dae62ea8483ddd8ea68 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 21 Aug 2025 15:11:58 +0200 Subject: [PATCH 037/111] add test for exception propagation from celery --- .../exceptions/backend_errors.py | 1 + .../tests/unit/celery/test_functions.py | 60 ++++++++++++++++++- 2 files changed, 59 insertions(+), 2 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py b/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py index 33960e49f6b..5257bfad700 100644 --- a/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py +++ b/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py @@ -8,6 +8,7 @@ class BaseBackEndError(ApiServerBaseError): """status_code: the default return status which will be returned to the client calling the api-server (in case this exception is raised)""" + msg_template = "The api-server encountered an error when contacting the backend" status_code = status.HTTP_502_BAD_GATEWAY @classmethod diff --git a/services/api-server/tests/unit/celery/test_functions.py b/services/api-server/tests/unit/celery/test_functions.py index 2e4ead95b0a..393fd116dbe 100644 --- a/services/api-server/tests/unit/celery/test_functions.py +++ b/services/api-server/tests/unit/celery/test_functions.py @@ -8,12 +8,13 @@ from celery_library.types import register_pydantic_types from faker import Faker from fastapi import FastAPI, status -from httpx import AsyncClient, BasicAuth +from httpx import AsyncClient, BasicAuth, HTTPStatusError from models_library.api_schemas_long_running_tasks.tasks import ( TaskGet, TaskResult, TaskStatus, ) +from 
models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobFilter from models_library.functions import ( FunctionClass, FunctionID, @@ -24,17 +25,22 @@ RegisteredProjectFunctionJob, ) from models_library.projects import ProjectID -from servicelib.celery.models import TaskID +from servicelib.celery.models import TaskFilter, TaskID, TaskMetadata from servicelib.common_headers import ( X_SIMCORE_PARENT_NODE_ID, X_SIMCORE_PARENT_PROJECT_UUID, ) from simcore_service_api_server._meta import API_VTAG from simcore_service_api_server.api.dependencies.authentication import Identity +from simcore_service_api_server.api.dependencies.celery import ( + ASYNC_JOB_CLIENT_NAME, + get_task_manager, +) from simcore_service_api_server.api.routes.functions_routes import get_function from simcore_service_api_server.celery._worker_tasks._functions_tasks import ( run_function as run_function_task, ) +from simcore_service_api_server.exceptions.backend_errors import BaseBackEndError from simcore_service_api_server.models.api_resources import JobLinks from simcore_service_api_server.models.schemas.jobs import ( JobPricingSpecification, @@ -151,3 +157,53 @@ async def test_with_fake_run_function( # Poll until task completion and get result result = await poll_task_until_done(client, auth, task.task_id) RegisteredProjectFunctionJob.model_validate(result.result) + + +def _register_exception_task(exception: Exception) -> Callable[[Celery], None]: + + async def exception_task( + task: Task, + task_id: TaskID, + ): + raise exception + + def _(celery_app: Celery) -> None: + register_task(celery_app, exception_task) + + return _ + + +@pytest.mark.parametrize( + "register_celery_tasks", + [ + _register_exception_task(ValueError("Test error")), + _register_exception_task(Exception("Test error")), + _register_exception_task(BaseBackEndError()), + ], +) +@pytest.mark.parametrize("add_worker_tasks", [False]) +async def test_celery_error_propagation( + app: FastAPI, + client: AsyncClient, + auth: 
BasicAuth, + with_api_server_celery_worker: TestWorkController, +): + + user_identity = Identity( + user_id=_faker.pyint(), product_name=_faker.word(), email=_faker.email() + ) + job_filter = AsyncJobFilter( + user_id=user_identity.user_id, + product_name=user_identity.product_name, + client_name=ASYNC_JOB_CLIENT_NAME, + ) + task_manager = get_task_manager(app=app) + task_uuid = await task_manager.submit_task( + task_metadata=TaskMetadata(name="exception_task"), + task_filter=TaskFilter.model_validate(job_filter.model_dump()), + ) + + with pytest.raises(HTTPStatusError) as exc_info: + await poll_task_until_done(client, auth, f"{task_uuid}") + + assert exc_info.value.response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR From 91ed6f1264ee3054e35ba1df364a966dd37730f9 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 21 Aug 2025 15:27:46 +0200 Subject: [PATCH 038/111] add inputs in run function test --- .../api-server/tests/unit/celery/test_functions.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/services/api-server/tests/unit/celery/test_functions.py b/services/api-server/tests/unit/celery/test_functions.py index 393fd116dbe..9d71ccdb30f 100644 --- a/services/api-server/tests/unit/celery/test_functions.py +++ b/services/api-server/tests/unit/celery/test_functions.py @@ -144,10 +144,22 @@ async def test_with_fake_run_function( headers[X_SIMCORE_PARENT_PROJECT_UUID] = "null" headers[X_SIMCORE_PARENT_NODE_ID] = "null" + body = { + "input_1": _faker.uuid4(), + "input_2": _faker.pyfloat(min_value=0, max_value=100), + "input_3": _faker.pyint(min_value=0, max_value=100), + "input_4": _faker.boolean(), + "input_5": _faker.sentence(), + "input_6": [ + _faker.pyfloat(min_value=0, max_value=100) + for _ in range(_faker.pyint(min_value=5, max_value=100)) + ], + } + response = await client.post( f"/{API_VTAG}/functions/{_faker.uuid4()}:run", auth=auth, - json={}, + json=body, headers=headers, ) From 
95de5a576037147dc9774a1d83b7708e61d555f0 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 21 Aug 2025 15:51:29 +0200 Subject: [PATCH 039/111] add api-worker to docker compose --- .../celery/worker_main.py | 4 ++-- services/docker-compose.yml | 17 +++++++++++++++-- 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_main.py b/services/api-server/src/simcore_service_api_server/celery/worker_main.py index 5b566413fa2..b3ad5f02cc7 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_main.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_main.py @@ -16,7 +16,7 @@ from ._worker_tasks.tasks import setup_worker_tasks -def main(): +def app_factory(): _settings = ApplicationSettings.create_from_envs() setup_loggers( @@ -45,4 +45,4 @@ def worker_init_wrapper(sender, **_kwargs): if __name__ == "__main__": - main() + app_factory() diff --git a/services/docker-compose.yml b/services/docker-compose.yml index a6f8d3df855..3438963372d 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -25,7 +25,7 @@ services: image: ${DOCKER_REGISTRY:-itisfoundation}/api-server:${DOCKER_IMAGE_TAG:-latest} init: true hostname: "{{.Node.Hostname}}-{{.Task.Slot}}" - environment: + environment: &api_server_environment <<: *tracing_open_telemetry_environs API_SERVER_DEV_FEATURES_ENABLED: ${API_SERVER_DEV_FEATURES_ENABLED} API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} @@ -75,9 +75,22 @@ services: - traefik.http.routers.${SWARM_STACK_NAME}_api-server.entrypoints=simcore_api - traefik.http.routers.${SWARM_STACK_NAME}_api-server.priority=3 - traefik.http.routers.${SWARM_STACK_NAME}_api-server.middlewares=${SWARM_STACK_NAME}_gzip@swarm,ratelimit-${SWARM_STACK_NAME}_api-server,inflightreq-${SWARM_STACK_NAME}_api-server - networks: + networks: &api_server_networks - default + + api-worker: + image: 
${DOCKER_REGISTRY:-itisfoundation}/api-server:${DOCKER_IMAGE_TAG:-latest} + init: true + hostname: "api-worker-{{.Node.Hostname}}-{{.Task.Slot}}" + environment: + <<: *api_server_environment + API_SERVER_WORKER_NAME: "api-worker-{{.Node.Hostname}}-{{.Task.Slot}}-{{.Task.ID}}" + API_SERVER_WORKER_MODE: "true" + CELERY_CONCURRENCY: 100 + networks: *api_server_networks + + autoscaling: image: ${DOCKER_REGISTRY:-itisfoundation}/autoscaling:${DOCKER_IMAGE_TAG:-latest} init: true From e6eb80c2ccfbca93983e34c63e5a49a67773a4e4 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 21 Aug 2025 16:28:01 +0200 Subject: [PATCH 040/111] add new fixtures and model examples --- .../api_schemas_long_running_tasks/base.py | 12 ++++ .../pytest_simcore/celery_library_mocks.py | 72 +++++++++++++++---- 2 files changed, 69 insertions(+), 15 deletions(-) diff --git a/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py b/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py index d6e132c5361..b6900545ff6 100644 --- a/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py +++ b/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py @@ -23,6 +23,18 @@ class TaskProgress(BaseModel): message: ProgressMessage = "" percent: ProgressPercent = 0.0 + model_config = { + "json_schema_extra": { + "examples": [ + { + "task_id": "3ac48b54-a48d-4c5e-a6ac-dcaddb9eaa59", + "message": "Halfway done", + "percent": 0.5, + } + ] + } + } + # used to propagate progress updates internally _update_callback: Callable[["TaskProgress"], Awaitable[None]] | None = None diff --git a/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py b/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py index 685ce26de77..aa664e99de4 100644 --- a/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py +++ b/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py @@ -10,36 
+10,78 @@ @pytest.fixture -def mock_task_manager_object(mocker: MockerFixture) -> MockType: - """ - Returns a TaskManager mock with example return values for each method. - """ - mock = mocker.Mock(spec=TaskManager) +def submit_task_return_value() -> TaskUUID: + return TaskUUID(_faker.uuid4()) + + +@pytest.fixture +def cancel_task_return_value() -> None: + return None + - # Example return values (replace with realistic objects as needed) - mock.submit_task.return_value = TaskUUID(_faker.uuid4()) - mock.cancel_task.return_value = None - mock.get_task_result.return_value = {"result": "example"} +@pytest.fixture +def get_task_result_return_value() -> dict: + return {"result": "example"} + + +@pytest.fixture +def get_task_status_return_value() -> TaskStatus: status_extra = TaskStatus.model_config.get("json_schema_extra") assert status_extra is not None status_examples = status_extra.get("examples") assert isinstance(status_examples, list) assert len(status_examples) > 0 - mock.get_task_status.return_value = TaskStatus.model_validate(status_examples[0]) + return TaskStatus.model_validate(status_examples[0]) + + +@pytest.fixture +def list_tasks_return_value() -> list[Task]: list_extra = Task.model_config.get("json_schema_extra") assert isinstance(list_extra, dict) list_examples = list_extra.get("examples") assert isinstance(list_examples, list) assert len(list_examples) > 0 - mock.list_tasks.return_value = [ - Task.model_validate(example) for example in list_examples - ] - mock.set_task_progress.return_value = None + return [Task.model_validate(example) for example in list_examples] + + +@pytest.fixture +def set_task_progress_return_value() -> None: + return None + + +@pytest.fixture +def mock_task_manager_object( + mocker: MockerFixture, + submit_task_return_value, + cancel_task_return_value, + get_task_result_return_value, + get_task_status_return_value, + list_tasks_return_value, + set_task_progress_return_value, +) -> MockType: + """ + Returns a TaskManager mock with 
overridable return values for each method. + If a return value is an Exception, the method will raise it. + """ + mock = mocker.Mock(spec=TaskManager) + + def _set_return_or_raise(method, value): + if isinstance(value, Exception): + method.side_effect = lambda *a, **kw: (_ for _ in ()).throw(value) + else: + method.return_value = value + + _set_return_or_raise(mock.submit_task, submit_task_return_value) + _set_return_or_raise(mock.cancel_task, cancel_task_return_value) + _set_return_or_raise(mock.get_task_result, get_task_result_return_value) + _set_return_or_raise(mock.get_task_status, get_task_status_return_value) + _set_return_or_raise(mock.list_tasks, list_tasks_return_value) + _set_return_or_raise(mock.set_task_progress, set_task_progress_return_value) return mock @pytest.fixture -def mock_task_manager_raising_factory( +def mock_task_manager_object_raising_factory( mocker: MockerFixture, ) -> Callable[[Exception], MockType]: def _factory(task_manager_exception: Exception) -> MockType: From bc263b0c7404c72846836bc4779b3097c93f4156 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 21 Aug 2025 22:09:16 +0200 Subject: [PATCH 041/111] first attempt to add boot script for api-server worker --- services/api-server/docker/boot.sh | 60 +++++++++++++++++++++--------- 1 file changed, 43 insertions(+), 17 deletions(-) diff --git a/services/api-server/docker/boot.sh b/services/api-server/docker/boot.sh index 0f19b262c78..1436dda2838 100755 --- a/services/api-server/docker/boot.sh +++ b/services/api-server/docker/boot.sh @@ -39,23 +39,49 @@ APP_LOG_LEVEL=${API_SERVER_LOGLEVEL:-${LOG_LEVEL:-${LOGLEVEL:-INFO}}} SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" -if [ "${SC_BOOT_MODE}" = "debug" ]; then - reload_dir_packages=$(fdfind src /devel/packages --exec echo '--reload-dir {} ' | tr '\n' ' ') +if [ "${API_SERVER_WORKER_MODE}" = "true" ]; then + if [ "${SC_BOOT_MODE}" = 
"debug" ]; then + exec watchmedo auto-restart \ + --directory /devel/packages \ + --directory services/api-server \ + --pattern "*.py" \ + --recursive \ + -- \ + celery \ + --app=simcore_service_api_server.celery.worker_main:app_factory \ + worker --pool=threads \ + --loglevel="${API_SERVER_LOGLEVEL}" \ + --concurrency="${CELERY_CONCURRENCY}" \ + --hostname="${API_SERVER_WORKER_NAME}" \ + --queues="${CELERY_QUEUES:-default}" + else + exec celery \ + --app=simcore_service_api_server.celery.worker_main:app_factory \ + worker --pool=threads \ + --loglevel="${API_SERVER_LOGLEVEL}" \ + --concurrency="${CELERY_CONCURRENCY}" \ + --hostname="${API_SERVER_WORKER_NAME}" \ + --queues="${CELERY_QUEUES:-default}" + fi +else + if [ "${SC_BOOT_MODE}" = "debug" ]; then + reload_dir_packages=$(fdfind src /devel/packages --exec echo '--reload-dir {} ' | tr '\n' ' ') - exec sh -c " - cd services/api-server/src/simcore_service_api_server && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${API_SERVER_REMOTE_DEBUG_PORT} -m \ - uvicorn \ - --factory main:app_factory \ + exec sh -c " + cd services/api-server/src/simcore_service_api_server && \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${API_SERVER_REMOTE_DEBUG_PORT} -m \ + uvicorn \ + --factory main:app_factory \ + --host 0.0.0.0 \ + --reload \ + $reload_dir_packages \ + --reload-dir . \ + --log-level \"${SERVER_LOG_LEVEL}\" + " + else + exec uvicorn \ + --factory simcore_service_api_server.main:app_factory \ --host 0.0.0.0 \ - --reload \ - $reload_dir_packages \ - --reload-dir . 
\ - --log-level \"${SERVER_LOG_LEVEL}\" - " -else - exec uvicorn \ - --factory simcore_service_api_server.main:app_factory \ - --host 0.0.0.0 \ - --log-level "${SERVER_LOG_LEVEL}" + --log-level "${SERVER_LOG_LEVEL}" + fi fi From 6b737917225874ca17ff88a0c35bd8d38e18fb20 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 21 Aug 2025 22:18:11 +0200 Subject: [PATCH 042/111] add health check to api-server celery worker --- services/api-server/docker/healthcheck.py | 31 +++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/services/api-server/docker/healthcheck.py b/services/api-server/docker/healthcheck.py index 808782f3261..66ba806d0db 100755 --- a/services/api-server/docker/healthcheck.py +++ b/services/api-server/docker/healthcheck.py @@ -18,18 +18,49 @@ """ import os +import subprocess import sys from urllib.request import urlopen +from simcore_service_api_server.core.settings import ApplicationSettings + SUCCESS, UNHEALTHY = 0, 1 # Disabled if boots with debugger ok = os.environ.get("SC_BOOT_MODE", "").lower() == "debug" +app_settings = ApplicationSettings.create_from_envs() + + +def _is_celery_worker_healthy(): + assert app_settings.API_SERVER_CELERY + broker_url = app_settings.API_SERVER_CELERY.CELERY_RABBIT_BROKER.dsn + + try: + result = subprocess.run( + [ + "celery", + "--broker", + broker_url, + "inspect", + "ping", + "--destination", + "celery@" + os.getenv("API_SERVER_WORKER_NAME", "worker"), + ], + capture_output=True, + text=True, + check=True, + ) + return "pong" in result.stdout + except subprocess.CalledProcessError: + return False + + # Queries host # pylint: disable=consider-using-with ok = ( ok + or (app_settings.API_SERVER_WORKER_MODE and _is_celery_worker_healthy()) or urlopen( "{host}{baseurl}".format( host=sys.argv[1], baseurl=os.environ.get("SIMCORE_NODE_BASEPATH", "") From 6c10aedae6ce81b94c4f6b8145ee34ce14e85bb7 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 21 Aug 2025 22:34:19 +0200 Subject: [PATCH 043/111] add 
asgi_lifespan to servicelib.fastapi reqs as it was missing --- packages/service-library/requirements/_fastapi.in | 2 +- packages/service-library/requirements/_fastapi.txt | 6 +++++- packages/service-library/requirements/_test.txt | 4 +++- services/agent/requirements/_base.txt | 6 +++++- services/agent/requirements/_test.txt | 4 +++- services/api-server/requirements/_base.txt | 3 +++ services/api-server/requirements/_test.txt | 4 +++- services/autoscaling/requirements/_base.txt | 6 +++++- services/autoscaling/requirements/_test.txt | 4 +++- services/catalog/requirements/_base.txt | 6 +++++- services/catalog/requirements/_test.txt | 4 +++- services/clusters-keeper/requirements/_base.txt | 6 +++++- services/clusters-keeper/requirements/_test.txt | 4 +++- services/datcore-adapter/requirements/_base.txt | 3 +++ services/datcore-adapter/requirements/_test.txt | 4 +++- services/director-v2/requirements/_base.txt | 6 +++++- services/director-v2/requirements/_test.txt | 4 +++- services/director/requirements/_base.txt | 3 +++ services/director/requirements/_test.txt | 4 +++- services/dynamic-scheduler/requirements/_base.txt | 6 +++++- services/dynamic-scheduler/requirements/_test.txt | 4 +++- services/dynamic-sidecar/requirements/_base.txt | 6 +++++- services/dynamic-sidecar/requirements/_test.txt | 4 +++- services/efs-guardian/requirements/_base.txt | 6 +++++- services/efs-guardian/requirements/_test.txt | 4 +++- services/invitations/requirements/_base.txt | 6 +++++- services/notifications/requirements/_base.txt | 6 +++++- services/notifications/requirements/_test.txt | 4 +++- services/payments/requirements/_base.txt | 3 +++ services/payments/requirements/_test.txt | 4 +++- services/resource-usage-tracker/requirements/_base.txt | 3 +++ services/resource-usage-tracker/requirements/_test.txt | 4 +++- services/storage/requirements/_base.txt | 4 +++- 33 files changed, 119 insertions(+), 28 deletions(-) diff --git a/packages/service-library/requirements/_fastapi.in 
b/packages/service-library/requirements/_fastapi.in index 3303e6043af..2cb3642fc8c 100644 --- a/packages/service-library/requirements/_fastapi.in +++ b/packages/service-library/requirements/_fastapi.in @@ -3,7 +3,7 @@ # # - +asgi_lifespan fastapi[standard] fastapi-lifespan-manager httpx[http2] diff --git a/packages/service-library/requirements/_fastapi.txt b/packages/service-library/requirements/_fastapi.txt index ea13f1a8db8..72f25480f66 100644 --- a/packages/service-library/requirements/_fastapi.txt +++ b/packages/service-library/requirements/_fastapi.txt @@ -5,6 +5,8 @@ anyio==4.8.0 # httpx # starlette # watchfiles +asgi-lifespan==2.1.0 + # via -r requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi certifi==2025.1.31 @@ -115,7 +117,9 @@ rich-toolkit==0.14.7 shellingham==1.5.4 # via typer sniffio==1.3.1 - # via anyio + # via + # anyio + # asgi-lifespan starlette==0.46.0 # via fastapi typer==0.16.0 diff --git a/packages/service-library/requirements/_test.txt b/packages/service-library/requirements/_test.txt index c8bd758d450..c2b48dd6539 100644 --- a/packages/service-library/requirements/_test.txt +++ b/packages/service-library/requirements/_test.txt @@ -20,7 +20,9 @@ anyio==4.8.0 # -c requirements/_fastapi.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_fastapi.txt + # -r requirements/_test.in attrs==25.1.0 # via # -c requirements/_aiohttp.txt diff --git a/services/agent/requirements/_base.txt b/services/agent/requirements/_base.txt index 3d223fe81c8..420b6839b5f 100644 --- a/services/agent/requirements/_base.txt +++ b/services/agent/requirements/_base.txt @@ -45,6 +45,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r 
requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi attrs==25.1.0 @@ -434,7 +436,9 @@ shellingham==1.5.4 six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via anyio + # via + # anyio + # asgi-lifespan starlette==0.46.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt diff --git a/services/agent/requirements/_test.txt b/services/agent/requirements/_test.txt index 8fc6688ba12..e3ff60371ea 100644 --- a/services/agent/requirements/_test.txt +++ b/services/agent/requirements/_test.txt @@ -32,7 +32,9 @@ anyio==4.8.0 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in attrs==25.1.0 # via # -c requirements/_base.txt diff --git a/services/api-server/requirements/_base.txt b/services/api-server/requirements/_base.txt index 2733185971e..bd65037d16c 100644 --- a/services/api-server/requirements/_base.txt +++ b/services/api-server/requirements/_base.txt @@ -102,6 +102,8 @@ arrow==1.3.0 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi asyncpg==0.30.0 @@ -952,6 +954,7 @@ six==1.17.0 sniffio==1.3.1 # via # anyio + # asgi-lifespan # httpx sqlalchemy==1.4.54 # via diff --git a/services/api-server/requirements/_test.txt b/services/api-server/requirements/_test.txt index bfb36091f16..677d1ed830c 100644 --- 
a/services/api-server/requirements/_test.txt +++ b/services/api-server/requirements/_test.txt @@ -31,7 +31,9 @@ anyio==4.7.0 # httpx # starlette asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in attrs==24.2.0 # via # -c requirements/_base.txt diff --git a/services/autoscaling/requirements/_base.txt b/services/autoscaling/requirements/_base.txt index f8991b81564..024a21c2403 100644 --- a/services/autoscaling/requirements/_base.txt +++ b/services/autoscaling/requirements/_base.txt @@ -80,6 +80,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi attrs==25.3.0 @@ -713,7 +715,9 @@ shellingham==1.5.4 six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via anyio + # via + # anyio + # asgi-lifespan sortedcontainers==2.4.0 # via # -c requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt diff --git a/services/autoscaling/requirements/_test.txt b/services/autoscaling/requirements/_test.txt index bf95c3c2725..1a7e5c16243 100644 --- a/services/autoscaling/requirements/_test.txt +++ b/services/autoscaling/requirements/_test.txt @@ -9,7 +9,9 @@ anyio==4.9.0 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in attrs==25.3.0 # via # -c requirements/_base.txt diff --git a/services/catalog/requirements/_base.txt b/services/catalog/requirements/_base.txt index a69012ff573..063d2a00731 100644 --- a/services/catalog/requirements/_base.txt +++ b/services/catalog/requirements/_base.txt @@ -49,6 +49,8 @@ 
arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi asyncpg==0.30.0 @@ -491,7 +493,9 @@ shellingham==1.5.4 six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via anyio + # via + # anyio + # asgi-lifespan sqlalchemy==1.4.54 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt diff --git a/services/catalog/requirements/_test.txt b/services/catalog/requirements/_test.txt index 220e76e0e44..ff3aa9f099f 100644 --- a/services/catalog/requirements/_test.txt +++ b/services/catalog/requirements/_test.txt @@ -20,7 +20,9 @@ anyio==4.8.0 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in attrs==25.2.0 # via # -c requirements/_base.txt diff --git a/services/clusters-keeper/requirements/_base.txt b/services/clusters-keeper/requirements/_base.txt index 21aa26e78d1..febf94edfdc 100644 --- a/services/clusters-keeper/requirements/_base.txt +++ b/services/clusters-keeper/requirements/_base.txt @@ -78,6 +78,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi attrs==25.3.0 @@ -711,7 +713,9 @@ shellingham==1.5.4 six==1.17.0 # via python-dateutil 
sniffio==1.3.1 - # via anyio + # via + # anyio + # asgi-lifespan sortedcontainers==2.4.0 # via # -c requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt diff --git a/services/clusters-keeper/requirements/_test.txt b/services/clusters-keeper/requirements/_test.txt index ea7958add8f..745bb427a57 100644 --- a/services/clusters-keeper/requirements/_test.txt +++ b/services/clusters-keeper/requirements/_test.txt @@ -26,7 +26,9 @@ anyio==4.9.0 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in attrs==25.3.0 # via # -c requirements/_base.txt diff --git a/services/datcore-adapter/requirements/_base.txt b/services/datcore-adapter/requirements/_base.txt index 5fc065474e5..f9a24abe2c3 100644 --- a/services/datcore-adapter/requirements/_base.txt +++ b/services/datcore-adapter/requirements/_base.txt @@ -47,6 +47,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi attrs==23.2.0 @@ -453,6 +455,7 @@ six==1.16.0 sniffio==1.3.1 # via # anyio + # asgi-lifespan # httpx starlette==0.41.0 # via diff --git a/services/datcore-adapter/requirements/_test.txt b/services/datcore-adapter/requirements/_test.txt index 4f7ef83300e..0d1aa35b691 100644 --- a/services/datcore-adapter/requirements/_test.txt +++ b/services/datcore-adapter/requirements/_test.txt @@ -3,7 +3,9 @@ anyio==4.3.0 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in boto3-stubs==1.37.4 # via -r 
requirements/_test.in botocore-stubs==1.37.4 diff --git a/services/director-v2/requirements/_base.txt b/services/director-v2/requirements/_base.txt index 1f17e93adbf..a607c5c2e53 100644 --- a/services/director-v2/requirements/_base.txt +++ b/services/director-v2/requirements/_base.txt @@ -89,6 +89,8 @@ arrow==1.3.0 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi asyncpg==0.30.0 @@ -886,7 +888,9 @@ simple-websocket==1.1.0 six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via anyio + # via + # anyio + # asgi-lifespan sortedcontainers==2.4.0 # via # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt diff --git a/services/director-v2/requirements/_test.txt b/services/director-v2/requirements/_test.txt index 619015cc94b..b03a4faf652 100644 --- a/services/director-v2/requirements/_test.txt +++ b/services/director-v2/requirements/_test.txt @@ -38,7 +38,9 @@ anyio==4.9.0 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in async-asgi-testclient==1.4.11 # via -r requirements/_test.in attrs==25.3.0 diff --git a/services/director/requirements/_base.txt b/services/director/requirements/_base.txt index 733fdc9a602..d2e359606a8 100644 --- a/services/director/requirements/_base.txt +++ b/services/director/requirements/_base.txt @@ -47,6 +47,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi attrs==24.2.0 @@ -433,6 +435,7 @@ six==1.16.0 sniffio==1.3.1 # via # anyio + # asgi-lifespan # httpx starlette==0.41.3 # via diff --git a/services/director/requirements/_test.txt b/services/director/requirements/_test.txt index 402f65b9cdc..088797674b0 100644 --- a/services/director/requirements/_test.txt +++ b/services/director/requirements/_test.txt @@ -18,7 +18,9 @@ anyio==4.6.2.post1 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in attrs==24.2.0 # via # -c requirements/_base.txt diff --git a/services/dynamic-scheduler/requirements/_base.txt b/services/dynamic-scheduler/requirements/_base.txt index 1a86e0488c7..3321ae84cce 100644 --- a/services/dynamic-scheduler/requirements/_base.txt +++ b/services/dynamic-scheduler/requirements/_base.txt @@ -51,6 +51,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi asyncpg==0.30.0 @@ -520,7 +522,9 @@ simple-websocket==1.1.0 six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via anyio + # via + # anyio + # asgi-lifespan sqlalchemy==1.4.54 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt diff --git 
a/services/dynamic-scheduler/requirements/_test.txt b/services/dynamic-scheduler/requirements/_test.txt index 5142eac3843..69fccfb42b8 100644 --- a/services/dynamic-scheduler/requirements/_test.txt +++ b/services/dynamic-scheduler/requirements/_test.txt @@ -3,7 +3,9 @@ anyio==4.8.0 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in certifi==2025.1.31 # via # -c requirements/../../../requirements/constraints.txt diff --git a/services/dynamic-sidecar/requirements/_base.txt b/services/dynamic-sidecar/requirements/_base.txt index ecbfd0b75d0..1fe397dce6e 100644 --- a/services/dynamic-sidecar/requirements/_base.txt +++ b/services/dynamic-sidecar/requirements/_base.txt @@ -84,6 +84,8 @@ arrow==1.3.0 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi asyncpg==0.30.0 @@ -744,7 +746,9 @@ simple-websocket==1.1.0 six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via anyio + # via + # anyio + # asgi-lifespan sqlalchemy==1.4.54 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt diff --git a/services/dynamic-sidecar/requirements/_test.txt b/services/dynamic-sidecar/requirements/_test.txt index 764d36fb7bf..06bb5e5b8eb 100644 --- a/services/dynamic-sidecar/requirements/_test.txt +++ b/services/dynamic-sidecar/requirements/_test.txt @@ -22,7 +22,9 @@ aiosignal==1.3.2 # -c requirements/_base.txt # aiohttp 
asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in async-asgi-testclient==1.4.11 # via -r requirements/_test.in attrs==25.1.0 diff --git a/services/efs-guardian/requirements/_base.txt b/services/efs-guardian/requirements/_base.txt index fbbf5d7ab4d..65e0b2a1397 100644 --- a/services/efs-guardian/requirements/_base.txt +++ b/services/efs-guardian/requirements/_base.txt @@ -82,6 +82,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi asyncpg==0.30.0 @@ -725,7 +727,9 @@ shellingham==1.5.4 six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via anyio + # via + # anyio + # asgi-lifespan sqlalchemy==1.4.54 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt diff --git a/services/efs-guardian/requirements/_test.txt b/services/efs-guardian/requirements/_test.txt index a499913d8b3..0b5839cd061 100644 --- a/services/efs-guardian/requirements/_test.txt +++ b/services/efs-guardian/requirements/_test.txt @@ -26,7 +26,9 @@ anyio==4.9.0 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in attrs==25.3.0 # via # -c requirements/_base.txt diff --git a/services/invitations/requirements/_base.txt b/services/invitations/requirements/_base.txt index 361e11fb160..c683d5a4503 100644 --- a/services/invitations/requirements/_base.txt +++ b/services/invitations/requirements/_base.txt @@ -43,6 +43,8 @@ arrow==1.3.0 # -r 
requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi attrs==25.2.0 @@ -450,7 +452,9 @@ shellingham==1.5.4 six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via anyio + # via + # anyio + # asgi-lifespan starlette==0.46.1 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt diff --git a/services/notifications/requirements/_base.txt b/services/notifications/requirements/_base.txt index 85f427e1d6c..fc16778153f 100644 --- a/services/notifications/requirements/_base.txt +++ b/services/notifications/requirements/_base.txt @@ -47,6 +47,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi asyncpg==0.30.0 @@ -483,7 +485,9 @@ shellingham==1.5.4 six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via anyio + # via + # anyio + # asgi-lifespan sqlalchemy==1.4.54 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt diff --git a/services/notifications/requirements/_test.txt b/services/notifications/requirements/_test.txt index b62b83d51d4..4e8422dc614 100644 --- a/services/notifications/requirements/_test.txt +++ b/services/notifications/requirements/_test.txt @@ -3,7 +3,9 @@ anyio==4.9.0 # -c requirements/_base.txt # 
httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in certifi==2025.1.31 # via # -c requirements/../../../requirements/constraints.txt diff --git a/services/payments/requirements/_base.txt b/services/payments/requirements/_base.txt index 588182ee746..f4c28956994 100644 --- a/services/payments/requirements/_base.txt +++ b/services/payments/requirements/_base.txt @@ -49,6 +49,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi asyncpg==0.30.0 @@ -546,6 +548,7 @@ six==1.16.0 sniffio==1.3.1 # via # anyio + # asgi-lifespan # httpx sqlalchemy==1.4.54 # via diff --git a/services/payments/requirements/_test.txt b/services/payments/requirements/_test.txt index d4f4c71482b..fd0acddfc74 100644 --- a/services/payments/requirements/_test.txt +++ b/services/payments/requirements/_test.txt @@ -16,7 +16,9 @@ anyio==4.6.2.post1 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in attrs==24.2.0 # via # -c requirements/_base.txt diff --git a/services/resource-usage-tracker/requirements/_base.txt b/services/resource-usage-tracker/requirements/_base.txt index 4f0e4a27cd9..11653dfeb4e 100644 --- a/services/resource-usage-tracker/requirements/_base.txt +++ b/services/resource-usage-tracker/requirements/_base.txt @@ -83,6 +83,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi async-timeout==4.0.3 @@ -771,6 +773,7 @@ six==1.16.0 sniffio==1.3.1 # via # anyio + # asgi-lifespan # httpx sqlalchemy==1.4.52 # via diff --git a/services/resource-usage-tracker/requirements/_test.txt b/services/resource-usage-tracker/requirements/_test.txt index 6ffb5d4cb7d..b30491bbabd 100644 --- a/services/resource-usage-tracker/requirements/_test.txt +++ b/services/resource-usage-tracker/requirements/_test.txt @@ -13,7 +13,9 @@ anyio==4.3.0 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in attrs==23.2.0 # via # -c requirements/_base.txt diff --git a/services/storage/requirements/_base.txt b/services/storage/requirements/_base.txt index 48bb3d2baa8..5800a52c185 100644 --- a/services/storage/requirements/_base.txt +++ b/services/storage/requirements/_base.txt @@ -108,7 +108,9 @@ arrow==1.3.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in asgi-lifespan==2.1.0 - # via -r requirements/_base.in + # via + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/_base.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi asyncpg==0.30.0 From 157bbac024f1984e8d5dc98ae4fdf1d45899744a Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 21 Aug 2025 22:58:11 +0200 Subject: [PATCH 044/111] fixes to make services functional again --- .../simcore_service_api_server/api/routes/functions_routes.py | 4 ++-- 
.../src/simcore_service_api_server/celery/worker_main.py | 2 +- .../celery/worker_tasks/__init__.py | 0 .../_functions_tasks.py => worker_tasks/functions_tasks.py} | 0 .../celery/{_worker_tasks => worker_tasks}/tasks.py | 2 +- services/docker-compose.yml | 1 + 6 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 services/api-server/src/simcore_service_api_server/celery/worker_tasks/__init__.py rename services/api-server/src/simcore_service_api_server/celery/{_worker_tasks/_functions_tasks.py => worker_tasks/functions_tasks.py} (100%) rename services/api-server/src/simcore_service_api_server/celery/{_worker_tasks => worker_tasks}/tasks.py (95%) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index 9943131eb93..d06e83332cc 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -27,7 +27,7 @@ from ..._service_function_jobs import FunctionJobService from ..._service_functions import FunctionService -from ...celery._worker_tasks._functions_tasks import run_function +from ...celery.worker_tasks.functions_tasks import run_function as run_function_task from ...models.pagination import Page, PaginationParams from ...models.schemas.errors import ErrorGet from ...models.schemas.jobs import JobPricingSpecification @@ -351,7 +351,7 @@ async def run_function( # noqa: PLR0913 client_name=ASYNC_JOB_CLIENT_NAME, ) task_filter = TaskFilter.model_validate(job_filter.model_dump()) - task_name = run_function.__name__ + task_name = run_function_task.__name__ task_uuid = await task_manager.submit_task( TaskMetadata( diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_main.py b/services/api-server/src/simcore_service_api_server/celery/worker_main.py index b3ad5f02cc7..82badf2e3f5 100644 --- 
a/services/api-server/src/simcore_service_api_server/celery/worker_main.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_main.py @@ -13,7 +13,7 @@ from ..core.application import create_app from ..core.settings import ApplicationSettings -from ._worker_tasks.tasks import setup_worker_tasks +from .worker_tasks.tasks import setup_worker_tasks def app_factory(): diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/__init__.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/api-server/src/simcore_service_api_server/celery/_worker_tasks/_functions_tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py similarity index 100% rename from services/api-server/src/simcore_service_api_server/celery/_worker_tasks/_functions_tasks.py rename to services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py diff --git a/services/api-server/src/simcore_service_api_server/celery/_worker_tasks/tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py similarity index 95% rename from services/api-server/src/simcore_service_api_server/celery/_worker_tasks/tasks.py rename to services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py index 45cafcf45c7..fce9caec484 100644 --- a/services/api-server/src/simcore_service_api_server/celery/_worker_tasks/tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py @@ -13,7 +13,7 @@ from ...api.dependencies.authentication import Identity from ...models.api_resources import JobLinks from ...models.schemas.jobs import JobPricingSpecification -from ._functions_tasks import run_function +from .functions_tasks import run_function _logger = logging.getLogger(__name__) diff --git a/services/docker-compose.yml 
b/services/docker-compose.yml index 3438963372d..63ff2bfcc97 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -32,6 +32,7 @@ services: API_SERVER_LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} API_SERVER_LOGLEVEL: ${API_SERVER_LOGLEVEL} API_SERVER_PROFILING: ${API_SERVER_PROFILING} + API_SERVER_WORKER_MODE: "false" CATALOG_HOST: ${CATALOG_HOST} CATALOG_PORT: ${CATALOG_PORT} From 568817b87d6ad2e0a79ec1da8678044d5f8343b5 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 21 Aug 2025 23:15:49 +0200 Subject: [PATCH 045/111] add redis env vars to api-server --- services/api-server/docker/boot.sh | 4 ++-- .../src/simcore_service_api_server/celery/worker_main.py | 4 ++-- services/docker-compose.yml | 6 ++++++ 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/services/api-server/docker/boot.sh b/services/api-server/docker/boot.sh index 1436dda2838..f597ffb1912 100755 --- a/services/api-server/docker/boot.sh +++ b/services/api-server/docker/boot.sh @@ -48,7 +48,7 @@ if [ "${API_SERVER_WORKER_MODE}" = "true" ]; then --recursive \ -- \ celery \ - --app=simcore_service_api_server.celery.worker_main:app_factory \ + --app=simcore_service_api_server.celery.worker_main:app \ worker --pool=threads \ --loglevel="${API_SERVER_LOGLEVEL}" \ --concurrency="${CELERY_CONCURRENCY}" \ @@ -56,7 +56,7 @@ if [ "${API_SERVER_WORKER_MODE}" = "true" ]; then --queues="${CELERY_QUEUES:-default}" else exec celery \ - --app=simcore_service_api_server.celery.worker_main:app_factory \ + --app=simcore_service_api_server.celery.worker_main:app \ worker --pool=threads \ --loglevel="${API_SERVER_LOGLEVEL}" \ --concurrency="${CELERY_CONCURRENCY}" \ diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_main.py b/services/api-server/src/simcore_service_api_server/celery/worker_main.py index 82badf2e3f5..b47c61fbfeb 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_main.py +++ 
b/services/api-server/src/simcore_service_api_server/celery/worker_main.py @@ -42,7 +42,7 @@ def worker_init_wrapper(sender, **_kwargs): worker_shutdown.connect(on_worker_shutdown) setup_worker_tasks(app) + return app -if __name__ == "__main__": - app_factory() +app = app_factory() diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 63ff2bfcc97..77e5e42751a 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -52,6 +52,12 @@ services: RABBIT_SECURE: ${RABBIT_SECURE} RABBIT_USER: ${RABBIT_USER} + REDIS_HOST: ${REDIS_HOST} + REDIS_PORT: ${REDIS_PORT} + REDIS_SECURE: ${REDIS_SECURE} + REDIS_USER: ${REDIS_USER} + REDIS_PASSWORD: ${REDIS_PASSWORD} + STORAGE_HOST: ${STORAGE_HOST} STORAGE_PORT: ${STORAGE_PORT} From e8683a9ce76e74423ef74a7ca0dab46ee13c48b0 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Fri, 22 Aug 2025 09:20:41 +0200 Subject: [PATCH 046/111] add separate rabbitmq queue for api-worker --- .../src/servicelib/celery/models.py | 1 + .../api/routes/functions_routes.py | 4 +- .../celery/worker_main.py | 47 ++++++++++--------- .../api-server/tests/unit/celery/conftest.py | 2 +- .../tests/unit/celery/test_functions.py | 8 ++-- services/docker-compose.yml | 1 + 6 files changed, 35 insertions(+), 28 deletions(-) diff --git a/packages/service-library/src/servicelib/celery/models.py b/packages/service-library/src/servicelib/celery/models.py index 7a253781005..3e8605fb306 100644 --- a/packages/service-library/src/servicelib/celery/models.py +++ b/packages/service-library/src/servicelib/celery/models.py @@ -28,6 +28,7 @@ class TaskState(StrEnum): class TasksQueue(StrEnum): CPU_BOUND = "cpu_bound" DEFAULT = "default" + API_WORKER_QUEUE = "api_worker_queue" class TaskMetadata(BaseModel): diff --git a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index d06e83332cc..9e9fc81f81e 100644 --- 
a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -21,7 +21,7 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.users import UserID -from servicelib.celery.models import TaskFilter, TaskMetadata +from servicelib.celery.models import TaskFilter, TaskMetadata, TasksQueue from servicelib.fastapi.dependencies import get_reverse_url_mapper from servicelib.long_running_tasks.models import TaskGet @@ -356,6 +356,8 @@ async def run_function( # noqa: PLR0913 task_uuid = await task_manager.submit_task( TaskMetadata( name=task_name, + ephemeral=True, + queue=TasksQueue.API_WORKER_QUEUE, ), task_filter=task_filter, user_identity=user_identity, diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_main.py b/services/api-server/src/simcore_service_api_server/celery/worker_main.py index b47c61fbfeb..4438625afd0 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_main.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_main.py @@ -16,33 +16,34 @@ from .worker_tasks.tasks import setup_worker_tasks -def app_factory(): - _settings = ApplicationSettings.create_from_envs() - - setup_loggers( - log_format_local_dev_enabled=_settings.API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=_settings.API_SERVER_LOG_FILTER_MAPPING, - tracing_settings=_settings.API_SERVER_TRACING, - log_base_level=_settings.log_level, - noisy_loggers=None, - ) +def _get_settings() -> ApplicationSettings: + return ApplicationSettings.create_from_envs() - assert _settings.API_SERVER_CELERY # nosec - app = create_celery_app(_settings.API_SERVER_CELERY) - app_server = FastAPIAppServer(app=create_app(_settings)) +_settings = _get_settings() + +setup_loggers( + log_format_local_dev_enabled=_settings.API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, + 
logger_filter_mapping=_settings.API_SERVER_LOG_FILTER_MAPPING, + tracing_settings=_settings.API_SERVER_TRACING, + log_base_level=_settings.log_level, + noisy_loggers=None, +) + +assert _settings.API_SERVER_CELERY # nosec +app = create_celery_app(_settings.API_SERVER_CELERY) + +app_server = FastAPIAppServer(app=create_app(_settings)) - def worker_init_wrapper(sender, **_kwargs): - assert _settings.API_SERVER_CELERY # nosec - return partial(on_worker_init, app_server, _settings.API_SERVER_CELERY)( - sender, **_kwargs - ) - worker_init.connect(worker_init_wrapper) - worker_shutdown.connect(on_worker_shutdown) +def worker_init_wrapper(sender, **_kwargs): + assert _settings.API_SERVER_CELERY # nosec + return partial(on_worker_init, app_server, _settings.API_SERVER_CELERY)( + sender, **_kwargs + ) - setup_worker_tasks(app) - return app +worker_init.connect(worker_init_wrapper) +worker_shutdown.connect(on_worker_shutdown) -app = app_factory() +setup_worker_tasks(app) diff --git a/services/api-server/tests/unit/celery/conftest.py b/services/api-server/tests/unit/celery/conftest.py index c4e0c1ab319..69d76503f4b 100644 --- a/services/api-server/tests/unit/celery/conftest.py +++ b/services/api-server/tests/unit/celery/conftest.py @@ -135,6 +135,6 @@ def _on_worker_init_wrapper(sender: WorkController, **_kwargs): concurrency=1, loglevel="info", perform_ping_check=False, - queues="default", + queues="api_worker_queue", ) as worker: yield worker diff --git a/services/api-server/tests/unit/celery/test_functions.py b/services/api-server/tests/unit/celery/test_functions.py index 9d71ccdb30f..cc1f5cbde19 100644 --- a/services/api-server/tests/unit/celery/test_functions.py +++ b/services/api-server/tests/unit/celery/test_functions.py @@ -25,7 +25,7 @@ RegisteredProjectFunctionJob, ) from models_library.projects import ProjectID -from servicelib.celery.models import TaskFilter, TaskID, TaskMetadata +from servicelib.celery.models import TaskFilter, TaskID, TaskMetadata, TasksQueue 
from servicelib.common_headers import ( X_SIMCORE_PARENT_NODE_ID, X_SIMCORE_PARENT_PROJECT_UUID, @@ -37,7 +37,7 @@ get_task_manager, ) from simcore_service_api_server.api.routes.functions_routes import get_function -from simcore_service_api_server.celery._worker_tasks._functions_tasks import ( +from simcore_service_api_server.celery.worker_tasks.functions_tasks import ( run_function as run_function_task, ) from simcore_service_api_server.exceptions.backend_errors import BaseBackEndError @@ -211,7 +211,9 @@ async def test_celery_error_propagation( ) task_manager = get_task_manager(app=app) task_uuid = await task_manager.submit_task( - task_metadata=TaskMetadata(name="exception_task"), + task_metadata=TaskMetadata( + name="exception_task", queue=TasksQueue.API_WORKER_QUEUE + ), task_filter=TaskFilter.model_validate(job_filter.model_dump()), ) diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 77e5e42751a..0c8c7781d97 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -95,6 +95,7 @@ services: API_SERVER_WORKER_NAME: "api-worker-{{.Node.Hostname}}-{{.Task.Slot}}-{{.Task.ID}}" API_SERVER_WORKER_MODE: "true" CELERY_CONCURRENCY: 100 + CELERY_QUEUES: "api_worker_queue" networks: *api_server_networks From 1400a42bd49a6620ee5a5c844851e1bafd3c82b4 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Fri, 22 Aug 2025 10:39:41 +0200 Subject: [PATCH 047/111] add test of task function itself --- .../celery/worker_tasks/functions_tasks.py | 5 +- .../models/api_resources.py | 14 ++- .../test_api_routers_functions.py | 108 ++++++++++++++++++ services/api-server/tests/unit/conftest.py | 24 ++++ 4 files changed, 147 insertions(+), 4 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py index 63b5fd8fe6a..e7660d6c38b 100644 --- 
a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py @@ -8,7 +8,6 @@ from ...api.dependencies.authentication import Identity from ...api.dependencies.rabbitmq import get_rabbitmq_rpc_client from ...api.dependencies.services import ( - get_api_client, get_catalog_service, get_directorv2_service, get_function_job_service, @@ -42,9 +41,9 @@ async def _assemble_function_job_service(*, app: FastAPI, user_identity: Identit app=app, session_cookies=session_cookie, identity=user_identity ) web_api_rpc_client = await get_wb_api_rpc_client(app=app) - director2_api = get_api_client(DirectorV2Api) + director2_api = DirectorV2Api.get_instance(app=app) assert isinstance(director2_api, DirectorV2Api) # nosec - storage_api = get_api_client(StorageApi) + storage_api = StorageApi.get_instance(app=app) assert isinstance(storage_api, StorageApi) # nosec catalog_service = get_catalog_service( rpc_client=rpc_client, diff --git a/services/api-server/src/simcore_service_api_server/models/api_resources.py b/services/api-server/src/simcore_service_api_server/models/api_resources.py index b16a0414b83..9f82f4f1997 100644 --- a/services/api-server/src/simcore_service_api_server/models/api_resources.py +++ b/services/api-server/src/simcore_service_api_server/models/api_resources.py @@ -4,7 +4,7 @@ from uuid import UUID import parse # type: ignore[import-untyped] -from pydantic import AfterValidator, BaseModel, Field, HttpUrl, TypeAdapter +from pydantic import AfterValidator, BaseModel, ConfigDict, Field, HttpUrl, TypeAdapter from pydantic.types import StringConstraints # RESOURCE NAMES https://google.aip.dev/122 @@ -103,6 +103,18 @@ def _url_missing_only_job_id(url: str | None) -> str | None: class JobLinks(BaseModel): + model_config = ConfigDict( + json_schema_extra={ + "examples": [ + { + "url_template": "https://api.osparc.io/v0/jobs/{job_id}", + 
"runner_url_template": "https://runner.osparc.io/dashboard", + "outputs_url_template": "https://api.osparc.io/v0/jobs/{job_id}/outputs", + } + ] + } + ) + url_template: Annotated[str | None, AfterValidator(_url_missing_only_job_id)] runner_url_template: str | None outputs_url_template: Annotated[ diff --git a/services/api-server/tests/unit/api_functions/test_api_routers_functions.py b/services/api-server/tests/unit/api_functions/test_api_routers_functions.py index f4092d3afd2..2b5464dcc1d 100644 --- a/services/api-server/tests/unit/api_functions/test_api_routers_functions.py +++ b/services/api-server/tests/unit/api_functions/test_api_routers_functions.py @@ -14,7 +14,9 @@ import httpx import pytest import respx +from celery import Task from faker import Faker +from fastapi import FastAPI from httpx import AsyncClient from models_library.api_schemas_long_running_tasks.tasks import TaskGet from models_library.functions import ( @@ -25,6 +27,7 @@ RegisteredFunctionJob, RegisteredFunctionJobCollection, RegisteredProjectFunction, + RegisteredProjectFunctionJob, ) from models_library.functions_errors import ( FunctionIDNotFoundError, @@ -32,14 +35,22 @@ ) from models_library.rest_pagination import PageMetaInfoLimitOffset from models_library.users import UserID +from pydantic import EmailStr from pytest_mock import MockerFixture, MockType from pytest_simcore.helpers.httpx_calls_capture_models import HttpApiCallCaptureModel from servicelib.aiohttp import status +from servicelib.celery.app_server import BaseAppServer +from servicelib.celery.models import TaskID from servicelib.common_headers import ( X_SIMCORE_PARENT_NODE_ID, X_SIMCORE_PARENT_PROJECT_UUID, ) +from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient from simcore_service_api_server._meta import API_VTAG +from simcore_service_api_server.api.dependencies.authentication import Identity +from simcore_service_api_server.celery.worker_tasks import functions_tasks +from 
simcore_service_api_server.models.api_resources import JobLinks +from simcore_service_api_server.services_rpc.wb_api_server import WbApiRpcClient _faker = Faker() @@ -379,6 +390,103 @@ async def async_magic(): ) +@pytest.mark.parametrize("capture", ["run_study_function_parent_info.json"]) +async def test_run_project_function( + mocker: MockerFixture, + mocked_webserver_rpc_api: dict[str, MockType], + app: FastAPI, + client: AsyncClient, + mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], + mock_registered_project_function: RegisteredProjectFunction, + mock_registered_project_function_job: RegisteredFunctionJob, + auth: httpx.BasicAuth, + user_identity: Identity, + user_email: EmailStr, + job_links: JobLinks, + mocked_webserver_rest_api_base: respx.MockRouter, + mocked_directorv2_rest_api_base: respx.MockRouter, + create_respx_mock_from_capture, + project_tests_dir: Path, + capture: str, +) -> None: + + def _get_app_server(celery_app: Any) -> FastAPI: + app_server = mocker.Mock(spec=BaseAppServer) + app_server.app = app + return app_server + + mocker.patch.object(functions_tasks, "get_app_server", _get_app_server) + + def _get_rabbitmq_rpc_client(app: FastAPI) -> RabbitMQRPCClient: + return mocker.MagicMock(spec=RabbitMQRPCClient) + + mocker.patch.object( + functions_tasks, "get_rabbitmq_rpc_client", _get_rabbitmq_rpc_client + ) + + async def _get_wb_api_rpc_client(app: FastAPI) -> WbApiRpcClient: + wb_api_rpc_client = WbApiRpcClient( + _client=mocker.MagicMock(spec=RabbitMQRPCClient) + ) + return wb_api_rpc_client + + mocker.patch.object( + functions_tasks, "get_wb_api_rpc_client", _get_wb_api_rpc_client + ) + + def _default_side_effect( + request: httpx.Request, + path_params: dict[str, Any], + capture: HttpApiCallCaptureModel, + ) -> Any: + return capture.response_body + + create_respx_mock_from_capture( + respx_mocks=[mocked_webserver_rest_api_base, mocked_directorv2_rest_api_base], + capture_path=project_tests_dir / "mocks" / capture, + 
side_effects_callbacks=[_default_side_effect] * 50, + ) + + mock_handler_in_functions_rpc_interface( + "get_function_user_permissions", + FunctionUserAccessRights( + user_id=user_identity.user_id, + execute=True, + read=True, + write=True, + ), + ) + mock_handler_in_functions_rpc_interface( + "get_function", mock_registered_project_function + ) + mock_handler_in_functions_rpc_interface("find_cached_function_jobs", []) + mock_handler_in_functions_rpc_interface( + "register_function_job", mock_registered_project_function_job + ) + mock_handler_in_functions_rpc_interface( + "get_functions_user_api_access_rights", + FunctionUserApiAccessRights( + user_id=user_identity.user_id, + execute_functions=True, + write_functions=True, + read_functions=True, + ), + ) + + job = await functions_tasks.run_function( + task=MagicMock(spec=Task), + task_id=TaskID(_faker.uuid4()), + user_identity=user_identity, + function=mock_registered_project_function, + function_inputs={}, + pricing_spec=None, + job_links=job_links, + x_simcore_parent_project_uuid=None, + x_simcore_parent_node_id=None, + ) + assert isinstance(job, RegisteredProjectFunctionJob) + + @pytest.mark.parametrize( "parent_project_uuid, parent_node_uuid, expected_status_code", [ diff --git a/services/api-server/tests/unit/conftest.py b/services/api-server/tests/unit/conftest.py index a7327b461eb..738778223b8 100644 --- a/services/api-server/tests/unit/conftest.py +++ b/services/api-server/tests/unit/conftest.py @@ -57,8 +57,10 @@ from pytest_simcore.simcore_webserver_projects_rest_api import GET_PROJECT from requests.auth import HTTPBasicAuth from respx import MockRouter +from simcore_service_api_server.api.dependencies.authentication import Identity from simcore_service_api_server.core.application import create_app from simcore_service_api_server.core.settings import ApplicationSettings +from simcore_service_api_server.models.api_resources import JobLinks from simcore_service_api_server.repository.api_keys import 
UserAndProductTuple from simcore_service_api_server.services_http.solver_job_outputs import ResultsTypes from simcore_service_api_server.services_rpc.wb_api_server import WbApiRpcClient @@ -69,6 +71,19 @@ def product_name() -> ProductName: return "osparc" +@pytest.fixture +def user_identity( + user_id: UserID, + user_email: EmailStr, + product_name: ProductName, +) -> Identity: + return Identity( + user_id=user_id, + product_name=product_name, + email=user_email, + ) + + @pytest.fixture def app_environment( monkeypatch: pytest.MonkeyPatch, @@ -549,6 +564,15 @@ def project_job_rpc_get() -> ProjectJobRpcGet: return ProjectJobRpcGet.model_validate(example) +@pytest.fixture +def job_links() -> JobLinks: + extra = JobLinks.model_config.get("json_schema_extra") + assert isinstance(extra, dict) + examples = extra.get("examples") + assert isinstance(examples, list) and len(examples) > 0 + return JobLinks.model_validate(examples[0]) + + @pytest.fixture def mocked_webserver_rpc_api( mocked_app_dependencies: None, From 7d6d4c9dd5990700b91ae5cabe8d840495b33a7e Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Fri, 22 Aug 2025 10:58:37 +0200 Subject: [PATCH 048/111] register job pydantic types for serialization - can run function via celery now --- .../api/dependencies/celery.py | 18 ++------------- .../celery/worker_tasks/tasks.py | 23 ++++++++++++------- 2 files changed, 17 insertions(+), 24 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py index e57f46a269f..9a0f31f5f3a 100644 --- a/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py +++ b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py @@ -4,17 +4,10 @@ from celery_library.task_manager import CeleryTaskManager from celery_library.types import register_celery_types, register_pydantic_types from fastapi import FastAPI -from 
models_library.functions import ( - RegisteredProjectFunction, - RegisteredPythonCodeFunction, - RegisteredSolverFunction, -) from servicelib.celery.task_manager import TaskManager from settings_library.celery import CelerySettings -from ...api.dependencies.authentication import Identity -from ...models.api_resources import JobLinks -from ...models.schemas.jobs import JobPricingSpecification +from ...celery.worker_tasks.tasks import registered_pydantic_types ASYNC_JOB_CLIENT_NAME: Final[str] = "API_SERVER" @@ -26,14 +19,7 @@ async def on_startup() -> None: ) register_celery_types() - register_pydantic_types( - Identity, - RegisteredProjectFunction, - RegisteredPythonCodeFunction, - RegisteredSolverFunction, - JobPricingSpecification, - JobLinks, - ) + register_pydantic_types(*registered_pydantic_types) app.add_event_handler("startup", on_startup) diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py index fce9caec484..61f4bd6ad78 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py @@ -5,8 +5,10 @@ from celery_library.types import register_celery_types, register_pydantic_types from models_library.functions import ( RegisteredProjectFunction, + RegisteredProjectFunctionJob, RegisteredPythonCodeFunction, RegisteredSolverFunction, + RegisteredSolverFunctionJob, ) from servicelib.logging_utils import log_context @@ -17,17 +19,22 @@ _logger = logging.getLogger(__name__) +registered_pydantic_types = ( + Identity, + JobLinks, + JobPricingSpecification, + RegisteredProjectFunction, + RegisteredProjectFunctionJob, + RegisteredPythonCodeFunction, + RegisteredProjectFunctionJob, + RegisteredSolverFunction, + RegisteredSolverFunctionJob, +) + def setup_worker_tasks(app: Celery) -> None: register_celery_types() - 
register_pydantic_types( - Identity, - RegisteredProjectFunction, - RegisteredPythonCodeFunction, - RegisteredSolverFunction, - JobPricingSpecification, - JobLinks, - ) + register_pydantic_types(*registered_pydantic_types) with log_context(_logger, logging.INFO, msg="worker task registration"): register_task(app, run_function) From 4a327f844b81645fe0d2517a6101d597e772ba28 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Fri, 22 Aug 2025 10:59:20 +0200 Subject: [PATCH 049/111] cosmetic fix --- .../src/simcore_service_api_server/api/dependencies/celery.py | 4 ++-- .../simcore_service_api_server/celery/worker_tasks/tasks.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py index 9a0f31f5f3a..235f2fa382d 100644 --- a/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py +++ b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py @@ -7,7 +7,7 @@ from servicelib.celery.task_manager import TaskManager from settings_library.celery import CelerySettings -from ...celery.worker_tasks.tasks import registered_pydantic_types +from ...celery.worker_tasks.tasks import pydantic_types_to_register ASYNC_JOB_CLIENT_NAME: Final[str] = "API_SERVER" @@ -19,7 +19,7 @@ async def on_startup() -> None: ) register_celery_types() - register_pydantic_types(*registered_pydantic_types) + register_pydantic_types(*pydantic_types_to_register) app.add_event_handler("startup", on_startup) diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py index 61f4bd6ad78..b40d3ff23f0 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py @@ -19,7 
+19,7 @@ _logger = logging.getLogger(__name__) -registered_pydantic_types = ( +pydantic_types_to_register = ( Identity, JobLinks, JobPricingSpecification, @@ -34,7 +34,7 @@ def setup_worker_tasks(app: Celery) -> None: register_celery_types() - register_pydantic_types(*registered_pydantic_types) + register_pydantic_types(*pydantic_types_to_register) with log_context(_logger, logging.INFO, msg="worker task registration"): register_task(app, run_function) From 3efc1309df8c611de22dadce216de1713ea6219b Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Fri, 22 Aug 2025 12:41:51 +0200 Subject: [PATCH 050/111] change boot process for celery worker --- services/api-server/docker/boot.sh | 6 ++- .../api-server/docker/boot_celery_worker.py | 10 +++++ .../celery/worker_main.py | 39 ++++++++----------- 3 files changed, 30 insertions(+), 25 deletions(-) create mode 100644 services/api-server/docker/boot_celery_worker.py diff --git a/services/api-server/docker/boot.sh b/services/api-server/docker/boot.sh index f597ffb1912..227be9c56b9 100755 --- a/services/api-server/docker/boot.sh +++ b/services/api-server/docker/boot.sh @@ -48,7 +48,8 @@ if [ "${API_SERVER_WORKER_MODE}" = "true" ]; then --recursive \ -- \ celery \ - --app=simcore_service_api_server.celery.worker_main:app \ + --app=boot_celery_worker:app \ + --workdir=services/api-server/docker \ worker --pool=threads \ --loglevel="${API_SERVER_LOGLEVEL}" \ --concurrency="${CELERY_CONCURRENCY}" \ @@ -56,7 +57,8 @@ if [ "${API_SERVER_WORKER_MODE}" = "true" ]; then --queues="${CELERY_QUEUES:-default}" else exec celery \ - --app=simcore_service_api_server.celery.worker_main:app \ + --app=boot_celery_worker:app \ + --workdir=services/api-server/docker \ worker --pool=threads \ --loglevel="${API_SERVER_LOGLEVEL}" \ --concurrency="${CELERY_CONCURRENCY}" \ diff --git a/services/api-server/docker/boot_celery_worker.py b/services/api-server/docker/boot_celery_worker.py new file mode 100644 index 00000000000..194addc031f --- /dev/null 
+++ b/services/api-server/docker/boot_celery_worker.py @@ -0,0 +1,10 @@ +from celery.signals import worker_init, worker_shutdown # type: ignore[import-untyped] +from celery_library.signals import ( + on_worker_shutdown, +) +from simcore_service_api_server.celery.worker_main import get_app, worker_init_wrapper + +app = get_app() + +worker_init.connect(worker_init_wrapper) +worker_shutdown.connect(on_worker_shutdown) diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_main.py b/services/api-server/src/simcore_service_api_server/celery/worker_main.py index 4438625afd0..e70b7f79112 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_main.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_main.py @@ -2,11 +2,9 @@ from functools import partial -from celery.signals import worker_init, worker_shutdown # type: ignore[import-untyped] from celery_library.common import create_app as create_celery_app from celery_library.signals import ( on_worker_init, - on_worker_shutdown, ) from servicelib.fastapi.celery.app_server import FastAPIAppServer from servicelib.logging_utils import setup_loggers @@ -16,34 +14,29 @@ from .worker_tasks.tasks import setup_worker_tasks -def _get_settings() -> ApplicationSettings: - return ApplicationSettings.create_from_envs() +def get_app(): + _settings = ApplicationSettings.create_from_envs() + setup_loggers( + log_format_local_dev_enabled=_settings.API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=_settings.API_SERVER_LOG_FILTER_MAPPING, + tracing_settings=_settings.API_SERVER_TRACING, + log_base_level=_settings.log_level, + noisy_loggers=None, + ) -_settings = _get_settings() - -setup_loggers( - log_format_local_dev_enabled=_settings.API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=_settings.API_SERVER_LOG_FILTER_MAPPING, - tracing_settings=_settings.API_SERVER_TRACING, - log_base_level=_settings.log_level, - noisy_loggers=None, -) - -assert 
_settings.API_SERVER_CELERY # nosec -app = create_celery_app(_settings.API_SERVER_CELERY) + assert _settings.API_SERVER_CELERY # nosec + app = create_celery_app(_settings.API_SERVER_CELERY) + setup_worker_tasks(app) -app_server = FastAPIAppServer(app=create_app(_settings)) + return app def worker_init_wrapper(sender, **_kwargs): + _settings = ApplicationSettings.create_from_envs() assert _settings.API_SERVER_CELERY # nosec + app_server = FastAPIAppServer(app=create_app(_settings)) + return partial(on_worker_init, app_server, _settings.API_SERVER_CELERY)( sender, **_kwargs ) - - -worker_init.connect(worker_init_wrapper) -worker_shutdown.connect(on_worker_shutdown) - -setup_worker_tasks(app) From 75a06d58a59cc7e373078438a65c571ab821132f Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Fri, 22 Aug 2025 12:51:42 +0200 Subject: [PATCH 051/111] fix test_tasks.py --- .../api-server/tests/unit/celery/test_tasks.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/services/api-server/tests/unit/celery/test_tasks.py b/services/api-server/tests/unit/celery/test_tasks.py index 4f4480c879c..c5a48c407b7 100644 --- a/services/api-server/tests/unit/celery/test_tasks.py +++ b/services/api-server/tests/unit/celery/test_tasks.py @@ -9,10 +9,12 @@ from faker import Faker from fastapi import status from httpx import AsyncClient, BasicAuth -from models_library.api_schemas_long_running_tasks.tasks import TaskGet +from models_library.api_schemas_long_running_tasks.tasks import TaskGet, TaskStatus from models_library.progress_bar import ProgressReport, ProgressStructuredMessage from pytest_mock import MockerFixture, MockType, mocker -from servicelib.celery.models import TaskState, TaskStatus, TaskUUID +from servicelib.celery.models import TaskState +from servicelib.celery.models import TaskStatus as CeleryTaskStatus +from servicelib.celery.models import TaskUUID from simcore_service_api_server.api.routes import tasks as task_routes from 
simcore_service_api_server.models.schemas.base import ApiServerEnvelope @@ -55,7 +57,7 @@ async def test_list_celery_tasks( assert task.status_href == f"/v0/tasks/{task.task_id}" -async def test_get_async_jobs_status( +async def test_get_task_status( mock_task_manager: MockType, client: AsyncClient, auth: BasicAuth, @@ -67,7 +69,7 @@ async def test_get_async_jobs_status( TaskStatus.model_validate_json(response.text) -async def test_cancel_async_job( +async def test_cancel_task( mock_task_manager: MockType, client: AsyncClient, auth: BasicAuth, @@ -78,7 +80,7 @@ async def test_cancel_async_job( assert response.status_code == status.HTTP_204_NO_CONTENT -async def test_get_result( +async def test_get_task_result( mock_task_manager: MockType, client: AsyncClient, auth: BasicAuth, @@ -133,7 +135,7 @@ async def test_get_result( "GET", f"/v0/tasks/{_faker.uuid4()}/result", None, - TaskStatus( + CeleryTaskStatus( task_uuid=TaskUUID("123e4567-e89b-12d3-a456-426614174000"), task_state=TaskState.STARTED, progress_report=ProgressReport( @@ -153,7 +155,7 @@ async def test_get_result( "GET", f"/v0/tasks/{_faker.uuid4()}/result", None, - TaskStatus( + CeleryTaskStatus( task_uuid=TaskUUID("123e4567-e89b-12d3-a456-426614174000"), task_state=TaskState.ABORTED, progress_report=ProgressReport( From c1a67d56dd3a5415bb602300b62fa7e7385db2f7 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Fri, 22 Aug 2025 14:25:17 +0200 Subject: [PATCH 052/111] add test for full round trip of running function --- .../{ => api_functions}/celery/conftest.py | 0 .../celery/test_functions.py | 108 ++++++++++++++++++ .../test_api_routers_functions.py | 91 --------------- .../tests/unit/{celery => }/test_tasks.py | 0 4 files changed, 108 insertions(+), 91 deletions(-) rename services/api-server/tests/unit/{ => api_functions}/celery/conftest.py (100%) rename services/api-server/tests/unit/{ => api_functions}/celery/test_functions.py (63%) rename services/api-server/tests/unit/{celery => }/test_tasks.py 
(100%) diff --git a/services/api-server/tests/unit/celery/conftest.py b/services/api-server/tests/unit/api_functions/celery/conftest.py similarity index 100% rename from services/api-server/tests/unit/celery/conftest.py rename to services/api-server/tests/unit/api_functions/celery/conftest.py diff --git a/services/api-server/tests/unit/celery/test_functions.py b/services/api-server/tests/unit/api_functions/celery/test_functions.py similarity index 63% rename from services/api-server/tests/unit/celery/test_functions.py rename to services/api-server/tests/unit/api_functions/celery/test_functions.py index cc1f5cbde19..4326f54d363 100644 --- a/services/api-server/tests/unit/celery/test_functions.py +++ b/services/api-server/tests/unit/api_functions/celery/test_functions.py @@ -1,7 +1,11 @@ import inspect from collections.abc import Callable +from pathlib import Path +from typing import Any +import httpx import pytest +import respx from celery import Celery, Task from celery.contrib.testing.worker import TestWorkController from celery_library.task import register_task @@ -20,11 +24,17 @@ FunctionID, FunctionInputs, FunctionJobID, + FunctionUserAccessRights, + FunctionUserApiAccessRights, RegisteredFunction, + RegisteredFunctionJob, RegisteredProjectFunction, RegisteredProjectFunctionJob, ) from models_library.projects import ProjectID +from models_library.users import UserID +from pytest_mock import MockType +from pytest_simcore.helpers.httpx_calls_capture_models import HttpApiCallCaptureModel from servicelib.celery.models import TaskFilter, TaskID, TaskMetadata, TasksQueue from servicelib.common_headers import ( X_SIMCORE_PARENT_NODE_ID, @@ -221,3 +231,101 @@ async def test_celery_error_propagation( await poll_task_until_done(client, auth, f"{task_uuid}") assert exc_info.value.response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR + + +@pytest.mark.parametrize( + "parent_project_uuid, parent_node_uuid, expected_status_code", + [ + (None, None, 
status.HTTP_422_UNPROCESSABLE_ENTITY), + (f"{_faker.uuid4()}", None, status.HTTP_422_UNPROCESSABLE_ENTITY), + (None, f"{_faker.uuid4()}", status.HTTP_422_UNPROCESSABLE_ENTITY), + (f"{_faker.uuid4()}", f"{_faker.uuid4()}", status.HTTP_200_OK), + ("null", "null", status.HTTP_200_OK), + ], +) +@pytest.mark.parametrize("capture", ["run_study_function_parent_info.json"]) +@pytest.mark.parametrize("mocked_app_dependencies", [None]) +async def test_run_project_function_parent_info( + app: FastAPI, + with_api_server_celery_worker: TestWorkController, + client: AsyncClient, + mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], + mock_registered_project_function: RegisteredProjectFunction, + mock_registered_project_function_job: RegisteredFunctionJob, + auth: httpx.BasicAuth, + user_id: UserID, + mocked_webserver_rest_api_base: respx.MockRouter, + mocked_directorv2_rest_api_base: respx.MockRouter, + mocked_webserver_rpc_api: dict[str, MockType], + create_respx_mock_from_capture, + project_tests_dir: Path, + parent_project_uuid: str | None, + parent_node_uuid: str | None, + expected_status_code: int, + capture: str, +) -> None: + def _default_side_effect( + request: httpx.Request, + path_params: dict[str, Any], + capture: HttpApiCallCaptureModel, + ) -> Any: + if request.method == "POST" and request.url.path.endswith("/projects"): + if parent_project_uuid and parent_project_uuid != "null": + _parent_uuid = request.headers.get(X_SIMCORE_PARENT_PROJECT_UUID) + assert _parent_uuid is not None + assert parent_project_uuid == _parent_uuid + if parent_node_uuid and parent_node_uuid != "null": + _parent_node_uuid = request.headers.get(X_SIMCORE_PARENT_NODE_ID) + assert _parent_node_uuid is not None + assert parent_node_uuid == _parent_node_uuid + return capture.response_body + + create_respx_mock_from_capture( + respx_mocks=[mocked_webserver_rest_api_base, mocked_directorv2_rest_api_base], + capture_path=project_tests_dir / "mocks" / capture, + 
side_effects_callbacks=[_default_side_effect] * 50, + ) + + mock_handler_in_functions_rpc_interface( + "get_function_user_permissions", + FunctionUserAccessRights( + user_id=user_id, + execute=True, + read=True, + write=True, + ), + ) + mock_handler_in_functions_rpc_interface( + "get_function", mock_registered_project_function + ) + mock_handler_in_functions_rpc_interface("find_cached_function_jobs", []) + mock_handler_in_functions_rpc_interface( + "register_function_job", mock_registered_project_function_job + ) + mock_handler_in_functions_rpc_interface( + "get_functions_user_api_access_rights", + FunctionUserApiAccessRights( + user_id=user_id, + execute_functions=True, + write_functions=True, + read_functions=True, + ), + ) + + headers = {} + if parent_project_uuid: + headers[X_SIMCORE_PARENT_PROJECT_UUID] = parent_project_uuid + if parent_node_uuid: + headers[X_SIMCORE_PARENT_NODE_ID] = parent_node_uuid + + response = await client.post( + f"{API_VTAG}/functions/{mock_registered_project_function.uid}:run", + json={}, + auth=auth, + headers=headers, + ) + assert response.status_code == expected_status_code + if response.status_code == status.HTTP_200_OK: + task = TaskGet.model_validate(response.json()) + result = await poll_task_until_done(client, auth, task.task_id) + RegisteredProjectFunctionJob.model_validate(result.result) diff --git a/services/api-server/tests/unit/api_functions/test_api_routers_functions.py b/services/api-server/tests/unit/api_functions/test_api_routers_functions.py index 2b5464dcc1d..ddb145c31f1 100644 --- a/services/api-server/tests/unit/api_functions/test_api_routers_functions.py +++ b/services/api-server/tests/unit/api_functions/test_api_routers_functions.py @@ -487,97 +487,6 @@ def _default_side_effect( assert isinstance(job, RegisteredProjectFunctionJob) -@pytest.mark.parametrize( - "parent_project_uuid, parent_node_uuid, expected_status_code", - [ - (None, None, status.HTTP_422_UNPROCESSABLE_ENTITY), - (f"{_faker.uuid4()}", None, 
status.HTTP_422_UNPROCESSABLE_ENTITY), - (None, f"{_faker.uuid4()}", status.HTTP_422_UNPROCESSABLE_ENTITY), - (f"{_faker.uuid4()}", f"{_faker.uuid4()}", status.HTTP_200_OK), - ("null", "null", status.HTTP_200_OK), - ], -) -@pytest.mark.parametrize("capture", ["run_study_function_parent_info.json"]) -async def test_run_project_function_parent_info( - client: AsyncClient, - mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - mock_registered_project_function: RegisteredProjectFunction, - mock_registered_project_function_job: RegisteredFunctionJob, - auth: httpx.BasicAuth, - user_id: UserID, - mocked_webserver_rest_api_base: respx.MockRouter, - mocked_directorv2_rest_api_base: respx.MockRouter, - mocked_webserver_rpc_api: dict[str, MockType], - create_respx_mock_from_capture, - project_tests_dir: Path, - parent_project_uuid: str | None, - parent_node_uuid: str | None, - expected_status_code: int, - capture: str, -) -> None: - def _default_side_effect( - request: httpx.Request, - path_params: dict[str, Any], - capture: HttpApiCallCaptureModel, - ) -> Any: - if request.method == "POST" and request.url.path.endswith("/projects"): - if parent_project_uuid and parent_project_uuid != "null": - _parent_uuid = request.headers.get(X_SIMCORE_PARENT_PROJECT_UUID) - assert _parent_uuid is not None - assert parent_project_uuid == _parent_uuid - if parent_node_uuid and parent_node_uuid != "null": - _parent_node_uuid = request.headers.get(X_SIMCORE_PARENT_NODE_ID) - assert _parent_node_uuid is not None - assert parent_node_uuid == _parent_node_uuid - return capture.response_body - - create_respx_mock_from_capture( - respx_mocks=[mocked_webserver_rest_api_base, mocked_directorv2_rest_api_base], - capture_path=project_tests_dir / "mocks" / capture, - side_effects_callbacks=[_default_side_effect] * 50, - ) - - mock_handler_in_functions_rpc_interface( - "get_function_user_permissions", - FunctionUserAccessRights( - user_id=user_id, - execute=True, - read=True, - 
write=True, - ), - ) - mock_handler_in_functions_rpc_interface( - "get_function", mock_registered_project_function - ) - mock_handler_in_functions_rpc_interface("find_cached_function_jobs", []) - mock_handler_in_functions_rpc_interface( - "register_function_job", mock_registered_project_function_job - ) - mock_handler_in_functions_rpc_interface( - "get_functions_user_api_access_rights", - FunctionUserApiAccessRights( - user_id=user_id, - execute_functions=True, - write_functions=True, - read_functions=True, - ), - ) - - headers = {} - if parent_project_uuid: - headers[X_SIMCORE_PARENT_PROJECT_UUID] = parent_project_uuid - if parent_node_uuid: - headers[X_SIMCORE_PARENT_NODE_ID] = parent_node_uuid - - response = await client.post( - f"{API_VTAG}/functions/{mock_registered_project_function.uid}:run", - json={}, - auth=auth, - headers=headers, - ) - assert response.status_code == expected_status_code - - @pytest.mark.parametrize( "parent_project_uuid, parent_node_uuid, expected_status_code", [ diff --git a/services/api-server/tests/unit/celery/test_tasks.py b/services/api-server/tests/unit/test_tasks.py similarity index 100% rename from services/api-server/tests/unit/celery/test_tasks.py rename to services/api-server/tests/unit/test_tasks.py From 4acf14abebda5b27fcb73a9ff73cf82efb3b9b6b Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 25 Aug 2025 10:33:01 +0200 Subject: [PATCH 053/111] fix typecheck --- .../celery/worker_tasks/functions_tasks.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py index e7660d6c38b..b7f99c85a8f 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py @@ -1,6 +1,6 @@ -from celery import 
Task +from celery import Task # type: ignore[import-untyped] from celery_library.utils import get_app_server -from fastapi import FastAPI # type: ignore[import-untyped] +from fastapi import FastAPI from models_library.functions import FunctionInputs, RegisteredFunction from models_library.projects_nodes_io import NodeID from servicelib.celery.models import TaskID @@ -14,10 +14,9 @@ get_job_service, get_solver_service, get_storage_service, - get_wb_api_rpc_client, - get_webserver_session, ) -from ...api.dependencies.webserver_http import get_session_cookie +from ...api.dependencies.webserver_http import get_session_cookie, get_webserver_session +from ...api.dependencies.webserver_rpc import get_wb_api_rpc_client from ...models.api_resources import JobLinks from ...models.schemas.jobs import JobPricingSpecification from ...services_http.director_v2 import DirectorV2Api From f49f9ef42ffabc07ff27fdc143464aadfe739c09 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 25 Aug 2025 10:35:01 +0200 Subject: [PATCH 054/111] update openapi specs --- services/api-server/openapi.json | 21 +-------------------- 1 file changed, 1 insertion(+), 20 deletions(-) diff --git a/services/api-server/openapi.json b/services/api-server/openapi.json index cc30044fded..725e6c46f1f 100644 --- a/services/api-server/openapi.json +++ b/services/api-server/openapi.json @@ -8103,26 +8103,7 @@ "content": { "application/json": { "schema": { - "oneOf": [ - { - "$ref": "#/components/schemas/RegisteredProjectFunctionJob" - }, - { - "$ref": "#/components/schemas/RegisteredPythonCodeFunctionJob" - }, - { - "$ref": "#/components/schemas/RegisteredSolverFunctionJob" - } - ], - "discriminator": { - "propertyName": "function_class", - "mapping": { - "PROJECT": "#/components/schemas/RegisteredProjectFunctionJob", - "PYTHON_CODE": "#/components/schemas/RegisteredPythonCodeFunctionJob", - "SOLVER": "#/components/schemas/RegisteredSolverFunctionJob" - } - }, - "title": "Response Run Function V0 Functions 
Function Id Run Post" + "$ref": "#/components/schemas/TaskGet" } } } From 42a61330b1625ccc6acdbd27ee6c801b8bb56ec2 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 25 Aug 2025 10:49:01 +0200 Subject: [PATCH 055/111] add example in RegisteredProjectFunctionGet --- .../api_schemas_webserver/functions.py | 41 ++++++++++++++++++- 1 file changed, 40 insertions(+), 1 deletion(-) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/functions.py b/packages/models-library/src/models_library/api_schemas_webserver/functions.py index 66193388074..226db44f68d 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/functions.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/functions.py @@ -1,7 +1,7 @@ import datetime from typing import Annotated, TypeAlias -from pydantic import Field, HttpUrl +from pydantic import ConfigDict, Field, HttpUrl from ..functions import ( Function, @@ -141,6 +141,45 @@ class RegisteredProjectFunctionGet(RegisteredProjectFunction, OutputSchema): modified_at: Annotated[datetime.datetime, Field(alias="lastChangeDate")] access_rights: dict[GroupID, FunctionGroupAccessRightsGet] thumbnail: HttpUrl | None = None + model_config = ConfigDict( + populate_by_name=True, + json_schema_extra={ + "examples": [ + { + "function_class": "PROJECT", + "title": "Example Project Function", + "description": "This is an example project function.", + "input_schema": { + "schema_content": { + "type": "object", + "properties": {"input1": {"type": "integer"}}, + }, + "schema_class": "application/schema+json", + }, + "output_schema": { + "schema_content": { + "type": "object", + "properties": {"output1": {"type": "string"}}, + }, + "schema_class": "application/schema+json", + }, + "default_inputs": None, + "project_id": "11111111-1111-1111-1111-111111111111", + "uid": "22222222-2222-2222-2222-222222222222", + "created_at": "2024-01-01T12:00:00", + "modified_at": "2024-01-02T12:00:00", + 
"access_rights": { + "5": { + "read": True, + "write": False, + "execute": True, + } + }, + "thumbnail": None, + }, + ] + }, + ) class SolverFunctionToRegister(SolverFunction, InputSchema): ... From 60a0b8b0200bb410cedcc68bfe3d4578a2c06f1e Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 25 Aug 2025 10:53:27 +0200 Subject: [PATCH 056/111] fix example of TaskStatus --- .../service-library/src/servicelib/celery/models.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/packages/service-library/src/servicelib/celery/models.py b/packages/service-library/src/servicelib/celery/models.py index 3e8605fb306..8fa460f7bdc 100644 --- a/packages/service-library/src/servicelib/celery/models.py +++ b/packages/service-library/src/servicelib/celery/models.py @@ -3,7 +3,7 @@ from typing import Annotated, Protocol, TypeAlias from uuid import UUID -from models_library.progress_bar import ProgressReport, ProgressStructuredMessage +from models_library.progress_bar import ProgressReport from pydantic import BaseModel, StringConstraints TaskID: TypeAlias = str @@ -115,9 +115,11 @@ class TaskStatus(BaseModel): "total": 1.0, "attempts": 1, "unit": "Byte", - "message": ProgressStructuredMessage.model_config[ - "json_schema_extra" - ]["examples"][0], + "message": { + "description": "some description", + "current": 12.2, + "total": 123, + }, }, } ] From ab428ac02d22be9a9ab558991e70ab9ccaa8ddd7 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 25 Aug 2025 10:57:37 +0200 Subject: [PATCH 057/111] upgrade fakeredis dependency in api-server --- services/api-server/requirements/_test.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/api-server/requirements/_test.txt b/services/api-server/requirements/_test.txt index a9c83871d23..d1909ce4a50 100644 --- a/services/api-server/requirements/_test.txt +++ b/services/api-server/requirements/_test.txt @@ -136,6 +136,8 @@ ecdsa==0.19.0 # sshpubkeys faker==36.1.1 # via -r requirements/_test.in 
+fakeredis==2.31.0 + # via -r requirements/_test.in fastapi==0.116.1 # via # -c requirements/_base.txt From 9709eecc37fcd60b9fd281d050fa07a21d6a86b0 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 25 Aug 2025 11:08:40 +0200 Subject: [PATCH 058/111] fix pylinting --- .../src/pytest_simcore/celery_library_mocks.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py b/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py index aa664e99de4..530b4aff171 100644 --- a/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py +++ b/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py @@ -1,3 +1,5 @@ +# pylint: disable=redefined-outer-name + from collections.abc import Callable import pytest @@ -52,12 +54,12 @@ def set_task_progress_return_value() -> None: @pytest.fixture def mock_task_manager_object( mocker: MockerFixture, - submit_task_return_value, - cancel_task_return_value, - get_task_result_return_value, - get_task_status_return_value, - list_tasks_return_value, - set_task_progress_return_value, + submit_task_return_value: TaskUUID, + cancel_task_return_value: None, + get_task_result_return_value: dict, + get_task_status_return_value: TaskStatus, + list_tasks_return_value: list[Task], + set_task_progress_return_value: None, ) -> MockType: """ Returns a TaskManager mock with overridable return values for each method. 
From 53645b6b3fc8bd995e07a6526e88428b6c361815 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 25 Aug 2025 11:36:46 +0200 Subject: [PATCH 059/111] make pylint happy --- .../celery-library/tests/unit/test_tasks.py | 4 ++-- .../api/routes/tasks.py | 4 +++- .../celery/worker_tasks/functions_tasks.py | 6 ++++-- .../celery/worker_tasks/tasks.py | 4 +++- .../unit/api_functions/celery/conftest.py | 20 +++++++++++++++---- .../api_functions/celery/test_functions.py | 13 ++++++++++-- .../test_api_routers_functions.py | 2 +- services/api-server/tests/unit/test_tasks.py | 4 ++-- 8 files changed, 42 insertions(+), 15 deletions(-) diff --git a/packages/celery-library/tests/unit/test_tasks.py b/packages/celery-library/tests/unit/test_tasks.py index a4edfb7540a..35da31aa180 100644 --- a/packages/celery-library/tests/unit/test_tasks.py +++ b/packages/celery-library/tests/unit/test_tasks.py @@ -11,8 +11,8 @@ from random import randint import pytest -from celery import Celery, Task -from celery.contrib.abortable import AbortableTask +from celery import Celery, Task # pylint: disable=no-name-in-module +from celery.contrib.abortable import AbortableTask # pylint: disable=no-name-in-module from celery_library.errors import TransferrableCeleryError from celery_library.task import register_task from celery_library.task_manager import CeleryTaskManager diff --git a/services/api-server/src/simcore_service_api_server/api/routes/tasks.py b/services/api-server/src/simcore_service_api_server/api/routes/tasks.py index 62a526d91a8..8e8579d4995 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/tasks.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/tasks.py @@ -1,7 +1,9 @@ import logging from typing import Annotated, Any -from celery.exceptions import CeleryError # type: ignore[import-untyped] +from celery.exceptions import ( + CeleryError, # type: ignore[import-untyped] # pylint: disable=no-name-in-module +) from common_library.error_codes 
import create_error_code from fastapi import APIRouter, Depends, FastAPI, HTTPException, status from models_library.api_schemas_long_running_tasks.base import TaskProgress diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py index b7f99c85a8f..f5852ffc336 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py @@ -1,5 +1,7 @@ -from celery import Task # type: ignore[import-untyped] -from celery_library.utils import get_app_server +from celery import ( + Task, # type: ignore[import-untyped] # pylint: disable=no-name-in-module +) +from celery_library.utils import get_app_server # pylint: disable=no-name-in-module from fastapi import FastAPI from models_library.functions import FunctionInputs, RegisteredFunction from models_library.projects_nodes_io import NodeID diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py index b40d3ff23f0..1558ef1e2a2 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py @@ -1,6 +1,8 @@ import logging -from celery import Celery # type: ignore[import-untyped] +from celery import ( + Celery, # type: ignore[import-untyped] # pylint: disable=no-name-in-module +) from celery_library.task import register_task from celery_library.types import register_celery_types, register_pydantic_types from models_library.functions import ( diff --git a/services/api-server/tests/unit/api_functions/celery/conftest.py b/services/api-server/tests/unit/api_functions/celery/conftest.py index 69d76503f4b..caab00fb92f 100644 --- 
a/services/api-server/tests/unit/api_functions/celery/conftest.py +++ b/services/api-server/tests/unit/api_functions/celery/conftest.py @@ -1,13 +1,25 @@ +# pylint: disable=unused-argument +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-positional-arguments +# pylint: disable=no-name-in-module + + import datetime from collections.abc import AsyncIterator, Callable from functools import partial from typing import Any import pytest -from celery import Celery -from celery.contrib.testing.worker import TestWorkController, start_worker -from celery.signals import worker_init, worker_shutdown -from celery.worker.worker import WorkController +from celery import Celery # pylint: disable=no-name-in-module +from celery.contrib.testing.worker import ( # pylint: disable=no-name-in-module + TestWorkController, + start_worker, +) +from celery.signals import ( # pylint: disable=no-name-in-module + worker_init, + worker_shutdown, +) +from celery.worker.worker import WorkController # pylint: disable=no-name-in-module from celery_library.signals import on_worker_init, on_worker_shutdown from fakeredis.aioredis import FakeRedis from pytest_mock import MockerFixture diff --git a/services/api-server/tests/unit/api_functions/celery/test_functions.py b/services/api-server/tests/unit/api_functions/celery/test_functions.py index 4326f54d363..32bdcd7fb01 100644 --- a/services/api-server/tests/unit/api_functions/celery/test_functions.py +++ b/services/api-server/tests/unit/api_functions/celery/test_functions.py @@ -1,3 +1,10 @@ +# pylint: disable=unused-argument +# pylint: disable=redefined-outer-name +# pylint: disable=no-name-in-module +# pylint: disable=too-many-positional-arguments +# pylint: disable=too-many-arguments + + import inspect from collections.abc import Callable from pathlib import Path @@ -6,8 +13,10 @@ import httpx import pytest import respx -from celery import Celery, Task -from celery.contrib.testing.worker import TestWorkController +from celery import 
Celery, Task # pylint: disable=no-name-in-module +from celery.contrib.testing.worker import ( + TestWorkController, # pylint: disable=no-name-in-module +) from celery_library.task import register_task from celery_library.types import register_pydantic_types from faker import Faker diff --git a/services/api-server/tests/unit/api_functions/test_api_routers_functions.py b/services/api-server/tests/unit/api_functions/test_api_routers_functions.py index ddb145c31f1..f046c25a0cb 100644 --- a/services/api-server/tests/unit/api_functions/test_api_routers_functions.py +++ b/services/api-server/tests/unit/api_functions/test_api_routers_functions.py @@ -14,7 +14,7 @@ import httpx import pytest import respx -from celery import Task +from celery import Task # pylint: disable=no-name-in-module from faker import Faker from fastapi import FastAPI from httpx import AsyncClient diff --git a/services/api-server/tests/unit/test_tasks.py b/services/api-server/tests/unit/test_tasks.py index c5a48c407b7..8c6e40b532c 100644 --- a/services/api-server/tests/unit/test_tasks.py +++ b/services/api-server/tests/unit/test_tasks.py @@ -5,13 +5,13 @@ from typing import Literal import pytest -from celery.exceptions import CeleryError +from celery.exceptions import CeleryError # pylint: disable=no-name-in-module from faker import Faker from fastapi import status from httpx import AsyncClient, BasicAuth from models_library.api_schemas_long_running_tasks.tasks import TaskGet, TaskStatus from models_library.progress_bar import ProgressReport, ProgressStructuredMessage -from pytest_mock import MockerFixture, MockType, mocker +from pytest_mock import MockerFixture, MockType from servicelib.celery.models import TaskState from servicelib.celery.models import TaskStatus as CeleryTaskStatus from servicelib.celery.models import TaskUUID From b1d306e16bf0502c52ee2cb6130e65cf67ce964a Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 25 Aug 2025 11:43:41 +0200 Subject: [PATCH 060/111] fix pylint after 
formatting --- .../celery/worker_tasks/functions_tasks.py | 4 ++-- .../simcore_service_api_server/celery/worker_tasks/tasks.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py index f5852ffc336..1bd8ab2f291 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py @@ -1,5 +1,5 @@ -from celery import ( - Task, # type: ignore[import-untyped] # pylint: disable=no-name-in-module +from celery import ( # type: ignore[import-untyped] # pylint: disable=no-name-in-module + Task, ) from celery_library.utils import get_app_server # pylint: disable=no-name-in-module from fastapi import FastAPI diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py index 1558ef1e2a2..255f0233e82 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py @@ -1,7 +1,7 @@ import logging -from celery import ( - Celery, # type: ignore[import-untyped] # pylint: disable=no-name-in-module +from celery import ( # type: ignore[import-untyped] # pylint: disable=no-name-in-module + Celery, ) from celery_library.task import register_task from celery_library.types import register_celery_types, register_pydantic_types From 5a2b652e65c007e7caf9d10bc33ea2ecf623359c Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 25 Aug 2025 13:14:19 +0200 Subject: [PATCH 061/111] pylint fix --- .../src/simcore_service_api_server/api/routes/tasks.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/services/api-server/src/simcore_service_api_server/api/routes/tasks.py b/services/api-server/src/simcore_service_api_server/api/routes/tasks.py index 8e8579d4995..4b3aef742b1 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/tasks.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/tasks.py @@ -1,8 +1,8 @@ import logging from typing import Annotated, Any -from celery.exceptions import ( - CeleryError, # type: ignore[import-untyped] # pylint: disable=no-name-in-module +from celery.exceptions import ( # type: ignore[import-untyped] # pylint: disable=no-name-in-module + CeleryError, ) from common_library.error_codes import create_error_code from fastapi import APIRouter, Depends, FastAPI, HTTPException, status From ff376a7042d85df01388c39c04c8f068f6097338 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 25 Aug 2025 13:36:00 +0200 Subject: [PATCH 062/111] remove worker services from public api integration tests --- packages/pytest-simcore/src/pytest_simcore/simcore_services.py | 1 + services/web/server/tests/integration/conftest.py | 1 + 2 files changed, 2 insertions(+) diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_services.py b/packages/pytest-simcore/src/pytest_simcore/simcore_services.py index 77c607cbb09..274a8edb44a 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_services.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_services.py @@ -29,6 +29,7 @@ _SERVICES_TO_SKIP: Final[set[str]] = { + "api-worker", "agent", # global mode deploy (NO exposed ports, has http API) "dask-sidecar", # global mode deploy (NO exposed ports, **NO** http API) "migration", diff --git a/services/web/server/tests/integration/conftest.py b/services/web/server/tests/integration/conftest.py index edca3137527..5fc7ea7b893 100644 --- a/services/web/server/tests/integration/conftest.py +++ b/services/web/server/tests/integration/conftest.py @@ -64,6 +64,7 @@ def webserver_environ( # the test 
webserver is built-up in webserver_service fixture that runs # on the host. EXCLUDED_SERVICES = [ + "api-worker", "dask-scheduler", "director", "docker-api-proxy", From 8391dfd4d877b6b68ffdda588192903f29db6b25 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 25 Aug 2025 14:32:29 +0200 Subject: [PATCH 063/111] add api-worker in docker-compose.local.yml --- services/docker-compose.local.yml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/services/docker-compose.local.yml b/services/docker-compose.local.yml index 5f84ba9bf10..f1b1514ba55 100644 --- a/services/docker-compose.local.yml +++ b/services/docker-compose.local.yml @@ -149,6 +149,15 @@ services: ports: - "8080" - "3022:3000" + + api-worker: + environment: + <<: *common_environment + API_SERVER_REMOTE_DEBUG_PORT : 3000 + ports: + - "8080" + - "3025:3000" + webserver: environment: &webserver_environment_local <<: *common_environment From 7ece7514c519424c110039fa8f49ba22669c334f Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 25 Aug 2025 14:48:17 +0200 Subject: [PATCH 064/111] add api-worker in docker-compose.devel.yml --- services/docker-compose.devel.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/services/docker-compose.devel.yml b/services/docker-compose.devel.yml index 085a78ef0c7..28e5a8bfa95 100644 --- a/services/docker-compose.devel.yml +++ b/services/docker-compose.devel.yml @@ -21,6 +21,16 @@ services: - ../packages:/devel/packages - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv + api-worker: + environment: + <<: *common-environment + API_SERVER_PROFILING : ${API_SERVER_PROFILING} + API_SERVER_LOGLEVEL: DEBUG + volumes: + - ./api-server:/devel/services/api-server + - ../packages:/devel/packages + - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv + autoscaling: environment: <<: *common-environment From 4eb2262ee8e0635c78e42a6c3ced777c90b94310 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 26 Aug 2025 10:05:02 +0200 Subject: [PATCH 065/111] factor out function run 
pre check --- .../_service_function_jobs.py | 46 +++++++++++++------ .../api/routes/functions_routes.py | 9 +++- .../celery/worker_tasks/functions_tasks.py | 13 ++++-- .../celery/worker_tasks/tasks.py | 3 +- .../api_functions/celery/test_functions.py | 43 +++++++++++------ .../test_api_routers_functions.py | 3 +- 6 files changed, 80 insertions(+), 37 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py index 1c04cc36e91..ffa7a1f2190 100644 --- a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py +++ b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py @@ -160,17 +160,12 @@ async def inspect_function_job( job_status=new_job_status, ) - async def run_function( + async def run_function_pre_check( self, *, function: RegisteredFunction, function_inputs: FunctionInputs, - pricing_spec: JobPricingSpecification | None, - job_links: JobLinks, - x_simcore_parent_project_uuid: NodeID | None, - x_simcore_parent_node_id: NodeID | None, - ) -> RegisteredFunctionJob: - + ) -> JobInputs: user_api_access_rights = ( await self._web_rpc_client.get_functions_user_api_access_rights( user_id=self.user_id, product_name=self.product_name @@ -206,9 +201,24 @@ async def run_function( if not is_valid: raise FunctionInputsValidationError(error=validation_str) + return JobInputs( + values=joined_inputs or {}, + ) + + async def run_function( + self, + *, + function: RegisteredFunction, + job_inputs: JobInputs, + pricing_spec: JobPricingSpecification | None, + job_links: JobLinks, + x_simcore_parent_project_uuid: NodeID | None, + x_simcore_parent_node_id: NodeID | None, + ) -> RegisteredFunctionJob: + if cached_function_jobs := await self._web_rpc_client.find_cached_function_jobs( function_id=function.uid, - inputs=joined_inputs, + inputs=job_inputs.values, user_id=self.user_id, product_name=self.product_name, ): @@ 
-223,7 +233,7 @@ async def run_function( if function.function_class == FunctionClass.PROJECT: study_job = await self._job_service.create_studies_job( study_id=function.project_id, - job_inputs=JobInputs(values=joined_inputs or {}), + job_inputs=job_inputs, hidden=True, job_links=job_links, x_simcore_parent_project_uuid=x_simcore_parent_project_uuid, @@ -239,7 +249,7 @@ async def run_function( function_uid=function.uid, title=f"Function job of function {function.uid}", description=function.description, - inputs=joined_inputs, + inputs=job_inputs.values, outputs=None, project_job_id=study_job.id, ), @@ -251,7 +261,7 @@ async def run_function( solver_job = await self._job_service.create_solver_job( solver_key=function.solver_key, version=function.solver_version, - inputs=JobInputs(values=joined_inputs or {}), + inputs=job_inputs, job_links=job_links, hidden=True, x_simcore_parent_project_uuid=x_simcore_parent_project_uuid, @@ -268,7 +278,7 @@ async def run_function( function_uid=function.uid, title=f"Function job of function {function.uid}", description=function.description, - inputs=joined_inputs, + inputs=job_inputs.values, outputs=None, solver_job_id=solver_job.id, ), @@ -291,16 +301,24 @@ async def map_function( x_simcore_parent_node_id: NodeID | None, ) -> RegisteredFunctionJobCollection: + job_inputs = [ + await self.run_function_pre_check( + function=function, + function_inputs=inputs, + ) + for inputs in function_inputs_list + ] + function_jobs = [ await self.run_function( function=function, - function_inputs=function_inputs, + job_inputs=inputs, pricing_spec=pricing_spec, job_links=job_links, x_simcore_parent_project_uuid=x_simcore_parent_project_uuid, x_simcore_parent_node_id=x_simcore_parent_node_id, ) - for function_inputs in function_inputs_list + for inputs in job_inputs ] function_job_collection_description = f"Function job collection of map of function {function.uid} with {len(function_inputs_list)} inputs" diff --git 
a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index 9e9fc81f81e..d401a0a6bdc 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -328,6 +328,9 @@ async def run_function( # noqa: PLR0913 url_for: Annotated[Callable, Depends(get_reverse_url_mapper)], function_inputs: FunctionInputs, function_service: Annotated[FunctionService, Depends(get_function_service)], + function_job_service: Annotated[ + FunctionJobService, Depends(get_function_job_service) + ], x_simcore_parent_project_uuid: Annotated[ProjectID | Literal["null"], Header()], x_simcore_parent_node_id: Annotated[NodeID | Literal["null"], Header()], ) -> TaskGet: @@ -345,6 +348,10 @@ async def run_function( # noqa: PLR0913 pricing_spec = JobPricingSpecification.create_from_headers(request.headers) job_links = await function_service.get_function_job_links(to_run_function, url_for) + job_inputs = await function_job_service.run_function_pre_check( + function=to_run_function, function_inputs=function_inputs + ) + job_filter = AsyncJobFilter( user_id=user_identity.user_id, product_name=user_identity.product_name, @@ -362,7 +369,7 @@ async def run_function( # noqa: PLR0913 task_filter=task_filter, user_identity=user_identity, function=to_run_function, - function_inputs=function_inputs, + job_inputs=job_inputs, pricing_spec=pricing_spec, job_links=job_links, x_simcore_parent_project_uuid=parent_project_uuid, diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py index 1bd8ab2f291..97400776a5f 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py +++ 
b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py @@ -3,9 +3,10 @@ ) from celery_library.utils import get_app_server # pylint: disable=no-name-in-module from fastapi import FastAPI -from models_library.functions import FunctionInputs, RegisteredFunction +from models_library.functions import RegisteredFunction from models_library.projects_nodes_io import NodeID from servicelib.celery.models import TaskID +from simcore_service_api_server._service_function_jobs import FunctionJobService from ...api.dependencies.authentication import Identity from ...api.dependencies.rabbitmq import get_rabbitmq_rpc_client @@ -20,12 +21,14 @@ from ...api.dependencies.webserver_http import get_session_cookie, get_webserver_session from ...api.dependencies.webserver_rpc import get_wb_api_rpc_client from ...models.api_resources import JobLinks -from ...models.schemas.jobs import JobPricingSpecification +from ...models.schemas.jobs import JobInputs, JobPricingSpecification from ...services_http.director_v2 import DirectorV2Api from ...services_http.storage import StorageApi -async def _assemble_function_job_service(*, app: FastAPI, user_identity: Identity): +async def _assemble_function_job_service( + *, app: FastAPI, user_identity: Identity +) -> FunctionJobService: # to avoid this show we could introduce a dependency injection # system which is not linked to FastAPI (i.e. can be resolved manually). 
# One suggestion: https://github.com/ets-labs/python-dependency-injector, which is compatible @@ -91,7 +94,7 @@ async def run_function( *, user_identity: Identity, function: RegisteredFunction, - function_inputs: FunctionInputs, + job_inputs: JobInputs, pricing_spec: JobPricingSpecification | None, job_links: JobLinks, x_simcore_parent_project_uuid: NodeID | None, @@ -105,7 +108,7 @@ async def run_function( return await function_job_service.run_function( function=function, - function_inputs=function_inputs, + job_inputs=job_inputs, pricing_spec=pricing_spec, job_links=job_links, x_simcore_parent_project_uuid=x_simcore_parent_project_uuid, diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py index 255f0233e82..d1a9b156942 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py @@ -16,13 +16,14 @@ from ...api.dependencies.authentication import Identity from ...models.api_resources import JobLinks -from ...models.schemas.jobs import JobPricingSpecification +from ...models.schemas.jobs import JobInputs, JobPricingSpecification from .functions_tasks import run_function _logger = logging.getLogger(__name__) pydantic_types_to_register = ( Identity, + JobInputs, JobLinks, JobPricingSpecification, RegisteredProjectFunction, diff --git a/services/api-server/tests/unit/api_functions/celery/test_functions.py b/services/api-server/tests/unit/api_functions/celery/test_functions.py index 32bdcd7fb01..a8df3d9e279 100644 --- a/services/api-server/tests/unit/api_functions/celery/test_functions.py +++ b/services/api-server/tests/unit/api_functions/celery/test_functions.py @@ -31,7 +31,6 @@ from models_library.functions import ( FunctionClass, FunctionID, - FunctionInputs, FunctionJobID, FunctionUserAccessRights, FunctionUserApiAccessRights, @@ 
-42,7 +41,7 @@ ) from models_library.projects import ProjectID from models_library.users import UserID -from pytest_mock import MockType +from pytest_mock import MockerFixture, MockType from pytest_simcore.helpers.httpx_calls_capture_models import HttpApiCallCaptureModel from servicelib.celery.models import TaskFilter, TaskID, TaskMetadata, TasksQueue from servicelib.common_headers import ( @@ -50,11 +49,15 @@ X_SIMCORE_PARENT_PROJECT_UUID, ) from simcore_service_api_server._meta import API_VTAG +from simcore_service_api_server._service_function_jobs import FunctionJobService from simcore_service_api_server.api.dependencies.authentication import Identity from simcore_service_api_server.api.dependencies.celery import ( ASYNC_JOB_CLIENT_NAME, get_task_manager, ) +from simcore_service_api_server.api.dependencies.services import ( + get_function_job_service, +) from simcore_service_api_server.api.routes.functions_routes import get_function from simcore_service_api_server.celery.worker_tasks.functions_tasks import ( run_function as run_function_task, @@ -62,6 +65,7 @@ from simcore_service_api_server.exceptions.backend_errors import BaseBackEndError from simcore_service_api_server.models.api_resources import JobLinks from simcore_service_api_server.models.schemas.jobs import ( + JobInputs, JobPricingSpecification, NodeID, ) @@ -112,7 +116,7 @@ async def run_function( *, user_identity: Identity, function: RegisteredFunction, - function_inputs: FunctionInputs, + job_inputs: JobInputs, pricing_spec: JobPricingSpecification | None, job_links: JobLinks, x_simcore_parent_project_uuid: NodeID | None, @@ -122,7 +126,7 @@ async def run_function( title=_faker.sentence(), description=_faker.paragraph(), function_uid=FunctionID(_faker.uuid4()), - inputs=function_inputs, + inputs=job_inputs.values, outputs=None, function_class=FunctionClass.PROJECT, uid=FunctionJobID(_faker.uuid4()), @@ -149,19 +153,9 @@ async def test_with_fake_run_function( app: FastAPI, client: AsyncClient, auth: 
BasicAuth, + mocker: MockerFixture, with_api_server_celery_worker: TestWorkController, ): - app.dependency_overrides[get_function] = ( - lambda: RegisteredProjectFunction.model_validate( - RegisteredProjectFunction.model_config.get("json_schema_extra", {}).get( - "examples", [] - )[0] - ) - ) - - headers = {} - headers[X_SIMCORE_PARENT_PROJECT_UUID] = "null" - headers[X_SIMCORE_PARENT_NODE_ID] = "null" body = { "input_1": _faker.uuid4(), @@ -175,6 +169,25 @@ async def test_with_fake_run_function( ], } + async def mock_get_function_job_service() -> FunctionJobService: + mock = mocker.AsyncMock(spec=FunctionJobService) + mock.run_function_pre_check.return_value = JobInputs(values=body) + return mock + + app.dependency_overrides[get_function_job_service] = mock_get_function_job_service + + app.dependency_overrides[get_function] = ( + lambda: RegisteredProjectFunction.model_validate( + RegisteredProjectFunction.model_config.get("json_schema_extra", {}).get( + "examples", [] + )[0] + ) + ) + + headers = {} + headers[X_SIMCORE_PARENT_PROJECT_UUID] = "null" + headers[X_SIMCORE_PARENT_NODE_ID] = "null" + response = await client.post( f"/{API_VTAG}/functions/{_faker.uuid4()}:run", auth=auth, diff --git a/services/api-server/tests/unit/api_functions/test_api_routers_functions.py b/services/api-server/tests/unit/api_functions/test_api_routers_functions.py index f046c25a0cb..ac40481b5ed 100644 --- a/services/api-server/tests/unit/api_functions/test_api_routers_functions.py +++ b/services/api-server/tests/unit/api_functions/test_api_routers_functions.py @@ -50,6 +50,7 @@ from simcore_service_api_server.api.dependencies.authentication import Identity from simcore_service_api_server.celery.worker_tasks import functions_tasks from simcore_service_api_server.models.api_resources import JobLinks +from simcore_service_api_server.models.schemas.jobs import JobInputs from simcore_service_api_server.services_rpc.wb_api_server import WbApiRpcClient _faker = Faker() @@ -478,7 +479,7 @@ 
def _default_side_effect( task_id=TaskID(_faker.uuid4()), user_identity=user_identity, function=mock_registered_project_function, - function_inputs={}, + job_inputs=JobInputs(values={}), pricing_spec=None, job_links=job_links, x_simcore_parent_project_uuid=None, From b306b1929ae9cdf0c02732c9f580e6048a82dac7 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 26 Aug 2025 14:43:51 +0200 Subject: [PATCH 066/111] add test for checking function job patch method --- .../functions/_controller/_functions_rpc.py | 21 +++++ .../functions/_functions_service.py | 39 ++++++++- ...function_job_collections_controller_rpc.py | 4 + .../test_function_jobs_controller_rpc.py | 84 ++++++++++++++++++- 4 files changed, 144 insertions(+), 4 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rpc.py b/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rpc.py index 1864a903276..63c0e39318d 100644 --- a/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rpc.py +++ b/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rpc.py @@ -88,6 +88,27 @@ async def register_function_job( ) +@router.expose( + reraise_if_error_type=( + UnsupportedFunctionJobClassError, + FunctionJobsWriteApiAccessDeniedError, + ) +) +async def patch_registered_function_job( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + registered_function_job: RegisteredFunctionJob, +) -> RegisteredFunctionJob: + return await _functions_service.patch_registered_function_job( + app=app, + user_id=user_id, + product_name=product_name, + registered_function_job=registered_function_job, + ) + + @router.expose(reraise_if_error_type=(FunctionJobCollectionsWriteApiAccessDeniedError,)) async def register_function_job_collection( app: web.Application, diff --git a/services/web/server/src/simcore_service_webserver/functions/_functions_service.py 
b/services/web/server/src/simcore_service_webserver/functions/_functions_service.py index cf9be30d41e..72bfa4940bf 100644 --- a/services/web/server/src/simcore_service_webserver/functions/_functions_service.py +++ b/services/web/server/src/simcore_service_webserver/functions/_functions_service.py @@ -95,6 +95,17 @@ async def register_function_job( return _decode_functionjob(created_function_job_db) +async def patch_registered_function_job( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + registered_function_job: RegisteredFunctionJob, +) -> RegisteredFunctionJob: + encoded_function_job = _encode_functionjob(registered_function_job) + return registered_function_job + + async def register_function_job_collection( app: web.Application, *, @@ -675,13 +686,31 @@ def _encode_functionjob( if functionjob.function_class == FunctionClass.PROJECT: class_specific_data = FunctionJobClassSpecificData( { - "project_job_id": str(functionjob.project_job_id), + "project_job_id": ( + str(functionjob.project_job_id) + if functionjob.project_job_id + else None + ), + "job_creation_task_id": ( + str(functionjob.job_creation_task_id) + if functionjob.job_creation_task_id + else None + ), } ) elif functionjob.function_class == FunctionClass.SOLVER: class_specific_data = FunctionJobClassSpecificData( { - "solver_job_id": str(functionjob.solver_job_id), + "solver_job_id": ( + str(functionjob.solver_job_id) + if functionjob.solver_job_id + else None + ), + "job_creation_task_id": ( + str(functionjob.job_creation_task_id) + if functionjob.job_creation_task_id + else None + ), } ) else: @@ -711,6 +740,9 @@ def _decode_functionjob( inputs=functionjob_db.inputs, outputs=functionjob_db.outputs, project_job_id=functionjob_db.class_specific_data["project_job_id"], + job_creation_task_id=functionjob_db.class_specific_data[ + "job_creation_task_id" + ], created_at=functionjob_db.created, ) @@ -723,6 +755,9 @@ def _decode_functionjob( inputs=functionjob_db.inputs, 
outputs=functionjob_db.outputs, solver_job_id=functionjob_db.class_specific_data["solver_job_id"], + job_creation_task_id=functionjob_db.class_specific_data[ + "job_creation_task_id" + ], created_at=functionjob_db.created, ) diff --git a/services/web/server/tests/unit/with_dbs/04/functions/test_function_job_collections_controller_rpc.py b/services/web/server/tests/unit/with_dbs/04/functions/test_function_job_collections_controller_rpc.py index acfb6635b7a..bd69087ecf6 100644 --- a/services/web/server/tests/unit/with_dbs/04/functions/test_function_job_collections_controller_rpc.py +++ b/services/web/server/tests/unit/with_dbs/04/functions/test_function_job_collections_controller_rpc.py @@ -60,6 +60,7 @@ async def test_function_job_collection( project_job_id=uuid4(), inputs={"input1": "value1"}, outputs={"output1": "result1"}, + job_creation_task_id=None, ) # Register the function job function_job_ids = [] @@ -71,6 +72,7 @@ async def test_function_job_collection( project_job_id=uuid4(), inputs={"input1": "value1"}, outputs={"output1": "result1"}, + job_creation_task_id=None, ) # Register the function job registered_job = await functions_rpc.register_function_job( @@ -210,6 +212,7 @@ async def test_list_function_job_collections( project_job_id=uuid4(), inputs={"input1": "value1"}, outputs={"output1": "result1"}, + job_creation_task_id=None, ) # Register the function job registered_job = await functions_rpc.register_function_job( @@ -308,6 +311,7 @@ async def test_list_function_job_collections_filtered_function_id( project_job_id=uuid4(), inputs={"input1": "value1"}, outputs={"output1": "result1"}, + job_creation_task_id=None, ) # Register the function job registered_job = await functions_rpc.register_function_job( diff --git a/services/web/server/tests/unit/with_dbs/04/functions/test_function_jobs_controller_rpc.py b/services/web/server/tests/unit/with_dbs/04/functions/test_function_jobs_controller_rpc.py index 86691cd9744..fe1417153b7 100644 --- 
a/services/web/server/tests/unit/with_dbs/04/functions/test_function_jobs_controller_rpc.py +++ b/services/web/server/tests/unit/with_dbs/04/functions/test_function_jobs_controller_rpc.py @@ -2,16 +2,22 @@ # pylint: disable=unused-argument import datetime -from uuid import uuid4 +from uuid import UUID, uuid4 import pytest from aiohttp.test_utils import TestClient from common_library.users_enums import UserRole +from faker import Faker from models_library.api_schemas_webserver.functions import ( ProjectFunction, ProjectFunctionJob, ) -from models_library.functions import FunctionJobCollection, FunctionJobStatus +from models_library.functions import ( + FunctionClass, + FunctionJobCollection, + FunctionJobStatus, + RegisteredProjectFunctionJob, +) from models_library.functions_errors import ( FunctionJobIDNotFoundError, FunctionJobReadAccessDeniedError, @@ -28,6 +34,9 @@ pytest_simcore_core_services_selection = ["rabbit"] +_faker = Faker() + + @pytest.mark.parametrize( "user_role", [UserRole.USER], @@ -57,6 +66,7 @@ async def test_register_get_delete_function_job( project_job_id=uuid4(), inputs={"input1": "value1"}, outputs={"output1": "result1"}, + job_creation_task_id=None, ) # Register the function job @@ -183,6 +193,7 @@ async def test_list_function_jobs( project_job_id=uuid4(), inputs={"input1": "value1"}, outputs={"output1": "result1"}, + job_creation_task_id=None, ) # Register the function job @@ -244,6 +255,7 @@ async def test_list_function_jobs_filtering( project_job_id=uuid4(), inputs={"input1": "value1"}, outputs={"output1": "result1"}, + job_creation_task_id=None, ) # Register the function job first_registered_function_jobs.append( @@ -262,6 +274,7 @@ async def test_list_function_jobs_filtering( project_job_id=uuid4(), inputs={"input1": "value1"}, outputs={"output1": "result1"}, + job_creation_task_id=None, ) # Register the function job second_registered_function_jobs.append( @@ -381,6 +394,7 @@ async def test_find_cached_function_jobs( 
project_job_id=uuid4(), inputs={"input1": value if value < 4 else 1}, outputs={"output1": "result1"}, + job_creation_task_id=None, ) # Register the function job @@ -421,6 +435,70 @@ async def test_find_cached_function_jobs( assert cached_jobs is None +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_patch_registered_function_jobs( + client: TestClient, + rpc_client: RabbitMQRPCClient, + add_user_function_api_access_rights: None, + logged_user: UserInfoDict, + other_logged_user: UserInfoDict, + osparc_product_name: ProductName, + mock_function: ProjectFunction, + clean_functions: None, +): + + registered_function = await functions_rpc.register_function( + rabbitmq_rpc_client=rpc_client, + function=mock_function, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + function_job = ProjectFunctionJob( + function_uid=registered_function.uid, + title="Test Function Job", + description="A test function job", + project_job_id=None, + inputs={"input1": _faker.pyint(min_value=0, max_value=1000)}, + outputs={"output1": "result1"}, + job_creation_task_id=None, + ) + + # Register the function job + registered_job = await functions_rpc.register_function_job( + rabbitmq_rpc_client=rpc_client, + function_job=function_job, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + added_data = {"job_creation_task_id": f"{uuid4()}"} + registered_job_dict = registered_job.model_dump() + registered_job_dict.update(**added_data) + registered_job = RegisteredProjectFunctionJob.model_validate(registered_job_dict) + + registered_job = await functions_rpc.patch_registered_function_job( + rabbitmq_rpc_client=rpc_client, + user_id=logged_user["id"], + product_name=osparc_product_name, + registered_function_job=registered_job, + ) + assert registered_job.function_class == FunctionClass.PROJECT + assert registered_job.job_creation_task_id == added_data["job_creation_task_id"] + + added_data.update(project_job_id=f"{uuid4()}") + + 
registered_job_dict = registered_job.model_dump() + registered_job_dict.update(**added_data) + registered_job = RegisteredProjectFunctionJob.model_validate(registered_job_dict) + assert registered_job.function_class == FunctionClass.PROJECT + assert registered_job.job_creation_task_id == added_data["job_creation_task_id"] + assert registered_job.project_job_id == UUID(added_data["project_job_id"]) + + @pytest.mark.parametrize( "user_role", [UserRole.USER], @@ -448,6 +526,7 @@ async def test_update_function_job_status( project_job_id=uuid4(), inputs={"input1": "value1"}, outputs={"output1": "result1"}, + job_creation_task_id=None, ) # Register the function job @@ -507,6 +586,7 @@ async def test_update_function_job_outputs( project_job_id=uuid4(), inputs={"input1": "value1"}, outputs=None, + job_creation_task_id=None, ) # Register the function job From 0a94743cf1a4d161ec46cc0a84d13d0a3b39e527 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 26 Aug 2025 15:29:32 +0200 Subject: [PATCH 067/111] start creating patch endpoint for function jobs --- .../functions/_controller/_functions_rpc.py | 8 +++- .../functions/_functions_repository.py | 44 +++++++++++++++++++ .../functions/_functions_service.py | 23 ++++++++-- .../test_function_jobs_controller_rpc.py | 17 +++---- 4 files changed, 79 insertions(+), 13 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rpc.py b/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rpc.py index 63c0e39318d..9631826b78d 100644 --- a/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rpc.py +++ b/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rpc.py @@ -20,6 +20,7 @@ RegisteredFunction, RegisteredFunctionJob, RegisteredFunctionJobCollection, + RegisteredFunctionJobPatch, ) from models_library.functions_errors import ( FunctionIDNotFoundError, @@ -99,13 +100,16 @@ async def 
patch_registered_function_job( *, user_id: UserID, product_name: ProductName, - registered_function_job: RegisteredFunctionJob, + function_job_uuid: FunctionJobID, + registered_function_job_patch: RegisteredFunctionJobPatch, ) -> RegisteredFunctionJob: + return await _functions_service.patch_registered_function_job( app=app, user_id=user_id, product_name=product_name, - registered_function_job=registered_function_job, + function_job_uuid=function_job_uuid, + registered_function_job_patch=registered_function_job_patch, ) diff --git a/services/web/server/src/simcore_service_webserver/functions/_functions_repository.py b/services/web/server/src/simcore_service_webserver/functions/_functions_repository.py index 45745d925a2..d8a14a6dc51 100644 --- a/services/web/server/src/simcore_service_webserver/functions/_functions_repository.py +++ b/services/web/server/src/simcore_service_webserver/functions/_functions_repository.py @@ -234,6 +234,50 @@ async def create_function_job( # noqa: PLR0913 return registered_function_job +async def patch_function_job( # noqa: PLR0913 + app: web.Application, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + product_name: ProductName, + function_job_uuid: FunctionJobID, + title: str | None, + description: str | None, + **class_specific_data: FunctionJobClassSpecificData | None, +) -> RegisteredFunctionJobDB: + + update_params = { + "title": title, + "description": description, + "class_specific_data": class_specific_data, + } + + async with transaction_context(get_asyncpg_engine(app), connection) as transaction: + await check_user_api_access_rights( + app, + connection=transaction, + user_id=user_id, + product_name=product_name, + api_access_rights=[ + FunctionsApiAccessRights.WRITE_FUNCTION_JOBS, + ], + ) + result = await transaction.execute( + function_jobs_table.update() + .where(function_jobs_table.c.uuid == function_job_uuid) + .values( + status="created", + **{k: v for k, v in update_params.items() if v is not 
None}, + ) + .returning(*_FUNCTION_JOBS_TABLE_COLS) + ) + row = result.one() + + registered_function_job = RegisteredFunctionJobDB.model_validate(row) + + return registered_function_job + + async def create_function_job_collection( app: web.Application, connection: AsyncConnection | None = None, diff --git a/services/web/server/src/simcore_service_webserver/functions/_functions_service.py b/services/web/server/src/simcore_service_webserver/functions/_functions_service.py index 72bfa4940bf..e9131968cbe 100644 --- a/services/web/server/src/simcore_service_webserver/functions/_functions_service.py +++ b/services/web/server/src/simcore_service_webserver/functions/_functions_service.py @@ -27,6 +27,7 @@ RegisteredFunctionJob, RegisteredFunctionJobCollection, RegisteredFunctionJobDB, + RegisteredFunctionJobPatch, RegisteredProjectFunction, RegisteredProjectFunctionJob, RegisteredSolverFunction, @@ -100,10 +101,26 @@ async def patch_registered_function_job( *, user_id: UserID, product_name: ProductName, - registered_function_job: RegisteredFunctionJob, + function_job_uuid: FunctionJobID, + registered_function_job_patch: RegisteredFunctionJobPatch, ) -> RegisteredFunctionJob: - encoded_function_job = _encode_functionjob(registered_function_job) - return registered_function_job + job = await _functions_repository.get_function_job( + app=app, + user_id=user_id, + product_name=product_name, + function_job_id=function_job_uuid, + ) + + await _functions_repository.patch_function_job( + app=app, + function_job_uuid=function_job_uuid, + user_id=user_id, + product_name=product_name, + title=title, + description=description, + class_specific_data=class_specific_data, + ) + return _decode_functionjob(job) async def register_function_job_collection( diff --git a/services/web/server/tests/unit/with_dbs/04/functions/test_function_jobs_controller_rpc.py b/services/web/server/tests/unit/with_dbs/04/functions/test_function_jobs_controller_rpc.py index fe1417153b7..5e18297c3fe 100644 --- 
a/services/web/server/tests/unit/with_dbs/04/functions/test_function_jobs_controller_rpc.py +++ b/services/web/server/tests/unit/with_dbs/04/functions/test_function_jobs_controller_rpc.py @@ -16,7 +16,6 @@ FunctionClass, FunctionJobCollection, FunctionJobStatus, - RegisteredProjectFunctionJob, ) from models_library.functions_errors import ( FunctionJobIDNotFoundError, @@ -476,24 +475,26 @@ async def test_patch_registered_function_jobs( ) added_data = {"job_creation_task_id": f"{uuid4()}"} - registered_job_dict = registered_job.model_dump() - registered_job_dict.update(**added_data) - registered_job = RegisteredProjectFunctionJob.model_validate(registered_job_dict) registered_job = await functions_rpc.patch_registered_function_job( rabbitmq_rpc_client=rpc_client, user_id=logged_user["id"], + function_job_uuid=registered_job.uid, product_name=osparc_product_name, - registered_function_job=registered_job, + job_creation_task_id=added_data["job_creation_task_id"], ) assert registered_job.function_class == FunctionClass.PROJECT assert registered_job.job_creation_task_id == added_data["job_creation_task_id"] added_data.update(project_job_id=f"{uuid4()}") - registered_job_dict = registered_job.model_dump() - registered_job_dict.update(**added_data) - registered_job = RegisteredProjectFunctionJob.model_validate(registered_job_dict) + registered_job = await functions_rpc.patch_registered_function_job( + rabbitmq_rpc_client=rpc_client, + user_id=logged_user["id"], + function_job_uuid=registered_job.uid, + product_name=osparc_product_name, + project_job_id=added_data["project_job_id"], + ) assert registered_job.function_class == FunctionClass.PROJECT assert registered_job.job_creation_task_id == added_data["job_creation_task_id"] assert registered_job.project_job_id == UUID(added_data["project_job_id"]) From 8c3b88156aae518678030abe8eadb2b386724b45 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 26 Aug 2025 16:02:17 +0200 Subject: [PATCH 068/111] missing files --- 
.../src/models_library/functions.py | 36 +++++++++++++++++-- .../functions/functions_rpc_interface.py | 23 ++++++++++++ .../_service_function_jobs.py | 2 ++ .../tests/unit/api_functions/conftest.py | 2 ++ 4 files changed, 61 insertions(+), 2 deletions(-) diff --git a/packages/models-library/src/models_library/functions.py b/packages/models-library/src/models_library/functions.py index 340c4db53ad..241368a33b7 100644 --- a/packages/models-library/src/models_library/functions.py +++ b/packages/models-library/src/models_library/functions.py @@ -13,6 +13,7 @@ from models_library.users import UserID from models_library.utils.enums import StrAutoEnum from pydantic import BaseModel, ConfigDict, Field +from servicelib.celery.models import TaskID from .projects import ProjectID from .utils.change_case import snake_to_camel @@ -196,22 +197,40 @@ class RegisteredFunctionJobBase(FunctionJobBase): class ProjectFunctionJob(FunctionJobBase): function_class: Literal[FunctionClass.PROJECT] = FunctionClass.PROJECT - project_job_id: ProjectID + project_job_id: ProjectID | None + job_creation_task_id: TaskID | None class RegisteredProjectFunctionJob(ProjectFunctionJob, RegisteredFunctionJobBase): pass +class RegisteredProjectFunctionJobPatch(BaseModel): + function_class: FunctionClass + title: str | None + description: str | None + project_job_id: ProjectID | None + job_creation_task_id: TaskID | None + + class SolverFunctionJob(FunctionJobBase): function_class: Literal[FunctionClass.SOLVER] = FunctionClass.SOLVER - solver_job_id: ProjectID + solver_job_id: ProjectID | None + job_creation_task_id: TaskID | None class RegisteredSolverFunctionJob(SolverFunctionJob, RegisteredFunctionJobBase): pass +class RegisteredSolverFunctionJobPatch(BaseModel): + function_class: FunctionClass + title: str | None + description: str | None + solver_job_id: ProjectID | None + job_creation_task_id: TaskID | None + + class PythonCodeFunctionJob(FunctionJobBase): function_class: 
Literal[FunctionClass.PYTHON_CODE] = FunctionClass.PYTHON_CODE @@ -220,6 +239,12 @@ class RegisteredPythonCodeFunctionJob(PythonCodeFunctionJob, RegisteredFunctionJ pass +class RegisteredPythonCodeFunctionJobPatch(BaseModel): + function_class: FunctionClass + title: str | None + description: str | None + + FunctionJob: TypeAlias = Annotated[ ProjectFunctionJob | PythonCodeFunctionJob | SolverFunctionJob, Field(discriminator="function_class"), @@ -232,6 +257,13 @@ class RegisteredPythonCodeFunctionJob(PythonCodeFunctionJob, RegisteredFunctionJ Field(discriminator="function_class"), ] +RegisteredFunctionJobPatch = Annotated[ + RegisteredProjectFunctionJobPatch + | RegisteredPythonCodeFunctionJobPatch + | RegisteredSolverFunctionJobPatch, + Field(discriminator="function_class"), +] + class FunctionJobStatus(BaseModel): status: str diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/functions/functions_rpc_interface.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/functions/functions_rpc_interface.py index 4fd80e9fcec..0dea1ae5586 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/functions/functions_rpc_interface.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/functions/functions_rpc_interface.py @@ -22,6 +22,7 @@ FunctionOutputs, FunctionUserAccessRights, FunctionUserApiAccessRights, + RegisteredFunctionJobPatch, ) from models_library.products import ProductName from models_library.rabbitmq_basic_types import RPCMethodName @@ -297,6 +298,28 @@ async def register_function_job( ) # Validates the result as a RegisteredFunctionJob +@log_decorator(_logger, level=logging.DEBUG) +async def patch_registered_function_job( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + user_id: UserID, + product_name: ProductName, + function_job_uuid: FunctionJobID, + registered_function_job_patch: RegisteredFunctionJobPatch, +) -> RegisteredFunctionJob: + 
result = await rabbitmq_rpc_client.request( + WEBSERVER_RPC_NAMESPACE, + TypeAdapter(RPCMethodName).validate_python("patch_registered_function_job"), + user_id=user_id, + product_name=product_name, + function_job_uuid=function_job_uuid, + registered_function_job_patch=registered_function_job_patch, + ) + return TypeAdapter(RegisteredFunctionJob).validate_python( + result + ) # Validates the result as a RegisteredFunctionJob + + @log_decorator(_logger, level=logging.DEBUG) async def get_function_job( rabbitmq_rpc_client: RabbitMQRPCClient, diff --git a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py index ffa7a1f2190..1fb3e6db36e 100644 --- a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py +++ b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py @@ -252,6 +252,7 @@ async def run_function( inputs=job_inputs.values, outputs=None, project_job_id=study_job.id, + job_creation_task_id=None, ), user_id=self.user_id, product_name=self.product_name, @@ -281,6 +282,7 @@ async def run_function( inputs=job_inputs.values, outputs=None, solver_job_id=solver_job.id, + job_creation_task_id=None, ), user_id=self.user_id, product_name=self.product_name, diff --git a/services/api-server/tests/unit/api_functions/conftest.py b/services/api-server/tests/unit/api_functions/conftest.py index 3d78c451598..70acd1244d7 100644 --- a/services/api-server/tests/unit/api_functions/conftest.py +++ b/services/api-server/tests/unit/api_functions/conftest.py @@ -180,6 +180,7 @@ def mock_project_function_job( "outputs": None, "project_job_id": f"{uuid4()}", "function_class": FunctionClass.PROJECT, + "job_creation_task_id": None, } return ProjectFunctionJob(**mock_function_job) @@ -209,6 +210,7 @@ def mock_solver_function_job( outputs=None, function_class=FunctionClass.SOLVER, solver_job_id=ProjectID(f"{uuid4()}"), + job_creation_task_id=None, 
) From 981736f0a09379bf1e31afa0c11dbfe227091edc Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 27 Aug 2025 15:53:22 +0200 Subject: [PATCH 069/111] add mehod for patching a registered function job to function job service --- .../src/models_library/functions_errors.py | 5 + .../_service_function_jobs.py | 199 ++++++++++++++++-- .../api/routes/functions_routes.py | 40 +++- .../services_rpc/wb_api_server.py | 17 ++ 4 files changed, 228 insertions(+), 33 deletions(-) diff --git a/packages/models-library/src/models_library/functions_errors.py b/packages/models-library/src/models_library/functions_errors.py index 6c112591c07..1935e061034 100644 --- a/packages/models-library/src/models_library/functions_errors.py +++ b/packages/models-library/src/models_library/functions_errors.py @@ -163,3 +163,8 @@ class FunctionJobCollectionsExecuteApiAccessDeniedError(FunctionBaseError): class FunctionJobPatchModelIncompatibleError(FunctionBaseError): msg_template = "Incompatible patch model for Function '{function_id}' in product '{product_name}'." status_code: int = 422 + + +class FunctionJobCacheNotFoundError(FunctionBaseError): + msg_template: str = "No cached function job found." 
+ status_code: int = 404 # Not Found diff --git a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py index b209bf0a1c7..a0b28237a42 100644 --- a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py +++ b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py @@ -1,4 +1,5 @@ from dataclasses import dataclass +from typing import NamedTuple, overload import jsonschema from common_library.exclude import as_dict_exclude_none @@ -16,11 +17,16 @@ RegisteredFunction, RegisteredFunctionJob, RegisteredFunctionJobCollection, + RegisteredProjectFunctionJobPatch, + RegisteredSolverFunctionJobPatch, SolverFunctionJob, + SolverJobID, + TaskID, ) from models_library.functions_errors import ( FunctionExecuteAccessDeniedError, FunctionInputsValidationError, + FunctionJobCacheNotFoundError, FunctionsExecuteApiAccessDeniedError, UnsupportedFunctionClassError, UnsupportedFunctionFunctionJobClassCombinationError, @@ -43,6 +49,11 @@ from .services_rpc.wb_api_server import WbApiRpcClient +class RegisteredFunctionJobData(NamedTuple): + function_job_id: FunctionJobID + job_inputs: JobInputs + + def join_inputs( default_inputs: FunctionInputs | None, function_inputs: FunctionInputs | None, @@ -162,12 +173,29 @@ async def inspect_function_job( job_status=new_job_status, ) - async def run_function_pre_check( + async def create_function_job_inputs( self, *, function: RegisteredFunction, function_inputs: FunctionInputs, ) -> JobInputs: + joined_inputs = join_inputs( + function.default_inputs, + function_inputs, + ) + return JobInputs( + values=joined_inputs or {}, + ) + + async def get_cached_function_job( + self, + *, + function: RegisteredFunction, + function_inputs: FunctionInputs, + job_inputs: JobInputs, + ) -> RegisteredFunctionJob: + """raises FunctionJobCacheNotFoundError if no cached job is found""" + user_api_access_rights = ( await 
self._web_rpc_client.get_functions_user_api_access_rights( user_id=self.user_id, product_name=self.product_name @@ -190,21 +218,137 @@ async def run_function_pre_check( function_id=function.uid, ) - joined_inputs = join_inputs( - function.default_inputs, - function_inputs, - ) + if cached_function_jobs := await self._web_rpc_client.find_cached_function_jobs( + function_id=function.uid, + inputs=job_inputs.values, + user_id=self.user_id, + product_name=self.product_name, + ): + for cached_function_job in cached_function_jobs: + job_status = await self.inspect_function_job( + function=function, + function_job=cached_function_job, + ) + if job_status.status == RunningState.SUCCESS: + return cached_function_job + + raise FunctionJobCacheNotFoundError() + + async def create_registered_function_job( + self, + *, + function: RegisteredFunction, + function_inputs: FunctionInputs, + job_inputs: JobInputs, + ) -> FunctionJobID: if function.input_schema is not None: is_valid, validation_str = await self.validate_function_inputs( function_id=function.uid, - inputs=joined_inputs, + inputs=job_inputs.values, ) if not is_valid: raise FunctionInputsValidationError(error=validation_str) - return JobInputs( - values=joined_inputs or {}, + if function.function_class == FunctionClass.PROJECT: + job = await self._web_rpc_client.register_function_job( + function_job=ProjectFunctionJob( + function_uid=function.uid, + title=f"Function job of function {function.uid}", + description=function.description, + inputs=job_inputs.values, + outputs=None, + project_job_id=None, + job_creation_task_id=None, + ), + user_id=self.user_id, + product_name=self.product_name, + ) + + elif function.function_class == FunctionClass.SOLVER: + job = await self._web_rpc_client.register_function_job( + function_job=SolverFunctionJob( + function_uid=function.uid, + title=f"Function job of function {function.uid}", + description=function.description, + inputs=job_inputs.values, + outputs=None, + solver_job_id=None, 
+ job_creation_task_id=None, + ), + user_id=self.user_id, + product_name=self.product_name, + ) + else: + raise UnsupportedFunctionClassError( + function_class=function.function_class, + ) + + return job.uid + + @overload + async def patch_registered_function_job( + self, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, + function_class: FunctionClass, + job_creation_task_id: TaskID | None, + project_job_id: ProjectID | None, + ) -> RegisteredFunctionJob: ... + + @overload + async def patch_registered_function_job( + self, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, + function_class: FunctionClass, + job_creation_task_id: TaskID | None, + solver_job_id: SolverJobID | None, + ) -> RegisteredFunctionJob: ... + + async def patch_registered_function_job( + self, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, + function_class: FunctionClass, + job_creation_task_id: TaskID | None, + project_job_id: ProjectID | None = None, + solver_job_id: SolverJobID | None = None, + ) -> RegisteredFunctionJob: + # Only allow one of project_job_id or solver_job_id depending on function_class + if function_class == FunctionClass.PROJECT: + patch = RegisteredProjectFunctionJobPatch( + title=None, + description=None, + inputs=None, + outputs=None, + job_creation_task_id=job_creation_task_id, + project_job_id=project_job_id, + ) + elif function_class == FunctionClass.SOLVER: + patch = RegisteredSolverFunctionJobPatch( + title=None, + description=None, + inputs=None, + outputs=None, + job_creation_task_id=job_creation_task_id, + solver_job_id=solver_job_id, + ) + else: + raise UnsupportedFunctionClassError( + function_class=function_class, + ) + return await self._web_rpc_client.patch_registered_function_job( + user_id=user_id, + product_name=product_name, + function_job_id=function_job_id, + registered_function_job_patch=patch, ) async def run_function( @@ -217,20 +361,7 
@@ async def run_function( x_simcore_parent_project_uuid: NodeID | None, x_simcore_parent_node_id: NodeID | None, ) -> RegisteredFunctionJob: - - if cached_function_jobs := await self._web_rpc_client.find_cached_function_jobs( - function_id=function.uid, - inputs=job_inputs.values, - user_id=self.user_id, - product_name=self.product_name, - ): - for cached_function_job in cached_function_jobs: - job_status = await self.inspect_function_job( - function=function, - function_job=cached_function_job, - ) - if job_status.status == RunningState.SUCCESS: - return cached_function_job + """N.B. this function does not check access rights. Use get_cached_function_job for that""" if function.function_class == FunctionClass.PROJECT: study_job = await self._job_service.create_studies_job( @@ -306,7 +437,7 @@ async def map_function( ) -> RegisteredFunctionJobCollection: job_inputs = [ - await self.run_function_pre_check( + await self.create_registered_function_job( function=function, function_inputs=inputs, ) @@ -335,3 +466,25 @@ async def map_function( user_id=self.user_id, product_name=self.product_name, ) + function_jobs = [ + await self.run_function( + function=function, + job_inputs=inputs, + pricing_spec=pricing_spec, + job_links=job_links, + x_simcore_parent_project_uuid=x_simcore_parent_project_uuid, + x_simcore_parent_node_id=x_simcore_parent_node_id, + ) + for inputs in job_inputs + ] + + function_job_collection_description = f"Function job collection of map of function {function.uid} with {len(function_inputs_list)} inputs" + return await self._web_rpc_client.register_function_job_collection( + function_job_collection=FunctionJobCollection( + title="Function job collection of function map", + description=function_job_collection_description, + job_ids=[function_job.uid for function_job in function_jobs], + ), + user_id=self.user_id, + product_name=self.product_name, + ) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py 
b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index d401a0a6bdc..27f126c28f8 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -17,13 +17,13 @@ RegisteredFunctionJobCollection, ) from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobFilter +from models_library.functions_errors import FunctionJobCacheNotFoundError from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.users import UserID from servicelib.celery.models import TaskFilter, TaskMetadata, TasksQueue from servicelib.fastapi.dependencies import get_reverse_url_mapper -from servicelib.long_running_tasks.models import TaskGet from ..._service_function_jobs import FunctionJobService from ..._service_functions import FunctionService @@ -314,7 +314,7 @@ async def validate_function_inputs( @function_router.post( "/{function_id:uuid}:run", - response_model=TaskGet, + response_model=RegisteredFunctionJob, responses={**_COMMON_FUNCTION_ERROR_RESPONSES}, description=create_route_description( base="Run function", @@ -333,7 +333,7 @@ async def run_function( # noqa: PLR0913 ], x_simcore_parent_project_uuid: Annotated[ProjectID | Literal["null"], Header()], x_simcore_parent_node_id: Annotated[NodeID | Literal["null"], Header()], -) -> TaskGet: +) -> RegisteredFunctionJob: task_manager = get_task_manager(request.app) parent_project_uuid = ( x_simcore_parent_project_uuid @@ -348,10 +348,28 @@ async def run_function( # noqa: PLR0913 pricing_spec = JobPricingSpecification.create_from_headers(request.headers) job_links = await function_service.get_function_job_links(to_run_function, url_for) - job_inputs = await function_job_service.run_function_pre_check( + job_inputs = await function_job_service.create_function_job_inputs( 
function=to_run_function, function_inputs=function_inputs ) + try: + # checks access rights + return await function_job_service.get_cached_function_job( + function=to_run_function, + function_inputs=function_inputs, + job_inputs=job_inputs, + ) + except FunctionJobCacheNotFoundError: + pass + + pre_registered_function_job_id = ( + await function_job_service.create_registered_function_job( + function=to_run_function, + function_inputs=function_inputs, + job_inputs=job_inputs, + ) + ) + # run function in celery task job_filter = AsyncJobFilter( user_id=user_identity.user_id, product_name=user_identity.product_name, @@ -376,12 +394,14 @@ async def run_function( # noqa: PLR0913 x_simcore_parent_node_id=parent_node_id, ) - return TaskGet( - task_id=f"{task_uuid}", - task_name=task_name, - status_href=url_for("get_task_status", task_id=task_uuid), - result_href=url_for("get_task_result", task_id=task_uuid), - abort_href=url_for("cancel_task", task_id=task_uuid), + return await function_job_service.patch_registered_function_job( + user_id=user_identity.user_id, + product_name=user_identity.product_name, + function_job_id=pre_registered_function_job_id, + registered_function_job_patch=RegisteredFunctionJobPatch( + status=RunningState.RUNNING, + task_id=task_uuid, + ), ) diff --git a/services/api-server/src/simcore_service_api_server/services_rpc/wb_api_server.py b/services/api-server/src/simcore_service_api_server/services_rpc/wb_api_server.py index 4fde8b5403c..75f7c5a7d21 100644 --- a/services/api-server/src/simcore_service_api_server/services_rpc/wb_api_server.py +++ b/services/api-server/src/simcore_service_api_server/services_rpc/wb_api_server.py @@ -28,6 +28,7 @@ FunctionOutputs, FunctionUserAccessRights, FunctionUserApiAccessRights, + RegisteredFunctionJobPatch, ) from models_library.licenses import LicensedItemID from models_library.products import ProductName @@ -489,6 +490,22 @@ async def register_function_job( function_job=function_job, ) + async def 
patch_registered_function_job( + self, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, + registered_function_job_patch: RegisteredFunctionJobPatch, + ) -> RegisteredFunctionJob: + return await functions_rpc_interface.patch_registered_function_job( + self._client, + user_id=user_id, + product_name=product_name, + function_job_uuid=function_job_id, + registered_function_job_patch=registered_function_job_patch, + ) + async def get_function_input_schema( self, *, user_id: UserID, product_name: ProductName, function_id: FunctionID ) -> FunctionInputSchema: From 4f989b612519a466e424a6527d8de741e2e7d797 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 27 Aug 2025 16:01:34 +0200 Subject: [PATCH 070/111] implement run function workflow --- .../_service_function_jobs.py | 42 ++++++++++--------- .../api/routes/functions_routes.py | 11 +++-- .../celery/worker_tasks/functions_tasks.py | 1 + 3 files changed, 28 insertions(+), 26 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py index a0b28237a42..928269e3745 100644 --- a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py +++ b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py @@ -286,6 +286,17 @@ async def create_registered_function_job( return job.uid + @overload + async def patch_registered_function_job( + self, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, + function_class: FunctionClass, + job_creation_task_id: TaskID | None, + ) -> RegisteredFunctionJob: ... 
+ @overload async def patch_registered_function_job( self, @@ -354,6 +365,7 @@ async def patch_registered_function_job( async def run_function( self, *, + job_creation_task_id: TaskID | None, function: RegisteredFunction, job_inputs: JobInputs, pricing_spec: JobPricingSpecification | None, @@ -377,18 +389,13 @@ async def run_function( job_id=study_job.id, pricing_spec=pricing_spec, ) - return await self._web_rpc_client.register_function_job( - function_job=ProjectFunctionJob( - function_uid=function.uid, - title=f"Function job of function {function.uid}", - description=function.description, - inputs=job_inputs.values, - outputs=None, - project_job_id=study_job.id, - job_creation_task_id=None, - ), + return await self.patch_registered_function_job( user_id=self.user_id, product_name=self.product_name, + function_job_id=study_job.id, + function_class=FunctionClass.PROJECT, + job_creation_task_id=job_creation_task_id, + project_job_id=study_job.id, ) if function.function_class == FunctionClass.SOLVER: @@ -407,18 +414,13 @@ async def run_function( job_id=solver_job.id, pricing_spec=pricing_spec, ) - return await self._web_rpc_client.register_function_job( - function_job=SolverFunctionJob( - function_uid=function.uid, - title=f"Function job of function {function.uid}", - description=function.description, - inputs=job_inputs.values, - outputs=None, - solver_job_id=solver_job.id, - job_creation_task_id=None, - ), + return await self.patch_registered_function_job( user_id=self.user_id, product_name=self.product_name, + function_job_id=solver_job.id, + function_class=FunctionClass.SOLVER, + job_creation_task_id=job_creation_task_id, + solver_job_id=solver_job.id, ) raise UnsupportedFunctionClassError( diff --git a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index 27f126c28f8..8860c498093 100644 --- 
a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -17,12 +17,13 @@ RegisteredFunctionJobCollection, ) from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobFilter +from models_library.functions import FunctionClass from models_library.functions_errors import FunctionJobCacheNotFoundError from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.users import UserID -from servicelib.celery.models import TaskFilter, TaskMetadata, TasksQueue +from servicelib.celery.models import TaskFilter, TaskID, TaskMetadata, TasksQueue from servicelib.fastapi.dependencies import get_reverse_url_mapper from ..._service_function_jobs import FunctionJobService @@ -347,10 +348,10 @@ async def run_function( # noqa: PLR0913 ) pricing_spec = JobPricingSpecification.create_from_headers(request.headers) job_links = await function_service.get_function_job_links(to_run_function, url_for) - job_inputs = await function_job_service.create_function_job_inputs( function=to_run_function, function_inputs=function_inputs ) + try: # checks access rights return await function_job_service.get_cached_function_job( @@ -398,10 +399,8 @@ async def run_function( # noqa: PLR0913 user_id=user_identity.user_id, product_name=user_identity.product_name, function_job_id=pre_registered_function_job_id, - registered_function_job_patch=RegisteredFunctionJobPatch( - status=RunningState.RUNNING, - task_id=task_uuid, - ), + function_class=FunctionClass.PROJECT, + job_creation_task_id=TaskID(task_uuid), ) diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py index 97400776a5f..f755f7a6e81 100644 --- 
a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py @@ -107,6 +107,7 @@ async def run_function( ) return await function_job_service.run_function( + job_creation_task_id=task_id, function=function, job_inputs=job_inputs, pricing_spec=pricing_spec, From 0a75a8ded784b04ce053b868dbfc769486f4b624 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 27 Aug 2025 16:03:30 +0200 Subject: [PATCH 071/111] minor fix --- .../simcore_service_api_server/api/routes/functions_routes.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index 8860c498093..c3fbbc19902 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -17,7 +17,6 @@ RegisteredFunctionJobCollection, ) from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobFilter -from models_library.functions import FunctionClass from models_library.functions_errors import FunctionJobCacheNotFoundError from models_library.products import ProductName from models_library.projects import ProjectID @@ -399,7 +398,7 @@ async def run_function( # noqa: PLR0913 user_id=user_identity.user_id, product_name=user_identity.product_name, function_job_id=pre_registered_function_job_id, - function_class=FunctionClass.PROJECT, + function_class=to_run_function.function_class, job_creation_task_id=TaskID(task_uuid), ) From 123069274455f1f9e3cb5d0e75a63d8913b1a006 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 28 Aug 2025 05:56:10 +0200 Subject: [PATCH 072/111] typecheck fix --- .../_service_function_jobs.py | 39 +++---------------- 1 file changed, 6 insertions(+), 33 
deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py index 928269e3745..15abedb582b 100644 --- a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py +++ b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py @@ -7,7 +7,6 @@ FunctionClass, FunctionID, FunctionInputs, - FunctionInputsList, FunctionJobCollection, FunctionJobCollectionID, FunctionJobID, @@ -17,6 +16,7 @@ RegisteredFunction, RegisteredFunctionJob, RegisteredFunctionJobCollection, + RegisteredFunctionJobPatch, RegisteredProjectFunctionJobPatch, RegisteredSolverFunctionJobPatch, SolverFunctionJob, @@ -333,6 +333,7 @@ async def patch_registered_function_job( solver_job_id: SolverJobID | None = None, ) -> RegisteredFunctionJob: # Only allow one of project_job_id or solver_job_id depending on function_class + patch: RegisteredFunctionJobPatch if function_class == FunctionClass.PROJECT: patch = RegisteredProjectFunctionJobPatch( title=None, @@ -430,46 +431,18 @@ async def run_function( async def map_function( self, *, + job_creation_task_id: TaskID | None, function: RegisteredFunction, - function_inputs_list: FunctionInputsList, + job_inputs: list[JobInputs], job_links: JobLinks, pricing_spec: JobPricingSpecification | None, x_simcore_parent_project_uuid: ProjectID | None, x_simcore_parent_node_id: NodeID | None, ) -> RegisteredFunctionJobCollection: - job_inputs = [ - await self.create_registered_function_job( - function=function, - function_inputs=inputs, - ) - for inputs in function_inputs_list - ] - - function_jobs = [ - await self.run_function( - function=function, - job_inputs=inputs, - pricing_spec=pricing_spec, - job_links=job_links, - x_simcore_parent_project_uuid=x_simcore_parent_project_uuid, - x_simcore_parent_node_id=x_simcore_parent_node_id, - ) - for inputs in job_inputs - ] - - function_job_collection_description = 
f"Function job collection of map of function {function.uid} with {len(function_inputs_list)} inputs" - return await self._web_rpc_client.register_function_job_collection( - function_job_collection=FunctionJobCollection( - title="Function job collection of function map", - description=function_job_collection_description, - job_ids=[function_job.uid for function_job in function_jobs], - ), - user_id=self.user_id, - product_name=self.product_name, - ) function_jobs = [ await self.run_function( + job_creation_task_id=job_creation_task_id, function=function, job_inputs=inputs, pricing_spec=pricing_spec, @@ -480,7 +453,7 @@ async def map_function( for inputs in job_inputs ] - function_job_collection_description = f"Function job collection of map of function {function.uid} with {len(function_inputs_list)} inputs" + function_job_collection_description = f"Function job collection of map of function {function.uid} with {len(job_inputs)} inputs" return await self._web_rpc_client.register_function_job_collection( function_job_collection=FunctionJobCollection( title="Function job collection of function map", From 52288be5d8ecc06362c53cc2a88b843efdffedd4 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 28 Aug 2025 06:13:14 +0200 Subject: [PATCH 073/111] propagate pre_registered_function_job_id properly --- .../src/simcore_service_api_server/_service_function_jobs.py | 5 +++-- .../api/routes/functions_routes.py | 1 + .../celery/worker_tasks/functions_tasks.py | 4 +++- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py index 15abedb582b..fbdd5c0f859 100644 --- a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py +++ b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py @@ -368,6 +368,7 @@ async def run_function( *, job_creation_task_id: TaskID | None, 
function: RegisteredFunction, + pre_registered_function_job_id: FunctionJobID, job_inputs: JobInputs, pricing_spec: JobPricingSpecification | None, job_links: JobLinks, @@ -393,7 +394,7 @@ async def run_function( return await self.patch_registered_function_job( user_id=self.user_id, product_name=self.product_name, - function_job_id=study_job.id, + function_job_id=pre_registered_function_job_id, function_class=FunctionClass.PROJECT, job_creation_task_id=job_creation_task_id, project_job_id=study_job.id, @@ -418,7 +419,7 @@ async def run_function( return await self.patch_registered_function_job( user_id=self.user_id, product_name=self.product_name, - function_job_id=solver_job.id, + function_job_id=pre_registered_function_job_id, function_class=FunctionClass.SOLVER, job_creation_task_id=job_creation_task_id, solver_job_id=solver_job.id, diff --git a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index c3fbbc19902..3626a78fafb 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -387,6 +387,7 @@ async def run_function( # noqa: PLR0913 task_filter=task_filter, user_identity=user_identity, function=to_run_function, + pre_registered_function_job_id=pre_registered_function_job_id, job_inputs=job_inputs, pricing_spec=pricing_spec, job_links=job_links, diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py index f755f7a6e81..0db32ff1901 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py @@ -3,7 +3,7 @@ ) from celery_library.utils import 
get_app_server # pylint: disable=no-name-in-module from fastapi import FastAPI -from models_library.functions import RegisteredFunction +from models_library.functions import FunctionJobID, RegisteredFunction from models_library.projects_nodes_io import NodeID from servicelib.celery.models import TaskID from simcore_service_api_server._service_function_jobs import FunctionJobService @@ -94,6 +94,7 @@ async def run_function( *, user_identity: Identity, function: RegisteredFunction, + pre_registered_function_job_id: FunctionJobID, job_inputs: JobInputs, pricing_spec: JobPricingSpecification | None, job_links: JobLinks, @@ -109,6 +110,7 @@ async def run_function( return await function_job_service.run_function( job_creation_task_id=task_id, function=function, + pre_registered_function_job_id=pre_registered_function_job_id, job_inputs=job_inputs, pricing_spec=pricing_spec, job_links=job_links, From 07f7ec801957aa7ba01db5b8e5d10966672152fc Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 28 Aug 2025 06:44:32 +0200 Subject: [PATCH 074/111] cleanup --- .../_service_function_jobs.py | 46 ++++++++++--------- .../api/routes/functions_routes.py | 14 +++--- .../celery/worker_tasks/functions_tasks.py | 11 ++--- .../models/domain/functions.py | 9 ++++ 4 files changed, 44 insertions(+), 36 deletions(-) create mode 100644 services/api-server/src/simcore_service_api_server/models/domain/functions.py diff --git a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py index fbdd5c0f859..7ed882f92e6 100644 --- a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py +++ b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py @@ -1,5 +1,5 @@ from dataclasses import dataclass -from typing import NamedTuple, overload +from typing import overload import jsonschema from common_library.exclude import as_dict_exclude_none @@ -42,6 +42,7 @@ 
from ._service_jobs import JobService from .models.api_resources import JobLinks +from .models.domain.functions import PreRegisteredFunctionJobData from .models.schemas.jobs import ( JobInputs, JobPricingSpecification, @@ -49,11 +50,6 @@ from .services_rpc.wb_api_server import WbApiRpcClient -class RegisteredFunctionJobData(NamedTuple): - function_job_id: FunctionJobID - job_inputs: JobInputs - - def join_inputs( default_inputs: FunctionInputs | None, function_inputs: FunctionInputs | None, @@ -191,10 +187,15 @@ async def get_cached_function_job( self, *, function: RegisteredFunction, - function_inputs: FunctionInputs, job_inputs: JobInputs, ) -> RegisteredFunctionJob: - """raises FunctionJobCacheNotFoundError if no cached job is found""" + """ + N.B. this function checks access rights + + raises FunctionsExecuteApiAccessDeniedError if user cannot execute functions + raises FunctionJobCacheNotFoundError if no cached job is found + + """ user_api_access_rights = ( await self._web_rpc_client.get_functions_user_api_access_rights( @@ -234,13 +235,12 @@ async def get_cached_function_job( raise FunctionJobCacheNotFoundError() - async def create_registered_function_job( + async def pre_register_function_job( self, *, function: RegisteredFunction, - function_inputs: FunctionInputs, job_inputs: JobInputs, - ) -> FunctionJobID: + ) -> PreRegisteredFunctionJobData: if function.input_schema is not None: is_valid, validation_str = await self.validate_function_inputs( @@ -284,7 +284,10 @@ async def create_registered_function_job( function_class=function.function_class, ) - return job.uid + return PreRegisteredFunctionJobData( + function_job_id=job.uid, + job_inputs=job_inputs, + ) @overload async def patch_registered_function_job( @@ -368,8 +371,7 @@ async def run_function( *, job_creation_task_id: TaskID | None, function: RegisteredFunction, - pre_registered_function_job_id: FunctionJobID, - job_inputs: JobInputs, + pre_registered_function_job_data: 
PreRegisteredFunctionJobData, pricing_spec: JobPricingSpecification | None, job_links: JobLinks, x_simcore_parent_project_uuid: NodeID | None, @@ -380,7 +382,7 @@ async def run_function( if function.function_class == FunctionClass.PROJECT: study_job = await self._job_service.create_studies_job( study_id=function.project_id, - job_inputs=job_inputs, + job_inputs=pre_registered_function_job_data.job_inputs, hidden=True, job_links=job_links, x_simcore_parent_project_uuid=x_simcore_parent_project_uuid, @@ -394,7 +396,7 @@ async def run_function( return await self.patch_registered_function_job( user_id=self.user_id, product_name=self.product_name, - function_job_id=pre_registered_function_job_id, + function_job_id=pre_registered_function_job_data.function_job_id, function_class=FunctionClass.PROJECT, job_creation_task_id=job_creation_task_id, project_job_id=study_job.id, @@ -404,7 +406,7 @@ async def run_function( solver_job = await self._job_service.create_solver_job( solver_key=function.solver_key, version=function.solver_version, - inputs=job_inputs, + inputs=pre_registered_function_job_data.job_inputs, job_links=job_links, hidden=True, x_simcore_parent_project_uuid=x_simcore_parent_project_uuid, @@ -419,7 +421,7 @@ async def run_function( return await self.patch_registered_function_job( user_id=self.user_id, product_name=self.product_name, - function_job_id=pre_registered_function_job_id, + function_job_id=pre_registered_function_job_data.function_job_id, function_class=FunctionClass.SOLVER, job_creation_task_id=job_creation_task_id, solver_job_id=solver_job.id, @@ -434,7 +436,7 @@ async def map_function( *, job_creation_task_id: TaskID | None, function: RegisteredFunction, - job_inputs: list[JobInputs], + pre_registered_function_job_data_list: list[PreRegisteredFunctionJobData], job_links: JobLinks, pricing_spec: JobPricingSpecification | None, x_simcore_parent_project_uuid: ProjectID | None, @@ -445,16 +447,16 @@ async def map_function( await self.run_function( 
job_creation_task_id=job_creation_task_id, function=function, - job_inputs=inputs, + pre_registered_function_job_data=data, pricing_spec=pricing_spec, job_links=job_links, x_simcore_parent_project_uuid=x_simcore_parent_project_uuid, x_simcore_parent_node_id=x_simcore_parent_node_id, ) - for inputs in job_inputs + for data in pre_registered_function_job_data_list ] - function_job_collection_description = f"Function job collection of map of function {function.uid} with {len(job_inputs)} inputs" + function_job_collection_description = f"Function job collection of map of function {function.uid} with {len(pre_registered_function_job_data_list)} inputs" return await self._web_rpc_client.register_function_job_collection( function_job_collection=FunctionJobCollection( title="Function job collection of function map", diff --git a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index 3626a78fafb..ae83611227b 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -334,6 +334,7 @@ async def run_function( # noqa: PLR0913 x_simcore_parent_project_uuid: Annotated[ProjectID | Literal["null"], Header()], x_simcore_parent_node_id: Annotated[NodeID | Literal["null"], Header()], ) -> RegisteredFunctionJob: + # massage inputs task_manager = get_task_manager(request.app) parent_project_uuid = ( x_simcore_parent_project_uuid @@ -351,20 +352,18 @@ async def run_function( # noqa: PLR0913 function=to_run_function, function_inputs=function_inputs ) + # check if results are cached try: - # checks access rights return await function_job_service.get_cached_function_job( function=to_run_function, - function_inputs=function_inputs, job_inputs=job_inputs, ) except FunctionJobCacheNotFoundError: pass - pre_registered_function_job_id = ( - await 
function_job_service.create_registered_function_job( + pre_registered_function_job_data = ( + await function_job_service.pre_register_function_job( function=to_run_function, - function_inputs=function_inputs, job_inputs=job_inputs, ) ) @@ -387,8 +386,7 @@ async def run_function( # noqa: PLR0913 task_filter=task_filter, user_identity=user_identity, function=to_run_function, - pre_registered_function_job_id=pre_registered_function_job_id, - job_inputs=job_inputs, + pre_registered_function_job_data=pre_registered_function_job_data, pricing_spec=pricing_spec, job_links=job_links, x_simcore_parent_project_uuid=parent_project_uuid, @@ -398,7 +396,7 @@ async def run_function( # noqa: PLR0913 return await function_job_service.patch_registered_function_job( user_id=user_identity.user_id, product_name=user_identity.product_name, - function_job_id=pre_registered_function_job_id, + function_job_id=pre_registered_function_job_data.function_job_id, function_class=to_run_function.function_class, job_creation_task_id=TaskID(task_uuid), ) diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py index 0db32ff1901..a8dec972344 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py @@ -3,7 +3,7 @@ ) from celery_library.utils import get_app_server # pylint: disable=no-name-in-module from fastapi import FastAPI -from models_library.functions import FunctionJobID, RegisteredFunction +from models_library.functions import RegisteredFunction from models_library.projects_nodes_io import NodeID from servicelib.celery.models import TaskID from simcore_service_api_server._service_function_jobs import FunctionJobService @@ -21,7 +21,8 @@ from ...api.dependencies.webserver_http import get_session_cookie, 
get_webserver_session from ...api.dependencies.webserver_rpc import get_wb_api_rpc_client from ...models.api_resources import JobLinks -from ...models.schemas.jobs import JobInputs, JobPricingSpecification +from ...models.domain.functions import PreRegisteredFunctionJobData +from ...models.schemas.jobs import JobPricingSpecification from ...services_http.director_v2 import DirectorV2Api from ...services_http.storage import StorageApi @@ -94,8 +95,7 @@ async def run_function( *, user_identity: Identity, function: RegisteredFunction, - pre_registered_function_job_id: FunctionJobID, - job_inputs: JobInputs, + pre_registered_function_job_data: PreRegisteredFunctionJobData, pricing_spec: JobPricingSpecification | None, job_links: JobLinks, x_simcore_parent_project_uuid: NodeID | None, @@ -110,8 +110,7 @@ async def run_function( return await function_job_service.run_function( job_creation_task_id=task_id, function=function, - pre_registered_function_job_id=pre_registered_function_job_id, - job_inputs=job_inputs, + pre_registered_function_job_data=pre_registered_function_job_data, pricing_spec=pricing_spec, job_links=job_links, x_simcore_parent_project_uuid=x_simcore_parent_project_uuid, diff --git a/services/api-server/src/simcore_service_api_server/models/domain/functions.py b/services/api-server/src/simcore_service_api_server/models/domain/functions.py new file mode 100644 index 00000000000..ff4e56ba34b --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/models/domain/functions.py @@ -0,0 +1,9 @@ +from models_library.functions import FunctionJobID +from pydantic import BaseModel + +from ...models.schemas.jobs import JobInputs + + +class PreRegisteredFunctionJobData(BaseModel): + function_job_id: FunctionJobID + job_inputs: JobInputs From bf6bb7e8b8a27ac3169b806934a536b257ff93cb Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 28 Aug 2025 07:38:42 +0200 Subject: [PATCH 075/111] fix fake run fcn test --- .../celery/worker_tasks/tasks.py | 2 + 
.../api_functions/celery/test_functions.py | 86 ++++++++++++++----- .../tests/unit/api_functions/conftest.py | 7 +- 3 files changed, 71 insertions(+), 24 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py index d1a9b156942..b212a04dc28 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py @@ -16,6 +16,7 @@ from ...api.dependencies.authentication import Identity from ...models.api_resources import JobLinks +from ...models.domain.functions import PreRegisteredFunctionJobData from ...models.schemas.jobs import JobInputs, JobPricingSpecification from .functions_tasks import run_function @@ -26,6 +27,7 @@ JobInputs, JobLinks, JobPricingSpecification, + PreRegisteredFunctionJobData, RegisteredProjectFunction, RegisteredProjectFunctionJob, RegisteredPythonCodeFunction, diff --git a/services/api-server/tests/unit/api_functions/celery/test_functions.py b/services/api-server/tests/unit/api_functions/celery/test_functions.py index a8df3d9e279..6f403f66dc4 100644 --- a/services/api-server/tests/unit/api_functions/celery/test_functions.py +++ b/services/api-server/tests/unit/api_functions/celery/test_functions.py @@ -38,6 +38,7 @@ RegisteredFunctionJob, RegisteredProjectFunction, RegisteredProjectFunctionJob, + RegisteredProjectFunctionJobPatch, ) from models_library.projects import ProjectID from models_library.users import UserID @@ -49,23 +50,20 @@ X_SIMCORE_PARENT_PROJECT_UUID, ) from simcore_service_api_server._meta import API_VTAG -from simcore_service_api_server._service_function_jobs import FunctionJobService from simcore_service_api_server.api.dependencies.authentication import Identity from simcore_service_api_server.api.dependencies.celery import ( ASYNC_JOB_CLIENT_NAME, get_task_manager, ) -from 
simcore_service_api_server.api.dependencies.services import ( - get_function_job_service, -) -from simcore_service_api_server.api.routes.functions_routes import get_function from simcore_service_api_server.celery.worker_tasks.functions_tasks import ( run_function as run_function_task, ) from simcore_service_api_server.exceptions.backend_errors import BaseBackEndError from simcore_service_api_server.models.api_resources import JobLinks +from simcore_service_api_server.models.domain.functions import ( + PreRegisteredFunctionJobData, +) from simcore_service_api_server.models.schemas.jobs import ( - JobInputs, JobPricingSpecification, NodeID, ) @@ -116,7 +114,7 @@ async def run_function( *, user_identity: Identity, function: RegisteredFunction, - job_inputs: JobInputs, + pre_registered_function_job_data: PreRegisteredFunctionJobData, pricing_spec: JobPricingSpecification | None, job_links: JobLinks, x_simcore_parent_project_uuid: NodeID | None, @@ -126,12 +124,13 @@ async def run_function( title=_faker.sentence(), description=_faker.paragraph(), function_uid=FunctionID(_faker.uuid4()), - inputs=job_inputs.values, + inputs=pre_registered_function_job_data.job_inputs.values, outputs=None, function_class=FunctionClass.PROJECT, uid=FunctionJobID(_faker.uuid4()), created_at=_faker.date_time(), project_job_id=ProjectID(_faker.uuid4()), + job_creation_task_id=None, ) # check our mock task is correct @@ -155,6 +154,12 @@ async def test_with_fake_run_function( auth: BasicAuth, mocker: MockerFixture, with_api_server_celery_worker: TestWorkController, + mock_handler_in_functions_rpc_interface: Callable[ + [str, Any, Exception | None, Callable | None], None + ], + mock_registered_project_function: RegisteredProjectFunction, + mock_registered_project_function_job: RegisteredFunctionJob, + user_id: UserID, ): body = { @@ -169,19 +174,55 @@ async def test_with_fake_run_function( ], } - async def mock_get_function_job_service() -> FunctionJobService: - mock = 
mocker.AsyncMock(spec=FunctionJobService) - mock.run_function_pre_check.return_value = JobInputs(values=body) - return mock - - app.dependency_overrides[get_function_job_service] = mock_get_function_job_service + mock_handler_in_functions_rpc_interface( + "get_function_user_permissions", + FunctionUserAccessRights( + user_id=user_id, + execute=True, + read=True, + write=True, + ), + None, + None, + ) + mock_handler_in_functions_rpc_interface( + "get_functions_user_api_access_rights", + FunctionUserApiAccessRights( + user_id=user_id, + read_functions=True, + write_functions=True, + execute_functions=True, + read_function_jobs=True, + write_function_jobs=True, + execute_function_jobs=True, + read_function_job_collections=True, + write_function_job_collections=True, + execute_function_job_collections=True, + ), + None, + None, + ) + mock_handler_in_functions_rpc_interface( + "get_function", mock_registered_project_function, None, None + ) + mock_handler_in_functions_rpc_interface("find_cached_function_jobs", [], None, None) + mock_handler_in_functions_rpc_interface( + "register_function_job", mock_registered_project_function_job, None, None + ) - app.dependency_overrides[get_function] = ( - lambda: RegisteredProjectFunction.model_validate( - RegisteredProjectFunction.model_config.get("json_schema_extra", {}).get( - "examples", [] - )[0] + async def _patch_side_effect(*args, **kwargs): + registered_function_job_patch = kwargs["registered_function_job_patch"] + assert isinstance( + registered_function_job_patch, RegisteredProjectFunctionJobPatch ) + job_creation_task_id = registered_function_job_patch.job_creation_task_id + assert job_creation_task_id is not None + return mock_registered_project_function_job.model_copy( + update={"job_creation_task_id": job_creation_task_id} + ) + + mock_handler_in_functions_rpc_interface( + "patch_registered_function_job", None, None, _patch_side_effect ) headers = {} @@ -196,10 +237,11 @@ async def mock_get_function_job_service() -> 
FunctionJobService: ) assert response.status_code == status.HTTP_200_OK - task = TaskGet.model_validate(response.json()) - + function_job = RegisteredProjectFunctionJob.model_validate(response.json()) + celery_task_id = function_job.job_creation_task_id + assert celery_task_id is not None # Poll until task completion and get result - result = await poll_task_until_done(client, auth, task.task_id) + result = await poll_task_until_done(client, auth, celery_task_id) RegisteredProjectFunctionJob.model_validate(result.result) diff --git a/services/api-server/tests/unit/api_functions/conftest.py b/services/api-server/tests/unit/api_functions/conftest.py index 70acd1244d7..729b2967a78 100644 --- a/services/api-server/tests/unit/api_functions/conftest.py +++ b/services/api-server/tests/unit/api_functions/conftest.py @@ -260,21 +260,24 @@ def mock_registered_function_job_collection( @pytest.fixture() def mock_handler_in_functions_rpc_interface( mock_wb_api_server_rpc: MockerFixture, -) -> Callable[[str, Any, Exception | None], None]: +) -> Callable[[str, Any, Exception | None, Callable | None], None]: def _mock( handler_name: str = "", return_value: Any = None, exception: Exception | None = None, + side_effect: Callable | None = None, ) -> MockType: from servicelib.rabbitmq.rpc_interfaces.webserver.functions import ( functions_rpc_interface, ) + assert exception is None or side_effect is None + return mock_wb_api_server_rpc.patch.object( functions_rpc_interface, handler_name, return_value=return_value, - side_effect=exception, + side_effect=exception or side_effect, ) return _mock From ff8b50aded7db65f95a247b64a0cbff0afc36ef7 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 28 Aug 2025 09:08:24 +0200 Subject: [PATCH 076/111] fix tests --- .../api_functions/celery/test_functions.py | 63 +++++++++++++------ 1 file changed, 44 insertions(+), 19 deletions(-) diff --git a/services/api-server/tests/unit/api_functions/celery/test_functions.py 
b/services/api-server/tests/unit/api_functions/celery/test_functions.py index 6f403f66dc4..9ff08307847 100644 --- a/services/api-server/tests/unit/api_functions/celery/test_functions.py +++ b/services/api-server/tests/unit/api_functions/celery/test_functions.py @@ -7,6 +7,7 @@ import inspect from collections.abc import Callable +from functools import partial from pathlib import Path from typing import Any @@ -23,7 +24,6 @@ from fastapi import FastAPI, status from httpx import AsyncClient, BasicAuth, HTTPStatusError from models_library.api_schemas_long_running_tasks.tasks import ( - TaskGet, TaskResult, TaskStatus, ) @@ -146,6 +146,18 @@ def _(celery_app: Celery) -> None: return _ +async def _patch_registered_function_job_side_effect( + mock_registered_project_function_job: RegisteredFunctionJob, *args, **kwargs +): + registered_function_job_patch = kwargs["registered_function_job_patch"] + assert isinstance(registered_function_job_patch, RegisteredProjectFunctionJobPatch) + job_creation_task_id = registered_function_job_patch.job_creation_task_id + assert job_creation_task_id is not None + return mock_registered_project_function_job.model_copy( + update={"job_creation_task_id": job_creation_task_id} + ) + + @pytest.mark.parametrize("register_celery_tasks", [_register_fake_run_function_task()]) @pytest.mark.parametrize("add_worker_tasks", [False]) async def test_with_fake_run_function( @@ -210,19 +222,14 @@ async def test_with_fake_run_function( "register_function_job", mock_registered_project_function_job, None, None ) - async def _patch_side_effect(*args, **kwargs): - registered_function_job_patch = kwargs["registered_function_job_patch"] - assert isinstance( - registered_function_job_patch, RegisteredProjectFunctionJobPatch - ) - job_creation_task_id = registered_function_job_patch.job_creation_task_id - assert job_creation_task_id is not None - return mock_registered_project_function_job.model_copy( - update={"job_creation_task_id": job_creation_task_id} - ) - 
mock_handler_in_functions_rpc_interface( - "patch_registered_function_job", None, None, _patch_side_effect + "patch_registered_function_job", + None, + None, + partial( + _patch_registered_function_job_side_effect, + mock_registered_project_function_job, + ), ) headers = {} @@ -313,7 +320,9 @@ async def test_run_project_function_parent_info( app: FastAPI, with_api_server_celery_worker: TestWorkController, client: AsyncClient, - mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], + mock_handler_in_functions_rpc_interface: Callable[ + [str, Any, Exception | None, Callable | None], None + ], mock_registered_project_function: RegisteredProjectFunction, mock_registered_project_function_job: RegisteredFunctionJob, auth: httpx.BasicAuth, @@ -358,13 +367,15 @@ def _default_side_effect( read=True, write=True, ), + None, + None, ) mock_handler_in_functions_rpc_interface( - "get_function", mock_registered_project_function + "get_function", mock_registered_project_function, None, None ) - mock_handler_in_functions_rpc_interface("find_cached_function_jobs", []) + mock_handler_in_functions_rpc_interface("find_cached_function_jobs", [], None, None) mock_handler_in_functions_rpc_interface( - "register_function_job", mock_registered_project_function_job + "register_function_job", mock_registered_project_function_job, None, None ) mock_handler_in_functions_rpc_interface( "get_functions_user_api_access_rights", @@ -374,6 +385,17 @@ def _default_side_effect( write_functions=True, read_functions=True, ), + None, + None, + ) + mock_handler_in_functions_rpc_interface( + "patch_registered_function_job", + None, + None, + partial( + _patch_registered_function_job_side_effect, + mock_registered_project_function_job, + ), ) headers = {} @@ -390,6 +412,9 @@ def _default_side_effect( ) assert response.status_code == expected_status_code if response.status_code == status.HTTP_200_OK: - task = TaskGet.model_validate(response.json()) - result = await poll_task_until_done(client, 
auth, task.task_id) + function_job = RegisteredProjectFunctionJob.model_validate(response.json()) + celery_task_id = function_job.job_creation_task_id + assert celery_task_id is not None + # Poll until task completion and get result + result = await poll_task_until_done(client, auth, celery_task_id) RegisteredProjectFunctionJob.model_validate(result.result) From 34ee07f68a296c8f115843a9260608b9774d7759 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 28 Aug 2025 09:16:17 +0200 Subject: [PATCH 077/111] test fix --- .../api_functions/test_api_routers_functions.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/services/api-server/tests/unit/api_functions/test_api_routers_functions.py b/services/api-server/tests/unit/api_functions/test_api_routers_functions.py index ac40481b5ed..1f956f50f7c 100644 --- a/services/api-server/tests/unit/api_functions/test_api_routers_functions.py +++ b/services/api-server/tests/unit/api_functions/test_api_routers_functions.py @@ -50,6 +50,9 @@ from simcore_service_api_server.api.dependencies.authentication import Identity from simcore_service_api_server.celery.worker_tasks import functions_tasks from simcore_service_api_server.models.api_resources import JobLinks +from simcore_service_api_server.models.domain.functions import ( + PreRegisteredFunctionJobData, +) from simcore_service_api_server.models.schemas.jobs import JobInputs from simcore_service_api_server.services_rpc.wb_api_server import WbApiRpcClient @@ -473,13 +476,21 @@ def _default_side_effect( read_functions=True, ), ) + mock_handler_in_functions_rpc_interface( + "patch_registered_function_job", mock_registered_project_function_job + ) + + pre_registered_function_job_data = PreRegisteredFunctionJobData( + job_inputs=JobInputs(values={}), + function_job_id=mock_registered_project_function.uid, + ) job = await functions_tasks.run_function( task=MagicMock(spec=Task), task_id=TaskID(_faker.uuid4()), user_identity=user_identity, 
function=mock_registered_project_function, - job_inputs=JobInputs(values={}), + pre_registered_function_job_data=pre_registered_function_job_data, pricing_spec=None, job_links=job_links, x_simcore_parent_project_uuid=None, From eebdf2f4a4b1585b503b90457160c0102184895a Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 28 Aug 2025 09:25:13 +0200 Subject: [PATCH 078/111] pylint fix --- .../src/simcore_service_api_server/_service_function_jobs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py index 7ed882f92e6..2203c83dfa7 100644 --- a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py +++ b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py @@ -169,7 +169,7 @@ async def inspect_function_job( job_status=new_job_status, ) - async def create_function_job_inputs( + async def create_function_job_inputs( # pylint: disable=no-self-use self, *, function: RegisteredFunction, From fc7b39ea8728d2bd73660f66943ba1f332c560af Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 28 Aug 2025 14:24:57 +0200 Subject: [PATCH 079/111] implement function job status endpoint --- .../src/models_library/functions_errors.py | 5 -- .../_service_function_jobs.py | 13 +++-- .../api/dependencies/celery.py | 3 +- .../api/routes/function_jobs_routes.py | 55 ++++++++++++++++++- .../api/routes/functions_routes.py | 2 +- .../exceptions/function_errors.py | 17 ++++++ 6 files changed, 80 insertions(+), 15 deletions(-) create mode 100644 services/api-server/src/simcore_service_api_server/exceptions/function_errors.py diff --git a/packages/models-library/src/models_library/functions_errors.py b/packages/models-library/src/models_library/functions_errors.py index 1935e061034..6c112591c07 100644 --- a/packages/models-library/src/models_library/functions_errors.py +++ 
b/packages/models-library/src/models_library/functions_errors.py @@ -163,8 +163,3 @@ class FunctionJobCollectionsExecuteApiAccessDeniedError(FunctionBaseError): class FunctionJobPatchModelIncompatibleError(FunctionBaseError): msg_template = "Incompatible patch model for Function '{function_id}' in product '{product_name}'." status_code: int = 422 - - -class FunctionJobCacheNotFoundError(FunctionBaseError): - msg_template: str = "No cached function job found." - status_code: int = 404 # Not Found diff --git a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py index 2203c83dfa7..fc056d6f934 100644 --- a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py +++ b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py @@ -26,7 +26,6 @@ from models_library.functions_errors import ( FunctionExecuteAccessDeniedError, FunctionInputsValidationError, - FunctionJobCacheNotFoundError, FunctionsExecuteApiAccessDeniedError, UnsupportedFunctionClassError, UnsupportedFunctionFunctionJobClassCombinationError, @@ -41,6 +40,10 @@ from pydantic import ValidationError from ._service_jobs import JobService +from .exceptions.function_errors import ( + FunctionJobCacheNotFoundError, + FunctionJobProjectMissingError, +) from .models.api_resources import JobLinks from .models.domain.functions import PreRegisteredFunctionJobData from .models.schemas.jobs import ( @@ -127,7 +130,7 @@ async def validate_function_inputs( async def inspect_function_job( self, function: RegisteredFunction, function_job: RegisteredFunctionJob ) -> FunctionJobStatus: - + """Raises FunctionJobProjectNotRegisteredError if no project is associated with job""" stored_job_status = await self._web_rpc_client.get_function_job_status( function_job_id=function_job.uid, user_id=self.user_id, @@ -141,14 +144,16 @@ async def inspect_function_job( function.function_class == 
FunctionClass.PROJECT and function_job.function_class == FunctionClass.PROJECT ): - assert function_job.project_job_id is not None # nosec + if function_job.project_job_id is None: + raise FunctionJobProjectMissingError() job_status = await self._job_service.inspect_study_job( job_id=function_job.project_job_id, ) elif (function.function_class == FunctionClass.SOLVER) and ( function_job.function_class == FunctionClass.SOLVER ): - assert function_job.solver_job_id is not None # nosec + if function_job.solver_job_id is None: + raise FunctionJobProjectMissingError() job_status = await self._job_service.inspect_solver_job( solver_key=function.solver_key, version=function.solver_version, diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py index 235f2fa382d..8e955b5454c 100644 --- a/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py +++ b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py @@ -4,7 +4,6 @@ from celery_library.task_manager import CeleryTaskManager from celery_library.types import register_celery_types, register_pydantic_types from fastapi import FastAPI -from servicelib.celery.task_manager import TaskManager from settings_library.celery import CelerySettings from ...celery.worker_tasks.tasks import pydantic_types_to_register @@ -24,7 +23,7 @@ async def on_startup() -> None: app.add_event_handler("startup", on_startup) -def get_task_manager(app: FastAPI) -> TaskManager: +def get_task_manager(app: FastAPI) -> CeleryTaskManager: assert hasattr(app.state, "task_manager") # nosec task_manager = app.state.task_manager assert isinstance(task_manager, CeleryTaskManager) # nosec diff --git a/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py index d34cc0da0c6..cf4c2f3d0fc 
100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py @@ -1,5 +1,7 @@ +from logging import getLogger from typing import Annotated, Final +from common_library.error_codes import create_error_code from fastapi import APIRouter, Depends, FastAPI, HTTPException, status from fastapi_pagination.api import create_page from fastapi_pagination.bases import AbstractPage @@ -15,10 +17,13 @@ from models_library.functions import RegisteredFunction from models_library.functions_errors import ( UnsupportedFunctionClassError, + UnsupportedFunctionFunctionJobClassCombinationError, ) from models_library.products import ProductName from models_library.users import UserID +from servicelib.celery.models import TaskUUID from servicelib.fastapi.dependencies import get_app +from servicelib.logging_errors import create_troubleshootting_log_kwargs from simcore_service_api_server.models.schemas.functions_filters import ( FunctionJobsListFilters, ) @@ -26,12 +31,14 @@ from ..._service_function_jobs import FunctionJobService from ..._service_jobs import JobService +from ...exceptions.function_errors import FunctionJobProjectMissingError from ...models.pagination import Page, PaginationParams from ...models.schemas.errors import ErrorGet from ...services_http.storage import StorageApi from ...services_http.webserver import AuthSession from ...services_rpc.wb_api_server import WbApiRpcClient from ..dependencies.authentication import get_current_user_id, get_product_name +from ..dependencies.celery import get_task_manager from ..dependencies.database import get_db_asyncpg_engine from ..dependencies.functions import ( get_function_from_functionjob, @@ -52,6 +59,9 @@ FMSG_CHANGELOG_NEW_IN_VERSION, create_route_description, ) +from .tasks import _get_task_filter + +_logger = getLogger(__name__) # pylint: disable=too-many-arguments # pylint: disable=cyclic-import @@ 
-196,6 +206,9 @@ async def delete_function_job( ), ) async def function_job_status( + app: Annotated[FastAPI, Depends(get_app)], + user_id: Annotated[UserID, Depends(get_current_user_id)], + product_name: Annotated[ProductName, Depends(get_product_name)], function_job: Annotated[ RegisteredFunctionJob, Depends(get_function_job_dependency) ], @@ -204,10 +217,46 @@ async def function_job_status( FunctionJobService, Depends(get_function_job_service) ], ) -> FunctionJobStatus: + try: + return await function_job_service.inspect_function_job( + function=function, function_job=function_job + ) + except FunctionJobProjectMissingError as exc: + if ( + function.function_class == FunctionClass.PROJECT + and function_job.function_class == FunctionClass.PROJECT + ) or ( + function.function_class == FunctionClass.SOLVER + and function_job.function_class == FunctionClass.SOLVER + ): + if task_id := function_job.job_creation_task_id: + task_manager = get_task_manager(app) + task_filter = _get_task_filter(user_id, product_name) + task_status = await task_manager.get_task_status( + task_uuid=TaskUUID(task_id), task_filter=task_filter + ) + return FunctionJobStatus( + status=f"JOB_CREATION_TASK_STATUS_{task_status.task_state}" + ) + user_error_msg = f"The creation of job {function_job.uid} failed" + support_id = create_error_code(Exception()) + _logger.exception( + **create_troubleshootting_log_kwargs( + user_error_msg, + error=Exception(), + error_code=support_id, + tip="Initial call to run metamodeling function must have failed", + ) + ) + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Function job is not in a valid state", + ) from exc - return await function_job_service.inspect_function_job( - function=function, function_job=function_job - ) + raise UnsupportedFunctionFunctionJobClassCombinationError( + function_class=function.function_class, + function_job_class=function_job.function_class, + ) async def get_function_from_functionjobid( diff 
--git a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index ae83611227b..a7aa4f54c71 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -17,7 +17,6 @@ RegisteredFunctionJobCollection, ) from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobFilter -from models_library.functions_errors import FunctionJobCacheNotFoundError from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID @@ -28,6 +27,7 @@ from ..._service_function_jobs import FunctionJobService from ..._service_functions import FunctionService from ...celery.worker_tasks.functions_tasks import run_function as run_function_task +from ...exceptions.function_errors import FunctionJobCacheNotFoundError from ...models.pagination import Page, PaginationParams from ...models.schemas.errors import ErrorGet from ...models.schemas.jobs import JobPricingSpecification diff --git a/services/api-server/src/simcore_service_api_server/exceptions/function_errors.py b/services/api-server/src/simcore_service_api_server/exceptions/function_errors.py new file mode 100644 index 00000000000..28d44a43556 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/exceptions/function_errors.py @@ -0,0 +1,17 @@ +from fastapi import status + +from .backend_errors import BaseBackEndError + + +class BaseFunctionBackendError(BaseBackEndError): + pass + + +class FunctionJobCacheNotFoundError(BaseBackEndError): + msg_template: str = "No cached function job found." 
+ status_code: int = 404 # Not Found + + +class FunctionJobProjectMissingError(BaseBackEndError): + msg_template: str = "Could not process function job" + status_code: int = status.HTTP_500_INTERNAL_SERVER_ERROR # Internal Server Error From 8ed5bbddc50e1545be46e82ce837597af75032db Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Thu, 28 Aug 2025 15:37:12 +0200 Subject: [PATCH 080/111] start implementing test for getting function job status --- .../test_api_routers_function_jobs.py | 56 ++++++++++++++++++- 1 file changed, 54 insertions(+), 2 deletions(-) diff --git a/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py b/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py index 6bdda8e4ecc..21f65a8bab7 100644 --- a/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py +++ b/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py @@ -8,21 +8,33 @@ import httpx import pytest +import simcore_service_api_server.api.routes.function_jobs_routes as function_jobs_routes +from celery_library.task_manager import CeleryTaskManager +from faker import Faker +from fastapi import FastAPI from httpx import AsyncClient from models_library.api_schemas_webserver.functions import ( ProjectFunctionJob, RegisteredProjectFunctionJob, ) -from models_library.functions import FunctionJobStatus, RegisteredProjectFunction +from models_library.functions import ( + FunctionJobStatus, + RegisteredProjectFunction, +) from models_library.products import ProductName +from models_library.progress_bar import ProgressReport, ProgressStructuredMessage +from models_library.projects import ProjectID from models_library.projects_state import RunningState from models_library.rest_pagination import PageMetaInfoLimitOffset from models_library.users import UserID from pytest_mock import MockerFixture, MockType from servicelib.aiohttp import status +from servicelib.celery.models import TaskFilter, TaskState, TaskStatus,
TaskUUID from simcore_service_api_server._meta import API_VTAG from simcore_service_api_server.models.schemas.jobs import JobStatus +_faker = Faker() + async def test_delete_function_job( client: AsyncClient, @@ -179,19 +191,59 @@ def mocked_list_function_jobs(offset: int, limit: int): @pytest.mark.parametrize("job_status", ["SUCCESS", "FAILED", "STARTED"]) +@pytest.mark.parametrize("project_job_id", [None, ProjectID(_faker.uuid4())]) +@pytest.mark.parametrize("job_creation_task_id", [None, TaskUUID(_faker.uuid4())]) async def test_get_function_job_status( + app: FastAPI, mocked_app_dependencies: None, client: AsyncClient, + mocker: MockerFixture, mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], mock_registered_project_function_job: RegisteredProjectFunctionJob, mock_registered_project_function: RegisteredProjectFunction, mock_method_in_jobs_service: Callable[[str, Any], None], auth: httpx.BasicAuth, job_status: str, + project_job_id: ProjectID, + job_creation_task_id: TaskUUID | None, ) -> None: + def _mock_task_manager(*args, **kwargs) -> CeleryTaskManager: + async def _get_task_status( + task_uuid: TaskUUID, task_filter: TaskFilter + ) -> TaskStatus: + assert task_uuid == job_creation_task_id + return TaskStatus( + task_uuid=task_uuid, + task_state=TaskState.STARTED, + progress_report=ProgressReport( + actual_value=0.5, + total=1.0, + attempt=1, + unit=None, + message=ProgressStructuredMessage.model_validate( + ProgressStructuredMessage.model_config["json_schema_extra"][ + "examples" + ][0] + ), + ), + ) + + obj = mocker.Mock(spec=CeleryTaskManager) + obj.get_task_status = _get_task_status + return obj + + mocker.patch.object(function_jobs_routes, "get_task_manager", _mock_task_manager) + mock_handler_in_functions_rpc_interface( - "get_function_job", mock_registered_project_function_job + "get_function_job", + mock_registered_project_function_job.model_copy( + update={ + "user_id": ANY, + "project_job_id": project_job_id, + 
"job_creation_task_id": job_creation_task_id, + } + ), ) mock_handler_in_functions_rpc_interface( "get_function", mock_registered_project_function From eab5e08dec3db15117e1c9aae1146c27868a75fc Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Fri, 29 Aug 2025 09:26:34 +0200 Subject: [PATCH 081/111] finish test for getting function job status --- .../api/routes/function_jobs_routes.py | 4 +- .../test_api_routers_function_jobs.py | 54 +++++++++++++++---- 2 files changed, 47 insertions(+), 11 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py index cf4c2f3d0fc..fc71866a66c 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py @@ -70,6 +70,8 @@ JOB_LIST_FILTER_PAGE_RELEASE_VERSION = "0.11.0" JOB_LOG_RELEASE_VERSION = "0.11.0" +_JOB_CREATION_TASK_STATUS_PREFIX: Final[str] = "JOB_CREATION_TASK_STATUS_" + function_job_router = APIRouter() _COMMON_FUNCTION_JOB_ERROR_RESPONSES: Final[dict] = { @@ -236,7 +238,7 @@ async def function_job_status( task_uuid=TaskUUID(task_id), task_filter=task_filter ) return FunctionJobStatus( - status=f"JOB_CREATION_TASK_STATUS_{task_status.task_state}" + status=f"{_JOB_CREATION_TASK_STATUS_PREFIX}{task_status.task_state}" ) user_error_msg = f"The creation of job {function_job.uid} failed" support_id = create_error_code(Exception()) diff --git a/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py b/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py index 21f65a8bab7..289684a4377 100644 --- a/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py +++ b/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py @@ -1,5 +1,6 @@ # pylint: disable=unused-argument 
+import random import uuid from collections.abc import Callable from datetime import datetime @@ -11,7 +12,7 @@ import simcore_service_api_server.api.routes.function_jobs_routes as function_jobs_routes from celery_library.task_manager import CeleryTaskManager from faker import Faker -from fastapi import FastAPI +from fastapi import FastAPI, status from httpx import AsyncClient from models_library.api_schemas_webserver.functions import ( ProjectFunctionJob, @@ -20,6 +21,7 @@ from models_library.functions import ( FunctionJobStatus, RegisteredProjectFunction, + TaskID, ) from models_library.products import ProductName from models_library.progress_bar import ProgressReport, ProgressStructuredMessage @@ -28,9 +30,11 @@ from models_library.rest_pagination import PageMetaInfoLimitOffset from models_library.users import UserID from pytest_mock import MockerFixture, MockType -from servicelib.aiohttp import status from servicelib.celery.models import TaskFilter, TaskState, TaskStatus, TaskUUID from simcore_service_api_server._meta import API_VTAG +from simcore_service_api_server.api.routes.function_jobs_routes import ( + _JOB_CREATION_TASK_STATUS_PREFIX, +) from simcore_service_api_server.models.schemas.jobs import JobStatus _faker = Faker() @@ -191,8 +195,18 @@ def mocked_list_function_jobs(offset: int, limit: int): @pytest.mark.parametrize("job_status", ["SUCCESS", "FAILED", "STARTED"]) -@pytest.mark.parametrize("project_job_id", [None, ProjectID(_faker.uuid4())]) -@pytest.mark.parametrize("job_creation_task_id", [None, TaskUUID(_faker.uuid4())]) +@pytest.mark.parametrize( + "project_job_id, job_creation_task_id, celery_task_state", + [ + ( + ProjectID(_faker.uuid4()), + TaskID(_faker.uuid4()), + random.choice([state for state in TaskState]), + ), + (None, None, random.choice([state for state in TaskState])), + (None, TaskID(_faker.uuid4()), random.choice([state for state in TaskState])), + ], +) async def test_get_function_job_status( app: FastAPI, 
mocked_app_dependencies: None, @@ -205,17 +219,26 @@ async def test_get_function_job_status( auth: httpx.BasicAuth, job_status: str, project_job_id: ProjectID, - job_creation_task_id: TaskUUID | None, + job_creation_task_id: TaskID | None, + celery_task_state: TaskState, ) -> None: + _expected_return_status = ( + status.HTTP_500_INTERNAL_SERVER_ERROR + if job_status != "SUCCESS" + and job_status != "FAILED" + and (project_job_id is None and job_creation_task_id is None) + else status.HTTP_200_OK + ) + def _mock_task_manager(*args, **kwargs) -> CeleryTaskManager: async def _get_task_status( task_uuid: TaskUUID, task_filter: TaskFilter ) -> TaskStatus: - assert task_uuid == job_creation_task_id + assert f"{task_uuid}" == job_creation_task_id return TaskStatus( task_uuid=task_uuid, - task_state=TaskState.STARTED, + task_state=celery_task_state, progress_report=ProgressReport( actual_value=0.5, total=1.0, @@ -271,9 +294,20 @@ async def _get_task_status( f"{API_VTAG}/function_jobs/{mock_registered_project_function_job.uid}/status", auth=auth, ) - assert response.status_code == status.HTTP_200_OK - data = response.json() - assert data["status"] == job_status + assert response.status_code == _expected_return_status + if response.status_code == status.HTTP_200_OK: + data = response.json() + if ( + project_job_id is not None + or job_status == "SUCCESS" + or job_status == "FAILED" + ): + assert data["status"] == job_status + else: + assert ( + data["status"] + == f"{_JOB_CREATION_TASK_STATUS_PREFIX}{celery_task_state}" + ) @pytest.mark.parametrize("job_outputs", [{"X+Y": 42, "X-Y": 10}]) From 9401f48cabee2f054426ea81eb57fdcd55591aa9 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Fri, 29 Aug 2025 09:56:58 +0200 Subject: [PATCH 082/111] fix get function job outputs and test --- .../api/routes/function_jobs_routes.py | 12 +++++-- .../test_api_routers_function_jobs.py | 33 +++++++++++++++---- 2 files changed, 37 insertions(+), 8 deletions(-) diff --git 
a/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py index fc71866a66c..699042e8174 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py @@ -317,7 +317,11 @@ async def function_job_outputs( function.function_class == FunctionClass.PROJECT and function_job.function_class == FunctionClass.PROJECT ): - assert function_job.project_job_id is not None # nosec + if function_job.project_job_id is None: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Function job outputs not found", + ) new_outputs = dict( ( await studies_jobs.get_study_job_outputs( @@ -333,7 +337,11 @@ async def function_job_outputs( function.function_class == FunctionClass.SOLVER and function_job.function_class == FunctionClass.SOLVER ): - assert function_job.solver_job_id is not None # nosec + if function_job.solver_job_id is None: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Function job outputs not found", + ) new_outputs = dict( ( await solvers_jobs_read.get_job_outputs( diff --git a/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py b/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py index 289684a4377..7ed2257af02 100644 --- a/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py +++ b/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py @@ -310,7 +310,13 @@ async def _get_task_status( ) -@pytest.mark.parametrize("job_outputs", [{"X+Y": 42, "X-Y": 10}]) +@pytest.mark.parametrize( + "job_outputs, project_job_id", + [ + (None, None), + ({"X+Y": 42, "X-Y": 10}, ProjectID(_faker.uuid4())), + ], +) async def test_get_function_job_outputs( client: AsyncClient, 
mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], @@ -318,11 +324,25 @@ async def test_get_function_job_outputs( mock_registered_project_function: RegisteredProjectFunction, mocked_webserver_rpc_api: dict[str, MockType], auth: httpx.BasicAuth, - job_outputs: dict[str, Any], + job_outputs: dict[str, Any] | None, + project_job_id: ProjectID | None, ) -> None: + _expected_return_status = ( + status.HTTP_404_NOT_FOUND + if project_job_id is None and job_outputs is None + else status.HTTP_200_OK + ) + mock_handler_in_functions_rpc_interface( - "get_function_job", mock_registered_project_function_job + "get_function_job", + mock_registered_project_function_job.model_copy( + update={ + "user_id": ANY, + "project_job_id": project_job_id, + "job_creation_task_id": None, + } + ), ) mock_handler_in_functions_rpc_interface( "get_function", mock_registered_project_function @@ -333,6 +353,7 @@ async def test_get_function_job_outputs( f"{API_VTAG}/function_jobs/{mock_registered_project_function_job.uid}/outputs", auth=auth, ) - assert response.status_code == status.HTTP_200_OK - data = response.json() - assert data == job_outputs + assert response.status_code == _expected_return_status + if response.status_code == status.HTTP_200_OK: + data = response.json() + assert data == job_outputs From 55df24119d86c825060a10d6e65f78fff36ea6f0 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Fri, 29 Aug 2025 12:58:55 +0200 Subject: [PATCH 083/111] add implementation for map endpoint --- .../_service_function_jobs.py | 19 +---- .../api/routes/functions_routes.py | 77 ++++++++++++++++++- .../celery/worker_tasks/functions_tasks.py | 36 ++++++++- .../celery/worker_tasks/tasks.py | 3 +- 4 files changed, 110 insertions(+), 25 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py index fc056d6f934..4038ab9b6fc 100644 --- 
a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py +++ b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py @@ -7,7 +7,6 @@ FunctionClass, FunctionID, FunctionInputs, - FunctionJobCollection, FunctionJobCollectionID, FunctionJobID, FunctionJobStatus, @@ -15,7 +14,6 @@ ProjectFunctionJob, RegisteredFunction, RegisteredFunctionJob, - RegisteredFunctionJobCollection, RegisteredFunctionJobPatch, RegisteredProjectFunctionJobPatch, RegisteredSolverFunctionJobPatch, @@ -446,9 +444,9 @@ async def map_function( pricing_spec: JobPricingSpecification | None, x_simcore_parent_project_uuid: ProjectID | None, x_simcore_parent_node_id: NodeID | None, - ) -> RegisteredFunctionJobCollection: + ) -> None: - function_jobs = [ + for data in pre_registered_function_job_data_list: await self.run_function( job_creation_task_id=job_creation_task_id, function=function, @@ -458,16 +456,3 @@ async def map_function( x_simcore_parent_project_uuid=x_simcore_parent_project_uuid, x_simcore_parent_node_id=x_simcore_parent_node_id, ) - for data in pre_registered_function_job_data_list - ] - - function_job_collection_description = f"Function job collection of map of function {function.uid} with {len(pre_registered_function_job_data_list)} inputs" - return await self._web_rpc_client.register_function_job_collection( - function_job_collection=FunctionJobCollection( - title="Function job collection of function map", - description=function_job_collection_description, - job_ids=[function_job.uid for function_job in function_jobs], - ), - user_id=self.user_id, - product_name=self.product_name, - ) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index a7aa4f54c71..fd93c414ed8 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ 
b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -17,6 +17,7 @@ RegisteredFunctionJobCollection, ) from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobFilter +from models_library.functions import FunctionJobCollection, FunctionJobID from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID @@ -26,8 +27,10 @@ from ..._service_function_jobs import FunctionJobService from ..._service_functions import FunctionService +from ...celery.worker_tasks.functions_tasks import map as map_task from ...celery.worker_tasks.functions_tasks import run_function as run_function_task from ...exceptions.function_errors import FunctionJobCacheNotFoundError +from ...models.domain.functions import PreRegisteredFunctionJobData from ...models.pagination import Page, PaginationParams from ...models.schemas.errors import ErrorGet from ...models.schemas.jobs import JobPricingSpecification @@ -43,7 +46,7 @@ get_function_job_service, get_function_service, ) -from ..dependencies.webserver_rpc import get_wb_api_rpc_client +from ..dependencies.webserver_rpc import WbApiRpcClient, get_wb_api_rpc_client from ._constants import ( FMSG_CHANGELOG_ADDED_IN_VERSION, FMSG_CHANGELOG_NEW_IN_VERSION, @@ -441,6 +444,7 @@ async def delete_function( ) async def map_function( # noqa: PLR0913 request: Request, + user_identity: Annotated[Identity, Depends(get_current_identity)], to_run_function: Annotated[RegisteredFunction, Depends(get_function)], function_inputs_list: FunctionInputsList, url_for: Annotated[Callable, Depends(get_reverse_url_mapper)], @@ -448,10 +452,12 @@ async def map_function( # noqa: PLR0913 FunctionJobService, Depends(get_function_job_service) ], function_service: Annotated[FunctionService, Depends(get_function_service)], + web_api_rpc_client: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], x_simcore_parent_project_uuid: 
Annotated[ProjectID | Literal["null"], Header()], x_simcore_parent_node_id: Annotated[NodeID | Literal["null"], Header()], ) -> RegisteredFunctionJobCollection: + task_manager = get_task_manager(request.app) parent_project_uuid = ( x_simcore_parent_project_uuid if isinstance(x_simcore_parent_project_uuid, ProjectID) @@ -463,14 +469,77 @@ async def map_function( # noqa: PLR0913 else None ) pricing_spec = JobPricingSpecification.create_from_headers(request.headers) - job_links = await function_service.get_function_job_links(to_run_function, url_for) - return await function_jobs_service.map_function( + job_inputs_list = [ + await function_jobs_service.create_function_job_inputs( + function=to_run_function, function_inputs=function_inputs + ) + for function_inputs in function_inputs_list + ] + + cached_job_uuids: list[FunctionJobID] = [] + pre_registered_function_job_data_list: list[PreRegisteredFunctionJobData] = [] + + for job_inputs in job_inputs_list: + try: + cached_job = await function_jobs_service.get_cached_function_job( + function=to_run_function, + job_inputs=job_inputs, + ) + cached_job_uuids.append(cached_job.uid) + except FunctionJobCacheNotFoundError: + data = await function_jobs_service.pre_register_function_job( + function=to_run_function, + job_inputs=job_inputs, + ) + pre_registered_function_job_data_list.append(data) + + # run map in celery task + job_filter = AsyncJobFilter( + user_id=user_identity.user_id, + product_name=user_identity.product_name, + client_name=ASYNC_JOB_CLIENT_NAME, + ) + task_filter = TaskFilter.model_validate(job_filter.model_dump()) + task_name = map_task.__name__ + + task_uuid = await task_manager.submit_task( + TaskMetadata( + name=task_name, + ephemeral=True, + queue=TasksQueue.API_WORKER_QUEUE, + ), + task_filter=task_filter, + user_identity=user_identity, function=to_run_function, - function_inputs_list=function_inputs_list, + pre_registered_function_job_data_list=pre_registered_function_job_data_list, 
pricing_spec=pricing_spec, job_links=job_links, x_simcore_parent_project_uuid=parent_project_uuid, x_simcore_parent_node_id=parent_node_id, ) + + # patch pre-registered function jobs + for data in pre_registered_function_job_data_list: + await function_jobs_service.patch_registered_function_job( + user_id=user_identity.user_id, + product_name=user_identity.product_name, + function_job_id=data.function_job_id, + function_class=to_run_function.function_class, + job_creation_task_id=TaskID(task_uuid), + ) + + function_job_collection_description = f"Function job collection of map of function {to_run_function.uid} with {len(pre_registered_function_job_data_list)} inputs" + job_ids = cached_job_uuids + [ + data.function_job_id for data in pre_registered_function_job_data_list + ] + return await web_api_rpc_client.register_function_job_collection( + function_job_collection=FunctionJobCollection( + title="Function job collection of function map", + description=function_job_collection_description, + job_ids=job_ids, + ), + user_id=user_identity.user_id, + product_name=user_identity.product_name, + ) diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py index a8dec972344..492ebe0d058 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py @@ -3,7 +3,8 @@ ) from celery_library.utils import get_app_server # pylint: disable=no-name-in-module from fastapi import FastAPI -from models_library.functions import RegisteredFunction +from models_library.functions import RegisteredFunction, RegisteredFunctionJob +from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from servicelib.celery.models import TaskID from simcore_service_api_server._service_function_jobs 
import FunctionJobService @@ -98,9 +99,9 @@ async def run_function( pre_registered_function_job_data: PreRegisteredFunctionJobData, pricing_spec: JobPricingSpecification | None, job_links: JobLinks, - x_simcore_parent_project_uuid: NodeID | None, + x_simcore_parent_project_uuid: ProjectID | None, x_simcore_parent_node_id: NodeID | None, -): +) -> RegisteredFunctionJob: assert task_id # nosec app = get_app_server(task.app).app function_job_service = await _assemble_function_job_service( @@ -116,3 +117,32 @@ async def run_function( x_simcore_parent_project_uuid=x_simcore_parent_project_uuid, x_simcore_parent_node_id=x_simcore_parent_node_id, ) + + +async def map( + task: Task, + task_id: TaskID, + *, + user_identity: Identity, + function: RegisteredFunction, + pre_registered_function_job_data_list: list[PreRegisteredFunctionJobData], + job_links: JobLinks, + pricing_spec: JobPricingSpecification | None, + x_simcore_parent_project_uuid: ProjectID | None, + x_simcore_parent_node_id: NodeID | None, +) -> None: + assert task_id # nosec + app = get_app_server(task.app).app + function_job_service = await _assemble_function_job_service( + app=app, user_identity=user_identity + ) + + return await function_job_service.map_function( + job_creation_task_id=task_id, + function=function, + pre_registered_function_job_data_list=pre_registered_function_job_data_list, + pricing_spec=pricing_spec, + job_links=job_links, + x_simcore_parent_project_uuid=x_simcore_parent_project_uuid, + x_simcore_parent_node_id=x_simcore_parent_node_id, + ) diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py index b212a04dc28..2bd7a242deb 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py @@ -18,7 +18,7 @@ from ...models.api_resources import JobLinks from 
...models.domain.functions import PreRegisteredFunctionJobData from ...models.schemas.jobs import JobInputs, JobPricingSpecification -from .functions_tasks import run_function +from .functions_tasks import map, run_function _logger = logging.getLogger(__name__) @@ -43,3 +43,4 @@ def setup_worker_tasks(app: Celery) -> None: with log_context(_logger, logging.INFO, msg="worker task registration"): register_task(app, run_function) + register_task(app, map) From 95ae30d9b084b34b4e82c9da157d37ec917c1f2d Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Fri, 29 Aug 2025 14:05:23 +0200 Subject: [PATCH 084/111] move test so that it uses celery worker --- .../api_functions/celery/test_functions.py | 140 +++++++++++++++++- .../test_api_routers_functions.py | 119 +-------------- 2 files changed, 147 insertions(+), 112 deletions(-) diff --git a/services/api-server/tests/unit/api_functions/celery/test_functions.py b/services/api-server/tests/unit/api_functions/celery/test_functions.py index 9ff08307847..a0a0b35c9b1 100644 --- a/services/api-server/tests/unit/api_functions/celery/test_functions.py +++ b/services/api-server/tests/unit/api_functions/celery/test_functions.py @@ -5,6 +5,7 @@ # pylint: disable=too-many-arguments +import datetime import inspect from collections.abc import Callable from functools import partial @@ -31,11 +32,13 @@ from models_library.functions import ( FunctionClass, FunctionID, + FunctionJobCollection, FunctionJobID, FunctionUserAccessRights, FunctionUserApiAccessRights, RegisteredFunction, RegisteredFunctionJob, + RegisteredFunctionJobCollection, RegisteredProjectFunction, RegisteredProjectFunctionJob, RegisteredProjectFunctionJobPatch, @@ -119,7 +122,7 @@ async def run_function( job_links: JobLinks, x_simcore_parent_project_uuid: NodeID | None, x_simcore_parent_node_id: NodeID | None, - ): + ) -> RegisteredFunctionJob: return RegisteredProjectFunctionJob( title=_faker.sentence(), description=_faker.paragraph(), @@ -418,3 +421,138 @@ def 
_default_side_effect( # Poll until task completion and get result result = await poll_task_until_done(client, auth, celery_task_id) RegisteredProjectFunctionJob.model_validate(result.result) + + +@pytest.mark.parametrize( + "parent_project_uuid, parent_node_uuid, expected_status_code", + [ + (None, None, status.HTTP_422_UNPROCESSABLE_ENTITY), + (f"{_faker.uuid4()}", None, status.HTTP_422_UNPROCESSABLE_ENTITY), + (None, f"{_faker.uuid4()}", status.HTTP_422_UNPROCESSABLE_ENTITY), + (f"{_faker.uuid4()}", f"{_faker.uuid4()}", status.HTTP_200_OK), + ("null", "null", status.HTTP_200_OK), + ], +) +@pytest.mark.parametrize("capture", ["run_study_function_parent_info.json"]) +@pytest.mark.parametrize("mocked_app_dependencies", [None]) +async def test_map_function_parent_info( + app: FastAPI, + with_api_server_celery_worker: TestWorkController, + client: AsyncClient, + mock_handler_in_functions_rpc_interface: Callable[ + [str, Any, Exception | None, Callable | None], None + ], + mock_registered_project_function: RegisteredProjectFunction, + mock_registered_project_function_job: RegisteredFunctionJob, + auth: httpx.BasicAuth, + user_id: UserID, + mocked_webserver_rest_api_base: respx.MockRouter, + mocked_directorv2_rest_api_base: respx.MockRouter, + mocked_webserver_rpc_api: dict[str, MockType], + create_respx_mock_from_capture, + project_tests_dir: Path, + parent_project_uuid: str | None, + parent_node_uuid: str | None, + expected_status_code: int, + capture: str, +) -> None: + + side_effect_checks = {} + + def _default_side_effect( + side_effect_checks: dict, + request: httpx.Request, + path_params: dict[str, Any], + capture: HttpApiCallCaptureModel, + ) -> Any: + if request.method == "POST" and request.url.path.endswith("/projects"): + side_effect_checks["headers_checked"] = True + if parent_project_uuid and parent_project_uuid != "null": + _parent_uuid = request.headers.get(X_SIMCORE_PARENT_PROJECT_UUID) + assert _parent_uuid is not None + assert parent_project_uuid == 
_parent_uuid + if parent_node_uuid and parent_node_uuid != "null": + _parent_node_uuid = request.headers.get(X_SIMCORE_PARENT_NODE_ID) + assert _parent_node_uuid is not None + assert parent_node_uuid == _parent_node_uuid + return capture.response_body + + create_respx_mock_from_capture( + respx_mocks=[mocked_webserver_rest_api_base, mocked_directorv2_rest_api_base], + capture_path=project_tests_dir / "mocks" / capture, + side_effects_callbacks=[partial(_default_side_effect, side_effect_checks)] * 50, + ) + + mock_handler_in_functions_rpc_interface( + "get_function_user_permissions", + FunctionUserAccessRights( + user_id=user_id, + execute=True, + read=True, + write=True, + ), + None, + None, + ) + mock_handler_in_functions_rpc_interface( + "get_function", mock_registered_project_function, None, None + ) + mock_handler_in_functions_rpc_interface("find_cached_function_jobs", [], None, None) + mock_handler_in_functions_rpc_interface( + "register_function_job", mock_registered_project_function_job, None, None + ) + mock_handler_in_functions_rpc_interface( + "get_functions_user_api_access_rights", + FunctionUserApiAccessRights( + user_id=user_id, + execute_functions=True, + write_functions=True, + read_functions=True, + ), + None, + None, + ) + mock_handler_in_functions_rpc_interface( + "register_function_job_collection", + RegisteredFunctionJobCollection( + uid=FunctionJobID(_faker.uuid4()), + title="Test Collection", + description="A test function job collection", + job_ids=[], + created_at=datetime.datetime.now(datetime.UTC), + ), + None, + None, + ) + + patch_mock = mock_handler_in_functions_rpc_interface( + "patch_registered_function_job", + None, + None, + partial( + _patch_registered_function_job_side_effect, + mock_registered_project_function_job, + ), + ) + + headers = {} + if parent_project_uuid: + headers[X_SIMCORE_PARENT_PROJECT_UUID] = parent_project_uuid + if parent_node_uuid: + headers[X_SIMCORE_PARENT_NODE_ID] = parent_node_uuid + + response = await 
client.post( + f"{API_VTAG}/functions/{mock_registered_project_function.uid}:map", + json=[{}, {}], + auth=auth, + headers=headers, + ) + assert response.status_code == expected_status_code + + if expected_status_code == status.HTTP_200_OK: + job_collection = FunctionJobCollection.model_validate(response.json()) + task_id = patch_mock.call_args.kwargs[ + "registered_function_job_patch" + ].job_creation_task_id + await poll_task_until_done(client, auth, f"{task_id}") + assert side_effect_checks["headers_checked"] is True diff --git a/services/api-server/tests/unit/api_functions/test_api_routers_functions.py b/services/api-server/tests/unit/api_functions/test_api_routers_functions.py index 1f956f50f7c..fbbb8936b71 100644 --- a/services/api-server/tests/unit/api_functions/test_api_routers_functions.py +++ b/services/api-server/tests/unit/api_functions/test_api_routers_functions.py @@ -3,18 +3,17 @@ # pylint: disable=too-many-positional-arguments # pylint: disable=redefined-outer-name -import datetime from collections.abc import Callable -from functools import partial from pathlib import Path from typing import Any from unittest.mock import MagicMock -from uuid import UUID, uuid4 +from uuid import uuid4 import httpx import pytest import respx from celery import Task # pylint: disable=no-name-in-module +from celery_library.task_manager import CeleryTaskManager from faker import Faker from fastapi import FastAPI from httpx import AsyncClient @@ -25,7 +24,6 @@ ProjectFunction, RegisteredFunction, RegisteredFunctionJob, - RegisteredFunctionJobCollection, RegisteredProjectFunction, RegisteredProjectFunctionJob, ) @@ -331,6 +329,7 @@ async def test_delete_function( ) async def test_run_map_function_not_allowed( client: AsyncClient, + mocker: MockerFixture, mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], mock_registered_project_function: RegisteredProjectFunction, auth: httpx.BasicAuth, @@ -343,6 +342,11 @@ async def test_run_map_function_not_allowed( ) 
-> None: """Test that running a function is not allowed.""" + mocker.patch( + "simcore_service_api_server.api.routes.functions_routes.get_task_manager", + return_value=mocker.MagicMock(spec=CeleryTaskManager), + ) + mock_handler_in_functions_rpc_interface( "get_function_user_permissions", FunctionUserAccessRights( @@ -499,113 +503,6 @@ def _default_side_effect( assert isinstance(job, RegisteredProjectFunctionJob) -@pytest.mark.parametrize( - "parent_project_uuid, parent_node_uuid, expected_status_code", - [ - (None, None, status.HTTP_422_UNPROCESSABLE_ENTITY), - (f"{_faker.uuid4()}", None, status.HTTP_422_UNPROCESSABLE_ENTITY), - (None, f"{_faker.uuid4()}", status.HTTP_422_UNPROCESSABLE_ENTITY), - (f"{_faker.uuid4()}", f"{_faker.uuid4()}", status.HTTP_200_OK), - ("null", "null", status.HTTP_200_OK), - ], -) -@pytest.mark.parametrize("capture", ["run_study_function_parent_info.json"]) -async def test_map_function_parent_info( - client: AsyncClient, - mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - mock_registered_project_function: RegisteredProjectFunction, - mock_registered_project_function_job: RegisteredFunctionJob, - auth: httpx.BasicAuth, - user_id: UserID, - mocked_webserver_rest_api_base: respx.MockRouter, - mocked_directorv2_rest_api_base: respx.MockRouter, - mocked_webserver_rpc_api: dict[str, MockType], - create_respx_mock_from_capture, - project_tests_dir: Path, - parent_project_uuid: str | None, - parent_node_uuid: str | None, - expected_status_code: int, - capture: str, -) -> None: - side_effect_checks = {} - - def _default_side_effect( - side_effect_checks: dict, - request: httpx.Request, - path_params: dict[str, Any], - capture: HttpApiCallCaptureModel, - ) -> Any: - if request.method == "POST" and request.url.path.endswith("/projects"): - side_effect_checks["headers_checked"] = True - if parent_project_uuid and parent_project_uuid != "null": - _parent_uuid = request.headers.get(X_SIMCORE_PARENT_PROJECT_UUID) - assert 
_parent_uuid is not None - assert parent_project_uuid == _parent_uuid - if parent_node_uuid and parent_node_uuid != "null": - _parent_node_uuid = request.headers.get(X_SIMCORE_PARENT_NODE_ID) - assert _parent_node_uuid is not None - assert parent_node_uuid == _parent_node_uuid - return capture.response_body - - create_respx_mock_from_capture( - respx_mocks=[mocked_webserver_rest_api_base, mocked_directorv2_rest_api_base], - capture_path=project_tests_dir / "mocks" / capture, - side_effects_callbacks=[partial(_default_side_effect, side_effect_checks)] * 50, - ) - - mock_handler_in_functions_rpc_interface( - "get_function_user_permissions", - FunctionUserAccessRights( - user_id=user_id, - execute=True, - read=True, - write=True, - ), - ) - mock_handler_in_functions_rpc_interface( - "get_function", mock_registered_project_function - ) - mock_handler_in_functions_rpc_interface("find_cached_function_jobs", []) - mock_handler_in_functions_rpc_interface( - "register_function_job", mock_registered_project_function_job - ) - mock_handler_in_functions_rpc_interface( - "get_functions_user_api_access_rights", - FunctionUserApiAccessRights( - user_id=user_id, - execute_functions=True, - write_functions=True, - read_functions=True, - ), - ) - mock_handler_in_functions_rpc_interface( - "register_function_job_collection", - RegisteredFunctionJobCollection( - uid=UUID(_faker.uuid4()), - title="Test Collection", - description="A test function job collection", - job_ids=[], - created_at=datetime.datetime.now(datetime.UTC), - ), - ) - - headers = {} - if parent_project_uuid: - headers[X_SIMCORE_PARENT_PROJECT_UUID] = parent_project_uuid - if parent_node_uuid: - headers[X_SIMCORE_PARENT_NODE_ID] = parent_node_uuid - - response = await client.post( - f"{API_VTAG}/functions/{mock_registered_project_function.uid}:map", - json=[{}, {}], - auth=auth, - headers=headers, - ) - if expected_status_code == status.HTTP_200_OK: - assert side_effect_checks["headers_checked"] is True - assert 
response.status_code == expected_status_code - - async def test_export_logs_project_function_job( client: AsyncClient, mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], From 2ce52b63f193c24ca31f4f14dd54e6a742732c21 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Fri, 29 Aug 2025 14:08:51 +0200 Subject: [PATCH 085/111] update openapi-specs --- services/api-server/openapi.json | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/services/api-server/openapi.json b/services/api-server/openapi.json index 4be0f07c0df..0c303480c22 100644 --- a/services/api-server/openapi.json +++ b/services/api-server/openapi.json @@ -8103,7 +8103,26 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/TaskGet" + "oneOf": [ + { + "$ref": "#/components/schemas/RegisteredProjectFunctionJob" + }, + { + "$ref": "#/components/schemas/RegisteredPythonCodeFunctionJob" + }, + { + "$ref": "#/components/schemas/RegisteredSolverFunctionJob" + } + ], + "discriminator": { + "propertyName": "function_class", + "mapping": { + "PROJECT": "#/components/schemas/RegisteredProjectFunctionJob", + "PYTHON_CODE": "#/components/schemas/RegisteredPythonCodeFunctionJob", + "SOLVER": "#/components/schemas/RegisteredSolverFunctionJob" + } + }, + "title": "Response Run Function V0 Functions Function Id Run Post" } } } From 979cc2c1d3fcadf008380f398955462fa09587a2 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Sat, 30 Aug 2025 06:34:12 +0200 Subject: [PATCH 086/111] make pylint happy --- .../api/routes/function_jobs_routes.py | 2 +- .../api/routes/functions_routes.py | 2 +- .../celery/worker_tasks/functions_tasks.py | 2 +- .../celery/worker_tasks/tasks.py | 4 ++-- .../tests/unit/api_functions/celery/test_functions.py | 4 ++-- .../api-server/tests/unit/api_functions/conftest.py | 2 +- .../api_functions/test_api_routers_function_jobs.py | 10 ++++++---- 7 files changed, 14 insertions(+), 12 deletions(-) diff --git 
a/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py index 699042e8174..d37f9d112eb 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py @@ -258,7 +258,7 @@ async def function_job_status( raise UnsupportedFunctionFunctionJobClassCombinationError( function_class=function.function_class, function_job_class=function_job.function_class, - ) + ) from exc async def get_function_from_functionjobid( diff --git a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index fd93c414ed8..acb279f0956 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -27,7 +27,7 @@ from ..._service_function_jobs import FunctionJobService from ..._service_functions import FunctionService -from ...celery.worker_tasks.functions_tasks import map as map_task +from ...celery.worker_tasks.functions_tasks import function_map as map_task from ...celery.worker_tasks.functions_tasks import run_function as run_function_task from ...exceptions.function_errors import FunctionJobCacheNotFoundError from ...models.domain.functions import PreRegisteredFunctionJobData diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py index 492ebe0d058..3f3654bebfe 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py @@ -119,7 +119,7 @@ async def 
run_function( ) -async def map( +async def function_map( task: Task, task_id: TaskID, *, diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py index 2bd7a242deb..d51d4a4cdba 100644 --- a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py @@ -18,7 +18,7 @@ from ...models.api_resources import JobLinks from ...models.domain.functions import PreRegisteredFunctionJobData from ...models.schemas.jobs import JobInputs, JobPricingSpecification -from .functions_tasks import map, run_function +from .functions_tasks import function_map, run_function _logger = logging.getLogger(__name__) @@ -43,4 +43,4 @@ def setup_worker_tasks(app: Celery) -> None: with log_context(_logger, logging.INFO, msg="worker task registration"): register_task(app, run_function) - register_task(app, map) + register_task(app, function_map) diff --git a/services/api-server/tests/unit/api_functions/celery/test_functions.py b/services/api-server/tests/unit/api_functions/celery/test_functions.py index a0a0b35c9b1..b46a07a0567 100644 --- a/services/api-server/tests/unit/api_functions/celery/test_functions.py +++ b/services/api-server/tests/unit/api_functions/celery/test_functions.py @@ -440,7 +440,7 @@ async def test_map_function_parent_info( with_api_server_celery_worker: TestWorkController, client: AsyncClient, mock_handler_in_functions_rpc_interface: Callable[ - [str, Any, Exception | None, Callable | None], None + [str, Any, Exception | None, Callable | None], MockType ], mock_registered_project_function: RegisteredProjectFunction, mock_registered_project_function_job: RegisteredFunctionJob, @@ -550,7 +550,7 @@ def _default_side_effect( assert response.status_code == expected_status_code if expected_status_code == status.HTTP_200_OK: - job_collection = 
FunctionJobCollection.model_validate(response.json()) + FunctionJobCollection.model_validate(response.json()) task_id = patch_mock.call_args.kwargs[ "registered_function_job_patch" ].job_creation_task_id diff --git a/services/api-server/tests/unit/api_functions/conftest.py b/services/api-server/tests/unit/api_functions/conftest.py index 729b2967a78..fd60ede5b17 100644 --- a/services/api-server/tests/unit/api_functions/conftest.py +++ b/services/api-server/tests/unit/api_functions/conftest.py @@ -260,7 +260,7 @@ def mock_registered_function_job_collection( @pytest.fixture() def mock_handler_in_functions_rpc_interface( mock_wb_api_server_rpc: MockerFixture, -) -> Callable[[str, Any, Exception | None, Callable | None], None]: +) -> Callable[[str, Any, Exception | None, Callable | None], MockType]: def _mock( handler_name: str = "", return_value: Any = None, diff --git a/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py b/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py index 7ed2257af02..3d7f6171251 100644 --- a/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py +++ b/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py @@ -1,4 +1,6 @@ # pylint: disable=unused-argument +# pylint: disable=too-many-arguments +# pylint: disable=too-many-positional-arguments import random import uuid @@ -9,7 +11,6 @@ import httpx import pytest -import simcore_service_api_server.api.routes.function_jobs_routes as function_jobs_routes from celery_library.task_manager import CeleryTaskManager from faker import Faker from fastapi import FastAPI, status @@ -32,6 +33,7 @@ from pytest_mock import MockerFixture, MockType from servicelib.celery.models import TaskFilter, TaskState, TaskStatus, TaskUUID from simcore_service_api_server._meta import API_VTAG +from simcore_service_api_server.api.routes import function_jobs_routes from 
simcore_service_api_server.api.routes.function_jobs_routes import ( _JOB_CREATION_TASK_STATUS_PREFIX, ) @@ -201,10 +203,10 @@ def mocked_list_function_jobs(offset: int, limit: int): ( ProjectID(_faker.uuid4()), TaskID(_faker.uuid4()), - random.choice([state for state in TaskState]), + random.choice(list(TaskState)), ), - (None, None, random.choice([state for state in TaskState])), - (None, TaskID(_faker.uuid4()), random.choice([state for state in TaskState])), + (None, None, random.choice(list(TaskState))), + (None, TaskID(_faker.uuid4()), random.choice(list(TaskState))), ], ) async def test_get_function_job_status( From bd64fead4500384db7321cf9fc9f84a5701d33fc Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Sat, 30 Aug 2025 06:35:45 +0200 Subject: [PATCH 087/111] typecheck --- .../simcore_service_api_server/api/routes/functions_routes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index acb279f0956..5917bae9935 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -46,7 +46,7 @@ get_function_job_service, get_function_service, ) -from ..dependencies.webserver_rpc import WbApiRpcClient, get_wb_api_rpc_client +from ..dependencies.webserver_rpc import get_wb_api_rpc_client from ._constants import ( FMSG_CHANGELOG_ADDED_IN_VERSION, FMSG_CHANGELOG_NEW_IN_VERSION, From 3774f00e53b6df8da717eea08b37d39df9b747e0 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 1 Sep 2025 11:34:32 +0200 Subject: [PATCH 088/111] celery -> celery_worker --- services/api-server/docker/boot_celery_worker.py | 5 ++++- .../simcore_service_api_server/api/dependencies/celery.py | 2 +- .../api/routes/functions_routes.py | 6 ++++-- .../{celery => celery_worker}/__init__.py | 0 
.../{celery => celery_worker}/worker_main.py | 0 .../{celery => celery_worker}/worker_tasks/__init__.py | 0 .../worker_tasks/functions_tasks.py | 0 .../{celery => celery_worker}/worker_tasks/tasks.py | 0 .../api-server/tests/unit/api_functions/celery/conftest.py | 2 +- .../tests/unit/api_functions/celery/test_functions.py | 2 +- .../tests/unit/api_functions/test_api_routers_functions.py | 2 +- 11 files changed, 12 insertions(+), 7 deletions(-) rename services/api-server/src/simcore_service_api_server/{celery => celery_worker}/__init__.py (100%) rename services/api-server/src/simcore_service_api_server/{celery => celery_worker}/worker_main.py (100%) rename services/api-server/src/simcore_service_api_server/{celery => celery_worker}/worker_tasks/__init__.py (100%) rename services/api-server/src/simcore_service_api_server/{celery => celery_worker}/worker_tasks/functions_tasks.py (100%) rename services/api-server/src/simcore_service_api_server/{celery => celery_worker}/worker_tasks/tasks.py (100%) diff --git a/services/api-server/docker/boot_celery_worker.py b/services/api-server/docker/boot_celery_worker.py index 194addc031f..e0c7e119ced 100644 --- a/services/api-server/docker/boot_celery_worker.py +++ b/services/api-server/docker/boot_celery_worker.py @@ -2,7 +2,10 @@ from celery_library.signals import ( on_worker_shutdown, ) -from simcore_service_api_server.celery.worker_main import get_app, worker_init_wrapper +from simcore_service_api_server.celery_worker.worker_main import ( + get_app, + worker_init_wrapper, +) app = get_app() diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py index 8e955b5454c..61428e3de6e 100644 --- a/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py +++ b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py @@ -6,7 +6,7 @@ from fastapi import FastAPI from 
settings_library.celery import CelerySettings -from ...celery.worker_tasks.tasks import pydantic_types_to_register +from ...celery_worker.worker_tasks.tasks import pydantic_types_to_register ASYNC_JOB_CLIENT_NAME: Final[str] = "API_SERVER" diff --git a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index 5917bae9935..612446fcb03 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -27,8 +27,10 @@ from ..._service_function_jobs import FunctionJobService from ..._service_functions import FunctionService -from ...celery.worker_tasks.functions_tasks import function_map as map_task -from ...celery.worker_tasks.functions_tasks import run_function as run_function_task +from ...celery_worker.worker_tasks.functions_tasks import function_map as map_task +from ...celery_worker.worker_tasks.functions_tasks import ( + run_function as run_function_task, +) from ...exceptions.function_errors import FunctionJobCacheNotFoundError from ...models.domain.functions import PreRegisteredFunctionJobData from ...models.pagination import Page, PaginationParams diff --git a/services/api-server/src/simcore_service_api_server/celery/__init__.py b/services/api-server/src/simcore_service_api_server/celery_worker/__init__.py similarity index 100% rename from services/api-server/src/simcore_service_api_server/celery/__init__.py rename to services/api-server/src/simcore_service_api_server/celery_worker/__init__.py diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_main.py b/services/api-server/src/simcore_service_api_server/celery_worker/worker_main.py similarity index 100% rename from services/api-server/src/simcore_service_api_server/celery/worker_main.py rename to 
services/api-server/src/simcore_service_api_server/celery_worker/worker_main.py diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/__init__.py b/services/api-server/src/simcore_service_api_server/celery_worker/worker_tasks/__init__.py similarity index 100% rename from services/api-server/src/simcore_service_api_server/celery/worker_tasks/__init__.py rename to services/api-server/src/simcore_service_api_server/celery_worker/worker_tasks/__init__.py diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py b/services/api-server/src/simcore_service_api_server/celery_worker/worker_tasks/functions_tasks.py similarity index 100% rename from services/api-server/src/simcore_service_api_server/celery/worker_tasks/functions_tasks.py rename to services/api-server/src/simcore_service_api_server/celery_worker/worker_tasks/functions_tasks.py diff --git a/services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py b/services/api-server/src/simcore_service_api_server/celery_worker/worker_tasks/tasks.py similarity index 100% rename from services/api-server/src/simcore_service_api_server/celery/worker_tasks/tasks.py rename to services/api-server/src/simcore_service_api_server/celery_worker/worker_tasks/tasks.py diff --git a/services/api-server/tests/unit/api_functions/celery/conftest.py b/services/api-server/tests/unit/api_functions/celery/conftest.py index caab00fb92f..aaed9053267 100644 --- a/services/api-server/tests/unit/api_functions/celery/conftest.py +++ b/services/api-server/tests/unit/api_functions/celery/conftest.py @@ -26,7 +26,7 @@ from pytest_simcore.helpers.monkeypatch_envs import delenvs_from_dict, setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.fastapi.celery.app_server import FastAPIAppServer -from simcore_service_api_server.celery.worker_main import setup_worker_tasks +from simcore_service_api_server.celery_worker.worker_main 
import setup_worker_tasks from simcore_service_api_server.core.application import create_app from simcore_service_api_server.core.settings import ApplicationSettings diff --git a/services/api-server/tests/unit/api_functions/celery/test_functions.py b/services/api-server/tests/unit/api_functions/celery/test_functions.py index b46a07a0567..151aa4430cc 100644 --- a/services/api-server/tests/unit/api_functions/celery/test_functions.py +++ b/services/api-server/tests/unit/api_functions/celery/test_functions.py @@ -58,7 +58,7 @@ ASYNC_JOB_CLIENT_NAME, get_task_manager, ) -from simcore_service_api_server.celery.worker_tasks.functions_tasks import ( +from simcore_service_api_server.celery_worker.worker_tasks.functions_tasks import ( run_function as run_function_task, ) from simcore_service_api_server.exceptions.backend_errors import BaseBackEndError diff --git a/services/api-server/tests/unit/api_functions/test_api_routers_functions.py b/services/api-server/tests/unit/api_functions/test_api_routers_functions.py index fbbb8936b71..c473d876cf5 100644 --- a/services/api-server/tests/unit/api_functions/test_api_routers_functions.py +++ b/services/api-server/tests/unit/api_functions/test_api_routers_functions.py @@ -46,7 +46,7 @@ from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient from simcore_service_api_server._meta import API_VTAG from simcore_service_api_server.api.dependencies.authentication import Identity -from simcore_service_api_server.celery.worker_tasks import functions_tasks +from simcore_service_api_server.celery_worker.worker_tasks import functions_tasks from simcore_service_api_server.models.api_resources import JobLinks from simcore_service_api_server.models.domain.functions import ( PreRegisteredFunctionJobData, From b8b69fe17653119109b94e060c799e1becaabc78 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 1 Sep 2025 11:46:18 +0200 Subject: [PATCH 089/111] correct examples added to pydantic models @sanderegg --- 
.../api_schemas_long_running_tasks/base.py | 29 +++-- .../src/servicelib/celery/models.py | 106 ++++++++++-------- .../models/api_resources.py | 26 +++-- 3 files changed, 90 insertions(+), 71 deletions(-) diff --git a/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py b/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py index b6900545ff6..38f2fa2f926 100644 --- a/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py +++ b/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py @@ -2,7 +2,8 @@ from collections.abc import Awaitable, Callable from typing import Annotated, TypeAlias -from pydantic import BaseModel, Field, field_validator, validate_call +from pydantic import BaseModel, ConfigDict, Field, field_validator, validate_call +from pydantic.config import JsonDict _logger = logging.getLogger(__name__) @@ -23,17 +24,21 @@ class TaskProgress(BaseModel): message: ProgressMessage = "" percent: ProgressPercent = 0.0 - model_config = { - "json_schema_extra": { - "examples": [ - { - "task_id": "3ac48b54-a48d-4c5e-a6ac-dcaddb9eaa59", - "message": "Halfway done", - "percent": 0.5, - } - ] - } - } + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "task_id": "3ac48b54-a48d-4c5e-a6ac-dcaddb9eaa59", + "message": "Halfway done", + "percent": 0.5, + } + ] + } + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) # used to propagate progress updates internally _update_callback: Callable[["TaskProgress"], Awaitable[None]] | None = None diff --git a/packages/service-library/src/servicelib/celery/models.py b/packages/service-library/src/servicelib/celery/models.py index 8fa460f7bdc..0c46e1716b1 100644 --- a/packages/service-library/src/servicelib/celery/models.py +++ b/packages/service-library/src/servicelib/celery/models.py @@ -4,7 +4,8 @@ from uuid import UUID from 
models_library.progress_bar import ProgressReport -from pydantic import BaseModel, StringConstraints +from pydantic import BaseModel, ConfigDict, StringConstraints +from pydantic.config import JsonDict TaskID: TypeAlias = str TaskName: TypeAlias = Annotated[ @@ -41,36 +42,40 @@ class Task(BaseModel): uuid: TaskUUID metadata: TaskMetadata - model_config = { - "json_schema_extra": { - "examples": [ - { - "uuid": "123e4567-e89b-12d3-a456-426614174000", - "metadata": { - "name": "task1", - "ephemeral": True, - "queue": "default", + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "uuid": "123e4567-e89b-12d3-a456-426614174000", + "metadata": { + "name": "task1", + "ephemeral": True, + "queue": "default", + }, }, - }, - { - "uuid": "223e4567-e89b-12d3-a456-426614174001", - "metadata": { - "name": "task2", - "ephemeral": False, - "queue": "cpu_bound", + { + "uuid": "223e4567-e89b-12d3-a456-426614174001", + "metadata": { + "name": "task2", + "ephemeral": False, + "queue": "cpu_bound", + }, }, - }, - { - "uuid": "323e4567-e89b-12d3-a456-426614174002", - "metadata": { - "name": "task3", - "ephemeral": True, - "queue": "default", + { + "uuid": "323e4567-e89b-12d3-a456-426614174002", + "metadata": { + "name": "task3", + "ephemeral": True, + "queue": "default", + }, }, - }, - ] - } - } + ] + } + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) _TASK_DONE = {TaskState.SUCCESS, TaskState.FAILURE, TaskState.ABORTED} @@ -104,27 +109,32 @@ class TaskStatus(BaseModel): task_state: TaskState progress_report: ProgressReport - model_config = { - "json_schema_extra": { - "examples": [ - { - "task_uuid": "123e4567-e89b-12d3-a456-426614174000", - "task_state": "SUCCESS", - "progress_report": { - "actual_value": 0.5, - "total": 1.0, - "attempts": 1, - "unit": "Byte", - "message": { - "description": "some description", - "current": 12.2, - "total": 123, + @staticmethod + def 
_update_json_schema_extra(schema: JsonDict) -> None: + + schema.update( + { + "examples": [ + { + "task_uuid": "123e4567-e89b-12d3-a456-426614174000", + "task_state": "SUCCESS", + "progress_report": { + "actual_value": 0.5, + "total": 1.0, + "attempts": 1, + "unit": "Byte", + "message": { + "description": "some description", + "current": 12.2, + "total": 123, + }, }, - }, - } - ] - } - } + } + ] + } + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) @property def is_done(self) -> bool: diff --git a/services/api-server/src/simcore_service_api_server/models/api_resources.py b/services/api-server/src/simcore_service_api_server/models/api_resources.py index 9f82f4f1997..f796eb0af86 100644 --- a/services/api-server/src/simcore_service_api_server/models/api_resources.py +++ b/services/api-server/src/simcore_service_api_server/models/api_resources.py @@ -103,17 +103,21 @@ def _url_missing_only_job_id(url: str | None) -> str | None: class JobLinks(BaseModel): - model_config = ConfigDict( - json_schema_extra={ - "examples": [ - { - "url_template": "https://api.osparc.io/v0/jobs/{job_id}", - "runner_url_template": "https://runner.osparc.io/dashboard", - "outputs_url_template": "https://api.osparc.io/v0/jobs/{job_id}/outputs", - } - ] - } - ) + @staticmethod + def _update_json_schema_extra(schema: dict) -> None: + schema.update( + { + "examples": [ + { + "url_template": "https://api.osparc.io/v0/jobs/{job_id}", + "runner_url_template": "https://runner.osparc.io/dashboard", + "outputs_url_template": "https://api.osparc.io/v0/jobs/{job_id}/outputs", + } + ] + } + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) url_template: Annotated[str | None, AfterValidator(_url_missing_only_job_id)] runner_url_template: str | None From b41eb5a0951f2af5ee73500532b521d3a35946c1 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 1 Sep 2025 13:28:29 +0200 Subject: [PATCH 090/111] improve dependency-injection system comment @sanderegg 
--- .../celery_worker/worker_tasks/functions_tasks.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/celery_worker/worker_tasks/functions_tasks.py b/services/api-server/src/simcore_service_api_server/celery_worker/worker_tasks/functions_tasks.py index 3f3654bebfe..4c3697c1ca8 100644 --- a/services/api-server/src/simcore_service_api_server/celery_worker/worker_tasks/functions_tasks.py +++ b/services/api-server/src/simcore_service_api_server/celery_worker/worker_tasks/functions_tasks.py @@ -31,10 +31,9 @@ async def _assemble_function_job_service( *, app: FastAPI, user_identity: Identity ) -> FunctionJobService: - # to avoid this show we could introduce a dependency injection - # system which is not linked to FastAPI (i.e. can be resolved manually). - # One suggestion: https://github.com/ets-labs/python-dependency-injector, which is compatible - # with FastAPI's Depends. + # This should ideally be done by a dependency injection system (like it is done in the api-server). + # However, for that we would need to introduce a dependency injection system which is not coupled to, + # but compatible with FastAPI's Depends. One suggestion: https://github.com/ets-labs/python-dependency-injector. # See also https://github.com/fastapi/fastapi/issues/1105#issuecomment-609919850. 
settings = app.state.settings assert settings.API_SERVER_WEBSERVER # nosec From b36d5ff6bbdc634f18eea8948580f6e80a04cff8 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 1 Sep 2025 13:38:24 +0200 Subject: [PATCH 091/111] massage -> preprocess --- .../simcore_service_api_server/api/routes/functions_routes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index 612446fcb03..85fb6e1ca19 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -339,7 +339,7 @@ async def run_function( # noqa: PLR0913 x_simcore_parent_project_uuid: Annotated[ProjectID | Literal["null"], Header()], x_simcore_parent_node_id: Annotated[NodeID | Literal["null"], Header()], ) -> RegisteredFunctionJob: - # massage inputs + # preprocess inputs task_manager = get_task_manager(request.app) parent_project_uuid = ( x_simcore_parent_project_uuid From 9c154e426d49155665a9b8837a1e85a3c67b8c4a Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 1 Sep 2025 13:39:54 +0200 Subject: [PATCH 092/111] @pcrespov use contextlib.suppress --- .../api/routes/functions_routes.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index 85fb6e1ca19..0b1fb29027f 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -1,3 +1,5 @@ +import contextlib + # pylint: disable=too-many-positional-arguments from collections.abc import Callable from typing import Annotated, Final, Literal @@ -358,13 
+360,11 @@ async def run_function( # noqa: PLR0913 ) # check if results are cached - try: + with contextlib.suppress(FunctionJobCacheNotFoundError): return await function_job_service.get_cached_function_job( function=to_run_function, job_inputs=job_inputs, ) - except FunctionJobCacheNotFoundError: - pass pre_registered_function_job_data = ( await function_job_service.pre_register_function_job( From 313cc1dc499e80c2d51b0215b43491ac0e9ccda6 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 1 Sep 2025 13:54:10 +0200 Subject: [PATCH 093/111] avoid converting CeleryError to HTTPException directly in endpoint handlers --- .../api/routes/tasks.py | 83 +++++++------------ 1 file changed, 28 insertions(+), 55 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/tasks.py b/services/api-server/src/simcore_service_api_server/api/routes/tasks.py index 4b3aef742b1..5778bde60ef 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/tasks.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/tasks.py @@ -1,9 +1,6 @@ import logging from typing import Annotated, Any -from celery.exceptions import ( # type: ignore[import-untyped] # pylint: disable=no-name-in-module - CeleryError, -) from common_library.error_codes import create_error_code from fastapi import APIRouter, Depends, FastAPI, HTTPException, status from models_library.api_schemas_long_running_tasks.base import TaskProgress @@ -70,15 +67,9 @@ async def list_tasks( task_manager = get_task_manager(app) - try: - tasks = await task_manager.list_tasks( - task_filter=_get_task_filter(user_id, product_name), - ) - except CeleryError as exc: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail="Encountered issue when listing tasks", - ) from exc + tasks = await task_manager.list_tasks( + task_filter=_get_task_filter(user_id, product_name), + ) app_router = app.router data = [ @@ -118,16 +109,10 @@ async def get_task_status( 
): task_manager = get_task_manager(app) - try: - task_status = await task_manager.get_task_status( - task_filter=_get_task_filter(user_id, product_name), - task_uuid=TaskUUID(f"{task_id}"), - ) - except CeleryError as exc: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail="Encountered issue when getting task status", - ) from exc + task_status = await task_manager.get_task_status( + task_filter=_get_task_filter(user_id, product_name), + task_uuid=TaskUUID(f"{task_id}"), + ) return TaskStatus( task_progress=TaskProgress( @@ -159,16 +144,10 @@ async def cancel_task( ): task_manager = get_task_manager(app) - try: - await task_manager.cancel_task( - task_filter=_get_task_filter(user_id, product_name), - task_uuid=TaskUUID(f"{task_id}"), - ) - except CeleryError as exc: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail="Encountered issue when cancelling task", - ) from exc + await task_manager.cancel_task( + task_filter=_get_task_filter(user_id, product_name), + task_uuid=TaskUUID(f"{task_id}"), + ) @router.get( @@ -202,32 +181,26 @@ async def get_task_result( task_manager = get_task_manager(app) task_filter = _get_task_filter(user_id, product_name) - try: - task_status = await task_manager.get_task_status( - task_filter=task_filter, - task_uuid=TaskUUID(f"{task_id}"), - ) - - if not task_status.is_done: - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail="Task result not available yet", - ) - if task_status.task_state == TaskState.ABORTED: - raise HTTPException( - status_code=status.HTTP_409_CONFLICT, - detail="Task was cancelled", - ) + task_status = await task_manager.get_task_status( + task_filter=task_filter, + task_uuid=TaskUUID(f"{task_id}"), + ) - task_result = await task_manager.get_task_result( - task_filter=task_filter, - task_uuid=TaskUUID(f"{task_id}"), + if not task_status.is_done: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Task result 
not available yet", ) - except CeleryError as exc: + if task_status.task_state == TaskState.ABORTED: raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail="Encountered issue when getting task result", - ) from exc + status_code=status.HTTP_409_CONFLICT, + detail="Task was cancelled", + ) + + task_result = await task_manager.get_task_result( + task_filter=task_filter, + task_uuid=TaskUUID(f"{task_id}"), + ) if task_status.task_state == TaskState.FAILURE: assert isinstance(task_result, Exception) From fb2e994fc64ab1e60ac74950ccf674f85b124c87 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 1 Sep 2025 14:20:56 +0200 Subject: [PATCH 094/111] return 503 instead of 500 when celery task fails @sanderegg --- .../src/simcore_service_api_server/api/routes/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/tasks.py b/services/api-server/src/simcore_service_api_server/api/routes/tasks.py index 5778bde60ef..9837a5f625f 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/tasks.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/tasks.py @@ -215,7 +215,7 @@ async def get_task_result( ) ) raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + status_code=status.HTTP_503_SERVICE_UNAVAILABLE, detail=user_error_msg, ) From a640f4cb4fbb669510c5c9db57a7ed4c5f539e7a Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 1 Sep 2025 15:01:02 +0200 Subject: [PATCH 095/111] test fixes --- .../src/models_library/progress_bar.py | 65 ++++++++++++------- .../api_functions/celery/test_functions.py | 2 +- services/api-server/tests/unit/test_tasks.py | 20 +++--- 3 files changed, 51 insertions(+), 36 deletions(-) diff --git a/packages/models-library/src/models_library/progress_bar.py b/packages/models-library/src/models_library/progress_bar.py index ad8130570e5..76735c33d80 100644 --- 
a/packages/models-library/src/models_library/progress_bar.py +++ b/packages/models-library/src/models_library/progress_bar.py @@ -1,6 +1,7 @@ from typing import Literal, TypeAlias from pydantic import BaseModel, ConfigDict +from pydantic.config import JsonDict # NOTE: keep a list of possible unit, and please use correct official unit names ProgressUnit: TypeAlias = Literal["Byte"] @@ -13,34 +14,38 @@ class ProgressStructuredMessage(BaseModel): unit: str | None = None sub: "ProgressStructuredMessage | None" = None - model_config = ConfigDict( - json_schema_extra={ - "examples": [ - { - "description": "some description", - "current": 12.2, - "total": 123, - }, - { - "description": "some description", - "current": 12.2, - "total": 123, - "unit": "Byte", - }, - { - "description": "downloading", - "current": 2.0, - "total": 5, - "sub": { - "description": "port 2", + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "description": "some description", + "current": 12.2, + "total": 123, + }, + { + "description": "some description", "current": 12.2, "total": 123, "unit": "Byte", }, - }, - ] - } - ) + { + "description": "downloading", + "current": 2.0, + "total": 5, + "sub": { + "description": "port 2", + "current": 12.2, + "total": 123, + "unit": "Byte", + }, + }, + ] + } + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) UNITLESS = None @@ -96,7 +101,17 @@ def composed_message(self) -> str: { "actual_value": 0.3, "total": 1.0, - "message": ProgressStructuredMessage.model_config["json_schema_extra"]["examples"][2], # type: ignore [index] + "message": { + "description": "downloading", + "current": 2.0, + "total": 5, + "sub": { + "description": "port 2", + "current": 12.2, + "total": 123, + "unit": "Byte", + }, + }, }, ] }, diff --git a/services/api-server/tests/unit/api_functions/celery/test_functions.py b/services/api-server/tests/unit/api_functions/celery/test_functions.py index 
151aa4430cc..5bb5971cd3b 100644 --- a/services/api-server/tests/unit/api_functions/celery/test_functions.py +++ b/services/api-server/tests/unit/api_functions/celery/test_functions.py @@ -304,7 +304,7 @@ async def test_celery_error_propagation( with pytest.raises(HTTPStatusError) as exc_info: await poll_task_until_done(client, auth, f"{task_uuid}") - assert exc_info.value.response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR + assert exc_info.value.response.status_code == status.HTTP_503_SERVICE_UNAVAILABLE @pytest.mark.parametrize( diff --git a/services/api-server/tests/unit/test_tasks.py b/services/api-server/tests/unit/test_tasks.py index 8c6e40b532c..9c2f1a25320 100644 --- a/services/api-server/tests/unit/test_tasks.py +++ b/services/api-server/tests/unit/test_tasks.py @@ -102,7 +102,7 @@ async def test_get_task_result( None, None, None, - status.HTTP_500_INTERNAL_SERVER_ERROR, + status.HTTP_503_SERVICE_UNAVAILABLE, ), ( "GET", @@ -111,7 +111,7 @@ async def test_get_task_result( CeleryError(), None, None, - status.HTTP_500_INTERNAL_SERVER_ERROR, + status.HTTP_503_SERVICE_UNAVAILABLE, ), ( "POST", @@ -120,7 +120,7 @@ async def test_get_task_result( None, CeleryError(), None, - status.HTTP_500_INTERNAL_SERVER_ERROR, + status.HTTP_503_SERVICE_UNAVAILABLE, ), ( "GET", @@ -129,7 +129,7 @@ async def test_get_task_result( CeleryError(), None, None, - status.HTTP_500_INTERNAL_SERVER_ERROR, + status.HTTP_503_SERVICE_UNAVAILABLE, ), ( "GET", @@ -142,9 +142,9 @@ async def test_get_task_result( actual_value=0.5, total=1.0, unit="Byte", - message=ProgressStructuredMessage.model_config["json_schema_extra"][ - "examples" - ][0], + message=ProgressStructuredMessage.model_json_schema()["examples"][ + 0 + ], ), ), None, @@ -162,9 +162,9 @@ async def test_get_task_result( actual_value=0.5, total=1.0, unit="Byte", - message=ProgressStructuredMessage.model_config["json_schema_extra"][ - "examples" - ][0], + message=ProgressStructuredMessage.model_json_schema()["examples"][ 
+ 0 + ], ), ), None, From 7b5cda81656ec8c0366ccdd895325c58594e79ce Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Mon, 1 Sep 2025 16:46:36 +0200 Subject: [PATCH 096/111] return 503 in case of CeleryError @sanderegg --- .../prompts/update-user-messages.prompt.md | 27 +++++++++++++++++-- .../pytest_simcore/celery_library_mocks.py | 17 ++++-------- .../simcore_service_api_server/_constants.py | 15 ++++++----- .../exceptions/handlers/__init__.py | 13 +++++++++ services/api-server/tests/unit/test_tasks.py | 20 +++++++++----- 5 files changed, 66 insertions(+), 26 deletions(-) diff --git a/.github/prompts/update-user-messages.prompt.md b/.github/prompts/update-user-messages.prompt.md index 37eee04951b..29ababc2ef1 100644 --- a/.github/prompts/update-user-messages.prompt.md +++ b/.github/prompts/update-user-messages.prompt.md @@ -1,6 +1,7 @@ --- mode: 'edit' description: 'Update user messages' +model: Claude Sonnet 3.5 --- This prompt guide is for updating user-facing messages in ${file} or ${selection} @@ -43,7 +44,17 @@ When modifying user messages, follow **as close as possible** these rules: user_message("Unable to load project.", _version=1) ``` -3. **Message Style**: Follow **strictly** the guidelines in `${workspaceFolder}/docs/user-messages-guidelines.md` +3. **Message Style**: Follow **STRICTLY ALL 10 GUIDELINES** in `${workspaceFolder}/docs/user-messages-guidelines.md`: + - Be Clear and Concise + - Provide Specific and Actionable Information + - Avoid Technical Jargon + - Use a Polite and Non-Blaming Tone + - Avoid Negative Words and Phrases + - Place Messages Appropriately + - Use Inline Validation When Possible + - Avoid Using All-Caps and Excessive Punctuation + - **Use Humor Sparingly** - Avoid casual phrases like "Oops!", "Whoops!", or overly informal language + - Offer Alternative Solutions or Support 4. **Preserve Context**: Ensure the modified message conveys the same meaning and context as the original. 
@@ -56,8 +67,10 @@ When modifying user messages, follow **as close as possible** these rules: # After user_message("Your session has expired. Please log in again.", _version=3) ``` + 6. **Replace 'Study' by 'Project'**: If the message contains the word 'Study', replace it with 'Project' to align with our terminology. +7. **Professional Tone**: Maintain a professional, helpful tone. Avoid humor, casual expressions, or overly informal language that might not be appropriate for all users or situations. ## Examples @@ -91,4 +104,14 @@ return HttpErrorInfo(status.HTTP_404_NOT_FOUND, user_message("User not found.", return HttpErrorInfo(status.HTTP_404_NOT_FOUND, user_message("The requested user could not be found.", _version=2)) ``` -Remember: The goal is to improve clarity and helpfulness for end-users while maintaining accurate versioning for tracking changes. +### Example 4: Removing Humor (Guideline 9) + +```python +# Before +user_message("Oops! Something went wrong, but we've noted it down and we'll sort it out ASAP. Thanks for your patience!") + +# After +user_message("Something went wrong on our end. We've been notified and will resolve this issue as soon as possible. Thank you for your patience.", _version=1) +``` + +Remember: The goal is to improve clarity and helpfulness for end-users while maintaining accurate versioning for tracking changes. 
**Always check that your updated messages comply with ALL 10 guidelines, especially avoiding humor and maintaining a professional tone.** diff --git a/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py b/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py index 530b4aff171..c027bc0cbd4 100644 --- a/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py +++ b/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py @@ -28,22 +28,15 @@ def get_task_result_return_value() -> dict: @pytest.fixture def get_task_status_return_value() -> TaskStatus: - status_extra = TaskStatus.model_config.get("json_schema_extra") - assert status_extra is not None - status_examples = status_extra.get("examples") - assert isinstance(status_examples, list) - assert len(status_examples) > 0 - return TaskStatus.model_validate(status_examples[0]) + example = TaskStatus.model_json_schema()["examples"][0] + return TaskStatus.model_validate(example) @pytest.fixture def list_tasks_return_value() -> list[Task]: - list_extra = Task.model_config.get("json_schema_extra") - assert isinstance(list_extra, dict) - list_examples = list_extra.get("examples") - assert isinstance(list_examples, list) - assert len(list_examples) > 0 - return [Task.model_validate(example) for example in list_examples] + examples = Task.model_json_schema()["examples"] + assert len(examples) > 0 + return [Task.model_validate(example) for example in examples] @pytest.fixture diff --git a/services/api-server/src/simcore_service_api_server/_constants.py b/services/api-server/src/simcore_service_api_server/_constants.py index 7bfbfd43907..512a987b640 100644 --- a/services/api-server/src/simcore_service_api_server/_constants.py +++ b/services/api-server/src/simcore_service_api_server/_constants.py @@ -1,9 +1,12 @@ from typing import Final -MSG_BACKEND_SERVICE_UNAVAILABLE: Final[ - str -] = "backend service is disabled or unreachable" +from common_library.user_messages import 
user_message -MSG_INTERNAL_ERROR_USER_FRIENDLY_TEMPLATE: Final[ - str -] = "Oops! Something went wrong, but we've noted it down and we'll sort it out ASAP. Thanks for your patience!" +MSG_BACKEND_SERVICE_UNAVAILABLE: Final[str] = user_message( + "The service is currently unavailable. Please try again later.", _version=1 +) + +MSG_INTERNAL_ERROR_USER_FRIENDLY_TEMPLATE: Final[str] = user_message( + "Something went wrong on our end. We've been notified and will resolve this issue as soon as possible. Thank you for your patience.", + _version=2, +) diff --git a/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py b/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py index 2385ea984f4..6a267395c1f 100644 --- a/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py +++ b/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py @@ -1,3 +1,4 @@ +from celery.exceptions import CeleryError from fastapi import FastAPI from fastapi.exceptions import RequestValidationError from httpx import HTTPError as HttpxException @@ -37,6 +38,18 @@ def setup(app: FastAPI, *, is_debug: bool = False): error_message="This endpoint is still not implemented (under development)", ), ) + + app.add_exception_handler( + CeleryError, + make_handler_for_exception( + CeleryError, + status.HTTP_503_SERVICE_UNAVAILABLE, + error_message=MSG_INTERNAL_ERROR_USER_FRIENDLY_TEMPLATE, + add_exception_to_message=is_debug, + add_oec_to_message=True, + ), + ) + app.add_exception_handler( Exception, make_handler_for_exception( diff --git a/services/api-server/tests/unit/test_tasks.py b/services/api-server/tests/unit/test_tasks.py index 9c2f1a25320..8ec736b4727 100644 --- a/services/api-server/tests/unit/test_tasks.py +++ b/services/api-server/tests/unit/test_tasks.py @@ -142,9 +142,13 @@ async def test_get_task_result( actual_value=0.5, total=1.0, unit="Byte", - 
message=ProgressStructuredMessage.model_json_schema()["examples"][ - 0 - ], + message=ProgressStructuredMessage.model_validate( + { + "description": "some description", + "current": 12.2, + "total": 123, + } + ), ), ), None, @@ -162,9 +166,13 @@ async def test_get_task_result( actual_value=0.5, total=1.0, unit="Byte", - message=ProgressStructuredMessage.model_json_schema()["examples"][ - 0 - ], + message=ProgressStructuredMessage.model_validate( + { + "description": "some description", + "current": 12.2, + "total": 123, + } + ), ), ), None, From 4e09ac30ac43adff165fcf4217eaff1cfae4b6aa Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 2 Sep 2025 09:58:14 +0200 Subject: [PATCH 097/111] use pytest-simcore fixture --- .../tests/unit/api_functions/celery/conftest.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/services/api-server/tests/unit/api_functions/celery/conftest.py b/services/api-server/tests/unit/api_functions/celery/conftest.py index aaed9053267..8fbcc1f7488 100644 --- a/services/api-server/tests/unit/api_functions/celery/conftest.py +++ b/services/api-server/tests/unit/api_functions/celery/conftest.py @@ -21,15 +21,19 @@ ) from celery.worker.worker import WorkController # pylint: disable=no-name-in-module from celery_library.signals import on_worker_init, on_worker_shutdown -from fakeredis.aioredis import FakeRedis from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import delenvs_from_dict, setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.fastapi.celery.app_server import FastAPIAppServer +from settings_library.redis import RedisSettings from simcore_service_api_server.celery_worker.worker_main import setup_worker_tasks from simcore_service_api_server.core.application import create_app from simcore_service_api_server.core.settings import ApplicationSettings +pytest_plugins = [ + "pytest_simcore.redis_service", +] + 
@pytest.fixture(scope="session") def celery_config() -> dict[str, Any]: @@ -47,12 +51,6 @@ def celery_config() -> dict[str, Any]: } -@pytest.fixture -async def mocked_redis_server(mocker: MockerFixture) -> None: - mock_redis = FakeRedis() - mocker.patch("redis.asyncio.from_url", return_value=mock_redis) - - @pytest.fixture async def mocked_log_streamer_setup(mocker: MockerFixture) -> MockerFixture: # mock log streamer: He is looking for non-existent queues. Should be solved more elegantly @@ -76,7 +74,7 @@ def mock_celery_app(mocker: MockerFixture, celery_config: dict[str, Any]) -> Cel def app_environment( mock_celery_app: Celery, mocked_log_streamer_setup: MockerFixture, - mocked_redis_server: None, + use_in_memory_redis: RedisSettings, monkeypatch: pytest.MonkeyPatch, app_environment: EnvVarsDict, rabbit_env_vars_dict: EnvVarsDict, From 46f1bd449a04f1018390d35b609931c84cb11601 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 2 Sep 2025 10:01:12 +0200 Subject: [PATCH 098/111] typecheck issue fix --- .../exceptions/handlers/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py b/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py index 6a267395c1f..df9f8b7f4ac 100644 --- a/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py +++ b/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py @@ -1,4 +1,6 @@ -from celery.exceptions import CeleryError +from celery.exceptions import ( + CeleryError, # type: ignore[import-untyped] # pylint: disable=no-name-in-module +) from fastapi import FastAPI from fastapi.exceptions import RequestValidationError from httpx import HTTPError as HttpxException From 098018ab316b9e0dec8ee1edff08f3f34cad8f74 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 2 Sep 2025 10:05:23 +0200 Subject: [PATCH 099/111] fix the typecheck fix --- 
.../exceptions/handlers/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py b/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py index df9f8b7f4ac..a0d071a47e6 100644 --- a/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py +++ b/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py @@ -1,5 +1,5 @@ -from celery.exceptions import ( - CeleryError, # type: ignore[import-untyped] # pylint: disable=no-name-in-module +from celery.exceptions import ( # type: ignore[import-untyped] + CeleryError, ) from fastapi import FastAPI from fastapi.exceptions import RequestValidationError From 228fae97ead1dea7a8a6f7e3608d10ef204f6f9e Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 2 Sep 2025 10:10:51 +0200 Subject: [PATCH 100/111] move pytest_plugins to toplevel conftest --- services/api-server/tests/conftest.py | 1 + .../api-server/tests/unit/api_functions/celery/conftest.py | 4 ---- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/services/api-server/tests/conftest.py b/services/api-server/tests/conftest.py index ba83c5ffd5c..f0c05db2d1f 100644 --- a/services/api-server/tests/conftest.py +++ b/services/api-server/tests/conftest.py @@ -29,6 +29,7 @@ "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.rabbit_service", + "pytest_simcore.redis_service", "pytest_simcore.repository_paths", "pytest_simcore.schemas", "pytest_simcore.services_api_mocks_for_aiohttp_clients", diff --git a/services/api-server/tests/unit/api_functions/celery/conftest.py b/services/api-server/tests/unit/api_functions/celery/conftest.py index 8fbcc1f7488..df34a188627 100644 --- a/services/api-server/tests/unit/api_functions/celery/conftest.py +++ b/services/api-server/tests/unit/api_functions/celery/conftest.py @@ -30,10 +30,6 @@ from 
simcore_service_api_server.core.application import create_app from simcore_service_api_server.core.settings import ApplicationSettings -pytest_plugins = [ - "pytest_simcore.redis_service", -] - @pytest.fixture(scope="session") def celery_config() -> dict[str, Any]: From e057a26c15879e34d9baafeadb1524e1b2b431c6 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 2 Sep 2025 11:46:57 +0200 Subject: [PATCH 101/111] fix tests --- .../src/models_library/progress_bar.py | 42 +++++++++---------- .../test_api_routers_function_jobs.py | 6 +-- 2 files changed, 23 insertions(+), 25 deletions(-) diff --git a/packages/models-library/src/models_library/progress_bar.py b/packages/models-library/src/models_library/progress_bar.py index 76735c33d80..21fb158a0eb 100644 --- a/packages/models-library/src/models_library/progress_bar.py +++ b/packages/models-library/src/models_library/progress_bar.py @@ -17,32 +17,30 @@ class ProgressStructuredMessage(BaseModel): @staticmethod def _update_json_schema_extra(schema: JsonDict) -> None: schema.update( - { - "examples": [ - { - "description": "some description", - "current": 12.2, - "total": 123, - }, - { - "description": "some description", + examples=[ + { + "description": "some description", + "current": 12.2, + "total": 123, + }, + { + "description": "some description", + "current": 12.2, + "total": 123, + "unit": "Byte", + }, + { + "description": "downloading", + "current": 2.0, + "total": 5, + "sub": { + "description": "port 2", "current": 12.2, "total": 123, "unit": "Byte", }, - { - "description": "downloading", - "current": 2.0, - "total": 5, - "sub": { - "description": "port 2", - "current": 12.2, - "total": 123, - "unit": "Byte", - }, - }, - ] - } + }, + ] ) model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) diff --git a/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py b/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py index 3d7f6171251..534051212a7 
100644 --- a/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py +++ b/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py @@ -247,9 +247,9 @@ async def _get_task_status( attempt=1, unit=None, message=ProgressStructuredMessage.model_validate( - ProgressStructuredMessage.model_config["json_schema_extra"][ - "examples" - ][0] + ProgressStructuredMessage.model_json_schema()["$defs"][ + "ProgressStructuredMessage" + ]["examples"][0] ), ), ) From eb6de3bfc0c8bd7ee6cbac1cd6f0f4494365681c Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 2 Sep 2025 16:03:35 +0200 Subject: [PATCH 102/111] fix mock --- services/api-server/tests/unit/conftest.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/services/api-server/tests/unit/conftest.py b/services/api-server/tests/unit/conftest.py index 738778223b8..1865519bb1a 100644 --- a/services/api-server/tests/unit/conftest.py +++ b/services/api-server/tests/unit/conftest.py @@ -566,11 +566,8 @@ def project_job_rpc_get() -> ProjectJobRpcGet: @pytest.fixture def job_links() -> JobLinks: - extra = JobLinks.model_config.get("json_schema_extra") - assert isinstance(extra, dict) - examples = extra.get("examples") - assert isinstance(examples, list) and len(examples) > 0 - return JobLinks.model_validate(examples[0]) + example = JobLinks.model_json_schema()["examples"][0] + return JobLinks.model_validate(example) @pytest.fixture From b356c2b443ad42f71727aee6de246744a6309949 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 2 Sep 2025 16:07:26 +0200 Subject: [PATCH 103/111] @pcrespov simply reraise exception to return 500 status cod --- .../api/routes/function_jobs_routes.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py index d37f9d112eb..b43f5840ce0 100644 --- 
a/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py @@ -250,10 +250,7 @@ async def function_job_status( tip="Initial call to run metamodeling function must have failed", ) ) - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail="Function job is not in a valid state", - ) from exc + raise raise UnsupportedFunctionFunctionJobClassCombinationError( function_class=function.function_class, From 4eaea73bb5ecca1a2460c9d721b6b0da87f1d95a Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 2 Sep 2025 16:11:27 +0200 Subject: [PATCH 104/111] @pcrespov decouple setup of celery task manager from dependencies submodule --- .../api/dependencies/celery.py | 17 ----------------- .../clients/celery_task_manager.py | 18 ++++++++++++++++++ .../core/application.py | 2 +- 3 files changed, 19 insertions(+), 18 deletions(-) create mode 100644 services/api-server/src/simcore_service_api_server/clients/celery_task_manager.py diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py index 61428e3de6e..1fa0ccfb3e4 100644 --- a/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py +++ b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py @@ -1,28 +1,11 @@ from typing import Final -from celery_library.common import create_app, create_task_manager from celery_library.task_manager import CeleryTaskManager -from celery_library.types import register_celery_types, register_pydantic_types from fastapi import FastAPI -from settings_library.celery import CelerySettings - -from ...celery_worker.worker_tasks.tasks import pydantic_types_to_register ASYNC_JOB_CLIENT_NAME: Final[str] = "API_SERVER" -def setup_task_manager(app: FastAPI, celery_settings: CelerySettings) -> None: - async 
def on_startup() -> None: - app.state.task_manager = await create_task_manager( - create_app(celery_settings), celery_settings - ) - - register_celery_types() - register_pydantic_types(*pydantic_types_to_register) - - app.add_event_handler("startup", on_startup) - - def get_task_manager(app: FastAPI) -> CeleryTaskManager: assert hasattr(app.state, "task_manager") # nosec task_manager = app.state.task_manager diff --git a/services/api-server/src/simcore_service_api_server/clients/celery_task_manager.py b/services/api-server/src/simcore_service_api_server/clients/celery_task_manager.py new file mode 100644 index 00000000000..0b4ac4c2f4e --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/clients/celery_task_manager.py @@ -0,0 +1,18 @@ +from celery_library.common import create_app, create_task_manager +from celery_library.types import register_celery_types, register_pydantic_types +from fastapi import FastAPI +from settings_library.celery import CelerySettings + +from ..celery_worker.worker_tasks.tasks import pydantic_types_to_register + + +def setup_task_manager(app: FastAPI, celery_settings: CelerySettings) -> None: + async def on_startup() -> None: + app.state.task_manager = await create_task_manager( + create_app(celery_settings), celery_settings + ) + + register_celery_types() + register_pydantic_types(*pydantic_types_to_register) + + app.add_event_handler("startup", on_startup) diff --git a/services/api-server/src/simcore_service_api_server/core/application.py b/services/api-server/src/simcore_service_api_server/core/application.py index ca030b39e88..572001ddc9d 100644 --- a/services/api-server/src/simcore_service_api_server/core/application.py +++ b/services/api-server/src/simcore_service_api_server/core/application.py @@ -13,9 +13,9 @@ from .. 
import exceptions from .._meta import API_VERSION, API_VTAG, APP_NAME -from ..api.dependencies.celery import setup_task_manager from ..api.root import create_router from ..api.routes.health import router as health_router +from ..clients.celery_task_manager import setup_task_manager from ..clients.postgres import setup_postgres from ..services_http import director_v2, storage, webserver from ..services_http.rabbitmq import setup_rabbitmq From faeac0660d5787fea08177d502617ac82985a3e4 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 2 Sep 2025 16:15:15 +0200 Subject: [PATCH 105/111] @GitHK poll_task_until_done -> wait_for_task_result --- .../tests/unit/api_functions/celery/test_functions.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/services/api-server/tests/unit/api_functions/celery/test_functions.py b/services/api-server/tests/unit/api_functions/celery/test_functions.py index 5bb5971cd3b..e9ecaf64996 100644 --- a/services/api-server/tests/unit/api_functions/celery/test_functions.py +++ b/services/api-server/tests/unit/api_functions/celery/test_functions.py @@ -83,7 +83,7 @@ _faker = Faker() -async def poll_task_until_done( +async def wait_for_task_result( client: AsyncClient, auth: BasicAuth, task_id: str, @@ -251,7 +251,7 @@ async def test_with_fake_run_function( celery_task_id = function_job.job_creation_task_id assert celery_task_id is not None # Poll until task completion and get result - result = await poll_task_until_done(client, auth, celery_task_id) + result = await wait_for_task_result(client, auth, celery_task_id) RegisteredProjectFunctionJob.model_validate(result.result) @@ -302,7 +302,7 @@ async def test_celery_error_propagation( ) with pytest.raises(HTTPStatusError) as exc_info: - await poll_task_until_done(client, auth, f"{task_uuid}") + await wait_for_task_result(client, auth, f"{task_uuid}") assert exc_info.value.response.status_code == status.HTTP_503_SERVICE_UNAVAILABLE @@ -419,7 +419,7 @@ def 
_default_side_effect( celery_task_id = function_job.job_creation_task_id assert celery_task_id is not None # Poll until task completion and get result - result = await poll_task_until_done(client, auth, celery_task_id) + result = await wait_for_task_result(client, auth, celery_task_id) RegisteredProjectFunctionJob.model_validate(result.result) @@ -554,5 +554,5 @@ def _default_side_effect( task_id = patch_mock.call_args.kwargs[ "registered_function_job_patch" ].job_creation_task_id - await poll_task_until_done(client, auth, f"{task_id}") + await wait_for_task_result(client, auth, f"{task_id}") assert side_effect_checks["headers_checked"] is True From a51a42ca68a2e65e49f6a165a5ac319cedd786e3 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 2 Sep 2025 16:16:46 +0200 Subject: [PATCH 106/111] @GitHK wait fixed 1 sec --- .../tests/unit/api_functions/celery/test_functions.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/api-server/tests/unit/api_functions/celery/test_functions.py b/services/api-server/tests/unit/api_functions/celery/test_functions.py index e9ecaf64996..130a05bebaa 100644 --- a/services/api-server/tests/unit/api_functions/celery/test_functions.py +++ b/services/api-server/tests/unit/api_functions/celery/test_functions.py @@ -74,7 +74,7 @@ AsyncRetrying, retry_if_exception_type, stop_after_delay, - wait_exponential, + wait_fixed, ) pytest_simcore_core_services_selection = ["postgres", "rabbit"] @@ -92,7 +92,7 @@ async def wait_for_task_result( async for attempt in AsyncRetrying( stop=stop_after_delay(timeout), - wait=wait_exponential(multiplier=0.5, min=0.5, max=2.0), + wait=wait_fixed(wait=datetime.timedelta(seconds=1.0)), reraise=True, retry=retry_if_exception_type(AssertionError), ): From 7f20a8483151b3c3c6faedce4123870ed89926d8 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 2 Sep 2025 16:19:35 +0200 Subject: [PATCH 107/111] pylint fix --- .../simcore_service_api_server/exceptions/handlers/__init__.py | 2 
+- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py b/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py index a0d071a47e6..adecb5d7203 100644 --- a/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py +++ b/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py @@ -1,4 +1,4 @@ -from celery.exceptions import ( # type: ignore[import-untyped] +from celery.exceptions import ( # type: ignore[import-untyped] #pylint: disable=no-name-in-module CeleryError, ) from fastapi import FastAPI From 76aecfeaa35468c691a14259c7ed56697bd1d60d Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 2 Sep 2025 16:31:08 +0200 Subject: [PATCH 108/111] ensure ordering of jobs is preserved --- .../api/routes/functions_routes.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index 0b1fb29027f..097fa1fc5b0 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -480,7 +480,7 @@ async def map_function( # noqa: PLR0913 for function_inputs in function_inputs_list ] - cached_job_uuids: list[FunctionJobID] = [] + job_ids: list[FunctionJobID] = [] pre_registered_function_job_data_list: list[PreRegisteredFunctionJobData] = [] for job_inputs in job_inputs_list: @@ -489,13 +489,14 @@ async def map_function( # noqa: PLR0913 function=to_run_function, job_inputs=job_inputs, ) - cached_job_uuids.append(cached_job.uid) + job_ids.append(cached_job.uid) except FunctionJobCacheNotFoundError: data = await function_jobs_service.pre_register_function_job( function=to_run_function, job_inputs=job_inputs, ) 
pre_registered_function_job_data_list.append(data) + job_ids.append(data.function_job_id) # run map in celery task job_filter = AsyncJobFilter( @@ -533,9 +534,6 @@ async def map_function( # noqa: PLR0913 ) function_job_collection_description = f"Function job collection of map of function {to_run_function.uid} with {len(pre_registered_function_job_data_list)} inputs" - job_ids = cached_job_uuids + [ - data.function_job_id for data in pre_registered_function_job_data_list - ] return await web_api_rpc_client.register_function_job_collection( function_job_collection=FunctionJobCollection( title="Function job collection of function map", From 312e853f198e1cf9747b357ef22a2b5feb9f6173 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Tue, 2 Sep 2025 16:48:16 +0200 Subject: [PATCH 109/111] first attempt to test job_id order --- .../api_functions/celery/test_functions.py | 42 ++++++++++++++----- 1 file changed, 32 insertions(+), 10 deletions(-) diff --git a/services/api-server/tests/unit/api_functions/celery/test_functions.py b/services/api-server/tests/unit/api_functions/celery/test_functions.py index 130a05bebaa..cc51fe1536a 100644 --- a/services/api-server/tests/unit/api_functions/celery/test_functions.py +++ b/services/api-server/tests/unit/api_functions/celery/test_functions.py @@ -498,8 +498,23 @@ def _default_side_effect( "get_function", mock_registered_project_function, None, None ) mock_handler_in_functions_rpc_interface("find_cached_function_jobs", [], None, None) + + _generated_project_job_ids: list[ProjectID] = [] + + async def _patch_register_function_job( + project_job_ids: list[ProjectID], *args, **kwargs + ): + project_job_id = ProjectID(_faker.uuid4()) + project_job_ids.append(project_job_id) + return mock_registered_project_function_job.model_copy( + update={"project_job_id": project_job_id} + ) + mock_handler_in_functions_rpc_interface( - "register_function_job", mock_registered_project_function_job, None, None + "register_function_job", + None, + None, 
+ partial(_patch_register_function_job, _generated_project_job_ids), ) mock_handler_in_functions_rpc_interface( "get_functions_user_api_access_rights", @@ -512,17 +527,23 @@ def _default_side_effect( None, None, ) + + async def _patch_register_function_job_collection(*args, **kwargs): + return ( + RegisteredFunctionJobCollection( + uid=FunctionJobID(_faker.uuid4()), + title="Test Collection", + description="A test function job collection", + job_ids=kwargs["function_job_collection"].job_ids, + created_at=datetime.datetime.now(datetime.UTC), + ), + ) + mock_handler_in_functions_rpc_interface( "register_function_job_collection", - RegisteredFunctionJobCollection( - uid=FunctionJobID(_faker.uuid4()), - title="Test Collection", - description="A test function job collection", - job_ids=[], - created_at=datetime.datetime.now(datetime.UTC), - ), None, None, + _patch_register_function_job_collection, ) patch_mock = mock_handler_in_functions_rpc_interface( @@ -543,14 +564,15 @@ def _default_side_effect( response = await client.post( f"{API_VTAG}/functions/{mock_registered_project_function.uid}:map", - json=[{}, {}], + json=[{"input_1": 2.0}, {"input_2": 3.0}], auth=auth, headers=headers, ) assert response.status_code == expected_status_code if expected_status_code == status.HTTP_200_OK: - FunctionJobCollection.model_validate(response.json()) + job_collection = FunctionJobCollection.model_validate(response.json()) + assert job_collection.job_ids == _generated_project_job_ids task_id = patch_mock.call_args.kwargs[ "registered_function_job_patch" ].job_creation_task_id From fa4f7ad2ef193b22fd257a63347e822300ff9915 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 3 Sep 2025 10:07:46 +0200 Subject: [PATCH 110/111] Revert "first attempt to test job_id order" This reverts commit 312e853f198e1cf9747b357ef22a2b5feb9f6173. 
--- .../api_functions/celery/test_functions.py | 42 +++++-------------- 1 file changed, 10 insertions(+), 32 deletions(-) diff --git a/services/api-server/tests/unit/api_functions/celery/test_functions.py b/services/api-server/tests/unit/api_functions/celery/test_functions.py index cc51fe1536a..130a05bebaa 100644 --- a/services/api-server/tests/unit/api_functions/celery/test_functions.py +++ b/services/api-server/tests/unit/api_functions/celery/test_functions.py @@ -498,23 +498,8 @@ def _default_side_effect( "get_function", mock_registered_project_function, None, None ) mock_handler_in_functions_rpc_interface("find_cached_function_jobs", [], None, None) - - _generated_project_job_ids: list[ProjectID] = [] - - async def _patch_register_function_job( - project_job_ids: list[ProjectID], *args, **kwargs - ): - project_job_id = ProjectID(_faker.uuid4()) - project_job_ids.append(project_job_id) - return mock_registered_project_function_job.model_copy( - update={"project_job_id": project_job_id} - ) - mock_handler_in_functions_rpc_interface( - "register_function_job", - None, - None, - partial(_patch_register_function_job, _generated_project_job_ids), + "register_function_job", mock_registered_project_function_job, None, None ) mock_handler_in_functions_rpc_interface( "get_functions_user_api_access_rights", @@ -527,23 +512,17 @@ async def _patch_register_function_job( None, None, ) - - async def _patch_register_function_job_collection(*args, **kwargs): - return ( - RegisteredFunctionJobCollection( - uid=FunctionJobID(_faker.uuid4()), - title="Test Collection", - description="A test function job collection", - job_ids=kwargs["function_job_collection"].job_ids, - created_at=datetime.datetime.now(datetime.UTC), - ), - ) - mock_handler_in_functions_rpc_interface( "register_function_job_collection", + RegisteredFunctionJobCollection( + uid=FunctionJobID(_faker.uuid4()), + title="Test Collection", + description="A test function job collection", + job_ids=[], + 
created_at=datetime.datetime.now(datetime.UTC), + ), None, None, - _patch_register_function_job_collection, ) patch_mock = mock_handler_in_functions_rpc_interface( @@ -564,15 +543,14 @@ async def _patch_register_function_job_collection(*args, **kwargs): response = await client.post( f"{API_VTAG}/functions/{mock_registered_project_function.uid}:map", - json=[{"input_1": 2.0}, {"input_2": 3.0}], + json=[{}, {}], auth=auth, headers=headers, ) assert response.status_code == expected_status_code if expected_status_code == status.HTTP_200_OK: - job_collection = FunctionJobCollection.model_validate(response.json()) - assert job_collection.job_ids == _generated_project_job_ids + FunctionJobCollection.model_validate(response.json()) task_id = patch_mock.call_args.kwargs[ "registered_function_job_patch" ].job_creation_task_id From 9172a396d123b88caa34ed23865a41ae3d50de66 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard Date: Wed, 3 Sep 2025 10:26:47 +0200 Subject: [PATCH 111/111] fix fixture for creating celery app --- .../tests/unit/api_functions/celery/conftest.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/services/api-server/tests/unit/api_functions/celery/conftest.py b/services/api-server/tests/unit/api_functions/celery/conftest.py index df34a188627..993ba4b73ab 100644 --- a/services/api-server/tests/unit/api_functions/celery/conftest.py +++ b/services/api-server/tests/unit/api_functions/celery/conftest.py @@ -27,6 +27,7 @@ from servicelib.fastapi.celery.app_server import FastAPIAppServer from settings_library.redis import RedisSettings from simcore_service_api_server.celery_worker.worker_main import setup_worker_tasks +from simcore_service_api_server.clients import celery_task_manager from simcore_service_api_server.core.application import create_app from simcore_service_api_server.core.settings import ApplicationSettings @@ -60,8 +61,11 @@ async def mocked_log_streamer_setup(mocker: MockerFixture) -> MockerFixture: def mock_celery_app(mocker: 
MockerFixture, celery_config: dict[str, Any]) -> Celery: celery_app = Celery(**celery_config) - for module in ("simcore_service_api_server.api.dependencies.celery.create_app",): - mocker.patch(module, return_value=celery_app) + mocker.patch.object( + celery_task_manager, + celery_task_manager.create_app.__name__, + lambda settings: celery_app, + ) return celery_app