diff --git a/packages/models-library/src/models_library/projects_nodes.py b/packages/models-library/src/models_library/projects_nodes.py index 1bb91c1ddaa..5e1d6935de5 100644 --- a/packages/models-library/src/models_library/projects_nodes.py +++ b/packages/models-library/src/models_library/projects_nodes.py @@ -238,7 +238,7 @@ class Node(BaseModel): Field(description="The short name of the node", examples=["JupyterLab"]), ] progress: Annotated[ - float | None, + int | None, Field( ge=0, le=100, @@ -354,8 +354,12 @@ class Node(BaseModel): Field(default_factory=NodeState, description="The node's state object"), ] = DEFAULT_FACTORY - # NOTE: requested_resources should be here! WARNING: this model is used both in database and rest api! - # Model for project_nodes table should NOT be Node but a different one ! + required_resources: Annotated[ + dict[str, Any] | None, + Field(default_factory=dict), + # NOTE: requested_resources should be here! WARNING: this model is used both in database and rest api! + # Model for project_nodes table should NOT be Node but a different one ! + ] = DEFAULT_FACTORY boot_options: Annotated[ dict[EnvVarKey, str] | None, diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/201aa37f4d9a_remove_workbench_column_from_projects_.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/201aa37f4d9a_remove_workbench_column_from_projects_.py new file mode 100644 index 00000000000..a97c7b2fb8b --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/201aa37f4d9a_remove_workbench_column_from_projects_.py @@ -0,0 +1,337 @@ +"""Remove workbench column from projects_table + +Revision ID: 201aa37f4d9a +Revises: 06eafd25d004 +Create Date: 2025-07-22 19:25:42.125196+00:00 + +""" + +import json +from typing import Any + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
+revision = "201aa37f4d9a" +down_revision = "06eafd25d004" +branch_labels = None +depends_on = None + + +def _migrate_workbench_to_projects_nodes() -> None: + """Migrate nodes from projects.workbench to projects_nodes table.""" + + # Get database connection + connection = op.get_bind() + + # Fetch all projects with workbench data + projects_result = connection.execute( + sa.text("SELECT uuid, workbench FROM projects WHERE workbench IS NOT NULL") + ) + + errors: list[str] = [] + updated_nodes_count = 0 + inserted_nodes_count = 0 + + for project_uuid, workbench_json in projects_result: + if not workbench_json: + continue + + try: + workbench_data = ( + workbench_json + if isinstance(workbench_json, dict) + else json.loads(workbench_json) + ) + except (json.JSONDecodeError, TypeError) as e: + errors.append(f"Project {project_uuid}: Invalid workbench JSON - {e}") + continue + + if not isinstance(workbench_data, dict): + errors.append(f"Project {project_uuid}: Workbench is not a dictionary") + continue + + for node_id, node_data in workbench_data.items(): + if not isinstance(node_data, dict): + errors.append( + f"Project {project_uuid}, Node {node_id}: Node data is not a dictionary" + ) + continue + + # Validate required fields + missing_fields = [] + if not node_data.get("key"): + missing_fields.append("key") + if not node_data.get("version"): + missing_fields.append("version") + if not node_data.get("label"): + missing_fields.append("label") + + if missing_fields: + errors.append( + f"Project {project_uuid}, Node {node_id}: Missing required fields: {', '.join(missing_fields)}" + ) + continue + + # Check if node already exists + existing_node = connection.execute( + sa.text( + "SELECT project_node_id FROM projects_nodes WHERE project_uuid = :project_uuid AND node_id = :node_id" + ), + {"project_uuid": project_uuid, "node_id": node_id}, + ).fetchone() + + # Prepare node data for insertion/update + node_values = { + "project_uuid": project_uuid, + "node_id": node_id, + "key": node_data["key"], + "version": node_data["version"], + "label": node_data["label"], + "progress": node_data.get("progress"), + "thumbnail": node_data.get("thumbnail"), + "input_access": ( + json.dumps(node_data["input_access"]) + if node_data.get("input_access") + else None + ), + "input_nodes": ( + json.dumps(node_data["input_nodes"]) + if node_data.get("input_nodes") + else None + ), + "inputs": ( + json.dumps(node_data["inputs"]) if node_data.get("inputs") else None + ), + "inputs_required": ( + json.dumps(node_data["inputs_required"]) + if node_data.get("inputs_required") + else None + ), + "inputs_units": ( + json.dumps(node_data["inputs_units"]) + if node_data.get("inputs_units") + else None + ), + "output_nodes": ( + json.dumps(node_data["output_nodes"]) + if node_data.get("output_nodes") + else None + ), + "outputs": ( + json.dumps(node_data["outputs"]) + if node_data.get("outputs") + else None + ), + "run_hash": node_data.get( + "run_hash", node_data.get("runHash") + ), # Handle both camelCase and snake_case + "state": ( + json.dumps(node_data["state"]) if node_data.get("state") else None + ), + "parent": node_data.get("parent"), + "boot_options": ( + json.dumps(node_data["boot_options"]) + if node_data.get("boot_options", node_data.get("bootOptions")) + else None + ), + } + + if existing_node: + # Update existing node + update_sql = """ + UPDATE projects_nodes SET + key = :key, + version = :version, + label = :label, + progress = :progress, + thumbnail = :thumbnail, + input_access = :input_access::jsonb, + 
input_nodes = :input_nodes::jsonb, + inputs = :inputs::jsonb, + inputs_required = :inputs_required::jsonb, + inputs_units = :inputs_units::jsonb, + output_nodes = :output_nodes::jsonb, + outputs = :outputs::jsonb, + run_hash = :run_hash, + state = :state::jsonb, + parent = :parent, + boot_options = :boot_options::jsonb, + modified_datetime = NOW() + WHERE project_uuid = :project_uuid AND node_id = :node_id + """ + connection.execute(sa.text(update_sql), node_values) + updated_nodes_count += 1 + print(f"Updated existing node {node_id} in project {project_uuid}") + + else: + # Insert new node + insert_sql = """ + INSERT INTO projects_nodes ( + project_uuid, node_id, key, version, label, progress, thumbnail, + input_access, input_nodes, inputs, inputs_required, inputs_units, + output_nodes, outputs, run_hash, state, parent, boot_options, + required_resources, created_datetime, modified_datetime + ) VALUES ( + :project_uuid, :node_id, :key, :version, :label, :progress, :thumbnail, + :input_access::jsonb, :input_nodes::jsonb, :inputs::jsonb, + :inputs_required::jsonb, :inputs_units::jsonb, :output_nodes::jsonb, + :outputs::jsonb, :run_hash, :state::jsonb, :parent, :boot_options::jsonb, + '{}'::jsonb, NOW(), NOW() + ) + """ + connection.execute(sa.text(insert_sql), node_values) + inserted_nodes_count += 1 + + print( + f"Migration summary: {inserted_nodes_count} nodes inserted, {updated_nodes_count} nodes updated" + ) + + if errors: + error_message = f"Migration failed with {len(errors)} errors:\n" + "\n".join( + errors + ) + print(error_message) + raise RuntimeError(error_message) + + +def _restore_workbench_from_projects_nodes() -> None: + """Restore workbench data from projects_nodes table to projects.workbench column.""" + + # Get database connection + connection = op.get_bind() + + # Get all projects that have nodes in projects_nodes + projects_with_nodes = connection.execute( + sa.text( + """ + SELECT DISTINCT project_uuid + FROM projects_nodes + ORDER BY project_uuid + """ + ) + ) + + errors: list[str] = [] + restored_projects_count = 0 + + for (project_uuid,) in projects_with_nodes: + # Fetch all nodes for this project + nodes_result = connection.execute( + sa.text( + """ + SELECT node_id, key, version, label, progress, thumbnail, + input_access, input_nodes, inputs, inputs_required, inputs_units, + output_nodes, outputs, run_hash, state, parent, boot_options + FROM projects_nodes + WHERE project_uuid = :project_uuid + ORDER BY node_id + """ + ), + {"project_uuid": project_uuid}, + ) + + workbench_data: dict[str, Any] = {} + + for row in nodes_result: + node_id = row.node_id + + # Build node data dictionary + node_data: dict[str, Any] = { + "key": row.key, + "version": row.version, + "label": row.label, + } + + # Add optional fields if they exist + if row.progress is not None: + node_data["progress"] = float(row.progress) + if row.thumbnail: + node_data["thumbnail"] = row.thumbnail + if row.input_access: + node_data["inputAccess"] = row.input_access + if row.input_nodes: + node_data["inputNodes"] = row.input_nodes + if row.inputs: + node_data["inputs"] = row.inputs + if row.inputs_required: + node_data["inputsRequired"] = row.inputs_required + if row.inputs_units: + node_data["inputsUnits"] = row.inputs_units + if row.output_nodes: + node_data["outputNodes"] = row.output_nodes + if row.outputs: + node_data["outputs"] = row.outputs + if row.run_hash: + node_data["runHash"] = row.run_hash + if row.state: + node_data["state"] = row.state + if row.parent: + node_data["parent"] = row.parent + 
if row.boot_options: + node_data["bootOptions"] = row.boot_options + + workbench_data[node_id] = node_data + + if workbench_data: + try: + # Update the project with the restored workbench data + connection.execute( + sa.text( + """ + UPDATE projects + SET workbench = :workbench_data + WHERE uuid = :project_uuid + """ + ), + { + "project_uuid": project_uuid, + "workbench_data": json.dumps(workbench_data), + }, + ) + restored_projects_count += 1 + + except Exception as e: + errors.append( + f"Project {project_uuid}: Failed to restore workbench data - {e}" + ) + + print( + f"Downgrade summary: {restored_projects_count} projects restored with workbench data" + ) + + if errors: + error_message = f"Downgrade failed with {len(errors)} errors:\n" + "\n".join( + errors + ) + print(error_message) + raise RuntimeError(error_message) + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + + # Migrate workbench data to projects_nodes before dropping the column + _migrate_workbench_to_projects_nodes() + + op.drop_column("projects", "workbench") + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "projects", + sa.Column( + "workbench", + postgresql.JSON(astext_type=sa.Text()), + autoincrement=False, + nullable=False, + ), + ) + + # Restore workbench data from projects_nodes table + _restore_workbench_from_projects_nodes() + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/c9c165644731_update_project_last_changed_date_column_.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/c9c165644731_update_project_last_changed_date_column_.py new file mode 100644 index 00000000000..847c318d216 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/c9c165644731_update_project_last_changed_date_column_.py @@ -0,0 +1,63 @@ +"""Update project last_changed_date column when a node is created, updated or deleted. + +Revision ID: c9c165644731 +Revises: 201aa37f4d9a +Create Date: 2025-08-07 10:26:37.577990+00:00 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "c9c165644731" +down_revision = "201aa37f4d9a" +branch_labels = None +depends_on = None + + +update_projects_last_change_date = sa.DDL( + """ +CREATE OR REPLACE FUNCTION update_projects_last_change_date() +RETURNS TRIGGER AS $$ +DECLARE + project_uuid VARCHAR; +BEGIN + IF TG_OP = 'DELETE' THEN + project_uuid := OLD.project_uuid; + ELSE + project_uuid := NEW.project_uuid; + END IF; + + UPDATE projects + SET last_change_date = NOW() + WHERE uuid = project_uuid; + + RETURN NULL; +END; +$$ LANGUAGE plpgsql; +""" +) + + +projects_nodes_changed = sa.DDL( + """ +DROP TRIGGER IF EXISTS projects_nodes_changed on projects_nodes; +CREATE TRIGGER projects_nodes_changed +AFTER INSERT OR UPDATE OR DELETE ON projects_nodes +FOR EACH ROW +EXECUTE FUNCTION update_projects_last_change_date(); +""" +) + + +def upgrade(): + op.execute(update_projects_last_change_date) + op.execute(projects_nodes_changed) + + +def downgrade(): + op.execute( + sa.DDL("DROP TRIGGER IF EXISTS projects_nodes_changed ON projects_nodes;") + ) + op.execute(sa.DDL("DROP FUNCTION IF EXISTS update_projects_last_change_date();")) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/projects.py b/packages/postgres-database/src/simcore_postgres_database/models/projects.py index 7af3c09fc65..0006055d83e 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/projects.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/projects.py @@ -96,12 +96,13 @@ class ProjectTemplateType(str, enum.Enum): default=None, ), # CHILDREN/CONTENT-------------------------- - sa.Column( - "workbench", - sa.JSON, - nullable=False, - doc="Pipeline with the project's workflow. Schema in models_library.projects.Workbench", - ), + # NOTE: commented out to check who still uses this + # sa.Column( + # "workbench", + # sa.JSON, + # nullable=False, + # doc="Pipeline with the project's workflow. 
Schema in models_library.projects.Workbench", + # ), # FRONT-END ---------------------------- sa.Column( "ui", diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py index 4bb6855b0bf..c69e1bbd5d8 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py @@ -4,15 +4,18 @@ from typing import Annotated, Any import asyncpg.exceptions # type: ignore[import-untyped] +import sqlalchemy as sa import sqlalchemy.exc from common_library.async_tools import maybe_await from common_library.basic_types import DEFAULT_FACTORY from common_library.errors_classes import OsparcErrorMixin from pydantic import BaseModel, ConfigDict, Field from sqlalchemy.dialects.postgresql import insert as pg_insert +from sqlalchemy.sql.selectable import Subquery from ._protocols import DBConnection from .aiopg_errors import ForeignKeyViolation, UniqueViolation +from .models.projects import projects from .models.projects_node_to_pricing_unit import projects_node_to_pricing_unit from .models.projects_nodes import projects_nodes from .utils_aiosqlalchemy import map_db_exception @@ -81,6 +84,60 @@ class ProjectNode(ProjectNodeCreate): model_config = ConfigDict(from_attributes=True) +def create_workbench_subquery(project_id: str) -> Subquery: + workbench_obj = sa.func.json_build_object( + "key", + projects_nodes.c.key, + "version", + projects_nodes.c.version, + "label", + projects_nodes.c.label, + "progress", + projects_nodes.c.progress, + "thumbnail", + projects_nodes.c.thumbnail, + "inputAccess", + projects_nodes.c.input_access, + "inputNodes", + projects_nodes.c.input_nodes, + "inputs", + projects_nodes.c.inputs, + "inputsRequired", + projects_nodes.c.inputs_required, + "inputsUnits", + projects_nodes.c.inputs_units, + "outputNodes", + projects_nodes.c.output_nodes, + "outputs", + projects_nodes.c.outputs, + "runHash", + projects_nodes.c.run_hash, + "state", + projects_nodes.c.state, + "parent", + projects_nodes.c.parent, + "bootOptions", + projects_nodes.c.boot_options, + ) + + return ( + sa.select( + projects_nodes.c.project_uuid, + sa.func.json_object_agg( + projects_nodes.c.node_id, sa.func.json(workbench_obj) + ).label("workbench"), + ) + .select_from( + projects_nodes.join( + projects, projects_nodes.c.project_uuid == projects.c.uuid + ) + ) + .where(projects.c.uuid == project_id) + .group_by(projects_nodes.c.project_uuid) + .subquery() + ) + + @dataclass(frozen=True, kw_only=True) class ProjectNodesRepo: project_uuid: uuid.UUID diff --git a/packages/pytest-simcore/src/pytest_simcore/db_entries_mocks.py b/packages/pytest-simcore/src/pytest_simcore/db_entries_mocks.py index 1b1ec0ff762..05f7d8b4fbe 100644 --- a/packages/pytest-simcore/src/pytest_simcore/db_entries_mocks.py +++ b/packages/pytest-simcore/src/pytest_simcore/db_entries_mocks.py @@ -4,16 +4,19 @@ # pylint:disable=no-value-for-parameter import contextlib +import logging from collections.abc import AsyncIterator, Awaitable, Callable, Iterator from typing import Any from uuid import uuid4 import pytest import sqlalchemy as sa +from common_library.dict_tools import remap_keys from faker import Faker from models_library.products import ProductName from models_library.projects import ProjectAtDB, ProjectID -from models_library.projects_nodes_io import NodeID +from models_library.projects_nodes import Node +from 
pytest_simcore.helpers.logging_tools import log_context from simcore_postgres_database.models.comp_pipeline import StateType, comp_pipeline from simcore_postgres_database.models.comp_tasks import comp_tasks from simcore_postgres_database.models.products import products @@ -30,6 +33,8 @@ from .helpers.postgres_tools import insert_and_get_row_lifespan from .helpers.postgres_users import sync_insert_and_get_user_and_secrets_lifespan +_logger = logging.getLogger(__name__) + @pytest.fixture() def create_registered_user( @@ -81,71 +86,106 @@ async def create_project( ) -> AsyncIterator[Callable[..., Awaitable[ProjectAtDB]]]: created_project_ids: list[str] = [] - async def _( - user: dict[str, Any], - *, - project_nodes_overrides: dict[str, Any] | None = None, - **project_overrides, - ) -> ProjectAtDB: - project_uuid = uuid4() - print(f"Created new project with uuid={project_uuid}") - project_config = { - "uuid": f"{project_uuid}", - "name": faker.name(), - "type": ProjectType.STANDARD.name, - "description": faker.text(), - "prj_owner": user["id"], - "access_rights": {"1": {"read": True, "write": True, "delete": True}}, - "thumbnail": "", - "workbench": {}, - } - project_config.update(**project_overrides) - async with sqlalchemy_async_engine.connect() as con, con.begin(): - result = await con.execute( - projects.insert() - .values(**project_config) - .returning(sa.literal_column("*")) - ) - - inserted_project = ProjectAtDB.model_validate(result.one()) - project_nodes_repo = ProjectNodesRepo(project_uuid=project_uuid) - # NOTE: currently no resources is passed until it becomes necessary - default_node_config = { - "required_resources": {}, - "key": faker.pystr(), - "version": faker.pystr(), - "label": faker.pystr(), - } - if project_nodes_overrides: - default_node_config.update(project_nodes_overrides) - await project_nodes_repo.add( - con, - nodes=[ - ProjectNodeCreate( - node_id=NodeID(node_id), - **(default_node_config | node_data.model_dump(mode="json")), + async with contextlib.AsyncExitStack() as stack: + + async def _create( + user: dict[str, Any], + *, + project_nodes_overrides: dict[str, Any] | None = None, + **project_overrides, + ) -> ProjectAtDB: + + project_uuid = uuid4() + with log_context( + logging.INFO, + "Creating new project with uuid=%s", + project_uuid, + logger=_logger, + ) as log_ctx: + + project_values = { + "uuid": f"{project_uuid}", + "name": faker.name(), + "type": ProjectType.STANDARD.name, + "description": faker.text(), + "prj_owner": user["id"], + "access_rights": { + "1": {"read": True, "write": True, "delete": True} + }, + "thumbnail": "", + **project_overrides, + } + project_workbench = project_values.pop("workbench", {}) + + project_db_rows = await stack.enter_async_context( + insert_and_get_row_lifespan( + sqlalchemy_async_engine, + table=projects, + values=project_values, + pk_col=projects.c.uuid, ) - for node_id, node_data in inserted_project.workbench.items() - ], - ) - await con.execute( - projects_to_products.insert().values( - project_uuid=f"{inserted_project.uuid}", - product_name=product_name, ) - ) - print(f"--> created {inserted_project=}") - created_project_ids.append(f"{inserted_project.uuid}") - return inserted_project + inserted_project = ProjectAtDB.model_validate( + {**project_db_rows, "workbench": project_workbench} + ) - yield _ + async with sqlalchemy_async_engine.connect() as con, con.begin(): + # collect nodes + nodes = [] + for node_id, node_data in project_workbench.items(): + # NOTE: workbench node have a lot of camecase fields. 
We validate with Node and + # export to ProjectNodeCreate with alias=False + + node_model = Node.model_validate(node_data).model_dump( + mode="json", by_alias=True + ) + + field_mapping = { + "inputAccess": "input_access", + "inputNodes": "input_nodes", + "inputsRequired": "inputs_required", + "inputsUnits": "inputs_units", + "outputNodes": "output_nodes", + "runHash": "run_hash", + "bootOptions": "boot_options", + } + + node = remap_keys(node_model, field_mapping) + + # NOTE: currently no resources is passed until it becomes necessary + project_workbench_node = { + "required_resources": {}, + **node, + } + + if project_nodes_overrides: + project_workbench_node.update(project_nodes_overrides) + + nodes.append( + ProjectNodeCreate(node_id=node_id, **project_workbench_node) + ) + + # add nodes + project_nodes_repo = ProjectNodesRepo(project_uuid=project_uuid) + await project_nodes_repo.add( + con, + nodes=nodes, + ) - # cleanup - async with sqlalchemy_async_engine.begin() as con: - await con.execute( - projects.delete().where(projects.c.uuid.in_(created_project_ids)) - ) - print(f"<-- delete projects {created_project_ids=}") + # link to product + await con.execute( + projects_to_products.insert().values( + project_uuid=f"{inserted_project.uuid}", + product_name=product_name, + ) + ) + log_ctx.logger.info("Created project %s", inserted_project) + created_project_ids.append(f"{inserted_project.uuid}") + return inserted_project + + yield _create + + _logger.info("<-- delete projects %s", created_project_ids) @pytest.fixture diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py index 40575367091..24ab562a3e8 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py @@ -182,7 +182,6 @@ def random_project(fake: Faker = DEFAULT_FAKER, **overrides) -> dict[str, Any]: "prj_owner": fake.pyint(), "thumbnail": fake.image_url(width=120, height=120), "access_rights": {}, - "workbench": {}, "published": False, } diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/storage_utils_project.py b/packages/pytest-simcore/src/pytest_simcore/helpers/storage_utils_project.py index ad4535c9d70..c9c680175ae 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/storage_utils_project.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/storage_utils_project.py @@ -2,12 +2,13 @@ from copy import deepcopy from typing import Any -from models_library.projects_nodes_io import NodeIDStr +from models_library.projects_nodes_io import NodeID def clone_project_data( - project: dict, -) -> tuple[dict[str, Any], dict[NodeIDStr, NodeIDStr]]: + project: dict[str, Any], + project_nodes: dict[NodeID, dict[str, Any]], +) -> tuple[dict[str, Any], dict[NodeID, dict[str, Any]], dict[NodeID, NodeID]]: project_copy = deepcopy(project) # Update project id @@ -17,28 +18,17 @@ def clone_project_data( project_copy.pop("id", None) project_copy["name"] = f"{project['name']}-copy" - # Workbench nodes shall be unique within the project context - def _create_new_node_uuid(old_uuid: NodeIDStr) -> NodeIDStr: - return NodeIDStr(uuidlib.uuid5(project_copy_uuid, old_uuid)) + # Nodes shall be unique within the project context + def _new_node_uuid(old: NodeID) -> NodeID: + return uuidlib.uuid5(project_copy_uuid, f"{old}") - nodes_map = {} - for node_uuid in project.get("workbench", {}): - nodes_map[node_uuid] = 
_create_new_node_uuid(node_uuid) + nodes_map = {node_uuid: _new_node_uuid(node_uuid) for node_uuid in project_nodes} + project_nodes_copy = { + nodes_map[old_node_id]: { + **deepcopy(data), + "node_id": nodes_map[old_node_id], # update the internal "node_id" field + } + for old_node_id, data in project_nodes.items() + } - def _replace_uuids(node): - if isinstance(node, str): - node = nodes_map.get(node, node) - elif isinstance(node, list): - node = [_replace_uuids(item) for item in node] - elif isinstance(node, dict): - _frozen_items = tuple(node.items()) - for key, value in _frozen_items: - if key in nodes_map: - new_key = nodes_map[key] - node[new_key] = node.pop(key) - key = new_key - node[key] = _replace_uuids(value) - return node - - project_copy["workbench"] = _replace_uuids(project_copy.get("workbench", {})) - return project_copy, nodes_map + return project_copy, project_nodes_copy, nodes_map diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py index 917d70d24cc..c8c71bd2754 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py @@ -4,12 +4,15 @@ import json import uuid as uuidlib +from collections.abc import AsyncIterator +from contextlib import asynccontextmanager from pathlib import Path from typing import Any from aiohttp import web from aiohttp.test_utils import TestClient from common_library.dict_tools import remap_keys +from deepdiff import DeepDiff from models_library.projects_nodes_io import NodeID from models_library.services_resources import ServiceResourcesDictHelpers from simcore_postgres_database.utils_projects_nodes import ProjectNodeCreate @@ -73,25 +76,49 @@ async def create_project( db: ProjectDBAPI = app[APP_PROJECT_DBAPI] - new_project = await db.insert_project( + raw_workbench: dict[str, Any] = project_data.pop("workbench", {}) + for raw_node in raw_workbench.values(): # back-compatibility with old format + if "position" in raw_node: + del raw_node["position"] + + # Get valid ProjectNodeCreate fields, excluding node_id since it's set separately + valid_fields = ProjectNodeCreate.get_field_names(exclude={"node_id"}) + + # Mapping from camelCase (workbench) to snake_case (ProjectNodeCreate) + field_mapping = { + "inputAccess": "input_access", + "inputNodes": "input_nodes", + "inputsUnits": "inputs_units", + "outputNodes": "output_nodes", + "runHash": "run_hash", + "bootOptions": "boot_options", + } + + fake_required_resources: dict[str, Any] = ServiceResourcesDictHelpers.model_config[ + "json_schema_extra" + ]["examples"][0] + + project_nodes = { + NodeID(node_id): ProjectNodeCreate( + node_id=NodeID(node_id), + # NOTE: fake initial resources until more is needed + required_resources=fake_required_resources, + **{ + str(field_mapping.get(field, field)): value + for field, value in raw_node.items() + if field_mapping.get(field, field) in valid_fields + }, + ) + for node_id, raw_node in raw_workbench.items() + } + + project_created = await db.insert_project( project_data, user_id, product_name=product_name, force_project_uuid=force_uuid, force_as_template=as_template, - # NOTE: fake initial resources until more is needed - project_nodes={ - NodeID(node_id): ProjectNodeCreate( - node_id=NodeID(node_id), - required_resources=ServiceResourcesDictHelpers.model_config[ - "json_schema_extra" - ]["examples"][0], - key=node_info.get("key"), - 
version=node_info.get("version"), - label=node_info.get("label"), - ) - for node_id, node_info in project_data.get("workbench", {}).items() - }, + project_nodes=project_nodes, ) if params_override and ( @@ -103,7 +130,7 @@ async def create_project( for group_id, permissions in _access_rights.items(): await update_or_insert_project_group( app, - project_id=new_project["uuid"], + project_id=project_created["uuid"], group_id=int(group_id), read=permissions["read"], write=permissions["write"], @@ -112,20 +139,21 @@ async def create_project( try: uuidlib.UUID(str(project_data["uuid"])) - assert new_project["uuid"] == project_data["uuid"] + assert project_created["uuid"] == project_data["uuid"] except (ValueError, AssertionError): # in that case the uuid gets replaced - assert new_project["uuid"] != project_data["uuid"] - project_data["uuid"] = new_project["uuid"] + assert project_created["uuid"] != project_data["uuid"] + project_data["uuid"] = project_created["uuid"] for key in DB_EXCLUSIVE_COLUMNS: project_data.pop(key, None) - new_project: ProjectDict = remap_keys( - new_project, + project_created: ProjectDict = remap_keys( + project_created, rename={"trashed": "trashedAt"}, ) - return new_project + project_created["workbench"] = raw_workbench + return project_created async def delete_all_projects(app: web.Application): @@ -137,50 +165,35 @@ async def delete_all_projects(app: web.Application): await conn.execute(query) -class NewProject: - def __init__( - self, - params_override: dict | None = None, - app: web.Application | None = None, - *, - user_id: int, - product_name: str, - tests_data_dir: Path, - force_uuid: bool = False, - as_template: bool = False, - ): - assert app # nosec - - self.params_override = params_override - self.user_id = user_id - self.product_name = product_name - self.app = app - self.prj = {} - self.force_uuid = force_uuid - self.tests_data_dir = tests_data_dir - self.as_template = as_template - - assert tests_data_dir.exists() - assert tests_data_dir.is_dir() - - async def __aenter__(self) -> ProjectDict: - assert self.app # nosec - - self.prj = await create_project( - self.app, - self.params_override, - self.user_id, - product_name=self.product_name, - force_uuid=self.force_uuid, - default_project_json=self.tests_data_dir / "fake-project.json", - as_template=self.as_template, - ) - - return self.prj +@asynccontextmanager +async def NewProject( + params_override: dict | None = None, + app: web.Application | None = None, + *, + user_id: int, + product_name: str, + tests_data_dir: Path, + force_uuid: bool = False, + as_template: bool = False, +) -> AsyncIterator[ProjectDict]: + assert app # nosec + assert tests_data_dir.exists() + assert tests_data_dir.is_dir() + + project = await create_project( + app, + params_override, + user_id, + product_name=product_name, + force_uuid=force_uuid, + default_project_json=tests_data_dir / "fake-project.json", + as_template=as_template, + ) - async def __aexit__(self, *args): - assert self.app # nosec - await delete_all_projects(self.app) + try: + yield project + finally: + await delete_all_projects(app) async def assert_get_same_project( @@ -198,6 +211,11 @@ async def assert_get_same_project( resp = await client.get(f"{url}") data, error = await assert_status(resp, expected) + # without our control + if not error: - assert data == {k: project[k] for k in data} + diff = DeepDiff( + data, {k: project[k] for k in data}, exclude_paths="root['lastChangeDate']" + ) + assert not diff, diff.pretty() return data diff --git 
a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_data_models.py b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_data_models.py index a41d4876612..4dd4e31317d 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_data_models.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_data_models.py @@ -2,6 +2,7 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable +import contextlib from collections.abc import AsyncIterator, Awaitable, Callable from contextlib import asynccontextmanager from typing import Any @@ -14,11 +15,13 @@ from models_library.users import UserID from pydantic import TypeAdapter from simcore_postgres_database.models.project_to_groups import project_to_groups +from simcore_postgres_database.models.projects_nodes import projects_nodes from simcore_postgres_database.storage_models import projects, users from sqlalchemy.dialects.postgresql import insert as pg_insert from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine from .helpers.faker_factories import DEFAULT_FAKER, random_project +from .helpers.postgres_tools import insert_and_get_row_lifespan from .helpers.postgres_users import insert_and_get_user_and_secrets_lifespan @@ -214,32 +217,38 @@ async def _() -> None: @pytest.fixture async def create_project_node( user_id: UserID, sqlalchemy_async_engine: AsyncEngine, faker: Faker -) -> Callable[..., Awaitable[NodeID]]: - async def _creator( - project_id: ProjectID, node_id: NodeID | None = None, **kwargs - ) -> NodeID: - async with sqlalchemy_async_engine.begin() as conn: - result = await conn.execute( - sa.select(projects.c.workbench).where( - projects.c.uuid == f"{project_id}" - ) - ) - row = result.fetchone() - assert row - project_workbench: dict[str, Any] = row.workbench - new_node_id = node_id or NodeID(f"{faker.uuid4()}") - node_data = { +) -> AsyncIterator[Callable[..., Awaitable[tuple[NodeID, dict[str, Any]]]]]: + created_node_entries: list[tuple[NodeID, ProjectID]] = [] + + async with contextlib.AsyncExitStack() as stack: + + async def _creator( + project_id: ProjectID, node_id: NodeID | None = None, **kwargs + ) -> tuple[NodeID, dict[str, Any]]: + new_node_id = node_id or NodeID(faker.uuid4()) + node_values = { + "node_id": f"{new_node_id}", + "project_uuid": f"{project_id}", "key": "simcore/services/frontend/file-picker", "version": "1.0.0", "label": "pytest_fake_node", + **kwargs, } - node_data.update(**kwargs) - project_workbench.update({f"{new_node_id}": node_data}) - await conn.execute( - projects.update() - .where(projects.c.uuid == f"{project_id}") - .values(workbench=project_workbench) + + node_row = await stack.enter_async_context( + insert_and_get_row_lifespan( + sqlalchemy_async_engine, + table=projects_nodes, + values=node_values, + pk_col=projects_nodes.c.node_id, + pk_value=f"{new_node_id}", + ) ) - return new_node_id - return _creator + created_node_entries.append((new_node_id, project_id)) + return new_node_id, node_row + + yield _creator + + # Cleanup is handled automatically by insert_and_get_row_lifespan + print("Deleting ", created_node_entries) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py index 014aff56529..a7f9ec22fd0 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py @@ -114,10 +114,9 @@ def check_value(cls, v: DataItemValue, info: ValidationInfo) -> DataItemValue: and not isinstance(v, 
PortLink) ): if port_utils.is_file_type(property_type): - if not isinstance(v, (FileLink, DownloadLink)): - raise ValueError( - f"{property_type!r} value does not validate against any of FileLink, DownloadLink or PortLink schemas" - ) + if not isinstance(v, FileLink | DownloadLink): + msg = f"{property_type!r} value does not validate against any of FileLink, DownloadLink or PortLink schemas" + raise ValueError(msg) elif property_type == "ref_contentSchema": v, _ = validate_port_content( port_key=info.data.get("key"), @@ -125,10 +124,11 @@ def check_value(cls, v: DataItemValue, info: ValidationInfo) -> DataItemValue: unit=None, content_schema=info.data.get("content_schema", {}), ) - elif isinstance(v, (list, dict)): - raise TypeError( + elif isinstance(v, list | dict): + msg = ( f"Containers as {v} currently only supported within content_schema." ) + raise TypeError(msg) return v @field_validator("value_item", "value_concrete", mode="before") @@ -194,28 +194,29 @@ async def get_value( ) async def _evaluate() -> ItemValue | None: + # NOTE: review types returned by this function !!! if isinstance(self.value, PortLink): # this is a link to another node's port - other_port_itemvalue: None | ( - ItemValue - ) = await port_utils.get_value_link_from_port_link( - self.value, - # pylint: disable=protected-access - self._node_ports._node_ports_creator_cb, - file_link_type=file_link_type, + other_port_itemvalue: ItemValue | None = ( + await port_utils.get_value_link_from_port_link( + self.value, + # pylint: disable=protected-access + self._node_ports._node_ports_creator_cb, + file_link_type=file_link_type, + ) ) return other_port_itemvalue if isinstance(self.value, FileLink): # let's get the download/upload link from storage - url_itemvalue: None | ( - AnyUrl - ) = await port_utils.get_download_link_from_storage( - # pylint: disable=protected-access - user_id=self._node_ports.user_id, - value=self.value, - link_type=file_link_type, + url_itemvalue: AnyUrl | None = ( + await port_utils.get_download_link_from_storage( + # pylint: disable=protected-access + user_id=self._node_ports.user_id, + value=self.value, + link_type=file_link_type, + ) ) return url_itemvalue @@ -256,15 +257,15 @@ async def _evaluate() -> ItemConcreteValue | None: if isinstance(self.value, PortLink): # this is a link to another node - other_port_concretevalue: None | ( - ItemConcreteValue - ) = await port_utils.get_value_from_link( - # pylint: disable=protected-access - key=self.key, - value=self.value, - file_to_key_map=self.file_to_key_map, - node_port_creator=self._node_ports._node_ports_creator_cb, # noqa: SLF001 - progress_bar=progress_bar, + other_port_concretevalue: None | ItemConcreteValue = ( + await port_utils.get_value_from_link( + # pylint: disable=protected-access + key=self.key, + value=self.value, + file_to_key_map=self.file_to_key_map, + node_port_creator=self._node_ports._node_ports_creator_cb, # noqa: SLF001 + progress_bar=progress_bar, + ) ) value = other_port_concretevalue diff --git a/services/catalog/src/simcore_service_catalog/repository/projects.py b/services/catalog/src/simcore_service_catalog/repository/projects.py index 48de3867a6c..084395baac1 100644 --- a/services/catalog/src/simcore_service_catalog/repository/projects.py +++ b/services/catalog/src/simcore_service_catalog/repository/projects.py @@ -1,40 +1,57 @@ import logging +from typing import Final import sqlalchemy as sa from models_library.services import ServiceKeyVersion from pydantic import ValidationError from 
simcore_postgres_database.models.projects import ProjectType, projects +from simcore_postgres_database.models.projects_nodes import projects_nodes from ._base import BaseRepository _logger = logging.getLogger(__name__) +_IGNORED_SERVICE_KEYS: Final[set[str]] = { + # NOTE: frontend only nodes + "simcore/services/frontend/file-picker", + "simcore/services/frontend/nodes-group", +} + + class ProjectsRepository(BaseRepository): async def list_services_from_published_templates(self) -> list[ServiceKeyVersion]: - list_of_published_services: list[ServiceKeyVersion] = [] async with self.db_engine.connect() as conn: - async for row in await conn.stream( - sa.select(projects).where( - (projects.c.type == ProjectType.TEMPLATE) - & (projects.c.published.is_(True)) + query = ( + sa.select(projects_nodes.c.key, projects_nodes.c.version) + .distinct() + .select_from( + projects_nodes.join( + projects, projects_nodes.c.project_uuid == projects.c.uuid + ) + ) + .where( + sa.and_( + projects.c.type == ProjectType.TEMPLATE, + projects.c.published.is_(True), + projects_nodes.c.key.notin_(_IGNORED_SERVICE_KEYS), + ) ) - ): - project_workbench = row.workbench - for node in project_workbench: - service = project_workbench[node] - try: - if ( - "file-picker" in service["key"] - or "nodes-group" in service["key"] - ): - # these 2 are not going to pass the validation tests, they are frontend only nodes. - continue - list_of_published_services.append(ServiceKeyVersion(**service)) - except ValidationError: - _logger.warning( - "service %s could not be validated", service, exc_info=True - ) - continue - - return list_of_published_services + ) + + services = [] + async for row in await conn.stream(query): + try: + service = ServiceKeyVersion.model_validate( + row, from_attributes=True + ) + services.append(service) + except ValidationError: + _logger.warning( + "service with key=%s and version=%s could not be validated", + row.key, + row.version, + exc_info=True, + ) + + return services diff --git a/services/catalog/tests/unit/with_dbs/test_repositories.py b/services/catalog/tests/unit/with_dbs/test_repositories.py index ec8fca12825..5e68dad4138 100644 --- a/services/catalog/tests/unit/with_dbs/test_repositories.py +++ b/services/catalog/tests/unit/with_dbs/test_repositories.py @@ -23,9 +23,10 @@ from packaging import version from pydantic import EmailStr, HttpUrl, TypeAdapter from pytest_simcore.helpers.catalog_services import CreateFakeServiceDataCallable -from pytest_simcore.helpers.faker_factories import random_project +from pytest_simcore.helpers.faker_factories import random_project, random_project_node from pytest_simcore.helpers.postgres_tools import insert_and_get_row_lifespan from simcore_postgres_database.models.projects import ProjectType, projects +from simcore_postgres_database.models.projects_nodes import projects_nodes from simcore_service_catalog.models.services_db import ( ServiceAccessRightsDB, ServiceDBFilters, @@ -812,21 +813,41 @@ async def test_list_services_from_published_templates( type=ProjectType.TEMPLATE, published=True, prj_owner=user["id"], - workbench={ - "node-1": { - "key": "simcore/services/dynamic/jupyterlab", - "version": "1.0.0", - }, - "node-2": { - "key": "simcore/services/frontend/file-picker", - "version": "1.0.0", - }, - }, ), pk_col=projects.c.uuid, pk_value="template-1", ) ) + await stack.enter_async_context( + insert_and_get_row_lifespan( + sqlalchemy_async_engine, + table=projects_nodes, + values=random_project_node( + node_id="node-1.1", + project_uuid="template-1", + 
key="simcore/services/dynamic/jupyterlab", + version="1.0.0", + label="jupyterlab", + ), + pk_col=projects_nodes.c.node_id, + pk_value="node-1.1", + ) + ) + await stack.enter_async_context( + insert_and_get_row_lifespan( + sqlalchemy_async_engine, + table=projects_nodes, + values=random_project_node( + node_id="node-1.2", + project_uuid="template-1", + key="simcore/services/frontend/file-picker", + version="1.0.0", + label="file-picker", + ), + pk_col=projects_nodes.c.node_id, + pk_value="node-1.2", + ) + ) await stack.enter_async_context( insert_and_get_row_lifespan( sqlalchemy_async_engine, @@ -836,17 +857,26 @@ async def test_list_services_from_published_templates( type=ProjectType.TEMPLATE, published=False, prj_owner=user["id"], - workbench={ - "node-1": { - "key": "simcore/services/dynamic/some-service", - "version": "2.0.0", - }, - }, ), pk_col=projects.c.uuid, pk_value="template-2", ) ) + await stack.enter_async_context( + insert_and_get_row_lifespan( + sqlalchemy_async_engine, + table=projects_nodes, + values=random_project_node( + node_id="node-2.1", + project_uuid="template-2", + key="simcore/services/dynamic/some-service", + version="2.0.0", + label="some-service", + ), + pk_col=projects_nodes.c.node_id, + pk_value="node-2.1", + ) + ) # Act: Call the method services = await projects_repo.list_services_from_published_templates() @@ -874,21 +904,41 @@ async def test_list_services_from_published_templates_with_invalid_service( type=ProjectType.TEMPLATE, published=True, prj_owner=user["id"], - workbench={ - "node-1": { - "key": "simcore/services/frontend/file-picker", - "version": "1.0.0", - }, - "node-2": { - "key": "simcore/services/dynamic/invalid-service", - "version": "invalid", - }, - }, ), pk_col=projects.c.uuid, pk_value="template-1", ) ) + await stack.enter_async_context( + insert_and_get_row_lifespan( + sqlalchemy_async_engine, + table=projects_nodes, + values=random_project_node( + node_id="node-1.1", + project_uuid="template-1", + key="simcore/services/frontend/file-picker", + version="1.0.0", + label="file-picker", + ), + pk_col=projects_nodes.c.node_id, + pk_value="node-1.1", + ) + ) + await stack.enter_async_context( + insert_and_get_row_lifespan( + sqlalchemy_async_engine, + table=projects_nodes, + values=random_project_node( + node_id="node-1.2", + project_uuid="template-1", + key="simcore/services/dynamic/invalid-service", + version="invalid", # NOTE: invalid version + label="invalid-service", + ), + pk_col=projects_nodes.c.node_id, + pk_value="node-1.2", + ) + ) # Act: Call the method and capture logs with caplog.at_level(logging.WARNING): @@ -897,7 +947,7 @@ async def test_list_services_from_published_templates_with_invalid_service( # Assert: Validate the results assert len(services) == 0 # No valid services should be returned assert ( - "service {'key': 'simcore/services/dynamic/invalid-service', 'version': 'invalid'} could not be validated" + "service with key=simcore/services/dynamic/invalid-service and version=invalid could not be validated" in caplog.text ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py index eba9954771c..10103909a63 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py @@ -400,6 +400,9 @@ async def 
generate_tasks_list_from_project( raise WalletNotEnoughCreditsError( wallet_name=wallet_info.wallet_name, wallet_credit_amount=wallet_info.wallet_credit_amount, + user_id=user_id, + product_name=product_name, + project_id=project.uuid, ) assert rabbitmq_rpc_client # nosec diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects.py index 2935b6ec251..8e3c4eb6e72 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/projects.py @@ -3,10 +3,14 @@ import sqlalchemy as sa from models_library.projects import ProjectAtDB, ProjectID from models_library.projects_nodes_io import NodeID -from simcore_postgres_database.utils_projects_nodes import ProjectNodesRepo +from simcore_postgres_database.utils_projects_nodes import ( + ProjectNodesRepo, + create_workbench_subquery, +) +from simcore_postgres_database.utils_repos import pass_or_acquire_connection from ....core.errors import ProjectNotFoundError -from ..tables import projects +from ..tables import projects, projects_nodes from ._base import BaseRepository logger = logging.getLogger(__name__) @@ -14,24 +18,45 @@ class ProjectsRepository(BaseRepository): async def get_project(self, project_id: ProjectID) -> ProjectAtDB: + workbench_subquery = create_workbench_subquery(f"{project_id}") + async with self.db_engine.connect() as conn: - row = ( - await conn.execute( - sa.select(projects).where(projects.c.uuid == str(project_id)) + query = ( + sa.select( + projects, + sa.func.coalesce( + workbench_subquery.c.workbench, sa.text("'{}'::json") + ).label("workbench"), + ) + .select_from( + projects.outerjoin( + workbench_subquery, + projects.c.uuid == workbench_subquery.c.project_uuid, + ) ) - ).one_or_none() - if not row: - raise ProjectNotFoundError(project_id=project_id) - return ProjectAtDB.model_validate(row) + .where(projects.c.uuid == str(project_id)) + ) + result = await conn.execute(query) + row = result.one_or_none() + if not row: + raise ProjectNotFoundError(project_id=project_id) + return ProjectAtDB.model_validate(row) async def is_node_present_in_workbench( self, project_id: ProjectID, node_uuid: NodeID ) -> bool: - try: - project = await self.get_project(project_id) - return f"{node_uuid}" in project.workbench - except ProjectNotFoundError: - return False + async with pass_or_acquire_connection(self.db_engine) as conn: + stmt = ( + sa.select(sa.literal(1)) + .where( + projects_nodes.c.project_uuid == str(project_id), + projects_nodes.c.node_id == str(node_uuid), + ) + .limit(1) + ) + + result = await conn.execute(stmt) + return result.scalar_one_or_none() is not None async def get_project_id_from_node(self, node_id: NodeID) -> ProjectID: async with self.db_engine.connect() as conn: diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/tables.py b/services/director-v2/src/simcore_service_director_v2/modules/db/tables.py index f47250b651e..6e11cf8b40c 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/tables.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/tables.py @@ -10,17 +10,21 @@ ) from simcore_postgres_database.models.projects import ProjectType, projects from simcore_postgres_database.models.projects_networks import projects_networks +from simcore_postgres_database.models.projects_nodes import 
projects_nodes -__all__ = [ +__all__: tuple[str, ...] = ( + "NodeClass", + "ProjectType", + "StateType", "comp_pipeline", + "comp_run_snapshot_tasks", "comp_runs", "comp_tasks", "groups_extra_properties", - "NodeClass", - "projects_networks", "projects", - "ProjectType", - "StateType", + "projects_networks", + "projects_nodes", "user_to_groups", - "comp_run_snapshot_tasks", -] +) + +# nopycln: file diff --git a/services/director-v2/src/simcore_service_director_v2/utils/dags.py b/services/director-v2/src/simcore_service_director_v2/utils/dags.py index 13a2dfe8f39..f0a55669c83 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/dags.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/dags.py @@ -29,6 +29,7 @@ def create_complete_dag(workbench: NodesDict) -> nx.DiGraph: dag_graph: nx.DiGraph = nx.DiGraph() for node_id, node in workbench.items(): assert node.state # nosec + dag_graph.add_node( node_id, name=node.label, @@ -43,6 +44,9 @@ def create_complete_dag(workbench: NodesDict) -> nx.DiGraph: if node.input_nodes: for input_node_id in node.input_nodes: predecessor_node = workbench.get(f"{input_node_id}") + assert ( # nosec + predecessor_node + ), f"Node {input_node_id} not found in workbench" if predecessor_node: dag_graph.add_edge(str(input_node_id), node_id) diff --git a/services/director-v2/tests/helpers/shared_comp_utils.py b/services/director-v2/tests/helpers/shared_comp_utils.py index f2ce2ff4283..7a664089d60 100644 --- a/services/director-v2/tests/helpers/shared_comp_utils.py +++ b/services/director-v2/tests/helpers/shared_comp_utils.py @@ -4,7 +4,7 @@ import httpx from models_library.api_schemas_directorv2.computations import ComputationGet -from models_library.projects import ProjectAtDB +from models_library.projects import ProjectID from models_library.projects_pipeline import PipelineDetails from models_library.projects_state import RunningState from models_library.users import UserID @@ -21,27 +21,27 @@ async def assert_computation_task_out_obj( task_out: ComputationGet, *, - project: ProjectAtDB, - exp_task_state: RunningState, - exp_pipeline_details: PipelineDetails, + project_uuid: ProjectID, + expected_task_state: RunningState, + expected_pipeline_details: PipelineDetails, iteration: PositiveInt | None, ) -> None: - assert task_out.id == project.uuid - assert task_out.state == exp_task_state - assert task_out.url.path == f"/v2/computations/{project.uuid}" - if exp_task_state in [ + assert task_out.id == project_uuid + assert task_out.state == expected_task_state + assert task_out.url.path == f"/v2/computations/{project_uuid}" + if expected_task_state in [ RunningState.PUBLISHED, RunningState.PENDING, RunningState.STARTED, ]: assert task_out.stop_url - assert task_out.stop_url.path == f"/v2/computations/{project.uuid}:stop" + assert task_out.stop_url.path == f"/v2/computations/{project_uuid}:stop" else: assert task_out.stop_url is None assert task_out.iteration == iteration # check pipeline details contents received_task_out_pipeline = task_out.pipeline_details.model_dump() - expected_task_out_pipeline = exp_pipeline_details.model_dump() + expected_task_out_pipeline = expected_pipeline_details.model_dump() assert received_task_out_pipeline == expected_task_out_pipeline diff --git a/services/director-v2/tests/integration/01/test_computation_api.py b/services/director-v2/tests/integration/01/test_computation_api.py index f5dce1567de..2cb0ecca030 100644 --- a/services/director-v2/tests/integration/01/test_computation_api.py +++ 
b/services/director-v2/tests/integration/01/test_computation_api.py @@ -25,7 +25,7 @@ from models_library.clusters import ClusterAuthentication from models_library.projects import ProjectAtDB from models_library.projects_nodes import NodeState -from models_library.projects_nodes_io import NodeID +from models_library.projects_nodes_io import NodeID, NodeIDStr from models_library.projects_pipeline import PipelineDetails from models_library.projects_state import RunningState from models_library.users import UserID @@ -206,7 +206,7 @@ async def test_start_empty_computation_is_refused( ): await create_pipeline( async_client, - project=empty_project, + project_uuid=empty_project.uuid, user_id=user["id"], start_pipeline=True, product_name=osparc_product_name, @@ -415,26 +415,29 @@ async def test_run_partial_computation( ) def _convert_to_pipeline_details( - project: ProjectAtDB, - exp_pipeline_adj_list: dict[int, list[int]], - exp_node_states: dict[int, dict[str, Any]], + workbench_node_uuids: list[NodeIDStr], + expected_pipeline_adj_list: dict[int, list[int]], + expected_node_states: dict[int, dict[str, Any]], ) -> PipelineDetails: - workbench_node_uuids = list(project.workbench.keys()) + converted_adj_list: dict[NodeID, list[NodeID]] = {} - for node_key, next_nodes in exp_pipeline_adj_list.items(): + for node_key, next_nodes in expected_pipeline_adj_list.items(): converted_adj_list[NodeID(workbench_node_uuids[node_key])] = [ NodeID(workbench_node_uuids[n]) for n in next_nodes ] converted_node_states: dict[NodeID, NodeState] = { - NodeID(workbench_node_uuids[n]): NodeState( - modified=s["modified"], + NodeID(workbench_node_uuids[node_index]): NodeState( + modified=node_state["modified"], dependencies={ - NodeID(workbench_node_uuids[dep_n]) for dep_n in s["dependencies"] + NodeID(workbench_node_uuids[dep_n]) + for dep_n in node_state["dependencies"] }, - currentStatus=s.get("currentStatus", RunningState.NOT_STARTED), - progress=s.get("progress"), + current_status=node_state.get( + "currentStatus", RunningState.NOT_STARTED + ), + progress=node_state.get("progress"), ) - for n, s in exp_node_states.items() + for node_index, node_state in expected_node_states.items() } pipeline_progress = 0 for node_id in converted_adj_list: @@ -448,13 +451,15 @@ def _convert_to_pipeline_details( # convert the ids to the node uuids from the project expected_pipeline_details = _convert_to_pipeline_details( - sleepers_project, params.exp_pipeline_adj_list, params.exp_node_states + workbench_node_uuids=list(sleepers_project.workbench.keys()), + expected_pipeline_adj_list=params.exp_pipeline_adj_list, + expected_node_states=params.exp_node_states, ) # send a valid project with sleepers task_out = await create_pipeline( async_client, - project=sleepers_project, + project_uuid=sleepers_project.uuid, user_id=user["id"], start_pipeline=True, product_name=osparc_product_name, @@ -468,9 +473,9 @@ def _convert_to_pipeline_details( # check the contents is correctb await assert_computation_task_out_obj( task_out, - project=sleepers_project, - exp_task_state=RunningState.PUBLISHED, - exp_pipeline_details=expected_pipeline_details, + project_uuid=sleepers_project.uuid, + expected_task_state=RunningState.PUBLISHED, + expected_pipeline_details=expected_pipeline_details, iteration=1, ) @@ -479,13 +484,15 @@ def _convert_to_pipeline_details( async_client, task_out.url, user["id"], sleepers_project.uuid ) expected_pipeline_details_after_run = _convert_to_pipeline_details( - sleepers_project, params.exp_pipeline_adj_list, 
params.exp_node_states_after_run + workbench_node_uuids=list(sleepers_project.workbench.keys()), + expected_pipeline_adj_list=params.exp_pipeline_adj_list, + expected_node_states=params.exp_node_states_after_run, ) await assert_computation_task_out_obj( task_out, - project=sleepers_project, - exp_task_state=RunningState.SUCCESS, - exp_pipeline_details=expected_pipeline_details_after_run, + project_uuid=sleepers_project.uuid, + expected_task_state=RunningState.SUCCESS, + expected_pipeline_details=expected_pipeline_details_after_run, iteration=1, ) @@ -498,7 +505,7 @@ def _convert_to_pipeline_details( ): await create_pipeline( async_client, - project=sleepers_project, + project_uuid=sleepers_project.uuid, user_id=user["id"], start_pipeline=True, product_name=osparc_product_name, @@ -514,13 +521,13 @@ def _convert_to_pipeline_details( # force run it this time. # the task are up-to-date but we force run them expected_pipeline_details_forced = _convert_to_pipeline_details( - sleepers_project, - params.exp_pipeline_adj_list_after_force_run, - params.exp_node_states_after_force_run, + workbench_node_uuids=list(sleepers_project.workbench.keys()), + expected_pipeline_adj_list=params.exp_pipeline_adj_list_after_force_run, + expected_node_states=params.exp_node_states_after_force_run, ) task_out = await create_pipeline( async_client, - project=sleepers_project, + project_uuid=sleepers_project.uuid, user_id=user["id"], start_pipeline=True, product_name=osparc_product_name, @@ -536,9 +543,9 @@ def _convert_to_pipeline_details( await assert_computation_task_out_obj( task_out, - project=sleepers_project, - exp_task_state=RunningState.PUBLISHED, - exp_pipeline_details=expected_pipeline_details_forced, + project_uuid=sleepers_project.uuid, + expected_task_state=RunningState.PUBLISHED, + expected_pipeline_details=expected_pipeline_details_forced, iteration=2, ) @@ -570,7 +577,7 @@ async def test_run_computation( # send a valid project with sleepers task_out = await create_pipeline( async_client, - project=sleepers_project, + project_uuid=sleepers_project.uuid, user_id=user["id"], start_pipeline=True, product_name=osparc_product_name, @@ -581,9 +588,9 @@ async def test_run_computation( # check the contents is correct: a pipeline that just started gets PUBLISHED await assert_computation_task_out_obj( task_out, - project=sleepers_project, - exp_task_state=RunningState.PUBLISHED, - exp_pipeline_details=fake_workbench_computational_pipeline_details, + project_uuid=sleepers_project.uuid, + expected_task_state=RunningState.PUBLISHED, + expected_pipeline_details=fake_workbench_computational_pipeline_details, iteration=1, ) @@ -603,9 +610,9 @@ async def test_run_computation( await assert_computation_task_out_obj( task_out, - project=sleepers_project, - exp_task_state=RunningState.SUCCESS, - exp_pipeline_details=fake_workbench_computational_pipeline_details_completed, + project_uuid=sleepers_project.uuid, + expected_task_state=RunningState.SUCCESS, + expected_pipeline_details=fake_workbench_computational_pipeline_details_completed, iteration=1, ) @@ -617,7 +624,7 @@ async def test_run_computation( ): await create_pipeline( async_client, - project=sleepers_project, + project_uuid=sleepers_project.uuid, user_id=user["id"], start_pipeline=True, product_name=osparc_product_name, @@ -641,7 +648,7 @@ async def test_run_computation( expected_pipeline_details_forced.progress = 0 task_out = await create_pipeline( async_client, - project=sleepers_project, + project_uuid=sleepers_project.uuid, user_id=user["id"], 
start_pipeline=True, product_name=osparc_product_name, @@ -651,9 +658,9 @@ async def test_run_computation( # check the contents is correct await assert_computation_task_out_obj( task_out, - project=sleepers_project, - exp_task_state=RunningState.PUBLISHED, - exp_pipeline_details=expected_pipeline_details_forced, # NOTE: here the pipeline already ran so its states are different + project_uuid=sleepers_project.uuid, + expected_task_state=RunningState.PUBLISHED, + expected_pipeline_details=expected_pipeline_details_forced, # NOTE: here the pipeline already ran so its states are different iteration=2, ) @@ -663,9 +670,9 @@ async def test_run_computation( ) await assert_computation_task_out_obj( task_out, - project=sleepers_project, - exp_task_state=RunningState.SUCCESS, - exp_pipeline_details=fake_workbench_computational_pipeline_details_completed, + project_uuid=sleepers_project.uuid, + expected_task_state=RunningState.SUCCESS, + expected_pipeline_details=fake_workbench_computational_pipeline_details_completed, iteration=2, ) @@ -694,7 +701,7 @@ async def test_abort_computation( # send a valid project with sleepers task_out = await create_pipeline( async_client, - project=sleepers_project, + project_uuid=sleepers_project.uuid, user_id=user["id"], start_pipeline=True, product_name=osparc_product_name, @@ -704,9 +711,9 @@ async def test_abort_computation( # check the contents is correctb await assert_computation_task_out_obj( task_out, - project=sleepers_project, - exp_task_state=RunningState.PUBLISHED, - exp_pipeline_details=fake_workbench_computational_pipeline_details, + project_uuid=sleepers_project.uuid, + expected_task_state=RunningState.PUBLISHED, + expected_pipeline_details=fake_workbench_computational_pipeline_details, iteration=1, ) @@ -771,7 +778,7 @@ async def test_update_and_delete_computation( # send a valid project with sleepers task_out = await create_pipeline( async_client, - project=sleepers_project, + project_uuid=sleepers_project.uuid, user_id=user["id"], start_pipeline=False, product_name=osparc_product_name, @@ -781,16 +788,16 @@ async def test_update_and_delete_computation( # check the contents is correctb await assert_computation_task_out_obj( task_out, - project=sleepers_project, - exp_task_state=RunningState.NOT_STARTED, - exp_pipeline_details=fake_workbench_computational_pipeline_details_not_started, + project_uuid=sleepers_project.uuid, + expected_task_state=RunningState.NOT_STARTED, + expected_pipeline_details=fake_workbench_computational_pipeline_details_not_started, iteration=None, ) # update the pipeline task_out = await create_pipeline( async_client, - project=sleepers_project, + project_uuid=sleepers_project.uuid, user_id=user["id"], start_pipeline=False, product_name=osparc_product_name, @@ -800,16 +807,16 @@ async def test_update_and_delete_computation( # check the contents is correctb await assert_computation_task_out_obj( task_out, - project=sleepers_project, - exp_task_state=RunningState.NOT_STARTED, - exp_pipeline_details=fake_workbench_computational_pipeline_details_not_started, + project_uuid=sleepers_project.uuid, + expected_task_state=RunningState.NOT_STARTED, + expected_pipeline_details=fake_workbench_computational_pipeline_details_not_started, iteration=None, ) # update the pipeline task_out = await create_pipeline( async_client, - project=sleepers_project, + project_uuid=sleepers_project.uuid, user_id=user["id"], start_pipeline=False, product_name=osparc_product_name, @@ -819,16 +826,16 @@ async def test_update_and_delete_computation( # check the 
contents is correctb await assert_computation_task_out_obj( task_out, - project=sleepers_project, - exp_task_state=RunningState.NOT_STARTED, - exp_pipeline_details=fake_workbench_computational_pipeline_details_not_started, + project_uuid=sleepers_project.uuid, + expected_task_state=RunningState.NOT_STARTED, + expected_pipeline_details=fake_workbench_computational_pipeline_details_not_started, iteration=None, ) # start it now task_out = await create_pipeline( async_client, - project=sleepers_project, + project_uuid=sleepers_project.uuid, user_id=user["id"], start_pipeline=True, product_name=osparc_product_name, @@ -837,9 +844,9 @@ async def test_update_and_delete_computation( # check the contents is correctb await assert_computation_task_out_obj( task_out, - project=sleepers_project, - exp_task_state=RunningState.PUBLISHED, - exp_pipeline_details=fake_workbench_computational_pipeline_details, + project_uuid=sleepers_project.uuid, + expected_task_state=RunningState.PUBLISHED, + expected_pipeline_details=fake_workbench_computational_pipeline_details, iteration=1, ) @@ -859,7 +866,7 @@ async def test_update_and_delete_computation( with pytest.raises(httpx.HTTPStatusError, match=f"{status.HTTP_409_CONFLICT}"): await create_pipeline( async_client, - project=sleepers_project, + project_uuid=sleepers_project.uuid, user_id=user["id"], start_pipeline=False, product_name=osparc_product_name, @@ -912,7 +919,7 @@ async def test_pipeline_with_no_computational_services_still_create_correct_comp ): await create_pipeline( async_client, - project=project_with_dynamic_node, + project_uuid=project_with_dynamic_node.uuid, user_id=user["id"], start_pipeline=True, product_name=osparc_product_name, @@ -922,7 +929,7 @@ async def test_pipeline_with_no_computational_services_still_create_correct_comp # still this pipeline shall be createable if we do not want to start it await create_pipeline( async_client, - project=project_with_dynamic_node, + project_uuid=project_with_dynamic_node.uuid, user_id=user["id"], start_pipeline=False, product_name=osparc_product_name, @@ -1119,7 +1126,7 @@ async def test_burst_create_computations( [ create_pipeline( async_client, - project=sleepers_project, + project_uuid=sleepers_project.uuid, user_id=user["id"], product_name=osparc_product_name, product_api_base_url=osparc_product_api_base_url, @@ -1130,7 +1137,7 @@ async def test_burst_create_computations( + [ create_pipeline( async_client, - project=sleepers_project2, + project_uuid=sleepers_project2.uuid, user_id=user["id"], product_name=osparc_product_name, product_api_base_url=osparc_product_api_base_url, @@ -1148,7 +1155,7 @@ async def test_burst_create_computations( [ create_pipeline( async_client, - project=sleepers_project, + project_uuid=sleepers_project.uuid, user_id=user["id"], product_name=osparc_product_name, product_api_base_url=osparc_product_api_base_url, @@ -1159,7 +1166,7 @@ async def test_burst_create_computations( + [ create_pipeline( async_client, - project=sleepers_project2, + project_uuid=sleepers_project2.uuid, user_id=user["id"], product_name=osparc_product_name, product_api_base_url=osparc_product_api_base_url, diff --git a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py index aa70662048c..8f758931da1 100644 --- a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py +++ 
b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py @@ -308,7 +308,7 @@ async def current_study( # create entries in comp_task table in order to pull output ports await create_pipeline( async_client, - project=project_at_db, + project_uuid=project_at_db.uuid, user_id=current_user["id"], start_pipeline=False, product_name=osparc_product_name, @@ -996,7 +996,7 @@ async def test_nodeports_integration( # STEP 2 task_out = await create_pipeline( async_client, - project=current_study, + project_uuid=current_study.uuid, user_id=current_user["id"], start_pipeline=True, product_name=osparc_product_name, @@ -1010,9 +1010,9 @@ async def test_nodeports_integration( await assert_computation_task_out_obj( task_out, - project=current_study, - exp_task_state=RunningState.SUCCESS, - exp_pipeline_details=PipelineDetails.model_validate(fake_dy_success), + project_uuid=current_study.uuid, + expected_task_state=RunningState.SUCCESS, + expected_pipeline_details=PipelineDetails.model_validate(fake_dy_success), iteration=1, ) update_project_workbench_with_comp_tasks(str(current_study.uuid)) diff --git a/services/director-v2/tests/integration/conftest.py b/services/director-v2/tests/integration/conftest.py index 13a56f99e98..c94d96ae286 100644 --- a/services/director-v2/tests/integration/conftest.py +++ b/services/director-v2/tests/integration/conftest.py @@ -5,18 +5,19 @@ import asyncio import uuid from collections.abc import AsyncIterator, Awaitable, Callable +from typing import Any from unittest.mock import AsyncMock import httpx import pytest import sqlalchemy as sa from models_library.api_schemas_directorv2.computations import ComputationGet -from models_library.projects import ProjectAtDB +from models_library.projects import ProjectID from models_library.users import UserID from pytest_mock import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_postgres_database.models.comp_tasks import comp_tasks -from simcore_postgres_database.models.projects import projects +from simcore_postgres_database.models.projects_nodes import projects_nodes from starlette import status from tenacity import retry from tenacity.retry import retry_if_exception_type @@ -35,31 +36,40 @@ def mock_env(mock_env: EnvVarsDict, minio_s3_settings_envs: EnvVarsDict) -> EnvV def update_project_workbench_with_comp_tasks( postgres_db: sa.engine.Engine, ) -> Callable: - def updator(project_uuid: str): - with postgres_db.connect() as con: + def _updator(project_uuid: str): + with postgres_db.connect() as con, con.begin(): + + # select all projects_nodes for this project result = con.execute( - projects.select().where(projects.c.uuid == project_uuid) + projects_nodes.select().where( + projects_nodes.c.project_uuid == project_uuid + ) ) - prj_row = result.first() - assert prj_row - prj_workbench = prj_row.workbench + project_nodes_map: dict[str, Any] = { + row.node_id: row._asdict() for row in result + } + # comp_tasks get and run_hash and outputs result = con.execute( - comp_tasks.select().where(comp_tasks.c.project_id == project_uuid) - ) - # let's get the results and run_hash - for task_row in result: - # pass these to the project workbench - prj_workbench[task_row.node_id]["outputs"] = task_row.outputs - prj_workbench[task_row.node_id]["runHash"] = task_row.run_hash - - con.execute( - projects.update() # pylint:disable=no-value-for-parameter - .values(workbench=prj_workbench) - .where(projects.c.uuid == project_uuid) + comp_tasks.select().where(comp_tasks.c.project_id == 
f"{project_uuid}") ) + comp_tasks_rows = result.fetchall() + for task_row in comp_tasks_rows: + project_nodes_map[task_row.node_id]["outputs"] = task_row.outputs + project_nodes_map[task_row.node_id]["run_hash"] = task_row.run_hash + + # update projects_nodes with comp_tasks data + for node_id, node_data in project_nodes_map.items(): + con.execute( + projects_nodes.update() # pylint:disable=no-value-for-parameter + .values(**node_data) + .where( + (projects_nodes.c.node_id == node_id) + & (projects_nodes.c.project_uuid == project_uuid) + ) + ) - return updator + return _updator @pytest.fixture(scope="session") @@ -85,7 +95,7 @@ async def create_pipeline( async def _creator( client: httpx.AsyncClient, *, - project: ProjectAtDB, + project_uuid: ProjectID, user_id: UserID, product_name: str, product_api_base_url: str, @@ -96,7 +106,7 @@ async def _creator( COMPUTATION_URL, json={ "user_id": user_id, - "project_id": str(project.uuid), + "project_id": str(project_uuid), "start_pipeline": start_pipeline, "product_name": product_name, "product_api_base_url": product_api_base_url, diff --git a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_manager.py b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_manager.py index 134d03f05ec..ee53ef6b1ae 100644 --- a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_manager.py +++ b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_manager.py @@ -411,7 +411,12 @@ async def test_empty_pipeline_is_not_scheduled( use_on_demand_clusters=False, collection_run_id=fake_collection_run_id, ) - assert len(caplog.records) == 1 - assert "no computational dag defined" in caplog.records[0].message + + warning_log_regs = [ + log_rec for log_rec in caplog.records if log_rec.levelname == "WARNING" + ] + assert len(warning_log_regs) == 1 + assert "no computational dag defined" in warning_log_regs[0].message + await assert_comp_runs_empty(sqlalchemy_async_engine) _assert_scheduler_client_not_called(scheduler_rabbit_client_parser) diff --git a/services/storage/src/simcore_service_storage/modules/db/projects.py b/services/storage/src/simcore_service_storage/modules/db/projects.py index 765430a6dd1..71f2c3f6e9f 100644 --- a/services/storage/src/simcore_service_storage/modules/db/projects.py +++ b/services/storage/src/simcore_service_storage/modules/db/projects.py @@ -1,10 +1,10 @@ from collections.abc import AsyncIterator -from contextlib import suppress +from typing import NamedTuple import sqlalchemy as sa -from models_library.projects import ProjectAtDB, ProjectID, ProjectIDStr +from models_library.projects import ProjectID, ProjectIDStr from models_library.projects_nodes_io import NodeIDStr -from pydantic import ValidationError +from simcore_postgres_database.models.projects_nodes import projects_nodes from simcore_postgres_database.storage_models import projects from simcore_postgres_database.utils_repos import pass_or_acquire_connection from sqlalchemy.ext.asyncio import AsyncConnection @@ -12,41 +12,29 @@ from ._base import BaseRepository +class ProjectBasicTuple(NamedTuple): + uuid: ProjectID + name: str + + class ProjectRepository(BaseRepository): async def list_valid_projects_in( self, *, connection: AsyncConnection | None = None, - include_uuids: list[ProjectID], - ) -> AsyncIterator[ProjectAtDB]: + project_uuids: list[ProjectID], + ) -> AsyncIterator[ProjectBasicTuple]: """ NOTE that it lists ONLY validated projects in 'project_uuids' """ async with pass_or_acquire_connection(self.db_engine, connection) as conn: async for 
row in await conn.stream( - sa.select(projects).where( - projects.c.uuid.in_(f"{pid}" for pid in include_uuids) + sa.select(projects.c.uuid, projects.c.name).where( + projects.c.uuid.in_(f"{pid}" for pid in project_uuids) ) ): - with suppress(ValidationError): - yield ProjectAtDB.model_validate(row) - - async def project_exists( - self, - *, - connection: AsyncConnection | None = None, - project_uuid: ProjectID, - ) -> bool: - async with pass_or_acquire_connection(self.db_engine, connection) as conn: - return bool( - await conn.scalar( - sa.select(sa.func.count()) - .select_from(projects) - .where(projects.c.uuid == f"{project_uuid}") - ) - == 1 - ) + yield ProjectBasicTuple(uuid=ProjectID(row.uuid), name=row.name) async def get_project_id_and_node_id_to_names_map( self, @@ -54,16 +42,26 @@ async def get_project_id_and_node_id_to_names_map( connection: AsyncConnection | None = None, project_uuids: list[ProjectID], ) -> dict[ProjectID, dict[ProjectIDStr | NodeIDStr, str]]: - mapping = {} + names_map: dict[ProjectID, dict[ProjectIDStr | NodeIDStr, str]] = {} async with pass_or_acquire_connection(self.db_engine, connection) as conn: async for row in await conn.stream( - sa.select(projects.c.uuid, projects.c.name, projects.c.workbench).where( + sa.select(projects.c.uuid, projects.c.name).where( projects.c.uuid.in_(f"{pid}" for pid in project_uuids) ) ): - mapping[ProjectID(f"{row.uuid}")] = {f"{row.uuid}": row.name} | { - f"{node_id}": node["label"] - for node_id, node in row.workbench.items() - } + names_map[ProjectID(row.uuid)] = {f"{row.uuid}": row.name} + + async for row in await conn.stream( + sa.select( + projects_nodes.c.node_id, + projects_nodes.c.project_uuid, + projects_nodes.c.label, + ).where( + projects_nodes.c.project_uuid.in_( + [f"{project_uuid}" for project_uuid in project_uuids] + ) + ) + ): + names_map[ProjectID(row.project_uuid)] |= {f"{row.node_id}": row.label} - return mapping + return names_map diff --git a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py index 7e2dbdc5baf..9d842472c2f 100644 --- a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py +++ b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py @@ -41,6 +41,7 @@ from servicelib.logging_utils import log_context from servicelib.progress_bar import ProgressBarData from servicelib.utils import ensure_ends_with, limited_gather +from simcore_postgres_database.utils_projects import ProjectsRepo from simcore_postgres_database.utils_repos import transaction_context from sqlalchemy.ext.asyncio import AsyncEngine @@ -116,25 +117,26 @@ async def _add_frontend_needed_data( # with information from the projects table! # NOTE: This part with the projects, should be done in the client code not here! 
- prj_names_mapping: dict[ProjectID | NodeID, str] = {} + repo = ProjectRepository.instance(engine) + valid_project_uuids = [ + proj_data.uuid + async for proj_data in repo.list_valid_projects_in(project_uuids=project_ids) + ] - async for proj_data in ProjectRepository.instance(engine).list_valid_projects_in( - include_uuids=project_ids - ): - prj_names_mapping |= {proj_data.uuid: proj_data.name} | { - NodeID(node_id): node_data.label - for node_id, node_data in proj_data.workbench.items() - } + prj_names_mapping = await repo.get_project_id_and_node_id_to_names_map( + project_uuids=valid_project_uuids + ) clean_data: list[FileMetaData] = [] for d in data: if d.project_id not in prj_names_mapping: continue assert d.project_id # nosec - d.project_name = prj_names_mapping[d.project_id] - if d.node_id in prj_names_mapping: + names_mapping = prj_names_mapping[d.project_id] + d.project_name = names_mapping[f"{d.project_id}"] + if f"{d.node_id}" in names_mapping: assert d.node_id # nosec - d.node_name = prj_names_mapping[d.node_id] + d.node_name = names_mapping[f"{d.node_id}"] if d.node_name and d.project_name: clean_data.append(d) @@ -169,7 +171,7 @@ async def list_datasets(self, user_id: UserID) -> list[DatasetMetaData]: ) async for prj_data in ProjectRepository.instance( get_db_engine(self.app) - ).list_valid_projects_in(include_uuids=readable_projects_ids) + ).list_valid_projects_in(project_uuids=readable_projects_ids) ] async def list_files_in_dataset( @@ -781,8 +783,8 @@ async def deep_copy_project_simcore_s3( node_mapping: dict[NodeID, NodeID], task_progress: ProgressBarData, ) -> None: - src_project_uuid: ProjectID = ProjectID(src_project["uuid"]) - dst_project_uuid: ProjectID = ProjectID(dst_project["uuid"]) + src_project_uuid = ProjectID(src_project["uuid"]) + dst_project_uuid = ProjectID(dst_project["uuid"]) with log_context( _logger, logging.INFO, @@ -792,9 +794,9 @@ async def deep_copy_project_simcore_s3( task_progress.description = "Checking study access rights..." 
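The hunks above drop the old `proj_data.workbench` traversal in `_add_frontend_needed_data`: the code now collects the valid project UUIDs via `list_valid_projects_in` and then asks `get_project_id_and_node_id_to_names_map` for one flat name dict per project, keyed by the stringified project and node IDs. A minimal, self-contained sketch of how such a nested names map gets consumed when enriching file listings (the `FileEntry` type and its fields below are illustrative stand-ins, not the real `FileMetaData` model):

```python
import uuid
from dataclasses import dataclass


@dataclass
class FileEntry:
    """Illustrative stand-in for the real FileMetaData model."""

    project_id: uuid.UUID
    node_id: uuid.UUID
    project_name: str = ""
    node_name: str = ""


def attach_display_names(
    entries: list[FileEntry],
    names_map: dict[uuid.UUID, dict[str, str]],
) -> list[FileEntry]:
    """Keep only entries whose project and node both resolve to a display name."""
    kept: list[FileEntry] = []
    for entry in entries:
        per_project = names_map.get(entry.project_id)
        if per_project is None:
            # project not returned as valid -> drop the entry
            continue
        entry.project_name = per_project.get(f"{entry.project_id}", "")
        entry.node_name = per_project.get(f"{entry.node_id}", "")
        if entry.project_name and entry.node_name:
            kept.append(entry)
    return kept


# usage: one inner dict per project, keyed by stringified project/node UUIDs
project_id, node_id = uuid.uuid4(), uuid.uuid4()
names_map = {project_id: {f"{project_id}": "my project", f"{node_id}": "my node"}}
files = [FileEntry(project_id=project_id, node_id=node_id)]
assert attach_display_names(files, names_map)[0].node_name == "my node"
```

Keying the inner dict by stringified IDs lets a single lookup serve both the project name and every node label without reading the removed `workbench` column.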
for prj_uuid in [src_project_uuid, dst_project_uuid]: - if not await ProjectRepository.instance( - get_db_engine(self.app) - ).project_exists(project_uuid=prj_uuid): + if not await ProjectsRepo(get_db_engine(self.app)).exists( + project_uuid=prj_uuid + ): raise ProjectNotFoundError(project_id=prj_uuid) source_access_rights = await AccessLayerRepository.instance( get_db_engine(self.app) diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 802c3fab387..3902dc2a142 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -258,9 +258,13 @@ async def client( @pytest.fixture async def node_id( - project_id: ProjectID, create_project_node: Callable[[ProjectID], Awaitable[NodeID]] + project_id: ProjectID, + create_project_node: Callable[ + [ProjectID], Awaitable[tuple[NodeID, dict[str, Any]]] + ], ) -> NodeID: - return await create_project_node(project_id) + node_id, _ = await create_project_node(project_id) + return node_id @pytest.fixture @@ -783,7 +787,7 @@ async def _upload_folder_task( async def random_project_with_files( sqlalchemy_async_engine: AsyncEngine, create_project: Callable[..., Awaitable[dict[str, Any]]], - create_project_node: Callable[..., Awaitable[NodeID]], + create_project_node: Callable[..., Awaitable[tuple[NodeID, dict[str, Any]]]], create_simcore_file_id: Callable[ [ProjectID, NodeID, str, Path | None], SimcoreS3FileID ], @@ -797,17 +801,28 @@ async def random_project_with_files( upload_file: Callable[..., Awaitable[tuple[Path, SimcoreS3FileID]]], ) -> Callable[ [ProjectWithFilesParams], - Awaitable[tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]]]], + Awaitable[ + tuple[ + dict[str, Any], + dict[NodeID, dict[str, Any]], + dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], + ] + ], ]: async def _creator( project_params: ProjectWithFilesParams, - ) -> tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]]]: + ) -> tuple[ + dict[str, Any], + dict[NodeID, dict[str, Any]], + dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], + ]: assert len(project_params.allowed_file_sizes) == len( project_params.allowed_file_checksums ) project = await create_project(name="random-project") node_to_files_mapping: dict[NodeID, dict[SimcoreS3FileID, FileIDDict]] = {} upload_tasks = [] + nodes: dict[NodeID, dict[str, Any]] = {} for _ in range(project_params.num_nodes): # Create a node with outputs (files and others) project_id = ProjectID(project["uuid"]) @@ -817,7 +832,7 @@ async def _creator( output3_file_id = create_simcore_file_id( project_id, node_id, output3_file_name, Path("outputs/output_3") ) - created_node_id = await create_project_node( + created_node_id, created_node = await create_project_node( ProjectID(project["uuid"]), node_id, outputs={ @@ -827,6 +842,7 @@ async def _creator( }, ) assert created_node_id == node_id + nodes[created_node_id] = created_node upload_tasks.append( _upload_one_file_task( @@ -877,7 +893,7 @@ async def _creator( node_to_files_mapping[node_id][file_id] = file_dict project = await get_updated_project(sqlalchemy_async_engine, project["uuid"]) - return project, node_to_files_mapping + return project, nodes, node_to_files_mapping return _creator @@ -932,7 +948,7 @@ async def output_file( yield file async with sqlalchemy_async_engine.begin() as conn: - result = await conn.execute( + await conn.execute( file_meta_data.delete().where(file_meta_data.c.file_id == row.file_id) ) diff --git a/services/storage/tests/unit/test_handlers_datasets.py 
b/services/storage/tests/unit/test_handlers_datasets.py index 5808a63f1f1..47d83a98fb5 100644 --- a/services/storage/tests/unit/test_handlers_datasets.py +++ b/services/storage/tests/unit/test_handlers_datasets.py @@ -29,7 +29,7 @@ from servicelib.aiohttp import status from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager -pytest_simcore_core_services_selection = ["postgres"] +pytest_simcore_core_services_selection = ["postgres", "rabbit"] pytest_simcore_ops_services_selection = ["adminer"] diff --git a/services/storage/tests/unit/test_handlers_files.py b/services/storage/tests/unit/test_handlers_files.py index f07b63cdbe9..f4528233856 100644 --- a/services/storage/tests/unit/test_handlers_files.py +++ b/services/storage/tests/unit/test_handlers_files.py @@ -63,7 +63,7 @@ from types_aiobotocore_s3 import S3Client from yarl import URL -pytest_simcore_core_services_selection = ["postgres"] +pytest_simcore_core_services_selection = ["postgres", "rabbit"] pytest_simcore_ops_services_selection = ["adminer"] @@ -1518,14 +1518,18 @@ async def test_listing_with_project_id_filter( random_project_with_files: Callable[ [ProjectWithFilesParams], Awaitable[ - tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]]] + tuple[ + dict[str, Any], + dict[NodeID, dict[str, Any]], + dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], + ] ], ], uuid_filter: bool, project_params: ProjectWithFilesParams, ): - src_project, src_projects_list = await random_project_with_files(project_params) - _, _ = await random_project_with_files(project_params) + src_project, _, src_projects_list = await random_project_with_files(project_params) + await random_project_with_files(project_params) assert len(src_projects_list.keys()) > 0 node_id = next(iter(src_projects_list.keys())) project_files_in_db = set(src_projects_list[node_id]) diff --git a/services/storage/tests/unit/test_handlers_paths.py b/services/storage/tests/unit/test_handlers_paths.py index 0fac7c5deb2..3cad84da775 100644 --- a/services/storage/tests/unit/test_handlers_paths.py +++ b/services/storage/tests/unit/test_handlers_paths.py @@ -8,6 +8,7 @@ import random +import secrets from collections.abc import Awaitable, Callable from pathlib import Path from typing import Any, TypeAlias @@ -31,6 +32,7 @@ from pytest_simcore.helpers.storage_utils import FileIDDict, ProjectWithFilesParams from servicelib.fastapi.rest_pagination import CustomizedPathsCursorPage from simcore_postgres_database.models.projects import projects +from simcore_postgres_database.models.projects_nodes import projects_nodes from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager from sqlalchemy.ext.asyncio import AsyncEngine @@ -159,16 +161,16 @@ async def test_list_paths_pagination( user_id: UserID, with_random_project_with_files: tuple[ dict[str, Any], + dict[NodeID, dict[str, Any]], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], ], ): - project, list_of_files = with_random_project_with_files - num_nodes = len(list(project["workbench"])) + project, nodes, list_of_files = with_random_project_with_files # ls the nodes (DB-based) file_filter = Path(project["uuid"]) expected_paths = sorted( - ((file_filter / node_key, False) for node_key in project["workbench"]), + ((file_filter / f"{node_id}", False) for node_id in nodes), key=lambda x: x[0], ) await _assert_list_paths( @@ -178,12 +180,12 @@ async def test_list_paths_pagination( user_id, file_filter=file_filter, expected_paths=expected_paths, - limit=int(num_nodes / 2 + 0.5), + limit=int(len(nodes) / 2 
+ 0.5), ) # ls in the workspace (S3-based) # ls in the workspace - selected_node_id = NodeID(random.choice(list(project["workbench"]))) # noqa: S311 + selected_node_id = random.choice(list(nodes)) # noqa: S311 selected_node_s3_keys = [ Path(s3_object_id) for s3_object_id in list_of_files[selected_node_id] ] @@ -242,11 +244,12 @@ async def test_list_paths_pagination_large_page( user_id: UserID, with_random_project_with_files: tuple[ dict[str, Any], + dict[NodeID, dict[str, Any]], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], ], ): - project, list_of_files = with_random_project_with_files - selected_node_id = NodeID(random.choice(list(project["workbench"]))) # noqa: S311 + project, nodes, list_of_files = with_random_project_with_files + selected_node_id = random.choice(list(nodes)) # noqa: S311 selected_node_s3_keys = [ Path(s3_object_id) for s3_object_id in list_of_files[selected_node_id] ] @@ -294,7 +297,11 @@ async def test_list_paths( random_project_with_files: Callable[ [ProjectWithFilesParams], Awaitable[ - tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]]] + tuple[ + dict[str, Any], + dict[NodeID, dict[str, Any]], + dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], + ] ], ], project_params: ProjectWithFilesParams, @@ -307,7 +314,10 @@ async def test_list_paths( # ls root returns our projects expected_paths = sorted( - ((Path(f"{prj_db['uuid']}"), False) for prj_db, _ in project_to_files_mapping), + ( + (Path(f"{prj_db['uuid']}"), False) + for prj_db, _, _ in project_to_files_mapping + ), key=lambda x: x[0], ) await _assert_list_paths( @@ -320,7 +330,7 @@ async def test_list_paths( ) # ls with only some part of the path should return only the projects that match - selected_project, selected_project_files = random.choice( # noqa: S311 + selected_project, selected_nodes, selected_project_files = secrets.choice( project_to_files_mapping ) partial_file_filter = Path( @@ -342,7 +352,7 @@ async def test_list_paths( # now we ls inside one of the projects returns the nodes file_filter = Path(selected_project["uuid"]) expected_paths = sorted( - ((file_filter / node_key, False) for node_key in selected_project["workbench"]), + ((file_filter / f"{node_id}", False) for node_id in selected_nodes), key=lambda x: x[0], ) await _assert_list_paths( @@ -355,9 +365,7 @@ async def test_list_paths( ) # now we ls in one of the nodes - selected_node_id = NodeID( - random.choice(list(selected_project["workbench"])) # noqa: S311 - ) + selected_node_id = random.choice(list(selected_nodes)) # noqa: S311 selected_node_s3_keys = [ Path(s3_object_id) for s3_object_id in selected_project_files[selected_node_id] ] @@ -458,34 +466,46 @@ async def test_list_paths_with_display_name_containing_slashes( user_id: UserID, with_random_project_with_files: tuple[ dict[str, Any], + dict[NodeID, dict[str, Any]], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], ], sqlalchemy_async_engine: AsyncEngine, ): - project, list_of_files = with_random_project_with_files + project, nodes, list_of_files = with_random_project_with_files project_name_with_slashes = "soméà$èq¨thing with/ slas/h/es/" node_name_with_non_ascii = "my node / is not ascii: éàèù" - # adjust project to contain "difficult" characters + async with sqlalchemy_async_engine.begin() as conn: + # update project to contain "difficult" characters result = await conn.execute( sa.update(projects) .where(projects.c.uuid == project["uuid"]) .values(name=project_name_with_slashes) - .returning(sa.literal_column(f"{projects.c.name}, {projects.c.workbench}")) + 
.returning(projects.c.name) ) row = result.one() assert row.name == project_name_with_slashes - project_workbench = row.workbench - assert len(project_workbench) == 1 - node = next(iter(project_workbench.values())) - node["label"] = node_name_with_non_ascii - result = await conn.execute( - sa.update(projects) + + # update a node (first occurrence) to contain "difficult" characters + subquery = ( + sa.select(projects_nodes.c.node_id) + .select_from(projects_nodes.join(projects)) .where(projects.c.uuid == project["uuid"]) - .values(workbench=project_workbench) - .returning(sa.literal_column(f"{projects.c.name}, {projects.c.workbench}")) + .order_by(projects_nodes.c.node_id) + .limit(1) ) - row = result.one() + first_row = await conn.execute(subquery) + first_id = first_row.scalar_one_or_none() + + if first_id: + result = await conn.execute( + sa.update(projects_nodes) + .where(projects_nodes.c.node_id == first_id) + .values(label=node_name_with_non_ascii) + .returning(projects_nodes.c.label) + ) + row = result.one() + assert row.label == node_name_with_non_ascii # ls the root file_filter = None @@ -507,7 +527,7 @@ async def test_list_paths_with_display_name_containing_slashes( # ls the nodes to ensure / is still there between project and node file_filter = Path(project["uuid"]) expected_paths = sorted( - ((file_filter / node_key, False) for node_key in project["workbench"]), + ((file_filter / f"{node_id}", False) for node_id in nodes), key=lambda x: x[0], ) assert len(expected_paths) == 1, "test configuration problem" @@ -526,7 +546,7 @@ async def test_list_paths_with_display_name_containing_slashes( ), "display path parts should be url encoded" # ls in the node workspace - selected_node_id = NodeID(random.choice(list(project["workbench"]))) # noqa: S311 + selected_node_id = random.choice(list(nodes)) # noqa: S311 selected_node_s3_keys = [ Path(s3_object_id) for s3_object_id in list_of_files[selected_node_id] ] @@ -625,6 +645,7 @@ async def test_path_compute_size( user_id: UserID, with_random_project_with_files: tuple[ dict[str, Any], + dict[NodeID, dict[str, Any]], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], ], project_params: ProjectWithFilesParams, @@ -632,7 +653,7 @@ async def test_path_compute_size( assert ( len(project_params.allowed_file_sizes) == 1 ), "test preconditions are not filled! 
allowed file sizes should have only 1 option for this test" - project, list_of_files = with_random_project_with_files + project, nodes, list_of_files = with_random_project_with_files total_num_files = sum( len(files_in_node) for files_in_node in list_of_files.values() @@ -651,7 +672,7 @@ async def test_path_compute_size( ) # get size of one of the nodes - selected_node_id = NodeID(random.choice(list(project["workbench"]))) # noqa: S311 + selected_node_id = random.choice(list(nodes)) # noqa: S311 path = Path(project["uuid"]) / f"{selected_node_id}" selected_node_s3_keys = [ Path(s3_object_id) for s3_object_id in list_of_files[selected_node_id] diff --git a/services/storage/tests/unit/test_rpc_handlers_paths.py b/services/storage/tests/unit/test_rpc_handlers_paths.py index c1acc0719f9..d151c1d2f7a 100644 --- a/services/storage/tests/unit/test_rpc_handlers_paths.py +++ b/services/storage/tests/unit/test_rpc_handlers_paths.py @@ -23,7 +23,6 @@ from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE from models_library.products import ProductName from models_library.projects_nodes_io import LocationID, NodeID, SimcoreS3FileID -from models_library.rabbitmq_basic_types import RPCMethodName from models_library.users import UserID from pydantic import ByteSize, TypeAdapter from pytest_simcore.helpers.storage_utils import FileIDDict, ProjectWithFilesParams @@ -69,7 +68,7 @@ async def _assert_compute_path_size( path: Path, expected_total_size: int, ) -> ByteSize: - async_job, async_job_name = await compute_path_size( + async_job, _ = await compute_path_size( storage_rpc_client, location_id=location_id, path=path, @@ -80,7 +79,7 @@ async def _assert_compute_path_size( async for job_composed_result in wait_and_get_result( storage_rpc_client, rpc_namespace=STORAGE_RPC_NAMESPACE, - method_name=RPCMethodName(compute_path_size.__name__), + method_name=compute_path_size.__name__, job_id=async_job.job_id, job_filter=AsyncJobFilter( user_id=user_id, product_name=product_name, client_name="PYTEST_CLIENT_NAME" @@ -106,7 +105,7 @@ async def _assert_delete_paths( *, paths: set[Path], ) -> None: - async_job, async_job_name = await delete_paths( + async_job, _ = await delete_paths( storage_rpc_client, location_id=location_id, paths=paths, @@ -117,7 +116,7 @@ async def _assert_delete_paths( async for job_composed_result in wait_and_get_result( storage_rpc_client, rpc_namespace=STORAGE_RPC_NAMESPACE, - method_name=RPCMethodName(compute_path_size.__name__), + method_name=compute_path_size.__name__, job_id=async_job.job_id, job_filter=AsyncJobFilter( user_id=user_id, product_name=product_name, client_name="PYTEST_CLIENT_NAME" @@ -157,6 +156,7 @@ async def test_path_compute_size( location_id: LocationID, with_random_project_with_files: tuple[ dict[str, Any], + dict[NodeID, dict[str, Any]], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], ], project_params: ProjectWithFilesParams, @@ -165,7 +165,7 @@ async def test_path_compute_size( assert ( len(project_params.allowed_file_sizes) == 1 ), "test preconditions are not filled! 
allowed file sizes should have only 1 option for this test" - project, list_of_files = with_random_project_with_files + project, nodes, list_of_files = with_random_project_with_files total_num_files = sum( len(files_in_node) for files_in_node in list_of_files.values() @@ -184,7 +184,7 @@ async def test_path_compute_size( ) # get size of one of the nodes - selected_node_id = NodeID(random.choice(list(project["workbench"]))) # noqa: S311 + selected_node_id = random.choice(list(nodes)) # noqa: S311 path = Path(project["uuid"]) / f"{selected_node_id}" selected_node_s3_keys = [ Path(s3_object_id) for s3_object_id in list_of_files[selected_node_id] @@ -335,6 +335,7 @@ async def test_delete_paths( location_id: LocationID, with_random_project_with_files: tuple[ dict[str, Any], + dict[NodeID, dict[str, Any]], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], ], project_params: ProjectWithFilesParams, @@ -344,7 +345,7 @@ async def test_delete_paths( assert ( len(project_params.allowed_file_sizes) == 1 ), "test preconditions are not filled! allowed file sizes should have only 1 option for this test" - project, list_of_files = with_random_project_with_files + project, nodes, list_of_files = with_random_project_with_files total_num_files = sum( len(files_in_node) for files_in_node in list_of_files.values() @@ -364,11 +365,7 @@ async def test_delete_paths( # now select multiple random files to delete selected_paths = random.sample( - list( - list_of_files[ - NodeID(random.choice(list(project["workbench"]))) # noqa: S311 - ] - ), + list(list_of_files[random.choice(list(nodes))]), # noqa: S311 round(project_params.workspace_files_count / 2), ) @@ -377,7 +374,7 @@ async def test_delete_paths( location_id, user_id, product_name, - paths=set({Path(_) for _ in selected_paths}), + paths={Path(_) for _ in selected_paths}, ) # the size is reduced by the amount of deleted files diff --git a/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py b/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py index 59d5a6d5586..dc12a664083 100644 --- a/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py +++ b/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py @@ -38,7 +38,7 @@ from models_library.api_schemas_webserver.storage import PathToExport from models_library.basic_types import SHA256Str from models_library.products import ProductName -from models_library.projects_nodes_io import NodeID, NodeIDStr, SimcoreS3FileID +from models_library.projects_nodes_io import NodeID, SimcoreS3FileID from models_library.users import UserID from pydantic import ByteSize, TypeAdapter from pytest_mock import MockerFixture @@ -75,20 +75,33 @@ async def _request_copy_folders( rpc_client: RabbitMQRPCClient, user_id: UserID, product_name: ProductName, - source_project: dict[str, Any], + src_project: dict[str, Any], + src_project_nodes: dict[NodeID, dict[str, Any]], dst_project: dict[str, Any], + dst_project_nodes: dict[NodeID, dict[str, Any]], nodes_map: dict[NodeID, NodeID], *, client_timeout: datetime.timedelta = datetime.timedelta(seconds=60), ) -> dict[str, Any]: with log_context( logging.INFO, - f"Copying folders from {source_project['uuid']} to {dst_project['uuid']}", + f"Copying folders from {src_project['uuid']} to {dst_project['uuid']}", ) as ctx: + source = src_project | { + "workbench": { + f"{node_id}": node for node_id, node in src_project_nodes.items() + } + } + destination = dst_project | { + "workbench": { + f"{node_id}": node for node_id, node in dst_project_nodes.items() + } + } + async_job_get, 
async_job_name = await copy_folders_from_project( rpc_client, body=FoldersBody( - source=source_project, destination=dst_project, nodes_map=nodes_map + source=source, destination=destination, nodes_map=nodes_map ), job_filter=AsyncJobFilter( user_id=user_id, @@ -109,6 +122,8 @@ async def _request_copy_folders( if async_job_result.done: result = await async_job_result.result() assert isinstance(result, AsyncJobResult) + assert isinstance(result.result, dict) + result.result.pop("workbench", None) # remove workbench from the data return result.result pytest.fail(reason="Copy folders failed!") @@ -138,7 +153,9 @@ async def test_copy_folders_from_non_existing_project( user_id, product_name, incorrect_src_project, + {}, dst_project, + {}, nodes_map={}, ) @@ -150,7 +167,9 @@ async def test_copy_folders_from_non_existing_project( user_id, product_name, src_project, + {}, incorrect_dst_project, + {}, nodes_map={}, ) @@ -173,7 +192,9 @@ async def test_copy_folders_from_empty_project( user_id, product_name, src_project, + {}, dst_project, + {}, nodes_map={}, ) assert data == jsonable_encoder(dst_project) @@ -226,33 +247,41 @@ async def test_copy_folders_from_valid_project_with_one_large_file( random_project_with_files: Callable[ [ProjectWithFilesParams], Awaitable[ - tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]]] + tuple[ + dict[str, Any], + dict[NodeID, dict[str, Any]], + dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], + ] ], ], project_params: ProjectWithFilesParams, ): # 1. create a src project with 1 large file - src_project, src_projects_list = await random_project_with_files(project_params) + src_project, src_project_nodes, src_projects_list = await random_project_with_files( + project_params + ) # 2. create a dst project without files - dst_project, nodes_map = clone_project_data(src_project) + dst_project, dst_project_nodes, nodes_map = clone_project_data( + src_project, src_project_nodes + ) dst_project = await create_project(**dst_project) - # copy the project files + data = await _request_copy_folders( storage_rabbitmq_rpc_client, user_id, product_name, src_project, + src_project_nodes, dst_project, - nodes_map={NodeID(i): NodeID(j) for i, j in nodes_map.items()}, + dst_project_nodes, + nodes_map=nodes_map, ) assert data == jsonable_encoder( await get_updated_project(sqlalchemy_async_engine, dst_project["uuid"]) ) # check that file meta data was effectively copied for src_node_id in src_projects_list: - dst_node_id = nodes_map.get( - TypeAdapter(NodeIDStr).validate_python(f"{src_node_id}") - ) + dst_node_id = nodes_map.get(src_node_id) assert dst_node_id for src_file_id, src_file in src_projects_list[src_node_id].items(): path: Any = src_file["path"] @@ -319,15 +348,23 @@ async def test_copy_folders_from_valid_project( random_project_with_files: Callable[ [ProjectWithFilesParams], Awaitable[ - tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]]] + tuple[ + dict[str, Any], + dict[NodeID, dict[str, Any]], + dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], + ] ], ], project_params: ProjectWithFilesParams, ): # 1. create a src project with some files - src_project, src_projects_list = await random_project_with_files(project_params) + src_project, src_project_nodes, src_projects_list = await random_project_with_files( + project_params + ) # 2. 
create a dst project without files - dst_project, nodes_map = clone_project_data(src_project) + dst_project, dst_project_nodes, nodes_map = clone_project_data( + src_project, src_project_nodes + ) dst_project = await create_project(**dst_project) # copy the project files data = await _request_copy_folders( @@ -335,8 +372,10 @@ async def test_copy_folders_from_valid_project( user_id, product_name, src_project, + src_project_nodes, dst_project, - nodes_map={NodeID(i): NodeID(j) for i, j in nodes_map.items()}, + dst_project_nodes, + nodes_map=nodes_map, ) assert data == jsonable_encoder( await get_updated_project(sqlalchemy_async_engine, dst_project["uuid"]) @@ -344,9 +383,7 @@ async def test_copy_folders_from_valid_project( # check that file meta data was effectively copied for src_node_id in src_projects_list: - dst_node_id = nodes_map.get( - TypeAdapter(NodeIDStr).validate_python(f"{src_node_id}") - ) + dst_node_id = nodes_map.get(src_node_id) assert dst_node_id for src_file_id, src_file in src_projects_list[src_node_id].items(): path: Any = src_file["path"] @@ -376,14 +413,17 @@ async def _create_and_delete_folders_from_project( user_id: UserID, product_name: ProductName, project: dict[str, Any], + project_nodes: dict[NodeID, dict[str, Any]], initialized_app: FastAPI, project_db_creator: Callable, check_list_files: bool, *, client_timeout: datetime.timedelta = datetime.timedelta(seconds=60), ) -> None: - destination_project, nodes_map = clone_project_data(project) - await project_db_creator(**destination_project) + dst_project, dst_project_nodes, nodes_map = clone_project_data( + project, project_nodes + ) + await project_db_creator(**dst_project) # creating a copy data = await _request_copy_folders( @@ -391,14 +431,15 @@ async def _create_and_delete_folders_from_project( user_id, product_name, project, - destination_project, - nodes_map={NodeID(i): NodeID(j) for i, j in nodes_map.items()}, + project_nodes, + dst_project, + dst_project_nodes, + nodes_map=nodes_map, client_timeout=client_timeout, ) - # data should be equal to the destination project, and all store entries should point to simcore.s3 # NOTE: data is jsonized where destination project is not! - assert jsonable_encoder(destination_project) == data + assert jsonable_encoder(dst_project) == data project_id = data["uuid"] @@ -491,13 +532,14 @@ async def test_create_and_delete_folders_from_project( product_name: ProductName, with_random_project_with_files: tuple[ dict[str, Any], + dict[NodeID, dict[str, Any]], dict[NodeID, dict[SimcoreS3FileID, dict[str, Path | str]]], ], create_project: Callable[..., Awaitable[dict[str, Any]]], mock_datcore_download, num_concurrent_calls: int, ): - project_in_db, _ = with_random_project_with_files + project_in_db, project_nodes_in_db, _ = with_random_project_with_files # NOTE: here the point is to NOT have a limit on the number of calls!! 
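Throughout these test hunks, `clone_project_data` is now expected to return the cloned project, the cloned nodes, and a `nodes_map` from old to new `NodeID`, while `_request_copy_folders` rebuilds a transient `workbench` dict from those nodes before issuing the RPC call. A rough sketch of that cloning step, assuming node payloads are plain dicts (the helpers `clone_nodes` and `as_workbench` are illustrative, not the repository's actual `clone_project_data`):

```python
import uuid
from copy import deepcopy
from typing import Any


def clone_nodes(
    src_nodes: dict[uuid.UUID, dict[str, Any]],
) -> tuple[dict[uuid.UUID, dict[str, Any]], dict[uuid.UUID, uuid.UUID]]:
    """Copy every node under a freshly generated node ID and keep the old->new mapping."""
    nodes_map = {old_id: uuid.uuid4() for old_id in src_nodes}
    dst_nodes = {
        nodes_map[old_id]: deepcopy(node) for old_id, node in src_nodes.items()
    }
    return dst_nodes, nodes_map


def as_workbench(nodes: dict[uuid.UUID, dict[str, Any]]) -> dict[str, dict[str, Any]]:
    """Serialize node IDs to strings, i.e. the transient 'workbench' placed in the RPC body."""
    return {f"{node_id}": node for node_id, node in nodes.items()}


# usage
src = {uuid.uuid4(): {"key": "simcore/services/comp/itis/sleeper", "label": "sleeper"}}
dst, nodes_map = clone_nodes(src)
assert set(nodes_map) == set(src)
assert set(dst) == set(nodes_map.values())
rpc_body = {
    "source": {"workbench": as_workbench(src)},
    "destination": {"workbench": as_workbench(dst)},
    "nodes_map": nodes_map,
}
```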
await asyncio.gather( *[ @@ -507,6 +549,7 @@ async def test_create_and_delete_folders_from_project( user_id, product_name, project_in_db, + project_nodes_in_db, initialized_app, create_project, check_list_files=False, @@ -601,14 +644,18 @@ async def test_start_export_data( random_project_with_files: Callable[ [ProjectWithFilesParams], Awaitable[ - tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]]] + tuple[ + dict[str, Any], + dict[NodeID, dict[str, Any]], + dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], + ] ], ], project_params: ProjectWithFilesParams, task_progress_spy: Mock, export_as: Literal["path", "download_link"], ): - _, src_projects_list = await random_project_with_files(project_params) + _, _, src_projects_list = await random_project_with_files(project_params) all_available_files: set[SimcoreS3FileID] = set() for x in src_projects_list.values(): diff --git a/services/storage/tests/unit/test_simcore_s3_dsm.py b/services/storage/tests/unit/test_simcore_s3_dsm.py index fdde44a8663..7aa3f69a191 100644 --- a/services/storage/tests/unit/test_simcore_s3_dsm.py +++ b/services/storage/tests/unit/test_simcore_s3_dsm.py @@ -173,11 +173,15 @@ async def paths_for_export( random_project_with_files: Callable[ [ProjectWithFilesParams], Awaitable[ - tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]]] + tuple[ + dict[str, Any], + dict[NodeID, dict[str, Any]], + dict[NodeID, dict[SimcoreS3FileID, FileIDDict]], + ] ], ], ) -> set[SimcoreS3FileID]: - _, file_mapping = await random_project_with_files( + _, _, file_mapping = await random_project_with_files( ProjectWithFilesParams( num_nodes=2, allowed_file_sizes=(TypeAdapter(ByteSize).validate_python("1KiB"),), diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index d704cd3043a..af709cad957 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -14006,9 +14006,9 @@ components: description: The short name of the node progress: anyOf: - - type: number - maximum: 100.0 - minimum: 0.0 + - type: integer + maximum: 100 + minimum: 0 - type: 'null' title: Progress description: the node progress value @@ -14149,9 +14149,9 @@ components: description: The short name of the node progress: anyOf: - - type: number - maximum: 100.0 - minimum: 0.0 + - type: integer + maximum: 100 + minimum: 0 - type: 'null' title: Progress description: the node progress value diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_computations_service.py b/services/web/server/src/simcore_service_webserver/director_v2/_computations_service.py index 8611373d95d..9a8f4cb7005 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/_computations_service.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/_computations_service.py @@ -14,6 +14,8 @@ ) from models_library.products import ProductName from models_library.projects import ProjectID +from models_library.projects_nodes import Node +from models_library.projects_nodes_io import NodeID from models_library.rest_ordering import OrderBy from models_library.services_types import ServiceRunID from models_library.users import UserID @@ -31,10 +33,12 @@ from servicelib.utils import limited_gather from ..products.products_service import is_product_billable +from ..projects._projects_nodes_repository import ( + get_by_projects, +) from 
..projects.api import ( batch_get_project_name, check_user_project_permission, - get_project_dict_legacy, ) from ..projects.projects_metadata_service import ( get_project_custom_metadata_or_empty_dict, @@ -247,16 +251,14 @@ async def list_computations_latest_iteration_tasks( # Get unique set of all project_uuids from comp_tasks unique_project_uuids = {task.project_uuid for task in _tasks_get.items} # Fetch projects metadata concurrently - # NOTE: MD: can be improved with a single batch call - project_dicts = await limited_gather( - *[ - get_project_dict_legacy(app, project_uuid=project_uuid) - for project_uuid in unique_project_uuids - ], - limit=20, + _projects_nodes: dict[ProjectID, list[tuple[NodeID, Node]]] = await get_by_projects( + app, project_ids=unique_project_uuids ) + # Build a dict: project_uuid -> workbench - project_uuid_to_workbench = {prj["uuid"]: prj["workbench"] for prj in project_dicts} + project_uuid_to_workbench: dict[ProjectID, dict[NodeID, Node]] = { + project_uuid: dict(nodes) for project_uuid, nodes in _projects_nodes.items() + } _service_run_ids = [item.service_run_id for item in _tasks_get.items] _is_product_billable = await is_product_billable(app, product_name=product_name) @@ -286,9 +288,8 @@ async def list_computations_latest_iteration_tasks( started_at=item.started_at, ended_at=item.ended_at, log_download_link=item.log_download_link, - node_name=project_uuid_to_workbench[f"{item.project_uuid}"][ - f"{item.node_id}" - ].get("label", ""), + node_name=project_uuid_to_workbench[item.project_uuid][item.node_id].label + or "Unknown", osparc_credits=credits_or_none, ) for item, credits_or_none in zip( @@ -409,16 +410,15 @@ async def list_computation_collection_run_tasks( # Get unique set of all project_uuids from comp_tasks unique_project_uuids = {task.project_uuid for task in _tasks_get.items} - # NOTE: MD: can be improved with a single batch call - project_dicts = await limited_gather( - *[ - get_project_dict_legacy(app, project_uuid=project_uuid) - for project_uuid in unique_project_uuids - ], - limit=20, + + _projects_nodes: dict[ProjectID, list[tuple[NodeID, Node]]] = await get_by_projects( + app, project_ids=unique_project_uuids ) + # Build a dict: project_uuid -> workbench - project_uuid_to_workbench = {prj["uuid"]: prj["workbench"] for prj in project_dicts} + project_uuid_to_workbench: dict[ProjectID, dict[NodeID, Node]] = { + project_uuid: dict(nodes) for project_uuid, nodes in _projects_nodes.items() + } # Fetch projects metadata concurrently _projects_metadata = await _get_projects_metadata( @@ -455,9 +455,7 @@ async def list_computation_collection_run_tasks( log_download_link=item.log_download_link, name=( custom_metadata.get("job_name") - or project_uuid_to_workbench[f"{item.project_uuid}"][ - f"{item.node_id}" - ].get("label") + or project_uuid_to_workbench[item.project_uuid][item.node_id].label or "Unknown" ), osparc_credits=credits_or_none, diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_rest.py index a6e36c0091a..da56f640be7 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_rest.py @@ -117,18 +117,12 @@ async def create_node(request: web.Request) -> web.Response: text=f"Service {body.service_key}:{body.service_version} is deprecated" ) - # ensure the project exists - project_data = await 
_projects_service.get_project_for_user( - request.app, - project_uuid=f"{path_params.project_id}", - user_id=req_ctx.user_id, - ) data = { "node_id": await _projects_service.add_project_node( request, - project_data, req_ctx.user_id, req_ctx.product_name, + path_params.project_id, get_api_base_url(request), body.service_key, body.service_version, diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/ports_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/ports_rest.py index f9336f6e7c3..1e2ea19a474 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/ports_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/ports_rest.py @@ -8,11 +8,8 @@ ProjectOutputGet, ) from models_library.basic_types import KeyIDStr -from models_library.projects import ProjectID -from models_library.projects_nodes import Node +from models_library.projects_nodes import PartialNode from models_library.projects_nodes_io import NodeID -from models_library.users import UserID -from models_library.utils.fastapi_encoders import jsonable_encoder from models_library.utils.services_io import JsonSchemaDict from pydantic import BaseModel, Field, TypeAdapter from servicelib.aiohttp.requests_validation import ( @@ -26,29 +23,14 @@ from ...models import ClientSessionHeaderParams from ...security.decorators import permission_required from ...utils_aiohttp import envelope_json_response -from .. import _ports_service, _projects_service -from .._access_rights_service import check_user_project_permission -from .._projects_repository_legacy import ProjectDBAPI -from ..models import ProjectDict +from .. import _access_rights_service, _nodes_service, _ports_service +from .._projects_service import _create_project_document_and_notify from ._rest_exceptions import handle_plugin_requests_exceptions from ._rest_schemas import AuthenticatedRequestContext, ProjectPathParams log = logging.getLogger(__name__) -async def _get_validated_workbench_model( - app: web.Application, project_id: ProjectID, user_id: UserID -) -> dict[NodeID, Node]: - project: ProjectDict = await _projects_service.get_project_for_user( - app, - project_uuid=f"{project_id}", - user_id=user_id, - include_state=False, - ) - - return TypeAdapter(dict[NodeID, Node]).validate_python(project["workbench"]) - - routes = web.RouteTableDef() @@ -66,9 +48,17 @@ async def get_project_inputs(request: web.Request) -> web.Response: assert request.app # nosec - workbench = await _get_validated_workbench_model( - app=request.app, project_id=path_params.project_id, user_id=req_ctx.user_id + await _access_rights_service.check_user_project_permission( + request.app, + product_name=req_ctx.product_name, + user_id=req_ctx.user_id, + project_id=path_params.project_id, + permission="read", ) + workbench = await _nodes_service.get_project_nodes_map( + app=request.app, project_id=path_params.project_id + ) + inputs: dict[NodeID, Any] = _ports_service.get_project_inputs(workbench) return envelope_json_response( @@ -86,7 +76,6 @@ async def get_project_inputs(request: web.Request) -> web.Response: @permission_required("project.update") @handle_plugin_requests_exceptions async def update_project_inputs(request: web.Request) -> web.Response: - db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(request.app) req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) inputs_updates = await 
parse_request_body_as(list[ProjectInputUpdate], request) @@ -94,10 +83,19 @@ async def update_project_inputs(request: web.Request) -> web.Response: assert request.app # nosec - workbench = await _get_validated_workbench_model( - app=request.app, project_id=path_params.project_id, user_id=req_ctx.user_id + await _access_rights_service.check_user_project_permission( + request.app, + product_name=req_ctx.product_name, + user_id=req_ctx.user_id, + project_id=path_params.project_id, + permission="write", # because we are updating inputs later + ) + current_workbench = await _nodes_service.get_project_nodes_map( + app=request.app, project_id=path_params.project_id + ) + current_inputs: dict[NodeID, Any] = _ports_service.get_project_inputs( + current_workbench ) - current_inputs: dict[NodeID, Any] = _ports_service.get_project_inputs(workbench) # build workbench patch partial_workbench_data = {} @@ -106,38 +104,39 @@ async def update_project_inputs(request: web.Request) -> web.Response: if node_id not in current_inputs: raise web.HTTPBadRequest(text=f"Invalid input key [{node_id}]") - workbench[node_id].outputs = {KeyIDStr("out_1"): input_update.value} - partial_workbench_data[node_id] = workbench[node_id].model_dump( + current_workbench[node_id].outputs = {KeyIDStr("out_1"): input_update.value} + partial_workbench_data[node_id] = current_workbench[node_id].model_dump( include={"outputs"}, exclude_unset=True ) - # patch workbench - await check_user_project_permission( + partial_nodes_map = TypeAdapter(dict[NodeID, PartialNode]).validate_python( + partial_workbench_data + ) + + await _nodes_service.update_project_nodes_map( request.app, project_id=path_params.project_id, - user_id=req_ctx.user_id, - product_name=req_ctx.product_name, - permission="write", + partial_nodes_map=partial_nodes_map, ) - assert db # nosec - updated_project, _ = await db.update_project_multiple_node_data( + # get updated workbench (including not updated nodes) + updated_workbench = await _nodes_service.get_project_nodes_map( + request.app, project_id=path_params.project_id + ) + + await _create_project_document_and_notify( + request.app, + project_id=path_params.project_id, user_id=req_ctx.user_id, - project_uuid=path_params.project_id, - product_name=req_ctx.product_name, - partial_workbench_data=jsonable_encoder(partial_workbench_data), client_session_id=header_params.client_session_id, ) - workbench = TypeAdapter(dict[NodeID, Node]).validate_python( - updated_project["workbench"] - ) - inputs: dict[NodeID, Any] = _ports_service.get_project_inputs(workbench) + inputs: dict[NodeID, Any] = _ports_service.get_project_inputs(updated_workbench) return envelope_json_response( { node_id: ProjectInputGet( - key=node_id, label=workbench[node_id].label, value=value + key=node_id, label=updated_workbench[node_id].label, value=value ) for node_id, value in inputs.items() } @@ -159,9 +158,17 @@ async def get_project_outputs(request: web.Request) -> web.Response: assert request.app # nosec - workbench = await _get_validated_workbench_model( - app=request.app, project_id=path_params.project_id, user_id=req_ctx.user_id + await _access_rights_service.check_user_project_permission( + request.app, + product_name=req_ctx.product_name, + user_id=req_ctx.user_id, + project_id=path_params.project_id, + permission="read", + ) + workbench = await _nodes_service.get_project_nodes_map( + app=request.app, project_id=path_params.project_id ) + outputs: dict[NodeID, Any] = await _ports_service.get_project_outputs( request.app, 
project_id=path_params.project_id, workbench=workbench ) @@ -206,10 +213,16 @@ async def list_project_metadata_ports(request: web.Request) -> web.Response: assert request.app # nosec - workbench = await _get_validated_workbench_model( - app=request.app, project_id=path_params.project_id, user_id=req_ctx.user_id + await _access_rights_service.check_user_project_permission( + request.app, + product_name=req_ctx.product_name, + user_id=req_ctx.user_id, + project_id=path_params.project_id, + permission="read", + ) + workbench = await _nodes_service.get_project_nodes_map( + app=request.app, project_id=path_params.project_id ) - return envelope_json_response( [ ProjectMetadataPortGet( diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest.py index 4168ec608de..0c411dacc6b 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest.py @@ -252,7 +252,9 @@ async def get_active_project(request: web.Request) -> web.Response: request.app, request.url, dict(request.headers), project ) - data = ProjectGet.from_domain_model(project).data(exclude_unset=True) + data = ProjectGet.from_domain_model(project).model_dump( + by_alias=True, exclude_unset=True, exclude_none=True + ) return envelope_json_response(data) @@ -284,7 +286,10 @@ async def get_project(request: web.Request): # Adds permalink await update_or_pop_permalink_in_project( - request.app, request.url, dict(request.headers), project + request.app, + request_url=request.url, + request_headers=dict(request.headers), + project=project, ) data = ProjectGet.from_domain_model(project).data(exclude_unset=True) diff --git a/services/web/server/src/simcore_service_webserver/projects/_jobs_repository.py b/services/web/server/src/simcore_service_webserver/projects/_jobs_repository.py index 539b1085036..008de54506b 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_jobs_repository.py +++ b/services/web/server/src/simcore_service_webserver/projects/_jobs_repository.py @@ -4,7 +4,6 @@ from models_library.products import ProductName from models_library.projects import ProjectID from models_library.users import UserID -from pydantic import TypeAdapter from simcore_postgres_database.models.groups import user_to_groups from simcore_postgres_database.models.project_to_groups import project_to_groups from simcore_postgres_database.models.projects import projects @@ -19,6 +18,8 @@ from sqlalchemy.dialects.postgresql import insert as pg_insert from sqlalchemy.ext.asyncio import AsyncConnection +from simcore_service_webserver.projects._projects_repository_legacy_utils import get_project_workbench + from ..db.base_repository import BaseRepository from .models import ProjectDBGet, ProjectJobDBGet @@ -178,7 +179,6 @@ async def list_projects_marked_as_jobs( list_query = ( sa.select( *_PROJECT_DB_COLS, - projects.c.workbench, base_query.c.job_parent_resource_name, base_query.c.storage_assets_deleted, ) @@ -201,10 +201,10 @@ async def list_projects_marked_as_jobs( total_count = await conn.scalar(total_query) assert isinstance(total_count, int) # nosec - result = await conn.execute(list_query) - projects_list = TypeAdapter(list[ProjectJobDBGet]).validate_python( - result.fetchall() - ) + projects_list = [] + async for project_row in await conn.stream(list_query): + workbench = await 
get_project_workbench(conn, project_row.uuid) + projects_list.append(ProjectJobDBGet.model_validate({**project_row, "workbench": workbench})) return total_count, projects_list @@ -221,7 +221,6 @@ async def get_project_marked_as_job( query = ( sa.select( *_PROJECT_DB_COLS, - projects.c.workbench, projects_to_jobs.c.job_parent_resource_name, projects_to_jobs.c.storage_assets_deleted, ) @@ -244,4 +243,6 @@ async def get_project_marked_as_job( row = result.first() if row is None: return None - return TypeAdapter(ProjectJobDBGet).validate_python(row) + + workbench = await get_project_workbench(conn, row.uuid) + return ProjectJobDBGet.model_validate({**row, "workbench": workbench}) diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_repository.py b/services/web/server/src/simcore_service_webserver/projects/_nodes_repository.py index f969f8de983..8a01f5b5f87 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_nodes_repository.py +++ b/services/web/server/src/simcore_service_webserver/projects/_nodes_repository.py @@ -1,10 +1,17 @@ from aiohttp import web from models_library.projects import ProjectID +from models_library.projects_nodes import Node, PartialNode +from models_library.projects_nodes_io import NodeID from models_library.services_types import ServiceKey, ServiceVersion +from pydantic import TypeAdapter from simcore_postgres_database.utils_projects_nodes import ProjectNode, ProjectNodesRepo -from simcore_postgres_database.utils_repos import pass_or_acquire_connection +from simcore_postgres_database.utils_repos import ( + pass_or_acquire_connection, + transaction_context, +) from ..db.plugin import get_asyncpg_engine +from . import _nodes_models_adapters async def get_project_nodes_services( @@ -13,10 +20,51 @@ async def get_project_nodes_services( repo = ProjectNodesRepo(project_uuid=project_uuid) async with pass_or_acquire_connection(get_asyncpg_engine(app)) as conn: - nodes = await repo.list(conn) + project_nodes = await repo.list(conn) # removes duplicates by preserving order - return list(dict.fromkeys((node.key, node.version) for node in nodes)) + return list(dict.fromkeys((node.key, node.version) for node in project_nodes)) + + +async def get_project_nodes_map( + app: web.Application, *, project_id: ProjectID +) -> dict[NodeID, Node]: + + repo = ProjectNodesRepo(project_uuid=project_id) + + async with pass_or_acquire_connection(get_asyncpg_engine(app)) as conn: + project_nodes = await repo.list(conn) + + workbench = { + project_node.node_id: _nodes_models_adapters.node_from_project_node( + project_node + ) + for project_node in project_nodes + } + return TypeAdapter(dict[NodeID, Node]).validate_python(workbench) + + +async def update_project_nodes_map( + app: web.Application, + *, + project_id: ProjectID, + partial_nodes_map: dict[NodeID, PartialNode], +) -> dict[NodeID, Node]: + repo = ProjectNodesRepo(project_uuid=project_id) + + workbench: dict[NodeID, Node] = {} + async with transaction_context(get_asyncpg_engine(app)) as conn: + for node_id, node in partial_nodes_map.items(): + project_node = await repo.update( + conn, + node_id=node_id, + **node.model_dump(exclude_none=True, exclude_unset=True), + ) + workbench[node_id] = _nodes_models_adapters.node_from_project_node( + project_node + ) + + return TypeAdapter(dict[NodeID, Node]).validate_python(workbench) async def get_project_nodes( diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_service.py 
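# Illustrative sketch (not part of this patch): list_projects_marked_as_jobs now streams
# project rows and rebuilds each workbench from projects_nodes instead of selecting a
# workbench column. The helper below mirrors that stream-then-enrich shape against a
# placeholder table; table and column names are assumptions, and the one-query-per-project
# lookup intentionally reproduces the N+1 pattern of the patched code.
from typing import Any

import sqlalchemy as sa
from sqlalchemy.ext.asyncio import AsyncConnection

_metadata = sa.MetaData()
_nodes = sa.Table(  # placeholder schema, not the real projects_nodes table
    "nodes",
    _metadata,
    sa.Column("project_uuid", sa.String),
    sa.Column("node_id", sa.String),
    sa.Column("label", sa.String),
)


async def load_workbench(
    conn: AsyncConnection, project_uuid: str
) -> dict[str, dict[str, Any]]:
    # one extra query per project, like get_project_workbench(conn, row.uuid) above
    result = await conn.stream(
        sa.select(_nodes).where(_nodes.c.project_uuid == project_uuid)
    )
    return {row.node_id: {"label": row.label} async for row in result}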
b/services/web/server/src/simcore_service_webserver/projects/_nodes_service.py index 5354b496ca5..3b01822927c 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_nodes_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_nodes_service.py @@ -10,7 +10,7 @@ from models_library.api_schemas_storage.storage_schemas import FileMetaDataGet from models_library.basic_types import KeyIDStr from models_library.projects import ProjectID -from models_library.projects_nodes import Node +from models_library.projects_nodes import Node, PartialNode from models_library.projects_nodes_io import NodeID, SimCoreFileLink from models_library.services_types import ServiceKey, ServiceVersion from models_library.users import UserID @@ -82,6 +82,26 @@ async def get_project_nodes_services( ) +async def get_project_nodes_map( + app: web.Application, *, project_id: ProjectID +) -> dict[NodeID, Node]: + """ + Returns a map of node_id to Node for the given project_id which used to be called the project's `workbench` + """ + return await _nodes_repository.get_project_nodes_map(app, project_id=project_id) + + +async def update_project_nodes_map( + app: web.Application, + *, + project_id: ProjectID, + partial_nodes_map: dict[NodeID, PartialNode], +) -> dict[NodeID, Node]: + return await _nodes_repository.update_project_nodes_map( + app, project_id=project_id, partial_nodes_map=partial_nodes_map + ) + + async def get_project_nodes( app: web.Application, *, project_uuid: ProjectID ) -> list[ProjectNode]: diff --git a/services/web/server/src/simcore_service_webserver/projects/_project_document_service.py b/services/web/server/src/simcore_service_webserver/projects/_project_document_service.py index dacc6833dc5..9ec79ebcda8 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_project_document_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_project_document_service.py @@ -64,26 +64,25 @@ async def _create_project_document_and_increment_version() -> ( - the project document and its version must be kept in sync """ # Get the full project with workbench for document creation - project_with_workbench = await _projects_repository.get_project_with_workbench( + project = await _projects_repository.get_project_with_workbench( app=app, project_uuid=project_uuid ) + # Create project document project_document = ProjectDocument( - uuid=project_with_workbench.uuid, - workspace_id=project_with_workbench.workspace_id, - name=project_with_workbench.name, - description=project_with_workbench.description, - thumbnail=project_with_workbench.thumbnail, - last_change_date=project_with_workbench.last_change_date, - classifiers=project_with_workbench.classifiers, - dev=project_with_workbench.dev, - quality=project_with_workbench.quality, - workbench=project_with_workbench.workbench, - ui=project_with_workbench.ui, - type=cast(ProjectTypeAPI, project_with_workbench.type), - template_type=cast( - ProjectTemplateType, project_with_workbench.template_type - ), + uuid=project.uuid, + workspace_id=project.workspace_id, + name=project.name, + description=project.description, + thumbnail=project.thumbnail, + last_change_date=project.last_change_date, + classifiers=project.classifiers, + dev=project.dev, + quality=project.quality, + workbench=project.workbench, + ui=project.ui, + type=cast(ProjectTypeAPI, project.type), + template_type=cast(ProjectTemplateType, project.template_type), ) # Increment document version redis_client_sdk = 
get_redis_document_manager_client_sdk(app) diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_nodes_repository.py b/services/web/server/src/simcore_service_webserver/projects/_projects_nodes_repository.py index cd6880732e0..f79e3f409ea 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_nodes_repository.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_nodes_repository.py @@ -1,25 +1,32 @@ import logging import sqlalchemy as sa - from aiohttp import web from models_library.projects import ProjectID from models_library.projects_nodes import Node, PartialNode from models_library.projects_nodes_io import NodeID -from simcore_postgres_database.utils_repos import transaction_context +from simcore_postgres_database.utils_projects_nodes import ProjectNode +from simcore_postgres_database.utils_repos import ( + pass_or_acquire_connection, + transaction_context, +) from simcore_postgres_database.webserver_models import projects_nodes from sqlalchemy.ext.asyncio import AsyncConnection -from .exceptions import NodeNotFoundError from ..db.plugin import get_asyncpg_engine +from .exceptions import NodeNotFoundError _logger = logging.getLogger(__name__) _SELECTION_PROJECTS_NODES_DB_ARGS = [ + projects_nodes.c.node_id, + projects_nodes.c.project_uuid, projects_nodes.c.key, projects_nodes.c.version, projects_nodes.c.label, + projects_nodes.c.created, + projects_nodes.c.modified, projects_nodes.c.progress, projects_nodes.c.thumbnail, projects_nodes.c.input_access, @@ -36,6 +43,40 @@ ] +async def add( + app: web.Application, + connection: AsyncConnection | None = None, + *, + project_id: ProjectID, + node_id: NodeID, + node: Node, +) -> None: + values = node.model_dump(mode="json", exclude_none=True) + + async with transaction_context(get_asyncpg_engine(app), connection) as conn: + await conn.execute( + projects_nodes.insert().values( + project_uuid=f"{project_id}", node_id=f"{node_id}", **values + ) + ) + + +async def delete( + app: web.Application, + connection: AsyncConnection | None = None, + *, + project_id: ProjectID, + node_id: NodeID, +) -> None: + async with transaction_context(get_asyncpg_engine(app), connection) as conn: + await conn.execute( + projects_nodes.delete().where( + (projects_nodes.c.project_uuid == f"{project_id}") + & (projects_nodes.c.node_id == f"{node_id}") + ) + ) + + async def get( app: web.Application, connection: AsyncConnection | None = None, @@ -43,27 +84,92 @@ async def get( project_id: ProjectID, node_id: NodeID, ) -> Node: - async with transaction_context(get_asyncpg_engine(app), connection) as conn: - get_stmt = sa.select( - *_SELECTION_PROJECTS_NODES_DB_ARGS - ).where( + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + query = sa.select(*_SELECTION_PROJECTS_NODES_DB_ARGS).where( (projects_nodes.c.project_uuid == f"{project_id}") & (projects_nodes.c.node_id == f"{node_id}") ) - result = await conn.stream(get_stmt) + result = await conn.stream(query) assert result # nosec row = await result.first() if row is None: raise NodeNotFoundError( - project_uuid=f"{project_id}", - node_uuid=f"{node_id}" + project_uuid=f"{project_id}", node_uuid=f"{node_id}" ) assert row # nosec return Node.model_validate(row, from_attributes=True) +async def get_by_project( + app: web.Application, + connection: AsyncConnection | None = None, + *, + project_id: ProjectID, +) -> list[tuple[NodeID, Node]]: + async with pass_or_acquire_connection(get_asyncpg_engine(app), 
connection) as conn: + query = sa.select(*_SELECTION_PROJECTS_NODES_DB_ARGS).where( + projects_nodes.c.project_uuid == f"{project_id}" + ) + + stream = await conn.stream(query) + assert stream # nosec + + result: list[tuple[NodeID, Node]] = [] + async for row in stream: + # build Model only once on top of row + pn = ProjectNode.model_validate(row, from_attributes=True) + node = Node.model_validate( + pn.model_dump( + exclude_none=True, + exclude_unset=True, + exclude={"node_id", "created", "modified"}, + ) + ) + result.append((NodeID(row.node_id), node)) + + return result + + +async def get_by_projects( + app: web.Application, + project_ids: set[ProjectID], + connection: AsyncConnection | None = None, +) -> dict[ProjectID, list[tuple[NodeID, Node]]]: + if not project_ids: + return {} + + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + query = sa.select(*_SELECTION_PROJECTS_NODES_DB_ARGS).where( + projects_nodes.c.project_uuid.in_([f"{pid}" for pid in project_ids]) + ) + + stream = await conn.stream(query) + assert stream # nosec + + # Initialize dict with empty lists for all requested project_ids + projects_to_nodes: dict[ProjectID, list[tuple[NodeID, Node]]] = { + pid: [] for pid in project_ids + } + + # Fill in the actual data + async for row in stream: + node = Node.model_validate( + ProjectNode.model_validate(row).model_dump( + exclude_none=True, + exclude_unset=True, + exclude={"node_id", "created", "modified"}, + ) + ) + + projects_to_nodes[ProjectID(row.project_uuid)].append( + (NodeID(row.node_id), node) + ) + + return projects_to_nodes + + async def update( app: web.Application, connection: AsyncConnection | None = None, @@ -71,15 +177,17 @@ async def update( project_id: ProjectID, node_id: NodeID, partial_node: PartialNode, -) -> None: +) -> Node: values = partial_node.model_dump(mode="json", exclude_unset=True) async with transaction_context(get_asyncpg_engine(app), connection) as conn: - await conn.stream( + result = await conn.stream( projects_nodes.update() .values(**values) .where( (projects_nodes.c.project_uuid == f"{project_id}") & (projects_nodes.c.node_id == f"{node_id}") ) + .returning(*_SELECTION_PROJECTS_NODES_DB_ARGS) ) + return Node.model_validate(await result.first(), from_attributes=True) diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_repository.py b/services/web/server/src/simcore_service_webserver/projects/_projects_repository.py index 83c357c3657..8c3099d1a28 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_repository.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_repository.py @@ -8,13 +8,16 @@ from common_library.exclude import Unset, is_set from models_library.basic_types import IDStr from models_library.groups import GroupID +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.rest_ordering import OrderBy, OrderDirection from models_library.rest_pagination import MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE from models_library.workspaces import WorkspaceID -from pydantic import NonNegativeInt, PositiveInt +from pydantic import NonNegativeInt, PositiveInt, TypeAdapter from simcore_postgres_database.models.projects import projects +from simcore_postgres_database.models.projects_to_products import projects_to_products from simcore_postgres_database.models.users import users +from simcore_postgres_database.utils_projects_nodes import create_workbench_subquery from 
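# Illustrative sketch (not part of this patch): get_by_projects pre-seeds an empty list
# for every requested project so callers can distinguish "project has no nodes" from
# "project not requested". The stand-alone function below reproduces that grouping logic
# on plain tuples; the row shape is an assumption used only for illustration.
from uuid import UUID, uuid4


def group_nodes_by_project(
    requested: set[UUID], rows: list[tuple[UUID, UUID, str]]
) -> dict[UUID, list[tuple[UUID, str]]]:
    # seed every requested project with an empty list up front
    grouped: dict[UUID, list[tuple[UUID, str]]] = {pid: [] for pid in requested}
    for project_uuid, node_id, label in rows:
        grouped[project_uuid].append((node_id, label))
    return grouped


if __name__ == "__main__":
    p1, p2 = uuid4(), uuid4()
    rows = [(p1, uuid4(), "sleeper")]
    print(group_nodes_by_project({p1, p2}, rows))  # p2 maps to [], not missing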
simcore_postgres_database.utils_repos import ( get_columns_from_db_model, pass_or_acquire_connection, @@ -107,14 +110,32 @@ async def get_project( project_uuid: ProjectID, ) -> ProjectDBGet: async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: - query = sql.select(*PROJECT_DB_COLS).where(projects.c.uuid == f"{project_uuid}") - result = await conn.execute(query) + result = await conn.execute( + sa.select(*PROJECT_DB_COLS).where(projects.c.uuid == f"{project_uuid}") + ) row = result.one_or_none() if row is None: raise ProjectNotFoundError(project_uuid=project_uuid) return ProjectDBGet.model_validate(row) +async def get_project_product( + app, + connection: AsyncConnection | None = None, + *, + project_uuid: ProjectID, +) -> ProductName: + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + result = await conn.scalar( + sa.select(projects_to_products.c.product_name).where( + projects_to_products.c.project_uuid == f"{project_uuid}" + ) + ) + if result is None: + raise ProjectNotFoundError(project_uuid=project_uuid) + return TypeAdapter(ProductName).validate_python(result) + + async def get_project_with_workbench( app: web.Application, connection: AsyncConnection | None = None, @@ -122,8 +143,21 @@ async def get_project_with_workbench( project_uuid: ProjectID, ) -> ProjectWithWorkbenchDBGet: async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: - query = sql.select(*PROJECT_DB_COLS, projects.c.workbench).where( - projects.c.uuid == f"{project_uuid}" + workbench_subquery = create_workbench_subquery(f"{project_uuid}") + query = ( + sql.select( + *PROJECT_DB_COLS, + sa.func.coalesce( + workbench_subquery.c.workbench, sa.text("'{}'::json") + ).label("workbench"), + ) + .select_from( + projects.outerjoin( + workbench_subquery, + projects.c.uuid == workbench_subquery.c.project_uuid, + ) + ) + .where(projects.c.uuid == f"{project_uuid}") ) result = await conn.execute(query) row = result.one_or_none() diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py index 32bd7c00a53..fe34803645a 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py @@ -10,6 +10,7 @@ from typing import Any, Self, cast from uuid import uuid1 +from models_library.utils._original_fastapi_encoders import jsonable_encoder import sqlalchemy as sa from aiohttp import web from aiopg.sa import Engine @@ -35,7 +36,6 @@ ) from models_library.rest_ordering import OrderBy, OrderDirection from models_library.users import UserID -from models_library.utils.fastapi_encoders import jsonable_encoder from models_library.wallets import WalletDB, WalletID from models_library.workspaces import WorkspaceQuery, WorkspaceScope from pydantic import TypeAdapter @@ -95,6 +95,7 @@ convert_to_db_names, convert_to_schema_names, create_project_access_rights, + get_project_workbench, patch_workbench, ) from ._socketio_service import notify_project_document_updated @@ -178,7 +179,7 @@ def _reraise_if_not_unique_uuid_error(err: UniqueViolation): with attempt: async with conn.begin(): project_index = None - project_uuid = ProjectID(f"{insert_values['uuid']}") + project_uuid = ProjectID(insert_values["uuid"]) try: result: ResultProxy = await conn.execute( @@ -225,41 +226,10 @@ def 
_reraise_if_not_unique_uuid_error(err: UniqueViolation): ) selected_values["tags"] = project_tag_ids - # NOTE: this will at some point completely replace workbench in the DB - if selected_values["workbench"]: - project_nodes_repo = ProjectNodesRepo( - project_uuid=project_uuid + if project_nodes: + await ProjectNodesRepo(project_uuid=project_uuid).add( + conn, nodes=list(project_nodes.values()) ) - if project_nodes is None: - project_nodes = { - NodeID(node_id): ProjectNodeCreate( - node_id=NodeID(node_id), - required_resources={}, - key=node_info.get("key"), - version=node_info.get("version"), - label=node_info.get("label"), - ) - for node_id, node_info in selected_values[ - "workbench" - ].items() - } - - nodes = [ - project_nodes.get( - NodeID(node_id), - ProjectNodeCreate( - node_id=NodeID(node_id), - required_resources={}, - key=node_info.get("key"), - version=node_info.get("version"), - label=node_info.get("label"), - ), - ) - for node_id, node_info in selected_values[ - "workbench" - ].items() - ] - await project_nodes_repo.add(conn, nodes=nodes) return selected_values async def insert_project( @@ -324,17 +294,48 @@ async def insert_project( # ensure we have the minimal amount of data here # All non-default in projects table insert_values.setdefault("name", "New Study") - insert_values.setdefault("workbench", {}) insert_values.setdefault("workspace_id", None) # must be valid uuid try: - ProjectID(str(insert_values.get("uuid"))) + ProjectID(f"{insert_values.get('uuid')}") except ValueError: if force_project_uuid: raise insert_values["uuid"] = f"{uuid1()}" + # extract workbench nodes + workbench: dict[str, Any] = insert_values.pop("workbench", {}) + project_nodes = project_nodes or {} + + # Get valid ProjectNodeCreate fields, excluding node_id since it's set separately + valid_fields = ProjectNodeCreate.get_field_names(exclude={"node_id"}) + + # Mapping from camelCase (workbench) to snake_case (ProjectNodeCreate) + field_mapping = { + "inputAccess": "input_access", + "inputNodes": "input_nodes", + "inputsRequired": "inputs_required", + "inputsUnits": "inputs_units", + "outputNodes": "output_nodes", + "runHash": "run_hash", + "bootOptions": "boot_options", + } + + project_nodes |= { + NodeID(node_id): ProjectNodeCreate( + node_id=NodeID(node_id), + **{ + str(field_mapping.get(field, field)): value + for field, value in Node.model_validate(project_workbench_node) + .model_dump(mode="json", by_alias=True) + .items() + if field_mapping.get(field, field) in valid_fields + }, + ) + for node_id, project_workbench_node in workbench.items() + } + inserted_project = await self._insert_project_in_db( insert_values, force_project_uuid=force_project_uuid, @@ -343,6 +344,8 @@ async def insert_project( project_nodes=project_nodes, ) + inserted_project["workbench"] = workbench + async with self.engine.acquire() as conn: # Returns created project with names as in the project schema user_email = await self._get_user_email(conn, user_id) @@ -398,7 +401,6 @@ def _create_private_workspace_query( private_workspace_query = ( sa.select( *PROJECT_DB_COLS, - projects.c.workbench, projects_to_products.c.product_name, projects_to_folders.c.folder_id, ) @@ -457,7 +459,6 @@ def _create_shared_workspace_query( shared_workspace_query = ( sa.select( *PROJECT_DB_COLS, - projects.c.workbench, projects_to_products.c.product_name, projects_to_folders.c.folder_id, ) @@ -681,8 +682,11 @@ async def list_projects_dicts( # pylint: disable=too-many-arguments,too-many-st # Therefore, if we use this model, it will return those 
default values, which is not backward-compatible # with the frontend. The frontend would need to check and adapt how it handles default values in # Workbench nodes, which are currently not returned if not set in the DB. - ProjectListAtDB.model_validate(row) - prjs_output.append(dict(row.items())) + prj_dict = dict(row.items()) | { + "workbench": await get_project_workbench(conn, row.uuid), + } + ProjectListAtDB.model_validate(prj_dict) + prjs_output.append(prj_dict) return ( prjs_output, @@ -729,7 +733,6 @@ async def get_project_db(self, project_uuid: ProjectID) -> ProjectDBGet: result = await conn.execute( sa.select( *PROJECT_DB_COLS, - projects.c.workbench, ).where(projects.c.uuid == f"{project_uuid}") ) row = await result.fetchone() @@ -1067,6 +1070,7 @@ async def add_project_node( project_nodes_repo = ProjectNodesRepo(project_uuid=project_id) async with self.engine.acquire() as conn: await project_nodes_repo.add(conn, nodes=[node]) + await self._update_project_workbench_with_lock_and_notify( partial_workbench_data, user_id=user_id, diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy_utils.py b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy_utils.py index 5b482452625..8c088dfad06 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy_utils.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy_utils.py @@ -6,6 +6,7 @@ from typing import Any, Literal, cast import sqlalchemy as sa +from sqlalchemy.ext.asyncio import AsyncConnection from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy from models_library.projects import ProjectID, ProjectType @@ -14,6 +15,10 @@ from models_library.utils.change_case import camel_to_snake, snake_to_camel from pydantic import ValidationError from simcore_postgres_database.models.project_to_groups import project_to_groups +from simcore_postgres_database.utils_projects_nodes import ( + ProjectNodesRepo, + create_workbench_subquery, +) from simcore_postgres_database.webserver_models import ( ProjectTemplateType as ProjectTemplateTypeDB, ) @@ -217,20 +222,27 @@ async def _get_project( ), ).label("access_rights"), ) - .where(project_to_groups.c.project_uuid == f"{project_uuid}") + .where(project_to_groups.c.project_uuid == project_uuid) .group_by(project_to_groups.c.project_uuid) ).subquery("access_rights_subquery") + workbench_subquery = create_workbench_subquery(project_uuid) + query = ( sa.select( *PROJECT_DB_COLS, - projects.c.workbench, users.c.primary_gid.label("trashed_by_primary_gid"), access_rights_subquery.c.access_rights, + sa.func.coalesce( + workbench_subquery.c.workbench, sa.text("'{}'::json") + ).label("workbench"), ) .select_from( - projects.join(access_rights_subquery, isouter=True).outerjoin( - users, projects.c.trashed_by == users.c.id + projects.join(access_rights_subquery, isouter=True) + .outerjoin(users, projects.c.trashed_by == users.c.id) + .outerjoin( + workbench_subquery, + projects.c.uuid == workbench_subquery.c.project_uuid, ) ) .where( @@ -365,3 +377,20 @@ def patch_workbench( # patch current_node_data.update(new_node_data) return (patched_project, changed_entries) + + +async def get_project_workbench( + connection: AsyncConnection, + project_uuid: str, +) -> dict[str, Any]: + project_nodes_repo = ProjectNodesRepo(project_uuid=ProjectID(project_uuid)) + exclude_fields = {"node_id", "required_resources", "created", "modified"} + 
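# Illustrative sketch (not part of this patch): several queries in this change replace the
# old projects.workbench column with an aggregated subquery over projects_nodes, outer-joined
# and wrapped in COALESCE so projects without nodes still yield an empty JSON workbench.
# The tables below are simplified placeholders; only the query shape matches the patch.
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

_md = sa.MetaData()
_projects = sa.Table("projects", _md, sa.Column("uuid", sa.String, primary_key=True))
_nodes = sa.Table(
    "projects_nodes",
    _md,
    sa.Column("project_uuid", sa.String),
    sa.Column("node_id", sa.String),
    sa.Column("label", sa.String),
)

# aggregate node rows into one JSON object per project (stand-in for create_workbench_subquery)
_workbench_sq = (
    sa.select(
        _nodes.c.project_uuid,
        sa.func.json_object_agg(_nodes.c.node_id, _nodes.c.label).label("workbench"),
    )
    .group_by(_nodes.c.project_uuid)
    .subquery("workbench_subquery")
)

_query = sa.select(
    _projects.c.uuid,
    sa.func.coalesce(_workbench_sq.c.workbench, sa.text("'{}'::json")).label("workbench"),
).select_from(
    _projects.outerjoin(_workbench_sq, _projects.c.uuid == _workbench_sq.c.project_uuid)
)

print(_query.compile(dialect=postgresql.dialect()))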
workbench: dict[str, Any] = {} + + project_nodes = await project_nodes_repo.list(connection) + for project_node in project_nodes: + node_data = project_node.model_dump( + exclude=exclude_fields, exclude_none=True, exclude_unset=True + ) + workbench[f"{project_node.node_id}"] = node_data + return workbench diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py index 12bbc4b2e30..eafc4a95709 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py @@ -104,7 +104,6 @@ from servicelib.utils import fire_and_forget_task, limited_gather, logged_gather from simcore_postgres_database.models.users import UserRole from simcore_postgres_database.utils_projects_nodes import ( - ProjectNodeCreate, ProjectNodesNodeNotFoundError, ) from simcore_postgres_database.webserver_models import ProjectType @@ -176,13 +175,42 @@ ProjectTooManyUserSessionsError, ProjectTypeAndTemplateIncompatibilityError, ) -from .models import ProjectDBGet, ProjectDict, ProjectPatchInternalExtended +from .models import ( + ProjectDBGet, + ProjectDict, + ProjectPatchInternalExtended, + ProjectWithWorkbenchDBGet, +) from .settings import ProjectsSettings, get_plugin_settings from .utils import extract_dns_without_default_port _logger = logging.getLogger(__name__) +async def _create_project_document_and_notify( + app, + *, + project_id: ProjectID, + user_id: UserID, + client_session_id: ClientSessionID | None, +): + ( + project_document, + document_version, + ) = await create_project_document_and_increment_version(app, project_id) + + user_primary_gid = await users_service.get_user_primary_group_id(app, user_id) + + await notify_project_document_updated( + app=app, + project_id=project_id, + user_primary_gid=user_primary_gid, + client_session_id=client_session_id, + version=document_version, + document=project_document, + ) + + async def patch_project_and_notify_users( app: web.Application, *, @@ -254,7 +282,10 @@ async def get_project_for_user( """ db = ProjectDBAPI.get_from_app_context(app) - product_name = await db.get_project_product(ProjectID(project_uuid)) + product_name = await _projects_repository.get_project_product( + app, project_uuid=ProjectID(project_uuid) + ) + user_project_access = await check_user_project_permission( app, project_id=ProjectID(project_uuid), @@ -664,11 +695,11 @@ async def _check_project_node_has_all_required_inputs( permission="read", ) - project_dict, _ = await db.get_project_dict_and_type(f"{project_uuid}") + nodes = await _projects_nodes_repository.get_by_project( + app, project_id=project_uuid + ) - nodes_map: dict[NodeID, Node] = { - NodeID(k): Node(**v) for k, v in project_dict["workbench"].items() - } + nodes_map = dict(nodes) node = nodes_map[node_id] unset_required_inputs: list[str] = [] @@ -695,9 +726,10 @@ def _check_required_input(required_input_key: KeyIDStr) -> None: if output_entry is None: unset_outputs_in_upstream.append((source_output_key, source_node.label)) - assert isinstance(node.inputs_required, list) # nosec - for required_input in node.inputs_required: - _check_required_input(required_input) + if node.inputs_required is not None: + assert isinstance(node.inputs_required, list) # nosec + for required_input in node.inputs_required: + _check_required_input(required_input) node_with_required_inputs = node.label if unset_required_inputs: @@ 
-923,9 +955,9 @@ async def _safe_service_start() -> None: async def add_project_node( request: web.Request, - project: dict[str, Any], user_id: UserID, product_name: str, + project_id: ProjectID, product_api_base_url: str, service_key: ServiceKey, service_version: ServiceVersion, @@ -936,14 +968,14 @@ async def add_project_node( "starting node %s:%s in project %s for user %s", service_key, service_version, - project["uuid"], + project_id, user_id, extra=get_log_record_extra(user_id=user_id), ) await check_user_project_permission( request.app, - project_id=project["uuid"], + project_id=project_id, user_id=user_id, product_name=product_name, permission="write", @@ -953,26 +985,23 @@ async def add_project_node( default_resources = await catalog_service.get_service_resources( request.app, user_id, service_key, service_version ) - db_legacy: ProjectDBAPI = ProjectDBAPI.get_from_app_context(request.app) - assert db_legacy # nosec - await db_legacy.add_project_node( - user_id, - ProjectID(project["uuid"]), - ProjectNodeCreate( - node_id=node_uuid, - required_resources=jsonable_encoder(default_resources), + + await _projects_nodes_repository.add( + request.app, + project_id=project_id, + node_id=node_uuid, + node=Node( key=service_key, version=service_version, label=service_key.split("/")[-1], + required_resources=jsonable_encoder(default_resources), ), - Node.model_validate( - { - "key": service_key, - "version": service_version, - "label": service_key.split("/")[-1], - } - ), - product_name, + ) + + await _create_project_document_and_notify( + request.app, + project_id=project_id, + user_id=user_id, client_session_id=client_session_id, ) @@ -981,12 +1010,12 @@ async def add_project_node( await director_v2_service.create_or_update_pipeline( request.app, user_id, - project["uuid"], + project_id, product_name, product_api_base_url, ) await dynamic_scheduler_service.update_projects_networks( - request.app, project_id=ProjectID(project["uuid"]) + request.app, project_id=project_id ) if _is_node_dynamic(service_key): @@ -999,7 +1028,7 @@ async def add_project_node( product_name=product_name, product_api_base_url=product_api_base_url, user_id=user_id, - project_uuid=ProjectID(project["uuid"]), + project_uuid=project_id, node_uuid=node_uuid, ) @@ -1014,16 +1043,14 @@ async def start_project_node( project_id: ProjectID, node_id: NodeID, ): - project = await get_project_for_user(request.app, f"{project_id}", user_id) - workbench = project.get("workbench", {}) - if not workbench.get(f"{node_id}"): - raise NodeNotFoundError(project_uuid=f"{project_id}", node_uuid=f"{node_id}") - node_details = Node.model_construct(**workbench[f"{node_id}"]) + node = await _projects_nodes_repository.get( + request.app, project_id=project_id, node_id=node_id + ) await _start_dynamic_service( request, - service_key=node_details.key, - service_version=node_details.version, + service_key=node.key, + service_version=node.version, product_name=product_name, product_api_base_url=product_api_base_url, user_id=user_id, @@ -1106,12 +1133,19 @@ async def delete_project_node( fire_and_forget_tasks_collection=request.app[APP_FIRE_AND_FORGET_TASKS_KEY], ) - # remove the node from the db - db_legacy: ProjectDBAPI = request.app[APP_PROJECT_DBAPI] - assert db_legacy # nosec - await db_legacy.remove_project_node( - user_id, project_uuid, NodeID(node_uuid), client_session_id=client_session_id + await _projects_nodes_repository.delete( + request.app, + project_id=project_uuid, + node_id=NodeID(node_uuid), ) + + await 
_create_project_document_and_notify( + request.app, + project_id=project_uuid, + user_id=user_id, + client_session_id=client_session_id, + ) + # also ensure the project is updated by director-v2 since services product_name = products_web.get_product_name(request) await director_v2_service.create_or_update_pipeline( @@ -1147,8 +1181,9 @@ async def update_project_node_state( user_id, ) - db_legacy = ProjectDBAPI.get_from_app_context(app) - product_name = await db_legacy.get_project_product(project_id) + product_name = await _projects_repository.get_project_product( + app, project_uuid=project_id + ) await check_user_project_permission( app, project_id=project_id, @@ -1157,17 +1192,6 @@ async def update_project_node_state( permission="write", # NOTE: MD: before only read was sufficient, double check this ) - # Delete this once workbench is removed from the projects table - # See: https://github.com/ITISFoundation/osparc-simcore/issues/7046 - await db_legacy.update_project_node_data( - user_id=user_id, - project_uuid=project_id, - node_id=node_id, - product_name=None, - new_node_data={"state": {"currentStatus": new_state}}, - client_session_id=client_session_id, - ) - await _projects_nodes_repository.update( app, project_id=project_id, @@ -1176,6 +1200,14 @@ async def update_project_node_state( state=NodeState(current_status=RunningState(new_state)) ), ) + + await _create_project_document_and_notify( + app, + project_id=project_id, + user_id=user_id, + client_session_id=client_session_id, + ) + return await get_project_for_user( app, user_id=user_id, project_uuid=f"{project_id}", include_state=True ) @@ -1201,8 +1233,6 @@ async def patch_project_node( mode="json", exclude_unset=True, by_alias=True ) - _projects_repository_legacy = ProjectDBAPI.get_from_app_context(app) - # 1. Check user permissions await check_user_project_permission( app, @@ -1214,14 +1244,15 @@ async def patch_project_node( # 2. If patching service key or version make sure it's valid if _node_patch_exclude_unset.get("key") or _node_patch_exclude_unset.get("version"): - _project, _ = await _projects_repository_legacy.get_project_dict_and_type( - project_uuid=f"{project_id}" + _project_node = await _projects_nodes_repository.get( + app, + project_id=project_id, + node_id=node_id, ) - _project_node_data = _project["workbench"][f"{node_id}"] - _service_key = _node_patch_exclude_unset.get("key", _project_node_data["key"]) + _service_key = _node_patch_exclude_unset.get("key", _project_node.key) _service_version = _node_patch_exclude_unset.get( - "version", _project_node_data["version"] + "version", _project_node.version ) rabbitmq_rpc_client = get_rabbitmq_rpc_client(app) await catalog_rpc.check_for_service( @@ -1233,15 +1264,6 @@ async def patch_project_node( ) # 3. Patch the project node - updated_project, _ = await _projects_repository_legacy.update_project_node_data( - user_id=user_id, - project_uuid=project_id, - node_id=node_id, - product_name=product_name, - new_node_data=_node_patch_exclude_unset, - client_session_id=client_session_id, - ) - await _projects_nodes_repository.update( app, project_id=project_id, @@ -1249,6 +1271,13 @@ async def patch_project_node( partial_node=partial_node, ) + await _create_project_document_and_notify( + app, + project_id=project_id, + user_id=user_id, + client_session_id=client_session_id, + ) + # 4. Make calls to director-v2 to keep data in sync (ex. 
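# Illustrative sketch (not part of this patch): every node mutation in this refactor (add,
# delete, patch, state or output update) is followed by the same two steps, rebuilding the
# project document and notifying listeners. The toy coroutine below mirrors that ordering
# with placeholder callables; all names here are invented, not the real webserver API.
import asyncio
from typing import Any, Awaitable, Callable


async def mutate_then_notify(
    mutate: Callable[[], Awaitable[None]],
    build_document: Callable[[], Awaitable[tuple[dict[str, Any], int]]],
    notify: Callable[[dict[str, Any], int], Awaitable[None]],
) -> None:
    await mutate()  # e.g. insert/delete/update a row in projects_nodes
    document, version = await build_document()  # increments the document version
    await notify(document, version)  # fan out the new version to connected clients


async def _demo() -> None:
    await mutate_then_notify(
        mutate=lambda: asyncio.sleep(0),
        build_document=lambda: asyncio.sleep(0, result=({"workbench": {}}, 1)),
        notify=lambda doc, version: asyncio.sleep(0),
    )


asyncio.run(_demo())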
comp_* DB tables) await director_v2_service.create_or_update_pipeline( app, @@ -1262,18 +1291,26 @@ async def patch_project_node( app, project_id=project_id ) - updated_project = await add_project_states_for_user( - user_id=user_id, project=updated_project, app=app + updated_project: ProjectWithWorkbenchDBGet = ( + await _projects_repository.get_project_with_workbench( + app, project_uuid=project_id + ) + ) + + updated_project_with_states = await add_project_states_for_user( + user_id=user_id, project=updated_project.model_dump(mode="json"), app=app ) # 5. if inputs/outputs have been changed all depending nodes shall be notified if {"inputs", "outputs"} & _node_patch_exclude_unset.keys(): - for node_uuid in updated_project["workbench"]: + for node_uuid in updated_project_with_states["workbench"]: await notify_project_node_update( - app, updated_project, node_uuid, errors=None + app, updated_project_with_states, node_uuid, errors=None ) return - await notify_project_node_update(app, updated_project, node_id, errors=None) + await notify_project_node_update( + app, updated_project.model_dump(mode="json"), node_id, errors=None + ) async def update_project_node_outputs( @@ -1299,8 +1336,9 @@ async def update_project_node_outputs( ) new_outputs = new_outputs or {} - db_legacy = ProjectDBAPI.get_from_app_context(app) - product_name = await db_legacy.get_project_product(project_id) + product_name = await _projects_repository.get_project_product( + app, project_uuid=project_id + ) await check_user_project_permission( app, project_id=project_id, @@ -1309,16 +1347,7 @@ async def update_project_node_outputs( permission="write", # NOTE: MD: before only read was sufficient, double check this ) - updated_project, changed_entries = await db_legacy.update_project_node_data( - user_id=user_id, - project_uuid=project_id, - node_id=node_id, - product_name=None, - new_node_data={"outputs": new_outputs, "runHash": new_run_hash}, - client_session_id=client_session_id, - ) - - await _projects_nodes_repository.update( + updated_node = await _projects_nodes_repository.update( app, project_id=project_id, node_id=node_id, @@ -1327,23 +1356,30 @@ async def update_project_node_outputs( ), ) + await _create_project_document_and_notify( + app, + project_id=project_id, + user_id=user_id, + client_session_id=client_session_id, + ) + _logger.debug( "patched project %s, following entries changed: %s", project_id, - pformat(changed_entries), + pformat(updated_node), ) - updated_project = await add_project_states_for_user( - user_id=user_id, project=updated_project, app=app + + updated_project = await _projects_repository.get_project_with_workbench( + app, project_uuid=project_id ) - # changed entries come in the form of {node_uuid: {outputs: {changed_key1: value1, changed_key2: value2}}} - # we do want only the key names - changed_keys = ( - changed_entries.get(TypeAdapter(NodeIDStr).validate_python(f"{node_id}"), {}) - .get("outputs", {}) - .keys() + updated_project_with_states = await add_project_states_for_user( + user_id=user_id, + project=updated_project.model_dump(mode="json"), + app=app, ) - return updated_project, changed_keys + + return updated_project_with_states, list(new_outputs.keys()) async def list_node_ids_in_project( @@ -1436,6 +1472,7 @@ async def _trigger_connected_service_retrieve( # find the nodes that need to retrieve data for node_uuid, node in workbench.items(): + # check this node is dynamic if not _is_node_dynamic(node["key"]): continue @@ -1447,9 +1484,11 @@ async def 
_trigger_connected_service_retrieve( if not isinstance(port_value, dict): continue - input_node_uuid = port_value.get("nodeUuid") + # FIXME: hack to support both field and alias names because cannot guarantee which one is stored in workbench + input_node_uuid = port_value.get("nodeUuid", port_value.get("node_uuid")) if input_node_uuid != updated_node_uuid: continue + # so this node is linked to the updated one, now check if the port was changed? linked_input_port = port_value.get("output") if linked_input_port in changed_keys: @@ -1457,8 +1496,8 @@ async def _trigger_connected_service_retrieve( # call /retrieve on the nodes update_tasks = [ - dynamic_scheduler_service.retrieve_inputs(app, NodeID(node), keys) - for node, keys in nodes_keys_to_update.items() + dynamic_scheduler_service.retrieve_inputs(app, NodeID(node_id), keys) + for node_id, keys in nodes_keys_to_update.items() ] await logged_gather(*update_tasks, reraise=False) @@ -1932,15 +1971,15 @@ async def add_project_states_for_user( user_primrary_groupid=user_primary_group_id, ) if NodeID(node_uuid) in computational_node_states: - node_state = computational_node_states[NodeID(node_uuid)].model_copy( - update={"lock_state": node_lock_state} - ) + computed_node_state = computational_node_states[ + NodeID(node_uuid) + ].model_copy(update={"lock_state": node_lock_state}) else: # if the node is not in the computational state, we create a new one service_is_running = node_lock_state and ( node_lock_state.status is NodeShareStatus.OPENED ) - node_state = NodeState( + computed_node_state = NodeState( current_status=( RunningState.STARTED if service_is_running @@ -1950,9 +1989,17 @@ async def add_project_states_for_user( ) # upgrade the project - node.setdefault("state", {}).update( - node_state.model_dump(mode="json", by_alias=True, exclude_unset=True) + # NOTE: copy&dump step avoids both alias and field-names to be keys in the dict + # e.g. "current_status" and "currentStatus" + current_node_state = NodeState.model_validate( + node.get("state") + or {} # NOTE: that node.get("state") can exists and be None! + ) + updated_node_state = current_node_state.model_copy( + update=computed_node_state.model_dump(mode="json", exclude_unset=True) ) + node["state"] = updated_node_state.model_dump(by_alias=True, exclude_unset=True) + if "progress" in node["state"] and node["state"]["progress"] is not None: # ensure progress is a percentage node["progress"] = round(node["state"]["progress"] * 100.0) diff --git a/services/web/server/src/simcore_service_webserver/projects/_tags_service.py b/services/web/server/src/simcore_service_webserver/projects/_tags_service.py index d7f1af590a2..e733747e67e 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_tags_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_tags_service.py @@ -8,6 +8,7 @@ from models_library.workspaces import UserWorkspaceWithAccessRights from ..workspaces import _workspaces_repository as workspaces_workspaces_repository +from . 
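# Illustrative sketch (not part of this patch): workbench payloads may store port links
# either under the pydantic field name ("node_uuid") or its alias ("nodeUuid"), so the
# patched lookup checks both spellings, preferring the alias. A tiny standalone version
# of that fallback:
from typing import Any


def get_by_alias_or_field(data: dict[str, Any], field: str, alias: str) -> Any | None:
    # prefer the alias (what the frontend historically stored), fall back to the field name
    return data.get(alias, data.get(field))


assert get_by_alias_or_field({"nodeUuid": "a"}, "node_uuid", "nodeUuid") == "a"
assert get_by_alias_or_field({"node_uuid": "b"}, "node_uuid", "nodeUuid") == "b"
assert get_by_alias_or_field({}, "node_uuid", "nodeUuid") is None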
import _projects_repository from ._access_rights_service import check_user_project_permission from ._projects_repository_legacy import ProjectDBAPI from .models import ProjectDict @@ -20,7 +21,9 @@ async def add_tag( ) -> ProjectDict: db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(app) - product_name = await db.get_project_product(project_uuid) + product_name = await _projects_repository.get_project_product( + app, project_uuid=project_uuid + ) await check_user_project_permission( app, project_id=project_uuid, diff --git a/services/web/server/tests/integration/01/test_computation.py b/services/web/server/tests/integration/01/test_computation.py index d9ac678cde7..6b788b7b4fa 100644 --- a/services/web/server/tests/integration/01/test_computation.py +++ b/services/web/server/tests/integration/01/test_computation.py @@ -30,8 +30,8 @@ from settings_library.rabbit import RabbitSettings from settings_library.redis import RedisSettings from simcore_postgres_database.models.comp_runs_collections import comp_runs_collections -from simcore_postgres_database.models.projects import projects from simcore_postgres_database.models.projects_metadata import projects_metadata +from simcore_postgres_database.models.projects_nodes import projects_nodes from simcore_postgres_database.models.users import UserRole from simcore_postgres_database.webserver_models import ( NodeClass, @@ -255,14 +255,12 @@ async def _get_project_workbench_from_db( # this check is only there to check the comp_pipeline is there print(f"--> looking for project {project_id=} in projects table...") async with sqlalchemy_async_engine.connect() as conn: - project_in_db = ( - await conn.execute(sa.select(projects).where(projects.c.uuid == project_id)) - ).one() + result = await conn.execute( + sa.select(projects_nodes).where(projects_nodes.c.project_uuid == project_id) + ) + rows = result.mappings().all() - print( - f"<-- found following workbench: {json_dumps(project_in_db.workbench, indent=2)}" - ) - return project_in_db.workbench + return {row["node_id"]: dict(row) for row in rows} async def _assert_and_wait_for_pipeline_state( @@ -332,16 +330,16 @@ async def _assert_and_wait_for_comp_task_states_to_be_transmitted_in_projects( # if this one is in, the other should also be but let's check it carefully assert node_values.run_hash - assert "runHash" in node_in_project_table - assert node_values.run_hash == node_in_project_table["runHash"] + assert "run_hash" in node_in_project_table + assert node_values.run_hash == node_in_project_table["run_hash"] assert node_values.state assert "state" in node_in_project_table - assert "currentStatus" in node_in_project_table["state"] + assert "current_status" in node_in_project_table["state"] # NOTE: beware that the comp_tasks has StateType and Workbench has RunningState (sic) assert ( DB_TO_RUNNING_STATE[node_values.state].value - == node_in_project_table["state"]["currentStatus"] + == node_in_project_table["state"]["current_status"] ) print( "--> tasks were properly transferred! 
" diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py b/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py index 3e535cab5b5..f1e6c01e6b8 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py @@ -240,7 +240,7 @@ async def test_creating_new_project_from_template_without_copying_data_creates_s EXPECTED_DELETED_FIELDS = ["outputs", "progress", "runHash"] for node_data in project_workbench.values(): for field in EXPECTED_DELETED_FIELDS: - assert field not in node_data + assert field not in node_data or not node_data[field] @pytest.mark.parametrize(*_standard_user_role_response()) @@ -291,7 +291,7 @@ async def test_creating_new_project_as_template_without_copying_data_creates_ske EXPECTED_DELETED_FIELDS = ["outputs", "progress", "runHash"] for node_data in project_workbench.values(): for field in EXPECTED_DELETED_FIELDS: - assert field not in node_data + assert field not in node_data or not node_data[field] @pytest.mark.parametrize(*_standard_user_role_response()) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py index 2b457d3579a..3fa5482fddf 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py @@ -15,6 +15,7 @@ import sqlalchemy as sa from aiohttp.test_utils import TestClient from aioresponses import aioresponses +from deepdiff import DeepDiff from faker import Faker from models_library.api_schemas_directorv2.dynamic_services import ( GetProjectInactivityResponse, @@ -23,7 +24,10 @@ from models_library.products import ProductName from pydantic import TypeAdapter from pytest_mock import MockerFixture -from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.assert_checks import ( + assert_equal_ignoring_none, + assert_status, +) from pytest_simcore.helpers.webserver_parametrizations import ( ExpectedResponse, MockedStorageSubsystem, @@ -174,7 +178,9 @@ async def _assert_get_same_project( project_permalink = data.pop("permalink", None) folder_id = data.pop("folderId", None) - assert data == {k: project[k] for k in data} + assert not DeepDiff( + data, {k: project[k] for k in data}, exclude_paths="root['lastChangeDate']" + ) if project_state: assert ProjectStateOutputSchema.model_validate(project_state) @@ -215,7 +221,11 @@ async def test_list_projects( project_permalink = got.pop("permalink") folder_id = got.pop("folderId") - assert got == {k: template_project[k] for k in got} + assert not DeepDiff( + got, + {k: template_project[k] for k in got}, + exclude_paths="root['lastChangeDate']", + ) assert not ProjectStateOutputSchema( **project_state @@ -228,7 +238,11 @@ async def test_list_projects( project_permalink = got.pop("permalink", None) folder_id = got.pop("folderId") - assert got == {k: user_project[k] for k in got} + assert not DeepDiff( + got, + {k: user_project[k] for k in got}, + exclude_paths="root['lastChangeDate']", + ) assert ProjectStateOutputSchema(**project_state) assert project_permalink is None @@ -245,7 +259,12 @@ async def test_list_projects( project_permalink = got.pop("permalink", None) folder_id = got.pop("folderId") - assert got == {k: user_project[k] for k in got} + assert not DeepDiff( + got, + {k: user_project[k] for k in got}, + 
exclude_paths="root['lastChangeDate']", + ) + assert not ProjectStateOutputSchema( **project_state ).share_state.locked, "Single user does not lock" @@ -263,7 +282,11 @@ async def test_list_projects( project_permalink = got.pop("permalink") folder_id = got.pop("folderId") - assert got == {k: template_project[k] for k in got} + assert not DeepDiff( + got, + {k: template_project[k] for k in got}, + exclude_paths="root['lastChangeDate']", + ) assert not ProjectStateOutputSchema( **project_state ).share_state.locked, "Templates are not locked" @@ -632,7 +655,7 @@ async def test_new_template_from_project( ) assert len(templates) == 1 - assert templates[0] == template_project + assert_equal_ignoring_none(template_project, templates[0]) assert template_project["name"] == user_project["name"] assert template_project["description"] == user_project["description"] diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py index ce031477e93..0e4838e2b5c 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py @@ -53,7 +53,7 @@ from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE from settings_library.rabbit import RabbitSettings from settings_library.redis import RedisSettings -from simcore_postgres_database.models.projects import projects as projects_db_model +from simcore_postgres_database.models.projects_nodes import projects_nodes from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.projects._controller.nodes_rest import ( _ProjectNodePreview, @@ -380,16 +380,14 @@ async def test_create_node( # check database is updated assert "node_id" in data - create_node_id = data["node_id"] + node_id = data["node_id"] with postgres_db.connect() as conn: result = conn.execute( - sa.select(projects_db_model.c.workbench).where( - projects_db_model.c.uuid == user_project["uuid"] - ) + sa.select(sa.literal(1)) + .where(projects_nodes.c.node_id == node_id) + .limit(1) ) - assert result - workbench = result.one()[projects_db_model.c.workbench] - assert create_node_id in workbench + assert result.scalar() is not None else: assert error @@ -478,21 +476,22 @@ def inc_running_services(self, *args, **kwargs): # noqa: ARG002 # check that we do have NUM_DY_SERVICES nodes in the project with postgres_db.connect() as conn: result = conn.execute( - sa.select(projects_db_model.c.workbench).where( - projects_db_model.c.uuid == user_project["uuid"] + sa.select(projects_nodes.c.node_id).where( + projects_nodes.c.project_uuid == user_project["uuid"] ) ) assert result - workbench = result.one()[projects_db_model.c.workbench] - assert len(workbench) == NUM_DY_SERVICES + num_services_in_project - node_ids_in_db = set(workbench.keys()) - set(running_services.running_services_uuids).issubset(node_ids_in_db) + node_ids = result.scalars().all() + assert len(node_ids) == NUM_DY_SERVICES + num_services_in_project + assert {f"{i}" for i in running_services.running_services_uuids}.issubset(node_ids) print(f"--> {NUM_DY_SERVICES} nodes were created concurrently") + # # delete now # delete_node_tasks = [] - for node_id in workbench: + + for node_id in node_ids: delete_url = client.app.router["delete_node"].url_for( project_id=user_project["uuid"], node_id=node_id ) @@ -615,13 +614,13 @@ async def inc_running_services(self, *args, **kwargs): # noqa: ARG002 # check that we 
do have NUM_DY_SERVICES nodes in the project with postgres_db.connect() as conn: result = conn.execute( - sa.select(projects_db_model.c.workbench).where( - projects_db_model.c.uuid == project["uuid"] + sa.select(projects_nodes.c.node_id).where( + projects_nodes.c.project_uuid == project["uuid"] ) ) assert result - workbench = result.one()[projects_db_model.c.workbench] - assert len(workbench) == NUM_DY_SERVICES + node_ids = result.scalars().all() + assert len(node_ids) == NUM_DY_SERVICES @pytest.mark.parametrize(*standard_user_role()) @@ -775,13 +774,11 @@ async def test_delete_node( # ensure the node is gone with postgres_db.connect() as conn: result = conn.execute( - sa.select(projects_db_model.c.workbench).where( - projects_db_model.c.uuid == user_project["uuid"] - ) + sa.select(sa.literal(1)) + .where(projects_nodes.c.node_id == node_id) + .limit(1) ) - assert result - workbench = result.one()[projects_db_model.c.workbench] - assert node_id not in workbench + assert result.scalar() is None @pytest.fixture diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py index e1cfe13f8b2..59958de8acf 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py @@ -12,6 +12,7 @@ import pytest from aiohttp.test_utils import TestClient +from deepdiff import DeepDiff from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.webserver_users import UserInfoDict @@ -168,7 +169,9 @@ async def test_patch_project_node( "output_1": { "store": 0, "path": "9934cba6-4b51-11ef-968a-02420a00f1c1/571ffc8d-fa6e-411f-afc8-9c62d08dd2fa/matus.txt", + "label": "matus.txt", "eTag": "d41d8cd98f00b204e9800998ecf8427e", + "dataset": None, } } } @@ -185,7 +188,6 @@ async def test_patch_project_node( _tested_node = data["workbench"][node_id] assert _tested_node["label"] == "testing-string" - assert _tested_node["progress"] is None assert _tested_node["key"] == _patch_key["key"] assert _tested_node["version"] == _patch_version["version"] assert _tested_node["inputs"] == _patch_inputs["inputs"] @@ -262,10 +264,14 @@ async def test_patch_project_node_inputs_notifies( await assert_status(resp, expected) assert mocked_notify_project_node_update.call_count > 1 # 1 message per node updated - assert [ - call_args[0][2] - for call_args in mocked_notify_project_node_update.await_args_list - ] == list(user_project["workbench"].keys()) + assert not DeepDiff( + [ + call_args[0][2] + for call_args in mocked_notify_project_node_update.await_args_list + ], + list(user_project["workbench"].keys()), + ignore_order=True, + ) @pytest.mark.parametrize( diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py index e8a1536c5e4..bd2b14a4ad0 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py @@ -12,6 +12,7 @@ import pytest from aiohttp.test_utils import TestClient from aioresponses import aioresponses as AioResponsesMock # noqa: N812 +from deepdiff import DeepDiff from models_library.api_schemas_directorv2.computations import TasksOutputs from models_library.api_schemas_webserver.projects import ProjectGet 
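# Illustrative sketch (not part of this patch): the updated tests compare project payloads
# with deepdiff.DeepDiff instead of strict equality, so volatile fields such as
# lastChangeDate can be excluded and list order ignored. Minimal usage example (requires
# the third-party deepdiff package); the sample dicts are invented for illustration.
from deepdiff import DeepDiff

got = {"name": "study", "lastChangeDate": "2025-07-22T19:25:42Z", "tags": [2, 1]}
expected = {"name": "study", "lastChangeDate": "2020-01-01T00:00:00Z", "tags": [1, 2]}

diff = DeepDiff(got, expected, exclude_paths="root['lastChangeDate']", ignore_order=True)
assert not diff  # an empty diff means "equal except for the excluded/volatile parts"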
 from models_library.utils.fastapi_encoders import jsonable_encoder
@@ -74,7 +75,7 @@ def mock_directorv2_service_api_responses(
     return aioresponses_mocker


-@pytest.mark.acceptance_test()
+@pytest.mark.acceptance_test
 @pytest.mark.parametrize(
     "user_role,expected",
     [
@@ -109,55 +110,62 @@ async def test_io_workflow(
     ports_meta, error = await assert_status(resp, expected_status_code=expected)

     if not error:
-        assert ports_meta == [
-            {
-                "key": "38a0d401-af4b-4ea7-ab4c-5005c712a546",
-                "kind": "input",
-                "content_schema": {
-                    "description": "Input integer value",
-                    "title": "X",
-                    "type": "integer",
+        diff = DeepDiff(
+            ports_meta,
+            [
+                {
+                    "key": "38a0d401-af4b-4ea7-ab4c-5005c712a546",
+                    "kind": "input",
+                    "content_schema": {
+                        "description": "Input integer value",
+                        "title": "X",
+                        "type": "integer",
+                    },
                 },
-            },
-            {
-                "key": "fc48252a-9dbb-4e07-bf9a-7af65a18f612",
-                "kind": "input",
-                "content_schema": {
-                    "description": "Input integer value",
-                    "title": "Z",
-                    "type": "integer",
+                {
+                    "key": "fc48252a-9dbb-4e07-bf9a-7af65a18f612",
+                    "kind": "input",
+                    "content_schema": {
+                        "description": "Input integer value",
+                        "title": "Z",
+                        "type": "integer",
+                    },
                 },
-            },
-            {
-                "key": "7bf0741f-bae4-410b-b662-fc34b47c27c9",
-                "kind": "input",
-                "content_schema": {
-                    "description": "Input boolean value",
-                    "title": "on",
-                    "type": "boolean",
+                {
+                    "key": "7bf0741f-bae4-410b-b662-fc34b47c27c9",
+                    "kind": "input",
+                    "content_schema": {
+                        "description": "Input boolean value",
+                        "title": "on",
+                        "type": "boolean",
+                    },
                 },
-            },
-            {
-                "key": "09fd512e-0768-44ca-81fa-0cecab74ec1a",
-                "kind": "output",
-                "content_schema": {
-                    "description": "Output integer value",
-                    "title": "Random sleep interval_2",
-                    "type": "integer",
+                {
+                    "key": "09fd512e-0768-44ca-81fa-0cecab74ec1a",
+                    "kind": "output",
+                    "content_schema": {
+                        "description": "Output integer value",
+                        "title": "Random sleep interval_2",
+                        "type": "integer",
+                    },
                 },
-            },
-            {
-                "key": "76f607b4-8761-4f96-824d-cab670bc45f5",
-                "kind": "output",
-                "content_schema": {
-                    "description": "Output integer value",
-                    "title": "Random sleep interval",
-                    "type": "integer",
+                {
+                    "key": "76f607b4-8761-4f96-824d-cab670bc45f5",
+                    "kind": "output",
+                    "content_schema": {
+                        "description": "Output integer value",
+                        "title": "Random sleep interval",
+                        "type": "integer",
+                    },
                 },
-            },
-        ]
+            ],
+            ignore_order=True,
+        )

-        assert ports_meta == PROJECTS_METADATA_PORTS_RESPONSE_BODY_DATA
+        assert not diff
+        assert not DeepDiff(
+            ports_meta, PROJECTS_METADATA_PORTS_RESPONSE_BODY_DATA, ignore_order=True
+        )

     # get_project_inputs
     expected_url = client.app.router["get_project_inputs"].url_for(
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
index 466cae4766b..cfa61a8e5df 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
@@ -22,6 +22,7 @@
 import sqlalchemy as sa
 from aiohttp import ClientResponse
 from aiohttp.test_utils import TestClient, TestServer
+from deepdiff import DeepDiff  # type: ignore[attr-defined]
 from faker import Faker
 from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet
 from models_library.api_schemas_dynamic_scheduler.dynamic_services import (
@@ -95,10 +96,15 @@ def assert_replaced(current_project, update_data):
     def _extract(dikt, keys):
         return {k: dikt[k] for k in keys}

-    modified = [
+    skip = [
         "lastChangeDate",
+        "templateType",
+        "trashedAt",
+        "trashedBy",
+        "workspaceId",
+        "folderId",
     ]
-    keep = [k for k in update_data if k not in modified]
+    keep = [k for k in update_data if k not in skip]

     assert _extract(current_project, keep) == _extract(update_data, keep)
@@ -1200,7 +1206,7 @@ async def test_get_active_project(
     )
     assert not error
     assert ProjectStateOutputSchema(**data.pop("state")).share_state.locked
-    data.pop("folderId")
+    data.pop("folderId", None)

     user_project_last_change_date = user_project.pop("lastChangeDate")
     data_last_change_date = data.pop("lastChangeDate")
@@ -2114,7 +2120,11 @@ async def test_open_shared_project_at_same_time(
         elif data:
             project_status = ProjectStateOutputSchema(**data.pop("state"))
             data.pop("folderId")
-            assert data == {k: shared_project[k] for k in data}
+            assert not DeepDiff(
+                data,
+                {k: shared_project[k] for k in data},
+                exclude_paths=["root['lastChangeDate']"],
+            )
             assert project_status.share_state.locked
             assert project_status.share_state.current_user_groupids
             assert len(project_status.share_state.current_user_groupids) == 1
diff --git a/services/web/server/tests/unit/with_dbs/03/test_project_db.py b/services/web/server/tests/unit/with_dbs/03/test_project_db.py
index e8731a32fb0..6a07ec722bc 100644
--- a/services/web/server/tests/unit/with_dbs/03/test_project_db.py
+++ b/services/web/server/tests/unit/with_dbs/03/test_project_db.py
@@ -19,7 +19,7 @@
 from aiohttp.test_utils import TestClient
 from faker import Faker
 from models_library.projects import ProjectID, ProjectTemplateType
-from models_library.projects_nodes_io import NodeID, NodeIDStr
+from models_library.projects_nodes_io import NodeID
 from psycopg2.errors import UniqueViolation
 from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
 from pytest_simcore.helpers.typing_env import EnvVarsDict
@@ -45,9 +45,7 @@
 )
 from simcore_service_webserver.projects.api import has_user_project_access_rights
 from simcore_service_webserver.projects.exceptions import (
-    NodeNotFoundError,
     ProjectNodeRequiredInputsNotSetError,
-    ProjectNotFoundError,
 )
 from simcore_service_webserver.users.exceptions import UserNotFoundError
 from simcore_service_webserver.utils import to_datetime
@@ -168,7 +166,6 @@ def _assert_project_db_row(
         "description": project["description"],
         "thumbnail": project["thumbnail"],
         "prj_owner": None,
-        "workbench": project["workbench"],
         "published": False,
         "dev": project["dev"],
         "classifiers": project["classifiers"],
@@ -179,9 +176,15 @@ def _assert_project_db_row(
     }
     expected_db_entries.update(kwargs)
     assert row
+    # Remove last_change_date from strict equality check
     project_entries_in_db = {k: row[k] for k in expected_db_entries}
+    project_last_change = project_entries_in_db.pop("last_change_date", None)
+    expected_db_entries.pop("last_change_date", None)
     assert project_entries_in_db == expected_db_entries
-    assert row["last_change_date"] >= row["creation_date"]
+    # last_change_date should be >= creation_date
+    assert project_last_change is not None
+    assert row["creation_date"] is not None
+    assert project_last_change >= row["creation_date"]


 @pytest.fixture
@@ -361,270 +364,6 @@ async def test_insert_project_to_db(
     await _assert_projects_nodes_db_rows(aiopg_engine, new_project)


-@pytest.mark.parametrize(
-    "user_role",
-    [UserRole.USER],
-)
-async def test_patch_user_project_workbench_raises_if_project_does_not_exist(
-    fake_project: dict[str, Any],
-    logged_user: dict[str, Any],
-    db_api: ProjectDBAPI,
-    faker: Faker,
-):
-    partial_workbench_data = {
-        faker.uuid4(): {
-            "key": "simcore/services/comp/sleepers",
-            "version": faker.numerify("%.#.#"),
-            "label": "I am a test node",
-        }
-    }
-    with pytest.raises(ProjectNotFoundError):
-        await db_api._update_project_workbench(  # noqa: SLF001
-            partial_workbench_data,
-            user_id=logged_user["id"],
-            project_uuid=fake_project["uuid"],
-            allow_workbench_changes=False,
-        )
-
-
-@pytest.mark.parametrize(
-    "user_role",
-    [UserRole.USER],
-)
-async def test_patch_user_project_workbench_creates_nodes(
-    fake_project: dict[str, Any],
-    logged_user: dict[str, Any],
-    db_api: ProjectDBAPI,
-    faker: Faker,
-    aiopg_engine: aiopg.sa.engine.Engine,
-    insert_project_in_db: Callable[..., Awaitable[dict[str, Any]]],
-):
-    empty_fake_project = deepcopy(fake_project)
-    workbench = empty_fake_project.setdefault("workbench", {})
-    assert isinstance(workbench, dict)
-    workbench.clear()
-    new_project = await insert_project_in_db(
-        empty_fake_project, user_id=logged_user["id"]
-    )
-    await _assert_projects_nodes_db_rows(aiopg_engine, new_project)
-    partial_workbench_data = {
-        faker.uuid4(): {
-            "key": f"simcore/services/comp/{faker.pystr().lower()}",
-            "version": faker.numerify("%.#.#"),
-            "label": faker.text(),
-        }
-        for _ in range(faker.pyint(min_value=5, max_value=30))
-    }
-    (
-        patched_project,
-        changed_entries,
-    ) = await db_api._update_project_workbench(  # noqa: SLF001
-        partial_workbench_data,
-        user_id=logged_user["id"],
-        project_uuid=new_project["uuid"],
-        allow_workbench_changes=True,
-    )
-    for node_id in partial_workbench_data:
-        assert node_id in patched_project["workbench"]
-        assert partial_workbench_data[node_id] == patched_project["workbench"][node_id]
-        assert node_id in changed_entries
-        assert changed_entries[node_id] == partial_workbench_data[node_id]
-
-
-@pytest.mark.parametrize(
-    "user_role",
-    [UserRole.USER],
-)
-async def test_patch_user_project_workbench_creates_nodes_raises_if_invalid_node_is_passed(
-    fake_project: dict[str, Any],
-    logged_user: dict[str, Any],
-    db_api: ProjectDBAPI,
-    faker: Faker,
-    aiopg_engine: aiopg.sa.engine.Engine,
-    insert_project_in_db: Callable[..., Awaitable[dict[str, Any]]],
-):
-    empty_fake_project = deepcopy(fake_project)
-    workbench = empty_fake_project.setdefault("workbench", {})
-    assert isinstance(workbench, dict)
-    workbench.clear()
-
-    new_project = await insert_project_in_db(
-        empty_fake_project, user_id=logged_user["id"]
-    )
-    await _assert_projects_nodes_db_rows(aiopg_engine, new_project)
-    partial_workbench_data = {
-        faker.uuid4(): {
-            "version": faker.numerify("%.#.#"),
-            "label": faker.text(),
-        }
-        for _ in range(faker.pyint(min_value=5, max_value=30))
-    }
-    with pytest.raises(NodeNotFoundError):
-        await db_api._update_project_workbench(  # noqa: SLF001
-            partial_workbench_data,
-            user_id=logged_user["id"],
-            project_uuid=new_project["uuid"],
-            allow_workbench_changes=True,
-        )
-
-
-@pytest.mark.parametrize(
-    "user_role",
-    [UserRole.USER],
-)
-@pytest.mark.parametrize("number_of_nodes", [1, randint(250, 300)])  # noqa: S311
-async def test_patch_user_project_workbench_concurrently(
-    fake_project: dict[str, Any],
-    postgres_db: sa.engine.Engine,
-    logged_user: dict[str, Any],
-    primary_group: dict[str, str],
-    db_api: ProjectDBAPI,
-    number_of_nodes: int,
-    aiopg_engine: aiopg.sa.engine.Engine,
-    insert_project_in_db: Callable[..., Awaitable[dict[str, Any]]],
-):
-    _NUMBER_OF_NODES = number_of_nodes
-    BASE_UUID = UUID("ccc0839f-93b8-4387-ab16-197281060927")
-    node_uuids = [str(uuid5(BASE_UUID, f"{n}")) for n in range(_NUMBER_OF_NODES)]
-    # create a project with a lot of nodes
-    fake_project["workbench"] = {
-        node_uuids[n]: {
-            "key": "simcore/services/comp/sleepers",
-            "version": "1.43.5",
-            "label": f"I am node {n}",
-        }
-        for n in range(_NUMBER_OF_NODES)
-    }
-    expected_project = deepcopy(fake_project)
-
-    # add the project
-    original_project = deepcopy(fake_project)
-    new_project = await insert_project_in_db(fake_project, user_id=logged_user["id"])
-
-    _assert_added_project(
-        original_project,
-        new_project,
-        exp_overrides={
-            "prjOwner": logged_user["email"],
-        },
-    )
-    _assert_project_db_row(
-        postgres_db,
-        new_project,
-        prj_owner=logged_user["id"],
-    )
-    await _assert_projects_nodes_db_rows(aiopg_engine, new_project)
-
-    # patch all the nodes concurrently
-    randomly_created_outputs = [
-        {
-            "outputs": {f"out_{k}": f"{k}"}  # noqa: B035
-            for k in range(randint(1, 10))  # noqa: S311
-        }
-        for n in range(_NUMBER_OF_NODES)
-    ]
-    for n in range(_NUMBER_OF_NODES):
-        expected_project["workbench"][node_uuids[n]].update(randomly_created_outputs[n])
-
-    patched_projects: list[tuple[dict[str, Any], dict[str, Any]]] = (
-        await asyncio.gather(
-            *[
-                db_api._update_project_workbench(  # noqa: SLF001
-                    {NodeIDStr(node_uuids[n]): randomly_created_outputs[n]},
-                    user_id=logged_user["id"],
-                    project_uuid=new_project["uuid"],
-                    allow_workbench_changes=False,
-                )
-                for n in range(_NUMBER_OF_NODES)
-            ]
-        )
-    )
-    # NOTE: each returned project contains the project with some updated workbenches
-    # the ordering is uncontrolled.
-    # The important thing is that the final result shall contain ALL the changes
-
-    for (prj, changed_entries), node_uuid, exp_outputs in zip(
-        patched_projects, node_uuids, randomly_created_outputs, strict=True
-    ):
-        assert prj["workbench"][node_uuid]["outputs"] == exp_outputs["outputs"]
-        assert changed_entries == {node_uuid: {"outputs": exp_outputs["outputs"]}}
-
-    # get the latest change date
-    latest_change_date = max(
-        to_datetime(prj["lastChangeDate"]) for prj, _ in patched_projects
-    )
-
-    # check the nodes are completely patched as expected
-    _assert_project_db_row(
-        postgres_db,
-        expected_project,
-        prj_owner=logged_user["id"],
-        creation_date=to_datetime(new_project["creationDate"]),
-        last_change_date=latest_change_date,
-    )
-
-    # now concurrently remove the outputs
-    for n in range(_NUMBER_OF_NODES):
-        expected_project["workbench"][node_uuids[n]]["outputs"] = {}
-
-    patched_projects = await asyncio.gather(
-        *[
-            db_api._update_project_workbench(  # noqa: SLF001
-                {NodeIDStr(node_uuids[n]): {"outputs": {}}},
-                user_id=logged_user["id"],
-                project_uuid=new_project["uuid"],
-                allow_workbench_changes=False,
-            )
-            for n in range(_NUMBER_OF_NODES)
-        ]
-    )
-
-    # get the latest change date
-    latest_change_date = max(
-        to_datetime(prj["lastChangeDate"]) for prj, _ in patched_projects
-    )
-
-    # check the nodes are completely patched as expected
-    _assert_project_db_row(
-        postgres_db,
-        expected_project,
-        prj_owner=logged_user["id"],
-        creation_date=to_datetime(new_project["creationDate"]),
-        last_change_date=latest_change_date,
-    )
-
-    # now concurrently remove the outputs
-    for n in range(_NUMBER_OF_NODES):
-        expected_project["workbench"][node_uuids[n]]["outputs"] = {}
-
-    patched_projects = await asyncio.gather(
-        *[
-            db_api._update_project_workbench(  # noqa: SLF001
-                {NodeIDStr(node_uuids[n]): {"outputs": {}}},
-                user_id=logged_user["id"],
-                project_uuid=new_project["uuid"],
-                allow_workbench_changes=False,
-            )
-            for n in range(_NUMBER_OF_NODES)
-        ]
-    )
-
-    # get the latest change date
-    latest_change_date = max(
-        to_datetime(prj["lastChangeDate"]) for prj, _ in patched_projects
-    )
-
-    # check the nodes are completely patched as expected
-    _assert_project_db_row(
-        postgres_db,
-        expected_project,
-        prj_owner=logged_user["id"],
-        creation_date=to_datetime(new_project["creationDate"]),
-        last_change_date=latest_change_date,
-    )
-
-
 @pytest.fixture()
 async def some_projects_and_nodes(
     logged_user: dict[str, Any],
diff --git a/services/web/server/tests/unit/with_dbs/04/notifications/test_notifications__db_comp_tasks_listening_task.py b/services/web/server/tests/unit/with_dbs/04/notifications/test_notifications__db_comp_tasks_listening_task.py
index cb63e262518..9c360f74ae7 100644
--- a/services/web/server/tests/unit/with_dbs/04/notifications/test_notifications__db_comp_tasks_listening_task.py
+++ b/services/web/server/tests/unit/with_dbs/04/notifications/test_notifications__db_comp_tasks_listening_task.py
@@ -232,8 +232,11 @@ async def mock_dynamic_service_rpc(
     """
     Mocks the dynamic service RPC calls to avoid actual service calls during tests.
     """
-    return mocker.patch(
-        "servicelib.rabbitmq.rpc_interfaces.dynamic_scheduler.services.retrieve_inputs",
+    import servicelib.rabbitmq.rpc_interfaces.dynamic_scheduler.services
+
+    return mocker.patch.object(
+        servicelib.rabbitmq.rpc_interfaces.dynamic_scheduler.services,
+        "retrieve_inputs",
         autospec=True,
     )

diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py
index 9108c5b22be..73f89dde70c 100644
--- a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py
+++ b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py
@@ -78,7 +78,7 @@ async def test_workspaces_full_workflow_with_folders_and_projects(  # noqa: PLR0
     data, _ = await assert_status(resp, status.HTTP_200_OK)
     assert data["uuid"] == project["uuid"]
     assert data["workspaceId"] == added_workspace.workspace_id
-    assert data["folderId"] is None
+    assert data.get("folderId") is None

     # Create folder in workspace
     url = client.app.router["create_folder"].url_for()
diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_projects_between_workspaces.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_projects_between_workspaces.py
index 43d1484ad18..23a1fe9fc80 100644
--- a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_projects_between_workspaces.py
+++ b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_projects_between_workspaces.py
@@ -93,7 +93,7 @@ async def test_moving_between_private_and_shared_workspaces(
     base_url = client.app.router["get_project"].url_for(project_id=project["uuid"])
     resp = await client.get(f"{base_url}")
     data, _ = await assert_status(resp, status.HTTP_200_OK)
-    assert data["workspaceId"] is None  # <-- Workspace ID is None
+    assert data.get("workspaceId") is None  # <-- Workspace ID is None

     # Move project from your private workspace to shared workspace
     base_url = client.app.router["move_project_to_workspace"].url_for(
@@ -252,7 +252,7 @@ async def test_moving_between_workspaces_check_removed_from_folder(
     base_url = client.app.router["get_project"].url_for(project_id=project["uuid"])
     resp = await client.get(f"{base_url}")
     data, _ = await assert_status(resp, status.HTTP_200_OK)
-    assert data["workspaceId"] is None  # <-- Workspace ID is None
+    assert data.get("workspaceId") is None  # <-- Workspace ID is None

     # Check project_to_folders DB is empty
     with postgres_db.connect() as con:
diff --git a/tests/e2e/tutorials/sleepers_project_template_sql.csv b/tests/e2e/tutorials/sleepers_project_template_sql.csv
index 7e96b580dcf..1cd53e34516 100644
--- a/tests/e2e/tutorials/sleepers_project_template_sql.csv
+++ b/tests/e2e/tutorials/sleepers_project_template_sql.csv
@@ -1,2 +1,2 @@
-id,type,uuid,name,description,thumbnail,prj_owner,creation_date,last_change_date,workbench,published,access_rights,dev,classifiers,ui,quality,hidden,workspace_id,trashed,trashed_explicitly,trashed_by,template_type
-10,TEMPLATE,ed6c2f58-dc16-445d-bb97-e989e2611603,Sleepers,5 sleepers interconnected,"",,2019-06-06 14:34:19.631,2019-06-06 14:34:28.647,"{""027e3ff9-3119-45dd-b8a2-2e31661a7385"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 0"", ""inputs"": {""in_2"": 2}, ""inputAccess"": {""in_1"": ""Invisible"", ""in_2"": ""ReadOnly""}, ""inputNodes"": [], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 300}}, ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 1"", ""inputs"": {""in_1"": {""nodeUuid"": ""027e3ff9-3119-45dd-b8a2-2e31661a7385"", ""output"": ""out_1""}, ""in_2"": 2}, ""inputNodes"": [""027e3ff9-3119-45dd-b8a2-2e31661a7385""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}, ""bf405067-d168-44ba-b6dc-bb3e08542f92"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 2"", ""inputs"": {""in_1"": {""nodeUuid"": ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"", ""output"": ""out_2""}}, ""inputNodes"": [""562aaea9-95ff-46f3-8e84-db8f3c9e3a39""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 550, ""y"": 200}}, ""de2578c5-431e-5065-a079-a5a0476e3c10"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 3"", ""inputs"": {""in_2"": {""nodeUuid"": ""027e3ff9-3119-45dd-b8a2-2e31661a7385"", ""output"": ""out_2""}}, ""inputNodes"": [""027e3ff9-3119-45dd-b8a2-2e31661a7385""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 420, ""y"": 400}}, ""de2578c5-431e-559d-aa19-dc9293e10e4c"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 4"", ""inputs"": {""in_1"": {""nodeUuid"": ""bf405067-d168-44ba-b6dc-bb3e08542f92"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""de2578c5-431e-5065-a079-a5a0476e3c10"", ""output"": ""out_2""}}, ""inputNodes"": [""bf405067-d168-44ba-b6dc-bb3e08542f92"", ""de2578c5-431e-5065-a079-a5a0476e3c10""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 800, ""y"": 300}}}",t,"{""1"": {""read"": true, ""write"": false, ""delete"": false}}",{},{},{},{},f,,,f,,TEMPLATE
+id,type,uuid,name,description,thumbnail,prj_owner,creation_date,last_change_date,published,access_rights,dev,classifiers,ui,quality,hidden,workspace_id,trashed,trashed_explicitly,trashed_by,template_type
+10,TEMPLATE,ed6c2f58-dc16-445d-bb97-e989e2611603,Sleepers,5 sleepers interconnected,"",,2019-06-06 14:34:19.631,2019-06-06 14:34:28.647,t,"{""1"": {""read"": true, ""write"": false, ""delete"": false}}",{},{},{},{},f,,,f,,TEMPLATE