diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 38985f92..00000000 --- a/.coveragerc +++ /dev/null @@ -1 +0,0 @@ -[run] diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 00000000..8c8a9738 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,33 @@ +name: Publish Docker image + +on: + release: + types: [published] + +jobs: + push_to_registry: + name: Push Docker image to Docker Hub + runs-on: ubuntu-latest + steps: + - name: Check out the repo + uses: actions/checkout@v3 + + - name: Log in to Docker Hub + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Docker meta + id: meta + uses: docker/metadata-action@v4 + with: + images: ispyb/py-ispyb + + - name: Build and push Docker image + uses: docker/build-push-action@v3 + with: + context: . + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml deleted file mode 100644 index f7c6b1a5..00000000 --- a/.github/workflows/codeql-analysis.yml +++ /dev/null @@ -1,67 +0,0 @@ -# For most projects, this workflow file will not need changing; you simply need -# to commit it to your repository. -# -# You may wish to alter this file to override the set of languages analyzed, -# or to provide custom queries or build logic. -# -# ******** NOTE ******** -# We have attempted to detect the languages in your repository. Please check -# the `language` matrix defined below to confirm you have the correct set of -# supported CodeQL languages. -# -name: "CodeQL" - -on: - push: - branches: [ master ] - pull_request: - # The branches below must be a subset of the branches above - branches: [ master ] - schedule: - - cron: '41 21 * * 2' - -jobs: - analyze: - name: Analyze - runs-on: ubuntu-latest - - strategy: - fail-fast: false - matrix: - language: [ 'javascript', 'python' ] - # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] - # Learn more: - # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed - - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v1 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. - # queries: ./path/to/local/query, your-org/your-repo/queries@main - - # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). - # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@v1 - - # ℹī¸ Command-line programs to run using the OS shell.
- # 📚 https://git.io/JvXDl - - # ✏ī¸ If the Autobuild fails above, remove it and uncomment the following three lines - # and modify them (or add more) to build your code if your project - # uses a compiled language - - #- run: | - # make bootstrap - # make release - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v1 diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml new file mode 100644 index 00000000..3eb767f2 --- /dev/null +++ b/.github/workflows/doc.yml @@ -0,0 +1,53 @@ +name: Deploy documentation +on: + push: + branches: + - master +jobs: + build-and-deploy: + concurrency: ci-${{ github.ref }} + runs-on: ubuntu-latest + services: + mariadb: + image: ispyb/ispyb-pydb:latest + ports: + - 3306:3306 + options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3 + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Set up Python 3.10 + uses: actions/setup-python@v3 + with: + python-version: "3.10" + + - name: Install dependencies + run: | + pip install mkdocs + pip install mkdocs-material + + - name: Build + run: | + mkdocs build --strict --verbose + + - name: Create OpenAPI spec + env: + ISPYB_ENVIRONMENT: ci + run: | + sudo apt-get update && sudo apt-get install -y libldap2-dev libsasl2-dev + pip install -r requirements.txt + pip install -e . --no-deps + python scripts/generate_apispec.py + mkdir public/api + + - name: Run redoc + uses: seeebiii/redoc-cli-github-action@v10 + with: + args: "bundle -o public/api/index.html openapi.json" + + - name: Deploy + uses: JamesIves/github-pages-deploy-action@v4.3.0 + with: + branch: gh-pages + folder: public diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 00000000..00837bb9 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,27 @@ +name: Lint + +on: [push] + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v3 + - name: Run flake8 + uses: py-actions/flake8@v2 + style: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Run black + uses: psf/black@stable + security: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Run bandit + uses: tj-actions/bandit@v5.1 + with: + targets: pyispyb diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml deleted file mode 100644 index 1a03a7b6..00000000 --- a/.github/workflows/python-publish.yml +++ /dev/null @@ -1,31 +0,0 @@ -# This workflow will upload a Python Package using Twine when a release is created -# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries - -name: Upload Python Package - -on: - release: - types: [created] - -jobs: - deploy: - - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: '3.x' - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install setuptools wheel twine - - name: Build and publish - env: - TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} - TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - run: | - python setup.py sdist bdist_wheel - twine upload dist/* diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 00000000..5927c88c --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,38 @@ +name: Test + +on: [push]
+ +jobs: + test: + runs-on: ubuntu-latest + services: + mariadb: + image: ispyb/ispyb-pydb:latest + ports: + - 3306:3306 + options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3 + strategy: + matrix: + python-version: ["3.10"] + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + sudo apt-get update && sudo apt-get install -y libldap2-dev libsasl2-dev + pip install -r requirements.txt + pip install -r requirements-dev.txt + pip install -e . --no-deps + - name: Run tests + env: + ISPYB_ENVIRONMENT: ci + run: | + pytest --cov=pyispyb --cov-report xml tests/ + - name: Coverage report + uses: codecov/codecov-action@v2 + with: + files: ./coverage.xml + fail_ci_if_error: true diff --git a/.gitignore b/.gitignore index 5a6f73f0..991b8c19 100644 --- a/.gitignore +++ b/.gitignore @@ -2,12 +2,16 @@ __pycache__/ *.py[cod] *swp -.vscode* -ispyb_core_config.yml -ispyb_ssx_config.yml -ispyb_em_config.yml +/auth.yml build/ dist/ pyispyb.egg-info/ + +.tox* +.DS_Store + +.coverage +coverage.xml +*.py,cover diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 00000000..65e4bfe6 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,16 @@ +{ + "overrides": [ + { + "files": ["*.yml", "*.yaml"], + "options": { + "tabWidth": 2 + } + }, + { + "files": "*.json", + "options": { + "tabWidth": 4 + } + } + ] +} diff --git a/.pylintrc b/.pylintrc deleted file mode 100644 index 5902aaa5..00000000 --- a/.pylintrc +++ /dev/null @@ -1,380 +0,0 @@ -[MASTER] - -# Specify a configuration file. -#rcfile= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=CVS - - -# Pickle collected data for later comparisons. -persistent=yes - -# List of plugins (as comma separated values of python modules names) to load, -# usually to register additional checkers. -load-plugins= - -# Use multiple processes to speed up Pylint. -jobs=1 - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=yes - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code -#extension-pkg-whitelist= - -# Allow optimization of some AST trees. This will activate a peephole AST -# optimizer, which will apply various small optimizations. For instance, it can -# be used to obtain the result of joining multiple strings with the addition -# operator. Joining a lot of strings can lead to a maximum recursion error in -# Pylint and this flag can prevent that. It has one side effect, the resulting -# AST will be different than the one from reality. -optimize-ast=no - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED -confidence= - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time. See also the "--disable" option for examples. -#enable= - -# Disable the message, report, category or checker with the given id(s). 
You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once).You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use"--disable=all --enable=classes -# --disable=W" -# disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating,protected-access -disable=bad-continuation,D212,D107,D203 - - -[REPORTS] - -# Set the output format. Available formats are text, parseable, colorized, msvs -# (visual studio) and html. You can also give a reporter class, eg -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Put messages in a separate file for each module / package specified on the -# command line instead of printing them on stdout. Reports (if any) will be -# written in a file name "pylint_global.[txt|html]". -files-output=no - -# Tells whether to display a full report or only the messages -reports=yes - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details -#msg-template= - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME,XXX,TODO - - -[BASIC] - -# List of builtins function names that should not be used, separated by a comma -bad-functions=map,filter,input - -# Good variable names which should always be accepted, separated by a comma -good-names=i,j,k,ex,Run,_ - -# Bad variable names which should always be refused, separated by a comma -bad-names=foo,bar,baz,toto,tutu,tata - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. 
-name-group= - -# Include a hint for the correct naming format with invalid-name -include-naming-hint=no - -# Regular expression matching correct function names -function-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Naming hint for function names -function-name-hint=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression matching correct variable names -variable-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Naming hint for variable names -variable-name-hint=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression matching correct constant names -const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - -# Naming hint for constant names -const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - -# Regular expression matching correct attribute names -attr-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Naming hint for attribute names -attr-name-hint=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression matching correct argument names -argument-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Naming hint for argument names -argument-name-hint=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression matching correct class attribute names -class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ - -# Naming hint for class attribute names -class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ - -# Regular expression matching correct inline iteration names -inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ - -# Naming hint for inline iteration names -inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ - -# Regular expression matching correct class names -class-rgx=[A-Z_][a-zA-Z0-9]+$ - -# Naming hint for class names -class-name-hint=[A-Z_][a-zA-Z0-9]+$ - -# Regular expression matching correct module names -module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - -# Naming hint for module names -module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - -# Regular expression matching correct method names -method-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Naming hint for method names -method-name-hint=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - - -[ELIF] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - - -[LOGGING] - -# Logging modules to check that the string format arguments are in logging -# function parameter format -logging-modules=logging - - -[SPELLING] - -# Spelling dictionary name. Available dictionaries: none. To make it working -# install python-enchant package. -spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to indicated private dictionary in -# --spelling-private-dict-file option instead of raising a message. -spelling-store-unknown-words=no - - -[VARIABLES] - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# A regular expression matching the name of dummy variables (i.e. expectedly -# not used). -dummy-variables-rgx=_$|dummy - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid to define new builtins when possible. -additional-builtins= - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_,_cb - - -[FORMAT] - -# Maximum number of characters on a single line. 
-max-line-length=88 - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - -# List of optional constructs for which whitespace checking is disabled. `dict- -# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. -# `trailing-comma` allows a space between comma and closing bracket: (a, ). -# `empty-line` allows space-only lines. -no-space-check=trailing-comma,dict-separator - -# Maximum number of lines in a module -max-module-lines=1000 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - - -[TYPECHECK] - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis. It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules=ldap,requests,flask_restx,MySQLdb,sqlalchemy,marshmallow,lockfile,jwt,migrations,pytest,invoke,alembic,flask_sqlalchemy,flask_cors,flask_marshmallow,webargs,marshmallow_jsonschema,gevent,suds - -# List of classes names for which member attributes should not be checked -# (useful for classes with attributes dynamically set). This supports can work -# with qualified names. -ignored-classes=SQLAlchemy - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= - - -[SIMILARITIES] - -# Minimum lines number of a similarity. -min-similarity-lines=4 - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=no - - -[DESIGN] - -# Maximum number of arguments for function / method -max-args=5 - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore -ignored-argument-names=_.* - -# Maximum number of locals for function / method body -max-locals=15 - -# Maximum number of return / yield for function / method body -max-returns=6 - -# Maximum number of branch for function / method body -max-branches=12 - -# Maximum number of statements in function / method body -max-statements=50 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of attributes for a class (see R0902). -max-attributes=20 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of boolean expressions in a if statement -max-bool-expr=5 - - -[IMPORTS] - -# Deprecated modules which should not be used, separated by a comma -deprecated-modules=regsub,TERMIOS,Bastion,rexec - -# Create a graph of every (i.e. 
internal and external) dependencies in the -# given file (report RP0402 must not be disabled) -import-graph= - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled) -ext-import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled) -int-import-graph= - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__,__new__,setUp - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=mcs - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict,_fields,_replace,_source,_make - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "Exception" -overgeneral-exceptions=Exception diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 412c7a20..00000000 --- a/.travis.yml +++ /dev/null @@ -1,29 +0,0 @@ -language: python - -python: - - "3.8" - -addons: - mariadb: 10.3 - - -install: - - sudo apt-get install -y python3-mysqldb - - pip install -r requirements.txt - - pip install -r tests/requirements.txt - -script: - - cp examples/ispyb_core_config_example.yml ispyb_core_config.yml - - cp examples/ispyb_ssx_config_example.yml ispyb_ssx_config.yml - - cp examples/ispyb_em_config_example.yml ispyb_em_config.yml - - - cd scripts - - bash create_core_db.sh - - bash create_ssx_db.sh - - bash run_all.sh - - cd .. - - pylint -E pyispyb.app pyispyb.core pyispyb.ssx - - coverage run -m pytest tests/core - - bash <(curl -s https://codecov.io/bash) - - coverage run -m pytest tests/ssx - - bash <(curl -s https://codecov.io/bash) diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 00000000..7234a5cf --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,10 @@ +{ + // See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations. + // Extension identifier format: ${publisher}.${name}. Example: vscode.csharp + // List of extensions which should be recommended for users of this workspace. + "recommendations": [ + "esbenp.prettier-vscode" + ], + // List of extensions recommended by VS Code that should not be recommended for users of this workspace. + "unwantedRecommendations": [] +} \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..f17ba304 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,18 @@ +{ + "python.linting.enabled": true, + "python.linting.lintOnSave": true, + "python.linting.flake8Enabled": true, + "python.linting.flake8Path": "flake8", + "editor.formatOnSave": true, + "python.formatting.provider": "black", + "prettier.tabWidth": 4, + "[yaml]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + }, + "[json]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + }, + "[jsonc]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + } +} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index db37edb2..067fe373 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,184 +1,72 @@ -## How to contribute to repository +# How to contribute to py-ISPyB -Before submiting the code to the repository please read these contributing guidlines. 
-The aim of these guidlines is to help the developers community to maintain the code stable and reusable. +Before submitting the code to this repository, please read these contributing guidelines. These guidelines aim to help the developer community keep the code stable and reusable. -### Reporting bugs - -Before submitting a new bug check if the bug is not already reported in the [issues](). -If the corresponding issue do not exist then: - -* Open a new issue with a short description in the title. -* In the description describe the bug: - * Conditions when the bug appears. - * How it can be reproduced. - * Possible cause of the bug and source code where it occures. - * If possible add error log and screenshot. -* Assign a label to the issue (see available labels). - -### Submiting code to the repository - -Pull request (PR) is the most convinient way of submitting a new code to the repository. It helps developers to see the proposed code and publicly review it. To avoid any conflicts in the code base it is important to keep your local git repository syncronized with the latest code in the repository. If repository is checkout out directly then use `git pull` to obtain the latest code from the repository. If a local fork is used then: - -* If necessary add link to the upstream repository: - - ```bash - git remote add upstream https://github.com/--- - ``` - -* Fetch all branches and merge upstream to your forked master: - ```bash - git fetch --all - git checkout master - git merge upstream/master - ``` - -#### Preparing a new commit - -* Create a new branch: - `git checkout -b NEW_BRACH_NAME` - * If the pull request is associated with an issue then reference the issue in the name. For example: - `git checkout -b issue_100` -* Edit necessary files, delete existing or add a new file. -* Add files to the staging area: - `git add ChangedFile1 ChangedFile2` -* Save your new commit to the local repository: - `git commit` -* Commit command will open a text editor: - * In the first line write a short commit summary (max 50 characters. It will appear as a title of PR. - * Add an empty line. - * Write a longer description. -* Upload the content of the new branch to the remote repository: - `git push origin NEW_BRACH_NAME` -* Go to the github webpage and create a new PR. - -#### Anouncing a new pull request via github webpage - -* Go to the project webpage and press "Create pull request". -* Edit information about the PR. -* If needed assign a developer who shall review the PR. - -### Accepting a pull request - -* The author of a PR may request a PR review from a certain amount of developers. -* A reviewer can Comment, Approve or Request changes. -* Before accepting the PR reviewer has to test the proposed code changes. To test the PR pull the proposed PR: - ```bash - git fetch origin pull/ID/head:NEW_BRANCH_NAME - git checkout NEW_BRANCH_NAME - ``` -* All the assigned reviewers of a PR have to approve the PR before it can be merged. -* The last reviewer to review the PR have the responsibility of merging it. -* A PR that has no reviewer can be approved and merged by anyone. - -### Coding style guidlines - -It is very important to write a clean and readable code. Therefore we follow the [PEP8 guidlines](https://www.python.org/dev/peps/pep-0008/). Minimal required guidlines are: -* Maximum 88 characters per line. -* Use 4 spaces (not a tab) per identation level. -* Do not use wild (star) imports. -* Used naming styles: - * lower_case_with_underscores (snake style) for variables, methods.
- * CapitalizedWords for class names. - * UPPERCASE for constants. -* When catching exceptions, mention specific exceptions whenever possible instead of using a bare except. -* Add [google style](https://www.sphinx-doc.org/en/master/usage/extensions/example_google.html?highlight=google%20style) doc strings to describe methods and classes: -An example how to describe a class: +This is a contribution guide for [py-ISPyB](https://github.com/ispyb/py-ispyb). If you would like to learn more about the project, please start with the [README](https://github.com/ispyb/py-ispyb/blob/master/README.md). - ```bash -class ExampleClass(object): - """The summary line for a class docstring should fit on one line. +This guide is intended for members of the ISPyB collaboration with contributor access to the repository. If you are not a member but would like to contribute, please contact us. - If the class has public attributes, they may be documented here - in an ``Attributes`` section and follow the same formatting as a - function's ``Args`` section. Alternatively, attributes may be documented - inline with the attribute's declaration (see __init__ method below). +- [Reporting Bugs](#reporting-bugs) +- [Submitting code to the repository](#submitting-code-to-the-repository) +- [Reviewing process](#reviewing-process) +- [Coding style guidelines](#coding-style-guidelines) - Properties created with the ``@property`` decorator should be documented - in the property's getter method. - - Attributes: - attr1 (str): Description of `attr1`. - attr2 (:obj:`int`, optional): Description of `attr2`. - - """ - - def __init__(self, param1, param2, param3): - """Example of docstring on the __init__ method. - - The __init__ method may be documented in either the class level - docstring, or as a docstring on the __init__ method itself. - - Either form is acceptable, but the two should not be mixed. Choose one - convention to document the __init__ method and be consistent with it. ### Reporting bugs - Note: - Do not include the `self` parameter in the ``Args`` section. +Before submitting a new bug check if the bug is not already reported in the [bug issues](https://github.com/ispyb/py-ispyb/issues?q=is%3Aopen+is%3Aissue+label%3Abug). +If the corresponding issue does not exist then: - Args: - param1 (str): Description of `param1`. - param2 (:obj:`int`, optional): Description of `param2`. Multiple - lines are supported. - param3 (list(str)): Description of `param3`. +- [Open a new issue](https://github.com/ispyb/py-ispyb/issues/new) with a short description in the title. +- In the description describe the bug: + - Conditions when the bug appears; + - How it can be reproduced; + - Possible cause of the bug and source code where it occurs; + - If possible add an error log. +- Assign the `bug` label to the issue. - """ - self.attr1 = param1 - self.attr2 = param2 - self.attr3 = param3 #: Doc comment *inline* with attribute ### Submitting code to the repository - #: list(str): Doc comment *before* attribute, with type specified - self.attr4 = ['attr4'] +To submit code to the repository, please follow these steps: - self.attr5 = None - """str: Docstring *after* attribute, with type specified.""" +1. All code contributions have to start from an issue. If there is no existing issue for the submission you wish to make, start by [creating a new one](https://github.com/ispyb/py-ispyb/issues/new), and describe what should be done for it to be considered fulfilled; - ``` +2.
Once you have identified your contribution issue, [assign it to yourself](https://docs.github.com/en/issues/tracking-your-work-with-issues/assigning-issues-and-pull-requests-to-other-github-users) so that everyone can keep track of what is in progress; -An example how to describe a function: +3. Create a branch for the contribution. You should create a branch from the GitHub issue page, by clicking the `Create a branch` button under the `Development` section on the right; - ```bash -def function_with_types_in_docstring(param1, param2): - """Example function with types documented in the docstring. +4. Start implementing your changes and [commit](https://github.com/git-guides/git-commit) them; - `PEP 484`_ type annotations are supported. If attribute, parameter, and - return types are annotated according to `PEP 484`_, they do not need to be - included in the docstring: +5. Once the changes are mature enough to be discussed, create a [pull request](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request) in the repository. This pull request can be a `draft` if you don't want it to be reviewed yet; - Args: - param1 (int): The first parameter. - param2 (str): The second parameter. +6. If your pull request is a draft, when you feel like your changes are ready for production, mark the pull request as `ready for review`; - Returns: - bool: The return value. True for success, False otherwise. +7. You may add one or more reviewers to the pull request; if so, the pull request must wait until all of them have reviewed it. Alternatively you can leave the pull request for reviewers to volunteer. People are free to review pull requests whether invited or not. +8. Wait for [review](#reviewing-process); - .. _PEP 484: - https://www.python.org/dev/peps/pep-0484/ +9. Once a reviewer has approved your pull request, you are in charge of [merging](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/incorporating-changes-from-a-pull-request/merging-a-pull-request). We use `squash and merge` to keep history clear and simple. - """ ### Reviewing process - ``` +Pull requests marked as ready have to be reviewed before they can be merged. +**If possible, at least one reviewer must be from a different facility than the author.** +The reviewer is in charge of verifying the following conditions: -You can use [autopep8](https://pypi.org/project/autopep8/) and [black](https://pypi.org/project/autopep8/) to format your code: +- the contribution matches the issue requirements +- no breaking change has been introduced without proper discussion +- sufficient testing has been implemented +- CI checks are green +- the coding style is respected - ```bash - autopep8 -a -r -j 0 -i --max-line-length 88 ./ - black --safe ./ - ``` +If necessary, make comments on the code with clear hints on what to do for the author. -### Continuous integration (CI) +When the code is ready for production, mark the pull request as ready. -For continuous integration [Travis](https://travis-ci.org/) is used. +**When the reviewer validates the pull request, its author is in charge of merging.** -### Additional notes +### Coding style guidelines -Issue and Pull request Labels +It is very important to write clean and readable code. Therefore we use `Flake8` linting and `black` formatting as a style standard. This standard is enforced in CI. -* bug: indicates a bug in the code. Issue has a highest priority.
-* abstract: Abstract class involved. Issue has a hight priority. -* question: general question. -* not used code: suggestion to remove a code block or a file from the repository. -* wip: work in progress -* enchancement: code improvement. +**In addition, it was decided that variables referring to database columns, such as `sessionId`, are named the same way as they are in the schema. This is not enforced by CI linting and should be verified by reviewers.** diff --git a/Dockerfile b/Dockerfile index 9a84ba76..772a7097 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,42 +1,30 @@ -FROM frolvlad/alpine-python3 -MAINTAINER Ivars Karpics - -ENV API_SERVER_HOME=/opt/www -WORKDIR "$API_SERVER_HOME" -COPY "./requirements.txt" "./" -COPY "./app/requirements.txt" "./app/" -COPY "./config.py" "./" -COPY "./tasks" "./tasks" -COPY "./ispyb_core_config_example.yml" "./ispyb_core_config.yml" - -ARG INCLUDE_POSTGRESQL=false -ARG INCLUDE_UWSGI=false -ARG INCLUDE_MYSQL=true - -RUN apk add --no-cache --virtual=.build_dependencies musl-dev gcc python3-dev libffi-dev linux-headers -RUN apk add --no-cache mariadb-connector-c-dev ;\ apk add --no-cache --virtual .build-deps \ build-base \ mariadb-dev -RUN cd /opt/www +FROM python:3.10.9 + +RUN apt-get update && apt-get install -y \ + libldap2-dev \ + libsasl2-dev \ + libmariadb-dev \ + build-essential + +WORKDIR /app + +COPY ./requirements.txt /app/requirements.txt RUN pip install --upgrade pip -RUN pip install -r tasks/requirements.txt - -RUN invoke app.dependencies.install && \ ( \ if [ "$INCLUDE_POSTGRESQL" = 'true' ]; then \ apk add --no-cache libpq && \ apk add --no-cache --virtual=.build_dependencies postgresql-dev && \ pip install psycopg2 ; \ fi \ ) && \ ( if [ "$INCLUDE_UWSGI" = 'true' ]; then pip install uwsgi ; fi ) && \ rm -rf ~/.cache/pip && \ apk del .build_dependencies - -COPY "./" "./" - -RUN chown -R nobody "." - -USER nobody -CMD ["invoke", "app.run", "--no-install-dependencies", "--host", "0.0.0.0" ] +RUN pip install -r requirements.txt + + +COPY ./pyispyb /app/pyispyb/ +COPY ./config/docker.env /app/config/docker.env +COPY ./uvicorn.sh /app/uvicorn.sh + +ENV ISPYB_ENVIRONMENT="docker" + + +EXPOSE 80 + +CMD ["gunicorn",\ "pyispyb.app.main:app",\ "--workers", "5",\ "--worker-class", "uvicorn.workers.UvicornWorker",\ "--bind", "0.0.0.0:80"\ ] diff --git a/README.md b/README.md index d37a6fbd..2199dce4 100644 --- a/README.md +++ b/README.md @@ -1,86 +1,22 @@ # py-ispyb -[![codecov](https://codecov.io/gh/ispyb/py-ispyb/branch/master/graph/badge.svg)](https://codecov.io/gh/ispyb/py-ispyb) -[![License: LGPL v3](https://img.shields.io/badge/License-LGPL%20v3-blue.svg)](https://www.gnu.org/licenses/lgpl-3.0) +ISPyB backend server based on FastAPI. +[![codecov](https://codecov.io/gh/ispyb/py-ispyb/branch/master/graph/badge.svg?token=aqUsyDbqOG)](https://codecov.io/gh/ispyb/py-ispyb) -ISPyB backend server based on python flask-restx. +## Getting started +For dependencies, see [https://ispyb.github.io/py-ispyb/](https://ispyb.github.io/py-ispyb/). +For installation instructions, see [https://ispyb.github.io/py-ispyb/run](https://ispyb.github.io/py-ispyb/run). -## Dependencies -* [**Python**](https://www.python.org/) 3.5+ / pypy2 -* [**flask-restx**](https://github.com/python-restx/flask-restx) (+ - [*flask*](http://flask.pocoo.org/)) -* [**sqlalchemy**](http://www.sqlalchemy.org/) (+ - [*flask-sqlalchemy*](http://flask-sqlalchemy.pocoo.org/)) - Database ORM.
-* [**marshmallow**](http://marshmallow.rtfd.org/) -* [**ruamel.yaml**](https://pypi.org/project/ruamel.yaml/) +Once installed and started, you can start using the server through the [OpenAPI documentation](http://localhost:8000/docs). - -## How to run py-ispyb - -### Install requirements - -In case of MySQL or MariaDB you might have to install dev tools: - -`sudo apt-get install -y python3-mysqldb` - -or - -`apt-get install libmariadbclient-dev` - -Install python dependencies: - -`sudo pip install -r requirements.txt` - -### Copy and edit yaml configuration file - -`cp ispyb_core_config_example.yml ispyb_core_config.yml` - -If you do not have a running ispyb database then you can create one by running: - -`scripts/create_core_db.sh` - -### Regenerate data base models and schemas +The documentation files are found in the `/docs` directory. These can be served locally: ```bash -cd scripts -./generate_core_models.sh PATH_TO_CONFIG_FILE.yml -python3 generate_core_schemas.py -cd .. +pip install mkdocs +mkdocs serve ``` -### Run application in debug mode - -* `python3 wsgi.py` -* `invoke app.run` - -Now you can go to http://localhost:5000/ispyb/api/v1/doc and explore py-ispyb via swagger ui. -For authentication json web tokens (jwt) are used. In the web browser call http://localhost:5000/ispyb/api/v1/auth/login , enter user credentials and retrieve access token from the response: ```bash -{ - "token": "YOUR_JWT_TOKEN", - "roles": [ - "user" - ] -} ``` -Corresponding curl command: - -`curl -u USER:PASS -i -H "Accept:application/json" http://localhost:5000/ispyb/api/v1/auth/login` -In the case of dummy authentication username _user_ will grant _user_ role and _manager_ will grant _user_ and _manager_ roles. -For requests use the token in the `Authorization` header: `Bearer YOUR_JWT_TOKEN`. For example to retrieve proposals call: -`curl -X GET -H 'Authorization: Bearer YOUR_JWT_TOKEN' -i http://localhost:5000/ispyb/api/v1/proposals` - -## Misc - -* Swagger documentation: https://raw.githubusercontent.com/ispyb/py-ispyb/master/docs/swagger.json -* For deployment options see `deploy` directory. -* Status codes: https://www.flaskapi.org/api-guide/status-codes/ - + +Then open [http://127.0.0.1:8080/](http://127.0.0.1:8080/) diff --git a/clients/js/ispyb-client/.gitignore b/clients/js/ispyb-client/.gitignore deleted file mode 100644 index 927d17bb..00000000 --- a/clients/js/ispyb-client/.gitignore +++ /dev/null @@ -1,18 +0,0 @@ -# See https://help.github.com/ignore-files/ for more about ignoring files.
- -# dependencies -/node_modules - -# testing -/coverage - -# production -/build - -# misc -.DS_Store -.env -npm-debug.log* -yarn-debug.log* -yarn-error.log* - diff --git a/clients/js/ispyb-client/README.md b/clients/js/ispyb-client/README.md deleted file mode 100644 index 6b0ed518..00000000 --- a/clients/js/ispyb-client/README.md +++ /dev/null @@ -1,4 +0,0 @@ -Simple js client based on react - -npm install -npm run diff --git a/clients/js/ispyb-client/package.json b/clients/js/ispyb-client/package.json deleted file mode 100644 index 1b35c9af..00000000 --- a/clients/js/ispyb-client/package.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "name": "ispyb-client", - "version": "0.1.0", - "private": true, - "dependencies": { - "react": "^16.13.1", - "react-dom": "^16.13.1", - "react-scripts": "0.9.5" - }, - "devDependencies": {}, - "scripts": { - "start": "react-scripts start", - "build": "react-scripts build", - "test": "react-scripts test --env=jsdom", - "eject": "react-scripts eject" - } -} \ No newline at end of file diff --git a/clients/js/ispyb-client/public/favicon.ico b/clients/js/ispyb-client/public/favicon.ico deleted file mode 100644 index 5c125de5..00000000 Binary files a/clients/js/ispyb-client/public/favicon.ico and /dev/null differ diff --git a/clients/js/ispyb-client/public/index.html b/clients/js/ispyb-client/public/index.html deleted file mode 100644 index 84e9e4ee..00000000 --- a/clients/js/ispyb-client/public/index.html +++ /dev/null @@ -1,33 +0,0 @@ - - - - - - - - - - React App - - -
- - - diff --git a/clients/js/ispyb-client/src/App.css b/clients/js/ispyb-client/src/App.css deleted file mode 100644 index b41d297c..00000000 --- a/clients/js/ispyb-client/src/App.css +++ /dev/null @@ -1,33 +0,0 @@ -.App { - text-align: center; -} - -.App-logo { - animation: App-logo-spin infinite 20s linear; - height: 40vmin; - pointer-events: none; -} - -.App-header { - background-color: #282c34; - min-height: 100vh; - display: flex; - flex-direction: column; - align-items: center; - justify-content: center; - font-size: calc(10px + 2vmin); - color: white; -} - -.App-link { - color: #61dafb; -} - -@keyframes App-logo-spin { - from { - transform: rotate(0deg); - } - to { - transform: rotate(360deg); - } -} diff --git a/clients/js/ispyb-client/src/App.js b/clients/js/ispyb-client/src/App.js deleted file mode 100644 index 5c6025b2..00000000 --- a/clients/js/ispyb-client/src/App.js +++ /dev/null @@ -1,26 +0,0 @@ -import React, {Component} from 'react'; -import Contacts from './components/contacts'; - -class App extends Component { - render() { - return ( - - ) - } - - state = { - contacts: [] - }; - - componentDidMount() { - //fetch('http://jsonplaceholder.typicode.com/users') - fetch('http://127.0.0.1:5000/ispyb/api/v1/prop/list') - .then(res => res.json()) - .then((data) => { - this.setState({ contacts: data }) - }) - .catch(console.log) - } -} - -export default App; diff --git a/clients/js/ispyb-client/src/App.test.js b/clients/js/ispyb-client/src/App.test.js deleted file mode 100644 index a754b201..00000000 --- a/clients/js/ispyb-client/src/App.test.js +++ /dev/null @@ -1,9 +0,0 @@ -import React from 'react'; -import ReactDOM from 'react-dom'; -import App from './App'; - -it('renders without crashing', () => { - const div = document.createElement('div'); - ReactDOM.render(, div); - ReactDOM.unmountComponentAtNode(div); -}); diff --git a/clients/js/ispyb-client/src/components/contacts.js b/clients/js/ispyb-client/src/components/contacts.js deleted file mode 100644 index 7b994d70..00000000 --- a/clients/js/ispyb-client/src/components/contacts.js +++ /dev/null @@ -1,19 +0,0 @@ -import React from 'react' - -const Contacts = ({contacts}) => { - return ( -
-

Proposal List

- {contacts.map((contact) => ( -
-
-
{contact.proposalNumber}
-

{contact.title}

-
-
- ))} -
- ) -}; - -export default Contacts diff --git a/clients/js/ispyb-client/src/index.css b/clients/js/ispyb-client/src/index.css deleted file mode 100644 index cee5f348..00000000 --- a/clients/js/ispyb-client/src/index.css +++ /dev/null @@ -1,14 +0,0 @@ -body { - margin: 0; - padding: 0; - font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", - "Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", - sans-serif; - -webkit-font-smoothing: antialiased; - -moz-osx-font-smoothing: grayscale; -} - -code { - font-family: source-code-pro, Menlo, Monaco, Consolas, "Courier New", - monospace; -} diff --git a/clients/js/ispyb-client/src/index.js b/clients/js/ispyb-client/src/index.js deleted file mode 100644 index 0c5e75da..00000000 --- a/clients/js/ispyb-client/src/index.js +++ /dev/null @@ -1,12 +0,0 @@ -import React from 'react'; -import ReactDOM from 'react-dom'; -import './index.css'; -import App from './App'; -import * as serviceWorker from './serviceWorker'; - -ReactDOM.render(, document.getElementById('root')); - -// If you want your app to work offline and load faster, you can change -// unregister() to register() below. Note this comes with some pitfalls. -// Learn more about service workers: http://bit.ly/CRA-PWA -serviceWorker.unregister(); diff --git a/clients/js/ispyb-client/src/serviceWorker.js b/clients/js/ispyb-client/src/serviceWorker.js deleted file mode 100755 index 2283ff9c..00000000 --- a/clients/js/ispyb-client/src/serviceWorker.js +++ /dev/null @@ -1,135 +0,0 @@ -// This optional code is used to register a service worker. -// register() is not called by default. - -// This lets the app load faster on subsequent visits in production, and gives -// it offline capabilities. However, it also means that developers (and users) -// will only see deployed updates on subsequent visits to a page, after all the -// existing tabs open on the page have been closed, since previously cached -// resources are updated in the background. - -// To learn more about the benefits of this model and instructions on how to -// opt-in, read http://bit.ly/CRA-PWA - -const isLocalhost = Boolean( - window.location.hostname === 'localhost' || - // [::1] is the IPv6 localhost address. - window.location.hostname === '[::1]' || - // 127.0.0.1/8 is considered localhost for IPv4. - window.location.hostname.match( - /^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/ - ) -); - -export function register(config) { - if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) { - // The URL constructor is available in all browsers that support SW. - const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href); - if (publicUrl.origin !== window.location.origin) { - // Our service worker won't work if PUBLIC_URL is on a different origin - // from what our page is served on. This might happen if a CDN is used to - // serve assets; see https://github.com/facebook/create-react-app/issues/2374 - return; - } - - window.addEventListener('load', () => { - const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`; - - if (isLocalhost) { - // This is running on localhost. Let's check if a service worker still exists or not. - checkValidServiceWorker(swUrl, config); - - // Add some additional logging to localhost, pointing developers to the - // service worker/PWA documentation. - navigator.serviceWorker.ready.then(() => { - console.log( - 'This web app is being served cache-first by a service ' + - 'worker. 
To learn more, visit http://bit.ly/CRA-PWA' - ); - }); - } else { - // Is not localhost. Just register service worker - registerValidSW(swUrl, config); - } - }); - } -} - -function registerValidSW(swUrl, config) { - navigator.serviceWorker - .register(swUrl) - .then(registration => { - registration.onupdatefound = () => { - const installingWorker = registration.installing; - if (installingWorker == null) { - return; - } - installingWorker.onstatechange = () => { - if (installingWorker.state === 'installed') { - if (navigator.serviceWorker.controller) { - // At this point, the updated precached content has been fetched, - // but the previous service worker will still serve the older - // content until all client tabs are closed. - console.log( - 'New content is available and will be used when all ' + - 'tabs for this page are closed. See http://bit.ly/CRA-PWA.' - ); - - // Execute callback - if (config && config.onUpdate) { - config.onUpdate(registration); - } - } else { - // At this point, everything has been precached. - // It's the perfect time to display a - // "Content is cached for offline use." message. - console.log('Content is cached for offline use.'); - - // Execute callback - if (config && config.onSuccess) { - config.onSuccess(registration); - } - } - } - }; - }; - }) - .catch(error => { - console.error('Error during service worker registration:', error); - }); -} - -function checkValidServiceWorker(swUrl, config) { - // Check if the service worker can be found. If it can't reload the page. - fetch(swUrl) - .then(response => { - // Ensure service worker exists, and that we really are getting a JS file. - const contentType = response.headers.get('content-type'); - if ( - response.status === 404 || - (contentType != null && contentType.indexOf('javascript') === -1) - ) { - // No service worker found. Probably a different app. Reload the page. - navigator.serviceWorker.ready.then(registration => { - registration.unregister().then(() => { - window.location.reload(); - }); - }); - } else { - // Service worker found. Proceed as normal. - registerValidSW(swUrl, config); - } - }) - .catch(() => { - console.log( - 'No internet connection found. App is running in offline mode.' 
- ); - }); -} - -export function unregister() { - if ('serviceWorker' in navigator) { - navigator.serviceWorker.ready.then(registration => { - registration.unregister(); - }); - } -} diff --git a/clients/python/read_client.py b/clients/python/read_client.py deleted file mode 100644 index dc238e12..00000000 --- a/clients/python/read_client.py +++ /dev/null @@ -1,34 +0,0 @@ -import sys -from requests import get, post - -root_url = "http://localhost:5000/ispyb/api/v1" -response = get(root_url + "/auth/login", auth=(sys.argv[1], sys.argv[2])) -# Alternative get(root_url + "/auth/login", headers={'username': 'user', -# 'password': 'pass'} - -if response.status_code == 200: - roles = response.json()["roles"] - token = response.json()["token"] - print("Response: %s" % response.json()) - headers = {"Authorization": "Bearer " + token} - - print("-----------------------------------------") - path = "/proposals" - print("Request: %s%s" % (root_url, path)) - response = get(root_url + path, headers=headers) - print("Status code: %d" % response.status_code) - data = response.json() - print(data) - #proposal_id = data["rows"][0]["proposalId"] - - """ - proposal_id = 133 - path = "/proposals/%d" % proposal_id - print("Request: %s%s" % (root_url, path)) - response = get(root_url + path, headers=headers) - print("Status code: %d" % response.status_code) - data = response.json() - print('Data: %s' % str(data)) - """ -else: - print(response.reason, response.text) diff --git a/clients/python/upload_pdb.py b/clients/python/upload_pdb.py deleted file mode 100644 index ff46a47c..00000000 --- a/clients/python/upload_pdb.py +++ /dev/null @@ -1,24 +0,0 @@ -import sys -from requests import get, post, patch - -root_url = "http://localhost:5000/ispyb/api/v1" -token = "MasterToken" -headers = {"Authorization": "Bearer " + token} -crystal_id = 1 - -print("-----------------------------------------") -path = "/samples/crystals/%d/pdb?pdbFileName=7MH1.pdb" % crystal_id -print("[GET]: %s%s" % (root_url, path)) -response = get(root_url + path, headers=headers) -print("Status code: %d" % response.status_code) -print(response.json()) - -""" -path="/samples/crystals/%d/pdb" % crystal_id -filepath="/home/mxuser/Downloads/7dvf.pdb" -files = {"file": open(filepath, "rb")} -print("[PATCH]: %s%s" % (root_url, path)) -response = patch(root_url + path, headers=headers, files=files) -print("Status code: %d" % response.status_code) -print(response.json()) -""" diff --git a/clients/python/write_client.py b/clients/python/write_client.py deleted file mode 100644 index 1ab1a79a..00000000 --- a/clients/python/write_client.py +++ /dev/null @@ -1,47 +0,0 @@ -from tests.data import test_proposal -import os -import sys -import json -from requests import get, post - -TESTS_DIR = os.path.abspath(os.path.dirname(__file__)) -ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) -sys.path.insert(0, ROOT_DIR) - - -root_url = "http://localhost:5000/ispyb/api/v1" -response = get(root_url + "/auth/login", auth=("admin", "pass")) -token = response.json()["token"] -# Alternative get(root_url + "/auth/login", headers={'username': 'user', -# 'password': 'pass'} - -""" -if response.status_code == 200: - roles = response.json()["roles"] - token = response.json()["token"] - print("User %s validated" % username) - print("Token: %s" % token) - headers = {"Authorization": "Bearer " + token} - - - print("-----------------------------------------") - - path = "/schemas/proposal" - - response = get(root_url + path, headers=headers) - 
print("Status code: %d" % response.status_code) - data = response.json() - for prop in (data['definitions']['ProposalSchema']['properties']): - print(prop) - - -else: - print("Unable to validate user %s" % username) - print(response.reason, response.text) -""" -headers = {"Authorization": "Bearer " + str(token)} -path = root_url + "/proposals" -print(test_proposal) -response = post(path, json=test_proposal, headers=headers) - -print(response.status_code) diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 00000000..484f8e55 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,35 @@ +codecov: + require_ci_to_pass: no + +coverage: + precision: 2 + round: down + range: "70...100" + status: + project: + default: + target: 0% + threshold: 100% + patch: + default: + target: 0% + threshold: 100% + +parsers: + gcov: + branch_detection: + conditional: yes + loop: yes + method: no + macro: no + +comment: + layout: "diff,flags,files,footer" + behavior: default + require_changes: no + +github_checks: + annotations: false + +ignore: + - "pyispyb/core/models.py" diff --git a/config/.env b/config/.env new file mode 100644 index 00000000..3bcc0594 --- /dev/null +++ b/config/.env @@ -0,0 +1,12 @@ +SERVICE_NAME=core + +API_ROOT=/ispyb/api/v1 + +QUERY_DEBUG=false + +JWT_CODING_ALGORITHM=HS256 +TOKEN_EXP_TIME=300 + +CORS=true + +AUTH_CONFIG=auth.yml diff --git a/config/ci.env b/config/ci.env new file mode 100644 index 00000000..55335196 --- /dev/null +++ b/config/ci.env @@ -0,0 +1,16 @@ +SERVICE_NAME=core + +API_ROOT=/ispyb/api/v1 + +QUERY_DEBUG=false + +JWT_CODING_ALGORITHM=HS256 +TOKEN_EXP_TIME=300 + +CORS=true + +SECRET_KEY=ci_secret + +SQLALCHEMY_DATABASE_URI=mysql+mysqlconnector://test:test@127.0.0.1/test + +AUTH_CONFIG=tests/config/auth.yml diff --git a/config/dev.env b/config/dev.env new file mode 100644 index 00000000..5850ae88 --- /dev/null +++ b/config/dev.env @@ -0,0 +1,16 @@ +SERVICE_NAME=core + +API_ROOT=/ispyb/api/v1 + +QUERY_DEBUG=false + +JWT_CODING_ALGORITHM=HS256 +TOKEN_EXP_TIME=300 + +CORS=true + +SECRET_KEY=dev_secret + +SQLALCHEMY_DATABASE_URI=mysql+mysqlconnector://test:test@127.0.0.1/test + +AUTH_CONFIG=auth.yml diff --git a/config/docker.env b/config/docker.env new file mode 100644 index 00000000..f3a71780 --- /dev/null +++ b/config/docker.env @@ -0,0 +1,12 @@ +SERVICE_NAME=core + +API_ROOT=/ispyb/api/v1 + +QUERY_DEBUG=false + +JWT_CODING_ALGORITHM=HS256 +TOKEN_EXP_TIME=300 + +CORS=true + +AUTH_CONFIG=/config/auth.yml diff --git a/config/test.env b/config/test.env new file mode 100644 index 00000000..d59b8135 --- /dev/null +++ b/config/test.env @@ -0,0 +1,16 @@ +SERVICE_NAME=core + +API_ROOT=/ispyb/api/v1 + +QUERY_DEBUG=false + +JWT_CODING_ALGORITHM=HS256 +TOKEN_EXP_TIME=300 + +CORS=true + +SECRET_KEY=test_secret + +SQLALCHEMY_DATABASE_URI=mysql+mysqlconnector://test:test@127.0.0.1/test + +AUTH_CONFIG=tests/config/auth.yml diff --git a/deploy/README.md b/deploy/README.md deleted file mode 100644 index bd7b185c..00000000 --- a/deploy/README.md +++ /dev/null @@ -1,12 +0,0 @@ -## Deploy with docker - -```bash -$ docker-compose build -$ docker-compose up -``` - -```bash -$ docker-compose kill -$ docker-compose rm -fv -``` - diff --git a/deploy/docker-compose.yml b/deploy/docker-compose.yml deleted file mode 100644 index f5a63f54..00000000 --- a/deploy/docker-compose.yml +++ /dev/null @@ -1,14 +0,0 @@ -revproxy: - restart: always - build: ./revproxy - ports: - - "90:90" - links: - - ispyb-api:ispyb-api - -ispyb-api: - restart: always - build: ../ - environment: - 
EXAMPLE_API_REVERSE_PROXY_SETUP: 'true' - FLASK_CONFIG: 'ispyb-core-production' diff --git a/deploy/revproxy/Dockerfile b/deploy/revproxy/Dockerfile deleted file mode 100644 index cfb42c1a..00000000 --- a/deploy/revproxy/Dockerfile +++ /dev/null @@ -1,7 +0,0 @@ -############################################################################## -# Reverse Proxy -############################################################################## -FROM nginx:alpine - -ADD conf.d/ /etc/nginx/conf.d -ADD index.html /etc/nginx/html/ diff --git a/deploy/revproxy/conf.d/default.conf b/deploy/revproxy/conf.d/default.conf deleted file mode 100644 index 8fdcb318..00000000 --- a/deploy/revproxy/conf.d/default.conf +++ /dev/null @@ -1,66 +0,0 @@ -server { - listen 80 default_server; - server_name _; - charset utf-8; - - # Rules could be optimized but as is it could help others to understand and customise them. - location * { - if ($request_method = 'OPTIONS') { - # CORS configuration, from http://enable-cors.org/server_nginx.html - add_header 'Access-Control-Allow-Origin' '*'; - # - # Om nom nom cookies - # - add_header 'Access-Control-Allow-Credentials' 'true'; - add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS'; - # - # Custom headers and headers various browsers *should* be OK with but aren't - # - add_header 'Access-Control-Allow-Headers' 'DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type'; - # - # Tell client that this pre-flight info is valid for 20 days - # - add_header 'Access-Control-Max-Age' 1728000; - add_header 'Content-Type' 'text/plain charset=UTF-8'; - add_header 'Content-Length' 0; - return 204; - } - if ($request_method = 'POST') { - add_header 'Access-Control-Allow-Origin' '*'; - add_header 'Access-Control-Allow-Credentials' 'true'; - add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS'; - add_header 'Access-Control-Allow-Headers' 'DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type'; - } - if ($request_method = 'GET') { - add_header 'Access-Control-Allow-Origin' '*'; - add_header 'Access-Control-Allow-Credentials' 'true'; - add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS'; - add_header 'Access-Control-Allow-Headers' 'DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type'; - } - } - - location /api/ { - proxy_pass http://api:5000/api/; - proxy_redirect off; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto https; - } - location /auth/ { - proxy_pass http://api:5000/auth/; - proxy_redirect off; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto https; - } - location /swaggerui/ { - proxy_pass http://api:5000/swaggerui/; - proxy_redirect off; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto https; - } -} diff --git a/deploy/revproxy/index.html b/deploy/revproxy/index.html deleted file mode 100644 index c55e4057..00000000 --- a/deploy/revproxy/index.html +++ /dev/null @@ -1,9 +0,0 @@ - - -Reverse Proxy - - - You just hit the nginx reverse proxy! - You might want to look at the API... 
- - diff --git a/deploy/run_app.py b/deploy/run_app.py deleted file mode 100644 index 3d407740..00000000 --- a/deploy/run_app.py +++ /dev/null @@ -1,40 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. -# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . - -import os -import sys - -ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) -sys.path.insert(0, ROOT_DIR) - -if len(sys.argv) > 3: - config_filename = sys.argv[1] - run_mode = sys.argv[2] - port = sys.argv[3] -else: - config_filename = os.path.join(ROOT_DIR, "ispyb_core_config.yml") - run_mode = "dev" - port = 5000 - - -from pyispyb.app import create_app - -app = create_app(config_filename, run_mode) -app.run(host='0.0.0.0', port=port, debug=True) diff --git a/deploy/run_gevent.py b/deploy/run_gevent.py deleted file mode 100644 index fd997b4b..00000000 --- a/deploy/run_gevent.py +++ /dev/null @@ -1,40 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. -# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . - -import os -import sys -from gevent.pywsgi import WSGIServer -from pyispyb import create_app - -ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) - -if len(sys.argv) > 3: - config_filename = sys.argv[1] - run_mode = sys.argv[2] - port = sys.argv[3] -else: - config_filename = os.path.join(ROOT_DIR, "ispyb_core_config.yml") - run_mode = "dev" - port = 5000 - - -app = create_app(config_filename, run_mode) -http_server = WSGIServer(('', 5000), app) -http_server.serve_forever() diff --git a/deploy/run_gunicorn.sh b/deploy/run_gunicorn.sh deleted file mode 100755 index cb42fcd1..00000000 --- a/deploy/run_gunicorn.sh +++ /dev/null @@ -1,3 +0,0 @@ -cd .. -gunicorn -b 127.0.0.1:4000 "app:create_app()" - diff --git a/docs/auth.md b/docs/auth.md new file mode 100644 index 00000000..3cd13bb6 --- /dev/null +++ b/docs/auth.md @@ -0,0 +1,189 @@ +# Authentication + +`py-ispyb` relies on plugins to handle different methods of authenticating users to the system. There are some mechanisms that are implemented natively like LDAP, keycloak and dummy that can be used out-of-the-box. However, it is worth noting that anyone can write his own plugin. 
+
+There is a dedicated endpoint for authenticating with any of the installed plugins. This endpoint receives as parameters:
+
+- **plugin** - name of the plugin to be used for authentication, as specified in configuration
+- **login** _(optional)_
+- **password** _(optional)_
+- **token** _(optional)_
+
+Example of the request:
+
+```bash
+curl -X 'POST' \
+  'http://localhost:8000/ispyb/api/v1/auth/login' \
+  -H 'accept: application/json' \
+  -H 'Content-Type: application/json' \
+  -d '{
+  "plugin": "dummy",
+  "login": "test",
+  "password": "Admin",
+  "token": "Admin"
+
+}'
+```
+
+If authentication is successful, the response is a JSON object with the following fields:
+
+```json
+{
+  "login": "test",
+  "token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VybmFtZSI6InRlc3QiLCJncm91cHMiOlsiQWRtaW4iXSwicGVybWlzc2lvbnMiOlsiQWRtaW4iXSwiaWF0IjoxNjUwOTgxNjA5LCJleHAiOjE2NTA5OTk2MDl9.3Iq2lGG5RR6Gebss5qEDdASrEMwCIne2jFhaVqp91m0",
+  "permissions": ["Admin"]
+}
+```
+
+## Authorization
+
+For any authentication plugin, permissions are configured in the **database** using the following tables (with example data):
+
+- **UserGroup**: [Admin, user]
+- **Permission**: [own_proposals, own_sessions, all_proposals, all_sessions]
+- **UserGroup_has_Permission**: [{Admin, all_proposals}, {Admin, all_sessions}, {user, own_proposals}, {user, own_sessions}]
+
+## Configure a plugin
+
+One or more plugins can be enabled at the same time. A configuration file called `auth.yml` at the root of the project contains their configuration parameters.
+
+The following example shows how to enable the dummy authentication plugin:
+
+```yml
+AUTH:
+  - dummy:
+      ENABLED: true
+      AUTH_MODULE: "pyispyb.app.extensions.auth.DummyAuthentication"
+      AUTH_CLASS: "DummyAuthentication"
+```
+
+## List of plugins
+
+py-ISPyB provides the following authentication plugins, whose code you can find in `pyispyb/app/extensions/auth`.
+
+### `DummyAuthentication`
+
+Provides trivial authentication for `tests`: the permissions listed in the password field are granted.
+
+Configuration
+
+```yml
+AUTH:
+  - dummy: # /!\/!\/!\ ONLY USE FOR TESTS /!\/!\/!\
+      ENABLED: false
+      AUTH_MODULE: "pyispyb.app.extensions.auth.DummyAuthentication"
+      AUTH_CLASS: "DummyAuthentication"
+```
+
+### `KeycloakDBGroupsAuthentication`
+
+Provides authentication using Keycloak, with groups managed in the database.
+
+Configuration
+
+```yml
+AUTH:
+  - keycloak:
+      ENABLED: true
+      AUTH_MODULE: "pyispyb.app.extensions.auth.KeycloakDBGroupsAuthentication"
+      AUTH_CLASS: "KeycloakAuthentication"
+      CONFIG:
+        KEYCLOAK_SERVER_URL: "your_server"
+        KEYCLOAK_CLIENT_ID: "your_client"
+        KEYCLOAK_REALM_NAME: "your_realm"
+        KEYCLOAK_CLIENT_SECRET_KEY: "your_secret"
+```
+
+### `LdapAuthentication`
+
+Provides authentication using LDAP users and groups.
+
+Configuration
+
+```yml
+AUTH:
+  - ldap:
+      ENABLED: true
+      AUTH_MODULE: "pyispyb.app.extensions.auth.LdapAuthentication"
+      AUTH_CLASS: "LdapAuthentication"
+      CONFIG:
+        LDAP_URI: "ldap://your_ldap"
+        LDAP_BASE_INTERNAL: "ou=People,dc=test,dc=fr"
+        LDAP_BASE_GROUPS: "ou=Pxwebgroups,dc=test,dc=fr"
+```
+
+## Implementing new plugins
+
+New plugins should inherit from `AbstractAuthentication` and override either `authenticate_by_login` or `authenticate_by_token`, depending on whether they accept a login / password combination or an authorisation token. Both functions return a `Person` on success.
This can be prepopulated with `familyName`, `givenName`, and `emailAddress`, which can be used to auto-create a new `Person` entry if the option is enabled (disabled by default).
+
+For example:
+
+```python
+import logging
+from typing import Any, Optional
+
+from ispyb import models
+
+from .AbstractAuthentication import AbstractAuthentication
+
+logger = logging.getLogger(__name__)
+
+
+class MyAuthentication(AbstractAuthentication):
+    """My authentication class."""
+
+    def configure(self, config: dict[str, Any]) -> None:
+        self._config = config
+
+    def authenticate_by_login(
+        self, login: str, password: str
+    ) -> Optional[models.Person]:
+        if ...:  # verify the login / password combination here
+            return models.Person(
+                login=login,
+                familyName=...,
+                givenName=...,
+            )
+        else:
+            logger.error("Something went wrong")
+            return None
+```
+
+Or for token based authentication:
+
+```python
+import logging
+from typing import Any, Optional
+
+from ispyb import models
+
+from .AbstractAuthentication import AbstractAuthentication, AuthType
+
+logger = logging.getLogger(__name__)
+
+
+class MyAuthentication(AbstractAuthentication):
+    """My authentication class."""
+
+    authentication_type = AuthType.token
+
+    def configure(self, config: dict[str, Any]) -> None:
+        self._config = config
+
+    def authenticate_by_token(self, token: str) -> Optional[models.Person]:
+        if ...:  # validate the token here
+            return models.Person(
+                login=...,  # login extracted from the validated token
+            )
+        else:
+            logger.error("Something went wrong")
+            return None
+```
+
+Plugins can export specific config variables to the UI as well by defining `config_export`; these properties are made available to the `/auth/config` endpoint:
+
+```python
+from .AbstractAuthentication import AbstractAuthentication, AuthType
+
+
+class MyAuthentication(AbstractAuthentication):
+    """My authentication class."""
+
+    authentication_type = AuthType.token
+    config_export = ["MY_CONFIG_PROPERTY"]
+
+    ...
+```
diff --git a/docs/authorization.md b/docs/authorization.md
new file mode 100644
index 00000000..0c6d30d5
--- /dev/null
+++ b/docs/authorization.md
@@ -0,0 +1,92 @@
+# Proposal(s), Session(s), and related entities
+
+Authorization is applied to all user-facing resources in py-ISPyB, and different permissions are available to grant users and staff access to entities related to the core of ISPyB. These include, but are not limited to:
+
+- Proposal
+- Protein, Crystal, BLSample, Shipping, LabContact
+- BLSession, DataCollectionGroup, DataCollection
+
+The authorization rules are applied in four ways:
+
+### Users
+
+- A user can access entities related to a Proposal and the DataCollection(s) in which they are a member of one or more Session(s) [linked via SessionHasPerson]. _This is an intrinsic permission and is the default behaviour if the user has no other permissions._
+- A user can access entities related to all Session(s) in a Proposal [linked via ProposalHasPerson]
+
+### Administrators
+
+- An administrator can view all Sessions on a Proposal for specific beamline(s) via a `BeamLineGroup` permission
+- An administrator can access all Sessions and Proposals via `all_proposals`
+
+## BeamLineGroups
+
+Beamline groups provide a way to grant access to all Proposals, Sessions and related entities to a set of staff members for a particular group of beamlines.
+
+For example:
+
+```json
+"beamLineGroups": [
+    {
+        "groupName": "BL0x",
+        "uiGroup": "mx",
+        "permission": "bl0_admin",
+        "beamlines": [
+            {"beamLineName": "BL01"},
+            {"beamLineName": "BL02"}
+        ]
+    }
+]
+```
+
+A staff member with the `bl0_admin` permission will be able to access Proposal(s) and Session(s) allocated on beamlines `BL01` and `BL02`, but not other beamlines.
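+
+As an illustration of how this configuration drives access, the sketch below resolves a user's administrable beamlines from their permissions. This is a minimal, hypothetical helper based only on the `beamLineGroups` structure shown above, not the actual py-ISPyB implementation:
+
+```python
+from typing import Any
+
+
+def beamlines_for_permissions(
+    beamline_groups: list[dict[str, Any]], permissions: set[str]
+) -> set[str]:
+    """Return the beamlines whose group permission the user holds.
+
+    Purely illustrative; mirrors the beamLineGroups structure above.
+    """
+    beamlines: set[str] = set()
+    for group in beamline_groups:
+        # A group grants access to all of its beamlines at once.
+        if group["permission"] in permissions:
+            beamlines.update(b["beamLineName"] for b in group["beamlines"])
+    return beamlines
+
+
+# With the example configuration above:
+# beamlines_for_permissions(groups, {"bl0_admin"}) -> {"BL01", "BL02"}
+```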
`uiGroup` specifies how this group should be rendered in the UI.
+
+# Permissions
+
+Routes can require a specific permission by using the `permission` dependency:
+
+```python
+from fastapi import APIRouter, Depends
+
+from pyispyb.dependencies import permission
+
+router = APIRouter()
+
+
+@router.get(
+    "/path",
+)
+def get_something(depends: bool = Depends(permission("my_permission"))):
+    ...
+```
+
+# Deprecated Authorization Mechanisms
+
+These functions are deprecated and currently only used in the legacy API resources. They should not be used for new developments.
+
+## Authorization dependencies
+
+The following decorators can be used to manage authentication and authorization rules.
+
+### `permission_required(operator, [permissions])`
+
+Makes the route accessible only to users with the **specified permissions**.
+
+- `operator` is either
+  - `"any"` User should have **any** of the specified permissions
+  - `"all"` User should have **all** of the specified permissions
+
+### `proposal_authorisation`
+
+Verifies that the user is **associated with the requested proposal**. To do so, it uses the `proposal_id` parameter.
+The user must satisfy any of the following conditions:
+
+- `Person.personId = Proposal.personId`
+- `Person.personId = ProposalHasPerson.personId and ProposalHasPerson.proposalId = Proposal.proposalId`
+- _has permission_ `all_proposals`
+
+### `session_authorisation`
+
+Verifies that the user is **associated with the requested session**. To do so, it uses the `session_id` parameter.
+The user must satisfy any of the following conditions:
+
+- `Person.personId = Session_has_Person.personId and Session_has_Person.sessionId = BLSession.sessionId`
+- `BLSession.proposalId = Proposal.proposalId and Person.personId = Proposal.personId`
+- _has permission_ `all_sessions`
diff --git a/docs/conf.md b/docs/conf.md
new file mode 100644
index 00000000..cf403cd4
--- /dev/null
+++ b/docs/conf.md
@@ -0,0 +1,36 @@
+Configuration is defined through environment variables.
+
+Ready-to-run presets are defined under `config` for the following environments:
+
+- `config/dev.env`
+- `config/test.env`
+- `config/ci.env`
+
+These presets are automatically used when the variable `ISPYB_ENVIRONMENT` is set to any of `dev`, `test` or `ci`.
+This variable is already set to the proper value in the development and test scripts.
+
+If `ISPYB_ENVIRONMENT` is unset or empty, the defaults provided in `config/.env` will be used.
+
+Any setting can be overridden by defining the corresponding environment variable.
+
+Here are some examples from the `dev` environment:
+
+```bash
+SERVICE_NAME=core
+
+API_ROOT=/ispyb/api/v1
+
+QUERY_DEBUG=false
+
+JWT_CODING_ALGORITHM=HS256
+TOKEN_EXP_TIME=300
+
+CORS=true
+
+SECRET_KEY=dev_secret
+
+SQLALCHEMY_DATABASE_URI=mysql+mysqlconnector://test:test@127.0.0.1/test
+
+AUTH_CONFIG=auth.yml
+
+```
diff --git a/docs/developers.md b/docs/developers.md
new file mode 100644
index 00000000..8ce82d48
--- /dev/null
+++ b/docs/developers.md
@@ -0,0 +1,11 @@
+# Developer Notes
+
+## Authentication
+
+Authentication is generally handled via [JSON Web Tokens](https://jwt.io/) (JWT), which should be passed in the `Authorization` header with a value of `Bearer {token}`. Most of the py-ISPyB resources require a token to be present.
+
+In certain situations it is not possible to use a JWT; for example, an `Authorization` header cannot be passed when downloading a file. For these situations py-ISPyB provides a one-time token system.
A one-time token can be generated for a particular URL, and this token can then be used as a query parameter to access that URL a single time. Unused tokens expire on a short time scale. A signed URL can be generated using the `/user/sign` resource and used like so:
+
+```
+GET /datacollections/attachments/2?onetime={token}
+```
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 00000000..88f2dce0
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,21 @@
+# py-ISPyB
+
+[![License: LGPL v3](https://img.shields.io/badge/License-LGPL%20v3-blue.svg)](https://www.gnu.org/licenses/lgpl-3.0)
+
+An ISPyB backend server based on Python with FastAPI.
+
+---
+
+## Main dependencies
+
+- [**Python**](https://www.python.org/) 3.10+
+- [**FastAPI**](https://fastapi.tiangolo.com/) - Web framework
+- [**SQLAlchemy**](http://www.sqlalchemy.org/) - Database ORM
+- [**pydantic**](https://pydantic-docs.helpmanual.io/) - Data validation and settings management
+- [**MariaDB**](https://mariadb.org/) with an ISPyB database schema
+- [**ISPyB Database Schema**](https://github.com/ispyb/ispyb-database)
+- [**ISPyB Models**](https://github.com/ispyb/ispyb-models)
+
+---
+
+Go to the [run the app](run.md) section to see how these dependencies can be satisfied, then start exploring and contributing to the project.
diff --git a/docs/ispyb_doc.png b/docs/ispyb_doc.png
deleted file mode 100644
index ad4e24b9..00000000
Binary files a/docs/ispyb_doc.png and /dev/null differ
diff --git a/docs/ispyb_meeting_SSX_data_model_Nov2020.pdf b/docs/ispyb_meeting_SSX_data_model_Nov2020.pdf
deleted file mode 100644
index 5aa58d31..00000000
Binary files a/docs/ispyb_meeting_SSX_data_model_Nov2020.pdf and /dev/null differ
diff --git a/docs/karpics_2020_maxiv_hzb_py-ispyb.pdf b/docs/karpics_2020_maxiv_hzb_py-ispyb.pdf
deleted file mode 100644
index e323ac60..00000000
Binary files a/docs/karpics_2020_maxiv_hzb_py-ispyb.pdf and /dev/null differ
diff --git a/docs/karpics_2021_elettra_pyispyb.pdf b/docs/karpics_2021_elettra_pyispyb.pdf
deleted file mode 100644
index 9c851a8a..00000000
Binary files a/docs/karpics_2021_elettra_pyispyb.pdf and /dev/null differ
diff --git a/docs/karpics_2021_embl_desy_pyispyb.pdf b/docs/karpics_2021_embl_desy_pyispyb.pdf
deleted file mode 100644
index c508d149..00000000
Binary files a/docs/karpics_2021_embl_desy_pyispyb.pdf and /dev/null differ
diff --git a/docs/permissions.md b/docs/permissions.md
new file mode 100644
index 00000000..48539c15
--- /dev/null
+++ b/docs/permissions.md
@@ -0,0 +1,11 @@
+This is the current list of permissions and what they allow:
+
+| Permission     | Type           | Function                                        |
+|----------------|----------------|-------------------------------------------------|
+| manage_options | Administration | Add and update the database application options |
+| view_activity  | Administration | View the activity log                           |
+| manage_groups  | Administration | Add, remove, and update UserGroups              |
+| manage_perms   | Administration | Add, remove, and update Permissions             |
+| manage_persons | Administration | View full Person list                           |
+| uportal_sync   | Administration | Sync proposals from a User Portal               |
+| all_proposals  | Administration | View all proposals                              |
diff --git a/docs/routes.md b/docs/routes.md
new file mode 100644
index 00000000..2d7a8561
--- /dev/null
+++ b/docs/routes.md
@@ -0,0 +1,17 @@
+# py-ISPyB routes
+
+---
+
+## Routes documentation
+
+For documentation covering all the available routes, simply open the `/docs` route.
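+
+For example, on a local development server (the same host and port used in the authentication examples), the underlying OpenAPI document can also be fetched directly. FastAPI serves it at `/openapi.json` by default, although the exact path may differ depending on how the application is mounted; a quick sketch:
+
+```python
+import requests
+
+# Fetch the OpenAPI spec from a local development server (path assumed).
+spec = requests.get("http://localhost:8000/openapi.json").json()
+
+# Print a few of the documented routes.
+for path in sorted(spec["paths"])[:5]:
+    print(path)
+```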
+
+You may also check the automatically generated API documentation (Redoc) at:
+[https://ispyb.github.io/py-ispyb/api/](https://ispyb.github.io/py-ispyb/api/)
+
+---
+
+## Java ISPyB compatibility
+
+- Legacy routes for compatibility with Java ISPyB are available with the prefix `/ispyb/api/v1/legacy`.
+- To preserve compatibility with Java ISPyB, `proposal_id` parameters in routes also accept proposal names like _MX2007_.
diff --git a/docs/run.md b/docs/run.md
new file mode 100644
index 00000000..4e40c71a
--- /dev/null
+++ b/docs/run.md
@@ -0,0 +1,178 @@
+## How to run py-ispyb
+
+---
+
+### Get project code
+
+Clone the [repository](https://github.com/ispyb/py-ispyb):
+
+```bash
+# SSH (recommended)
+git clone git@github.com:ispyb/py-ispyb.git
+
+# HTTPS
+git clone https://github.com/ispyb/py-ispyb.git
+```
+
+The recommended IDE is [Visual Studio Code](https://code.visualstudio.com/), which is automatically configured when the project is opened.
+
+---
+
+### Installation
+
+`python >= 3.10` and `pip` are required.
+
+If you need to manage multiple versions of Python on your system, see [Setup Python](#setup-python).
+
+Install dependencies:
+
+```bash
+# For development and production
+pip install -r requirements.txt
+
+# For development only
+pip install -r requirements-dev.txt
+```
+
+#### System requirements
+
+For development purposes, some packages need to be present on your system: these provide SASL (Simple Authentication and Security Layer) support and the LDAP and MariaDB development files.
+
+- For Debian and derivatives:
+
+```bash
+sudo apt-get update && sudo apt-get install -y libldap2-dev libsasl2-dev \
+libmariadb-dev build-essential
+```
+
+- For Fedora and derivatives (use `yum` if you don't have `dnf`):
+
+```bash
+sudo dnf update && sudo dnf install -y openldap-devel mariadb-connector-c-devel \
+python3-devel
+```
+
+On Fedora you might also need to create a text file `/usr/lib/libldap_r.so` containing only the line `INPUT ( libldap.so )`.
+
+### Setup Python
+
+Virtual environments let you install and manage Python versions and dependencies independently of the system.
+
+#### Conda virtual environment
+
+Conda is an open-source package and environment management system. [Installation instructions](https://docs.conda.io/en/latest/miniconda.html)
+
+Then set up the environment:
+
+```bash
+conda create -n py-ispyb python=3.10
+conda activate py-ispyb
+pip install -r requirements.txt
+pip install -r requirements-dev.txt # For development
+```
+
+#### pyenv
+
+[pyenv](https://github.com/pyenv/pyenv) lets you easily switch between multiple versions of Python. [Installation instructions](https://github.com/pyenv/pyenv#installation)
+
+If you are using Ubuntu/Debian, you [need](https://github.com/pyenv/pyenv/wiki/common-build-problems) the following packages:
+
+```bash
+sudo apt install zlib1g zlib1g-dev libssl-dev libbz2-dev libsqlite3-dev
+```
+
+Then set up the environment:
+
+```bash
+pyenv install 3.10
+pyenv global 3.10
+pip install -r requirements.txt
+pip install -r requirements-dev.txt # For development
+```
+
+---
+
+### Configure py-ISPyB
+
+Configure authentication (more information in the [auth section](auth.md)):
+
+```bash
+# edit this file to configure authentication
+cp examples/auth.yml auth.yml
+```
+
+[Configuration](conf.md) is provided through environment variables.
+
+- Ready-to-run configuration presets are provided for the test and development environments.
+- Production needs some further configuration before running (see the [configuration section](conf.md)).
+
+---
+
+### Setup database
+
+#### Mockup database
+
+For development and test, a mockup database is available.
+You can have it up and running easily with `docker`:
+
+```bash
+sudo docker run -p 3306:3306 --rm --name ispyb-pydb ispyb/ispyb-pydb:latest
+```
+
+If you have `podman`, you can replace `sudo docker` with `podman` in the command above - no `sudo` needed.
+
+#### For tests
+
+To run the tests, you need to have the mockup database up and running.
+
+#### For development
+
+By default, the development environment will connect to the mockup database.
+If you want to use a different one, make sure to override it by setting the `SQLALCHEMY_DATABASE_URI` environment variable.
+
+#### For production
+
+Make sure to set the `SQLALCHEMY_DATABASE_URI` environment variable.
+
+---
+
+### Run application
+
+#### Tests
+
+```bash
+. scripts/test.sh
+```
+
+#### Development
+
+```bash
+. uvicorn.sh
+```
+
+#### Production
+
+To simplify production deployment, you can use the docker image built from the `Dockerfile`:
+
+```bash
+sudo docker build . -t py-ispyb
+```
+
+To run it, you will need to provide two elements:
+
+- A set of configuration variables (at least `SECRET_KEY` and `SQLALCHEMY_DATABASE_URI`; defaults are provided for the others). These can be set through a `.env` file (see `config/test.env` for an example) and the `--env-file` docker option.
+
+- An authentication configuration file (see `examples/auth.yml` for an example). This should be made available inside the container at the path indicated by the `AUTH_CONFIG` environment variable (default `/config/auth.yml`).
+
+Example `docker run` command:
+
+```bash
+sudo docker run -p 80:80 --env-file ispyb.env --mount type=bind,source=/my_ispyb_auth_config_dir,target=/config --name py-ispyb-prod py-ispyb
+```
+
+---
+
+### More information
+
+Please see the [routes section](routes.md) and the [authentication and authorization section](auth.md) for more information on how to use py-ispyb.
diff --git a/docs/simulator.md b/docs/simulator.md
new file mode 100644
index 00000000..c3dc4f2b
--- /dev/null
+++ b/docs/simulator.md
@@ -0,0 +1,66 @@
+# Simulator
+
+`ispyb.simulate` creates a new DataCollection row in the ISPyB database from a simple yaml definition. It creates a data collection, related sample information, and associated shipping entities. It then copies some raw data and associated snapshots (and thumbnails).
+
+Simulate a data collection:
+
+```bash
+ispyb.simulate <beamline> <experiment_name>
+ispyb.simulate bm23 energy_scan1
+```
+
+The simulator will hierarchically create a component (`Protein`), a related `BLSample` (with an intermediate `Crystal`), and potentially a `SubSample`, contained within a `Container`, `Dewar`, and `Shipment` belonging to the specified `Proposal`, if these do not already exist with the defined names. The simulator then creates a `DataCollection` and `DataCollectionGroup`, linked to the relevant `BLSample` and `BLSession`. If grid information is specified, it will also create an entry in `GridInfo`.
+
+## Configuration
+
+The configuration file location is defined via the `SIMULATE_CONFIG` environment variable. An example configuration is available in `examples/simulation.yml`. The structure and requirements of this file are documented in the example.
+
+Each entry in `experiments` represents a different data collection.
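+
+As a quick sanity check, the configured experiments can be listed with a few lines of Python. This is a sketch only: it assumes `SIMULATE_CONFIG` points at a file shaped like `examples/simulation.yml` and that `experiments` maps experiment names to their definitions; the example file is authoritative:
+
+```python
+import os
+
+import yaml  # PyYAML
+
+# Load the simulator configuration referenced by SIMULATE_CONFIG.
+with open(os.environ["SIMULATE_CONFIG"]) as fp:
+    config = yaml.safe_load(fp)
+
+# List each experiment alongside its experimentType, if present.
+for name, experiment in config["experiments"].items():
+    print(name, experiment.get("experimentType"))
+```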
The `experimentType` column relates to a `DataCollectionGroup.experimentType` entry so must match one of the available types in the database. See [experimentType](https://github.com/ispyb/ispyb-database/blob/main/schema/1_tables.sql#L1518)s for a full list. + +## Available columns per table + +The ISPyB tables are large, and as such only a subset of the columns are exposed by this simulator, the most pertinent in order to create usable data collections and associated entries. These are as listed below for each table. + +### Component (Protein) + +- acronym +- name +- sequence +- density +- molecularMass +- description + +### BLSample + +- name + +### BLSubSample + +- x +- y +- x2 +- y2 +- type + +### DataCollection + +- imageContainerSubPath +- numberOfImages +- wavelength +- exposureTime +- xtalSnapshotFullPath1-4 + +### GridInfo + +- steps_x +- steps_y +- snapshot_offsetXPixel +- snapshot_offsetYPixel +- dx_mm +- dy_mm +- pixelsPerMicronX +- pixelsPerMicronY + +## Plugins + +The simulator can trigger events before and after the data is copied using the `ispyb.simulator.before_datacollection` and `ispyb.simulator.after_datacollection` entry points. These are passed just the new `DataCollection.dataCollectionId`. diff --git a/docs/swagger.json b/docs/swagger.json deleted file mode 100644 index 319db96f..00000000 --- a/docs/swagger.json +++ /dev/null @@ -1,6947 +0,0 @@ -{ - "swagger": "2.0", - "basePath": "/ispyb/api/v1", - "paths": { - "/auth/login": { - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "operationId": "get_login", - "tags": [ - "Authentication" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_login", - "tags": [ - "Authentication" - ] - } - }, - "/autoproc": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/AutoProc" - } - } - }, - "summary": "Adds a new auto proc", - "operationId": "post_auto_procs", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/AutoProc" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns auto proc entries", - "operationId": "get_auto_procs", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. 
to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_auto_procs", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - } - }, - "/autoproc/programs": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/AutoProcProgram" - } - } - }, - "summary": "Adds a new auto proc program", - "operationId": "post_auto_proc_programs", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/AutoProcProgram" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns all auto_proc_program entries", - "operationId": "get_auto_proc_programs", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_auto_proc_programs", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - } - }, - "/autoproc/programs/attachments": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/AutoProcProgramAttachment" - } - } - }, - "summary": "Adds a new auto proc program", - "operationId": "post_auto_proc_program_attachments", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/AutoProcProgramAttachment" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns all auto_proc_program attachemnt entries", - "operationId": "get_attachments_by_query", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_auto_proc_program_attachments", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - } - }, - "/autoproc/programs/attachments/{attachment_id}": { - "parameters": [ - { - "in": "path", - "description": "attachment id (integer)", - "name": "attachment_id", - "required": true, - "type": "integer" - } - ], - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/AutoProcProgramAttachment" - } - }, - "404": { - "description": "auto_proc_program_attachment not found." 
- } - }, - "summary": "Returns a auto_proc by attachment_id", - "description": "attachment_id should be an integer", - "operationId": "get_auto_proc_program_attachment_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - }, - "options": { - "responses": { - "404": { - "description": "auto_proc_program_attachment not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_auto_proc_program_attachment_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - } - }, - "/autoproc/programs/{program_id}": { - "parameters": [ - { - "in": "path", - "description": "program id (integer)", - "name": "program_id", - "required": true, - "type": "integer" - } - ], - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/AutoProcProgram" - } - }, - "404": { - "description": "auto_proc_program not found." - } - }, - "summary": "Returns a auto_proc by auto_procId", - "description": "program_id should be an integer", - "operationId": "get_auto_proc_program_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - }, - "options": { - "responses": { - "404": { - "description": "auto_proc_program not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_auto_proc_program_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - } - }, - "/autoproc/status": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/AutoProcProgram" - } - } - }, - "summary": "Adds a new auto proc program", - "operationId": "post_auto_proc_status", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/AutoProcProgram" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns all auto_proc_status entries", - "operationId": "get_auto_proc_status", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. 
to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_auto_proc_status", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - } - }, - "/autoproc/status/{status_id}": { - "parameters": [ - { - "in": "path", - "description": "status id (integer)", - "name": "status_id", - "required": true, - "type": "integer" - } - ], - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/AutoProcStatus" - } - }, - "404": { - "description": "auto_proc_status not found." - } - }, - "summary": "Returns a auto_proc by auto_procId", - "description": "status_id should be an integer", - "operationId": "get_auto_proc_status_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - }, - "options": { - "responses": { - "404": { - "description": "auto_proc_status not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_auto_proc_status_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - } - }, - "/autoproc/{auto_proc_id}": { - "parameters": [ - { - "in": "path", - "description": "auto_proc id (integer)", - "name": "auto_proc_id", - "required": true, - "type": "integer" - } - ], - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/AutoProc" - } - }, - "404": { - "description": "auto_proc not found." - } - }, - "summary": "Returns a auto_proc by auto_procId", - "description": "auto_proc_id should be an integer", - "operationId": "get_auto_proc_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - }, - "options": { - "responses": { - "404": { - "description": "auto_proc not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. 
to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_auto_proc_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Auto processing" - ] - } - }, - "/beamline/detectors": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Detector" - } - } - }, - "summary": "Adds a new detector", - "operationId": "post_detectors", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Detector" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns list of detectors", - "operationId": "get_detectors", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_detectors", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - } - }, - "/beamline/detectors/{detector_id}": { - "parameters": [ - { - "in": "path", - "description": "detector id (integer)", - "name": "detector_id", - "required": true, - "type": "integer" - } - ], - "put": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Detector" - } - }, - "404": { - "description": "detector not found." - } - }, - "summary": "Fully updates detector with detector_id", - "operationId": "put_detector_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Detector" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - }, - "options": { - "responses": { - "404": { - "description": "detector not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_detector_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Detector" - } - }, - "404": { - "description": "detector not found." - } - }, - "summary": "Returns a detector by detectorId", - "description": "detector_id should be an integer", - "operationId": "get_detector_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - }, - "delete": { - "responses": { - "404": { - "description": "detector not found." 
- } - }, - "summary": "Deletes a detector by detectorId", - "operationId": "delete_detector_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - }, - "patch": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Detector" - } - }, - "404": { - "description": "detector not found." - } - }, - "summary": "Partially updates detector with id detectorId", - "operationId": "patch_detector_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Detector" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - } - }, - "/beamline/robot_actions": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/RobotAction" - } - } - }, - "summary": "Adds a new robot_action", - "operationId": "post_robot_actions", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/RobotAction" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns list of robot_actions", - "operationId": "get_robot_actions", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_robot_actions", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - } - }, - "/beamline/robot_actions/{robot_action_id}": { - "parameters": [ - { - "in": "path", - "description": "robot_action id (integer)", - "name": "robot_action_id", - "required": true, - "type": "integer" - } - ], - "put": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/RobotAction" - } - }, - "404": { - "description": "robot_action not found." - } - }, - "summary": "Fully updates robot_action with robot_action_id", - "operationId": "put_robot_action_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/RobotAction" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - }, - "options": { - "responses": { - "404": { - "description": "robot_action not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. 
to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_robot_action_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/RobotAction" - } - }, - "404": { - "description": "robot_action not found." - } - }, - "summary": "Returns a robot_action by robot_action_id", - "description": "robot_action_id should be an integer", - "operationId": "get_robot_action_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - }, - "delete": { - "responses": { - "404": { - "description": "robot_action not found." - } - }, - "summary": "Deletes a robot_action by robot_action_id", - "operationId": "delete_robot_action_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - }, - "patch": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/RobotAction" - } - }, - "404": { - "description": "robot_action not found." - } - }, - "summary": "Partially updates robot_action with robot_action_id", - "operationId": "patch_robot_action_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/RobotAction" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - } - }, - "/beamline/setups": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/BeamLineSetup" - } - } - }, - "summary": "Adds a new beamline_setup", - "operationId": "post_beamline_setups", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/BeamLineSetup" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns list of beamline_setups", - "operationId": "get_beamline_setups", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_beamline_setups", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - } - }, - "/beamline/setups/{beamline_setup_id}": { - "parameters": [ - { - "in": "path", - "description": "beamline_setup id (integer)", - "name": "beamline_setup_id", - "required": true, - "type": "integer" - } - ], - "put": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/BeamLineSetup" - } - }, - "404": { - "description": "beamline_setup not found." 
- } - }, - "summary": "Fully updates beamline_setup with beamline_setup_id", - "operationId": "put_beamline_setup_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/BeamLineSetup" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - }, - "options": { - "responses": { - "404": { - "description": "beamline_setup not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_beamline_setup_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/BeamLineSetup" - } - }, - "404": { - "description": "beamline_setup not found." - } - }, - "summary": "Returns a beamline_setup by beamline_setupId", - "description": "beamline_setup_id should be an integer", - "operationId": "get_beamline_setup_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - }, - "delete": { - "responses": { - "404": { - "description": "beamline_setup not found." - } - }, - "summary": "Deletes a beamline_setup by beamline_setupId", - "operationId": "delete_beamline_setup_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - }, - "patch": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/BeamLineSetup" - } - }, - "404": { - "description": "beamline_setup not found." 
- } - }, - "summary": "Partially updates beamline_setup with id beamline_setupId", - "operationId": "patch_beamline_setup_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/BeamLineSetup" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Beamline" - ] - } - }, - "/contacts/lab_contacts": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/LabContact" - } - } - }, - "summary": "Adds a new lab contact", - "operationId": "post_lab_contacts", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/LabContact" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns list of local contacts", - "operationId": "get_lab_contacts", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_lab_contacts", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - } - }, - "/contacts/lab_contacts/{lab_contact_id}": { - "parameters": [ - { - "name": "lab_contact_id", - "in": "path", - "required": true, - "type": "integer" - } - ], - "put": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/LabContact" - } - } - }, - "summary": "Fully updates person with id lab_contact_id", - "operationId": "put_lab_contact_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/LabContact" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. 
to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_lab_contact_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/LabContact" - } - } - }, - "summary": "Returns a lab contact by lab_contact_id", - "description": "lab_contact_id should be an integer", - "operationId": "get_lab_contact_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - }, - "delete": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Deletes lab contact by lab_contact_id", - "operationId": "delete_lab_contact_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - }, - "patch": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/LabContact" - } - } - }, - "summary": "Partially updates person with id lab_contact_id", - "operationId": "patch_lab_contact_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/LabContact" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - } - }, - "/contacts/labs": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Laboratory" - } - } - }, - "summary": "Adds a new laboratory", - "operationId": "post_laboratories", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Laboratory" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns all laboratory entries", - "operationId": "get_laboratories", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_laboratories", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - } - }, - "/contacts/labs/{laboratory_id}": { - "parameters": [ - { - "in": "path", - "description": "laboratory_id id (integer)", - "name": "laboratory_id", - "required": true, - "type": "integer" - } - ], - "put": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Laboratory" - } - }, - "404": { - "description": "Laboratory not found." 
- } - }, - "summary": "Fully updates laboratory with id laboratory_id", - "operationId": "put_laboratory_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Laboratory" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - }, - "options": { - "responses": { - "404": { - "description": "Laboratory not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_laboratory_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Laboratory" - } - }, - "404": { - "description": "Laboratory not found." - } - }, - "summary": "Returns a laboratory by laboratoryId", - "description": "lab_id should be an integer", - "operationId": "get_laboratory_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - }, - "delete": { - "responses": { - "404": { - "description": "Laboratory not found." - } - }, - "summary": "Deletes laboratory by laboratory_id", - "operationId": "delete_laboratory_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - }, - "patch": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Laboratory" - } - }, - "404": { - "description": "Laboratory not found." - } - }, - "summary": "Partially updates laboratory with id laboratory_id", - "operationId": "patch_laboratory_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Laboratory" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - } - }, - "/contacts/persons": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Person" - } - } - }, - "operationId": "post_persons", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Person" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns all persons", - "operationId": "get_persons", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. 
to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_persons", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - } - }, - "/contacts/persons/{person_id}": { - "parameters": [ - { - "name": "person_id", - "in": "path", - "required": true, - "type": "integer" - } - ], - "put": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Person" - } - } - }, - "summary": "Fully updates person with id person_id", - "operationId": "put_person_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Person" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_person_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Person" - } - } - }, - "summary": "Returns a person by personId", - "description": "person_id should be an integer", - "operationId": "get_person_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - }, - "delete": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Deletes person by person_id", - "operationId": "delete_person_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - }, - "patch": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Person" - } - } - }, - "summary": "Partially updates person with id person_id", - "operationId": "patch_person_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Person" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Contacts" - ] - } - }, - "/data_collections": { - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns list of data_collections", - "operationId": "get_data_colletions", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Data collections" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. 
to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_data_colletions", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Data collections" - ] - } - }, - "/data_collections/groups": { - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns list of data_collection_groups", - "operationId": "get_data_collection_groups", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Data collections" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_data_collection_groups", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Data collections" - ] - } - }, - "/data_collections/{data_collection_id}": { - "parameters": [ - { - "in": "path", - "description": "data_collection id (integer)", - "name": "data_collection_id", - "required": true, - "type": "integer" - } - ], - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/DataCollection" - } - }, - "404": { - "description": "data collection not found." - } - }, - "summary": "Returns a data_collection by data_collectionId", - "description": "data_collection_id should be an integer", - "operationId": "get_data_collection_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Data collections" - ] - }, - "options": { - "responses": { - "404": { - "description": "data collection not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_data_collection_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Data collections" - ] - } - }, - "/proposals": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Proposal" - } - } - }, - "summary": "Adds a new proposal", - "operationId": "post_proposals", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Proposal" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Proposals" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns proposals based on query parameters", - "operationId": "get_proposals_by_request", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Proposals" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. 
to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_proposals", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Proposals" - ] - } - }, - "/proposals/{proposal_id}": { - "parameters": [ - { - "in": "path", - "description": "Proposal id (integer)", - "name": "proposal_id", - "required": true, - "type": "integer" - } - ], - "put": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Proposal" - } - }, - "404": { - "description": "Proposal not found." - } - }, - "summary": "Fully updates proposal with id proposal_id", - "operationId": "put_proposal_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Proposal" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Proposals" - ] - }, - "options": { - "responses": { - "404": { - "description": "Proposal not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_proposal_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Proposals" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Proposal" - } - }, - "404": { - "description": "Proposal not found." - } - }, - "summary": "Returns a proposal by proposalId", - "description": "proposal_id should be an integer", - "operationId": "get_proposal_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Proposals" - ] - }, - "delete": { - "responses": { - "404": { - "description": "Proposal not found." - } - }, - "summary": "Deletes a proposal by proposal_id", - "operationId": "delete_proposal_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Proposals" - ] - }, - "patch": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Proposal" - } - }, - "404": { - "description": "Proposal not found." - } - }, - "summary": "Partially updates proposal with id proposal_id", - "operationId": "patch_proposal_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Proposal" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Proposals" - ] - } - }, - "/proposals/{proposal_id}/info": { - "parameters": [ - { - "in": "path", - "description": "Proposal id (integer)", - "name": "proposal_id", - "required": true, - "type": "integer" - } - ], - "get": { - "responses": { - "404": { - "description": "Proposal not found." 
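Each mutable resource pairs a `put` ("Fully updates ...") with a `patch` ("Partially updates ..."), both answering 201 with the updated entity or 404 when the id is unknown. A sketch of a partial update against `/proposals/{proposal_id}`; the payload field is hypothetical, since the `Proposal` definition is not reproduced in this part of the spec:

```python
import requests

BASE_URL = "http://localhost:5000/ispyb/api/v1"  # hypothetical server root
headers = {"Authorization": "<api-token>"}       # see securityDefinitions

# PATCH /proposals/{proposal_id}: only the submitted fields change;
# PUT on the same path would replace the record wholesale.
resp = requests.patch(
    f"{BASE_URL}/proposals/1234",
    headers=headers,
    json={"comments": "updated via the API"},  # hypothetical Proposal field
)
if resp.status_code == 404:
    print("Proposal not found.")
else:
    print(resp.status_code, resp.json())  # 201 with the updated Proposal
```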
- } - }, - "summary": "Returns a full description of a proposal by proposalId", - "description": "proposal_id should be an integer", - "operationId": "get_proposal_info_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Proposals" - ] - }, - "options": { - "responses": { - "404": { - "description": "Proposal not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_proposal_info_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Proposals" - ] - } - }, - "/samples": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Sample" - } - } - }, - "summary": "Adds a new sample item", - "operationId": "post_sample", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Sample" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns all sample items", - "operationId": "get_sample", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_sample", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - } - }, - "/samples/crystals": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Crystal" - } - } - }, - "summary": "Adds a new crystal item", - "operationId": "post_crystals", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Crystal" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns all crystal items", - "operationId": "get_crystals_by_query", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. 
to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_crystals", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - } - }, - "/samples/crystals/{crystal_id}": { - "parameters": [ - { - "in": "path", - "description": "Crystal id (integer)", - "name": "crystal_id", - "required": true, - "type": "integer" - } - ], - "put": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Crystal" - } - }, - "404": { - "description": "Crystal not found." - } - }, - "summary": "Fully updates crystal with crystal_id", - "operationId": "put_crystal_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Crystal" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "options": { - "responses": { - "404": { - "description": "Crystal not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_crystal_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Crystal" - } - }, - "404": { - "description": "Crystal not found." - } - }, - "summary": "Returns a crystal by crystalId", - "description": "crystal_id should be an integer", - "operationId": "get_crystal_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "delete": { - "responses": { - "404": { - "description": "Crystal not found." - } - }, - "summary": "Deletes a crystal by crystalId", - "operationId": "delete_crystal_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "patch": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Crystal" - } - }, - "404": { - "description": "Crystal not found." 
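Nearly every operation accepts an optional `X-Fields` header (`format: mask`) that trims the response to the requested attributes. The brace-delimited mask syntax below is the Flask-RESTX convention this Flask server appears to follow, and the field names are illustrative only:

```python
import requests

BASE_URL = "http://localhost:5000/ispyb/api/v1"  # hypothetical server root
headers = {
    "Authorization": "<api-token>",
    # Flask-RESTX mask syntax; field names are illustrative:
    "X-Fields": "{crystalId,spaceGroup}",
}

# GET /samples/crystals/{crystal_id} -> only the masked fields come back
resp = requests.get(f"{BASE_URL}/samples/crystals/7", headers=headers)
print(resp.json() if resp.ok else resp.status_code)  # 404: "Crystal not found."
```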
- } - }, - "summary": "Partially updates crystal with id crystalId", - "operationId": "patch_crystal_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Crystal" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - } - }, - "/samples/diffraction_plans": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/DiffractionPlan" - } - } - }, - "summary": "Adds a new diffraction_plan", - "operationId": "post_diffraction_plans", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/DiffractionPlan" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns list of diffraction_plans", - "operationId": "get_diffraction_plans", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_diffraction_plans", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - } - }, - "/samples/diffraction_plans/{diffraction_plan_id}": { - "parameters": [ - { - "in": "path", - "description": "diffraction_plan id (integer)", - "name": "diffraction_plan_id", - "required": true, - "type": "integer" - } - ], - "put": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/DiffractionPlan" - } - }, - "404": { - "description": "diffraction_plan not found." - } - }, - "summary": "Fully updates diffraction_plan with diffraction_plan_id", - "operationId": "put_diffraction_plan_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/DiffractionPlan" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "options": { - "responses": { - "404": { - "description": "diffraction_plan not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_diffraction_plan_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/DiffractionPlan" - } - }, - "404": { - "description": "diffraction_plan not found." 
- } - }, - "summary": "Returns a diffraction_plan by diffraction_planId", - "description": "diffraction_plan_id should be an integer", - "operationId": "get_diffraction_plan_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "delete": { - "responses": { - "404": { - "description": "diffraction_plan not found." - } - }, - "summary": "Deletes a diffraction_plan by diffraction_planId", - "operationId": "delete_diffraction_plan_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "patch": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/DiffractionPlan" - } - }, - "404": { - "description": "diffraction_plan not found." - } - }, - "summary": "Partially updates diffraction_plan with id diffraction_planId", - "operationId": "patch_diffraction_plan_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/DiffractionPlan" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - } - }, - "/samples/proteins": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Protein" - } - } - }, - "summary": "Adds a new protein item", - "operationId": "post_proteins", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Protein" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns all protein items", - "operationId": "get_proteins_by_request", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_proteins", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - } - }, - "/samples/proteins/{protein_id}": { - "parameters": [ - { - "in": "path", - "description": "protein id (integer)", - "name": "protein_id", - "required": true, - "type": "integer" - } - ], - "put": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Protein" - } - }, - "404": { - "description": "protein not found." 
- } - }, - "summary": "Fully updates protein with proteinId", - "operationId": "put_protein_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Protein" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "options": { - "responses": { - "404": { - "description": "protein not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_protein_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Protein" - } - }, - "404": { - "description": "protein not found." - } - }, - "summary": "Returns a protein by proteinId", - "description": "protein_id should be an integer", - "operationId": "get_protein_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "delete": { - "responses": { - "404": { - "description": "protein not found." - } - }, - "summary": "Deletes a protein by proteinId", - "operationId": "delete_protein_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "patch": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Protein" - } - }, - "404": { - "description": "protein not found." - } - }, - "summary": "Partially updates protein with proteinId", - "operationId": "patch_protein_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Protein" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - } - }, - "/samples/{sample_id}": { - "parameters": [ - { - "in": "path", - "description": "Sample id (integer)", - "name": "sample_id", - "required": true, - "type": "integer" - } - ], - "put": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Sample" - } - }, - "404": { - "description": "Sample not found." - } - }, - "summary": "Fully updates sample with sample_id", - "operationId": "put_sample_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Sample" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "options": { - "responses": { - "404": { - "description": "Sample not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. 
to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_sample_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Sample" - } - }, - "404": { - "description": "Sample not found." - } - }, - "summary": "Returns a sample by sampleId", - "description": "sample_id should be an integer", - "operationId": "get_sample_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "delete": { - "responses": { - "404": { - "description": "Sample not found." - } - }, - "summary": "Deletes a sample by sampleId", - "operationId": "delete_sample_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - }, - "patch": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Sample" - } - }, - "404": { - "description": "Sample not found." - } - }, - "summary": "Partially updates sample with id sampleId", - "operationId": "patch_sample_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Sample" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Samples" - ] - } - }, - "/schemas/available_names": { - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns list of available schemas", - "description": "Returns:\n list: list of names", - "operationId": "get_schemas_list", - "tags": [ - "Schemas" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_schemas_list", - "tags": [ - "Schemas" - ] - } - }, - "/schemas/{name}": { - "parameters": [ - { - "in": "path", - "description": "name (string)", - "name": "name", - "required": true, - "type": "string" - } - ], - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns json schema", - "description": "name should be a string\nArgs:\n name (string): schema name\n\nReturns:\n json: schema as json", - "operationId": "get_schemas", - "tags": [ - "Schemas" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "name should be a string\nUse this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. 
to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_schemas", - "tags": [ - "Schemas" - ] - } - }, - "/sessions": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Session" - } - } - }, - "summary": "Adds a new session", - "operationId": "post_sessions", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Session" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Sessions" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns list of sessions", - "operationId": "get_sessions", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Sessions" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_sessions", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Sessions" - ] - } - }, - "/sessions/beam_calendar/{beam_calendar_id}": { - "parameters": [ - { - "in": "path", - "description": "beam_calendar id (integer)", - "name": "beam_calendar_id", - "required": true, - "type": "integer" - } - ], - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/BeamCalendar" - } - }, - "404": { - "description": "beam_calendar not found." - } - }, - "summary": "Returns a beam_calendar by beam_calendarId", - "description": "beam_calendar_id should be an integer", - "operationId": "get_beam_calendar_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Sessions" - ] - }, - "options": { - "responses": { - "404": { - "description": "beam_calendar not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. 
to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_beam_calendar_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Sessions" - ] - } - }, - "/sessions/beam_calendars": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/BeamCalendar" - } - } - }, - "summary": "Adds a new beam_calendar", - "operationId": "post_beam_calendars", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/BeamCalendar" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Sessions" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns beam_calendars based on query parameters", - "operationId": "get_beam_calendars", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Sessions" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_beam_calendars", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Sessions" - ] - } - }, - "/sessions/date": { - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns list of sessions by start_date, end_date and beamline", - "operationId": "get_sessions_by_date_beamline", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Sessions" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_sessions_by_date_beamline", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Sessions" - ] - } - }, - "/sessions/{session_id}": { - "parameters": [ - { - "in": "path", - "description": "Session id (integer)", - "name": "session_id", - "required": true, - "type": "integer" - } - ], - "put": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Session" - } - }, - "404": { - "description": "Session not found." - } - }, - "summary": "Fully updates session with session_id", - "operationId": "put_session_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Session" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Sessions" - ] - }, - "options": { - "responses": { - "404": { - "description": "Session not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. 
to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_session_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Sessions" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Session" - } - }, - "404": { - "description": "Session not found." - } - }, - "summary": "Returns a session by sessionId", - "description": "session_id should be an integer", - "operationId": "get_session_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Sessions" - ] - }, - "delete": { - "responses": { - "404": { - "description": "Session not found." - } - }, - "summary": "Deletes a session by sessionId", - "operationId": "delete_session_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Sessions" - ] - }, - "patch": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Session" - } - }, - "404": { - "description": "Session not found." - } - }, - "summary": "Partially updates session with id sessionId", - "operationId": "patch_session_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Session" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Sessions" - ] - } - }, - "/sessions/{session_id}/info": { - "parameters": [ - { - "in": "path", - "description": "session id (integer)", - "name": "session_id", - "required": true, - "type": "integer" - } - ], - "get": { - "responses": { - "404": { - "description": "session not found." - } - }, - "summary": "Returns a full description of a session by sessionId", - "description": "session_id should be an integer", - "operationId": "get_session_info_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Sessions" - ] - }, - "options": { - "responses": { - "404": { - "description": "session not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. 
to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_session_info_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Sessions" - ] - } - }, - "/shipments": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Shipping" - } - } - }, - "summary": "Adds a new shipment", - "operationId": "post_shipments", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Shipping" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns list of shipments", - "operationId": "get_shipments", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_shipments", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - } - }, - "/shipments/containers": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Container" - } - } - }, - "summary": "Adds a new container item", - "operationId": "post_containers", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Container" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns all container items", - "operationId": "get_containers", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_containers", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - } - }, - "/shipments/containers/{container_id}": { - "parameters": [ - { - "in": "path", - "description": "Container id (integer)", - "name": "container_id", - "required": true, - "type": "integer" - } - ], - "put": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Container" - } - }, - "404": { - "description": "Container not found." 
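Creation follows one pattern throughout the spec: POST the entity to the collection path and receive 201 with the stored record echoed back. A sketch against `/shipments/containers`; the `Container` field in the payload is hypothetical:

```python
import requests

BASE_URL = "http://localhost:5000/ispyb/api/v1"  # hypothetical server root
headers = {"Authorization": "<api-token>"}

# POST /shipments/containers -> 201 with the created Container echoed back.
payload = {"code": "CONT-001"}  # hypothetical Container field
resp = requests.post(f"{BASE_URL}/shipments/containers",
                     headers=headers, json=payload)
if resp.status_code == 201:
    print("created container:", resp.json())
```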
- } - }, - "summary": "Fully updates container with container_id", - "operationId": "put_container_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Container" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - }, - "options": { - "responses": { - "404": { - "description": "Container not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_container_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Container" - } - }, - "404": { - "description": "Container not found." - } - }, - "summary": "Returns a container by container_id", - "description": "container_id should be an integer", - "operationId": "get_container_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - }, - "delete": { - "responses": { - "404": { - "description": "Container not found." - } - }, - "summary": "Deletes a container by containerId", - "operationId": "delete_container_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - }, - "patch": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Container" - } - }, - "404": { - "description": "Container not found." - } - }, - "summary": "Partially updates container with id containerId", - "operationId": "patch_container_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Container" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - } - }, - "/shipments/dewars": { - "post": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Dewar" - } - } - }, - "summary": "Adds a new dewar item", - "operationId": "post_dewars", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Dewar" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Returns all dewars items", - "operationId": "get_dewars_by_query", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. 
to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_dewars", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - } - }, - "/shipments/dewars/{dewar_id}": { - "parameters": [ - { - "in": "path", - "description": "Dewar id (integer)", - "name": "dewar_id", - "required": true, - "type": "integer" - } - ], - "put": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Dewar" - } - }, - "404": { - "description": "Dewar not found." - } - }, - "summary": "Fully updates dewar with dewar_id", - "operationId": "put_dewar_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Dewar" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - }, - "options": { - "responses": { - "404": { - "description": "Dewar not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_dewar_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Dewar" - } - }, - "404": { - "description": "Dewar not found." - } - }, - "summary": "Returns a dewar by dewarId", - "description": "dewar_id should be an integer", - "operationId": "get_dewar_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - }, - "delete": { - "responses": { - "404": { - "description": "Dewar not found." - } - }, - "summary": "Deletes a dewar by dewarId", - "operationId": "delete_dewar_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - }, - "patch": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Dewar" - } - }, - "404": { - "description": "Dewar not found." - } - }, - "summary": "Partially updates dewar with id dewarId", - "operationId": "patch_dewar_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Dewar" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - } - }, - "/shipments/{shipment_id}": { - "parameters": [ - { - "in": "path", - "description": "shipment id (integer)", - "name": "shipment_id", - "required": true, - "type": "integer" - } - ], - "put": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Shipping" - } - }, - "404": { - "description": "shipment not found." 
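For several resources the delete operation documents only its failure mode (here 404, "Dewar not found."), so a client can treat any 2xx status as success. A sketch:

```python
import requests

BASE_URL = "http://localhost:5000/ispyb/api/v1"  # hypothetical server root
headers = {"Authorization": "<api-token>"}

# DELETE /shipments/dewars/{dewar_id}; only 404 ("Dewar not found.")
# is documented, so any 2xx can be read as success.
resp = requests.delete(f"{BASE_URL}/shipments/dewars/15", headers=headers)
print("deleted" if resp.ok else f"failed: {resp.status_code}")
```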
- } - }, - "summary": "Fully updates shipment with id shipment_id", - "operationId": "put_shipment_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Shipping" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - }, - "options": { - "responses": { - "404": { - "description": "shipment not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_shipment_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - }, - "get": { - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Shipping" - } - }, - "404": { - "description": "shipment not found." - } - }, - "summary": "Returns a shipment by shipmentId", - "description": "shipment_id should be an integer", - "operationId": "get_shipment_by_id", - "parameters": [ - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - }, - "delete": { - "responses": { - "404": { - "description": "shipment not found." - } - }, - "summary": "Deletes shipment by shipment_id", - "operationId": "delete_shipment_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - }, - "patch": { - "responses": { - "201": { - "description": "Success", - "schema": { - "$ref": "#/definitions/Shipping" - } - }, - "404": { - "description": "shipment not found." - } - }, - "summary": "Partially updates shipment with id shipment_id", - "operationId": "patch_shipment_by_id", - "parameters": [ - { - "name": "payload", - "required": true, - "in": "body", - "schema": { - "$ref": "#/definitions/Shipping" - } - }, - { - "name": "X-Fields", - "in": "header", - "type": "string", - "format": "mask", - "description": "An optional fields mask" - } - ], - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - } - }, - "/shipments/{shipment_id}/info": { - "parameters": [ - { - "in": "path", - "description": "shipment id (integer)", - "name": "shipment_id", - "required": true, - "type": "integer" - } - ], - "get": { - "responses": { - "404": { - "description": "shipment not found." - } - }, - "summary": "Returns a full description of a shipment by shipmentId", - "description": "shipment_id should be an integer", - "operationId": "get_shipment_info_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - }, - "options": { - "responses": { - "404": { - "description": "shipment not found." - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. 
to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_shipment_info_by_id", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "Shipments" - ] - } - }, - "/user_office/sync_all": { - "post": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Sync with user office", - "operationId": "post_sync_all", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "User office" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_sync_all", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "User office" - ] - } - }, - "/user_office/update_proposal/{proposal_code}{proposal_number}": { - "parameters": [ - { - "in": "path", - "description": "Proposal number (integer)", - "name": "proposal_number", - "required": true, - "type": "integer" - }, - { - "in": "path", - "description": "Proposal code (string)", - "name": "proposal_code", - "required": true, - "type": "string" - } - ], - "post": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Sync with user office", - "description": "proposal_code should be a string, proposal_number should be an integer", - "operationId": "post_update_proposal", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "User office" - ] - }, - "options": { - "responses": { - "200": { - "description": "Success" - } - }, - "summary": "Check which methods are allowed", - "description": "Use this method if you need to know what operations are allowed to be\nperformed on this endpoint, e.g. 
to decide wether to display a button\nin your UI.\n\nThe list of allowed methods is provided in `Allow` response header.", - "operationId": "options_update_proposal", - "security": [ - { - "apikey": [] - } - ], - "tags": [ - "User office" - ] - } - } - }, - "info": { - "title": "ISPyB", - "version": "1.0", - "description": "ISPyB Flask rest server" - }, - "produces": [ - "application/json" - ], - "consumes": [ - "application/json" - ], - "securityDefinitions": { - "apikey": { - "type": "apiKey", - "in": "header", - "name": "Authorization" - } - }, - "tags": [ - { - "name": "Schemas", - "description": "Schemas related namespace" - }, - { - "name": "Contacts", - "description": "Contact related namespace" - }, - { - "name": "Sessions", - "description": "Session related namespace" - }, - { - "name": "Shipments", - "description": "Shipment related namespace" - }, - { - "name": "Proposals", - "description": "Proposal related namespace" - }, - { - "name": "Samples", - "description": "Sample related namespace" - }, - { - "name": "Data collections", - "description": "Data collection related namespace" - }, - { - "name": "Auto processing", - "description": "Auto processing related namespace" - }, - { - "name": "Beamline", - "description": "Beamline related namespace" - }, - { - "name": "User office", - "description": "User office related namespace" - }, - { - "name": "Authentication", - "description": "authentication namespace" - } - ], - "definitions": { - "Person": { - "required": [ - "personId", - "recordTimeStamp" - ], - "properties": { - "personId": { - "type": "integer", - "description": "" - }, - "laboratoryId": { - "type": "integer", - "description": "" - }, - "siteId": { - "type": "integer", - "description": "" - }, - "personUUID": { - "type": "string", - "description": "" - }, - "familyName": { - "type": "string", - "description": "" - }, - "givenName": { - "type": "string", - "description": "" - }, - "title": { - "type": "string", - "description": "" - }, - "emailAddress": { - "type": "string", - "description": "" - }, - "phoneNumber": { - "type": "string", - "description": "" - }, - "login": { - "type": "string", - "description": "" - }, - "faxNumber": { - "type": "string", - "description": "" - }, - "recordTimeStamp": { - "type": "string", - "format": "date-time", - "description": "Creation or last update date/time" - }, - "cache": { - "type": "string", - "description": "" - }, - "externalId": { - "type": "integer", - "description": "" - } - }, - "type": "object" - }, - "LabContact": { - "required": [ - "cardName", - "dewarAvgCustomsValue", - "dewarAvgTransportValue", - "labContactId", - "personId", - "proposalId", - "recordTimeStamp" - ], - "properties": { - "labContactId": { - "type": "integer", - "description": "" - }, - "personId": { - "type": "integer", - "description": "" - }, - "cardName": { - "type": "string", - "description": "" - }, - "proposalId": { - "type": "integer", - "description": "" - }, - "defaultCourrierCompany": { - "type": "string", - "description": "" - }, - "courierAccount": { - "type": "string", - "description": "" - }, - "billingReference": { - "type": "string", - "description": "" - }, - "dewarAvgCustomsValue": { - "type": "integer", - "description": "" - }, - "dewarAvgTransportValue": { - "type": "integer", - "description": "" - }, - "recordTimeStamp": { - "type": "string", - "format": "date-time", - "description": "Creation or last update date/time" - } - }, - "type": "object" - }, - "Laboratory": { - "required": [ - "laboratoryId", - "recordTimeStamp" - ], 
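The `definitions` section doubles as the contract for request payloads: the `required` list of `LabContact`, for instance, names the seven fields a POST body must carry. A payload satisfying it, with illustrative values:

```python
from datetime import datetime, timezone

# Every key in LabContact's `required` list, with illustrative values;
# optional fields (courierAccount, billingReference, ...) may be omitted.
lab_contact = {
    "labContactId": 1,
    "personId": 42,
    "proposalId": 7,
    "cardName": "J. Doe (home lab)",
    "dewarAvgCustomsValue": 1000,
    "dewarAvgTransportValue": 500,
    # format: date-time -- "Creation or last update date/time"
    "recordTimeStamp": datetime.now(timezone.utc).isoformat(),
}
```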
- "properties": { - "laboratoryId": { - "type": "integer", - "description": "" - }, - "laboratoryUUID": { - "type": "string", - "description": "" - }, - "name": { - "type": "string", - "description": "" - }, - "address": { - "type": "string", - "description": "" - }, - "city": { - "type": "string", - "description": "" - }, - "country": { - "type": "string", - "description": "" - }, - "url": { - "type": "string", - "description": "" - }, - "organization": { - "type": "string", - "description": "" - }, - "recordTimeStamp": { - "type": "string", - "format": "date-time", - "description": "Creation or last update date/time" - }, - "laboratoryPk": { - "type": "integer", - "description": "" - }, - "postcode": { - "type": "string", - "description": "" - } - }, - "type": "object" - }, - "Session": { - "required": [ - "bltimeStamp", - "lastUpdate", - "proposalId", - "sessionId" - ], - "properties": { - "sessionId": { - "type": "integer", - "description": "" - }, - "beamLineSetupId": { - "type": "integer", - "description": "" - }, - "proposalId": { - "type": "integer", - "description": "" - }, - "beamCalendarId": { - "type": "integer", - "description": "" - }, - "projectCode": { - "type": "string", - "description": "" - }, - "startDate": { - "type": "string", - "format": "date-time", - "description": "" - }, - "endDate": { - "type": "string", - "format": "date-time", - "description": "" - }, - "beamLineName": { - "type": "string", - "description": "" - }, - "scheduled": { - "type": "integer", - "description": "" - }, - "nbShifts": { - "type": "integer", - "description": "" - }, - "comments": { - "type": "string", - "description": "" - }, - "beamLineOperator": { - "type": "string", - "description": "" - }, - "bltimeStamp": { - "type": "string", - "format": "date-time", - "description": "" - }, - "visit_number": { - "type": "integer", - "description": "" - }, - "usedFlag": { - "type": "integer", - "description": "indicates if session has Datacollections or XFE or EnergyScans attached" - }, - "sessionTitle": { - "type": "string", - "description": "fx accounts only" - }, - "structureDeterminations": { - "type": "number", - "description": "" - }, - "dewarTransport": { - "type": "number", - "description": "" - }, - "databackupFrance": { - "type": "number", - "description": "data backup and express delivery France" - }, - "databackupEurope": { - "type": "number", - "description": "data backup and express delivery Europe" - }, - "expSessionPk": { - "type": "integer", - "description": "smis session Pk " - }, - "operatorSiteNumber": { - "type": "string", - "description": "matricule site" - }, - "lastUpdate": { - "type": "string", - "format": "date-time", - "description": "last update timestamp: by default the end of the session, the last collect..." 
- }, - "protectedData": { - "type": "string", - "description": "indicates if the data are protected or not" - }, - "externalId": { - "type": "integer", - "description": "" - }, - "archived": { - "type": "integer", - "description": "The data for the session is archived and no longer available on disk" - } - }, - "type": "object" - }, - "BeamCalendar": { - "required": [ - "beamCalendarId", - "beamStatus", - "endDate", - "run", - "startDate" - ], - "properties": { - "beamCalendarId": { - "type": "integer", - "description": "" - }, - "run": { - "type": "string", - "description": "" - }, - "beamStatus": { - "type": "string", - "description": "" - }, - "startDate": { - "type": "string", - "format": "date-time", - "description": "" - }, - "endDate": { - "type": "string", - "format": "date-time", - "description": "" - } - }, - "type": "object" - }, - "Shipping": { - "required": [ - "proposalId", - "shippingId" - ], - "properties": { - "shippingId": { - "type": "integer", - "description": "" - }, - "proposalId": { - "type": "integer", - "description": "" - }, - "shippingName": { - "type": "string", - "description": "" - }, - "deliveryAgent_agentName": { - "type": "string", - "description": "" - }, - "deliveryAgent_shippingDate": { - "type": "string", - "description": "" - }, - "deliveryAgent_deliveryDate": { - "type": "string", - "description": "" - }, - "deliveryAgent_agentCode": { - "type": "string", - "description": "" - }, - "deliveryAgent_flightCode": { - "type": "string", - "description": "" - }, - "shippingStatus": { - "type": "string", - "description": "" - }, - "bltimeStamp": { - "type": "string", - "format": "date-time", - "description": "" - }, - "laboratoryId": { - "type": "integer", - "description": "" - }, - "isStorageShipping": { - "type": "integer", - "description": "" - }, - "creationDate": { - "type": "string", - "format": "date-time", - "description": "" - }, - "comments": { - "type": "string", - "description": "" - }, - "sendingLabContactId": { - "type": "integer", - "description": "" - }, - "returnLabContactId": { - "type": "integer", - "description": "" - }, - "returnCourier": { - "type": "string", - "description": "" - }, - "dateOfShippingToUser": { - "type": "string", - "format": "date-time", - "description": "" - }, - "shippingType": { - "type": "string", - "description": "" - }, - "SAFETYLEVEL": { - "type": "string", - "description": "" - }, - "deliveryAgent_flightCodeTimestamp": { - "type": "string", - "format": "date-time", - "description": "Date flight code created, if automatic" - }, - "deliveryAgent_label": { - "type": "string", - "description": "Base64 encoded pdf of airway label" - }, - "readyByTime": { - "type": "string", - "description": "Time shipment will be ready" - }, - "closeTime": { - "type": "string", - "description": "Time after which shipment cannot be picked up" - }, - "physicalLocation": { - "type": "string", - "description": "Where shipment can be picked up from: i.e. 
Stores" - }, - "deliveryAgent_pickupConfirmationTimestamp": { - "type": "string", - "format": "date-time", - "description": "Date picked confirmed" - }, - "deliveryAgent_pickupConfirmation": { - "type": "string", - "description": "Confirmation number of requested pickup" - }, - "deliveryAgent_readyByTime": { - "type": "string", - "description": "Confirmed ready-by time" - }, - "deliveryAgent_callinTime": { - "type": "string", - "description": "Confirmed courier call-in time" - }, - "deliveryAgent_productcode": { - "type": "string", - "description": "A code that identifies which shipment service was used" - }, - "deliveryAgent_flightCodePersonId": { - "type": "integer", - "description": "The person who created the AWB (for auditing)" - } - }, - "type": "object" - }, - "Dewar": { - "required": [ - "dewarId", - "type" - ], - "properties": { - "dewarId": { - "type": "integer", - "description": "" - }, - "shippingId": { - "type": "integer", - "description": "" - }, - "code": { - "type": "string", - "description": "" - }, - "comments": { - "type": "string", - "description": "" - }, - "storageLocation": { - "type": "string", - "description": "" - }, - "dewarStatus": { - "type": "string", - "description": "" - }, - "bltimeStamp": { - "type": "string", - "format": "date-time", - "description": "" - }, - "isStorageDewar": { - "type": "integer", - "description": "" - }, - "barCode": { - "type": "string", - "description": "" - }, - "firstExperimentId": { - "type": "integer", - "description": "" - }, - "customsValue": { - "type": "integer", - "description": "" - }, - "transportValue": { - "type": "integer", - "description": "" - }, - "trackingNumberToSynchrotron": { - "type": "string", - "description": "" - }, - "trackingNumberFromSynchrotron": { - "type": "string", - "description": "" - }, - "type": { - "type": "string", - "description": "enum(Dewar,Toolbox)" - }, - "FACILITYCODE": { - "type": "string", - "description": "" - }, - "weight": { - "type": "number", - "description": "dewar weight in kg" - }, - "deliveryAgent_barcode": { - "type": "string", - "description": "Courier piece barcode (not the airway bill)" - } - }, - "type": "object" - }, - "Container": { - "required": [ - "containerId" - ], - "properties": { - "containerId": { - "type": "integer", - "description": "" - }, - "dewarId": { - "type": "integer", - "description": "" - }, - "code": { - "type": "string", - "description": "" - }, - "containerType": { - "type": "string", - "description": "" - }, - "capacity": { - "type": "integer", - "description": "" - }, - "sampleChangerLocation": { - "type": "string", - "description": "" - }, - "containerStatus": { - "type": "string", - "description": "" - }, - "bltimeStamp": { - "type": "string", - "format": "date-time", - "description": "" - }, - "beamlineLocation": { - "type": "string", - "description": "" - }, - "screenId": { - "type": "integer", - "description": "" - }, - "scheduleId": { - "type": "integer", - "description": "" - }, - "barcode": { - "type": "string", - "description": "" - }, - "imagerId": { - "type": "integer", - "description": "" - }, - "sessionId": { - "type": "integer", - "description": "" - }, - "ownerId": { - "type": "integer", - "description": "" - }, - "requestedImagerId": { - "type": "integer", - "description": "" - }, - "requestedReturn": { - "type": "integer", - "description": "True for requesting return, False means container will be disposed" - }, - "comments": { - "type": "string", - "description": "" - }, - "experimentType": { - "type": "string", - "description": 
"" - }, - "storageTemperature": { - "type": "number", - "description": "" - }, - "containerRegistryId": { - "type": "integer", - "description": "" - } - }, - "type": "object" - }, - "Proposal": { - "required": [ - "bltimeStamp", - "personId", - "proposalId" - ], - "properties": { - "proposalId": { - "type": "integer", - "description": "" - }, - "personId": { - "type": "integer", - "description": "" - }, - "title": { - "type": "string", - "description": "" - }, - "proposalCode": { - "type": "string", - "description": "" - }, - "proposalNumber": { - "type": "string", - "description": "" - }, - "bltimeStamp": { - "type": "string", - "format": "date-time", - "description": "" - }, - "proposalType": { - "type": "string", - "description": "Proposal type: MX, BX" - }, - "externalId": { - "type": "integer", - "description": "" - }, - "state": { - "type": "string", - "description": "enum(Open,Closed,Cancelled)" - } - }, - "type": "object" - }, - "Sample": { - "required": [ - "blSampleId", - "recordTimeStamp" - ], - "properties": { - "blSampleId": { - "type": "integer", - "description": "" - }, - "diffractionPlanId": { - "type": "integer", - "description": "" - }, - "crystalId": { - "type": "integer", - "description": "" - }, - "containerId": { - "type": "integer", - "description": "" - }, - "name": { - "type": "string", - "description": "" - }, - "code": { - "type": "string", - "description": "" - }, - "location": { - "type": "string", - "description": "" - }, - "holderLength": { - "type": "string", - "description": "" - }, - "loopLength": { - "type": "string", - "description": "" - }, - "loopType": { - "type": "string", - "description": "" - }, - "wireWidth": { - "type": "string", - "description": "" - }, - "comments": { - "type": "string", - "description": "" - }, - "completionStage": { - "type": "string", - "description": "" - }, - "structureStage": { - "type": "string", - "description": "" - }, - "publicationStage": { - "type": "string", - "description": "" - }, - "publicationComments": { - "type": "string", - "description": "" - }, - "blSampleStatus": { - "type": "string", - "description": "" - }, - "isInSampleChanger": { - "type": "integer", - "description": "" - }, - "lastKnownCenteringPosition": { - "type": "string", - "description": "" - }, - "POSITIONID": { - "type": "integer", - "description": "" - }, - "recordTimeStamp": { - "type": "string", - "format": "date-time", - "description": "Creation or last update date/time" - }, - "SMILES": { - "type": "string", - "description": "the symbolic description of the structure of a chemical compound" - }, - "blSubSampleId": { - "type": "integer", - "description": "" - }, - "lastImageURL": { - "type": "string", - "description": "" - }, - "screenComponentGroupId": { - "type": "integer", - "description": "" - }, - "volume": { - "type": "number", - "description": "" - }, - "dimension1": { - "type": "string", - "description": "" - }, - "dimension2": { - "type": "string", - "description": "" - }, - "dimension3": { - "type": "string", - "description": "" - }, - "shape": { - "type": "string", - "description": "" - }, - "packingFraction": { - "type": "number", - "description": "" - }, - "preparationTemeprature": { - "type": "integer", - "description": "Sample preparation temperature, Units: kelvin" - }, - "preparationHumidity": { - "type": "number", - "description": "Sample preparation humidity, Units: %" - }, - "blottingTime": { - "type": "integer", - "description": "Blotting time, Units: sec" - }, - "blottingForce": { - "type": "number", - "description": 
"Force used when blotting sample, Units: N?" - }, - "blottingDrainTime": { - "type": "integer", - "description": "Time sample left to drain after blotting, Units: sec" - }, - "support": { - "type": "string", - "description": "Sample support material" - }, - "subLocation": { - "type": "integer", - "description": "Indicates the samples location on a multi-sample pin, where 1 is closest to the pin base" - } - }, - "type": "object" - }, - "Crystal": { - "required": [ - "crystalId", - "proteinId", - "recordTimeStamp" - ], - "properties": { - "crystalId": { - "type": "integer", - "description": "" - }, - "diffractionPlanId": { - "type": "integer", - "description": "" - }, - "proteinId": { - "type": "integer", - "description": "" - }, - "crystalUUID": { - "type": "string", - "description": "" - }, - "name": { - "type": "string", - "description": "" - }, - "spaceGroup": { - "type": "string", - "description": "" - }, - "morphology": { - "type": "string", - "description": "" - }, - "color": { - "type": "string", - "description": "" - }, - "size_X": { - "type": "string", - "description": "" - }, - "size_Y": { - "type": "string", - "description": "" - }, - "size_Z": { - "type": "string", - "description": "" - }, - "cell_a": { - "type": "string", - "description": "" - }, - "cell_b": { - "type": "string", - "description": "" - }, - "cell_c": { - "type": "string", - "description": "" - }, - "cell_alpha": { - "type": "string", - "description": "" - }, - "cell_beta": { - "type": "string", - "description": "" - }, - "cell_gamma": { - "type": "string", - "description": "" - }, - "comments": { - "type": "string", - "description": "" - }, - "pdbFileName": { - "type": "string", - "description": "pdb file name" - }, - "pdbFilePath": { - "type": "string", - "description": "pdb file path" - }, - "recordTimeStamp": { - "type": "string", - "format": "date-time", - "description": "Creation or last update date/time" - }, - "abundance": { - "type": "number", - "description": "" - }, - "theoreticalDensity": { - "type": "number", - "description": "" - } - }, - "type": "object" - }, - "Protein": { - "required": [ - "bltimeStamp", - "proposalId", - "proteinId" - ], - "properties": { - "proteinId": { - "type": "integer", - "description": "" - }, - "proposalId": { - "type": "integer", - "description": "" - }, - "name": { - "type": "string", - "description": "" - }, - "acronym": { - "type": "string", - "description": "" - }, - "molecularMass": { - "type": "string", - "description": "" - }, - "proteinType": { - "type": "string", - "description": "" - }, - "personId": { - "type": "integer", - "description": "" - }, - "bltimeStamp": { - "type": "string", - "format": "date-time", - "description": "" - }, - "isCreatedBySampleSheet": { - "type": "integer", - "description": "" - }, - "sequence": { - "type": "string", - "description": "" - }, - "MOD_ID": { - "type": "string", - "description": "" - }, - "componentTypeId": { - "type": "integer", - "description": "" - }, - "concentrationTypeId": { - "type": "integer", - "description": "" - }, - "Global": { - "type": "integer", - "description": "" - }, - "externalId": { - "type": "integer", - "description": "" - }, - "density": { - "type": "number", - "description": "" - }, - "abundance": { - "type": "number", - "description": "Deprecated" - } - }, - "type": "object" - }, - "DiffractionPlan": { - "required": [ - "diffractionPlanId", - "recordTimeStamp" - ], - "properties": { - "diffractionPlanId": { - "type": "integer", - "description": "" - }, - "name": { - "type": "string", - 
"description": "" - }, - "experimentKind": { - "type": "string", - "description": "enum(Default,MXPressE,MXPressO,MXPressE_SAD,MXScore,MXPressM,MAD,SAD,Fixed,Ligand binding,Refinement,OSC,MAD - Inverse Beam,SAD - Inverse Beam,MESH,XFE,Stepped transmission)" - }, - "observedResolution": { - "type": "number", - "description": "" - }, - "minimalResolution": { - "type": "number", - "description": "" - }, - "exposureTime": { - "type": "number", - "description": "" - }, - "oscillationRange": { - "type": "number", - "description": "" - }, - "maximalResolution": { - "type": "number", - "description": "" - }, - "screeningResolution": { - "type": "number", - "description": "" - }, - "radiationSensitivity": { - "type": "number", - "description": "" - }, - "anomalousScatterer": { - "type": "string", - "description": "" - }, - "preferredBeamSizeX": { - "type": "number", - "description": "" - }, - "preferredBeamSizeY": { - "type": "number", - "description": "" - }, - "preferredBeamDiameter": { - "type": "number", - "description": "" - }, - "comments": { - "type": "string", - "description": "" - }, - "DIFFRACTIONPLANUUID": { - "type": "string", - "description": "" - }, - "aimedCompleteness": { - "type": "string", - "description": "" - }, - "aimedIOverSigmaAtHighestRes": { - "type": "string", - "description": "" - }, - "aimedMultiplicity": { - "type": "string", - "description": "" - }, - "aimedResolution": { - "type": "string", - "description": "" - }, - "anomalousData": { - "type": "integer", - "description": "" - }, - "complexity": { - "type": "string", - "description": "" - }, - "estimateRadiationDamage": { - "type": "integer", - "description": "" - }, - "forcedSpaceGroup": { - "type": "string", - "description": "" - }, - "requiredCompleteness": { - "type": "string", - "description": "" - }, - "requiredMultiplicity": { - "type": "string", - "description": "" - }, - "requiredResolution": { - "type": "string", - "description": "" - }, - "strategyOption": { - "type": "string", - "description": "" - }, - "kappaStrategyOption": { - "type": "string", - "description": "" - }, - "numberOfPositions": { - "type": "integer", - "description": "" - }, - "minDimAccrossSpindleAxis": { - "type": "string", - "description": "minimum dimension accross the spindle axis" - }, - "maxDimAccrossSpindleAxis": { - "type": "string", - "description": "maximum dimension accross the spindle axis" - }, - "radiationSensitivityBeta": { - "type": "string", - "description": "" - }, - "radiationSensitivityGamma": { - "type": "string", - "description": "" - }, - "minOscWidth": { - "type": "number", - "description": "" - }, - "recordTimeStamp": { - "type": "string", - "format": "date-time", - "description": "Creation or last update date/time" - }, - "monochromator": { - "type": "string", - "description": "DMM or DCM" - }, - "energy": { - "type": "number", - "description": "eV" - }, - "transmission": { - "type": "number", - "description": "Decimal fraction in range [0,1]" - }, - "boxSizeX": { - "type": "number", - "description": "microns" - }, - "boxSizeY": { - "type": "number", - "description": "microns" - }, - "kappaStart": { - "type": "number", - "description": "degrees" - }, - "axisStart": { - "type": "number", - "description": "degrees" - }, - "axisRange": { - "type": "number", - "description": "degrees" - }, - "numberOfImages": { - "type": "integer", - "description": "The number of images requested" - }, - "presetForProposalId": { - "type": "integer", - "description": "Indicates this plan is available to all sessions on given 
proposal" - }, - "beamLineName": { - "type": "string", - "description": "Indicates this plan is available to all sessions on given beamline" - }, - "detectorId": { - "type": "integer", - "description": "" - }, - "distance": { - "type": "string", - "description": "" - }, - "orientation": { - "type": "string", - "description": "" - }, - "monoBandwidth": { - "type": "string", - "description": "" - }, - "centringMethod": { - "type": "string", - "description": "enum(xray,loop,diffraction,optical)" - } - }, - "type": "object" - }, - "DataCollection": { - "required": [ - "dataCollectionGroupId", - "dataCollectionId" - ], - "properties": { - "dataCollectionId": { - "type": "integer", - "description": "Primary key (auto-incremented)" - }, - "BLSAMPLEID": { - "type": "integer", - "description": "" - }, - "SESSIONID": { - "type": "integer", - "description": "" - }, - "experimenttype": { - "type": "string", - "description": "" - }, - "dataCollectionNumber": { - "type": "integer", - "description": "" - }, - "startTime": { - "type": "string", - "format": "date-time", - "description": "Start time of the dataCollection" - }, - "endTime": { - "type": "string", - "format": "date-time", - "description": "end time of the dataCollection" - }, - "runStatus": { - "type": "string", - "description": "" - }, - "axisStart": { - "type": "number", - "description": "" - }, - "axisEnd": { - "type": "number", - "description": "" - }, - "axisRange": { - "type": "number", - "description": "" - }, - "overlap": { - "type": "number", - "description": "" - }, - "numberOfImages": { - "type": "integer", - "description": "" - }, - "startImageNumber": { - "type": "integer", - "description": "" - }, - "numberOfPasses": { - "type": "integer", - "description": "" - }, - "exposureTime": { - "type": "number", - "description": "" - }, - "imageDirectory": { - "type": "string", - "description": "The directory where files reside - should end with a slash" - }, - "imagePrefix": { - "type": "string", - "description": "" - }, - "imageSuffix": { - "type": "string", - "description": "" - }, - "imageContainerSubPath": { - "type": "string", - "description": "Internal path of a HDF5 file pointing to the data for this data collection" - }, - "fileTemplate": { - "type": "string", - "description": "" - }, - "wavelength": { - "type": "number", - "description": "" - }, - "resolution": { - "type": "number", - "description": "" - }, - "detectorDistance": { - "type": "number", - "description": "" - }, - "xBeam": { - "type": "number", - "description": "" - }, - "yBeam": { - "type": "number", - "description": "" - }, - "comments": { - "type": "string", - "description": "" - }, - "printableForReport": { - "type": "integer", - "description": "" - }, - "CRYSTALCLASS": { - "type": "string", - "description": "" - }, - "slitGapVertical": { - "type": "number", - "description": "" - }, - "slitGapHorizontal": { - "type": "number", - "description": "" - }, - "transmission": { - "type": "number", - "description": "" - }, - "synchrotronMode": { - "type": "string", - "description": "" - }, - "xtalSnapshotFullPath1": { - "type": "string", - "description": "" - }, - "xtalSnapshotFullPath2": { - "type": "string", - "description": "" - }, - "xtalSnapshotFullPath3": { - "type": "string", - "description": "" - }, - "xtalSnapshotFullPath4": { - "type": "string", - "description": "" - }, - "rotationAxis": { - "type": "string", - "description": "enum(Omega,Kappa,Phi)" - }, - "phiStart": { - "type": "number", - "description": "" - }, - "kappaStart": { - "type": "number", - 
"description": "" - }, - "omegaStart": { - "type": "number", - "description": "" - }, - "chiStart": { - "type": "number", - "description": "" - }, - "resolutionAtCorner": { - "type": "number", - "description": "" - }, - "detector2Theta": { - "type": "number", - "description": "" - }, - "DETECTORMODE": { - "type": "string", - "description": "" - }, - "undulatorGap1": { - "type": "number", - "description": "" - }, - "undulatorGap2": { - "type": "number", - "description": "" - }, - "undulatorGap3": { - "type": "number", - "description": "" - }, - "beamSizeAtSampleX": { - "type": "number", - "description": "" - }, - "beamSizeAtSampleY": { - "type": "number", - "description": "" - }, - "centeringMethod": { - "type": "string", - "description": "" - }, - "averageTemperature": { - "type": "number", - "description": "" - }, - "ACTUALSAMPLEBARCODE": { - "type": "string", - "description": "" - }, - "ACTUALSAMPLESLOTINCONTAINER": { - "type": "integer", - "description": "" - }, - "ACTUALCONTAINERBARCODE": { - "type": "string", - "description": "" - }, - "ACTUALCONTAINERSLOTINSC": { - "type": "integer", - "description": "" - }, - "actualCenteringPosition": { - "type": "string", - "description": "" - }, - "beamShape": { - "type": "string", - "description": "" - }, - "dataCollectionGroupId": { - "type": "integer", - "description": "references DataCollectionGroup table" - }, - "POSITIONID": { - "type": "integer", - "description": "" - }, - "detectorId": { - "type": "integer", - "description": "references Detector table" - }, - "FOCALSPOTSIZEATSAMPLEX": { - "type": "number", - "description": "" - }, - "POLARISATION": { - "type": "number", - "description": "" - }, - "FOCALSPOTSIZEATSAMPLEY": { - "type": "number", - "description": "" - }, - "APERTUREID": { - "type": "integer", - "description": "" - }, - "screeningOrigId": { - "type": "integer", - "description": "" - }, - "startPositionId": { - "type": "integer", - "description": "" - }, - "endPositionId": { - "type": "integer", - "description": "" - }, - "flux": { - "type": "string", - "description": "" - }, - "strategySubWedgeOrigId": { - "type": "integer", - "description": "references ScreeningStrategySubWedge table" - }, - "blSubSampleId": { - "type": "integer", - "description": "" - }, - "flux_end": { - "type": "string", - "description": "flux measured after the collect" - }, - "bestWilsonPlotPath": { - "type": "string", - "description": "" - }, - "processedDataFile": { - "type": "string", - "description": "" - }, - "datFullPath": { - "type": "string", - "description": "" - }, - "magnification": { - "type": "number", - "description": "Calibrated magnification, Units: dimensionless" - }, - "totalAbsorbedDose": { - "type": "number", - "description": "Unit: e-/A^2 for EM" - }, - "binning": { - "type": "integer", - "description": "1 or 2. Number of pixels to process as 1. 
(Use mean value.)" - }, - "particleDiameter": { - "type": "number", - "description": "Unit: nm" - }, - "boxSize_CTF": { - "type": "number", - "description": "Unit: pixels" - }, - "minResolution": { - "type": "number", - "description": "Unit: A" - }, - "minDefocus": { - "type": "number", - "description": "Unit: A" - }, - "maxDefocus": { - "type": "number", - "description": "Unit: A" - }, - "defocusStepSize": { - "type": "number", - "description": "Unit: A" - }, - "amountAstigmatism": { - "type": "number", - "description": "Unit: A" - }, - "extractSize": { - "type": "number", - "description": "Unit: pixels" - }, - "bgRadius": { - "type": "number", - "description": "Unit: nm" - }, - "voltage": { - "type": "number", - "description": "Unit: kV" - }, - "objAperture": { - "type": "number", - "description": "Unit: um" - }, - "c1aperture": { - "type": "number", - "description": "Unit: um" - }, - "c2aperture": { - "type": "number", - "description": "Unit: um" - }, - "c3aperture": { - "type": "number", - "description": "Unit: um" - }, - "c1lens": { - "type": "number", - "description": "Unit: %" - }, - "c2lens": { - "type": "number", - "description": "Unit: %" - }, - "c3lens": { - "type": "number", - "description": "Unit: %" - }, - "totalExposedDose": { - "type": "number", - "description": "Units: e-/A^2" - }, - "nominalMagnification": { - "type": "number", - "description": "Nominal magnification: Units: dimensionless" - }, - "nominalDefocus": { - "type": "number", - "description": "Nominal defocus, Units: A" - }, - "imageSizeX": { - "type": "integer", - "description": "Image size in x, incase crop has been used, Units: pixels" - }, - "imageSizeY": { - "type": "integer", - "description": "Image size in y, Units: pixels" - }, - "pixelSizeOnImage": { - "type": "number", - "description": "Pixel size on image, calculated from magnification, duplicate? Units: um?" 
- }, - "phasePlate": { - "type": "integer", - "description": "Whether the phase plate was used" - } - }, - "type": "object" - }, - "AutoProc": { - "required": [ - "autoProcId" - ], - "properties": { - "autoProcId": { - "type": "integer", - "description": "Primary key (auto-incremented)" - }, - "autoProcProgramId": { - "type": "integer", - "description": "Related program item" - }, - "spaceGroup": { - "type": "string", - "description": "Space group" - }, - "refinedCell_a": { - "type": "number", - "description": "Refined cell" - }, - "refinedCell_b": { - "type": "number", - "description": "Refined cell" - }, - "refinedCell_c": { - "type": "number", - "description": "Refined cell" - }, - "refinedCell_alpha": { - "type": "number", - "description": "Refined cell" - }, - "refinedCell_beta": { - "type": "number", - "description": "Refined cell" - }, - "refinedCell_gamma": { - "type": "number", - "description": "Refined cell" - }, - "recordTimeStamp": { - "type": "string", - "format": "date-time", - "description": "Creation or last update date/time" - } - }, - "type": "object" - }, - "AutoProcProgram": { - "required": [ - "autoProcProgramId" - ], - "properties": { - "autoProcProgramId": { - "type": "integer", - "description": "Primary key (auto-incremented)" - }, - "processingCommandLine": { - "type": "string", - "description": "Command line for running the automatic processing" - }, - "processingPrograms": { - "type": "string", - "description": "Processing programs (comma separated)" - }, - "processingStatus": { - "type": "integer", - "description": "success (1) / fail (0)" - }, - "processingMessage": { - "type": "string", - "description": "warning, error,..." - }, - "processingStartTime": { - "type": "string", - "format": "date-time", - "description": "Processing start time" - }, - "processingEndTime": { - "type": "string", - "format": "date-time", - "description": "Processing end time" - }, - "processingEnvironment": { - "type": "string", - "description": "Cpus, Nodes,..." 
- }, - "recordTimeStamp": { - "type": "string", - "format": "date-time", - "description": "Creation or last update date/time" - }, - "processingJobId": { - "type": "integer", - "description": "" - }, - "dataCollectionId": { - "type": "integer", - "description": "" - } - }, - "type": "object" - }, - "AutoProcStatus": { - "required": [ - "autoProcIntegrationId", - "autoProcStatusId", - "bltimeStamp", - "status", - "step" - ], - "properties": { - "autoProcStatusId": { - "type": "integer", - "description": "Primary key (auto-incremented)" - }, - "autoProcIntegrationId": { - "type": "integer", - "description": "" - }, - "step": { - "type": "string", - "description": "autoprocessing stepenum(Indexing,Integration,Correction,Scaling,Importing)" - }, - "status": { - "type": "string", - "description": "autoprocessing statusenum(Launched,Successful,Failed)" - }, - "comments": { - "type": "string", - "description": "comments" - }, - "bltimeStamp": { - "type": "string", - "format": "date-time", - "description": "" - } - }, - "type": "object" - }, - "AutoProcProgramAttachment": { - "required": [ - "autoProcProgramAttachmentId", - "autoProcProgramId" - ], - "properties": { - "autoProcProgramAttachmentId": { - "type": "integer", - "description": "Primary key (auto-incremented)" - }, - "autoProcProgramId": { - "type": "integer", - "description": "Related autoProcProgram item" - }, - "fileType": { - "type": "string", - "description": "Type of file Attachmentenum(Log,Result,Graph,Debug)" - }, - "fileName": { - "type": "string", - "description": "Attachment filename" - }, - "filePath": { - "type": "string", - "description": "Attachment filepath to disk storage" - }, - "recordTimeStamp": { - "type": "string", - "format": "date-time", - "description": "Creation or last update date/time" - }, - "importanceRank": { - "type": "integer", - "description": "For the particular autoProcProgramId and fileType, indicate the importance of the attachment. 
Higher numbers are more important" - } - }, - "type": "object" - }, - "BeamLineSetup": { - "required": [ - "active", - "beamLineSetupId", - "recordTimeStamp" - ], - "properties": { - "beamLineSetupId": { - "type": "integer", - "description": "" - }, - "detectorId": { - "type": "integer", - "description": "" - }, - "synchrotronMode": { - "type": "string", - "description": "" - }, - "undulatorType1": { - "type": "string", - "description": "" - }, - "undulatorType2": { - "type": "string", - "description": "" - }, - "undulatorType3": { - "type": "string", - "description": "" - }, - "focalSpotSizeAtSample": { - "type": "number", - "description": "" - }, - "focusingOptic": { - "type": "string", - "description": "" - }, - "beamDivergenceHorizontal": { - "type": "number", - "description": "" - }, - "beamDivergenceVertical": { - "type": "number", - "description": "" - }, - "polarisation": { - "type": "number", - "description": "" - }, - "monochromatorType": { - "type": "string", - "description": "" - }, - "setupDate": { - "type": "string", - "format": "date-time", - "description": "" - }, - "synchrotronName": { - "type": "string", - "description": "" - }, - "maxExpTimePerDataCollection": { - "type": "string", - "description": "" - }, - "maxExposureTimePerImage": { - "type": "number", - "description": "unit: seconds" - }, - "minExposureTimePerImage": { - "type": "string", - "description": "" - }, - "goniostatMaxOscillationSpeed": { - "type": "string", - "description": "" - }, - "goniostatMaxOscillationWidth": { - "type": "string", - "description": "unit: degrees" - }, - "goniostatMinOscillationWidth": { - "type": "string", - "description": "" - }, - "maxTransmission": { - "type": "string", - "description": "unit: percentage" - }, - "minTransmission": { - "type": "string", - "description": "" - }, - "recordTimeStamp": { - "type": "string", - "format": "date-time", - "description": "Creation or last update date/time" - }, - "CS": { - "type": "number", - "description": "Spherical Aberration, Units: mm?" 
- }, - "beamlineName": { - "type": "string", - "description": "Beamline that this setup relates to" - }, - "beamSizeXMin": { - "type": "number", - "description": "unit: um" - }, - "beamSizeXMax": { - "type": "number", - "description": "unit: um" - }, - "beamSizeYMin": { - "type": "number", - "description": "unit: um" - }, - "beamSizeYMax": { - "type": "number", - "description": "unit: um" - }, - "energyMin": { - "type": "number", - "description": "unit: eV" - }, - "energyMax": { - "type": "number", - "description": "unit: eV" - }, - "omegaMin": { - "type": "number", - "description": "unit: degrees" - }, - "omegaMax": { - "type": "number", - "description": "unit: degrees" - }, - "kappaMin": { - "type": "number", - "description": "unit: degrees" - }, - "kappaMax": { - "type": "number", - "description": "unit: degrees" - }, - "phiMin": { - "type": "number", - "description": "unit: degrees" - }, - "phiMax": { - "type": "number", - "description": "unit: degrees" - }, - "active": { - "type": "integer", - "description": "" - }, - "numberOfImagesMax": { - "type": "integer", - "description": "" - }, - "numberOfImagesMin": { - "type": "integer", - "description": "" - }, - "boxSizeXMin": { - "type": "string", - "description": "For gridscans, unit: um" - }, - "boxSizeXMax": { - "type": "string", - "description": "For gridscans, unit: um" - }, - "boxSizeYMin": { - "type": "string", - "description": "For gridscans, unit: um" - }, - "boxSizeYMax": { - "type": "string", - "description": "For gridscans, unit: um" - }, - "monoBandwidthMin": { - "type": "string", - "description": "unit: percentage" - }, - "monoBandwidthMax": { - "type": "string", - "description": "unit: percentage" - } - }, - "type": "object" - }, - "RobotAction": { - "required": [ - "blsessionId", - "endTimestamp", - "robotActionId", - "startTimestamp" - ], - "properties": { - "robotActionId": { - "type": "integer", - "description": "" - }, - "blsessionId": { - "type": "integer", - "description": "" - }, - "blsampleId": { - "type": "integer", - "description": "" - }, - "actionType": { - "type": "string", - "description": "enum(LOAD,UNLOAD,DISPOSE,STORE,WASH,ANNEAL)" - }, - "startTimestamp": { - "type": "string", - "format": "date-time", - "description": "" - }, - "endTimestamp": { - "type": "string", - "format": "date-time", - "description": "" - }, - "status": { - "type": "string", - "description": "enum(SUCCESS,ERROR,CRITICAL,WARNING,EPICSFAIL,COMMANDNOTSENT)" - }, - "message": { - "type": "string", - "description": "" - }, - "containerLocation": { - "type": "integer", - "description": "" - }, - "dewarLocation": { - "type": "integer", - "description": "" - }, - "sampleBarcode": { - "type": "string", - "description": "" - }, - "xtalSnapshotBefore": { - "type": "string", - "description": "" - }, - "xtalSnapshotAfter": { - "type": "string", - "description": "" - } - }, - "type": "object" - }, - "Detector": { - "required": [ - "detectorId" - ], - "properties": { - "detectorId": { - "type": "integer", - "description": "Primary key (auto-incremented)" - }, - "detectorType": { - "type": "string", - "description": "" - }, - "detectorManufacturer": { - "type": "string", - "description": "" - }, - "detectorModel": { - "type": "string", - "description": "" - }, - "detectorPixelSizeHorizontal": { - "type": "number", - "description": "" - }, - "detectorPixelSizeVertical": { - "type": "number", - "description": "" - }, - "DETECTORMAXRESOLUTION": { - "type": "number", - "description": "" - }, - "DETECTORMINRESOLUTION": { - "type": "number", - 
"description": "" - }, - "detectorSerialNumber": { - "type": "string", - "description": "" - }, - "detectorDistanceMin": { - "type": "string", - "description": "" - }, - "detectorDistanceMax": { - "type": "string", - "description": "" - }, - "trustedPixelValueRangeLower": { - "type": "string", - "description": "" - }, - "trustedPixelValueRangeUpper": { - "type": "string", - "description": "" - }, - "sensorThickness": { - "type": "number", - "description": "" - }, - "overload": { - "type": "number", - "description": "" - }, - "XGeoCorr": { - "type": "string", - "description": "" - }, - "YGeoCorr": { - "type": "string", - "description": "" - }, - "detectorMode": { - "type": "string", - "description": "" - }, - "density": { - "type": "number", - "description": "" - }, - "composition": { - "type": "string", - "description": "" - }, - "numberOfPixelsX": { - "type": "integer", - "description": "Detector number of pixels in x" - }, - "numberOfPixelsY": { - "type": "integer", - "description": "Detector number of pixels in y" - }, - "detectorRollMin": { - "type": "string", - "description": "unit: degrees" - }, - "detectorRollMax": { - "type": "string", - "description": "unit: degrees" - }, - "localName": { - "type": "string", - "description": "Colloquial name for the detector" - } - }, - "type": "object" - } - }, - "responses": { - "ParseError": { - "description": "When a mask can't be parsed" - }, - "MaskError": { - "description": "When any error occurs on mask" - } - } -} \ No newline at end of file diff --git a/docs/tests.md b/docs/tests.md new file mode 100644 index 00000000..c54ab62e --- /dev/null +++ b/docs/tests.md @@ -0,0 +1,41 @@ +## py-ispyb tests + +--- + +### Coverage information + +Test coverage information is available at +[https://app.codecov.io/gh/ispyb/py-ispyb/](https://app.codecov.io/gh/ispyb/py-ispyb/) + +### Run test + +In order to run the test, you need to have the test database up and running: + +```bash +sudo docker run -p 3306:3306 -d --rm --name pydb-test ispyb/ispyb-pydb:latest +``` + +Install dev dependencies: + +```bash +pip install -r requirements-dev.txt +``` + +Then, to run the tests, simply type: + +```bash +export ISPYB_ENVIRONMENT="test" +pytest +``` + +To run the linting, type: + +```bash +flake8 +``` + +Convenience script to run both of them: + +```bash +. scripts/test.sh +``` diff --git a/docs/upsync.md b/docs/upsync.md new file mode 100644 index 00000000..c2c47add --- /dev/null +++ b/docs/upsync.md @@ -0,0 +1,71 @@ +# User Portal Sync + +To import/sync User Portal data into py-ISPyB each facility needs to create a specific JSON structure as input for the sync route. + +Currently, there is a single API route to import a proposal including: sessions, lab contacts, proteins, etc. + +The API endpoint uses a JSON schema to validate the incoming data. + +## JSON schema + +To get the current User Portal Sync JSON schema just use the following API route: `ispyb/api/v1/openapi.json` + +You may find the schema under **components -> schemas -> UserPortalProposalSync**. + +Your User Portal application may use the schema to test/validate the input data. + +## Syncronize a proposal + +To sync a proposal just use the following route: `/ispyb/api/v1/webservices/userportalsync/sync_proposal` + +You may check more information about the User Portal Sync routes/endpoints at: +[https://ispyb.github.io/py-ispyb/api/](https://ispyb.github.io/py-ispyb/api/) + +or by running a local version of py-ISPyB and simply opening the `/docs` route. 
+
+## Example JSON data
+
+You can check for User Portal Sync JSON example data at:
+[https://github.com/ispyb/py-ispyb/tree/master/tests/core/api/data](https://github.com/ispyb/py-ispyb/tree/master/tests/core/api/data)
+
+## Sync process details per entity
+
+The sync process currently supports two ways of establishing the relation between User Portal entities and the ISPyB DB, in order to keep backward compatibility with the legacy ISPyB Java API. The externalId field, currently present in several ISPyB entities, is a MySQL binary(16) field and can hold at most 16 bytes. The externalId fields must therefore be properly encoded/decoded when dealing with the DB; this is not the case with the Java API, since it never used those fields.
+
+The externalId fields can keep an encoded version of an external primary key or UUID from a User Portal, creating an entity link between the User Portal and the ISPyB database (see the encoding sketch at the end of this page).
+
+Once the legacy Java API is no longer used, it will be possible to switch exclusively to the externalId fields.
+
+### Person
+
+The Person sync is based on the **externalId** field or the **login** field.
+
+### Laboratory
+
+The Laboratory sync is based on the **laboratoryExtPk** field, or on the laboratory **name**, **city** and **country** fields.
+
+### Proposal
+
+The Proposal sync is based on the **externalId** field, or on the **proposalCode** (e.g. "MX") and **proposalNumber** (e.g. "3456") fields.
+
+### Proposal participants
+
+Every proposal participant will have a relation within the **ProposalHasPerson** table, and only the first person in the proposal persons list (e.g. the PI/leader) will be added as **personId** within the Proposal table.
+
+### Lab Contacts
+
+Every entry within the JSON proposal persons list will create/update a lab contact entity in the DB.
+
+The Lab Contact sync is based on the **cardName** field.
+
+### Proteins
+
+The Protein sync is based on the **externalId** field or the **acronym** field.
+
+### Sessions
+
+The Session sync is based on the **externalId** field or the **expSessionPk** field.
+
+### Session participants
+
+Every session participant will have a relation within the **Session_has_Person** table.
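+
+### Note on externalId encoding
+
+For illustration, packing an external key into a binary(16) column could look like the sketch below. These helpers are hypothetical, not py-ISPyB's actual implementation; the real sync code may encode the value differently.
+
+```python
+import uuid
+
+
+def encode_external_id(value) -> bytes:
+    """Pack a portal primary key or UUID into 16 bytes for binary(16)."""
+    if isinstance(value, uuid.UUID):
+        return value.bytes  # a UUID is exactly 16 bytes
+    # Integer keys are zero-padded to 16 bytes (big-endian)
+    return int(value).to_bytes(16, "big")
+
+
+def decode_external_int(raw: bytes) -> int:
+    """Inverse of encode_external_id for integer keys."""
+    return int.from_bytes(raw, "big")
+```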
\ No newline at end of file diff --git a/examples/auth.yml b/examples/auth.yml new file mode 100644 index 00000000..52026c47 --- /dev/null +++ b/examples/auth.yml @@ -0,0 +1,22 @@ +AUTH: + - keycloak: + ENABLED: true + AUTH_MODULE: "pyispyb.app.extensions.auth.KeycloakDBGroupsAuthentication" + AUTH_CLASS: "KeycloakAuthentication" + CONFIG: + KEYCLOAK_SERVER_URL: "your_server" + KEYCLOAK_CLIENT_ID: "your_client" + KEYCLOAK_REALM_NAME: "your_realm" + KEYCLOAK_CLIENT_SECRET_KEY: "your_secret" + - ldap: + ENABLED: true + AUTH_MODULE: "pyispyb.app.extensions.auth.LdapAuthentication" + AUTH_CLASS: "LdapAuthentication" + CONFIG: + LDAP_URI: "ldap://your_ldap" + LDAP_BASE_INTERNAL: "ou=People,dc=esrf,dc=fr" + LDAP_BASE_GROUPS: "ou=Pxwebgroups,dc=esrf,dc=fr" + - dummy: # /!\/!\/!\ ONLY USE FOR TESTS /!\/!\/!\ + ENABLED: false + AUTH_MODULE: "pyispyb.app.extensions.auth.DummyAuthentication" + AUTH_CLASS: "DummyAuthentication" diff --git a/examples/core_db_mapping.csv b/examples/core_db_mapping.csv deleted file mode 100644 index 8a447e2e..00000000 --- a/examples/core_db_mapping.csv +++ /dev/null @@ -1,38 +0,0 @@ -auto_proc,AutoProc -auto_proc_integration,AutoProcIntegration -auto_proc_program,AutoProcProgram -auto_proc_program_attachment,AutoProcProgramAttachment -auto_proc_program_message,AutoProcProgramMessage -auto_proc_scaling,AutoProcScaling -auto_proc_scaling_statistics,AutoProcScalingStatistics -auto_proc_status,AutoProcStatus -sample,BLSample -session,BLSession -beam_calendar,BeamCalendar -beamline_setup,BeamLineSetup -container,Container -dewar,Dewar -component_type,ComponentType -crystal,Crystal -data_collection,DataCollection -data_collection_group,DataCollectionGroup -detector,Detector -diffraction_plan,DiffractionPlan -energy_scan,EnergyScan -image_quality_indicators,ImageQualityIndicators -laboratory,Laboratory -lab_contact,LabContact -person,Person -phasing,Phasing -phasing_program_attachment,PhasingProgramAttachment -phasing_program_run,PhasingProgramRun -phasing_statistics,PhasingStatistics -phasing_step,PhasingStep -phasing_has_scaling,Phasing_has_Scaling -proposal,Proposal -proposal_has_person,ProposalHasPerson -protein,Protein -robot_action,RobotAction -screening,Screening -shipping,Shipping -v_datacollection_summary_phasing,v_datacollection_summary_phasing diff --git a/examples/em_db_mapping.csv b/examples/em_db_mapping.csv deleted file mode 100644 index 0bdc74e0..00000000 --- a/examples/em_db_mapping.csv +++ /dev/null @@ -1,3 +0,0 @@ -particle,Particle -motion_correction,MotionCorrection -ctf,CTF diff --git a/examples/ispyb_core_config_example.yml b/examples/ispyb_core_config_example.yml deleted file mode 100644 index 96046543..00000000 --- a/examples/ispyb_core_config_example.yml +++ /dev/null @@ -1,14 +0,0 @@ -server: - SERVICE_NAME : "core" - API_ROOT : "/ispyb/api/v1" - SQLALCHEMY_DATABASE_URI : "mysql://mxuser:mxpass@localhost/pydb_test" - - AUTH_MODULE : "pyispyb.app.extensions.auth.DummyAuth" - AUTH_CLASS : "DummyAuth" - MASTER_TOKEN : "MasterToken" - -authorization_rules: - proposals : { - "get": ["manager", "admin", "user"], - "post": ["manager", "admin"] - } \ No newline at end of file diff --git a/examples/ispyb_em_config_example.yml b/examples/ispyb_em_config_example.yml deleted file mode 100644 index c8c75861..00000000 --- a/examples/ispyb_em_config_example.yml +++ /dev/null @@ -1,8 +0,0 @@ -server: - SERVICE_NAME : "em" - API_ROOT : "/ispyb/api/v1/em" - SQLALCHEMY_DATABASE_URI : "mysql://mxuser:mxpass@localhost/pydb_test" - - AUTH_MODULE : 
"pyispyb.app.extensions.auth.DummyAuth" - AUTH_CLASS : "DummyAuth" - MASTER_TOKEN : "MasterToken" diff --git a/examples/ispyb_ssx_config_example.yml b/examples/ispyb_ssx_config_example.yml deleted file mode 100644 index 0a4f8b74..00000000 --- a/examples/ispyb_ssx_config_example.yml +++ /dev/null @@ -1,8 +0,0 @@ -server: - SERVICE_NAME : "ssx" - API_ROOT : "/ispyb/api/v1/ssx" - SQLALCHEMY_DATABASE_URI : "mysql://mxuser:mxpass@localhost/ispyb_ssx" - - AUTH_MODULE : "pyispyb.app.extensions.auth.DummyAuth" - AUTH_CLASS : "DummyAuth" - MASTER_TOKEN : "MasterToken" \ No newline at end of file diff --git a/examples/ispyb_ssx_db.sql b/examples/ispyb_ssx_db.sql deleted file mode 100644 index dd336c87..00000000 --- a/examples/ispyb_ssx_db.sql +++ /dev/null @@ -1,169 +0,0 @@ -CREATE TABLE `CrystalSlurry` ( - `crystalSlurryId` int UNIQUE PRIMARY KEY AUTO_INCREMENT, - `name` varchar(255), - `crystalSizeDistributionId` int, - `crystalDensity` float COMMENT '1/mm3', - `bufferId` float COMMENT 'reference to Buffer.bufferId' -); - -CREATE TABLE `CrystalSlurry_has_Crystal` ( - `CrystalSlurryHasCrystalId` int UNIQUE PRIMARY KEY AUTO_INCREMENT, - `crystalSlurryId` int NOT NULL, - `crystalId` int NOT NULL -); - -CREATE TABLE `SampleStock` ( - `sampleStockId` int UNIQUE PRIMARY KEY AUTO_INCREMENT, - `name` varchar(255) NOT NULL, - `crystalSlurryId` int NOT NULL, - `concentrationFactor` float NOT NULL, - `crystalDensity` float NOT NULL, - `additiveId` int COMMENT 'reference to Additive.additiveId', - `note` varchar(255) -); - -CREATE TABLE `CrystalSizeDistribution` ( - `crystalSizeDistributionId` int UNIQUE PRIMARY KEY AUTO_INCREMENT, - `crystalHabit` varchar(255), - `characteristicDimensions` varchar(255), - `minDimension` varchar(255) COMMENT 'comma separated floats', - `maxDimension` varchar(255) COMMENT 'comma separated floats' -); - -CREATE TABLE `Micrograph` ( - `micrographId` int UNIQUE PRIMARY KEY AUTO_INCREMENT, - `crystalSlurryId` int NOT NULL, - `url` varchar(255), - `objectSidePixelSize` varchar(255) COMMENT 'comma separated two floats', - `descriptionJson` varchar(255) -); - -CREATE TABLE `LoadedSample` ( - `loadedSampleId` int UNIQUE PRIMARY KEY AUTO_INCREMENT, - `name` varchar(255) COMMENT 'to be used as part of the image and processing file names', - `sampleStockId` int, - `sampleDeliveryDeviceId` int, - `loadingPattern` int, - `descriptionJson` varchar(255) -); - -CREATE TABLE `SsxDataAcquisition` ( - `ssxDataAcquisitionId` int UNIQUE PRIMARY KEY AUTO_INCREMENT, - `loadedSampleId` int NOT NULL, - `dataCollectionId` int NOT NULL COMMENT 'reference to DataCollection.dataCollectionId', - `experimentalPlanId` int NOT NULL, - `eventLogFilename` varchar(255) NOT NULL COMMENT 'url to shorlist file', - `dataSetId` int NOT NULL, - `autoprocessingProgrammId` int COMMENT 'reference to AutoProcProgram.autoProcProgramId' -); - -CREATE TABLE `DataSet` ( - `dataSetId` int UNIQUE PRIMARY KEY AUTO_INCREMENT, - `name` varchar(255) NOT NULL, - `mergedResultsFilename` varchar(255) -); - -CREATE TABLE `ExperimentalPlan` ( - `experimentalPlanId` int UNIQUE PRIMARY KEY AUTO_INCREMENT, - `name` varchar(255), - `numberOfRepetitions` int COMMENT 'for micro-fluidic, jet, tape but not for chip', - `period` float COMMENT 'seconds but unknown/self adjusting for chip', - `masterTriggerId` int, - `repeatedSequenceId` int NOT NULL -); - -CREATE TABLE `SampleDeliveryDevice` ( - `sampleDeliveryDeviceId` int UNIQUE PRIMARY KEY AUTO_INCREMENT, - `type` ENUM ('photoChip', 'microFluidics', 'viscoousJet', 'tapeDevice'), - 
`descriptionJson` varchar(255) -); - -CREATE TABLE `MasterTrigger` ( - `masterTriggerId` int UNIQUE PRIMARY KEY AUTO_INCREMENT, - `nameInEventLog` varchar(255), - `triggerDevice` int, - `descriptionJson` varchar(255) -); - -CREATE TABLE `RepeatedSequence` ( - `repeatedSequenceId` int UNIQUE PRIMARY KEY AUTO_INCREMENT, - `name` varchar(255) -); - -CREATE TABLE `EventTrain` ( - `eventTrainId` int UNIQUE PRIMARY KEY AUTO_INCREMENT, - `name` varchar(255), - `timeOn` float COMMENT 'sec', - `duration` float COMMENT 'sec', - `period` float, - `numberOfRepetitions` float, - `nameInEventLog` varchar(255), - `triggerDevice` varchar(255) -); - -CREATE TABLE `TimedExcitation` ( - `timedExcitationId` int UNIQUE PRIMARY KEY AUTO_INCREMENT, - `name` varchar(255), - `repeatedSequenceId` int, - `eventTrainId` int, - `ssxExcitation` varchar(255) -); - -CREATE TABLE `TimedXrayExposure` ( - `timedXrayExposureId` int UNIQUE PRIMARY KEY AUTO_INCREMENT, - `name` varchar(255), - `repeatedSequenceId` int, - `eventTrainId` int, - `timedBunches` varchar(255), - `shutter` varchar(255) -); - -CREATE TABLE `TimedXrayDetection` ( - `timedXrayDetectionId` int UNIQUE PRIMARY KEY AUTO_INCREMENT, - `name` varchar(255), - `repeatedSequenceId` int, - `eventTrainId` int, - `numberOfInternalTriggers` int, - `internalTriggerPeriod` int, - `internalGateDuration` int -); - -ALTER TABLE `CrystalSlurry` ADD FOREIGN KEY (`crystalSizeDistributionId`) REFERENCES `CrystalSizeDistribution` (`crystalSizeDistributionId`); - -ALTER TABLE `CrystalSlurry_has_Crystal` ADD FOREIGN KEY (`crystalSlurryId`) REFERENCES `CrystalSlurry` (`crystalSlurryId`); - -ALTER TABLE `SampleStock` ADD FOREIGN KEY (`crystalSlurryId`) REFERENCES `CrystalSlurry` (`crystalSlurryId`); - -ALTER TABLE `Micrograph` ADD FOREIGN KEY (`crystalSlurryId`) REFERENCES `CrystalSlurry` (`crystalSlurryId`); - -ALTER TABLE `LoadedSample` ADD FOREIGN KEY (`sampleStockId`) REFERENCES `SampleStock` (`sampleStockId`); - -ALTER TABLE `LoadedSample` ADD FOREIGN KEY (`sampleDeliveryDeviceId`) REFERENCES `SampleDeliveryDevice` (`sampleDeliveryDeviceId`); - -ALTER TABLE `SsxDataAcquisition` ADD FOREIGN KEY (`loadedSampleId`) REFERENCES `LoadedSample` (`loadedSampleId`); - -ALTER TABLE `SsxDataAcquisition` ADD FOREIGN KEY (`experimentalPlanId`) REFERENCES `ExperimentalPlan` (`experimentalPlanId`); - -ALTER TABLE `SsxDataAcquisition` ADD FOREIGN KEY (`dataSetId`) REFERENCES `DataSet` (`dataSetId`); - -ALTER TABLE `ExperimentalPlan` ADD FOREIGN KEY (`masterTriggerId`) REFERENCES `MasterTrigger` (`masterTriggerId`); - -ALTER TABLE `ExperimentalPlan` ADD FOREIGN KEY (`repeatedSequenceId`) REFERENCES `RepeatedSequence` (`repeatedSequenceId`); - -ALTER TABLE `TimedExcitation` ADD FOREIGN KEY (`repeatedSequenceId`) REFERENCES `RepeatedSequence` (`repeatedSequenceId`); - -ALTER TABLE `TimedExcitation` ADD FOREIGN KEY (`eventTrainId`) REFERENCES `EventTrain` (`eventTrainId`); - -ALTER TABLE `TimedXrayExposure` ADD FOREIGN KEY (`repeatedSequenceId`) REFERENCES `RepeatedSequence` (`repeatedSequenceId`); - -ALTER TABLE `TimedXrayExposure` ADD FOREIGN KEY (`eventTrainId`) REFERENCES `EventTrain` (`eventTrainId`); - -ALTER TABLE `TimedXrayDetection` ADD FOREIGN KEY (`repeatedSequenceId`) REFERENCES `RepeatedSequence` (`repeatedSequenceId`); - -ALTER TABLE `TimedXrayDetection` ADD FOREIGN KEY (`eventTrainId`) REFERENCES `EventTrain` (`eventTrainId`); - -ALTER TABLE `CrystalSlurry` COMMENT = "Describes sample as delivered to the beamline"; - -ALTER TABLE `SampleStock` COMMENT = "Describes sample prepared 
for loading on delivery device"; - -ALTER TABLE `CrystalSizeDistribution` COMMENT = "describes crystal mixture in suspension"; diff --git a/examples/simulation.yml b/examples/simulation.yml new file mode 100644 index 00000000..69aa86ef --- /dev/null +++ b/examples/simulation.yml @@ -0,0 +1,91 @@ +# Whether to link or copy data +copy_method: copy + +# Map each beamline to a session +sessions: + bl: blc00001-1 + +# Where to copy raw data from +raw_data: /data/ispyb-test + +# Where to write simulated data to, can use {beamline} placeholder +data_dir: /data/tests/{beamline}/simulation + +ispyb_url: https://ispyb.esrf.fr + +# Define Components (Proteins) +components: + # an internal reference for the component + comp1: + # columns to populate for this component + acronym: Component1 + sequence: SiSP + molecularMass: 12.5 + + comp2: + acronym: Component2 + +# Define BLSamples +samples: + # an internal reference for this sample + samp1: + # columns to populate for this sample + name: Sample1 + # which component this sample is an instance of (one of the keys in components above) + component: comp1 + + samp2: + name: Sample2 + component: comp2 + +# Define Experiments (DataCollections) +experiments: + # a shortname for this experiment (available via cli) + energy_scan1: + # the experimentType, must map to a valid type in DataCollectionGroup.experimentType + experimentType: OSC + # data will be split into its respective imageDirectory and fileTemplate columns + data: osc/oscillation.h5 + # which sample to link this data collection to (one of the keys in samples above) + sample: samp1 + + # columns to populate + # xtalSnapshot thumbnails should have a trailing t + # Fullsize image: osc/snapshot1.png + # Thumbnail: osc/snapshot1t.png + xtalSnapshotFullPath1: osc/snapshot1.png + numberOfImages: 4001 + exposureTime: 1 + #energy: 8.8143 + wavelength: 1.4065 + imageContainerSubPath: 1.1/measurement + + xrf_map1: + experimentType: Mesh + data: mesh/mesh.h5 + sample: samp1 + + xtalSnapshotFullPath1: mesh/snapshot1.png + numberOfImages: 1600 + exposureTime: 0.03 + #energy: 2.4817 + wavelength: 4.9959 + + # additionally populate GridInfo + grid: + steps_x: 40 + steps_y: 40 + dx_mm: 0.001 + dy_mm: 0.001 + pixelsPerMicronX: -0.44994 + pixelsPerMicronY: -0.46537 + snapshot_offsetXPixel: 682.16 + snapshot_offsetYPixel: 554 + + # additionally populate BlSubSample + subsample: + x: 9038007 + y: 24467003 + x2: 9078007 + y2: 24507003 + type: roi diff --git a/examples/ssx_db_mapping.csv b/examples/ssx_db_mapping.csv deleted file mode 100644 index 4ec6ac0a..00000000 --- a/examples/ssx_db_mapping.csv +++ /dev/null @@ -1,17 +0,0 @@ -crystal_size_distribution,CrystalSizeDistribution -crystal_slurry,CrystalSlurry -crystal_slurry_has_crystal,CrystalSlurryHasCrystal -data_set,DataSet -event_train,EventTrain -experimental_plan,ExperimentalPlan -loaded_sample,LoadedSample -master_trigger,MasterTrigger -micrograph,Micrograph -repeated_sequence,RepeatedSequence -sample_delivery_device,SampleDeliveryDevice -sample_stock,SampleStock -ssx_data_aquisition,SsxDataAcquisition -timed_excitation,TimedExcitation -timed_sequence,TimedSequence, -timed_xray_detection,TimedXrayDetection -timed_xray_exposure,TimedXrayExposure diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 00000000..03bee1e4 --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,30 @@ +site_name: py-ISPyB +nav: + - Home: index.md + - Get Started: run.md + - Tests: tests.md + - Configuration: conf.md + - Authentication and Authorization: + - Authentication: auth.md + - 
Authorization: authorization.md + - Permissions: permissions.md + - Routes: + - About: routes.md + - ⧉ Endpoint documentation: https://ispyb.github.io/py-ispyb/api/ + - Simulator: simulator.md + - User Portal Sync: upsync.md + - Developers: + - Notes: developers.md + - ⧉ Test coverage: https://app.codecov.io/gh/ispyb/py-ispyb/ + +theme: + name: material + +site_dir: public +site_url: https://ispyb.github.io/py-ispyb/ +repo_url: https://github.com/ispyb/py-ispyb +repo_name: "GitHub" +markdown_extensions: + - toc: + permalink: "#" +dev_addr: "127.0.0.1:8080" diff --git a/py_file_header.txt b/py_file_header.txt deleted file mode 100644 index 14124376..00000000 --- a/py_file_header.txt +++ /dev/null @@ -1,25 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - diff --git a/pyispyb/__init__.py b/pyispyb/__init__.py index ee227ffc..aa5ae44e 100644 --- a/pyispyb/__init__.py +++ b/pyispyb/__init__.py @@ -18,91 +18,3 @@ # # You should have received a copy of the GNU Lesser General Public License # along with py-ispyb. If not, see . - - -import os -import sys -import importlib -from flask import Flask -from flask_cors import CORS - -from pyispyb import config - -__license__ = "LGPLv3+" - - -CONFIG_NAME_MAPPER = { - "dev": "DevelopmentConfig", - "test": "TestingConfig", - "prod": "ProductionConfig", -} - -def create_app(config_path=None, run_mode="dev", **kwargs): - """ - Entry point to the Flask RESTful Server application. - """ - - app = Flask(__name__, **kwargs) - CORS(app) - # TODO configure CORS via config file - - env_config_path = os.getenv("ISPYB_CONFIG") - if config_path is None: - config_path = env_config_path - if config_path is None: - config_path = "ispyb_core_config.yml" - - app.logger.debug("Starting ISPyB server in %s mode" % run_mode) - - try: - config_obj = getattr(config, CONFIG_NAME_MAPPER[run_mode]) - app.config.from_object(config_obj(config_path)) - except ImportError as ex: - app.logger.error( # pylint: disable=no-member - "Unabled to start the ISPyB server with configuration %s (%s)" % ( - config_path, - str(ex)) - ) - app.logger.error(str(ex)) - sys.exit(1) - raise - - from pyispyb.app import extensions - - extensions.init_app(app) - - service_module = importlib.import_module("pyispyb." 
+ app.config["SERVICE_NAME"]) - service_module.init_app(app) - - from pyispyb.app import routes - - routes.init_app(app) - - # import ispyb_service_connector - # ispyb_service_connector.check_service_connection(app.config["SERVICE_CONNECTIONS"]) - - if app.config["TEMP_FOLDER"]: - if not os.path.exists(app.config["TEMP_FOLDER"]): - try: - os.makedirs(app.config["TEMP_FOLDER"]) - print("Temp dir %s created" % app.config["TEMP_FOLDER"]) - except PermissionError as ex: - print("Unable to create temp dir %s (%s)" % ( - app.config["TEMP_FOLDER"], - str(ex) - ) - ) - if app.config["UPLOAD_FOLDER"]: - if not os.path.exists(app.config["UPLOAD_FOLDER"]): - try: - os.makedirs(app.config["UPLOAD_FOLDER"]) - print("Upload dir %s created" % app.config["UPLOAD_FOLDER"]) - except PermissionError as ex: - print("Unable to create upload dir %s (%s)" % ( - app.config["UPLOAD_FOLDER"], - str(ex) - ) - ) - - app.logger.debug("ISPyB server started") - return app \ No newline at end of file diff --git a/pyispyb/app/base.py b/pyispyb/app/base.py new file mode 100644 index 00000000..27c27f45 --- /dev/null +++ b/pyispyb/app/base.py @@ -0,0 +1,22 @@ +from fastapi import APIRouter, Depends +from .extensions.auth.bearer import JWTBearer +from fastapi.routing import APIRoute + + +class BaseRouter(APIRouter): + pass + + +def custom_generate_unique_id(route: APIRoute): + res = f"{route.name}" + return res + + +class AuthenticatedAPIRouter(BaseRouter): + def __init__(self, *args, **kwargs): + super().__init__( + *args, + dependencies=[Depends(JWTBearer)], + **kwargs, + generate_unique_id_function=custom_generate_unique_id, + ) diff --git a/pyispyb/app/extensions/__init__.py b/pyispyb/app/extensions/__init__.py index 09d68eb2..cae19374 100644 --- a/pyispyb/app/extensions/__init__.py +++ b/pyispyb/app/extensions/__init__.py @@ -1,51 +1,17 @@ -""" -Project: py-ispyb -https://github.com/ispyb/py-ispyb +# Project: py-ispyb +# https://github.com/ispyb/py-ispyb -This file is part of py-ispyb software. +# This file is part of py-ispyb software. -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. +# py-ispyb is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. +# py-ispyb is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -from .flask_sqlalchemy import SQLAlchemy -from .report import report -from .user_office import user_office -from .auth import auth_provider -from . 
import api - -__license__ = "LGPLv3+" - - -from sqlalchemy.dialects.mysql.enumerated import ENUM -from sqlalchemy.dialects.mysql.types import LONGBLOB - -from pyispyb.app.extensions.logging import Logging - -logging = Logging() - - -db = SQLAlchemy() -db.ENUM = ENUM -db.LONGBLOB = LONGBLOB - - -def init_app(app): - """Initializes app extensions - - Args: - app (flask app): Flask application - """ - for extension in (api, auth_provider, logging, db, user_office): - extension.init_app(app) +# You should have received a copy of the GNU Lesser General Public License +# along with py-ispyb. If not, see . diff --git a/pyispyb/app/extensions/api/__init__.py b/pyispyb/app/extensions/api/__init__.py deleted file mode 100644 index 2ef87bf4..00000000 --- a/pyispyb/app/extensions/api/__init__.py +++ /dev/null @@ -1,49 +0,0 @@ -"""Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from copy import deepcopy - -from .api import Api -from .namespace import Namespace -from .http_exceptions import abort - -api_v1 = None - -def init_app(app, **kwargs): - # pylint: disable=unused-argument - """ - API extension initialization point. - """ - # Prevent config variable modification with runtime changes - - global api_v1 - api_v1 = Api( - version="1.0", - title="ISPyB", - description="ISPyB Flask rest server", - doc=app.config["SWAGGER_UI_URI"], - default="Main", - default_label="Main", - ) - api_v1.authorizations = deepcopy(app.config["AUTHORIZATIONS"]) \ No newline at end of file diff --git a/pyispyb/app/extensions/api/api.py b/pyispyb/app/extensions/api/api.py deleted file mode 100644 index c5dc37c5..00000000 --- a/pyispyb/app/extensions/api/api.py +++ /dev/null @@ -1,72 +0,0 @@ -""" -Project: py-ispyb -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from six import iteritems - -from flask_restx import Api as BaseApi - -from .namespace import Namespace - - -class Api(BaseApi): - """ - Having app-specific handlers here. 
- """ - - def namespace(self, *args, **kwargs): - # The only purpose of this method is to pass custom Namespace class - _namespace = Namespace(*args, **kwargs) - self.namespaces.append(_namespace) - return _namespace - - def add_oauth_scope(self, scope_name, scope_description): - for authorization_settings in self.authorizations.values(): - if authorization_settings["type"].startswith("oauth"): - assert scope_name not in authorization_settings["scopes"], ( - "OAuth scope %s already exists" % scope_name - ) - authorization_settings["scopes"][scope_name] = scope_description - - def add_namespace(self, ns, path=None): - # Rewrite security rules for OAuth scopes since Namespaces don't have - # enough information about authorization methods. - for resource, _, _ in ns.resources: - for method in resource.methods: - method_func = getattr(resource, method.lower()) - - if ( - hasattr(method_func, "__apidoc__") - and "security" in method_func.__apidoc__ - and "__oauth__" in method_func.__apidoc__["security"] - ): - oauth_scopes = method_func.__apidoc__["security"]["__oauth__"][ - "scopes" - ] - method_func.__apidoc__["security"] = { - auth_name: oauth_scopes - for auth_name, auth_settings in iteritems(self.authorizations) - if auth_settings["type"].startswith("oauth") - } - - super(Api, self).add_namespace(ns, path=path) diff --git a/pyispyb/app/extensions/api/http_exceptions.py b/pyispyb/app/extensions/api/http_exceptions.py deleted file mode 100644 index aba644ee..00000000 --- a/pyispyb/app/extensions/api/http_exceptions.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from flask_restx.errors import abort as restplus_abort -from flask_restx._http import HTTPStatus - - -API_DEFAULT_HTTP_CODE_MESSAGES = { - HTTPStatus.UNAUTHORIZED.value: ( - "The server could not verify that you are authorized to access the " - "URL requested. You either supplied the wrong credentials (e.g. a bad " - "password), or your browser doesn't understand how to supply the " - "credentials required." - ), - HTTPStatus.FORBIDDEN.value: ( - "You don't have the permission to access the requested resource." - ), - HTTPStatus.UNPROCESSABLE_ENTITY.value: ( - "The request was well-formed but was unable to be followed due to semantic errors." - ), -} - - -def abort(code, message=None, **kwargs): - """ - Custom abort function used to provide extra information in the error - response, namely, ``status`` and ``message`` info. 
- """ - if message is None: - if code in API_DEFAULT_HTTP_CODE_MESSAGES: # pylint: disable=consider-using-get - message = API_DEFAULT_HTTP_CODE_MESSAGES[code] - else: - message = HTTPStatus( - code - ).description # pylint: disable=no-value-for-parameter - restplus_abort(code=code, status=code, message=message, **kwargs) diff --git a/pyispyb/app/extensions/api/namespace.py b/pyispyb/app/extensions/api/namespace.py deleted file mode 100644 index 42840a09..00000000 --- a/pyispyb/app/extensions/api/namespace.py +++ /dev/null @@ -1,92 +0,0 @@ -"""Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from contextlib import contextmanager -from functools import wraps -import logging - -import flask_marshmallow -import sqlalchemy - -from flask_restx import Namespace as BaseNamespace -from flask_restx._http import HTTPStatus - -from . import http_exceptions -from .webargs_parser import CustomWebargsParser - - -log = logging.getLogger(__name__) - - -class Namespace(BaseNamespace): - """ - Having app-specific handlers here. - """ - - WEBARGS_PARSER = CustomWebargsParser() - - def resolve_object(self, object_arg_name, resolver): - """ - A helper decorator to resolve object instance from arguments (e.g. identity). - Example: - >>> @namespace.route('/') - ... class MyResource(Resource): - ... @namespace.resolve_object( - ... object_arg_name='user', - ... resolver=lambda kwargs: User.query.get_or_404(kwargs.pop('user_id')) - ... ) - ... def get(self, user): - ... # user is a User instance here - """ - - def decorator(func_or_class): - if isinstance(func_or_class, type): - # Handle Resource classes decoration - # pylint: disable=protected-access - func_or_class._apply_decorator_to_methods(decorator) - return func_or_class - - @wraps(func_or_class) - def wrapper(*args, **kwargs): - kwargs[object_arg_name] = resolver(kwargs) - return func_or_class(*args, **kwargs) - - return wrapper - - return decorator - - def model(self, name=None, model=None, **kwargs): - # pylint: disable=arguments-differ - """ - A decorator which registers a model (aka schema / definition). - - This extended implementation auto-generates a name for - ``Flask-Marshmallow.Schema``-based instances by using a class name - with stripped off `Schema` prefix. - """ - if isinstance(model, flask_marshmallow.Schema) and not name: - name = model.__class__.__name__ - if name.endswith("Schema"): - name = name[: -len("Schema")] - return super(Namespace, self).model(name=name, model=model, **kwargs) diff --git a/pyispyb/app/extensions/api/parameters.py b/pyispyb/app/extensions/api/parameters.py deleted file mode 100644 index 4d31634c..00000000 --- a/pyispyb/app/extensions/api/parameters.py +++ /dev/null @@ -1,45 +0,0 @@ -"""Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. 
- -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from marshmallow import validate - -from flask_marshmallow import base_fields -from pyispyb.flask_restx_patched import Parameters - - -class PaginationParameters(Parameters): - """ - Helper Parameters class to reuse pagination. - """ - - limit = base_fields.Integer( - description="limit a number of items (allowed range is 1-100), default is 20.", - missing=20, - validate=validate.Range(min=1, max=100), - ) - offset = base_fields.Integer( - description="a number of items to skip, default is 0.", - missing=0, - validate=validate.Range(min=0), - ) diff --git a/pyispyb/app/extensions/api/webargs_parser.py b/pyispyb/app/extensions/api/webargs_parser.py deleted file mode 100644 index 1ee0f239..00000000 --- a/pyispyb/app/extensions/api/webargs_parser.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from webargs.flaskparser import FlaskParser - -from .http_exceptions import abort - - -class CustomWebargsParser(FlaskParser): - """ - This custom Webargs Parser aims to overload :meth:``handle_error`` in order - to call our custom :func:``abort`` function. - - See the following issue and the related PR for more details: - https://github.com/sloria/webargs/issues/122 - """ - - def handle_error(self, error, *args, **kwargs): - # pylint: disable=arguments-differ - """ - Handles errors during parsing. Aborts the current HTTP request and - responds with a 422 error. - """ - status_code = getattr(error, "status_code", self.DEFAULT_VALIDATION_STATUS) - abort(status_code, messages=error.messages) diff --git a/pyispyb/app/extensions/auth/AbstractAuthentication.py b/pyispyb/app/extensions/auth/AbstractAuthentication.py index f1272d4f..7cce4fb3 100644 --- a/pyispyb/app/extensions/auth/AbstractAuthentication.py +++ b/pyispyb/app/extensions/auth/AbstractAuthentication.py @@ -1,53 +1,76 @@ -"""Project: py-ispyb. +from abc import ABC +import enum +import logging +from typing import Any, Optional -https://github.com/ispyb/py-ispyb +from ispyb import models -This file is part of py-ispyb software. 
-py-ispyb is free software: you can redistribute it and/or modify
-it under the terms of the GNU Lesser General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
+logger = logging.getLogger(__name__)
 
-py-ispyb is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU Lesser General Public License for more details.
 
-You should have received a copy of the GNU Lesser General Public License
-along with py-ispyb. If not, see .
-"""
+class AuthType(str, enum.Enum):
+    login = "login"
+    token = "token"  # nosec
 
-__license__ = "LGPLv3+"
-
-
-import abc
-
-
-class AbstractAuthentication(object):
-
+class AbstractAuthentication(ABC):
     """
     Abstract authentication class. Base class for all site specific authentication classes
     """
 
-    __metaclass__ = abc.ABCMeta
+    authentication_type: AuthType = AuthType.login
+    config_export: list[str] = []
 
-    def init_app(self, app):
-        """Initializes auth class.
+    def configure(self, config: dict[str, Any]) -> None:
+        """Configure auth plugin.
 
         Args:
-            app (flask app): Flask app
+            config (dict): plugin configuration from file
         """
         return
 
-    @abc.abstractmethod
-    def get_roles(self, username, password):
-        """Returns roles associated to the user.
+    def authenticate(
+        self, login: Optional[str], password: Optional[str], token: Optional[str]
+    ) -> Optional[models.Person]:
+        if self.authentication_type == AuthType.token:
+            logger.debug("Authenticating via token")
+            person = self.authenticate_by_token(token)
+        else:
+            logger.debug("Authenticating via login")
+            person = self.authenticate_by_login(login, password)
+
+        return person
+
+    def authenticate_by_login(
+        self, login: str, password: str
+    ) -> Optional[models.Person]:
+        """Child method used when authenticating via login / password.
+
+        Returns a populated `Person` if authentication succeeds
 
         Args:
-            username (str): username
-            password (str): password
+            login (str): The login
+            password (str): The password
+
+        Returns:
+            person (models.Person): If authenticated, a prepopulated `Person`
+
+        """
+        pass
+
+    def authenticate_by_token(self, token: str) -> Optional[models.Person]:
+        """Child method used when authenticating via token.
+
+        Returns a populated `Person` if authentication succeeds
+
+        Args:
+            token (str): The token
+
+        Returns:
+            person (models.Person): If authenticated, a prepopulated `Person`
+        """
+        pass
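For orientation, a minimal site-specific plugin built on the new AbstractAuthentication API above might look like the following sketch; the module, class and credential check are hypothetical and only illustrate the expected shape:

    # Hypothetical pyispyb/app/extensions/auth/MySiteAuthentication.py
    from typing import Optional

    from ispyb import models

    from .AbstractAuthentication import AbstractAuthentication


    class MySiteAuthentication(AbstractAuthentication):
        """Illustrative login/password plugin for a fictional user store."""

        def authenticate_by_login(
            self, login: str, password: str
        ) -> Optional[models.Person]:
            # Replace with a real credential check against the site user store
            if self._check_credentials(login, password):
                return models.Person(login=login)
            return None

        def _check_credentials(self, login: str, password: str) -> bool:
            raise NotImplementedError  # site specific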
-""" - - -__license__ = "LGPLv3+" - - -from pyispyb.app.extensions.auth.AbstractAuthentication import AbstractAuthentication +from .AbstractAuthentication import AbstractAuthentication class DummyAuthentication(AbstractAuthentication): """Dummy authentication class.""" - def get_roles(self, username, password): - """Returns roles - - Args: - username (str): str - password (str): str - Returns: - list: list of roles - """ - - roles = ["user"] - - if "manager" in username: - roles.append("manager") - if "admin" in username: - roles.append("admin") - - return roles + def authenticate_by_login( + self, login: str, password: str + ) -> Optional[models.Person]: + return models.Person(login=login) diff --git a/pyispyb/app/extensions/auth/KeycloakDBGroupsAuthentication.py b/pyispyb/app/extensions/auth/KeycloakDBGroupsAuthentication.py new file mode 100644 index 00000000..233b8062 --- /dev/null +++ b/pyispyb/app/extensions/auth/KeycloakDBGroupsAuthentication.py @@ -0,0 +1,49 @@ +import logging +from typing import Any, Optional + +from keycloak.exceptions import KeycloakAuthenticationError +from keycloak.keycloak_openid import KeycloakOpenID +from ispyb import models + +from .AbstractAuthentication import AbstractAuthentication, AuthType + + +logger = logging.getLogger(__name__) + + +class KeycloakAuthentication(AbstractAuthentication): + """Keycloak authentication class.""" + + authentication_type = AuthType.token + config_export = ["KEYCLOAK_SERVER_URL", "KEYCLOAK_CLIENT_ID", "KEYCLOAK_REALM_NAME"] + + def configure(self, config: dict[str, Any]): + """Configure auth plugin. + + Args: + config (dict): plugin configuration from file + """ + server_url = config["KEYCLOAK_SERVER_URL"] + client_id = config["KEYCLOAK_CLIENT_ID"] + realm_name = config["KEYCLOAK_REALM_NAME"] + client_secret_key = config["KEYCLOAK_CLIENT_SECRET_KEY"] + + self.keycloak_openid = KeycloakOpenID( + server_url=server_url, + client_id=client_id, + realm_name=realm_name, + client_secret_key=client_secret_key, + verify=True, + ) + + def authenticate_by_token(self, token: str) -> Optional[models.Person]: + try: + userinfo = self.keycloak_openid.userinfo(token) + return models.Person( + givenName=userinfo["given_name"], + familyName=userinfo["family_name"], + login=userinfo["preferred_username"], + emailAddress=userinfo["email"], + ) + except KeycloakAuthenticationError: + logger.exception("Could not log user in via keycloak token") diff --git a/pyispyb/app/extensions/auth/LdapAuthentication.py b/pyispyb/app/extensions/auth/LdapAuthentication.py index 2f1cbe22..1af921dd 100644 --- a/pyispyb/app/extensions/auth/LdapAuthentication.py +++ b/pyispyb/app/extensions/auth/LdapAuthentication.py @@ -1,119 +1,51 @@ -""" -Project: py-ispyb -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - -import logging import ldap +import logging +from typing import Any, Optional +from ispyb import models -from flask import current_app -from pyispyb.app.extensions.auth.AbstractAuthentication import AbstractAuthentication - - -__license__ = "LGPLv3+" +from .AbstractAuthentication import AbstractAuthentication -log = logging.getLogger(__name__) +logger = logging.getLogger(__name__) class LdapAuthentication(AbstractAuthentication): - def __init__(self): - AbstractAuthentication.__init__(self) - - self.ldap_conn = None - - def init_app(self, app): - """ - Initializes ldap connection - - Args: - app (flask app): current flask app - """ - self.ldap_conn = ldap.initialize(app.config["LDAP_URI"]) - - def get_roles(self, username, password): - """ - Returns list of roles based on username and password. - - Args: - username (str): user name - password (str): password - - Returns: - list: [list of roles as strings - """ - roles = [] - search_filter = "(uid=%s)" % username - attrs = ["*"] - + def configure(self, config: dict[str, Any]) -> None: + self.ldap_url = config["LDAP_URI"] + self.ldap_base_internal = config["LDAP_BASE_INTERNAL"] + self.ldap_base_groups = config["LDAP_BASE_GROUPS"] + + def authenticate_by_login( + self, login: str, password: str + ) -> Optional[models.Person]: try: - msg = "LDAP login: try to authenticate user %s as internal user" % username - log.debug(msg) - search_str = ( - "uid=" + username + "," + current_app.config["LDAP_BASE_INTERNAL"] - ) - self.ldap_conn.simple_bind_s(search_str, password) - result = self.ldap_conn.search_s( - current_app.config["LDAP_BASE_INTERNAL"], - ldap.SCOPE_ONELEVEL, - search_filter, - attrs, + logger.debug( + f"LDAP login: try to authenticate user `{login}` as internal user" ) - if result: - roles.append("manager") - msg = ( - "LDAP login: user %s authenticated as internal user (manager role)" - % username - ) - log.debug(msg) - except ldap.INVALID_CREDENTIALS as ex: - msg = "LDAP login: unable to authenticate user %s (%s)" % ( - username, - str(ex), + self.ldap_conn = ldap.initialize(self.ldap_url) + self.ldap_conn.simple_bind_s( + f"uid={login},{self.ldap_base_internal}", password ) - log.exception(msg) - - try: - msg = "LDAP login: try to authenticate user %s as external user" % username - log.debug(msg) - search_str = ( - "uid=" + username + "," + current_app.config["LDAP_BASE_EXTERNAL"] - ) - self.ldap_conn.simple_bind_s(search_str, password) - result = self.ldap_conn.search_s( - current_app.config["LDAP_BASE_EXTERNAL"], + res = self.ldap_conn.search_s( + self.ldap_base_internal, ldap.SCOPE_ONELEVEL, - search_filter, - attrs, + f"(uid={login})", + ["*"], + )[0][1] + + def get_value(v: str): + if v in res: + return res[v][0] + return None + + return models.Person( + login=login, + emailAddress=get_value("mail"), + siteId=get_value("uidNumber"), + familyName=get_value("sn"), + givenName=get_value("givenName"), + phoneNumber=get_value("telephoneNumber"), ) - if result: - roles.append("user") - msg = ( - "LDAP login: user %s authenticated as external user (user role)" - % username - ) - log.debug(msg) - except ldap.INVALID_CREDENTIALS as ex: - msg = "LDAP login: unable to authenticate user %s (%s)" % ( - username, - str(ex), - ) - log.exception(msg) - - return roles + except ldap.INVALID_CREDENTIALS: + logger.exception(f"LDAP login: unable to authenticate user {login}") diff --git a/pyispyb/app/extensions/auth/__init__.py b/pyispyb/app/extensions/auth/__init__.py index 5be50800..1e292999 100644 --- 
a/pyispyb/app/extensions/auth/__init__.py
+++ b/pyispyb/app/extensions/auth/__init__.py
@@ -1,278 +1,77 @@
-"""
-Project: py-ispyb
-https://github.com/ispyb/py-ispyb
-
-This file is part of py-ispyb software.
-
-py-ispyb is free software: you can redistribute it and/or modify
-it under the terms of the GNU Lesser General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
-
-py-ispyb is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU Lesser General Public License for more details.
-
-You should have received a copy of the GNU Lesser General Public License
-along with py-ispyb. If not, see .
-"""
-
-import logging
-import datetime
 import importlib
-from functools import wraps
-
-import jwt
-from flask import current_app, request
-from flask_restx._http import HTTPStatus
+import logging
+from typing import Any, Optional
 
+from ispyb import models
 
-__license__ = "LGPLv3+"
+from ....config import settings
 
 
 log = logging.getLogger(__name__)
 
 
 class AuthProvider:
-    """Allows to authentificate users and create tokens."""
+    """Authenticates users."""
 
     def __init__(self):
-        self.site_authentication = None
+        self.site_authentications = {}
 
     def init_app(self, app):
-        module_name = app.config["AUTH_MODULE"]
-        class_name = app.config["AUTH_CLASS"]
-        cls = getattr(importlib.import_module(module_name), class_name)
-        self.site_authentication = cls()
-        self.site_authentication.init_app(app)
-
-        assert app.config["SECRET_KEY"], "SECRET_KEY must be configured!"
-
-    def get_roles(self, username, password):
+        """Init extension."""
+
+        self._config = {}
+        auth_list = settings.auth
+        for auth_plugin in auth_list:
+            for auth_name in auth_plugin:
+                enabled = auth_plugin[auth_name]["ENABLED"]
+                if enabled:
+                    module_name: str = auth_plugin[auth_name]["AUTH_MODULE"]
+                    class_name: str = auth_plugin[auth_name]["AUTH_CLASS"]
+                    config = {}
+                    if "CONFIG" in auth_plugin[auth_name]:
+                        config = auth_plugin[auth_name]["CONFIG"]
+                    cls = getattr(importlib.import_module(module_name), class_name)
+                    instance = cls()
+                    instance.configure(config)
+                    self._config[auth_name] = config
+                    self.site_authentications[auth_name] = instance
+
+    def get_auth(
+        self, *, plugin: str, login: str | None, password: str | None, token: str | None
+    ) -> Optional[models.Person]:
         """
-        Returns roles associated to user. Basically this is the main
-        authentification method where site_auth is site specific authentication
-        class.
-
-        Args:
-            username (str): username
-            password (str): password
+        Check that the user is authenticated and return the matching `Person`.
 
-        Returns:
-            tuple or list: tuple or list with roles associated to the username
-        """
-        return self.site_authentication.get_roles(username, password)
-
-    def get_user_info_from_auth_header(self, auth_header):
-        """
-        Returns dict with user info based on auth header.
+        Basically this is the main authentication method, delegating to the site-specific authentication class.
Args: - auth_header ([type]): [description] + plugin (str): plugin to be used + login (str): auth login + password (str): auth password + token (str): auth token Returns: - dict: {"username": "", "roles": [], "is_admin": bool} + person (models.Person): The current `Person` """ - user_info = {} - token = None - - try: - parts = auth_header.split() - token = parts[1] - if current_app.config.get("MASTER_TOKEN") == token: - user_info["sub"] = "MasterToken" - #user_info["roles"] = current_app.config.get("ADMIN_ROLES") - user_info["roles"] = ["manager"] - else: - user_info, msg = decode_token(token) - user_info["is_admin"] = any( - role in current_app.config.get("ADMIN_ROLES", []) - for role in user_info["roles"] + if plugin not in self.site_authentications: + return None + + return self.site_authentications[plugin].authenticate(login, password, token) + + def get_export_config(self) -> list[dict[str, Any]]: + """Return auth config that should be provided to the UI""" + export_config = [] + for plugin, instance in self.site_authentications.items(): + export_config.append( + { + "name": plugin, + "config": { + key: self._config[plugin][key] for key in instance.config_export + }, + } ) - except BaseException as ex: - print("Unable to extract token from Authorization header (%s)" % str(ex)) - - return user_info - - def generate_token(self, username, roles): - """ - Generates token. - - Args: - username (string): username - roles (list): list of roles associated to the user - - Returns: - str: token - """ - iat = datetime.datetime.utcnow() - exp = datetime.datetime.utcnow() + datetime.timedelta( - minutes=current_app.config["TOKEN_EXP_TIME"] - ) - - token = jwt.encode( - {"sub": username, "roles": roles, "iat": iat, "exp": exp}, - current_app.config["SECRET_KEY"], - algorithm=current_app.config["JWT_CODING_ALGORITHM"], - ) - - # TravisCI fix - if not isinstance(token, str): - token = token.decode("UTF-8") - - return { - "sub": username, - "token": token, - "iat": iat.strftime("%Y-%m-%d %H:%M:%S"), - "exp": exp.strftime("%Y-%m-%d %H:%M:%S"), - "roles": roles, - } + return export_config auth_provider = AuthProvider() - - -def decode_token(token): - user_info = {} - msg = None - - try: - user_info = jwt.decode( - token, - current_app.config["SECRET_KEY"], - algorithms=current_app.config["JWT_CODING_ALGORITHM"], - ) - except jwt.ExpiredSignatureError: - current_app.logger.info("Token expired. Please log in again") - msg = "Token expired. Please log in again" - current_app.logger.info(msg) - except jwt.InvalidTokenError: - msg = "Invalid token. Please log in again" - current_app.logger.info(msg) - - return user_info, msg - - -def token_required(func): - """ - Token required decorator. 
- - Checks if the token is valid - - Args: - func (method): python method - - Returns: - func: if success - """ - - @wraps(func) - def decorated(*args, **kwargs): - """ - Actual decorator function - - Returns: - [type]: [description] - """ - token = None - - auth = request.headers.get("Authorization", None) - if not auth: - return ( - {"message": "Authorization header is expected"}, - HTTPStatus.UNAUTHORIZED, - ) - - parts = auth.split() - - if parts[0].lower() != "bearer": - return ( - {"message": "Authorization header must start with Bearer"}, - HTTPStatus.UNAUTHORIZED, - ) - elif len(parts) == 1: - return {"message": "Token not found"}, HTTPStatus.UNAUTHORIZED - elif len(parts) > 2: - return ( - {"message": "Authorization header must be Bearer token"}, - HTTPStatus.UNAUTHORIZED, - ) - - token = parts[1] - - if current_app.config.get("MASTER_TOKEN"): - if current_app.config["MASTER_TOKEN"] == token: - current_app.logger.info("Master token validated") - return func(*args, **kwargs) - - user_info, msg = decode_token(token) - if not user_info: - return {"message": msg}, HTTPStatus.UNAUTHORIZED - else: - return func(*args, **kwargs) - - return decorated - - -def role_required(func): - """ - Checks if user has role required to access the given resource. - - Authorization is done via AUTHORIZATION_RULES dictionary that contains - mapping of endpoints with user groups. For example: - - AUTHORIZATION_RULES = { - "proposals": { - "get": ["all"], - "post": ["admin"] - } - - define that method GET of endpoint proposals is available for all user groups - and method POST is accessible just for admin group. - If an endpoint is not defined in the AUTHORIZATION_RULES then it is available - for all user groups. - - Args: - func (function): function - - Returns: - function: [description] - """ - - @wraps(func) - def decorated(self, *args, **kwargs): - """ - Actual decorator function - - Returns: - [type]: [description] - """ - - user_info = auth_provider.get_user_info_from_auth_header( - request.headers.get("Authorization") - ) - - methods = current_app.config.get("AUTHORIZATION_RULES").get(self.endpoint, {}) - # If no role is defined then just manager is allowed to access the resource - roles = methods.get(func.__name__, ["manager"]) - - if ( - not roles - or "all" in roles - or any(role in list(roles) for role in list(user_info.get("roles", []))) - ): - return func(self, *args, **kwargs) - else: - msg = "User %s (roles assigned: %s) has no appropriate role (%s) " % ( - user_info.get("sub"), - str(user_info.get("roles")), - str(roles), - ) - msg += " to execute method." 
- return {"message": msg}, HTTPStatus.UNAUTHORIZED - - return func(self, *args, **kwargs) - - return decorated diff --git a/pyispyb/app/extensions/auth/bearer.py b/pyispyb/app/extensions/auth/bearer.py new file mode 100644 index 00000000..de025337 --- /dev/null +++ b/pyispyb/app/extensions/auth/bearer.py @@ -0,0 +1,108 @@ +import logging +from fastapi import HTTPException, Depends, Request +from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials +import jwt + +from ...globals import g +from .token import decode_token, set_token_data +from .onetime import onetime, validate_onetime_token + + +logger = logging.getLogger(__name__) + +# auto_error=False to correct 403 -> 401 +# https://github.com/tiangolo/fastapi/issues/2026 +security = HTTPBearer(auto_error=False) + + +def verify_jwt(token: str): + try: + return decode_token(token) + except jwt.ExpiredSignatureError: + raise HTTPException( + status_code=401, detail="Token expired. Please log in again" + ) + except jwt.InvalidTokenError: + raise HTTPException(status_code=401, detail="Invalid token") + + +async def JWTBearer( + request: Request, + onetime: str = Depends(onetime), + credentials: HTTPAuthorizationCredentials = Depends(security), +): + # JWT authentication + if credentials: + if not credentials.scheme == "Bearer": + raise HTTPException( + status_code=401, detail="Invalid authentication scheme." + ) + decoded = verify_jwt(credentials.credentials) + if not decoded: + raise HTTPException( + status_code=401, detail="Invalid token or expired token." + ) + + set_token_data(decoded) + + return credentials.credentials + + # One time token authentication + elif onetime: + person_dict = validate_onetime_token(onetime, request.url.components.path) + set_token_data(person_dict) + else: + raise HTTPException(status_code=401, detail="No token provided.") + + +def permission_required(operator, permissions): + """Make the route only accesible to users with the specified permissions. + + Args: + operator (str): any or all + permissions (str[]): permissions required + """ + operator = operator.lower() + if operator != "any" and operator != "all": + raise Exception("operator must be 'any' or 'all'.") + + async def res(): + + user_permissions: list[str] = g.permissions + if user_permissions is None: + user_permissions = [] + + if ( + operator == "any" + and ( + "all" in permissions + or any( + permission in list(permissions) + for permission in list(user_permissions) + ) + ) + ) or ( + operator == "all" + and ( + all( + permission in list(permissions) + for permission in list(user_permissions) + ) + ) + ): + return user_permissions + else: + msg = ( + "User %s (permissions assigned: %s) has no appropriate permission (%s: %s) " + % ( + g.login, + str(user_permissions), + operator, + str(permissions), + ) + ) + msg += " to execute method." 
diff --git a/pyispyb/app/extensions/auth/bearer.py b/pyispyb/app/extensions/auth/bearer.py
new file mode 100644
index 00000000..de025337
--- /dev/null
+++ b/pyispyb/app/extensions/auth/bearer.py
@@ -0,0 +1,108 @@
+import logging
+from fastapi import HTTPException, Depends, Request
+from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
+import jwt
+
+from ...globals import g
+from .token import decode_token, set_token_data
+from .onetime import onetime, validate_onetime_token
+
+
+logger = logging.getLogger(__name__)
+
+# auto_error=False to correct 403 -> 401
+# https://github.com/tiangolo/fastapi/issues/2026
+security = HTTPBearer(auto_error=False)
+
+
+def verify_jwt(token: str):
+    try:
+        return decode_token(token)
+    except jwt.ExpiredSignatureError:
+        raise HTTPException(
+            status_code=401, detail="Token expired. Please log in again"
+        )
+    except jwt.InvalidTokenError:
+        raise HTTPException(status_code=401, detail="Invalid token")
+
+
+async def JWTBearer(
+    request: Request,
+    onetime: str = Depends(onetime),
+    credentials: HTTPAuthorizationCredentials = Depends(security),
+):
+    # JWT authentication
+    if credentials:
+        if not credentials.scheme == "Bearer":
+            raise HTTPException(
+                status_code=401, detail="Invalid authentication scheme."
+            )
+        decoded = verify_jwt(credentials.credentials)
+        if not decoded:
+            raise HTTPException(
+                status_code=401, detail="Invalid token or expired token."
+            )
+
+        set_token_data(decoded)
+
+        return credentials.credentials
+
+    # One time token authentication
+    elif onetime:
+        person_dict = validate_onetime_token(onetime, request.url.components.path)
+        set_token_data(person_dict)
+    else:
+        raise HTTPException(status_code=401, detail="No token provided.")
+
+
+def permission_required(operator, permissions):
+    """Make the route only accessible to users with the specified permissions.
+
+    Args:
+        operator (str): any or all
+        permissions (str[]): permissions required
+    """
+    operator = operator.lower()
+    if operator != "any" and operator != "all":
+        raise Exception("operator must be 'any' or 'all'.")
+
+    async def res():
+
+        user_permissions: list[str] = g.permissions
+        if user_permissions is None:
+            user_permissions = []
+
+        if (
+            operator == "any"
+            and (
+                "all" in permissions
+                or any(
+                    permission in list(permissions)
+                    for permission in list(user_permissions)
+                )
+            )
+        ) or (
+            operator == "all"
+            and (
+                all(
+                    permission in list(permissions)
+                    for permission in list(user_permissions)
+                )
+            )
+        ):
+            return user_permissions
+        else:
+            msg = (
+                "User %s (permissions assigned: %s) has no appropriate permission (%s: %s)"
+                % (
+                    g.login,
+                    str(user_permissions),
+                    operator,
+                    str(permissions),
+                )
+            )
+            msg += " to execute method."
+            logger.info(msg)
+            raise HTTPException(status_code=403, detail="Not Authorized")
+
+    return res
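A usage sketch for permission_required() as a FastAPI dependency, combined with the AuthenticatedAPIRouter added earlier in this patch (the route path and the `manage_perms` permission name are illustrative; `all_proposals` is the permission used elsewhere in this patch):

    from fastapi import Depends

    from pyispyb.app.base import AuthenticatedAPIRouter
    from pyispyb.app.extensions.auth.bearer import permission_required

    router = AuthenticatedAPIRouter(prefix="/demo", tags=["Demo"])


    @router.get(
        "/restricted",
        dependencies=[
            Depends(permission_required("any", ["manage_perms", "all_proposals"]))
        ],
    )
    async def restricted_resource() -> dict:
        # Only reached when the caller holds at least one listed permission
        return {"ok": True}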
diff --git a/pyispyb/app/extensions/auth/onetime.py b/pyispyb/app/extensions/auth/onetime.py
new file mode 100644
index 00000000..fdb122a3
--- /dev/null
+++ b/pyispyb/app/extensions/auth/onetime.py
@@ -0,0 +1,135 @@
+import asyncio
+import logging
+import secrets
+from typing import Any, Optional
+from urllib.parse import urlparse
+
+from fastapi import Query, HTTPException
+from sqlalchemy import text
+from starlette.concurrency import run_in_threadpool
+from ispyb import models
+
+from ....config import settings
+from ...extensions.database.definitions import get_current_person
+from ...extensions.database.session import get_session
+from ...extensions.database.middleware import db
+
+logger = logging.getLogger(__name__)
+
+
+def onetime(
+    onetime: Optional[str] = Query(
+        None,
+        description="One time token",
+        include_in_schema=False,
+        regex=r"^([\w\-_])+$",
+    )
+) -> str:
+    return onetime
+
+
+def generate_onetime_token(validity: str, personId: int) -> str:
+    """Generate a one time token
+
+    Args:
+        validity (str): The path this token is valid for
+        personId (int): The associated personId
+
+    Returns:
+        token (str): The generated token
+    """
+    parsed = urlparse(validity)
+    path = parsed.path
+    path = path.replace(settings.api_root, "")
+
+    token = secrets.token_urlsafe(96)
+    once_token = models.SWOnceToken(
+        personId=personId,
+        validity=path,
+        token=token,
+    )
+    db.session.add(once_token)
+    return token
+
+
+def validate_onetime_token(token: str, validity: str) -> dict[str, Any]:
+    """Validate a one time token
+
+    Args:
+        token (str): The token to validate
+        validity (str): The current path
+
+    Returns:
+        person (dict): The login, personId and permissions of the validated person
+    """
+    if not hasattr(models, "SWOnceToken"):
+        raise RuntimeError("Missing table `SWOnceToken`")
+
+    once_token: models.SWOnceToken = (
+        db.session.query(models.SWOnceToken)
+        .filter(models.SWOnceToken.token == token)
+        .first()
+    )
+
+    if not once_token:
+        logger.warning("Unknown one time token")
+        raise HTTPException(status_code=401, detail="Invalid one time token.")
+
+    if validity != settings.api_root + once_token.validity:
+        logger.warning(
+            f"One time token validity `{settings.api_root+once_token.validity}` and path `{validity}` do not match"
+        )
+        raise HTTPException(status_code=401, detail="Invalid one time token.")
+
+    login = (
+        db.session.query(models.Person.login)
+        .filter(models.Person.personId == once_token.personId)
+        .first()
+    )
+    person = get_current_person(login[0])
+
+    db.session.delete(once_token)
+    db.session.commit()
+
+    return {
+        "login": person.login,
+        "personId": person.personId,
+        "permissions": person._metadata["permissions"],
+    }
+
+
+def expire_onetime_tokens(expiry: int = 10) -> None:
+    """Expire one time tokens
+
+    Delete all unused tokens generated more than `expiry` seconds ago
+
+    Kwargs:
+        expiry (int): Seconds tokens are valid for
+    """
+    if not isinstance(expiry, int):
+        raise RuntimeError(f"Expiry {expiry} is a non-integer value")
+
+    with get_session() as session:
+        session.query(models.SWOnceToken).filter(
+            models.SWOnceToken.recordTimeStamp
+            < text(f"NOW() - INTERVAL {expiry} SECOND")
+        ).delete(synchronize_session="fetch")
+
+
+async def expire_ontime_tokens_periodically(interval: int = 5) -> None:
+    """Periodically remove onetime tokens that have expired
+
+    Mostly stolen from https://github.com/dmontagu/fastapi-utils/blob/master/fastapi_utils/tasks.py
+    """
+
+    async def loop():
+        while True:
+            try:
+                logger.debug("Expiring onetime tokens")
+                await run_in_threadpool(expire_onetime_tokens)
+            except Exception:
+                logger.exception("Could not expire onetime tokens")
+
+            await asyncio.sleep(interval)
+
+    asyncio.ensure_future(loop())
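The intended flow, sketched below; the URL is illustrative, and the interplay with `settings.api_root` (stripped on generation, re-prefixed on validation) is inferred from the code above:

    from pyispyb.app.extensions.auth.onetime import (
        generate_onetime_token,
        validate_onetime_token,
    )

    # Mint a token tied to a single URL for personId 123
    url = "/datacollections/123/attachments/1"
    token = generate_onetime_token(url, 123)

    # The client then calls the same URL with ?onetime=<token>; JWTBearer
    # hands it to validate_onetime_token, which checks the path, resolves
    # the person and deletes the token so it cannot be replayed
    person_dict = validate_onetime_token(token, url)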
diff --git a/pyispyb/app/extensions/auth/token.py b/pyispyb/app/extensions/auth/token.py
new file mode 100644
index 00000000..0bfe5b0b
--- /dev/null
+++ b/pyispyb/app/extensions/auth/token.py
@@ -0,0 +1,72 @@
+import datetime
+import jwt
+from typing import Any
+
+from ....config import settings
+from ...globals import g
+
+
+def generate_token(
+    login: str,
+    personId: int,
+    permissions: list[str],
+):
+    """
+    Generate token.
+
+    Args:
+        login (str): login
+        personId (int): Person.personId
+        permissions (list): list of permissions associated to the user
+
+    Returns:
+        dict: the token and its metadata
+    """
+    iat = datetime.datetime.utcnow()
+    exp = datetime.datetime.utcnow() + datetime.timedelta(
+        minutes=settings.token_exp_time
+    )
+
+    token = jwt.encode(
+        {
+            "login": login,
+            "personId": personId,
+            "permissions": permissions,
+            "iat": iat,
+            "exp": exp,
+        },
+        settings.secret_key,
+        algorithm=settings.jwt_coding_algorithm,
+    )
+
+    return {
+        "login": login,
+        "personId": personId,
+        "token": token,
+        "iat": iat.strftime("%Y-%m-%d %H:%M:%S"),
+        "exp": exp.strftime("%Y-%m-%d %H:%M:%S"),
+        "permissions": permissions,
+    }
+
+
+def decode_token(token: str) -> dict[str, Any]:
+    """Decode authentication token.
+
+    Args:
+        token (str): authentication token
+
+    Returns:
+        dict: the decoded token payload (login, personId, permissions, iat, exp)
+    """
+    return jwt.decode(
+        token,
+        settings.secret_key,
+        algorithms=settings.jwt_coding_algorithm,
+    )
+
+
+def set_token_data(token: dict[str, Any]) -> None:
+    g.login = token["login"]
+    g.personId = token["personId"]
+    g.permissions = token["permissions"]
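A round trip through the two helpers above, assuming `settings.secret_key`, `settings.token_exp_time` and `settings.jwt_coding_algorithm` are configured (the login and personId are illustrative):

    from pyispyb.app.extensions.auth.token import generate_token, decode_token

    bundle = generate_token("jbloggs", personId=123, permissions=["all_proposals"])
    payload = decode_token(bundle["token"])
    assert payload["login"] == "jbloggs"
    assert payload["permissions"] == ["all_proposals"]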
diff --git a/pyispyb/app/extensions/database/definitions.py b/pyispyb/app/extensions/database/definitions.py
new file mode 100644
index 00000000..16633a17
--- /dev/null
+++ b/pyispyb/app/extensions/database/definitions.py
@@ -0,0 +1,148 @@
+import logging
+from typing import Optional, Any
+
+import sqlalchemy
+from sqlalchemy.orm import joinedload
+from ispyb import models
+from pyispyb.app.extensions.options.schema import Options
+
+from pyispyb.app.globals import g
+from pyispyb.app.extensions.database.middleware import db
+
+logger = logging.getLogger(__name__)
+
+_session = sqlalchemy.func.concat(
+    models.Proposal.proposalCode,
+    models.Proposal.proposalNumber,
+    "-",
+    models.BLSession.visit_number,
+).label("session")
+
+_proposal = sqlalchemy.func.concat(
+    models.Proposal.proposalCode, models.Proposal.proposalNumber
+).label("proposal")
+
+
+def get_current_person(login: str) -> Optional[models.Person]:
+    person = (
+        db.session.query(models.Person)
+        .options(joinedload(models.Person.UserGroup))
+        .options(joinedload(models.Person.UserGroup, models.UserGroup.Permission))
+        .filter(models.Person.login == login)
+        .first()
+    )
+
+    if not person:
+        return
+
+    permissions = []
+    for group in person.UserGroup:
+        for permission in group.Permission:
+            permissions.append(permission.type)
+    person._metadata["permissions"] = permissions
+
+    return person
+
+
+def get_options() -> Options:
+    """Get db_options from app"""
+    # Avoid circular import
+    from pyispyb.app.main import app
+
+    return app.db_options
+
+
+def with_authorization(
+    query: "sqlalchemy.orm.Query[Any]",
+    includeArchived: bool = False,
+    proposalColumn: "sqlalchemy.Column[Any]" = None,
+    joinBLSession: bool = True,
+) -> "sqlalchemy.orm.Query[Any]":
+    """Apply authorization to a query
+
+    Checks in the following order:
+      * `all_proposals` allowing access to everything
+      * checks if the user is in a beamLineGroup to allow access to all proposals on a beamline
+      * checks ProposalHasPerson
+      * falls back to SessionHasPerson allowing access to entities related to where the
+        user is registered on a session
+
+    Kwargs:
+        includeArchived: whether to include archived beamlines
+        proposalColumn: the column used to join to `models.Proposal`, will force a join with `models.Proposal`
+        joinBLSession: whether to join `models.BLSession`
+    """
+    # `all_proposals` can access all sessions
+    if "all_proposals" in g.permissions:
+        logger.info("user has `all_proposals`")
+        return query
+
+    # Iterate through users permissions and match them to the relevant groups
+    beamLines = []
+    permissions_applied = []
+    db_options = get_options()
+    for group in db_options.beamLineGroups:
+        if group.permission in g.permissions:
+            permissions_applied.append(group.permission)
+            for beamLine in group.beamLines:
+                if includeArchived or not beamLine.archived:
+                    beamLines.append(beamLine.beamLineName)
+
+    if proposalColumn:
+        query = query.join(
+            models.Proposal, models.Proposal.proposalId == proposalColumn
+        )
+
+    if joinBLSession:
+        query = query.outerjoin(
+            models.BLSession, models.BLSession.proposalId == models.Proposal.proposalId
+        )
+
+    conditions = []
+    if beamLines:
+        logger.info(
+            f"filtered to beamlines `{beamLines}` with permissions `{permissions_applied}`"
+        )
+
+        conditions.append(models.BLSession.beamLineName.in_(beamLines))
+
+    # Sessions
+    sessions = db.session.query(models.SessionHasPerson.sessionId).filter(
+        models.SessionHasPerson.personId == g.personId
+    )
+    sessions = [r._asdict()["sessionId"] for r in sessions.all()]
+    conditions.append(models.BLSession.sessionId.in_(sessions if sessions else []))
+
+    # Proposals
+    proposals = db.session.query(models.ProposalHasPerson.proposalId).filter(
+        models.ProposalHasPerson.personId == g.personId
+    )
+    proposals = [r._asdict()["proposalId"] for r in proposals.all()]
+    conditions.append(models.Proposal.proposalId.in_(proposals if proposals else []))
+
+    query = query.filter(sqlalchemy.or_(*conditions))
+    return query
+
+
+def groups_from_beamlines(beamLines: list[str]) -> list[list]:
+    """Get uiGroups from a list of beamlines"""
+    db_options = get_options()
+    groups = []
+    for beamline in beamLines:
+        for group in db_options.beamLineGroups:
+            for groupBeamline in group.beamLines:
+                if beamline == groupBeamline.beamLineName:
+                    groups.append(group.uiGroup)
+
+    return list(set(groups))
+
+
+def beamlines_from_group(beamLineGroup: str) -> list[str]:
+    """Get a list of beamlines from a groupName"""
+    db_options = get_options()
+    for group in db_options.beamLineGroups:
+        if group.groupName == beamLineGroup:
+            return [beamline.beamLineName for beamline in group.beamLines]
+
+    return []
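A sketch of how a query layer might combine with_authorization() with the pagination helpers added later in this patch (the BLSession listing and the function itself are illustrative, not part of the patch):

    from ispyb import models

    from pyispyb.app.extensions.database.middleware import db
    from pyispyb.app.extensions.database.definitions import with_authorization
    from pyispyb.app.extensions.database.utils import page, Paged


    def get_sessions(skip: int = 0, limit: int = 20) -> Paged:
        query = db.session.query(models.BLSession).join(models.Proposal)

        # Drop rows the current user is not entitled to see
        query = with_authorization(query, joinBLSession=False)

        total = query.count()
        query = page(query, skip=skip, limit=limit)
        return Paged(total=total, results=query.all(), skip=skip, limit=limit)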
diff --git a/pyispyb/app/extensions/database/middleware.py b/pyispyb/app/extensions/database/middleware.py
new file mode 100644
index 00000000..5c604f69
--- /dev/null
+++ b/pyispyb/app/extensions/database/middleware.py
@@ -0,0 +1,44 @@
+# https://www.algoo.fr/fr/actualites/article/fastapi-et-sqlalchemy-un-duo-puissant-mais-attention-aux-transactions
+
+import contextlib
+import contextvars
+import sqlalchemy.orm
+from .session import _session as sqlsession
+
+_session = contextvars.ContextVar("_session", default=None)
+
+
+class Database:
+    @classmethod
+    def set_session(cls, session):
+        _session.set(session)
+
+    @property
+    def session(self) -> sqlalchemy.orm.Session:
+        try:
+            if _session.get() is None:
+                raise AttributeError
+            return _session.get()
+        except (AttributeError, LookupError):
+            raise Exception("Can't get session. Please call Database.set_session()")
+
+
+db = Database()
+
+
+@contextlib.contextmanager
+def get_session() -> sqlalchemy.orm.Session:
+    db_session = sqlsession()
+    try:
+        Database.set_session(db_session)
+        yield db_session
+        # print("DB Setting session")
+        db_session.commit()
+    except Exception as e:  # noqa
+        # print("DB Except", e)
+        db_session.rollback()
+        raise
+    finally:
+        # print("DB Finally")
+        Database.set_session(None)
+        db_session.close()
diff --git a/pyispyb/app/extensions/database/session.py b/pyispyb/app/extensions/database/session.py
new file mode 100644
index 00000000..af110dc6
--- /dev/null
+++ b/pyispyb/app/extensions/database/session.py
@@ -0,0 +1,41 @@
+import contextlib
+from typing import Generator, Any
+import os
+import sqlalchemy
+import sqlalchemy.orm
+import sqlalchemy.schema
+
+from pyispyb.config import settings
+
+engine = sqlalchemy.create_engine(
+    url=settings.sqlalchemy_database_uri,
+    # Blobs get decoded as str without this resulting in TypeError: string argument without an encoding
+    # https://stackoverflow.com/a/53468522
+    connect_args={"use_pure": True},  # type: ignore
+    isolation_level="READ UNCOMMITTED",
+    # https://docs.sqlalchemy.org/en/13/core/pooling.html#dealing-with-disconnects
+    pool_pre_ping=True,
+    pool_recycle=3600,
+    # pooling
+    # https://docs.sqlalchemy.org/en/13/errors.html#error-3o7r
+    # maybe consider https://docs.sqlalchemy.org/en/13/core/pooling.html#sqlalchemy.pool.NullPool ?
+    # environment variables are strings, so cast before handing them to the pool
+    pool_size=int(os.environ.get("ISPYB_DATABASE_POOL", 10)),
+    max_overflow=int(os.environ.get("ISPYB_DATABASE_OVERFLOW", 20)),
+)
+
+_session = sqlalchemy.orm.sessionmaker(autocommit=False, autoflush=False, bind=engine)
+
+# Base.metadata.create_all(bind=engine)  # type: ignore
+
+
+@contextlib.contextmanager
+def get_session() -> Generator[sqlalchemy.orm.Session, Any, None]:
+    session = _session()
+    try:
+        yield session
+        session.commit()
+    except:  # noqa
+        session.rollback()
+        raise
+    finally:
+        session.close()
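The ContextVar-based middleware above is presumably wired in at application startup; a minimal sketch of such wiring, with the middleware function name and registration being hypothetical (py-ispyb's actual app setup may differ):

    from starlette.requests import Request

    from pyispyb.app.extensions.database.middleware import get_session


    async def session_middleware(request: Request, call_next):
        # One session per request: stored in a ContextVar via Database.set_session,
        # committed on success, rolled back on error, always closed
        with get_session():
            return await call_next(request)


    # app.middleware("http")(session_middleware)  # hypothetical registration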
model with new values including nested models""" + for key, value in values.items(): + if isinstance(value, dict): + update_model(getattr(model, key), value) + else: + if isinstance(value, enum.Enum): + value = value.value + setattr(model, key, value) + + +ENABLE_DEBUG_LOGGING = False + + +def enable_debug_logging() -> None: + global ENABLE_DEBUG_LOGGING + """Write debug level logging output for every executed SQL query. + This setting will persist throughout the Python process lifetime and affect + all existing and future sqlalchemy sessions. This should not be used in + production as it can be expensive, can leak sensitive information, and, + once enabled, cannot be disabled. + """ + if ENABLE_DEBUG_LOGGING: + return + ENABLE_DEBUG_LOGGING = True + + _sqlalchemy_root = os.path.dirname(sqlalchemy.__file__) + + import traceback + + indent = " " + + @sqlalchemy.event.listens_for(sqlalchemy.engine.Engine, "before_cursor_execute") # type: ignore + def before_cursor_execute( + conn: sqlalchemy.engine.Connection, + cursor: "sqlalchemy.engine.interfaces.DBAPICursor", # type: ignore + statement: "sqlalchemy.orm.Query[Any]", + parameters: tuple[Any], + context: sqlalchemy.engine.ExecutionContext, + executemany: bool, + ) -> None: + conn.info.setdefault("query_start_time", []).append(time.perf_counter()) + conn.info.setdefault("count", 0) + conn.info["count"] += 1 + + cause = "" + for frame, line in traceback.walk_stack(None): + if frame.f_code.co_filename.startswith(_sqlalchemy_root): + continue + cause = f"\n{indent}originating from {frame.f_code.co_filename}:{line}" + break + if parameters: + str_parameters = f"\n{indent}with parameters={parameters}" + else: + str_parameters = "" + + logger.debug( + f"SQL query #{conn.info['count']}:\n" + + pretty(statement) + + str_parameters + + cause + ) + + @sqlalchemy.event.listens_for(sqlalchemy.engine.Engine, "after_cursor_execute") # type: ignore + def after_cursor_execute( + conn: sqlalchemy.engine.Connection, + cursor: "sqlalchemy.engine.interfaces.DBAPICursor", # type: ignore + statement: "sqlalchemy.orm.Query[Any]", + parameters: tuple[Any], + context: sqlalchemy.engine.ExecutionContext, + executemany: bool, + ) -> None: + total = round(time.perf_counter() - conn.info["query_start_time"].pop(-1), 4) + logger.debug(indent + f"SQL query #{conn.info['count']} took: {total} seconds") diff --git a/pyispyb/app/extensions/flask_sqlalchemy/__init__.py b/pyispyb/app/extensions/flask_sqlalchemy/__init__.py deleted file mode 100644 index 47c62b1c..00000000 --- a/pyispyb/app/extensions/flask_sqlalchemy/__init__.py +++ /dev/null @@ -1,345 +0,0 @@ -""" -Project: py-ispyb -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
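A minimal usage sketch of the new database helpers (`get_session`, `order`, `page`, `Paged`); `models.Proposal`, the `sort_map` key, and `list_proposals` itself are illustrative assumptions rather than part of this changeset. With `query_debug` enabled, queries built this way would also be logged by the `before/after_cursor_execute` hooks above.

```python
# Hypothetical usage of the database helpers; models.Proposal and the
# sort_map key are assumptions for illustration only.
from ispyb import models

from pyispyb.app.extensions.database.session import get_session
from pyispyb.app.extensions.database.utils import Paged, order, page


def list_proposals(skip: int = 0, limit: int = 25):
    # Map an API-facing field name onto a sortable column
    sort_map = {"proposalId": models.Proposal.proposalId}
    with get_session() as session:
        query = session.query(models.Proposal)
        total = query.count()
        # No explicit order is requested, so the string defaults apply
        query = order(
            query, sort_map, None, default={"order_by": "proposalId", "order": "asc"}
        )
        query = page(query, skip=skip, limit=limit)
        # Paged would normally be parameterised with a response schema;
        # raw ORM rows are kept here for brevity
        return Paged(total=total, results=query.all(), skip=skip, limit=limit)
```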
-""" - - -__license__ = "LGPLv3+" - - -import sys -import sqlite3 - -from flask_restx import abort -from flask_restx._http import HTTPStatus - - -from flask import current_app -import sqlalchemy -from flask_sqlalchemy import SQLAlchemy as BaseSQLAlchemy - -from pyispyb.app.utils import create_response_item - - -def set_sqlite_pragma(dbapi_connection, connection_record): - # pylint: disable=unused-argument - """ - SQLite supports FOREIGN KEY syntax when emitting CREATE statements for tables. - - By default these constraints have no effect on the - operation of the table. - - http://docs.sqlalchemy.org/en/latest/dialects/sqlite.html#foreign-key-support - - Args: - dbapi_connection ([type]): [description] - connection_record ([type]): [description] - """ - if not isinstance(dbapi_connection, sqlite3.Connection): - return - cursor = dbapi_connection.cursor() - cursor.execute("PRAGMA foreign_keys=ON") - cursor.close() - - -class AlembicDatabaseMigrationConfig: - """ - Helper config holder that provides missing functions of Flask-Alembic. - - Args: - object ([type]): [description] - """ - - def __init__(self, database, directory="migrations", **kwargs): - self.db = database # pylint: disable=invalid-name - self.directory = directory - self.configure_args = kwargs - - -class SQLAlchemy(BaseSQLAlchemy): - """ - Customized Flask-SQLAlchemy adapter - - Args: - BaseSQLAlchemy ([type]): [description] - """ - - def __init__(self, *args, **kwargs): - """ - Init method - """ - - if "session_options" not in kwargs: - kwargs["session_options"] = {} - kwargs["session_options"]["autocommit"] = False - # Configure Constraint Naming Conventions: - # http://docs.sqlalchemy.org/en/latest/core/constraints.html - # #constraint-naming-conventions - """ - kwargs["metadata"] = MetaData( - naming_convention={ - "pk": "pk_%(table_name)s", - "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s", - "ix": "ix_%(table_name)s_%(column_0_name)s", - "uq": "uq_%(table_name)s_%(column_0_name)s", - "ck": "ck_%(table_name)s_%(constraint_name)s", - } - ) - """ - super().__init__(*args, **kwargs) - - def init_app(self, app): - """ - Called to init extension. - - Args: - app ([type]): [description] - """ - super().init_app(app) - - database_uri = app.config["SQLALCHEMY_DATABASE_URI"] - if not database_uri or database_uri == "sqlite:///:memory:": - raise Exception("SQLALCHEMY_DATABASE_URI must be configured!") - # assert database_uri, "SQLALCHEMY_DATABASE_URI must be configured!" 
- if database_uri.startswith("sqlite:"): - self.event.listens_for(sqlalchemy.engine.Engine, "connect")( - set_sqlite_pragma - ) - - app.extensions["migrate"] = AlembicDatabaseMigrationConfig( - self, compare_type=True - ) - - def get_db_items(self, sql_alchemy_model, dict_schema, ma_schema, query_dict): - """ - Returns resource based on the passed models and query parameter - - Args: - sql_alchemy_model ([type]): SQLAlchemy ORM model - dict_schema ([type]): dict with flask fields - ma_schema ([type]): marshmallows schema - query_dict (dict): query parameters - - Returns: - dict: {"data": {"total": int, "rows": list}, - "message" : str, - "error": str - } - """ - if "offset" in query_dict.keys(): - offset = query_dict.get("offset") - else: - offset = 0 - if "limit" in query_dict.keys(): - limit = query_dict.get("limit") - else: - limit = current_app.config.get("PAGINATION_ITEMS_LIMIT") - - msg = None - schema_keys = {} - multiple_value_query_dict = {} - - for key in query_dict.keys(): - if key in dict_schema.keys(): - if isinstance(query_dict[key], (list, tuple)): - multiple_value_query_dict[key] = query_dict[key] - else: - schema_keys[key] = query_dict.get(key) - query = sql_alchemy_model.query - - # Filter items based on schema keys with one value - if schema_keys: - try: - query = query.filter_by(**schema_keys) - except sqlalchemy.exc.InvalidRequestError as ex: - print(ex) - msg = "Unable to filter items based on query items (%s)" % str(ex) - - # Filter items based on schema keys with multiple values - if multiple_value_query_dict: - for key, value in multiple_value_query_dict.items(): - attr = getattr(sql_alchemy_model, key) - try: - query = query.filter(attr.in_(value)) - except sqlalchemy.exc.InvalidRequestError as ex: - print(ex) - msg = "Unable to filter items based on query items (%s)" % str(ex) - - total = query.count() - - if limit: - query = query.limit(limit) - if offset: - query = query.offset(offset) - - items = ma_schema.dump(query, many=True)[0] - return create_response_item(msg, total, items) - - def get_db_item(self, sql_alchemy_model, ma_schema, query_dict): - """ - Returns data base item by its Id. - - Args: - item_id (int): - - Returns: - dict: info dict - """ - db_item = sql_alchemy_model.query.filter_by(**query_dict).first_or_404( - description="There is no data with item id %s" % str(query_dict) - ) - # db_item = sql_alchemy_model.query.filter_by(**item_id_dict).first() - db_item_json = ma_schema.dump(db_item)[0] - - return db_item_json - - def get_db_items_by_view( - self, sql_alchemy_model, dict_schema, ma_schema, query_dict - ): - msg = None - schema_keys = {} - multiple_value_query_dict = {} - - for key in query_dict.keys(): - if key in dict_schema.keys(): - if isinstance(query_dict[key], (list, tuple)): - multiple_value_query_dict[key] = query_dict[key] - else: - schema_keys[key] = query_dict.get(key) - - query = self.session.query(sql_alchemy_model) - - total = query.count() - - items = ma_schema.dump(query, many=True)[0] - - return create_response_item(msg, total, items) - - def add_db_item(self, sql_alchemy_model, ma_schema, data): - """ - Adds item to db. 
- - Args: - sql_alchemy_model ([type]): [description] - data (dict): [description] - - Returns: - SQLAlchemy db item: [description] - """ - try: - db_item = sql_alchemy_model(**data) - self.session.add(db_item) - self.session.commit() - json_data = ma_schema.dump(db_item)[0] - return json_data, HTTPStatus.OK - except TypeError as ex: - self.session.rollback() - print(ex) - abort(HTTPStatus.NOT_ACCEPTABLE, "Unable to add db item (%s)" % str(ex)) - except sqlalchemy.exc.DataError as ex: - self.session.rollback() - print(ex) - abort(HTTPStatus.NOT_ACCEPTABLE, "Unable to add db item (%s)" % str(ex)) - except Exception as ex: - self.session.rollback() - print(ex) - abort(HTTPStatus.NOT_ACCEPTABLE, "Unable to add db item (%s)" % str(ex)) - - def update_db_item( - self, sql_alchemy_model, ma_schema, item_id_dict, item_update_dict - ): - """ - Updates item in db - - Args: - sql_alchemy_model ([type]): [description] - item_id_dict ([type]): [description] - item_update_dict ([type]): [description] - - Returns: - [type]: [description] - """ - result = None - db_item = sql_alchemy_model.query.filter_by(**item_id_dict).first_or_404( - description="There is no data with item id %s" % str(item_id_dict) - ) - if db_item: - for key, value in item_update_dict.items(): - if hasattr(db_item, key): - setattr(db_item, key, value) - else: - print("Attribute %s not defined in the item model" % key) - self.session.commit() - result = ma_schema.dump(db_item)[0] - return result - - def patch_db_item(self, sql_alchemy_model, ma_schema, item_id_dict, item_data_dict): - """ - Patch db item. - - Args: - sql_alchemy_model ([type]): [description] - ma_schema : Marshmallows schema - item_id_dict ([type]): [description] - item_data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - result = None - db_item = sql_alchemy_model.query.filter_by(**item_id_dict).first_or_404( - description="There is no data with item id %s" % str(item_id_dict) - ) - if db_item: - for key, value in item_data_dict.items(): - if hasattr(db_item, key): - setattr(db_item, key, value) - else: - abort( - HTTPStatus.NOT_ACCEPTABLE, - "Attribute %s not defined in the item model" % key, - ) - self.session.commit() - result = ma_schema.dump(db_item)[0] - - return result - - def delete_db_item(self, sql_alchemy_model, item_id_dict): - """ - Deletes db item - - Args: - sql_alchemy_model ([type]): [description] - item_id_dict ([type]): [description] - - Returns: - [type]: [description] - """ - db_item = sql_alchemy_model.query.filter_by(**item_id_dict).first_or_404( - description="There is no data with item id %s" % str(item_id_dict) - ) - - try: - self.session.delete(db_item) - self.session.commit() - return True - except Exception as ex: - print(ex) - # log.exception(str(ex)) - self.session.rollback() - abort(HTTPStatus.INTERNAL_SERVER_ERROR, str(ex)) diff --git a/pyispyb/app/extensions/logging/__init__.py b/pyispyb/app/extensions/logging/__init__.py deleted file mode 100644 index 3fb6e940..00000000 --- a/pyispyb/app/extensions/logging/__init__.py +++ /dev/null @@ -1,60 +0,0 @@ -""" -Project: py-ispyb -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. 
- -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - -import logging - - -class Logging(object): - """ - This is a helper extension, which adjusts logging configuration for the - application. - """ - - def __init__(self, app=None): - if app: - self.init_app(app) - - def init_app(self, app): - """ - Common Flask interface to initialize the logging according to the - application configuration. - """ - # We don't need the default Flask's loggers when using our invoke tasks - # since we set up beautiful colorful loggers globally. - for handler in list(app.logger.handlers): - app.logger.removeHandler(handler) - app.logger.propagate = True - - if app.debug: - logging.getLogger("flask_oauthlib").setLevel(logging.DEBUG) - app.logger.setLevel(logging.DEBUG) - - # We don't need the default SQLAlchemy loggers when using our invoke - # tasks since we set up beautiful colorful loggers globally. - # NOTE: This particular workaround is for the SQLALCHEMY_ECHO mode, - # when all SQL commands get printed (without these lines, they will get - # printed twice). - sqla_logger = logging.getLogger("sqlalchemy.engine.base.Engine") - for hdlr in list(sqla_logger.handlers): - sqla_logger.removeHandler(hdlr) - sqla_logger.addHandler(logging.NullHandler()) diff --git a/pyispyb/app/extensions/options/__init__.py b/pyispyb/app/extensions/options/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/pyispyb/app/extensions/options/base.py b/pyispyb/app/extensions/options/base.py new file mode 100644 index 00000000..30d28e94 --- /dev/null +++ b/pyispyb/app/extensions/options/base.py @@ -0,0 +1,97 @@ +from datetime import datetime +import json +import logging + +from sqlalchemy import exc +from starlette.types import ASGIApp + +from ispyb import models + +from ...globals import g +from ..database.middleware import db +from ..database.session import get_session +from .schema import Options, UIOptions, BeamLineGroup + + +logger = logging.getLogger(__file__) + + +def setup_options(app: ASGIApp): + """Add the db_options to the current app global""" + with get_session() as session: + app.db_options = get_options(get_all=True, session=session) + + if not app.db_options.beamLineGroups: + logger.warning("`beamLineGroups` are not configured, setting default empty") + app.db_options.beamLineGroups = [ + BeamLineGroup( + groupName="Empty", + uiGroup="empty", + permission="bl_admin", + beamLines=[], + ) + ] + + +def get_options(get_all: bool = False, session=None) -> Options: + if not session: + session = db.session + + adminVars: list[models.AdminVar] = (session.query(models.AdminVar)).all() + + options = {} + for adminVar in adminVars: + try: + # To support more complex data types `value` is currently varchar(1000) + options[adminVar.name] = json.loads(adminVar.value) + except json.decoder.JSONDecodeError: + options[adminVar.name] = adminVar.value + + return Options(**options) if get_all else UIOptions(**options) + + +def update_options(options: Options) -> Options: + options_dict = options.dict(exclude_unset=True) + current_options_dict = get_options(get_all=True).dict() + + for option_key, option_value in options_dict.items(): + if current_options_dict[option_key] == option_value: + 
continue
+
+        adminVar = (
+            db.session.query(models.AdminVar)
+            .filter(models.AdminVar.name == option_key)
+            .first()
+        )
+        if adminVar:
+            adminVar.value = json.dumps(option_value)
+        else:
+            adminVar = models.AdminVar(name=option_key, value=json.dumps(option_value))
+            db.session.add(adminVar)
+
+        db.session.commit()
+
+        # Log changes in db_options
+        try:
+            # Requires unique constraint to be lifted on `username` to enable storing more than
+            # just online stats
+            adminComment = ""
+            if not isinstance(option_value, dict) and not isinstance(
+                option_value, list
+            ):
+                adminComment = f" to `{str(option_value)[:80]}`"
+
+            adminActivity = models.AdminActivity(
+                username=g.username,
+                action="db_options",
+                comments=f"changed `{option_key}`{adminComment}",
+                dateTime=datetime.now(),
+            )
+            db.session.add(adminActivity)
+            db.session.commit()
+            db.session.flush()
+        except exc.SQLAlchemyError:
+            db.session.rollback()
+            logger.exception("Could not log option change")
+
+    return options
diff --git a/pyispyb/app/extensions/options/schema.py b/pyispyb/app/extensions/options/schema.py
new file mode 100644
index 00000000..0d84c26e
--- /dev/null
+++ b/pyispyb/app/extensions/options/schema.py
@@ -0,0 +1,61 @@
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class BeamLineGroupBeamLine(BaseModel):
+    beamLineName: str = Field(title="Beamline Name")
+    sampleChangerType: Optional[str] = Field(
+        None, title="Sample Changer Type", nullable=True
+    )
+    sampleChangerCapacity: Optional[int] = Field(
+        None,
+        title="Sample Changer Capacity",
+        description="If no specific type is available a capacity can be defined for the generic view",
+        nullable=True,
+    )
+    archived: bool = Field(
+        False,
+        title="Archived",
+        description="Whether this beamline is archived (no longer displayed on landing page)",
+    )
+
+
+class BeamLineGroup(BaseModel):
+    groupName: str = Field(title="Group Name", description="A group of beamlines")
+    uiGroup: str = Field(title="UI Group", description="Display type to use in the UI")
+    permission: str = Field(
+        title="Permission",
+        description="Permission required to view all proposals from these beamlines",
+    )
+    beamLines: list[BeamLineGroupBeamLine] = Field([], title="Beamlines")
+
+
+class UIOptions(BaseModel):
+    """Publicly available UI options"""
+
+    motd: str = Field(
+        "", title="Message of the Day", description="Displayed at the top of the UI"
+    )
+    beamLineGroups: list[BeamLineGroup] = Field([], title="Beamline Groups")
+
+
+class Options(UIOptions):
+    """All available application options"""
+
+    query_debug: bool = Field(
+        False, title="Query Debugging", description="Enable query debugging"
+    )
+    enable_legacy_routes: bool = Field(
+        True, title="Legacy Routes", description="Enable legacy routes"
+    )
+    enable_webservice_routes: bool = Field(
+        True,
+        title="Webservice Routes",
+        description="Enable webservices called from external applications",
+    )
+    create_person_on_missing: bool = Field(
+        False,
+        title="Create Missing Login",
+        description="Automatically create a `Person` entry if the `login` is missing from the database. (!) Warning: modifies the database",
+    )
diff --git a/pyispyb/app/extensions/report/__init__.py b/pyispyb/app/extensions/report/__init__.py
deleted file mode 100644
index a722c46e..00000000
--- a/pyispyb/app/extensions/report/__init__.py
+++ /dev/null
@@ -1,94 +0,0 @@
-"""
-Project: py-ispyb
-https://github.com/ispyb/py-ispyb
-
-This file is part of py-ispyb software. 
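A brief, self-contained sketch of the `AdminVar` round-trip implemented by `get_options`/`update_options` above: because `value` is a varchar column, structured option values are stored as JSON strings and parsed back on read, with plain strings falling through unchanged. The beamline group content below is made up for illustration.

```python
# How an AdminVar value round-trips into the Options schema above;
# the group data is illustrative only.
import json

from pyispyb.app.extensions.options.schema import Options

stored = json.dumps(
    [{"groupName": "MX", "uiGroup": "mx", "permission": "mx_admin", "beamLines": []}]
)

# get_options() tries json.loads() first and falls back to the raw string
try:
    value = json.loads(stored)
except json.JSONDecodeError:
    value = stored

options = Options(beamLineGroups=value)
assert options.beamLineGroups[0].groupName == "MX"
```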
- -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - -import os -import logging - -from flask import current_app - -import barcode -import qrcode -import pdfkit - - -class Report(object): - """ - This is a helper extension, which adjusts logging configuration for the - application. - """ - - def create_dewar_labels(self, shipping_info_dict, dewar_dict): - html_filename = "dewar_%d_label.html" % dewar_dict["dewarId"] - pdf_filename = "dewar_%d_label.pdf" % dewar_dict["dewarId"] - - with open(current_app.config["DEWAR_LABEL_TEMPLATE_FILEPATH"]) as template_file: - html_template = template_file.read() - - barcode_image = barcode.get( - current_app.config["BARCODE_TYPE"], - dewar_dict["barCode"], - writer=barcode.writer.ImageWriter(format="PNG"), - ) - barcode_filepath = os.path.join( - current_app.config["TEMP_FOLDER"], "barcode" - ) - barcode_image.save(barcode_filepath) - - qrcode_filepath = os.path.join( - current_app.config["TEMP_FOLDER"], "qrcode.png" - ) - qrcode_image = qrcode.make(dewar_dict["barCode"]) - qrcode_image.save(qrcode_filepath) - - html_template = html_template.format( - site_logo_filepath=current_app.config["SITE_LOGO_PATH"], - barcode_filepath=barcode_filepath + ".png", - qrcode_filepath=qrcode_filepath, - site_name=current_app.config["SITE_NAME"], - parcel_label=dewar_dict["code"], - shipping_label=shipping_info_dict["shipping"]["shippingName"], - num_parcels="1", - proposal_number="%s%s" - % ( - shipping_info_dict["proposal"]["proposalCode"], - shipping_info_dict["proposal"]["proposalNumber"], - ), - laboratory_name=shipping_info_dict["send_lab"]["name"], - local_contact="5", - ) - - html_file = open( - os.path.join(current_app.config["TEMP_FOLDER"], html_filename), "w" - ) - html_file.write(html_template) - html_file.close() - - pdfkit.from_file( - str(os.path.join(current_app.config["TEMP_FOLDER"], html_filename)), - str(os.path.join(current_app.config["TEMP_FOLDER"], pdf_filename)), - ) - - return html_filename, pdf_filename - - -report = Report() diff --git a/pyispyb/app/extensions/user_office/AbstractUserOffice.py b/pyispyb/app/extensions/user_office/AbstractUserOffice.py deleted file mode 100644 index bf14ffd3..00000000 --- a/pyispyb/app/extensions/user_office/AbstractUserOffice.py +++ /dev/null @@ -1,53 +0,0 @@ -"""Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. 
- -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -import abc - - -class AbstractUserOffice(object): - - """Abstract class to define link with the user office.""" - - __metaclass__ = abc.ABCMeta - - def init_app(self, app): - """Initializes user office class. - - Args: - app (flask app): Flask app - """ - return - - @abc.abstractmethod - def sync_all(self): - """Main method to sync with user office""" - - @abc.abstractmethod - def sync_proposal(self, code, number): - """Updates proposal based on the code and number. - - Args: - code (str): MX, SAXS, mxihr, etc - number (int): proposals number - """ diff --git a/pyispyb/app/extensions/user_office/DummyUserOffice.py b/pyispyb/app/extensions/user_office/DummyUserOffice.py deleted file mode 100644 index b2b79454..00000000 --- a/pyispyb/app/extensions/user_office/DummyUserOffice.py +++ /dev/null @@ -1,50 +0,0 @@ -"""Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from pyispyb.app.extensions.user_office.AbstractUserOffice import ( - AbstractUserOffice, -) - - -class DummyUserOffice(AbstractUserOffice): - def init_app(self, app): - """Initializes user office class. - - Args: - app (flask app): Flask app - """ - return - - def sync_all(self): - """Main method to sync with user office""" - print("Sync with user office") - - def sync_proposal(self, code, number): - """Updates proposal based on the code and number. - - Args: - code (str): MX, SAXS, mxihr, etc - number (int): proposals number - """ - print("sync proposal %s%d" % (code, number)) diff --git a/pyispyb/app/extensions/user_office/Smis.py b/pyispyb/app/extensions/user_office/Smis.py deleted file mode 100644 index 64001c54..00000000 --- a/pyispyb/app/extensions/user_office/Smis.py +++ /dev/null @@ -1,61 +0,0 @@ -# encoding: utf-8 -"""Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - -from datetime import datetime, timedelta -from suds.client import Client -from suds.transport.http import HttpAuthenticated - - -from pyispyb.app.extensions.user_office.AbstractUserOfficeLink import ( - AbstractUserOfficeLink, -) -from pyispyb.core.modules import proposal - - -class SmisLink(AbstractUserOfficeLink): - def init_app(self, app): - """Initializes user office class. - - Args: - app (flask app): Flask app - """ - http_auth = HttpAuthenticated( - username=app.config.get("SMIS_USERNAME"), - password=app.config.get("SMIS_PASSWORD"), - ) - self.smis_ws = Client( - app.config.get("SMIS_WS_URL"), transport=http_auth, cache=None, timeout=180 - ) - - def sync_all(self): - # Get 1 month old proposals - print("sync proposals") - past_str = datetime.strftime(datetime.now() - timedelta(days=30), "%d/%m/%YYYY") - now_str = datetime.strftime(datetime.now(), "%d/%m/%YYYY") - smis_proposals = self.smis_ws.service.findNewMXProposalPKs(past_str, now_str) - current_proposals = proposal.get_proposals_by_query() - - print(smis_proposals) - print(current_proposals) - print("Done!") diff --git a/pyispyb/app/extensions/user_office/__init__.py b/pyispyb/app/extensions/user_office/__init__.py deleted file mode 100644 index 71366eb1..00000000 --- a/pyispyb/app/extensions/user_office/__init__.py +++ /dev/null @@ -1,66 +0,0 @@ -""" -Project: py-ispyb -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - -import logging -import datetime -import importlib - -import time - -# import gevent -# from gevent import monkey -# monkey.patch_all() - - -from flask import current_app - -__license__ = "LGPLv3+" - - -log = logging.getLogger(__name__) - - -class UserOffice: - """Allows to retrieve information from the user office""" - - def __init__(self): - self.site_user_office = None - - def init_app(self, app): - module_name = app.config["USER_OFFICE_MODULE"] - class_name = app.config["USER_OFFICE_CLASS"] - cls = getattr(importlib.import_module(module_name), class_name) - self.site_user_office = cls() - self.site_user_office.init_app(app) - - # self.sync_polling = gevent.spawn_later( - # 10, - # self.sync_with_user_office, - # app - # ) - - def sync_all(self): - self.site_user_office.sync_all() - - def sync_proposal(self, code, number): - self.site_user_office.sync_proposal(code, number) - - -user_office = UserOffice() diff --git a/pyispyb/app/globals.py b/pyispyb/app/globals.py new file mode 100644 index 00000000..ee391cde --- /dev/null +++ b/pyispyb/app/globals.py @@ -0,0 +1,92 @@ +""" +https://gist.github.com/ddanier/ead419826ac6c3d75c96f9d89bea9bd0 +This allows to use global variables inside the FastAPI application +using async mode. 
+ +# Usage + +Just import `g` and then access (set/get) attributes of it: +```python +from your_project.globals import g + + +g.foo = "foo" + +# In some other code +assert g.foo == "foo" +``` + +Best way to utilize the global `g` in your code is to set the desired +value in a FastAPI dependency, like so: +```python +async def set_global_foo() -> None: + g.foo = "foo" + + +@app.get("/test/", dependencies=[Depends(set_global_foo)]) +async def test(): + assert g.foo == "foo" +``` + +# Setup + +Add the `GlobalsMiddleware` to your app: +```python +app = fastapi.FastAPI( + title="Your app API", +) +app.add_middleware(GlobalsMiddleware) # <-- This line is necessary +``` + +Then just use it. ;-) +""" +from contextvars import ContextVar, Token +from typing import Any, Dict + +from starlette.types import ASGIApp, Receive, Scope, Send + + +class Globals: + __slots__ = ("_vars", "_reset_tokens") + + _vars: Dict[str, ContextVar] + _reset_tokens: Dict[str, Token] + + def __init__(self) -> None: + object.__setattr__(self, "_vars", {}) + object.__setattr__(self, "_reset_tokens", {}) + + def reset(self) -> None: + for _name, var in self._vars.items(): + try: + var.reset(self._reset_tokens[_name]) + # ValueError will be thrown if the reset() happens in + # a different context compared to the original set(). + # Then just set to None for this new context. + except ValueError: + var.set(None) + + def _ensure_var(self, item: str) -> None: + if item not in self._vars: + self._vars[item] = ContextVar(f"globals:{item}", default=None) + self._reset_tokens[item] = self._vars[item].set(None) + + def __getattr__(self, item: str) -> Any: + self._ensure_var(item) + return self._vars[item].get() + + def __setattr__(self, item: str, value: Any) -> None: + self._ensure_var(item) + self._vars[item].set(value) + + +class GlobalsMiddleware: + def __init__(self, app: ASGIApp) -> None: + self.app = app + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + g.reset() + await self.app(scope, receive, send) + + +g = Globals() diff --git a/pyispyb/app/main.py b/pyispyb/app/main.py new file mode 100644 index 00000000..c82dc4aa --- /dev/null +++ b/pyispyb/app/main.py @@ -0,0 +1,87 @@ +import logging +from logging.config import dictConfig +from typing import Any + +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from fastapi.openapi.utils import get_openapi + +from ..app.extensions.auth.onetime import expire_ontime_tokens_periodically +from ..app.extensions.database.utils import enable_debug_logging +from ..app.extensions.database.middleware import get_session +from ..app.extensions.options.base import setup_options +from ..app.globals import GlobalsMiddleware + +from ..config import settings, LogConfig +from ..app import routes as base_routes +from ..core import routes as core_routes +from ..app.extensions.auth import auth_provider + +dictConfig(LogConfig().dict()) +logger = logging.getLogger("ispyb") + + +app = FastAPI(openapi_url=f"{settings.api_root}/openapi.json") +app.add_middleware(GlobalsMiddleware) + + +@app.middleware("http") +async def get_session_as_middleware(request, call_next): + with get_session(): + return await call_next(request) + + +setup_options(app) + + +@app.on_event("startup") +async def expire_onetime_tokens() -> None: + await expire_ontime_tokens_periodically() + + +def enable_cors() -> None: + app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) + + +if 
settings.cors: + enable_cors() + +if settings.query_debug: + enable_debug_logging() + + +def custom_openapi() -> dict[str, Any]: + if app.openapi_schema: + return app.openapi_schema + + openapi_schema = get_openapi( + title="py-ISPyB", + version="0.1alpha", + description="FastAPI Prototype", + routes=app.routes, + ) + + # Convert nullable to ["null", type] for rjsf + # https://github.com/rjsf-team/react-jsonschema-form/pull/1213 + # This is technically incorrect for OpenAPI v3, but nullable is not yet supported in rjsf + for schema_name, schema in openapi_schema["components"]["schemas"].items(): + if "properties" in schema: + for property_name, property in schema["properties"].items(): + if property.get("nullable"): + property["type"] = ["null", property["type"]] + + app.openapi_schema = openapi_schema + return app.openapi_schema + + +setattr(app, "openapi", custom_openapi) + +auth_provider.init_app(app) +base_routes.init_app(app, prefix=settings.api_root) +core_routes.init_app(app, prefix=settings.api_root) diff --git a/pyispyb/app/routes/__init__.py b/pyispyb/app/routes/__init__.py index 7bb79c90..69afffce 100644 --- a/pyispyb/app/routes/__init__.py +++ b/pyispyb/app/routes/__init__.py @@ -1,30 +1,29 @@ -""" -Project: py-ispyb -https://github.com/ispyb/py-ispyb +# Project: py-ispyb +# https://github.com/ispyb/py-ispyb -This file is part of py-ispyb software. +# This file is part of py-ispyb software. -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. +# py-ispyb is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" +# py-ispyb is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# You should have received a copy of the GNU Lesser General Public License +# along with py-ispyb. If not, see . +from fastapi import FastAPI __license__ = "LGPLv3+" -def init_app(app, **kwargs): - +def init_app(app: FastAPI, prefix: str = None, **kwargs): + """Init extension routes.""" from importlib import import_module - for module_name in ["auth"]: - import_module(".%s" % module_name, package=__name__) + for module_name in ["auth", "options"]: + module = import_module(".%s" % module_name, package=__name__) + app.include_router(module.router, prefix=prefix) diff --git a/pyispyb/app/routes/auth.py b/pyispyb/app/routes/auth.py index 0575f791..3bec8625 100644 --- a/pyispyb/app/routes/auth.py +++ b/pyispyb/app/routes/auth.py @@ -1,96 +1,93 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. 
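The `nullable` rewrite applied by `custom_openapi()` above, shown in isolation on a made-up schema property (the property name and schema content are purely illustrative):

```python
# The nullable -> ["null", <type>] rewrite from custom_openapi(), in isolation
schema = {"properties": {"sampleChangerType": {"type": "string", "nullable": True}}}

for prop in schema["properties"].values():
    if prop.get("nullable"):
        # rjsf expects a JSON Schema type array rather than OpenAPI's nullable flag
        prop["type"] = ["null", prop["type"]]

assert schema["properties"]["sampleChangerType"]["type"] == ["null", "string"]
```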
-# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . - import logging -from flask import request, make_response -from sqlalchemy.exc import SQLAlchemyError - -from pyispyb.flask_restx_patched import HTTPStatus, Resource -from pyispyb.app.extensions.api import api_v1, Namespace -from pyispyb.app.extensions import auth_provider - - -__license__ = "LGPLv3+" - -log = logging.getLogger(__name__) -api = Namespace("Authentication", description="authentication namespace", path="/auth") -api_v1.add_namespace(api) - - -@api.errorhandler(SQLAlchemyError) -@api.header('ErrorType', 'SQLAlchemy Error') -def handle_sqlalchemy_exception(error): - '''This is a sqlalchemy error handler''' - log.error(str(error)) - return {'message': "Server error: %s" % str(error)}, HTTPStatus.BAD_REQUEST, {'ErrorType': 'SQLAlchemyError'} - -@api.errorhandler(ZeroDivisionError) -@api.header('ErrorType', 'Zero division') -def handle_zero_division_exception(error): - '''This is a zero division error''' - log.error(str(error)) - return {'message': "Server error: %s" % str(error)}, HTTPStatus.BAD_REQUEST, {'ErrorType': 'ZeroDivisionError'} - -@api.errorhandler(Exception) -@api.header('ErrorType', 'Exception') -def handle_exception(error): - '''This is a base error handler''' - log.error(str(error)) - print("Got the exception") - return {'message': "Server error: %s" % str(error)}, HTTPStatus.BAD_REQUEST , {'ErrorType': 'Exception'} - - -@api.route("/login") -class Login(Resource): - """Login resource""" - - def get(self): - authorization = request.authorization - - if ( - not authorization - or not authorization.username - or not authorization.password - ): - if not request.headers.get("username") or not request.headers.get( - "password" - ): - return make_response( - "Could not verify", - 401, - {"WWW-Authenticate": 'Basic realm="Login required!"'}, +from typing import Any, Optional + +from pydantic import BaseModel +from fastapi import Request, status, HTTPException + +from ..extensions.database.middleware import db +from ..extensions.database.definitions import get_current_person +from ..extensions.auth import auth_provider +from ..extensions.auth.token import generate_token +from ..base import BaseRouter + + +class Login(BaseModel): + plugin: Optional[str] + login: Optional[str] + password: Optional[str] + # keycloak token, not jwt (!) 
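+    # i.e. `token` below carries the opaque token issued by the external
+    # provider (assumption: the configured auth plugin validates it), not
+    # the JWT that py-ispyb itself returns in `TokenResponse`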
+    token: Optional[str]
+
+
+class TokenResponse(BaseModel):
+    login: str
+    token: str
+    permissions: list[str]
+
+
+class PluginConfig(BaseModel):
+    name: str
+    config: dict[str, Any]
+
+
+class AuthConfig(BaseModel):
+    plugins: list[PluginConfig]
+
+
+logger = logging.getLogger(__name__)
+router = BaseRouter(prefix="/auth", tags=["Authentication"])
+
+
+@router.get("/config", response_model=AuthConfig)
+def config() -> AuthConfig:
+    return {"plugins": auth_provider.get_export_config()}
+
+
+@router.post(
+    "/login",
+    response_model=TokenResponse,
+    status_code=status.HTTP_201_CREATED,
+    responses={401: {"description": "Could not login user"}},
+)
+def login(login_details: Login, request: Request) -> TokenResponse:
+    """Login a user"""
+    person = auth_provider.get_auth(**login_details.dict())
+    if not person:
+        logger.warning(
+            f"Failed login attempt from `{login_details.login}` with ip `{request.client.host}`"
+        )
+        raise HTTPException(status_code=401, detail="Could not verify")
+
+    person_check = get_current_person(person.login)
+    if not person_check:
+        if request.app.db_options.create_person_on_missing:
+            db.session.add(person)
+            db.session.commit()
+            if not person.personId:
+                logger.warning(f"Could not create person from login `{login_details.login}`")
+                raise HTTPException(
+                    status_code=401, detail="User does not exist in database."
                )
-        else:
-            username = request.headers.get("username")
-            password = request.headers.get("password")
+            # New user should have empty permission list
+            person._metadata["permissions"] = []
+            person_check = person
+            logger.info(f"Created new Person `{person.personId}` for `{login_details.login}`")
     else:
-        username = authorization.username
-        password = authorization.password
-
-    roles = auth_provider.get_roles(username, password)
-    if not roles:
-        return make_response(
-            "Could not verify",
-            401,
-            {"WWW-Authenticate": 'Basic realm="Login required!"'},
+        logger.warning(
+            f"Login attempt for unknown user `{login_details.login}` with ip `{request.client.host}`"
         )
-    else:
-        token_info = auth_provider.generate_token(username, roles)
-        return token_info
+        raise HTTPException(
+            status_code=401, detail="User does not exist in database." 
+            )
+
+    token_info = generate_token(
+        person_check.login,
+        person_check.personId,
+        person_check._metadata["permissions"],
+    )
+
+    logger.info(
+        f"Successful login attempt from `{login_details.login}` with ip `{request.client.host}`"
+    )
+
+    return token_info
diff --git a/pyispyb/app/routes/options.py b/pyispyb/app/routes/options.py
new file mode 100644
index 00000000..bac45078
--- /dev/null
+++ b/pyispyb/app/routes/options.py
@@ -0,0 +1,42 @@
+from fastapi import Depends
+
+from ...dependencies import permission
+from ..base import AuthenticatedAPIRouter
+from ..extensions.options import base as crud
+from ..extensions.options.schema import Options, UIOptions
+
+router = AuthenticatedAPIRouter(prefix="/options", tags=["Options"])
+
+
+@router.get(
+    "/ui",
+    response_model=UIOptions,
+)
+def get_ui_options() -> UIOptions:
+    """Get the available UI database options"""
+    return crud.get_options()
+
+
+@router.get(
+    "",
+    response_model=Options,
+)
+def get_options(depends: bool = Depends(permission("manage_options"))) -> Options:
+    """Get the available database options"""
+    return crud.get_options(get_all=True)
+
+
+@router.patch(
+    "",
+    response_model=Options,
+)
+def update_options(
+    options: Options, depends=Depends(permission("manage_options"))
+) -> Options:
+    """Update the database options"""
+    from pyispyb.app.main import app
+
+    crud.update_options(options)
+    options = crud.get_options(get_all=True)
+    app.db_options = options
+    return options
diff --git a/pyispyb/app/utils/__init__.py b/pyispyb/app/utils/__init__.py
index 2b4c3dae..f6f75a54 100644
--- a/pyispyb/app/utils/__init__.py
+++ b/pyispyb/app/utils/__init__.py
@@ -17,32 +17,73 @@
 You should have received a copy of the GNU Lesser General Public License
 along with py-ispyb. If not, see .
 """
+from datetime import datetime
+from decimal import Decimal
 import os
-from requests import get
-from flask import current_app
+import time
+import logging
+from sqlalchemy import text
+from functools import wraps
+from pyispyb.config import settings
 
+logger = logging.getLogger("ispyb")
 
-def create_response_item(msg=None, num_items=None, data=[]):
+
+def get_sql_query(name, append=""):
+    """Get sql query string from matching file.
+
+    Args:
+        name (str): name of the query
+        append (str, optional): text to append at the end of the query. Defaults to "".
+
+    Returns:
+        sqlalchemy.sql.elements.TextClause: the query
+    """
+    path = os.path.join(settings.queries_dir, name + ".sql")
+    with open(path) as query_file:
+        query_string = query_file.read() + append
+    query = text(query_string)
+    return query
+
+
+def queryresult_to_dict(result):
+    """Convert a sql query result to a python dictionary.
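+
+    Datetime values are converted to ISO 8601 strings and Decimal values to
+    floats so that each returned row is JSON serialisable.
+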
Args: - info_msg ([type]): [description] - error_msg ([type]): [description] - num_items ([type]): [description] - data ([type]): [description] + result : sql alchemy query result Returns: - [type]: [description] + dict: converted result + """ + res = [] + + for row in result: + row_dict = {} + for field in row._mapping.items(): + field_name = field[0] + field_value = field[1] + if isinstance(field_value, datetime): + field_value = field_value.isoformat() + if isinstance(field_value, Decimal): + field_value = float(field_value) + row_dict[field_name] = field_value + res.append(row_dict) + + return res + + +def timed(fn): + """ + Decorator to log the time that a class function takes to execute """ - return { - "data": {"total": num_items, "rows": data}, - "message": msg, - } + @wraps(fn) + def wrapper(self, *args, **kwargs): + start = time.time() + result = fn(self, *args, **kwargs) + took = round(time.time() - start, 3) + logger.debug(f"Class {self.__class__} - Function {fn.__name__} took {took}") + return result -def download_pdb_file(pdb_filename): - response = get(current_app.config["PDB_URI"] + "/" + pdb_filename) - if response.status_code == 200: - return response.content + return wrapper diff --git a/pyispyb/cli/__init__.py b/pyispyb/cli/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/pyispyb/cli/requests.py b/pyispyb/cli/requests.py new file mode 100644 index 00000000..b0060a9f --- /dev/null +++ b/pyispyb/cli/requests.py @@ -0,0 +1,86 @@ +import requests +import json + + +class RestClient: + base = "http://localhost" + port = 8000 + _token = None + + def __init__(self, base=None, port=None, verify=True): + if base is not None: + self.base = base + + if port is not None: + self.port = port + + self.verify = verify + + def token(self): + return self._token + + def print(self, data): + print(json.dumps(data, indent=4, sort_keys=True)) + + def req( + self, + url, + method="get", + data=None, + params=None, + pprint=False, + token=True, + show_headers=False, + ): + parts = self.base.split("/") + base = ( + parts[0] + + "/" + + parts[1] + + "/" + + parts[2] + + ":" + + str(self.port) + + "/" + + "/".join(parts[3:]) + ) + + headers = None + if self._token and token: + headers = {"Authorization": "Bearer " + self._token} + resp = getattr(requests, method)( + base + url, + json=data, + params=params, + headers=headers, + verify=self.verify, + ) + + if pprint: + print(resp.url, resp.status_code, len(resp.content)) + if show_headers: + print(resp.headers) + try: + self.print(resp.json()) + except Exception: + print(resp.text) + + return + + else: + return resp + + def login(self, login, password): + auth = {"login": login, "password": password, "plugin": "dummy"} + resp = self.req("/auth/login", "post", auth) + if resp.status_code == 201: + self._token = resp.json()["token"] + print("permissions: ", resp.json()["permissions"]) + else: + try: + print(resp.status_code, resp.json()) + except Exception: + print(resp.status_code, resp.text) + return False + + return True diff --git a/pyispyb/cli/rest.py b/pyispyb/cli/rest.py new file mode 100644 index 00000000..5eea6553 --- /dev/null +++ b/pyispyb/cli/rest.py @@ -0,0 +1,109 @@ +from argparse import ArgumentParser +import enum +import json + +from ..config import settings +from .requests import RestClient + + +class MethodEnum(enum.Enum): + GET = "get" + POST = "post" + PUT = "put" + PATCH = "patch" + DELETE = "delete" + + +def run(): + parser = ArgumentParser(description="Use and display result from ISPyB REST API") + 
parser.add_argument(
+        "url",
+        type=str,
+        help="URL of the REST API. Can be a local url: /samples, or a fully qualified url: https://host:port/ispyb/v1/api/samples",
+    )
+    parser.add_argument(
+        "-m",
+        "--method",
+        type=MethodEnum,
+        default=MethodEnum.GET,
+        help="HTTP method to use: get, post, put, patch, delete",
+    )
+    parser.add_argument("-l", "--login", type=str, default="abcd", help="Login to use")
+    parser.add_argument(
+        "-a", "--admin", action="store_true", help="Login as admin (efgh)"
+    )
+    parser.add_argument(
+        "-d", "--data", type=str, help="Data to pass to the request as a json string"
+    )
+    parser.add_argument(
+        "-r", "--api-root", type=str, default=settings.api_root, help="API root"
+    )
+    parser.add_argument(
+        "-b", "--base-url", type=str, default="localhost", help="Base url"
+    )
+    parser.add_argument("-s", "--https", action="store_true", help="Use https")
+    parser.add_argument(
+        "-nv",
+        "--no-verify",
+        action="store_true",
+        help="Don't verify the https certificate (implies --https)",
+    )
+
+    options = parser.parse_args()
+
+    print("- Log-in to the service")
+    _base_url = options.base_url + options.api_root
+    url = options.url
+    https = options.https
+    port = None
+    if options.url.startswith("http"):
+        if options.url.startswith("https"):
+            https = True
+
+        parts = options.url.split("/")
+        _base_url = parts[2]
+
+        parts2 = _base_url.split(":")
+        if len(parts2) == 2:
+            _base_url = parts2[0]
+            port = parts2[1]
+
+        url = "/" + "/".join(parts[3:])
+
+    base_url = f"{'https' if (https or options.no_verify) else 'http'}://{_base_url}"
+    if options.no_verify:
+        import urllib3
+
+        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+
+    client = RestClient(base_url, port=port, verify=not options.no_verify)
+    if not client.login("efgh" if options.admin else options.login, "a"):
+        print("** Could not login **")
+        exit(1)
+
+    if url.startswith("/api"):
+        url = url[4:]
+
+    print("- Send request:")
+    print(f"  method: {options.method.value}")
+    print(f"  url   : {url}")
+
+    data = None
+    if options.data:
+        try:
+            data = json.loads(options.data)
+            print(f"  data  : {options.data}")
+        except json.JSONDecodeError as e:
+            print("** Could not deserialise json data **")
+            print(f"  {str(e)}")
+            exit(1)
+
+    print("-" * 80)
+    client.req(url, method=options.method.value, data=data, pprint=True)
+    print("-" * 80)
+
+    print("- Terminated")
+
+
+if __name__ == "__main__":
+    run()
diff --git a/pyispyb/config.py b/pyispyb/config.py
index 7c69b139..26a02262 100644
--- a/pyispyb/config.py
+++ b/pyispyb/config.py
@@ -24,125 +24,98 @@
 import os
-import tempfile
-import ruamel.yaml
 
-class BaseConfig:
-    """Base config class"""
+from functools import lru_cache
+from pydantic import BaseSettings, BaseModel
+import yaml
 
-    PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
-    STATIC_ROOT = os.path.join(PROJECT_ROOT, "static")
-    SITE_NAME = "Generic"
-    API_ROOT = "/ispyb/api/v1"
-    SECRET_KEY = os.urandom(16)
-    SQLALCHEMY_TRACK_MODIFICATIONS = True
-    # SQLALCHEMY_POOL_RECYCLE = 2999
-    # SQLALCHEMY_POOL_TIMEOUT = 20
-    PAGINATION_ITEMS_LIMIT = 1000
+PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
+RESOURCES_ROOT = os.path.join(PROJECT_ROOT, "resources")
 
-    DEBUG = True
-    ERROR_404_HELP = False
 
-    REVERSE_PROXY_SETUP = bool(os.getenv("EXAMPLE_API_REVERSE_PROXY_SETUP", ""))
 
-    AUTHORIZATIONS = {
-        "apikey": {"type": "apiKey", "in": "header", "name": "Authorization"}
-    }
-
-    AUTHORIZATION_RULES = {}
-
-    AUTH_MODULE = "pyispyb.app.extensions.auth.DummyAuthentication"
-    AUTH_CLASS = 
"DummyAuthentication" - JWT_CODING_ALGORITHM = "HS256" - TOKEN_EXP_TIME = 300 # in minutes - MASTER_TOKEN = "MasterToken" - ADMIN_ROLES = ["manager", "admin"] # allows to access all resources - - BARCODE_TYPE = "code39" - TEMP_FOLDER = os.path.join(tempfile.gettempdir(), "pyispyb", "tmp") - UPLOAD_FOLDER = os.path.join(tempfile.gettempdir(), "pyispyb", "upload") - SITE_LOGO_PATH = os.path.join(STATIC_ROOT, "favicon.png") - DEWAR_LABEL_TEMPLATE_FILEPATH = os.path.join( - STATIC_ROOT, - "dewar_label_template.html" - ) - - SWAGGER_UI_URI = "/docs" #False disable docs - SWAGGER_UI_JSONEDITOR = True - SWAGGER_UI_OAUTH_CLIENT_ID = "documentation" - SWAGGER_UI_OAUTH_REALM = "Authentication for ISPyB server" - SWAGGER_UI_OAUTH_APP_NAME = "ISPyB server documentation" +def get_env_file(): + res = os.getenv("ISPYB_ENVIRONMENT", None) + if res is None or res == "": + return "config/.env" + v = f"config/{res}.env" + if os.path.exists(v): + return v - CSRF_ENABLED = True + raise Exception(f"Config file {v} could not be found.") - USER_OFFICE_MODULE = ( - "pyispyb.app.extensions.user_office.DummyUserOffice" - ) - USER_OFFICE_CLASS = "DummyUserOffice" - # user_office_SYNC_INTERVAL = 60 * 60 * 5 #in seconds - USER_OFFICE_SYNC_INTERVAL = 30 - def __init__(self, config_filename=None): - with open(config_filename) as f: - config = ruamel.yaml.load(f.read(), ruamel.yaml.RoundTripLoader) +class Settings(BaseSettings): + static_root: str = os.path.join(PROJECT_ROOT, "static") + queries_dir: str = os.path.join(RESOURCES_ROOT, "queries") - for key, value in config["server"].items(): - setattr(self, key, value) + api_root: str + service_name: str - if config.get("authorization_rules") is not None: - self.AUTHORIZATION_RULES = {} - for key, value in config["authorization_rules"].items(): - self.AUTHORIZATION_RULES[key] = value + sqlalchemy_database_uri: str + query_debug: bool - print("Authorization rules: ") - print("[method] Endpoint - Allowed roles") - for endpoint, value in self.AUTHORIZATION_RULES.items(): - for method, roles in value.items(): - print("[%s] %s - %s" % (method, endpoint, str(roles))) + auth = {} + auth_config: str - PDB_URI = "https://files.rcsb.org/download" + jwt_coding_algorithm: str + token_exp_time: int # in minutes + secret_key: str + cors: bool = False -class ProductionConfig(BaseConfig): - """Production config + simulation_config: str = None - Args: - BaseConfig ([type]): [description] - """ + # Map file paths in the database to a different root directory + path_map: str = None - def __init__(self, config_filename=None): - super().__init__(config_filename) + class Config: + env_file = get_env_file() - SECRET_KEY = os.getenv("ISPYB_SECRET_KEY") - SQLALCHEMY_DATABASE_URI = os.getenv( - "ISPYB_DATABASE_URI" - ) - MASTER_TOKEN = None - SWAGGER_UI_URI = False +@lru_cache +def get_settings() -> Settings: + return Settings() -class DevelopmentConfig(BaseConfig): - """Dev config - Args: - BaseConfig ([type]): [description] - """ +settings = get_settings() - def __init__(self, config_filename=None): - super().__init__(config_filename) +AUTH_CONFIG = os.path.realpath(os.path.join(PROJECT_ROOT, "..", settings.auth_config)) +try: + with open(AUTH_CONFIG) as f: + yaml_settings = dict() + yaml_settings.update(yaml.safe_load(f)) + settings.auth = yaml_settings["AUTH"] +except IOError: + raise Exception(f"Could not access auth config: {AUTH_CONFIG}") - DEBUG = True +class LogConfig(BaseModel): + """Logging configuration to be set for the server""" -class TestingConfig(BaseConfig): - """Testing config + 
LOGGER_NAME: str = "pyispyb" + LOG_FORMAT: str = "%(levelprefix)s | %(asctime)s | %(name)s | %(message)s" + LOG_LEVEL: str = "INFO" - Args: - BaseConfig ([type]): [description] - """ - - def __init__(self, config_filename=None): - super().__init__(config_filename) - - TESTING = True + version = 1 + disable_existing_loggers = False + formatters = { + "default": { + "()": "uvicorn.logging.DefaultFormatter", + "fmt": LOG_FORMAT, + "datefmt": "%Y-%m-%d %H:%M:%S", + }, + } + handlers = { + "default": { + "formatter": "default", + "class": "logging.StreamHandler", + "stream": "ext://sys.stderr", + }, + } + loggers = { + "root": {"handlers": ["default"], "level": LOG_LEVEL}, + "ispyb": {"handlers": ["default"], "level": LOG_LEVEL}, + "db": {"handlers": ["default"], "level": "DEBUG"}, + } diff --git a/pyispyb/connector/__init__.py b/pyispyb/connector/__init__.py deleted file mode 100644 index 4e7422a3..00000000 --- a/pyispyb/connector/__init__.py +++ /dev/null @@ -1,54 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. -# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . - - -from flask import current_app -from requests import get, post - - -__license__ = "LGPLv3+" - - -def is_resource_available(service_name): - try: - headers = { - "Authorization": "Bearer %s" % current_app.config["MASTER_TOKEN"], - "Host": service_name} - response = get(current_app.config["API_GATEWAY_URL"] + "/schemas/available_names", headers=headers) - data = response.json() - status_code = response.status_code - except ConnectionError: - status_code = 400 - data = "ISPyB service %s is not available" % service_name - return status_code, data - - -def get_ispyb_resource(service_name, path): - status_code, data = is_resource_available(service_name) - if status_code == 200: - headers = { - "Authorization": "Bearer %s" % current_app.config["MASTER_TOKEN"], - "Host": service_name} - response = get( - current_app.config["API_GATEWAY_URL"] + path, - headers=headers) - data = response.json() - status_code = response.status_code - return status_code, data diff --git a/pyispyb/core/__init__.py b/pyispyb/core/__init__.py index 3b00efdd..6c744c77 100644 --- a/pyispyb/core/__init__.py +++ b/pyispyb/core/__init__.py @@ -21,17 +21,3 @@ __license__ = "LGPLv3+" - - -def init_app(app): - - from . import modules - - modules.init_app(app) - - from . 
diff --git a/pyispyb/core/__init__.py b/pyispyb/core/__init__.py
index 3b00efdd..6c744c77 100644
--- a/pyispyb/core/__init__.py
+++ b/pyispyb/core/__init__.py
@@ -21,17 +21,3 @@
 __license__ = "LGPLv3+"
-
-
-def init_app(app):
-
-    from . import modules
-
-    modules.init_app(app)
-
-    from . import routes
-
-    routes.init_app(app)
-
-    print("ispyb-core loaded")
-    # app.logger.debug("ISPyB server started")
diff --git a/pyispyb/core/models.py b/pyispyb/core/models.py
deleted file mode 100644
index d9b9f426..00000000
--- a/pyispyb/core/models.py
+++ /dev/null
@@ -1,3496 +0,0 @@
-# coding: utf-8
-from flask_sqlalchemy import SQLAlchemy
-
-
-from pyispyb.app.extensions import db
-
-
-
-class AbInitioModel(db.Model):
-    __tablename__ = 'AbInitioModel'
-
-    abInitioModelId = db.Column(db.Integer, primary_key=True)
-    modelListId = db.Column(db.ForeignKey('ModelList.modelListId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    averagedModelId = db.Column(db.ForeignKey('Model.modelId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    rapidShapeDeterminationModelId = db.Column(db.ForeignKey('Model.modelId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    shapeDeterminationModelId = db.Column(db.ForeignKey('Model.modelId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    comments = db.Column(db.String(512))
-    creationTime = db.Column(db.DateTime)
-
-    Model = db.relationship('Model', primaryjoin='AbInitioModel.averagedModelId == Model.modelId')
-    ModelList = db.relationship('ModelList', primaryjoin='AbInitioModel.modelListId == ModelList.modelListId')
-    Model1 = db.relationship('Model', primaryjoin='AbInitioModel.rapidShapeDeterminationModelId == Model.modelId')
-    Model2 = db.relationship('Model', primaryjoin='AbInitioModel.shapeDeterminationModelId == Model.modelId')
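AbInitioModel above is typical of why this generated model layer spells out primaryjoin on nearly every relationship: several columns are foreign keys into the same table (here, three point at Model.modelId), so SQLAlchemy cannot infer which key a relationship should traverse. A minimal, self-contained illustration in plain SQLAlchemy 1.4+ (demo table names, not this schema):

from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()

class Model(Base):
    __tablename__ = "Model"
    modelId = Column(Integer, primary_key=True)

class AbInitioDemo(Base):
    __tablename__ = "AbInitioDemo"
    abInitioModelId = Column(Integer, primary_key=True)
    averagedModelId = Column(ForeignKey("Model.modelId"))
    shapeDeterminationModelId = Column(ForeignKey("Model.modelId"))

    # Omitting primaryjoin (or foreign_keys=...) here raises
    # AmbiguousForeignKeysError at mapper configuration time.
    averaged = relationship(
        "Model",
        primaryjoin="AbInitioDemo.averagedModelId == Model.modelId",
    )
    shape = relationship(
        "Model",
        primaryjoin="AbInitioDemo.shapeDeterminationModelId == Model.modelId",
    )

Base.metadata.create_all(create_engine("sqlite://"))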
-
-
-
-class Additive(db.Model):
-    __tablename__ = 'Additive'
-
-    additiveId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(45))
-    additiveType = db.Column(db.String(45))
-    comments = db.Column(db.String(512))
-
-
-
-class AdminActivity(db.Model):
-    __tablename__ = 'AdminActivity'
-
-    adminActivityId = db.Column(db.Integer, primary_key=True)
-    username = db.Column(db.String(45), nullable=False, unique=True, server_default=db.FetchedValue())
-    action = db.Column(db.String(45), index=True)
-    comments = db.Column(db.String(100))
-    dateTime = db.Column(db.DateTime)
-
-
-
-class AdminVar(db.Model):
-    __tablename__ = 'AdminVar'
-
-    varId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(32), index=True)
-    value = db.Column(db.String(1024), index=True)
-
-
-
-class Aperture(db.Model):
-    __tablename__ = 'Aperture'
-
-    apertureId = db.Column(db.Integer, primary_key=True)
-    sizeX = db.Column(db.Float)
-
-
-
-class Assembly(db.Model):
-    __tablename__ = 'Assembly'
-
-    assemblyId = db.Column(db.Integer, primary_key=True)
-    macromoleculeId = db.Column(db.ForeignKey('Macromolecule.macromoleculeId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    creationDate = db.Column(db.DateTime)
-    comments = db.Column(db.String(255))
-
-    Macromolecule = db.relationship('Macromolecule', primaryjoin='Assembly.macromoleculeId == Macromolecule.macromoleculeId')
-
-
-
-class AssemblyHasMacromolecule(db.Model):
-    __tablename__ = 'AssemblyHasMacromolecule'
-
-    AssemblyHasMacromoleculeId = db.Column(db.Integer, primary_key=True)
-    assemblyId = db.Column(db.ForeignKey('Assembly.assemblyId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    macromoleculeId = db.Column(db.ForeignKey('Macromolecule.macromoleculeId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-
-    Assembly = db.relationship('Assembly', primaryjoin='AssemblyHasMacromolecule.assemblyId == Assembly.assemblyId')
-    Macromolecule = db.relationship('Macromolecule', primaryjoin='AssemblyHasMacromolecule.macromoleculeId == Macromolecule.macromoleculeId')
-
-
-
-class AssemblyRegion(db.Model):
-    __tablename__ = 'AssemblyRegion'
-
-    assemblyRegionId = db.Column(db.Integer, primary_key=True)
-    assemblyHasMacromoleculeId = db.Column(db.ForeignKey('AssemblyHasMacromolecule.AssemblyHasMacromoleculeId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    assemblyRegionType = db.Column(db.String(45))
-    name = db.Column(db.String(45))
-    fromResiduesBases = db.Column(db.String(45))
-    toResiduesBases = db.Column(db.String(45))
-
-    AssemblyHasMacromolecule = db.relationship('AssemblyHasMacromolecule', primaryjoin='AssemblyRegion.assemblyHasMacromoleculeId == AssemblyHasMacromolecule.AssemblyHasMacromoleculeId')
-
-
-
-class AutoProc(db.Model):
-    __tablename__ = 'AutoProc'
-
-    autoProcId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    autoProcProgramId = db.Column(db.Integer, index=True, info='Related program item')
-    spaceGroup = db.Column(db.String(45), info='Space group')
-    refinedCell_a = db.Column(db.Float, info='Refined cell')
-    refinedCell_b = db.Column(db.Float, info='Refined cell')
-    refinedCell_c = db.Column(db.Float, info='Refined cell')
-    refinedCell_alpha = db.Column(db.Float, info='Refined cell')
-    refinedCell_beta = db.Column(db.Float, info='Refined cell')
-    refinedCell_gamma = db.Column(db.Float, info='Refined cell')
-    recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time')
-
-
-
-class AutoProcIntegration(db.Model):
-    __tablename__ = 'AutoProcIntegration'
-
-    autoProcIntegrationId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='DataCollection item')
-    autoProcProgramId = db.Column(db.ForeignKey('AutoProcProgram.autoProcProgramId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='Related program item')
-    startImageNumber = db.Column(db.Integer, info='start image number')
-    endImageNumber = db.Column(db.Integer, info='end image number')
-    refinedDetectorDistance = db.Column(db.Float, info='Refined DataCollection.detectorDistance')
-    refinedXBeam = db.Column(db.Float, info='Refined DataCollection.xBeam')
-    refinedYBeam = db.Column(db.Float, info='Refined DataCollection.yBeam')
-    rotationAxisX = db.Column(db.Float, info='Rotation axis')
-    rotationAxisY = db.Column(db.Float, info='Rotation axis')
-    rotationAxisZ = db.Column(db.Float, info='Rotation axis')
-    beamVectorX = db.Column(db.Float, info='Beam vector')
-    beamVectorY = db.Column(db.Float, info='Beam vector')
-    beamVectorZ = db.Column(db.Float, info='Beam vector')
-    cell_a = db.Column(db.Float, info='Unit cell')
-    cell_b = db.Column(db.Float, info='Unit cell')
-    cell_c = db.Column(db.Float, info='Unit cell')
-    cell_alpha = db.Column(db.Float, info='Unit cell')
-    cell_beta = db.Column(db.Float, info='Unit cell')
-    cell_gamma = db.Column(db.Float, info='Unit cell')
-    recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time')
-    anomalous = db.Column(db.Integer, server_default=db.FetchedValue(), info='boolean type:0 noanoum - 1 anoum')
-
-    AutoProcProgram = db.relationship('AutoProcProgram', primaryjoin='AutoProcIntegration.autoProcProgramId == AutoProcProgram.autoProcProgramId')
-    DataCollection = db.relationship('DataCollection', primaryjoin='AutoProcIntegration.dataCollectionId ==
DataCollection.dataCollectionId') - - - -class AutoProcProgram(db.Model): - __tablename__ = 'AutoProcProgram' - - autoProcProgramId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - processingCommandLine = db.Column(db.String(255), info='Command line for running the automatic processing') - processingPrograms = db.Column(db.String(255), info='Processing programs (comma separated)') - processingStatus = db.Column(db.Integer, info='success (1) / fail (0)') - processingMessage = db.Column(db.String(255), info='warning, error,...') - processingStartTime = db.Column(db.DateTime, info='Processing start time') - processingEndTime = db.Column(db.DateTime, info='Processing end time') - processingEnvironment = db.Column(db.String(255), info='Cpus, Nodes,...') - recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time') - processingJobId = db.Column(db.ForeignKey('ProcessingJob.processingJobId'), index=True) - dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId'), index=True) - - DataCollection = db.relationship('DataCollection', primaryjoin='AutoProcProgram.dataCollectionId == DataCollection.dataCollectionId') - ProcessingJob = db.relationship('ProcessingJob', primaryjoin='AutoProcProgram.processingJobId == ProcessingJob.processingJobId') - - - -class AutoProcProgramAttachment(db.Model): - __tablename__ = 'AutoProcProgramAttachment' - - autoProcProgramAttachmentId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - autoProcProgramId = db.Column(db.ForeignKey('AutoProcProgram.autoProcProgramId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related autoProcProgram item') - fileType = db.Column(db.Enum('Log', 'Result', 'Graph', 'Debug'), info='Type of file Attachment') - fileName = db.Column(db.String(255), info='Attachment filename') - filePath = db.Column(db.String(255), info='Attachment filepath to disk storage') - recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time') - importanceRank = db.Column(db.Integer, info='For the particular autoProcProgramId and fileType, indicate the importance of the attachment. 
Higher numbers are more important') - - AutoProcProgram = db.relationship('AutoProcProgram', primaryjoin='AutoProcProgramAttachment.autoProcProgramId == AutoProcProgram.autoProcProgramId') - - - -class AutoProcProgramMessage(db.Model): - __tablename__ = 'AutoProcProgramMessage' - - autoProcProgramMessageId = db.Column(db.Integer, primary_key=True) - autoProcProgramId = db.Column(db.ForeignKey('AutoProcProgram.autoProcProgramId'), index=True) - recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - severity = db.Column(db.Enum('ERROR', 'WARNING', 'INFO')) - message = db.Column(db.String(200)) - description = db.Column(db.Text) - - AutoProcProgram = db.relationship('AutoProcProgram', primaryjoin='AutoProcProgramMessage.autoProcProgramId == AutoProcProgram.autoProcProgramId') - - - -class AutoProcScaling(db.Model): - __tablename__ = 'AutoProcScaling' - __table_args__ = ( - db.Index('AutoProcScalingIdx1', 'autoProcScalingId', 'autoProcId'), - ) - - autoProcScalingId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - autoProcId = db.Column(db.ForeignKey('AutoProc.autoProcId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='Related autoProc item (used by foreign key)') - recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time') - - AutoProc = db.relationship('AutoProc', primaryjoin='AutoProcScaling.autoProcId == AutoProc.autoProcId') - - - -class AutoProcScalingStatistic(db.Model): - __tablename__ = 'AutoProcScalingStatistics' - - autoProcScalingStatisticsId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - autoProcScalingId = db.Column(db.ForeignKey('AutoProcScaling.autoProcScalingId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='Related autoProcScaling item (used by foreign key)') - scalingStatisticsType = db.Column(db.Enum('overall', 'innerShell', 'outerShell'), nullable=False, index=True, server_default=db.FetchedValue(), info='Scaling statistics type') - comments = db.Column(db.String(255), info='Comments...') - resolutionLimitLow = db.Column(db.Float, info='Low resolution limit') - resolutionLimitHigh = db.Column(db.Float, info='High resolution limit') - rMerge = db.Column(db.Float, info='Rmerge') - rMeasWithinIPlusIMinus = db.Column(db.Float, info='Rmeas (within I+/I-)') - rMeasAllIPlusIMinus = db.Column(db.Float, info='Rmeas (all I+ & I-)') - rPimWithinIPlusIMinus = db.Column(db.Float, info='Rpim (within I+/I-) ') - rPimAllIPlusIMinus = db.Column(db.Float, info='Rpim (all I+ & I-)') - fractionalPartialBias = db.Column(db.Float, info='Fractional partial bias') - nTotalObservations = db.Column(db.Integer, info='Total number of observations') - nTotalUniqueObservations = db.Column(db.Integer, info='Total number unique') - meanIOverSigI = db.Column(db.Float, info='Mean((I)/sd(I))') - completeness = db.Column(db.Float, info='Completeness') - multiplicity = db.Column(db.Float, info='Multiplicity') - anomalousCompleteness = db.Column(db.Float, info='Anomalous completeness') - anomalousMultiplicity = db.Column(db.Float, info='Anomalous multiplicity') - recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time') - anomalous = db.Column(db.Integer, server_default=db.FetchedValue(), info='boolean type:0 noanoum - 1 anoum') - ccHalf = db.Column(db.Float, info='information from XDS') - ccAnomalous = db.Column(db.Float) - - AutoProcScaling = db.relationship('AutoProcScaling', 
primaryjoin='AutoProcScalingStatistic.autoProcScalingId == AutoProcScaling.autoProcScalingId') - - - -class AutoProcScalingHasInt(db.Model): - __tablename__ = 'AutoProcScaling_has_Int' - __table_args__ = ( - db.Index('AutoProcScalingHasInt_FKIndex3', 'autoProcScalingId', 'autoProcIntegrationId'), - ) - - autoProcScaling_has_IntId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - autoProcScalingId = db.Column(db.ForeignKey('AutoProcScaling.autoProcScalingId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='AutoProcScaling item') - autoProcIntegrationId = db.Column(db.ForeignKey('AutoProcIntegration.autoProcIntegrationId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='AutoProcIntegration item') - recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time') - - AutoProcIntegration = db.relationship('AutoProcIntegration', primaryjoin='AutoProcScalingHasInt.autoProcIntegrationId == AutoProcIntegration.autoProcIntegrationId') - AutoProcScaling = db.relationship('AutoProcScaling', primaryjoin='AutoProcScalingHasInt.autoProcScalingId == AutoProcScaling.autoProcScalingId') - - - -class AutoProcStatus(db.Model): - __tablename__ = 'AutoProcStatus' - - autoProcStatusId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - autoProcIntegrationId = db.Column(db.ForeignKey('AutoProcIntegration.autoProcIntegrationId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - step = db.Column(db.Enum('Indexing', 'Integration', 'Correction', 'Scaling', 'Importing'), nullable=False, info='autoprocessing step') - status = db.Column(db.Enum('Launched', 'Successful', 'Failed'), nullable=False, info='autoprocessing status') - comments = db.Column(db.String(1024), info='comments') - bltimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - - AutoProcIntegration = db.relationship('AutoProcIntegration', primaryjoin='AutoProcStatus.autoProcIntegrationId == AutoProcIntegration.autoProcIntegrationId') - - - -class BFComponent(db.Model): - __tablename__ = 'BF_component' - - componentId = db.Column(db.Integer, primary_key=True) - systemId = db.Column(db.ForeignKey('BF_system.systemId'), index=True) - name = db.Column(db.String(100)) - description = db.Column(db.String(200)) - - BF_system = db.relationship('BFSystem', primaryjoin='BFComponent.systemId == BFSystem.systemId') - - - -class BFComponentBeamline(db.Model): - __tablename__ = 'BF_component_beamline' - - component_beamlineId = db.Column(db.Integer, primary_key=True) - componentId = db.Column(db.ForeignKey('BF_component.componentId'), index=True) - beamlinename = db.Column(db.String(20)) - - BF_component = db.relationship('BFComponent', primaryjoin='BFComponentBeamline.componentId == BFComponent.componentId') - - - -class BFFault(db.Model): - __tablename__ = 'BF_fault' - - faultId = db.Column(db.Integer, primary_key=True) - sessionId = db.Column(db.ForeignKey('BLSession.sessionId'), nullable=False, index=True) - owner = db.Column(db.String(50)) - subcomponentId = db.Column(db.ForeignKey('BF_subcomponent.subcomponentId'), index=True) - starttime = db.Column(db.DateTime) - endtime = db.Column(db.DateTime) - beamtimelost = db.Column(db.Integer) - beamtimelost_starttime = db.Column(db.DateTime) - beamtimelost_endtime = db.Column(db.DateTime) - title = db.Column(db.String(200)) - description = db.Column(db.Text) - resolved = db.Column(db.Integer) - resolution = db.Column(db.Text) - attachment = 
db.Column(db.String(200)) - eLogId = db.Column(db.Integer) - assignee = db.Column(db.String(50)) - personId = db.Column(db.ForeignKey('Person.personId'), index=True) - assigneeId = db.Column(db.ForeignKey('Person.personId'), index=True) - - Person = db.relationship('Person', primaryjoin='BFFault.assigneeId == Person.personId') - Person1 = db.relationship('Person', primaryjoin='BFFault.personId == Person.personId') - BLSession = db.relationship('BLSession', primaryjoin='BFFault.sessionId == BLSession.sessionId') - BF_subcomponent = db.relationship('BFSubcomponent', primaryjoin='BFFault.subcomponentId == BFSubcomponent.subcomponentId') - - - -class BFSubcomponent(db.Model): - __tablename__ = 'BF_subcomponent' - - subcomponentId = db.Column(db.Integer, primary_key=True) - componentId = db.Column(db.ForeignKey('BF_component.componentId'), index=True) - name = db.Column(db.String(100)) - description = db.Column(db.String(200)) - - BF_component = db.relationship('BFComponent', primaryjoin='BFSubcomponent.componentId == BFComponent.componentId') - - - -class BFSubcomponentBeamline(db.Model): - __tablename__ = 'BF_subcomponent_beamline' - - subcomponent_beamlineId = db.Column(db.Integer, primary_key=True) - subcomponentId = db.Column(db.ForeignKey('BF_subcomponent.subcomponentId'), index=True) - beamlinename = db.Column(db.String(20)) - - BF_subcomponent = db.relationship('BFSubcomponent', primaryjoin='BFSubcomponentBeamline.subcomponentId == BFSubcomponent.subcomponentId') - - - -class BFSystem(db.Model): - __tablename__ = 'BF_system' - - systemId = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(100)) - description = db.Column(db.String(200)) - - - -class BFSystemBeamline(db.Model): - __tablename__ = 'BF_system_beamline' - - system_beamlineId = db.Column(db.Integer, primary_key=True) - systemId = db.Column(db.ForeignKey('BF_system.systemId'), index=True) - beamlineName = db.Column(db.String(20)) - - BF_system = db.relationship('BFSystem', primaryjoin='BFSystemBeamline.systemId == BFSystem.systemId') - - - -class BLSample(db.Model): - __tablename__ = 'BLSample' - __table_args__ = ( - db.Index('crystalId', 'crystalId', 'containerId'), - ) - - blSampleId = db.Column(db.Integer, primary_key=True) - diffractionPlanId = db.Column(db.ForeignKey('DiffractionPlan.diffractionPlanId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - crystalId = db.Column(db.ForeignKey('Crystal.crystalId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - containerId = db.Column(db.ForeignKey('Container.containerId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - name = db.Column(db.String(45), index=True) - code = db.Column(db.String(45)) - location = db.Column(db.String(45)) - holderLength = db.Column(db.Float(asdecimal=True)) - loopLength = db.Column(db.Float(asdecimal=True)) - loopType = db.Column(db.String(45)) - wireWidth = db.Column(db.Float(asdecimal=True)) - comments = db.Column(db.String(1024)) - completionStage = db.Column(db.String(45)) - structureStage = db.Column(db.String(45)) - publicationStage = db.Column(db.String(45)) - publicationComments = db.Column(db.String(255)) - blSampleStatus = db.Column(db.String(20), index=True) - isInSampleChanger = db.Column(db.Integer) - lastKnownCenteringPosition = db.Column(db.String(255)) - POSITIONID = db.Column(db.Integer) - recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time') - SMILES = db.Column(db.String(400), info='the symbolic description of the 
structure of a chemical compound') - blSubSampleId = db.Column(db.ForeignKey('BLSubSample.blSubSampleId'), index=True) - lastImageURL = db.Column(db.String(255)) - screenComponentGroupId = db.Column(db.ForeignKey('ScreenComponentGroup.screenComponentGroupId'), index=True) - volume = db.Column(db.Float) - dimension1 = db.Column(db.Float(asdecimal=True)) - dimension2 = db.Column(db.Float(asdecimal=True)) - dimension3 = db.Column(db.Float(asdecimal=True)) - shape = db.Column(db.String(15)) - packingFraction = db.Column(db.Float) - preparationTemeprature = db.Column(db.Integer, info='Sample preparation temperature, Units: kelvin') - preparationHumidity = db.Column(db.Float, info='Sample preparation humidity, Units: %') - blottingTime = db.Column(db.Integer, info='Blotting time, Units: sec') - blottingForce = db.Column(db.Float, info='Force used when blotting sample, Units: N?') - blottingDrainTime = db.Column(db.Integer, info='Time sample left to drain after blotting, Units: sec') - support = db.Column(db.String(50), info='Sample support material') - subLocation = db.Column(db.SmallInteger, info="Indicates the sample's location on a multi-sample pin, where 1 is closest to the pin base") - - BLSubSample = db.relationship('BLSubSample', primaryjoin='BLSample.blSubSampleId == BLSubSample.blSubSampleId') - Container = db.relationship('Container', primaryjoin='BLSample.containerId == Container.containerId') - Crystal = db.relationship('Crystal', primaryjoin='BLSample.crystalId == Crystal.crystalId') - DiffractionPlan = db.relationship('DiffractionPlan', primaryjoin='BLSample.diffractionPlanId == DiffractionPlan.diffractionPlanId') - ScreenComponentGroup = db.relationship('ScreenComponentGroup', primaryjoin='BLSample.screenComponentGroupId == ScreenComponentGroup.screenComponentGroupId') - Project = db.relationship('Project', secondary='Project_has_BLSample') - - - -class BLSampleGroup(db.Model): - __tablename__ = 'BLSampleGroup' - - blSampleGroupId = db.Column(db.Integer, primary_key=True) - - - -class BLSampleGroupHasBLSample(db.Model): - __tablename__ = 'BLSampleGroup_has_BLSample' - - blSampleGroupId = db.Column(db.ForeignKey('BLSampleGroup.blSampleGroupId'), primary_key=True, nullable=False) - blSampleId = db.Column(db.ForeignKey('BLSample.blSampleId'), primary_key=True, nullable=False, index=True) - groupOrder = db.Column(db.Integer) - type = db.Column(db.Enum('background', 'container', 'sample', 'calibrant')) - - BLSampleGroup = db.relationship('BLSampleGroup', primaryjoin='BLSampleGroupHasBLSample.blSampleGroupId == BLSampleGroup.blSampleGroupId') - BLSample = db.relationship('BLSample', primaryjoin='BLSampleGroupHasBLSample.blSampleId == BLSample.blSampleId') - - - -class BLSampleImage(db.Model): - __tablename__ = 'BLSampleImage' - - blSampleImageId = db.Column(db.Integer, primary_key=True) - blSampleId = db.Column(db.ForeignKey('BLSample.blSampleId'), nullable=False, index=True) - micronsPerPixelX = db.Column(db.Float) - micronsPerPixelY = db.Column(db.Float) - imageFullPath = db.Column(db.String(255)) - blSampleImageScoreId = db.Column(db.ForeignKey('BLSampleImageScore.blSampleImageScoreId', onupdate='CASCADE'), index=True) - comments = db.Column(db.String(255)) - blTimeStamp = db.Column(db.DateTime) - containerInspectionId = db.Column(db.ForeignKey('ContainerInspection.containerInspectionId'), index=True) - modifiedTimeStamp = db.Column(db.DateTime) - - BLSample = db.relationship('BLSample', primaryjoin='BLSampleImage.blSampleId == BLSample.blSampleId') - BLSampleImageScore = 
db.relationship('BLSampleImageScore', primaryjoin='BLSampleImage.blSampleImageScoreId == BLSampleImageScore.blSampleImageScoreId') - ContainerInspection = db.relationship('ContainerInspection', primaryjoin='BLSampleImage.containerInspectionId == ContainerInspection.containerInspectionId') - - - -class BLSampleImageAnalysi(db.Model): - __tablename__ = 'BLSampleImageAnalysis' - - blSampleImageAnalysisId = db.Column(db.Integer, primary_key=True) - blSampleImageId = db.Column(db.ForeignKey('BLSampleImage.blSampleImageId'), index=True) - oavSnapshotBefore = db.Column(db.String(255)) - oavSnapshotAfter = db.Column(db.String(255)) - deltaX = db.Column(db.Integer) - deltaY = db.Column(db.Integer) - goodnessOfFit = db.Column(db.Float) - scaleFactor = db.Column(db.Float) - resultCode = db.Column(db.String(15)) - matchStartTimeStamp = db.Column(db.DateTime, server_default=db.FetchedValue()) - matchEndTimeStamp = db.Column(db.DateTime) - - BLSampleImage = db.relationship('BLSampleImage', primaryjoin='BLSampleImageAnalysi.blSampleImageId == BLSampleImage.blSampleImageId') - - - -class BLSampleImageAutoScoreClas(db.Model): - __tablename__ = 'BLSampleImageAutoScoreClass' - - blSampleImageAutoScoreClassId = db.Column(db.Integer, primary_key=True) - blSampleImageAutoScoreSchemaId = db.Column(db.ForeignKey('BLSampleImageAutoScoreSchema.blSampleImageAutoScoreSchemaId', onupdate='CASCADE'), index=True) - scoreClass = db.Column(db.String(15), nullable=False, info='Thing being scored e.g. crystal, precipitant') - - BLSampleImageAutoScoreSchema = db.relationship('BLSampleImageAutoScoreSchema', primaryjoin='BLSampleImageAutoScoreClas.blSampleImageAutoScoreSchemaId == BLSampleImageAutoScoreSchema.blSampleImageAutoScoreSchemaId') - - - -class BLSampleImageAutoScoreSchema(db.Model): - __tablename__ = 'BLSampleImageAutoScoreSchema' - - blSampleImageAutoScoreSchemaId = db.Column(db.Integer, primary_key=True) - schemaName = db.Column(db.String(25), nullable=False, info='Name of the schema e.g. 
Hampton, MARCO') - enabled = db.Column(db.Integer, server_default=db.FetchedValue(), info='Whether this schema is enabled (could be configurable in the UI)') - - - -class BLSampleImageMeasurement(db.Model): - __tablename__ = 'BLSampleImageMeasurement' - - blSampleImageMeasurementId = db.Column(db.Integer, primary_key=True) - blSampleImageId = db.Column(db.ForeignKey('BLSampleImage.blSampleImageId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - blSubSampleId = db.Column(db.ForeignKey('BLSubSample.blSubSampleId'), index=True) - startPosX = db.Column(db.Float(asdecimal=True)) - startPosY = db.Column(db.Float(asdecimal=True)) - endPosX = db.Column(db.Float(asdecimal=True)) - endPosY = db.Column(db.Float(asdecimal=True)) - blTimeStamp = db.Column(db.DateTime) - - BLSampleImage = db.relationship('BLSampleImage', primaryjoin='BLSampleImageMeasurement.blSampleImageId == BLSampleImage.blSampleImageId') - BLSubSample = db.relationship('BLSubSample', primaryjoin='BLSampleImageMeasurement.blSubSampleId == BLSubSample.blSubSampleId') - - - -class BLSampleImageScore(db.Model): - __tablename__ = 'BLSampleImageScore' - - blSampleImageScoreId = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(45)) - score = db.Column(db.Float) - colour = db.Column(db.String(15)) - - - -class BLSampleImageHasAutoScoreClas(db.Model): - __tablename__ = 'BLSampleImage_has_AutoScoreClass' - - blSampleImageId = db.Column(db.ForeignKey('BLSampleImage.blSampleImageId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False) - blSampleImageAutoScoreClassId = db.Column(db.ForeignKey('BLSampleImageAutoScoreClass.blSampleImageAutoScoreClassId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True) - probability = db.Column(db.Float) - - BLSampleImageAutoScoreClas = db.relationship('BLSampleImageAutoScoreClas', primaryjoin='BLSampleImageHasAutoScoreClas.blSampleImageAutoScoreClassId == BLSampleImageAutoScoreClas.blSampleImageAutoScoreClassId') - BLSampleImage = db.relationship('BLSampleImage', primaryjoin='BLSampleImageHasAutoScoreClas.blSampleImageId == BLSampleImage.blSampleImageId') - - - -class BLSampleTypeHasComponent(db.Model): - __tablename__ = 'BLSampleType_has_Component' - - blSampleTypeId = db.Column(db.ForeignKey('Crystal.crystalId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False) - componentId = db.Column(db.ForeignKey('Protein.proteinId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True) - abundance = db.Column(db.Float) - - Crystal = db.relationship('Crystal', primaryjoin='BLSampleTypeHasComponent.blSampleTypeId == Crystal.crystalId') - Protein = db.relationship('Protein', primaryjoin='BLSampleTypeHasComponent.componentId == Protein.proteinId') - - - -class BLSampleHasDataCollectionPlan(db.Model): - __tablename__ = 'BLSample_has_DataCollectionPlan' - - blSampleId = db.Column(db.ForeignKey('BLSample.blSampleId'), primary_key=True, nullable=False) - dataCollectionPlanId = db.Column(db.ForeignKey('DiffractionPlan.diffractionPlanId'), primary_key=True, nullable=False, index=True) - planOrder = db.Column(db.Integer) - - BLSample = db.relationship('BLSample', primaryjoin='BLSampleHasDataCollectionPlan.blSampleId == BLSample.blSampleId') - DiffractionPlan = db.relationship('DiffractionPlan', primaryjoin='BLSampleHasDataCollectionPlan.dataCollectionPlanId == DiffractionPlan.diffractionPlanId') - - - -class BLSampleHasEnergyScan(db.Model): - __tablename__ = 
'BLSample_has_EnergyScan' - - blSampleId = db.Column(db.ForeignKey('BLSample.blSampleId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - energyScanId = db.Column(db.ForeignKey('EnergyScan.energyScanId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - blSampleHasEnergyScanId = db.Column(db.Integer, primary_key=True) - - BLSample = db.relationship('BLSample', primaryjoin='BLSampleHasEnergyScan.blSampleId == BLSample.blSampleId') - EnergyScan = db.relationship('EnergyScan', primaryjoin='BLSampleHasEnergyScan.energyScanId == EnergyScan.energyScanId') - - - -class BLSession(db.Model): - __tablename__ = 'BLSession' - - sessionId = db.Column(db.Integer, primary_key=True) - beamLineSetupId = db.Column(db.ForeignKey('BeamLineSetup.beamLineSetupId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - proposalId = db.Column(db.ForeignKey('Proposal.proposalId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - beamCalendarId = db.Column(db.ForeignKey('BeamCalendar.beamCalendarId'), index=True) - projectCode = db.Column(db.String(45)) - startDate = db.Column(db.DateTime, index=True) - endDate = db.Column(db.DateTime, index=True) - beamLineName = db.Column(db.String(45), index=True) - scheduled = db.Column(db.Integer) - nbShifts = db.Column(db.Integer) - comments = db.Column(db.String(2000)) - beamLineOperator = db.Column(db.String(45)) - bltimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - visit_number = db.Column(db.Integer, server_default=db.FetchedValue()) - usedFlag = db.Column(db.Integer, info='indicates if session has Datacollections or XFE or EnergyScans attached') - sessionTitle = db.Column(db.String(255), info='fx accounts only') - structureDeterminations = db.Column(db.Float) - dewarTransport = db.Column(db.Float) - databackupFrance = db.Column(db.Float, info='data backup and express delivery France') - databackupEurope = db.Column(db.Float, info='data backup and express delivery Europe') - expSessionPk = db.Column(db.Integer, info='smis session Pk ') - operatorSiteNumber = db.Column(db.String(10), index=True, info='matricule site') - lastUpdate = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='last update timestamp: by default the end of the session, the last collect...') - protectedData = db.Column(db.String(1024), info='indicates if the data are protected or not') - externalId = db.Column(db.BINARY(16)) - archived = db.Column(db.Integer, server_default=db.FetchedValue(), info='The data for the session is archived and no longer available on disk') - - BeamCalendar = db.relationship('BeamCalendar', primaryjoin='BLSession.beamCalendarId == BeamCalendar.beamCalendarId') - BeamLineSetup = db.relationship('BeamLineSetup', primaryjoin='BLSession.beamLineSetupId == BeamLineSetup.beamLineSetupId') - Proposal = db.relationship('Proposal', primaryjoin='BLSession.proposalId == Proposal.proposalId') - Shipping = db.relationship('Shipping', secondary='ShippingHasSession') - - - -class BLSessionHasSCPosition(db.Model): - __tablename__ = 'BLSession_has_SCPosition' - - blsessionhasscpositionid = db.Column(db.Integer, primary_key=True) - blsessionid = db.Column(db.ForeignKey('BLSession.sessionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - scContainer = db.Column(db.SmallInteger, info='Position of container within sample changer') - 
containerPosition = db.Column(db.SmallInteger, info='Position of sample within container') - - BLSession = db.relationship('BLSession', primaryjoin='BLSessionHasSCPosition.blsessionid == BLSession.sessionId') - - - -class BLSubSample(db.Model): - __tablename__ = 'BLSubSample' - - blSubSampleId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - blSampleId = db.Column(db.ForeignKey('BLSample.blSampleId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='sample') - diffractionPlanId = db.Column(db.ForeignKey('DiffractionPlan.diffractionPlanId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='eventually diffractionPlan') - blSampleImageId = db.Column(db.ForeignKey('BLSampleImage.blSampleImageId'), index=True) - positionId = db.Column(db.ForeignKey('Position.positionId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='position of the subsample') - position2Id = db.Column(db.ForeignKey('Position.positionId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - motorPositionId = db.Column(db.ForeignKey('MotorPosition.motorPositionId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='motor position') - blSubSampleUUID = db.Column(db.String(45), info='uuid of the blsubsample') - imgFileName = db.Column(db.String(255), info='image filename') - imgFilePath = db.Column(db.String(1024), info='url image') - comments = db.Column(db.String(1024), info='comments') - recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time') - - BLSample = db.relationship('BLSample', primaryjoin='BLSubSample.blSampleId == BLSample.blSampleId') - BLSampleImage = db.relationship('BLSampleImage', primaryjoin='BLSubSample.blSampleImageId == BLSampleImage.blSampleImageId') - DiffractionPlan = db.relationship('DiffractionPlan', primaryjoin='BLSubSample.diffractionPlanId == DiffractionPlan.diffractionPlanId') - MotorPosition = db.relationship('MotorPosition', primaryjoin='BLSubSample.motorPositionId == MotorPosition.motorPositionId') - Position = db.relationship('Position', primaryjoin='BLSubSample.position2Id == Position.positionId') - Position1 = db.relationship('Position', primaryjoin='BLSubSample.positionId == Position.positionId') - - - -class BeamAperture(db.Model): - __tablename__ = 'BeamApertures' - - beamAperturesid = db.Column(db.Integer, primary_key=True) - beamlineStatsId = db.Column(db.ForeignKey('BeamlineStats.beamlineStatsId', ondelete='CASCADE'), index=True) - flux = db.Column(db.Float(asdecimal=True)) - x = db.Column(db.Float) - y = db.Column(db.Float) - apertureSize = db.Column(db.SmallInteger) - - BeamlineStat = db.relationship('BeamlineStat', primaryjoin='BeamAperture.beamlineStatsId == BeamlineStat.beamlineStatsId') - - - -class BeamCalendar(db.Model): - __tablename__ = 'BeamCalendar' - - beamCalendarId = db.Column(db.Integer, primary_key=True) - run = db.Column(db.String(7), nullable=False) - beamStatus = db.Column(db.String(24), nullable=False) - startDate = db.Column(db.DateTime, nullable=False) - endDate = db.Column(db.DateTime, nullable=False) - - - -class BeamCentre(db.Model): - __tablename__ = 'BeamCentres' - - beamCentresid = db.Column(db.Integer, primary_key=True) - beamlineStatsId = db.Column(db.ForeignKey('BeamlineStats.beamlineStatsId', ondelete='CASCADE'), index=True) - x = db.Column(db.Float) - y = db.Column(db.Float) - zoom = db.Column(db.Integer) - - BeamlineStat = db.relationship('BeamlineStat', 
primaryjoin='BeamCentre.beamlineStatsId == BeamlineStat.beamlineStatsId') - - - -class BeamLineSetup(db.Model): - __tablename__ = 'BeamLineSetup' - - beamLineSetupId = db.Column(db.Integer, primary_key=True) - detectorId = db.Column(db.ForeignKey('Detector.detectorId'), index=True) - synchrotronMode = db.Column(db.String(255)) - undulatorType1 = db.Column(db.String(45)) - undulatorType2 = db.Column(db.String(45)) - undulatorType3 = db.Column(db.String(45)) - focalSpotSizeAtSample = db.Column(db.Float) - focusingOptic = db.Column(db.String(255)) - beamDivergenceHorizontal = db.Column(db.Float) - beamDivergenceVertical = db.Column(db.Float) - polarisation = db.Column(db.Float) - monochromatorType = db.Column(db.String(255)) - setupDate = db.Column(db.DateTime) - synchrotronName = db.Column(db.String(255)) - maxExpTimePerDataCollection = db.Column(db.Float(asdecimal=True)) - maxExposureTimePerImage = db.Column(db.Float, info='unit: seconds') - minExposureTimePerImage = db.Column(db.Float(asdecimal=True)) - goniostatMaxOscillationSpeed = db.Column(db.Float(asdecimal=True)) - goniostatMaxOscillationWidth = db.Column(db.Float(asdecimal=True), info='unit: degrees') - goniostatMinOscillationWidth = db.Column(db.Float(asdecimal=True)) - maxTransmission = db.Column(db.Float(asdecimal=True), info='unit: percentage') - minTransmission = db.Column(db.Float(asdecimal=True)) - recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time') - CS = db.Column(db.Float, info='Spherical Aberration, Units: mm?') - beamlineName = db.Column(db.String(50), info='Beamline that this setup relates to') - beamSizeXMin = db.Column(db.Float, info='unit: um') - beamSizeXMax = db.Column(db.Float, info='unit: um') - beamSizeYMin = db.Column(db.Float, info='unit: um') - beamSizeYMax = db.Column(db.Float, info='unit: um') - energyMin = db.Column(db.Float, info='unit: eV') - energyMax = db.Column(db.Float, info='unit: eV') - omegaMin = db.Column(db.Float, info='unit: degrees') - omegaMax = db.Column(db.Float, info='unit: degrees') - kappaMin = db.Column(db.Float, info='unit: degrees') - kappaMax = db.Column(db.Float, info='unit: degrees') - phiMin = db.Column(db.Float, info='unit: degrees') - phiMax = db.Column(db.Float, info='unit: degrees') - active = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue()) - numberOfImagesMax = db.Column(db.Integer) - numberOfImagesMin = db.Column(db.Integer) - boxSizeXMin = db.Column(db.Float(asdecimal=True), info='For gridscans, unit: um') - boxSizeXMax = db.Column(db.Float(asdecimal=True), info='For gridscans, unit: um') - boxSizeYMin = db.Column(db.Float(asdecimal=True), info='For gridscans, unit: um') - boxSizeYMax = db.Column(db.Float(asdecimal=True), info='For gridscans, unit: um') - monoBandwidthMin = db.Column(db.Float(asdecimal=True), info='unit: percentage') - monoBandwidthMax = db.Column(db.Float(asdecimal=True), info='unit: percentage') - - Detector = db.relationship('Detector', primaryjoin='BeamLineSetup.detectorId == Detector.detectorId') - - - -class BeamlineAction(db.Model): - __tablename__ = 'BeamlineAction' - - beamlineActionId = db.Column(db.Integer, primary_key=True) - sessionId = db.Column(db.ForeignKey('BLSession.sessionId'), index=True) - startTimestamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - endTimestamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - message = db.Column(db.String(255)) - parameter = 
db.Column(db.String(50)) - value = db.Column(db.String(30)) - loglevel = db.Column(db.Enum('DEBUG', 'CRITICAL', 'INFO')) - status = db.Column(db.Enum('PAUSED', 'RUNNING', 'TERMINATED', 'COMPLETE', 'ERROR', 'EPICSFAIL')) - - BLSession = db.relationship('BLSession', primaryjoin='BeamlineAction.sessionId == BLSession.sessionId') - - - -class BeamlineStat(db.Model): - __tablename__ = 'BeamlineStats' - - beamlineStatsId = db.Column(db.Integer, primary_key=True) - beamline = db.Column(db.String(10)) - recordTimeStamp = db.Column(db.DateTime) - ringCurrent = db.Column(db.Float) - energy = db.Column(db.Float) - gony = db.Column(db.Float) - beamW = db.Column(db.Float) - beamH = db.Column(db.Float) - flux = db.Column(db.Float(asdecimal=True)) - scanFileW = db.Column(db.String(255)) - scanFileH = db.Column(db.String(255)) - - - -class Buffer(db.Model): - __tablename__ = 'Buffer' - - bufferId = db.Column(db.Integer, primary_key=True) - BLSESSIONID = db.Column(db.Integer) - safetyLevelId = db.Column(db.ForeignKey('SafetyLevel.safetyLevelId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - name = db.Column(db.String(45)) - acronym = db.Column(db.String(45)) - pH = db.Column(db.String(45)) - composition = db.Column(db.String(45)) - comments = db.Column(db.String(512)) - proposalId = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue()) - - SafetyLevel = db.relationship('SafetyLevel', primaryjoin='Buffer.safetyLevelId == SafetyLevel.safetyLevelId') - - - -class BufferHasAdditive(db.Model): - __tablename__ = 'BufferHasAdditive' - - bufferHasAdditiveId = db.Column(db.Integer, primary_key=True) - bufferId = db.Column(db.ForeignKey('Buffer.bufferId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - additiveId = db.Column(db.ForeignKey('Additive.additiveId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - measurementUnitId = db.Column(db.ForeignKey('MeasurementUnit.measurementUnitId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - quantity = db.Column(db.String(45)) - - Additive = db.relationship('Additive', primaryjoin='BufferHasAdditive.additiveId == Additive.additiveId') - Buffer = db.relationship('Buffer', primaryjoin='BufferHasAdditive.bufferId == Buffer.bufferId') - MeasurementUnit = db.relationship('MeasurementUnit', primaryjoin='BufferHasAdditive.measurementUnitId == MeasurementUnit.measurementUnitId') - - - -class CTF(db.Model): - __tablename__ = 'CTF' - - ctfId = db.Column(db.Integer, primary_key=True) - motionCorrectionId = db.Column(db.ForeignKey('MotionCorrection.motionCorrectionId'), index=True) - autoProcProgramId = db.Column(db.ForeignKey('AutoProcProgram.autoProcProgramId'), index=True) - boxSizeX = db.Column(db.Float, info='Box size in x, Units: pixels') - boxSizeY = db.Column(db.Float, info='Box size in y, Units: pixels') - minResolution = db.Column(db.Float, info='Minimum resolution for CTF, Units: A') - maxResolution = db.Column(db.Float, info='Units: A') - minDefocus = db.Column(db.Float, info='Units: A') - maxDefocus = db.Column(db.Float, info='Units: A') - defocusStepSize = db.Column(db.Float, info='Units: A') - astigmatism = db.Column(db.Float, info='Units: A') - astigmatismAngle = db.Column(db.Float, info='Units: deg?') - estimatedResolution = db.Column(db.Float, info='Units: A') - estimatedDefocus = db.Column(db.Float, info='Units: A') - amplitudeContrast = db.Column(db.Float, info='Units: %?') - ccValue = db.Column(db.Float, info='Correlation value') - fftTheoreticalFullPath = db.Column(db.String(255), 
info='Full path to the jpg image of the simulated FFT') - comments = db.Column(db.String(255)) - - AutoProcProgram = db.relationship('AutoProcProgram', primaryjoin='CTF.autoProcProgramId == AutoProcProgram.autoProcProgramId') - MotionCorrection = db.relationship('MotionCorrection', primaryjoin='CTF.motionCorrectionId == MotionCorrection.motionCorrectionId') - - - -class CalendarHash(db.Model): - __tablename__ = 'CalendarHash' - - calendarHashId = db.Column(db.Integer, primary_key=True) - ckey = db.Column(db.String(50)) - hash = db.Column(db.String(128)) - beamline = db.Column(db.Integer) - - - -class ComponentLattice(db.Model): - __tablename__ = 'ComponentLattice' - - componentLatticeId = db.Column(db.Integer, primary_key=True) - componentId = db.Column(db.ForeignKey('Protein.proteinId'), index=True) - spaceGroup = db.Column(db.String(20)) - cell_a = db.Column(db.Float(asdecimal=True)) - cell_b = db.Column(db.Float(asdecimal=True)) - cell_c = db.Column(db.Float(asdecimal=True)) - cell_alpha = db.Column(db.Float(asdecimal=True)) - cell_beta = db.Column(db.Float(asdecimal=True)) - cell_gamma = db.Column(db.Float(asdecimal=True)) - - Protein = db.relationship('Protein', primaryjoin='ComponentLattice.componentId == Protein.proteinId') - - - -class ComponentSubType(db.Model): - __tablename__ = 'ComponentSubType' - - componentSubTypeId = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(31), nullable=False) - hasPh = db.Column(db.Integer, server_default=db.FetchedValue()) - - - -class ComponentType(db.Model): - __tablename__ = 'ComponentType' - - componentTypeId = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(31), nullable=False) - - - -t_Component_has_SubType = db.Table( - 'Component_has_SubType', - db.Column('componentId', db.ForeignKey('Protein.proteinId', ondelete='CASCADE'), primary_key=True, nullable=False), - db.Column('componentSubTypeId', db.ForeignKey('ComponentSubType.componentSubTypeId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -class ConcentrationType(db.Model): - __tablename__ = 'ConcentrationType' - - concentrationTypeId = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(31), nullable=False) - symbol = db.Column(db.String(8), nullable=False) - - - -class Container(db.Model): - __tablename__ = 'Container' - - containerId = db.Column(db.Integer, primary_key=True) - dewarId = db.Column(db.ForeignKey('Dewar.dewarId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - code = db.Column(db.String(45)) - containerType = db.Column(db.String(20)) - capacity = db.Column(db.Integer) - sampleChangerLocation = db.Column(db.String(20)) - containerStatus = db.Column(db.String(45), index=True) - bltimeStamp = db.Column(db.DateTime) - beamlineLocation = db.Column(db.String(20), index=True) - screenId = db.Column(db.ForeignKey('Screen.screenId'), index=True) - scheduleId = db.Column(db.ForeignKey('Schedule.scheduleId'), index=True) - barcode = db.Column(db.String(45), unique=True) - imagerId = db.Column(db.ForeignKey('Imager.imagerId'), index=True) - sessionId = db.Column(db.ForeignKey('BLSession.sessionId', ondelete='SET NULL', onupdate='CASCADE'), index=True) - ownerId = db.Column(db.ForeignKey('Person.personId'), index=True) - requestedImagerId = db.Column(db.ForeignKey('Imager.imagerId'), index=True) - requestedReturn = db.Column(db.Integer, server_default=db.FetchedValue(), info='True for requesting return, False means container will be disposed') - comments = 
db.Column(db.String(255)) - experimentType = db.Column(db.String(20)) - storageTemperature = db.Column(db.Float) - containerRegistryId = db.Column(db.ForeignKey('ContainerRegistry.containerRegistryId'), index=True) - - ContainerRegistry = db.relationship('ContainerRegistry', primaryjoin='Container.containerRegistryId == ContainerRegistry.containerRegistryId') - Dewar = db.relationship('Dewar', primaryjoin='Container.dewarId == Dewar.dewarId') - Imager = db.relationship('Imager', primaryjoin='Container.imagerId == Imager.imagerId') - Person = db.relationship('Person', primaryjoin='Container.ownerId == Person.personId') - Imager1 = db.relationship('Imager', primaryjoin='Container.requestedImagerId == Imager.imagerId') - Schedule = db.relationship('Schedule', primaryjoin='Container.scheduleId == Schedule.scheduleId') - Screen = db.relationship('Screen', primaryjoin='Container.screenId == Screen.screenId') - BLSession = db.relationship('BLSession', primaryjoin='Container.sessionId == BLSession.sessionId') - - - -class ContainerHistory(db.Model): - __tablename__ = 'ContainerHistory' - - containerHistoryId = db.Column(db.Integer, primary_key=True) - containerId = db.Column(db.ForeignKey('Container.containerId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - location = db.Column(db.String(45)) - blTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - status = db.Column(db.String(45)) - beamlineName = db.Column(db.String(20)) - - Container = db.relationship('Container', primaryjoin='ContainerHistory.containerId == Container.containerId') - - - -class ContainerInspection(db.Model): - __tablename__ = 'ContainerInspection' - - containerInspectionId = db.Column(db.Integer, primary_key=True) - containerId = db.Column(db.ForeignKey('Container.containerId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - inspectionTypeId = db.Column(db.ForeignKey('InspectionType.inspectionTypeId'), nullable=False, index=True) - imagerId = db.Column(db.ForeignKey('Imager.imagerId'), index=True) - temperature = db.Column(db.Float) - blTimeStamp = db.Column(db.DateTime) - scheduleComponentid = db.Column(db.ForeignKey('ScheduleComponent.scheduleComponentId'), index=True) - state = db.Column(db.String(20)) - priority = db.Column(db.SmallInteger) - manual = db.Column(db.Integer) - scheduledTimeStamp = db.Column(db.DateTime) - completedTimeStamp = db.Column(db.DateTime) - - Container = db.relationship('Container', primaryjoin='ContainerInspection.containerId == Container.containerId') - Imager = db.relationship('Imager', primaryjoin='ContainerInspection.imagerId == Imager.imagerId') - InspectionType = db.relationship('InspectionType', primaryjoin='ContainerInspection.inspectionTypeId == InspectionType.inspectionTypeId') - ScheduleComponent = db.relationship('ScheduleComponent', primaryjoin='ContainerInspection.scheduleComponentid == ScheduleComponent.scheduleComponentId') - - - -class ContainerQueue(db.Model): - __tablename__ = 'ContainerQueue' - - containerQueueId = db.Column(db.Integer, primary_key=True) - containerId = db.Column(db.ForeignKey('Container.containerId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - personId = db.Column(db.ForeignKey('Person.personId', onupdate='CASCADE'), index=True) - createdTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - completedTimeStamp = db.Column(db.DateTime) - - Container = db.relationship('Container', primaryjoin='ContainerQueue.containerId == Container.containerId') - Person 
= db.relationship('Person', primaryjoin='ContainerQueue.personId == Person.personId') - - - -class ContainerQueueSample(db.Model): - __tablename__ = 'ContainerQueueSample' - - containerQueueSampleId = db.Column(db.Integer, primary_key=True) - containerQueueId = db.Column(db.ForeignKey('ContainerQueue.containerQueueId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - blSubSampleId = db.Column(db.ForeignKey('BLSubSample.blSubSampleId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - - BLSubSample = db.relationship('BLSubSample', primaryjoin='ContainerQueueSample.blSubSampleId == BLSubSample.blSubSampleId') - ContainerQueue = db.relationship('ContainerQueue', primaryjoin='ContainerQueueSample.containerQueueId == ContainerQueue.containerQueueId') - - - -class ContainerRegistry(db.Model): - __tablename__ = 'ContainerRegistry' - - containerRegistryId = db.Column(db.Integer, primary_key=True) - barcode = db.Column(db.String(20)) - comments = db.Column(db.String(255)) - recordTimestamp = db.Column(db.DateTime, server_default=db.FetchedValue()) - - - -class ContainerRegistryHasProposal(db.Model): - __tablename__ = 'ContainerRegistry_has_Proposal' - __table_args__ = ( - db.Index('containerRegistryId', 'containerRegistryId', 'proposalId'), - ) - - containerRegistryHasProposalId = db.Column(db.Integer, primary_key=True) - containerRegistryId = db.Column(db.ForeignKey('ContainerRegistry.containerRegistryId')) - proposalId = db.Column(db.ForeignKey('Proposal.proposalId'), index=True) - personId = db.Column(db.ForeignKey('Person.personId'), index=True, info='Person registering the container') - recordTimestamp = db.Column(db.DateTime, server_default=db.FetchedValue()) - - ContainerRegistry = db.relationship('ContainerRegistry', primaryjoin='ContainerRegistryHasProposal.containerRegistryId == ContainerRegistry.containerRegistryId') - Person = db.relationship('Person', primaryjoin='ContainerRegistryHasProposal.personId == Person.personId') - Proposal = db.relationship('Proposal', primaryjoin='ContainerRegistryHasProposal.proposalId == Proposal.proposalId') - - - -class ContainerReport(db.Model): - __tablename__ = 'ContainerReport' - - containerReportId = db.Column(db.Integer, primary_key=True) - containerRegistryId = db.Column(db.ForeignKey('ContainerRegistry.containerRegistryId'), index=True) - personId = db.Column(db.ForeignKey('Person.personId'), index=True, info='Person making report') - report = db.Column(db.Text) - attachmentFilePath = db.Column(db.String(255)) - recordTimestamp = db.Column(db.DateTime) - - ContainerRegistry = db.relationship('ContainerRegistry', primaryjoin='ContainerReport.containerRegistryId == ContainerRegistry.containerRegistryId') - Person = db.relationship('Person', primaryjoin='ContainerReport.personId == Person.personId') - - - -class CourierTermsAccepted(db.Model): - __tablename__ = 'CourierTermsAccepted' - - courierTermsAcceptedId = db.Column(db.Integer, primary_key=True) - proposalId = db.Column(db.ForeignKey('Proposal.proposalId'), nullable=False, index=True) - personId = db.Column(db.ForeignKey('Person.personId'), nullable=False, index=True) - shippingName = db.Column(db.String(100)) - timestamp = db.Column(db.DateTime, server_default=db.FetchedValue()) - shippingId = db.Column(db.ForeignKey('Shipping.shippingId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - - Person = db.relationship('Person', primaryjoin='CourierTermsAccepted.personId == Person.personId') - Proposal = db.relationship('Proposal', primaryjoin='CourierTermsAccepted.proposalId == 
Proposal.proposalId') - Shipping = db.relationship('Shipping', primaryjoin='CourierTermsAccepted.shippingId == Shipping.shippingId') - - - -class Crystal(db.Model): - __tablename__ = 'Crystal' - - crystalId = db.Column(db.Integer, primary_key=True) - diffractionPlanId = db.Column(db.ForeignKey('DiffractionPlan.diffractionPlanId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - proteinId = db.Column(db.ForeignKey('Protein.proteinId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - crystalUUID = db.Column(db.String(45)) - name = db.Column(db.String(255)) - spaceGroup = db.Column(db.String(20)) - morphology = db.Column(db.String(255)) - color = db.Column(db.String(45)) - size_X = db.Column(db.Float(asdecimal=True)) - size_Y = db.Column(db.Float(asdecimal=True)) - size_Z = db.Column(db.Float(asdecimal=True)) - cell_a = db.Column(db.Float(asdecimal=True)) - cell_b = db.Column(db.Float(asdecimal=True)) - cell_c = db.Column(db.Float(asdecimal=True)) - cell_alpha = db.Column(db.Float(asdecimal=True)) - cell_beta = db.Column(db.Float(asdecimal=True)) - cell_gamma = db.Column(db.Float(asdecimal=True)) - comments = db.Column(db.String(255)) - pdbFileName = db.Column(db.String(255), info='pdb file name') - pdbFilePath = db.Column(db.String(1024), info='pdb file path') - recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time') - abundance = db.Column(db.Float) - theoreticalDensity = db.Column(db.Float) - - DiffractionPlan = db.relationship('DiffractionPlan', primaryjoin='Crystal.diffractionPlanId == DiffractionPlan.diffractionPlanId') - Protein = db.relationship('Protein', primaryjoin='Crystal.proteinId == Protein.proteinId') - - - -class CrystalHasUUID(db.Model): - __tablename__ = 'Crystal_has_UUID' - - crystal_has_UUID_Id = db.Column(db.Integer, primary_key=True) - crystalId = db.Column(db.ForeignKey('Crystal.crystalId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - UUID = db.Column(db.String(45), index=True) - imageURL = db.Column(db.String(255)) - - Crystal = db.relationship('Crystal', primaryjoin='CrystalHasUUID.crystalId == Crystal.crystalId') - - - -class DataAcquisition(db.Model): - __tablename__ = 'DataAcquisition' - - dataAcquisitionId = db.Column(db.Integer, primary_key=True) - sampleCellId = db.Column(db.Integer, nullable=False) - framesCount = db.Column(db.String(45)) - energy = db.Column(db.String(45)) - waitTime = db.Column(db.String(45)) - detectorDistance = db.Column(db.String(45)) - - - -class DataCollection(db.Model): - __tablename__ = 'DataCollection' - - dataCollectionId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - BLSAMPLEID = db.Column(db.Integer, index=True) - SESSIONID = db.Column(db.Integer, index=True, server_default=db.FetchedValue()) - experimenttype = db.Column(db.String(24)) - dataCollectionNumber = db.Column(db.Integer, index=True) - startTime = db.Column(db.DateTime, index=True, info='Start time of the dataCollection') - endTime = db.Column(db.DateTime, info='end time of the dataCollection') - runStatus = db.Column(db.String(45)) - axisStart = db.Column(db.Float) - axisEnd = db.Column(db.Float) - axisRange = db.Column(db.Float) - overlap = db.Column(db.Float) - numberOfImages = db.Column(db.Integer) - startImageNumber = db.Column(db.Integer) - numberOfPasses = db.Column(db.Integer) - exposureTime = db.Column(db.Float) - imageDirectory = db.Column(db.String(255), 
-    imagePrefix = db.Column(db.String(45), index=True)
-    imageSuffix = db.Column(db.String(45))
-    imageContainerSubPath = db.Column(db.String(255), info='Internal path of a HDF5 file pointing to the data for this data collection')
-    fileTemplate = db.Column(db.String(255))
-    wavelength = db.Column(db.Float)
-    resolution = db.Column(db.Float)
-    detectorDistance = db.Column(db.Float)
-    xBeam = db.Column(db.Float)
-    yBeam = db.Column(db.Float)
-    comments = db.Column(db.String(1024))
-    printableForReport = db.Column(db.Integer, server_default=db.FetchedValue())
-    CRYSTALCLASS = db.Column(db.String(20))
-    slitGapVertical = db.Column(db.Float)
-    slitGapHorizontal = db.Column(db.Float)
-    transmission = db.Column(db.Float)
-    synchrotronMode = db.Column(db.String(20))
-    xtalSnapshotFullPath1 = db.Column(db.String(255))
-    xtalSnapshotFullPath2 = db.Column(db.String(255))
-    xtalSnapshotFullPath3 = db.Column(db.String(255))
-    xtalSnapshotFullPath4 = db.Column(db.String(255))
-    rotationAxis = db.Column(db.Enum('Omega', 'Kappa', 'Phi'))
-    phiStart = db.Column(db.Float)
-    kappaStart = db.Column(db.Float)
-    omegaStart = db.Column(db.Float)
-    chiStart = db.Column(db.Float)
-    resolutionAtCorner = db.Column(db.Float)
-    detector2Theta = db.Column(db.Float)
-    DETECTORMODE = db.Column(db.String(255))
-    undulatorGap1 = db.Column(db.Float)
-    undulatorGap2 = db.Column(db.Float)
-    undulatorGap3 = db.Column(db.Float)
-    beamSizeAtSampleX = db.Column(db.Float)
-    beamSizeAtSampleY = db.Column(db.Float)
-    centeringMethod = db.Column(db.String(255))
-    averageTemperature = db.Column(db.Float)
-    ACTUALSAMPLEBARCODE = db.Column(db.String(45))
-    ACTUALSAMPLESLOTINCONTAINER = db.Column(db.Integer)
-    ACTUALCONTAINERBARCODE = db.Column(db.String(45))
-    ACTUALCONTAINERSLOTINSC = db.Column(db.Integer)
-    actualCenteringPosition = db.Column(db.String(255))
-    beamShape = db.Column(db.String(45))
-    dataCollectionGroupId = db.Column(db.ForeignKey('DataCollectionGroup.dataCollectionGroupId'), nullable=False, index=True, info='references DataCollectionGroup table')
-    POSITIONID = db.Column(db.Integer)
-    detectorId = db.Column(db.ForeignKey('Detector.detectorId'), index=True, info='references Detector table')
-    FOCALSPOTSIZEATSAMPLEX = db.Column(db.Float)
-    POLARISATION = db.Column(db.Float)
-    FOCALSPOTSIZEATSAMPLEY = db.Column(db.Float)
-    APERTUREID = db.Column(db.Integer)
-    screeningOrigId = db.Column(db.Integer)
-    startPositionId = db.Column(db.ForeignKey('MotorPosition.motorPositionId'), index=True)
-    endPositionId = db.Column(db.ForeignKey('MotorPosition.motorPositionId'), index=True)
-    flux = db.Column(db.Float(asdecimal=True))
-    strategySubWedgeOrigId = db.Column(db.ForeignKey('ScreeningStrategySubWedge.screeningStrategySubWedgeId'), index=True, info='references ScreeningStrategySubWedge table')
-    blSubSampleId = db.Column(db.ForeignKey('BLSubSample.blSubSampleId'), index=True)
-    flux_end = db.Column(db.Float(asdecimal=True), info='flux measured after the collect')
-    bestWilsonPlotPath = db.Column(db.String(255))
-    processedDataFile = db.Column(db.String(255))
-    datFullPath = db.Column(db.String(255))
-    magnification = db.Column(db.Float, info='Calibrated magnification, Units: dimensionless')
-    totalAbsorbedDose = db.Column(db.Float, info='Unit: e-/A^2 for EM')
-    binning = db.Column(db.Integer, server_default=db.FetchedValue(), info='1 or 2. Number of pixels to process as 1. (Use mean value.)')
-    particleDiameter = db.Column(db.Float, info='Unit: nm')
-    boxSize_CTF = db.Column(db.Float, info='Unit: pixels')
-    minResolution = db.Column(db.Float, info='Unit: A')
-    minDefocus = db.Column(db.Float, info='Unit: A')
-    maxDefocus = db.Column(db.Float, info='Unit: A')
-    defocusStepSize = db.Column(db.Float, info='Unit: A')
-    amountAstigmatism = db.Column(db.Float, info='Unit: A')
-    extractSize = db.Column(db.Float, info='Unit: pixels')
-    bgRadius = db.Column(db.Float, info='Unit: nm')
-    voltage = db.Column(db.Float, info='Unit: kV')
-    objAperture = db.Column(db.Float, info='Unit: um')
-    c1aperture = db.Column(db.Float, info='Unit: um')
-    c2aperture = db.Column(db.Float, info='Unit: um')
-    c3aperture = db.Column(db.Float, info='Unit: um')
-    c1lens = db.Column(db.Float, info='Unit: %')
-    c2lens = db.Column(db.Float, info='Unit: %')
-    c3lens = db.Column(db.Float, info='Unit: %')
-    totalExposedDose = db.Column(db.Float, info='Units: e-/A^2')
-    nominalMagnification = db.Column(db.Float, info='Nominal magnification: Units: dimensionless')
-    nominalDefocus = db.Column(db.Float, info='Nominal defocus, Units: A')
-    imageSizeX = db.Column(db.Integer, info='Image size in x, in case crop has been used, Units: pixels')
-    imageSizeY = db.Column(db.Integer, info='Image size in y, Units: pixels')
-    pixelSizeOnImage = db.Column(db.Float, info='Pixel size on image, calculated from magnification, duplicate? Units: um?')
-    phasePlate = db.Column(db.Integer, info='Whether the phase plate was used')
-
-    BLSubSample = db.relationship('BLSubSample', primaryjoin='DataCollection.blSubSampleId == BLSubSample.blSubSampleId')
-    DataCollectionGroup = db.relationship('DataCollectionGroup', primaryjoin='DataCollection.dataCollectionGroupId == DataCollectionGroup.dataCollectionGroupId')
-    Detector = db.relationship('Detector', primaryjoin='DataCollection.detectorId == Detector.detectorId')
-    MotorPosition = db.relationship('MotorPosition', primaryjoin='DataCollection.endPositionId == MotorPosition.motorPositionId')
-    MotorPosition1 = db.relationship('MotorPosition', primaryjoin='DataCollection.startPositionId == MotorPosition.motorPositionId')
-    ScreeningStrategySubWedge = db.relationship('ScreeningStrategySubWedge', primaryjoin='DataCollection.strategySubWedgeOrigId == ScreeningStrategySubWedge.screeningStrategySubWedgeId')
-
-
-
-class DataCollectionComment(db.Model):
-    __tablename__ = 'DataCollectionComment'
-
-    dataCollectionCommentId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    personId = db.Column(db.ForeignKey('Person.personId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    comments = db.Column(db.String(4000))
-    createTime = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
-    modTime = db.Column(db.Date)
-
-    DataCollection = db.relationship('DataCollection', primaryjoin='DataCollectionComment.dataCollectionId == DataCollection.dataCollectionId')
-    Person = db.relationship('Person', primaryjoin='DataCollectionComment.personId == Person.personId')
-
-
-
-class DataCollectionFileAttachment(db.Model):
-    __tablename__ = 'DataCollectionFileAttachment'
-
-    dataCollectionFileAttachmentId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    fileFullPath = db.Column(db.String(255), nullable=False)
-    fileType = db.Column(db.Enum('snapshot', 'log', 'xy', 'recip', 'pia', 'warning'))
-    createTime = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
-
-    DataCollection = db.relationship('DataCollection', primaryjoin='DataCollectionFileAttachment.dataCollectionId == DataCollection.dataCollectionId')
-
-
-
-class DataCollectionGroup(db.Model):
-    __tablename__ = 'DataCollectionGroup'
-
-    dataCollectionGroupId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    sessionId = db.Column(db.ForeignKey('BLSession.sessionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='references Session table')
-    comments = db.Column(db.String(1024), info='comments')
-    blSampleId = db.Column(db.ForeignKey('BLSample.blSampleId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='references BLSample table')
-    experimentType = db.Column(db.Enum('SAD', 'SAD - Inverse Beam', 'OSC', 'Collect - Multiwedge', 'MAD', 'Helical', 'Multi-positional', 'Mesh', 'Burn', 'MAD - Inverse Beam', 'Characterization', 'Dehydration', 'tomo', 'experiment', 'EM', 'PDF', 'PDF+Bragg', 'Bragg', 'single particle', 'Serial Fixed', 'Serial Jet', 'Standard', 'Time Resolved', 'Diamond Anvil High Pressure', 'Custom'), info='Standard: Routine structure determination experiment. Time Resolved: Investigate the change of a system over time. Custom: Special or non-standard data collection.')
-    startTime = db.Column(db.DateTime, info='Start time of the dataCollectionGroup')
-    endTime = db.Column(db.DateTime, info='end time of the dataCollectionGroup')
-    crystalClass = db.Column(db.String(20), info='Crystal Class for industrial users')
-    detectorMode = db.Column(db.String(255), info='Detector mode')
-    actualSampleBarcode = db.Column(db.String(45), info='Actual sample barcode')
-    actualSampleSlotInContainer = db.Column(db.Integer, info='Actual sample slot number in container')
-    actualContainerBarcode = db.Column(db.String(45), info='Actual container barcode')
-    actualContainerSlotInSC = db.Column(db.Integer, info='Actual container slot number in sample changer')
-    workflowId = db.Column(db.ForeignKey('Workflow.workflowId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    xtalSnapshotFullPath = db.Column(db.String(255))
-    scanParameters = db.Column(db.String(collation='utf8mb4_bin'))
-
-    BLSample = db.relationship('BLSample', primaryjoin='DataCollectionGroup.blSampleId == BLSample.blSampleId')
-    BLSession = db.relationship('BLSession', primaryjoin='DataCollectionGroup.sessionId == BLSession.sessionId')
-    Workflow = db.relationship('Workflow', primaryjoin='DataCollectionGroup.workflowId == Workflow.workflowId')
-    Project = db.relationship('Project', secondary='Project_has_DCGroup')
-
-
-
-class DataCollectionPlanHasDetector(db.Model):
-    __tablename__ = 'DataCollectionPlan_has_Detector'
-    __table_args__ = (
-        db.Index('dataCollectionPlanId', 'dataCollectionPlanId', 'detectorId'),
-    )
-
-    dataCollectionPlanHasDetectorId = db.Column(db.Integer, primary_key=True)
-    dataCollectionPlanId = db.Column(db.ForeignKey('DiffractionPlan.diffractionPlanId'), nullable=False)
-    detectorId = db.Column(db.ForeignKey('Detector.detectorId'), nullable=False, index=True)
-    exposureTime = db.Column(db.Float(asdecimal=True))
-    distance = db.Column(db.Float(asdecimal=True))
-    roll = db.Column(db.Float(asdecimal=True))
-
-    DiffractionPlan = db.relationship('DiffractionPlan', primaryjoin='DataCollectionPlanHasDetector.dataCollectionPlanId == DiffractionPlan.diffractionPlanId')
-    Detector = db.relationship('Detector', primaryjoin='DataCollectionPlanHasDetector.detectorId == Detector.detectorId')
-
-
-
-class DataReductionStatu(db.Model):
-    __tablename__ = 'DataReductionStatus'
-
-    dataReductionStatusId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.Integer, nullable=False)
-    status = db.Column(db.String(15))
-    filename = db.Column(db.String(255))
-    message = db.Column(db.String(255))
-
-
-
-class Detector(db.Model):
-    __tablename__ = 'Detector'
-    __table_args__ = (
-        db.Index('Detector_FKIndex1', 'detectorType', 'detectorManufacturer', 'detectorModel', 'detectorPixelSizeHorizontal', 'detectorPixelSizeVertical'),
-    )
-
-    detectorId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    detectorType = db.Column(db.String(255))
-    detectorManufacturer = db.Column(db.String(255))
-    detectorModel = db.Column(db.String(255))
-    detectorPixelSizeHorizontal = db.Column(db.Float)
-    detectorPixelSizeVertical = db.Column(db.Float)
-    DETECTORMAXRESOLUTION = db.Column(db.Float)
-    DETECTORMINRESOLUTION = db.Column(db.Float)
-    detectorSerialNumber = db.Column(db.String(30), unique=True)
-    detectorDistanceMin = db.Column(db.Float(asdecimal=True))
-    detectorDistanceMax = db.Column(db.Float(asdecimal=True))
-    trustedPixelValueRangeLower = db.Column(db.Float(asdecimal=True))
-    trustedPixelValueRangeUpper = db.Column(db.Float(asdecimal=True))
-    sensorThickness = db.Column(db.Float)
-    overload = db.Column(db.Float)
-    XGeoCorr = db.Column(db.String(255))
-    YGeoCorr = db.Column(db.String(255))
-    detectorMode = db.Column(db.String(255))
-    density = db.Column(db.Float)
-    composition = db.Column(db.String(16))
-    numberOfPixelsX = db.Column(db.Integer, info='Detector number of pixels in x')
-    numberOfPixelsY = db.Column(db.Integer, info='Detector number of pixels in y')
-    detectorRollMin = db.Column(db.Float(asdecimal=True), info='unit: degrees')
-    detectorRollMax = db.Column(db.Float(asdecimal=True), info='unit: degrees')
-    localName = db.Column(db.String(40), info='Colloquial name for the detector')
-
-
-
-class Dewar(db.Model):
-    __tablename__ = 'Dewar'
-
-    dewarId = db.Column(db.Integer, primary_key=True)
-    shippingId = db.Column(db.ForeignKey('Shipping.shippingId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    code = db.Column(db.String(45), index=True)
-    comments = db.Column(db.String)
-    storageLocation = db.Column(db.String(45))
-    dewarStatus = db.Column(db.String(45), index=True)
-    bltimeStamp = db.Column(db.DateTime)
-    isStorageDewar = db.Column(db.Integer, server_default=db.FetchedValue())
-    barCode = db.Column(db.String(45), unique=True)
-    firstExperimentId = db.Column(db.ForeignKey('BLSession.sessionId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    customsValue = db.Column(db.Integer)
-    transportValue = db.Column(db.Integer)
-    trackingNumberToSynchrotron = db.Column(db.String(30))
-    trackingNumberFromSynchrotron = db.Column(db.String(30))
-    type = db.Column(db.Enum('Dewar', 'Toolbox'), nullable=False, server_default=db.FetchedValue())
-    FACILITYCODE = db.Column(db.String(20))
-    weight = db.Column(db.Float, info='dewar weight in kg')
-    deliveryAgent_barcode = db.Column(db.String(30), info='Courier piece barcode (not the airway bill)')
-
-    BLSession = db.relationship('BLSession', primaryjoin='Dewar.firstExperimentId == BLSession.sessionId')
-    Shipping = db.relationship('Shipping', primaryjoin='Dewar.shippingId == Shipping.shippingId')
-
-
-
-class DewarLocation(db.Model):
-    __tablename__ = 'DewarLocation'
-
-    eventId = db.Column(db.Integer, primary_key=True)
-    dewarNumber = db.Column(db.String(128), nullable=False, info='Dewar number')
-    userId = db.Column(db.String(128), info='User who locates the dewar')
-    dateTime = db.Column(db.DateTime, info='Date and time of localization')
-    locationName = db.Column(db.String(128), info='Location of the dewar')
-    courierName = db.Column(db.String(128), info='Name of the carrier shipping back the dewar')
-    courierTrackingNumber = db.Column(db.String(128), info='Tracking number of the shipment')
-
-
-
-class DewarLocationList(db.Model):
-    __tablename__ = 'DewarLocationList'
-
-    locationId = db.Column(db.Integer, primary_key=True)
-    locationName = db.Column(db.String(128), nullable=False, server_default=db.FetchedValue(), info='Location')
-
-
-
-class DewarRegistry(db.Model):
-    __tablename__ = 'DewarRegistry'
-
-    facilityCode = db.Column(db.String(20), primary_key=True)
-    proposalId = db.Column(db.ForeignKey('Proposal.proposalId', ondelete='CASCADE'), nullable=False, index=True)
-    labContactId = db.Column(db.ForeignKey('LabContact.labContactId', ondelete='CASCADE'), nullable=False, index=True)
-    purchaseDate = db.Column(db.DateTime)
-    bltimestamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
-
-    LabContact = db.relationship('LabContact', primaryjoin='DewarRegistry.labContactId == LabContact.labContactId')
-    Proposal = db.relationship('Proposal', primaryjoin='DewarRegistry.proposalId == Proposal.proposalId')
-
-
-
-class DewarReport(db.Model):
-    __tablename__ = 'DewarReport'
-
-    dewarReportId = db.Column(db.Integer, primary_key=True)
-    facilityCode = db.Column(db.ForeignKey('DewarRegistry.facilityCode', ondelete='CASCADE'), nullable=False, index=True)
-    report = db.Column(db.Text)
-    attachment = db.Column(db.String(255))
-    bltimestamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
-
-    DewarRegistry = db.relationship('DewarRegistry', primaryjoin='DewarReport.facilityCode == DewarRegistry.facilityCode')
-
-
-
-class DewarTransportHistory(db.Model):
-    __tablename__ = 'DewarTransportHistory'
-
-    DewarTransportHistoryId = db.Column(db.Integer, primary_key=True)
-    dewarId = db.Column(db.ForeignKey('Dewar.dewarId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    dewarStatus = db.Column(db.String(45), nullable=False)
-    storageLocation = db.Column(db.String(45), nullable=False)
-    arrivalDate = db.Column(db.DateTime, nullable=False)
-
-    Dewar = db.relationship('Dewar', primaryjoin='DewarTransportHistory.dewarId == Dewar.dewarId')
-
-
-
-class DiffractionPlan(db.Model):
-    __tablename__ = 'DiffractionPlan'
-
-    diffractionPlanId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(20))
-    experimentKind = db.Column(db.Enum('Default', 'MXPressE', 'MXPressO', 'MXPressE_SAD', 'MXScore', 'MXPressM', 'MAD', 'SAD', 'Fixed', 'Ligand binding', 'Refinement', 'OSC', 'MAD - Inverse Beam', 'SAD - Inverse Beam', 'MESH', 'XFE', 'Stepped transmission'))
-    observedResolution = db.Column(db.Float)
-    minimalResolution = db.Column(db.Float)
-    exposureTime = db.Column(db.Float)
-    oscillationRange = db.Column(db.Float)
-    maximalResolution = db.Column(db.Float)
-    screeningResolution = db.Column(db.Float)
-    radiationSensitivity = db.Column(db.Float)
-    anomalousScatterer = db.Column(db.String(255))
-    preferredBeamSizeX = db.Column(db.Float)
-    preferredBeamSizeY = db.Column(db.Float)
-    preferredBeamDiameter = db.Column(db.Float)
-    comments = db.Column(db.String(1024))
-    DIFFRACTIONPLANUUID = db.Column(db.String(1000))
-    aimedCompleteness = db.Column(db.Float(asdecimal=True))
-    aimedIOverSigmaAtHighestRes = db.Column(db.Float(asdecimal=True))
-    aimedMultiplicity = db.Column(db.Float(asdecimal=True))
-    aimedResolution = db.Column(db.Float(asdecimal=True))
-    anomalousData = db.Column(db.Integer, server_default=db.FetchedValue())
-    complexity = db.Column(db.String(45))
-    estimateRadiationDamage = db.Column(db.Integer, server_default=db.FetchedValue())
-    forcedSpaceGroup = db.Column(db.String(45))
-    requiredCompleteness = db.Column(db.Float(asdecimal=True))
-    requiredMultiplicity = db.Column(db.Float(asdecimal=True))
-    requiredResolution = db.Column(db.Float(asdecimal=True))
-    strategyOption = db.Column(db.String(45))
-    kappaStrategyOption = db.Column(db.String(45))
-    numberOfPositions = db.Column(db.Integer)
-    minDimAccrossSpindleAxis = db.Column(db.Float(asdecimal=True), info='minimum dimension across the spindle axis')
-    maxDimAccrossSpindleAxis = db.Column(db.Float(asdecimal=True), info='maximum dimension across the spindle axis')
-    radiationSensitivityBeta = db.Column(db.Float(asdecimal=True))
-    radiationSensitivityGamma = db.Column(db.Float(asdecimal=True))
-    minOscWidth = db.Column(db.Float)
-    recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time')
-    monochromator = db.Column(db.String(8), info='DMM or DCM')
-    energy = db.Column(db.Float, info='eV')
-    transmission = db.Column(db.Float, info='Decimal fraction in range [0,1]')
-    boxSizeX = db.Column(db.Float, info='microns')
-    boxSizeY = db.Column(db.Float, info='microns')
-    kappaStart = db.Column(db.Float, info='degrees')
-    axisStart = db.Column(db.Float, info='degrees')
-    axisRange = db.Column(db.Float, info='degrees')
-    numberOfImages = db.Column(db.Integer, info='The number of images requested')
-    presetForProposalId = db.Column(db.ForeignKey('Proposal.proposalId'), index=True, info='Indicates this plan is available to all sessions on given proposal')
-    beamLineName = db.Column(db.String(45), info='Indicates this plan is available to all sessions on given beamline')
-    detectorId = db.Column(db.ForeignKey('Detector.detectorId', onupdate='CASCADE'), index=True)
-    distance = db.Column(db.Float(asdecimal=True))
-    orientation = db.Column(db.Float(asdecimal=True))
-    monoBandwidth = db.Column(db.Float(asdecimal=True))
-    centringMethod = db.Column(db.Enum('xray', 'loop', 'diffraction', 'optical'))
-
-    Detector = db.relationship('Detector', primaryjoin='DiffractionPlan.detectorId == Detector.detectorId')
-    Proposal = db.relationship('Proposal', primaryjoin='DiffractionPlan.presetForProposalId == Proposal.proposalId')
-
-
-
-class EMMicroscope(db.Model):
-    __tablename__ = 'EMMicroscope'
-
-    emMicroscopeId = db.Column(db.Integer, primary_key=True)
-    instrumentName = db.Column(db.String(100), nullable=False)
-    voltage = db.Column(db.Float)
-    CS = db.Column(db.Float)
-    detectorPixelSize = db.Column(db.Float)
-    C2aperture = db.Column(db.Float)
-    ObjAperture = db.Column(db.Float)
-    C2lens = db.Column(db.Float)
-
-
-
-class EnergyScan(db.Model):
-    __tablename__ = 'EnergyScan'
-
-    energyScanId = db.Column(db.Integer, primary_key=True)
-    sessionId = db.Column(db.ForeignKey('BLSession.sessionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    blSampleId = db.Column(db.ForeignKey('BLSample.blSampleId'), index=True)
-    fluorescenceDetector = db.Column(db.String(255))
-    scanFileFullPath = db.Column(db.String(255))
-    jpegChoochFileFullPath = db.Column(db.String(255))
-    element = db.Column(db.String(45))
-    startEnergy = db.Column(db.Float)
-    endEnergy = db.Column(db.Float)
-    transmissionFactor = db.Column(db.Float)
-    exposureTime = db.Column(db.Float)
-    axisPosition = db.Column(db.Float)
-    synchrotronCurrent = db.Column(db.Float)
-    temperature = db.Column(db.Float)
-    peakEnergy = db.Column(db.Float)
-    peakFPrime = db.Column(db.Float)
-    peakFDoublePrime = db.Column(db.Float)
-    inflectionEnergy = db.Column(db.Float)
-    inflectionFPrime = db.Column(db.Float)
-    inflectionFDoublePrime = db.Column(db.Float)
-    xrayDose = db.Column(db.Float)
-    startTime = db.Column(db.DateTime)
-    endTime = db.Column(db.DateTime)
-    edgeEnergy = db.Column(db.String(255))
-    filename = db.Column(db.String(255))
-    beamSizeVertical = db.Column(db.Float)
-    beamSizeHorizontal = db.Column(db.Float)
-    choochFileFullPath = db.Column(db.String(255))
-    crystalClass = db.Column(db.String(20))
-    comments = db.Column(db.String(1024))
-    flux = db.Column(db.Float(asdecimal=True), info='flux measured before the energyScan')
-    flux_end = db.Column(db.Float(asdecimal=True), info='flux measured after the energyScan')
-    workingDirectory = db.Column(db.String(45))
-    blSubSampleId = db.Column(db.ForeignKey('BLSubSample.blSubSampleId'), index=True)
-
-    BLSample = db.relationship('BLSample', primaryjoin='EnergyScan.blSampleId == BLSample.blSampleId')
-    BLSubSample = db.relationship('BLSubSample', primaryjoin='EnergyScan.blSubSampleId == BLSubSample.blSubSampleId')
-    BLSession = db.relationship('BLSession', primaryjoin='EnergyScan.sessionId == BLSession.sessionId')
-    Project = db.relationship('Project', secondary='Project_has_EnergyScan')
-
-
-
-class Experiment(db.Model):
-    __tablename__ = 'Experiment'
-
-    experimentId = db.Column(db.Integer, primary_key=True)
-    proposalId = db.Column(db.Integer, nullable=False)
-    name = db.Column(db.String(255))
-    creationDate = db.Column(db.DateTime)
-    comments = db.Column(db.String(512))
-    experimentType = db.Column(db.String(128))
-    sourceFilePath = db.Column(db.String(256))
-    dataAcquisitionFilePath = db.Column(db.String(256), info='The file path pointing to the data acquisition. It may be a compressed file with all the files or just the folder')
-    status = db.Column(db.String(45))
-    sessionId = db.Column(db.Integer)
-
-
-
-class ExperimentKindDetail(db.Model):
-    __tablename__ = 'ExperimentKindDetails'
-
-    experimentKindId = db.Column(db.Integer, primary_key=True)
-    diffractionPlanId = db.Column(db.ForeignKey('DiffractionPlan.diffractionPlanId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    exposureIndex = db.Column(db.Integer)
-    dataCollectionType = db.Column(db.String(45))
-    dataCollectionKind = db.Column(db.String(45))
-    wedgeValue = db.Column(db.Float)
-
-    DiffractionPlan = db.relationship('DiffractionPlan', primaryjoin='ExperimentKindDetail.diffractionPlanId == DiffractionPlan.diffractionPlanId')
-
-
-
-class Frame(db.Model):
-    __tablename__ = 'Frame'
-
-    frameId = db.Column(db.Integer, primary_key=True)
-    FRAMESETID = db.Column(db.Integer)
-    filePath = db.Column(db.String(255))
-    comments = db.Column(db.String(45))
-
-
-
-class FrameList(db.Model):
-    __tablename__ = 'FrameList'
-
-    frameListId = db.Column(db.Integer, primary_key=True)
-    comments = db.Column(db.Integer)
-
-
-
-class FrameSet(db.Model):
-    __tablename__ = 'FrameSet'
-
-    frameSetId = db.Column(db.Integer, primary_key=True)
-    runId = db.Column(db.ForeignKey('Run.runId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    FILEPATH = db.Column(db.String(255))
-    INTERNALPATH = db.Column(db.String(255))
-    frameListId = db.Column(db.ForeignKey('FrameList.frameListId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    detectorId = db.Column(db.Integer)
-    detectorDistance = db.Column(db.String(45))
-
-    FrameList = db.relationship('FrameList', primaryjoin='FrameSet.frameListId == FrameList.frameListId')
-    Run = db.relationship('Run', primaryjoin='FrameSet.runId == Run.runId')
-
-
-
-class FrameToList(db.Model):
-    __tablename__ = 'FrameToList'
-
-    frameToListId = db.Column(db.Integer, primary_key=True)
-    frameListId = db.Column(db.ForeignKey('FrameList.frameListId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    frameId = db.Column(db.ForeignKey('Frame.frameId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-
-    Frame = db.relationship('Frame', primaryjoin='FrameToList.frameId == Frame.frameId')
-    FrameList = db.relationship('FrameList', primaryjoin='FrameToList.frameListId == FrameList.frameListId')
-
-
-
-class GeometryClassname(db.Model):
-    __tablename__ = 'GeometryClassname'
-
-    geometryClassnameId = db.Column(db.Integer, primary_key=True)
-    geometryClassname = db.Column(db.String(45))
-    geometryOrder = db.Column(db.Integer, nullable=False)
-
-
-
-class GridImageMap(db.Model):
-    __tablename__ = 'GridImageMap'
-
-    gridImageMapId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId'), index=True)
-    imageNumber = db.Column(db.Integer, info='Movie number, sequential 1-n in time order')
-    outputFileId = db.Column(db.String(80), info='File number, file 1 may not be movie 1')
-    positionX = db.Column(db.Float, info='X position of stage, Units: um')
-    positionY = db.Column(db.Float, info='Y position of stage, Units: um')
-
-    DataCollection = db.relationship('DataCollection', primaryjoin='GridImageMap.dataCollectionId == DataCollection.dataCollectionId')
-
-
-
-class GridInfo(db.Model):
-    __tablename__ = 'GridInfo'
-
-    gridInfoId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    xOffset = db.Column(db.Float(asdecimal=True))
-    yOffset = db.Column(db.Float(asdecimal=True))
-    dx_mm = db.Column(db.Float(asdecimal=True))
-    dy_mm = db.Column(db.Float(asdecimal=True))
-    steps_x = db.Column(db.Float(asdecimal=True))
-    steps_y = db.Column(db.Float(asdecimal=True))
-    meshAngle = db.Column(db.Float(asdecimal=True))
-    recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time')
-    workflowMeshId = db.Column(db.ForeignKey('WorkflowMesh.workflowMeshId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    orientation = db.Column(db.Enum('vertical', 'horizontal'), server_default=db.FetchedValue())
-    dataCollectionGroupId = db.Column(db.ForeignKey('DataCollectionGroup.dataCollectionGroupId'), index=True)
-    pixelsPerMicronX = db.Column(db.Float)
-    pixelsPerMicronY = db.Column(db.Float)
-    snapshot_offsetXPixel = db.Column(db.Float)
-    snapshot_offsetYPixel = db.Column(db.Float)
-    snaked = db.Column(db.Integer, server_default=db.FetchedValue(), info='True: The images associated with the DCG were collected in a snaked pattern')
-
-    DataCollectionGroup = db.relationship('DataCollectionGroup', primaryjoin='GridInfo.dataCollectionGroupId == DataCollectionGroup.dataCollectionGroupId')
-    WorkflowMesh = db.relationship('WorkflowMesh', primaryjoin='GridInfo.workflowMeshId == WorkflowMesh.workflowMeshId')
-
-
-
-class Image(db.Model):
-    __tablename__ = 'Image'
-    __table_args__ = (
-        db.Index('Image_Index3', 'fileLocation', 'fileName'),
-    )
-
-    imageId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue())
-    imageNumber = db.Column(db.Integer, index=True)
-    fileName = db.Column(db.String(255))
-    fileLocation = db.Column(db.String(255))
-    measuredIntensity = db.Column(db.Float)
-    jpegFileFullPath = db.Column(db.String(255))
-    jpegThumbnailFileFullPath = db.Column(db.String(255))
-    temperature = db.Column(db.Float)
-    cumulativeIntensity = db.Column(db.Float)
-    synchrotronCurrent = db.Column(db.Float)
-    comments = db.Column(db.String(1024))
-    machineMessage = db.Column(db.String(1024))
-    BLTIMESTAMP = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
-    motorPositionId = db.Column(db.ForeignKey('MotorPosition.motorPositionId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time')
-
-    DataCollection = db.relationship('DataCollection', primaryjoin='Image.dataCollectionId == DataCollection.dataCollectionId')
-    MotorPosition = db.relationship('MotorPosition', primaryjoin='Image.motorPositionId == MotorPosition.motorPositionId')
-
-
-
-class ImageQualityIndicator(db.Model):
-    __tablename__ = 'ImageQualityIndicators'
-
-    dataCollectionId = db.Column(db.Integer, primary_key=True, nullable=False)
-    imageNumber = db.Column(db.Integer, primary_key=True, nullable=False)
-    imageId = db.Column(db.Integer)
-    autoProcProgramId = db.Column(db.Integer, info='Foreign key to the AutoProcProgram table')
-    spotTotal = db.Column(db.Integer, info='Total number of spots')
-    inResTotal = db.Column(db.Integer, info='Total number of spots in resolution range')
-    goodBraggCandidates = db.Column(db.Integer, info='Total number of Bragg diffraction spots')
-    iceRings = db.Column(db.Integer, info='Number of ice rings identified')
-    method1Res = db.Column(db.Float, info='Resolution estimate 1 (see publication)')
-    method2Res = db.Column(db.Float, info='Resolution estimate 2 (see publication)')
-    maxUnitCell = db.Column(db.Float, info='Estimation of the largest possible unit cell edge')
-    pctSaturationTop50Peaks = db.Column(db.Float, info='The fraction of the dynamic range being used')
-    inResolutionOvrlSpots = db.Column(db.Integer, info='Number of spots overloaded')
-    binPopCutOffMethod2Res = db.Column(db.Float, info='Cut off used in resolution limit calculation')
-    recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time')
-    totalIntegratedSignal = db.Column(db.Float(asdecimal=True))
-    dozor_score = db.Column(db.Float(asdecimal=True), info='dozor_score')
-    driftFactor = db.Column(db.Float, info='EM movie drift factor')
-
-
-
-class Imager(db.Model):
-    __tablename__ = 'Imager'
-
-    imagerId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(45), nullable=False)
-    temperature = db.Column(db.Float)
-    serial = db.Column(db.String(45))
-    capacity = db.Column(db.SmallInteger)
-
-
-
-class InspectionType(db.Model):
-    __tablename__ = 'InspectionType'
-
-    inspectionTypeId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(45))
-
-
-
-class Instruction(db.Model):
-    __tablename__ = 'Instruction'
-
-    instructionId = db.Column(db.Integer, primary_key=True)
-    instructionSetId = db.Column(db.ForeignKey('InstructionSet.instructionSetId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    INSTRUCTIONORDER = db.Column(db.Integer)
-    comments = db.Column(db.String(255))
-    order = db.Column(db.Integer, nullable=False)
-
-    InstructionSet = db.relationship('InstructionSet', primaryjoin='Instruction.instructionSetId == InstructionSet.instructionSetId')
-
-
-
-class InstructionSet(db.Model):
-    __tablename__ = 'InstructionSet'
-
-    instructionSetId = db.Column(db.Integer, primary_key=True)
-    type = db.Column(db.String(50))
-
-
-
-class IspybCrystalClas(db.Model):
-    __tablename__ = 'IspybCrystalClass'
-
-    crystalClassId = db.Column(db.Integer, primary_key=True)
-    crystalClass_code = db.Column(db.String(20), nullable=False)
-    crystalClass_name = db.Column(db.String(255), nullable=False)
-
-
-
-class IspybReference(db.Model):
-    __tablename__ = 'IspybReference'
-
-    referenceId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    referenceName = db.Column(db.String(255), info='reference name')
-    referenceUrl = db.Column(db.String(1024), info='url of the reference')
-    referenceBibtext = db.Column(db.LargeBinary, info='bibtext value of the reference')
-    beamline = db.Column(db.Enum('All', 'ID14-4', 'ID23-1', 'ID23-2', 'ID29', 'XRF', 'AllXRF', 'Mesh'), info='beamline involved')
-
-
-
-class LabContact(db.Model):
-    __tablename__ = 'LabContact'
-    __table_args__ = (
-        db.Index('cardNameAndProposal', 'cardName', 'proposalId'),
-        db.Index('personAndProposal', 'personId', 'proposalId')
-    )
-
-    labContactId = db.Column(db.Integer, primary_key=True)
-    personId = db.Column(db.ForeignKey('Person.personId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False)
-    cardName = db.Column(db.String(40), nullable=False)
-    proposalId = db.Column(db.ForeignKey('Proposal.proposalId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    defaultCourrierCompany = db.Column(db.String(45))
-    courierAccount = db.Column(db.String(45))
-    billingReference = db.Column(db.String(45))
-    dewarAvgCustomsValue = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
-    dewarAvgTransportValue = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
-    recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time')
-
-    Person = db.relationship('Person', primaryjoin='LabContact.personId == Person.personId')
-    Proposal = db.relationship('Proposal', primaryjoin='LabContact.proposalId == Proposal.proposalId')
-
-
-
-class Laboratory(db.Model):
-    __tablename__ = 'Laboratory'
-
-    laboratoryId = db.Column(db.Integer, primary_key=True)
-    laboratoryUUID = db.Column(db.String(45))
-    name = db.Column(db.String(45))
-    address = db.Column(db.String(255))
-    city = db.Column(db.String(45))
-    country = db.Column(db.String(45))
-    url = db.Column(db.String(255))
-    organization = db.Column(db.String(45))
-    recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time')
-    laboratoryPk = db.Column(db.Integer)
-    postcode = db.Column(db.String(15))
-
-
-
-class Log4Stat(db.Model):
-    __tablename__ = 'Log4Stat'
-
-    id = db.Column(db.Integer, primary_key=True)
-    priority = db.Column(db.String(15))
-    LOG4JTIMESTAMP = db.Column(db.DateTime)
-    msg = db.Column(db.String(255))
-    detail = db.Column(db.String(255))
-    value = db.Column(db.String(255))
-    timestamp = db.Column(db.DateTime)
-
-
-
-class MXMRRun(db.Model):
-    __tablename__ = 'MXMRRun'
-
-    mxMRRunId = db.Column(db.Integer, primary_key=True)
-    autoProcScalingId = db.Column(db.ForeignKey('AutoProcScaling.autoProcScalingId'), nullable=False, index=True)
-    success = db.Column(db.Integer, server_default=db.FetchedValue(), info='Indicates whether the program completed. 1 for success, 0 for failure.')
-    message = db.Column(db.String(255), info='A short summary of the findings, success or failure.')
-    pipeline = db.Column(db.String(50))
-    inputCoordFile = db.Column(db.String(255))
-    outputCoordFile = db.Column(db.String(255))
-    inputMTZFile = db.Column(db.String(255))
-    outputMTZFile = db.Column(db.String(255))
-    runDirectory = db.Column(db.String(255))
-    logFile = db.Column(db.String(255))
-    commandLine = db.Column(db.String(255))
-    rValueStart = db.Column(db.Float)
-    rValueEnd = db.Column(db.Float)
-    rFreeValueStart = db.Column(db.Float)
-    rFreeValueEnd = db.Column(db.Float)
-    starttime = db.Column(db.DateTime)
-    endtime = db.Column(db.DateTime)
-
-    AutoProcScaling = db.relationship('AutoProcScaling', primaryjoin='MXMRRun.autoProcScalingId == AutoProcScaling.autoProcScalingId')
-
-
-
-class MXMRRunBlob(db.Model):
-    __tablename__ = 'MXMRRunBlob'
-
-    mxMRRunBlobId = db.Column(db.Integer, primary_key=True)
-    mxMRRunId = db.Column(db.ForeignKey('MXMRRun.mxMRRunId'), nullable=False, index=True)
-    view1 = db.Column(db.String(255))
-    view2 = db.Column(db.String(255))
-    view3 = db.Column(db.String(255))
-
-    MXMRRun = db.relationship('MXMRRun', primaryjoin='MXMRRunBlob.mxMRRunId == MXMRRun.mxMRRunId')
-
-
-
-class Macromolecule(db.Model):
-    __tablename__ = 'Macromolecule'
-
-    macromoleculeId = db.Column(db.Integer, primary_key=True)
-    proposalId = db.Column(db.Integer)
-    safetyLevelId = db.Column(db.ForeignKey('SafetyLevel.safetyLevelId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    name = db.Column(db.String(45))
-    acronym = db.Column(db.String(45))
-    molecularMass = db.Column(db.String(45))
-    extintionCoefficient = db.Column(db.String(45))
-    sequence = db.Column(db.String(1000))
-    creationDate = db.Column(db.DateTime)
-    comments = db.Column(db.String(1024))
-
-    SafetyLevel = db.relationship('SafetyLevel', primaryjoin='Macromolecule.safetyLevelId == SafetyLevel.safetyLevelId')
-
-
-
-class MacromoleculeRegion(db.Model):
-    __tablename__ = 'MacromoleculeRegion'
-
-    macromoleculeRegionId = db.Column(db.Integer, primary_key=True)
-    macromoleculeId = db.Column(db.ForeignKey('Macromolecule.macromoleculeId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    regionType = db.Column(db.String(45))
-    id = db.Column(db.String(45))
-    count = db.Column(db.String(45))
-    sequence = db.Column(db.String(45))
-
-    Macromolecule = db.relationship('Macromolecule', primaryjoin='MacromoleculeRegion.macromoleculeId == Macromolecule.macromoleculeId')
-
-
-
-class Measurement(db.Model):
-    __tablename__ = 'Measurement'
-
-    specimenId = db.Column(db.ForeignKey('Specimen.specimenId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    runId = db.Column(db.ForeignKey('Run.runId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    code = db.Column(db.String(100))
-    priorityLevelId = db.Column(db.Integer)
-    exposureTemperature = db.Column(db.String(45))
-    viscosity = db.Column(db.String(45))
-    flow = db.Column(db.Integer)
-    extraFlowTime = db.Column(db.String(45))
-    volumeToLoad = db.Column(db.String(45))
-    waitTime = db.Column(db.String(45))
-    transmission = db.Column(db.String(45))
-    comments = db.Column(db.String(512))
-    measurementId = db.Column(db.Integer, primary_key=True)
-
-    Run = db.relationship('Run', primaryjoin='Measurement.runId == Run.runId')
-    Speciman = db.relationship('Speciman', primaryjoin='Measurement.specimenId == Speciman.specimenId')
-
-
-
-class MeasurementToDataCollection(db.Model):
-    __tablename__ = 'MeasurementToDataCollection'
-
-    measurementToDataCollectionId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.ForeignKey('SaxsDataCollection.dataCollectionId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    measurementId = db.Column(db.ForeignKey('Measurement.measurementId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    dataCollectionOrder = db.Column(db.Integer)
-
-    SaxsDataCollection = db.relationship('SaxsDataCollection', primaryjoin='MeasurementToDataCollection.dataCollectionId == SaxsDataCollection.dataCollectionId')
-    Measurement = db.relationship('Measurement', primaryjoin='MeasurementToDataCollection.measurementId == Measurement.measurementId')
-
-
-
-class MeasurementUnit(db.Model):
-    __tablename__ = 'MeasurementUnit'
-
-    measurementUnitId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(45))
-    unitType = db.Column(db.String(45))
-
-
-
-class Merge(db.Model):
-    __tablename__ = 'Merge'
-
-    mergeId = db.Column(db.Integer, primary_key=True)
-    measurementId = db.Column(db.ForeignKey('Measurement.measurementId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    frameListId = db.Column(db.ForeignKey('FrameList.frameListId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    discardedFrameNameList = db.Column(db.String(1024))
-    averageFilePath = db.Column(db.String(255))
-    framesCount = db.Column(db.String(45))
-    framesMerge = db.Column(db.String(45))
-
-    FrameList = db.relationship('FrameList', primaryjoin='Merge.frameListId == FrameList.frameListId')
-    Measurement = db.relationship('Measurement', primaryjoin='Merge.measurementId == Measurement.measurementId')
-
-
-
-class Model(db.Model):
-    __tablename__ = 'Model'
-
-    modelId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(45))
-    pdbFile = db.Column(db.String(255))
-    fitFile = db.Column(db.String(255))
-    firFile = db.Column(db.String(255))
-    logFile = db.Column(db.String(255))
-    rFactor = db.Column(db.String(45))
-    chiSqrt = db.Column(db.String(45))
-    volume = db.Column(db.String(45))
-    rg = db.Column(db.String(45))
-    dMax = db.Column(db.String(45))
-
-
-
-class ModelBuilding(db.Model):
-    __tablename__ = 'ModelBuilding'
-
-    modelBuildingId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    phasingAnalysisId = db.Column(db.ForeignKey('PhasingAnalysis.phasingAnalysisId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related phasing analysis item')
-    phasingProgramRunId = db.Column(db.ForeignKey('PhasingProgramRun.phasingProgramRunId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related program item')
-    spaceGroupId = db.Column(db.ForeignKey('SpaceGroup.spaceGroupId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='Related spaceGroup')
-    lowRes = db.Column(db.Float(asdecimal=True))
-    highRes = db.Column(db.Float(asdecimal=True))
-    recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time')
-
-    PhasingAnalysi = db.relationship('PhasingAnalysi', primaryjoin='ModelBuilding.phasingAnalysisId == PhasingAnalysi.phasingAnalysisId')
-    PhasingProgramRun = db.relationship('PhasingProgramRun', primaryjoin='ModelBuilding.phasingProgramRunId == PhasingProgramRun.phasingProgramRunId')
-    SpaceGroup = db.relationship('SpaceGroup', primaryjoin='ModelBuilding.spaceGroupId == SpaceGroup.spaceGroupId')
-
-
-
-class ModelList(db.Model):
-    __tablename__ = 'ModelList'
-
-    modelListId = db.Column(db.Integer, primary_key=True)
-    nsdFilePath = db.Column(db.String(255))
-    chi2RgFilePath = db.Column(db.String(255))
-
-
-
-class ModelToList(db.Model):
-    __tablename__ = 'ModelToList'
-
-    modelToListId = db.Column(db.Integer, primary_key=True)
-    modelId = db.Column(db.ForeignKey('Model.modelId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    modelListId = db.Column(db.ForeignKey('ModelList.modelListId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-
-    Model = db.relationship('Model', primaryjoin='ModelToList.modelId == Model.modelId')
-    ModelList = db.relationship('ModelList', primaryjoin='ModelToList.modelListId == ModelList.modelListId')
-
-
-
-class MotionCorrection(db.Model):
-    __tablename__ = 'MotionCorrection'
-
-    motionCorrectionId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId'), index=True)
-    autoProcProgramId = db.Column(db.ForeignKey('AutoProcProgram.autoProcProgramId'), index=True)
-    imageNumber = db.Column(db.SmallInteger, info='Movie number, sequential in time 1-n')
-    firstFrame = db.Column(db.SmallInteger, info='First frame of movie used')
-    lastFrame = db.Column(db.SmallInteger, info='Last frame of movie used')
-    dosePerFrame = db.Column(db.Float, info='Dose per frame, Units: e-/A^2')
-    doseWeight = db.Column(db.Float, info='Dose weight, Units: dimensionless')
-    totalMotion = db.Column(db.Float, info='Total motion, Units: A')
-    averageMotionPerFrame = db.Column(db.Float, info='Average motion per frame, Units: A')
-    driftPlotFullPath = db.Column(db.String(255), info='Full path to the drift plot')
-    micrographFullPath = db.Column(db.String(255), info='Full path to the micrograph')
-    micrographSnapshotFullPath = db.Column(db.String(255), info='Full path to a snapshot (jpg) of the micrograph')
-    patchesUsedX = db.Column(db.Integer, info='Number of patches used in x (for motioncor2)')
-    patchesUsedY = db.Column(db.Integer, info='Number of patches used in y (for motioncor2)')
-    fftFullPath = db.Column(db.String(255), info='Full path to the jpg image of the raw micrograph FFT')
-    fftCorrectedFullPath = db.Column(db.String(255), info='Full path to the jpg image of the drift corrected micrograph FFT')
-    comments = db.Column(db.String(255))
-    movieId = db.Column(db.ForeignKey('Movie.movieId'), index=True)
-
-    AutoProcProgram = db.relationship('AutoProcProgram', primaryjoin='MotionCorrection.autoProcProgramId == AutoProcProgram.autoProcProgramId')
-    DataCollection = db.relationship('DataCollection', primaryjoin='MotionCorrection.dataCollectionId == DataCollection.dataCollectionId')
-    Movie = db.relationship('Movie', primaryjoin='MotionCorrection.movieId == Movie.movieId')
-
-
-
-class MotionCorrectionDrift(db.Model):
-    __tablename__ = 'MotionCorrectionDrift'
-
-    motionCorrectionDriftId = db.Column(db.Integer, primary_key=True)
-    motionCorrectionId = db.Column(db.ForeignKey('MotionCorrection.motionCorrectionId'), index=True)
-    frameNumber = db.Column(db.SmallInteger, info='Frame number of the movie these drift values relate to')
-    deltaX = db.Column(db.Float, info='Drift in x, Units: A')
-    deltaY = db.Column(db.Float, info='Drift in y, Units: A')
-
-    MotionCorrection = db.relationship('MotionCorrection', primaryjoin='MotionCorrectionDrift.motionCorrectionId == MotionCorrection.motionCorrectionId')
-
-
-
-class MotorPosition(db.Model):
-    __tablename__ = 'MotorPosition'
-
-    motorPositionId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    phiX = db.Column(db.Float(asdecimal=True))
-    phiY = db.Column(db.Float(asdecimal=True))
-    phiZ = db.Column(db.Float(asdecimal=True))
-    sampX = db.Column(db.Float(asdecimal=True))
-    sampY = db.Column(db.Float(asdecimal=True))
-    omega = db.Column(db.Float(asdecimal=True))
-    kappa = db.Column(db.Float(asdecimal=True))
-    phi = db.Column(db.Float(asdecimal=True))
-    chi = db.Column(db.Float(asdecimal=True))
-    gridIndexY = db.Column(db.Integer)
-    gridIndexZ = db.Column(db.Integer)
-    recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time')
-
-
-
-class Movie(db.Model):
-    __tablename__ = 'Movie'
-
-    movieId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId'), index=True)
-    movieNumber = db.Column(db.Integer)
-    movieFullPath = db.Column(db.String(255))
-    createdTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
-    positionX = db.Column(db.Float)
-    positionY = db.Column(db.Float)
-    nominalDefocus = db.Column(db.Float, info='Nominal defocus, Units: A')
-
-    DataCollection = db.relationship('DataCollection', primaryjoin='Movie.dataCollectionId == DataCollection.dataCollectionId')
-
-
-
-class PDB(db.Model):
-    __tablename__ = 'PDB'
-
-    pdbId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(255))
-    contents = db.Column(db.String)
-    code = db.Column(db.String(4))
-
-
-
-class PDBEntry(db.Model):
-    __tablename__ = 'PDBEntry'
-
-    pdbEntryId = db.Column(db.Integer, primary_key=True)
-    autoProcProgramId = db.Column(db.ForeignKey('AutoProcProgram.autoProcProgramId', ondelete='CASCADE'), nullable=False, index=True)
-    code = db.Column(db.String(4))
-    cell_a = db.Column(db.Float)
-    cell_b = db.Column(db.Float)
-    cell_c = db.Column(db.Float)
-    cell_alpha = db.Column(db.Float)
-    cell_beta = db.Column(db.Float)
-    cell_gamma = db.Column(db.Float)
-    resolution = db.Column(db.Float)
-    pdbTitle = db.Column(db.String(255))
-    pdbAuthors = db.Column(db.String(600))
-    pdbDate = db.Column(db.DateTime)
-    pdbBeamlineName = db.Column(db.String(50))
-    beamlines = db.Column(db.String(100))
-    distance = db.Column(db.Float)
-    autoProcCount = db.Column(db.SmallInteger)
-    dataCollectionCount = db.Column(db.SmallInteger)
-    beamlineMatch = db.Column(db.Integer)
-    authorMatch = db.Column(db.Integer)
-
-    AutoProcProgram = db.relationship('AutoProcProgram', primaryjoin='PDBEntry.autoProcProgramId == AutoProcProgram.autoProcProgramId')
-
-
-
-class PDBEntryHasAutoProcProgram(db.Model):
-    __tablename__ = 'PDBEntry_has_AutoProcProgram'
-
-    pdbEntryHasAutoProcId = db.Column(db.Integer, primary_key=True)
-    pdbEntryId = db.Column(db.ForeignKey('PDBEntry.pdbEntryId', ondelete='CASCADE'), nullable=False, index=True)
-    autoProcProgramId = db.Column(db.ForeignKey('AutoProcProgram.autoProcProgramId', ondelete='CASCADE'), nullable=False, index=True)
-    distance = db.Column(db.Float)
-
-    AutoProcProgram = db.relationship('AutoProcProgram', primaryjoin='PDBEntryHasAutoProcProgram.autoProcProgramId == AutoProcProgram.autoProcProgramId')
-    PDBEntry = db.relationship('PDBEntry', primaryjoin='PDBEntryHasAutoProcProgram.pdbEntryId == PDBEntry.pdbEntryId')
-
-
-
-class PHPSession(db.Model):
-    __tablename__ = 'PHPSession'
-
-    id = db.Column(db.String(50), primary_key=True)
-    accessDate = db.Column(db.DateTime)
-    data = db.Column(db.String(4000))
-
-
-
-class Particle(db.Model):
-    __tablename__ = 'Particle'
-
-    particleId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    x = db.Column(db.Float)
-    y = db.Column(db.Float)
-
-    DataCollection = db.relationship('DataCollection', primaryjoin='Particle.dataCollectionId == DataCollection.dataCollectionId')
-
-
-
-class Permission(db.Model):
-    __tablename__ = 'Permission'
-
-    permissionId = db.Column(db.Integer, primary_key=True)
-    type = db.Column(db.String(15), nullable=False)
-    description = db.Column(db.String(100))
-
-    UserGroup = db.relationship('UserGroup', secondary='UserGroup_has_Permission')
-
-
-
-class Person(db.Model):
-    __tablename__ = 'Person'
-
-    personId = db.Column(db.Integer, primary_key=True)
-    laboratoryId = db.Column(db.ForeignKey('Laboratory.laboratoryId'), index=True)
-    siteId = db.Column(db.Integer, index=True)
-    personUUID = db.Column(db.String(45))
-    familyName = db.Column(db.String(100), index=True)
-    givenName = db.Column(db.String(45))
-    title = db.Column(db.String(45))
-    emailAddress = db.Column(db.String(60))
-    phoneNumber = db.Column(db.String(45))
-    login = db.Column(db.String(45), unique=True)
-    faxNumber = db.Column(db.String(45))
-    recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time')
-    cache = db.Column(db.Text)
-    externalId = db.Column(db.BINARY(16))
-
-    Laboratory = db.relationship('Laboratory', primaryjoin='Person.laboratoryId == Laboratory.laboratoryId')
-    Project = db.relationship('Project', secondary='Project_has_Person')
-    UserGroup = db.relationship('UserGroup', secondary='UserGroup_has_Person')
-
-
-
-class Phasing(db.Model):
-    __tablename__ = 'Phasing'
-
-    phasingId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    phasingAnalysisId = db.Column(db.ForeignKey('PhasingAnalysis.phasingAnalysisId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related phasing analysis item')
-    phasingProgramRunId = db.Column(db.ForeignKey('PhasingProgramRun.phasingProgramRunId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related program item')
-    spaceGroupId = db.Column(db.ForeignKey('SpaceGroup.spaceGroupId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='Related spaceGroup')
-    method = db.Column(db.Enum('solvent flattening', 'solvent flipping'), info='phasing method')
-    solventContent = db.Column(db.Float(asdecimal=True))
-    enantiomorph = db.Column(db.Integer, info='0 or 1')
-    lowRes = db.Column(db.Float(asdecimal=True))
-    highRes = db.Column(db.Float(asdecimal=True))
-    recordTimeStamp = db.Column(db.DateTime, server_default=db.FetchedValue())
-
-    PhasingAnalysi = db.relationship('PhasingAnalysi', primaryjoin='Phasing.phasingAnalysisId == PhasingAnalysi.phasingAnalysisId')
-    PhasingProgramRun = db.relationship('PhasingProgramRun', primaryjoin='Phasing.phasingProgramRunId == PhasingProgramRun.phasingProgramRunId')
-    SpaceGroup = db.relationship('SpaceGroup', primaryjoin='Phasing.spaceGroupId == SpaceGroup.spaceGroupId')
-
-
-
-class PhasingAnalysi(db.Model):
-    __tablename__ = 'PhasingAnalysis'
-
-    phasingAnalysisId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time')
-
-
-
-class PhasingProgramAttachment(db.Model):
-    __tablename__ = 'PhasingProgramAttachment'
-
-    phasingProgramAttachmentId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    phasingProgramRunId = db.Column(db.ForeignKey('PhasingProgramRun.phasingProgramRunId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related program item')
-    fileType = db.Column(db.Enum('Map', 'Logfile', 'PDB', 'CSV', 'INS', 'RES', 'TXT'), info='file type')
-    fileName = db.Column(db.String(45), info='file name')
-    filePath = db.Column(db.String(255), info='file path')
-    recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time')
-
-    PhasingProgramRun = db.relationship('PhasingProgramRun', primaryjoin='PhasingProgramAttachment.phasingProgramRunId == PhasingProgramRun.phasingProgramRunId')
-
-
-
-class PhasingProgramRun(db.Model):
-    __tablename__ = 'PhasingProgramRun'
-
-    phasingProgramRunId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    phasingCommandLine = db.Column(db.String(255), info='Command line for phasing')
-    phasingPrograms = db.Column(db.String(255), info='Phasing programs (comma separated)')
-    phasingStatus = db.Column(db.Integer, info='success (1) / fail (0)')
-    phasingMessage = db.Column(db.String(255), info='warning, error,...')
-    phasingStartTime = db.Column(db.DateTime, info='Processing start time')
-    phasingEndTime = db.Column(db.DateTime, info='Processing end time')
-    phasingEnvironment = db.Column(db.String(255), info='Cpus, Nodes,...')
-    recordTimeStamp = db.Column(db.DateTime, server_default=db.FetchedValue())
-
-
-
-class PhasingStatistic(db.Model):
-    __tablename__ = 'PhasingStatistics'
-
-    phasingStatisticsId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    phasingHasScalingId1 = db.Column(db.ForeignKey('Phasing_has_Scaling.phasingHasScalingId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='the dataset in question')
phasingHasScalingId2 = db.Column(db.ForeignKey('Phasing_has_Scaling.phasingHasScalingId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='if this is MIT or MAD, which scaling are being compared, null otherwise') - phasingStepId = db.Column(db.ForeignKey('PhasingStep.phasingStepId'), index=True) - numberOfBins = db.Column(db.Integer, info='the total number of bins') - binNumber = db.Column(db.Integer, info='binNumber, 999 for overall') - lowRes = db.Column(db.Float(asdecimal=True), info='low resolution cutoff of this binfloat') - highRes = db.Column(db.Float(asdecimal=True), info='high resolution cutoff of this binfloat') - metric = db.Column(db.Enum('Rcullis', 'Average Fragment Length', 'Chain Count', 'Residues Count', 'CC', 'PhasingPower', 'FOM', '', 'Best CC', 'CC(1/2)', 'Weak CC', 'CFOM', 'Pseudo_free_CC', 'CC of partial model'), info='metric') - statisticsValue = db.Column(db.Float(asdecimal=True), info='the statistics value') - nReflections = db.Column(db.Integer) - recordTimeStamp = db.Column(db.DateTime, server_default=db.FetchedValue()) - - Phasing_has_Scaling = db.relationship('PhasingHasScaling', primaryjoin='PhasingStatistic.phasingHasScalingId1 == PhasingHasScaling.phasingHasScalingId') - Phasing_has_Scaling1 = db.relationship('PhasingHasScaling', primaryjoin='PhasingStatistic.phasingHasScalingId2 == PhasingHasScaling.phasingHasScalingId') - PhasingStep = db.relationship('PhasingStep', primaryjoin='PhasingStatistic.phasingStepId == PhasingStep.phasingStepId') - - - -class PhasingStep(db.Model): - __tablename__ = 'PhasingStep' - - phasingStepId = db.Column(db.Integer, primary_key=True) - previousPhasingStepId = db.Column(db.Integer) - programRunId = db.Column(db.ForeignKey('PhasingProgramRun.phasingProgramRunId'), index=True) - spaceGroupId = db.Column(db.ForeignKey('SpaceGroup.spaceGroupId'), index=True) - autoProcScalingId = db.Column(db.ForeignKey('AutoProcScaling.autoProcScalingId'), index=True) - phasingAnalysisId = db.Column(db.Integer, index=True) - phasingStepType = db.Column(db.Enum('PREPARE', 'SUBSTRUCTUREDETERMINATION', 'PHASING', 'MODELBUILDING')) - method = db.Column(db.String(45)) - solventContent = db.Column(db.String(45)) - enantiomorph = db.Column(db.String(45)) - lowRes = db.Column(db.String(45)) - highRes = db.Column(db.String(45)) - recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - - AutoProcScaling = db.relationship('AutoProcScaling', primaryjoin='PhasingStep.autoProcScalingId == AutoProcScaling.autoProcScalingId') - PhasingProgramRun = db.relationship('PhasingProgramRun', primaryjoin='PhasingStep.programRunId == PhasingProgramRun.phasingProgramRunId') - SpaceGroup = db.relationship('SpaceGroup', primaryjoin='PhasingStep.spaceGroupId == SpaceGroup.spaceGroupId') - - - -class PhasingHasScaling(db.Model): - __tablename__ = 'Phasing_has_Scaling' - - phasingHasScalingId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - phasingAnalysisId = db.Column(db.ForeignKey('PhasingAnalysis.phasingAnalysisId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related phasing analysis item') - autoProcScalingId = db.Column(db.ForeignKey('AutoProcScaling.autoProcScalingId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related autoProcScaling item') - datasetNumber = db.Column(db.Integer, info='serial number of the dataset and always reserve 0 for the reference') - recordTimeStamp = db.Column(db.DateTime, 
server_default=db.FetchedValue())
-
-    AutoProcScaling = db.relationship('AutoProcScaling', primaryjoin='PhasingHasScaling.autoProcScalingId == AutoProcScaling.autoProcScalingId')
-    PhasingAnalysi = db.relationship('PhasingAnalysi', primaryjoin='PhasingHasScaling.phasingAnalysisId == PhasingAnalysi.phasingAnalysisId')
-
-
-
-class PlateGroup(db.Model):
-    __tablename__ = 'PlateGroup'
-
-    plateGroupId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(255))
-    storageTemperature = db.Column(db.String(45))
-
-
-
-class PlateType(db.Model):
-    __tablename__ = 'PlateType'
-
-    PlateTypeId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(45))
-    description = db.Column(db.String(45))
-    shape = db.Column(db.String(45))
-    rowCount = db.Column(db.Integer)
-    columnCount = db.Column(db.Integer)
-    experimentId = db.Column(db.Integer, index=True)
-
-
-
-class Position(db.Model):
-    __tablename__ = 'Position'
-
-    positionId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    relativePositionId = db.Column(db.ForeignKey('Position.positionId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='relative position, null otherwise')
-    posX = db.Column(db.Float(asdecimal=True))
-    posY = db.Column(db.Float(asdecimal=True))
-    posZ = db.Column(db.Float(asdecimal=True))
-    scale = db.Column(db.Float(asdecimal=True))
-    recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time')
-    X = db.Column(db.Float(asdecimal=True), server_default=db.FetchedValue())
-    Y = db.Column(db.Float(asdecimal=True), server_default=db.FetchedValue())
-    Z = db.Column(db.Float(asdecimal=True), server_default=db.FetchedValue())
-
-    parent = db.relationship('Position', remote_side=[positionId], primaryjoin='Position.relativePositionId == Position.positionId')
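The `Position` model just deleted above is the one self-referential table in this span: `relativePositionId` points back at `Position.positionId`, and `remote_side=[positionId]` is what tells SQLAlchemy which side of the self-join is the parent row. A minimal, hypothetical sketch of the same pattern in plain SQLAlchemy (standalone illustration, not part of this diff; `Base` and the engine are assumptions):

```python
# Sketch of a self-referential relationship using remote_side.
from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()


class Position(Base):
    __tablename__ = "Position"

    positionId = Column(Integer, primary_key=True)
    relativePositionId = Column(ForeignKey("Position.positionId"))

    # remote_side marks positionId as the "one" side of the self-join:
    # parent is the row whose positionId equals this row's relativePositionId.
    parent = relationship("Position", remote_side=[positionId])


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    origin = Position()
    relative = Position(parent=origin)  # fills relativePositionId on flush
    session.add_all([origin, relative])
    session.commit()
    assert relative.relativePositionId == origin.positionId
```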
-
-
-
-class PreparePhasingDatum(db.Model):
-    __tablename__ = 'PreparePhasingData'
-
-    preparePhasingDataId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    phasingAnalysisId = db.Column(db.ForeignKey('PhasingAnalysis.phasingAnalysisId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related phasing analysis item')
-    phasingProgramRunId = db.Column(db.ForeignKey('PhasingProgramRun.phasingProgramRunId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related program item')
-    spaceGroupId = db.Column(db.ForeignKey('SpaceGroup.spaceGroupId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='Related spaceGroup')
-    lowRes = db.Column(db.Float(asdecimal=True))
-    highRes = db.Column(db.Float(asdecimal=True))
-    recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time')
-
-    PhasingAnalysi = db.relationship('PhasingAnalysi', primaryjoin='PreparePhasingDatum.phasingAnalysisId == PhasingAnalysi.phasingAnalysisId')
-    PhasingProgramRun = db.relationship('PhasingProgramRun', primaryjoin='PreparePhasingDatum.phasingProgramRunId == PhasingProgramRun.phasingProgramRunId')
-    SpaceGroup = db.relationship('SpaceGroup', primaryjoin='PreparePhasingDatum.spaceGroupId == SpaceGroup.spaceGroupId')
-
-
-
-class ProcessingJob(db.Model):
-    __tablename__ = 'ProcessingJob'
-
-    processingJobId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId'), index=True)
-    displayName = db.Column(db.String(80), info='xia2, fast_dp, dimple, etc')
-    comments = db.Column(db.String(255), info='For users to annotate the job and see the motivation for the job')
-    recordTimestamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='When job was submitted')
-    recipe = db.Column(db.String(50), info='What we want to run (xia, dimple, etc).')
-    automatic = db.Column(db.Integer, info='Whether this processing job was triggered automatically or not')
-
-    DataCollection = db.relationship('DataCollection', primaryjoin='ProcessingJob.dataCollectionId == DataCollection.dataCollectionId')
-
-
-
-class ProcessingJobImageSweep(db.Model):
-    __tablename__ = 'ProcessingJobImageSweep'
-
-    processingJobImageSweepId = db.Column(db.Integer, primary_key=True)
-    processingJobId = db.Column(db.ForeignKey('ProcessingJob.processingJobId'), index=True)
-    dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId'), index=True)
-    startImage = db.Column(db.Integer)
-    endImage = db.Column(db.Integer)
-
-    DataCollection = db.relationship('DataCollection', primaryjoin='ProcessingJobImageSweep.dataCollectionId == DataCollection.dataCollectionId')
-    ProcessingJob = db.relationship('ProcessingJob', primaryjoin='ProcessingJobImageSweep.processingJobId == ProcessingJob.processingJobId')
-
-
-
-class ProcessingJobParameter(db.Model):
-    __tablename__ = 'ProcessingJobParameter'
-
-    processingJobParameterId = db.Column(db.Integer, primary_key=True)
-    processingJobId = db.Column(db.ForeignKey('ProcessingJob.processingJobId'), index=True)
-    parameterKey = db.Column(db.String(80), info='E.g. resolution, spacegroup, pipeline')
-    parameterValue = db.Column(db.String(1024))
-
-    ProcessingJob = db.relationship('ProcessingJob', primaryjoin='ProcessingJobParameter.processingJobId == ProcessingJob.processingJobId')
-
-
-
-class Project(db.Model):
-    __tablename__ = 'Project'
-
-    projectId = db.Column(db.Integer, primary_key=True)
-    personId = db.Column(db.ForeignKey('Person.personId'), index=True)
-    title = db.Column(db.String(200))
-    acronym = db.Column(db.String(100))
-    owner = db.Column(db.String(50))
-
-    Person = db.relationship('Person', primaryjoin='Project.personId == Person.personId')
-    Protein = db.relationship('Protein', secondary='Project_has_Protein')
-    BLSession = db.relationship('BLSession', secondary='Project_has_Session')
-    Shipping = db.relationship('Shipping', secondary='Project_has_Shipping')
-    XFEFluorescenceSpectrum = db.relationship('XFEFluorescenceSpectrum', secondary='Project_has_XFEFSpectrum')
-
-
-
-t_Project_has_BLSample = db.Table(
-    'Project_has_BLSample',
-    db.Column('projectId', db.ForeignKey('Project.projectId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False),
-    db.Column('blSampleId', db.ForeignKey('BLSample.blSampleId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True)
-)
-
-
-
-t_Project_has_DCGroup = db.Table(
-    'Project_has_DCGroup',
-    db.Column('projectId', db.ForeignKey('Project.projectId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False),
-    db.Column('dataCollectionGroupId', db.ForeignKey('DataCollectionGroup.dataCollectionGroupId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True)
-)
-
-
-
-t_Project_has_EnergyScan = db.Table(
-    'Project_has_EnergyScan',
-    db.Column('projectId', db.ForeignKey('Project.projectId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False),
-    db.Column('energyScanId', db.ForeignKey('EnergyScan.energyScanId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True)
-)
-
-
-
-t_Project_has_Person = db.Table(
- 'Project_has_Person', - db.Column('projectId', db.ForeignKey('Project.projectId', ondelete='CASCADE'), primary_key=True, nullable=False), - db.Column('personId', db.ForeignKey('Person.personId', ondelete='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -t_Project_has_Protein = db.Table( - 'Project_has_Protein', - db.Column('projectId', db.ForeignKey('Project.projectId', ondelete='CASCADE'), primary_key=True, nullable=False), - db.Column('proteinId', db.ForeignKey('Protein.proteinId', ondelete='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -t_Project_has_Session = db.Table( - 'Project_has_Session', - db.Column('projectId', db.ForeignKey('Project.projectId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False), - db.Column('sessionId', db.ForeignKey('BLSession.sessionId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -t_Project_has_Shipping = db.Table( - 'Project_has_Shipping', - db.Column('projectId', db.ForeignKey('Project.projectId', ondelete='CASCADE'), primary_key=True, nullable=False), - db.Column('shippingId', db.ForeignKey('Shipping.shippingId', ondelete='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -class ProjectHasUser(db.Model): - __tablename__ = 'Project_has_User' - - projecthasuserid = db.Column(db.Integer, primary_key=True) - projectid = db.Column(db.ForeignKey('Project.projectId'), nullable=False, index=True) - username = db.Column(db.String(15)) - - Project = db.relationship('Project', primaryjoin='ProjectHasUser.projectid == Project.projectId') - - - -t_Project_has_XFEFSpectrum = db.Table( - 'Project_has_XFEFSpectrum', - db.Column('projectId', db.ForeignKey('Project.projectId', ondelete='CASCADE'), primary_key=True, nullable=False), - db.Column('xfeFluorescenceSpectrumId', db.ForeignKey('XFEFluorescenceSpectrum.xfeFluorescenceSpectrumId', ondelete='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -class Proposal(db.Model): - __tablename__ = 'Proposal' - __table_args__ = ( - db.Index('Proposal_FKIndexCodeNumber', 'proposalCode', 'proposalNumber'), - ) - - proposalId = db.Column(db.Integer, primary_key=True) - personId = db.Column(db.ForeignKey('Person.personId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - title = db.Column(db.String(200)) - proposalCode = db.Column(db.String(45)) - proposalNumber = db.Column(db.String(45)) - bltimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - proposalType = db.Column(db.String(2), info='Proposal type: MX, BX') - externalId = db.Column(db.BINARY(16)) - state = db.Column(db.Enum('Open', 'Closed', 'Cancelled'), server_default=db.FetchedValue()) - - Person = db.relationship('Person', primaryjoin='Proposal.personId == Person.personId') - - - -class ProposalHasPerson(db.Model): - __tablename__ = 'ProposalHasPerson' - - proposalHasPersonId = db.Column(db.Integer, primary_key=True) - proposalId = db.Column(db.ForeignKey('Proposal.proposalId'), nullable=False, index=True) - personId = db.Column(db.ForeignKey('Person.personId'), nullable=False, index=True) - role = db.Column(db.Enum('Co-Investigator', 'Principal Investigator', 'Alternate Contact')) - - Person = db.relationship('Person', primaryjoin='ProposalHasPerson.personId == Person.personId') - Proposal = db.relationship('Proposal', primaryjoin='ProposalHasPerson.proposalId == Proposal.proposalId') - - - -class Protein(db.Model): - __tablename__ = 
'Protein' - __table_args__ = ( - db.Index('ProteinAcronym_Index', 'proposalId', 'acronym'), - ) - - proteinId = db.Column(db.Integer, primary_key=True) - proposalId = db.Column(db.ForeignKey('Proposal.proposalId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - name = db.Column(db.String(255)) - acronym = db.Column(db.String(45), index=True) - molecularMass = db.Column(db.Float(asdecimal=True)) - proteinType = db.Column(db.String(45)) - personId = db.Column(db.Integer, index=True) - bltimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - isCreatedBySampleSheet = db.Column(db.Integer, server_default=db.FetchedValue()) - sequence = db.Column(db.Text) - MOD_ID = db.Column(db.String(20)) - componentTypeId = db.Column(db.ForeignKey('ComponentType.componentTypeId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - concentrationTypeId = db.Column(db.ForeignKey('ConcentrationType.concentrationTypeId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - _global = db.Column('global', db.Integer, server_default=db.FetchedValue()) - externalId = db.Column(db.BINARY(16)) - density = db.Column(db.Float) - abundance = db.Column(db.Float, info='Deprecated') - - ComponentType = db.relationship('ComponentType', primaryjoin='Protein.componentTypeId == ComponentType.componentTypeId') - ConcentrationType = db.relationship('ConcentrationType', primaryjoin='Protein.concentrationTypeId == ConcentrationType.concentrationTypeId') - Proposal = db.relationship('Proposal', primaryjoin='Protein.proposalId == Proposal.proposalId') - ComponentSubType = db.relationship('ComponentSubType', secondary='Component_has_SubType') - - - -class ProteinHasPDB(db.Model): - __tablename__ = 'Protein_has_PDB' - - proteinhaspdbid = db.Column(db.Integer, primary_key=True) - proteinid = db.Column(db.ForeignKey('Protein.proteinId'), nullable=False, index=True) - pdbid = db.Column(db.ForeignKey('PDB.pdbId'), nullable=False, index=True) - - PDB = db.relationship('PDB', primaryjoin='ProteinHasPDB.pdbid == PDB.pdbId') - Protein = db.relationship('Protein', primaryjoin='ProteinHasPDB.proteinid == Protein.proteinId') - - - -class Reprocessing(db.Model): - __tablename__ = 'Reprocessing' - - reprocessingId = db.Column(db.Integer, primary_key=True) - dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId'), index=True) - displayName = db.Column(db.String(80), info='xia2, fast_dp, dimple, etc') - comments = db.Column(db.String(255), info='For users to annotate the job and see the motivation for the job') - recordTimestamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='When job was submitted') - recipe = db.Column(db.String(50), info='What we want to run (xia, dimple, etc) ') - automatic = db.Column(db.Integer, info='Whether this processing was triggered automatically or not') - - DataCollection = db.relationship('DataCollection', primaryjoin='Reprocessing.dataCollectionId == DataCollection.dataCollectionId') - - - -class ReprocessingImageSweep(db.Model): - __tablename__ = 'ReprocessingImageSweep' - - reprocessingImageSweepId = db.Column(db.Integer, primary_key=True) - reprocessingId = db.Column(db.ForeignKey('Reprocessing.reprocessingId'), index=True) - dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId'), index=True) - startImage = db.Column(db.Integer) - endImage = db.Column(db.Integer) - - DataCollection = db.relationship('DataCollection', 
primaryjoin='ReprocessingImageSweep.dataCollectionId == DataCollection.dataCollectionId') - Reprocessing = db.relationship('Reprocessing', primaryjoin='ReprocessingImageSweep.reprocessingId == Reprocessing.reprocessingId') - - - -class ReprocessingParameter(db.Model): - __tablename__ = 'ReprocessingParameter' - - reprocessingParameterId = db.Column(db.Integer, primary_key=True) - reprocessingId = db.Column(db.ForeignKey('Reprocessing.reprocessingId'), index=True) - parameterKey = db.Column(db.String(80), info='E.g. resolution, spacegroup, pipeline') - parameterValue = db.Column(db.String(255)) - - Reprocessing = db.relationship('Reprocessing', primaryjoin='ReprocessingParameter.reprocessingId == Reprocessing.reprocessingId') - - - -class RobotAction(db.Model): - __tablename__ = 'RobotAction' - - robotActionId = db.Column(db.Integer, primary_key=True) - blsessionId = db.Column(db.ForeignKey('BLSession.sessionId'), nullable=False, index=True) - blsampleId = db.Column(db.ForeignKey('BLSample.blSampleId'), index=True) - actionType = db.Column(db.Enum('LOAD', 'UNLOAD', 'DISPOSE', 'STORE', 'WASH', 'ANNEAL')) - startTimestamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - endTimestamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - status = db.Column(db.Enum('SUCCESS', 'ERROR', 'CRITICAL', 'WARNING', 'EPICSFAIL', 'COMMANDNOTSENT')) - message = db.Column(db.String(255)) - containerLocation = db.Column(db.SmallInteger) - dewarLocation = db.Column(db.SmallInteger) - sampleBarcode = db.Column(db.String(45)) - xtalSnapshotBefore = db.Column(db.String(255)) - xtalSnapshotAfter = db.Column(db.String(255)) - - BLSample = db.relationship('BLSample', primaryjoin='RobotAction.blsampleId == BLSample.blSampleId') - BLSession = db.relationship('BLSession', primaryjoin='RobotAction.blsessionId == BLSession.sessionId') - - - -class Run(db.Model): - __tablename__ = 'Run' - - runId = db.Column(db.Integer, primary_key=True) - timePerFrame = db.Column(db.String(45)) - timeStart = db.Column(db.String(45)) - timeEnd = db.Column(db.String(45)) - storageTemperature = db.Column(db.String(45)) - exposureTemperature = db.Column(db.String(45)) - spectrophotometer = db.Column(db.String(45)) - energy = db.Column(db.String(45)) - creationDate = db.Column(db.DateTime) - frameAverage = db.Column(db.String(45)) - frameCount = db.Column(db.String(45)) - transmission = db.Column(db.String(45)) - beamCenterX = db.Column(db.String(45)) - beamCenterY = db.Column(db.String(45)) - pixelSizeX = db.Column(db.String(45)) - pixelSizeY = db.Column(db.String(45)) - radiationRelative = db.Column(db.String(45)) - radiationAbsolute = db.Column(db.String(45)) - normalization = db.Column(db.String(45)) - - - -t_SAFETYREQUEST = db.Table( - 'SAFETYREQUEST', - db.Column('SAFETYREQUESTID', db.Numeric(10, 0)), - db.Column('XMLDOCUMENTID', db.Numeric(10, 0)), - db.Column('PROTEINID', db.Numeric(10, 0)), - db.Column('PROJECTCODE', db.String(45)), - db.Column('SUBMISSIONDATE', db.DateTime), - db.Column('RESPONSE', db.Numeric(3, 0)), - db.Column('REPONSEDATE', db.DateTime), - db.Column('RESPONSEDETAILS', db.String(255)) -) - - - -class SAMPLECELL(db.Model): - __tablename__ = 'SAMPLECELL' - - SAMPLECELLID = db.Column(db.Integer, primary_key=True) - SAMPLEEXPOSUREUNITID = db.Column(db.Integer) - ID = db.Column(db.String(45)) - NAME = db.Column(db.String(45)) - DIAMETER = db.Column(db.String(45)) - MATERIAL = db.Column(db.String(45)) - - - -class SAMPLEEXPOSUREUNIT(db.Model): - __tablename__ = 
'SAMPLEEXPOSUREUNIT' - - SAMPLEEXPOSUREUNITID = db.Column(db.Integer, primary_key=True) - ID = db.Column(db.String(45)) - PATHLENGTH = db.Column(db.String(45)) - VOLUME = db.Column(db.String(45)) - - - -class SAXSDATACOLLECTIONGROUP(db.Model): - __tablename__ = 'SAXSDATACOLLECTIONGROUP' - - DATACOLLECTIONGROUPID = db.Column(db.Integer, primary_key=True) - DEFAULTDATAACQUISITIONID = db.Column(db.Integer) - SAXSDATACOLLECTIONARRAYID = db.Column(db.Integer) - - - -class SWOnceToken(db.Model): - __tablename__ = 'SW_onceToken' - - onceTokenId = db.Column(db.Integer, primary_key=True) - token = db.Column(db.String(128)) - personId = db.Column(db.ForeignKey('Person.personId'), index=True) - proposalId = db.Column(db.ForeignKey('Proposal.proposalId'), index=True) - validity = db.Column(db.String(200)) - recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - - Person = db.relationship('Person', primaryjoin='SWOnceToken.personId == Person.personId') - Proposal = db.relationship('Proposal', primaryjoin='SWOnceToken.proposalId == Proposal.proposalId') - - - -class SafetyLevel(db.Model): - __tablename__ = 'SafetyLevel' - - safetyLevelId = db.Column(db.Integer, primary_key=True) - code = db.Column(db.String(45)) - description = db.Column(db.String(45)) - - - -class SamplePlate(db.Model): - __tablename__ = 'SamplePlate' - - samplePlateId = db.Column(db.Integer, primary_key=True) - BLSESSIONID = db.Column(db.Integer) - plateGroupId = db.Column(db.ForeignKey('PlateGroup.plateGroupId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - plateTypeId = db.Column(db.ForeignKey('PlateType.PlateTypeId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - instructionSetId = db.Column(db.ForeignKey('InstructionSet.instructionSetId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - boxId = db.Column(db.Integer) - name = db.Column(db.String(45)) - slotPositionRow = db.Column(db.String(45)) - slotPositionColumn = db.Column(db.String(45)) - storageTemperature = db.Column(db.String(45)) - experimentId = db.Column(db.ForeignKey('Experiment.experimentId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - - Experiment = db.relationship('Experiment', primaryjoin='SamplePlate.experimentId == Experiment.experimentId') - InstructionSet = db.relationship('InstructionSet', primaryjoin='SamplePlate.instructionSetId == InstructionSet.instructionSetId') - PlateGroup = db.relationship('PlateGroup', primaryjoin='SamplePlate.plateGroupId == PlateGroup.plateGroupId') - PlateType = db.relationship('PlateType', primaryjoin='SamplePlate.plateTypeId == PlateType.PlateTypeId') - - - -class SamplePlatePosition(db.Model): - __tablename__ = 'SamplePlatePosition' - - samplePlatePositionId = db.Column(db.Integer, primary_key=True) - samplePlateId = db.Column(db.ForeignKey('SamplePlate.samplePlateId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - rowNumber = db.Column(db.Integer) - columnNumber = db.Column(db.Integer) - volume = db.Column(db.String(45)) - - SamplePlate = db.relationship('SamplePlate', primaryjoin='SamplePlatePosition.samplePlateId == SamplePlate.samplePlateId') - - - -class SaxsDataCollection(db.Model): - __tablename__ = 'SaxsDataCollection' - - dataCollectionId = db.Column(db.Integer, primary_key=True) - BLSESSIONID = db.Column(db.Integer) - experimentId = db.Column(db.ForeignKey('Experiment.experimentId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - comments = db.Column(db.String(5120)) - - Experiment = 
db.relationship('Experiment', primaryjoin='SaxsDataCollection.experimentId == Experiment.experimentId') - - - -class ScanParametersModel(db.Model): - __tablename__ = 'ScanParametersModel' - - scanParametersModelId = db.Column(db.Integer, primary_key=True) - scanParametersServiceId = db.Column(db.ForeignKey('ScanParametersService.scanParametersServiceId', onupdate='CASCADE'), index=True) - dataCollectionPlanId = db.Column(db.ForeignKey('DiffractionPlan.diffractionPlanId', onupdate='CASCADE'), index=True) - sequenceNumber = db.Column(db.Integer) - start = db.Column(db.Float(asdecimal=True)) - stop = db.Column(db.Float(asdecimal=True)) - step = db.Column(db.Float(asdecimal=True)) - array = db.Column(db.Text) - duration = db.Column(db.Integer, info='Duration for parameter change in seconds') - - DiffractionPlan = db.relationship('DiffractionPlan', primaryjoin='ScanParametersModel.dataCollectionPlanId == DiffractionPlan.diffractionPlanId') - ScanParametersService = db.relationship('ScanParametersService', primaryjoin='ScanParametersModel.scanParametersServiceId == ScanParametersService.scanParametersServiceId') - - - -class ScanParametersService(db.Model): - __tablename__ = 'ScanParametersService' - - scanParametersServiceId = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(45)) - description = db.Column(db.String(45)) - - - -class Schedule(db.Model): - __tablename__ = 'Schedule' - - scheduleId = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(45)) - - - -class ScheduleComponent(db.Model): - __tablename__ = 'ScheduleComponent' - - scheduleComponentId = db.Column(db.Integer, primary_key=True) - scheduleId = db.Column(db.ForeignKey('Schedule.scheduleId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - offset_hours = db.Column(db.Integer) - inspectionTypeId = db.Column(db.ForeignKey('InspectionType.inspectionTypeId', ondelete='CASCADE'), index=True) - - InspectionType = db.relationship('InspectionType', primaryjoin='ScheduleComponent.inspectionTypeId == InspectionType.inspectionTypeId') - Schedule = db.relationship('Schedule', primaryjoin='ScheduleComponent.scheduleId == Schedule.scheduleId') - - - -class SchemaStatu(db.Model): - __tablename__ = 'SchemaStatus' - - schemaStatusId = db.Column(db.Integer, primary_key=True) - scriptName = db.Column(db.String(100), nullable=False, unique=True) - schemaStatus = db.Column(db.String(10)) - recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - - - -class Screen(db.Model): - __tablename__ = 'Screen' - - screenId = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(45)) - proposalId = db.Column(db.ForeignKey('Proposal.proposalId'), index=True) - _global = db.Column('global', db.Integer) - - Proposal = db.relationship('Proposal', primaryjoin='Screen.proposalId == Proposal.proposalId') - - - -class ScreenComponent(db.Model): - __tablename__ = 'ScreenComponent' - - screenComponentId = db.Column(db.Integer, primary_key=True) - screenComponentGroupId = db.Column(db.ForeignKey('ScreenComponentGroup.screenComponentGroupId'), nullable=False, index=True) - componentId = db.Column(db.ForeignKey('Protein.proteinId'), index=True) - concentration = db.Column(db.Float) - pH = db.Column(db.Float) - - Protein = db.relationship('Protein', primaryjoin='ScreenComponent.componentId == Protein.proteinId') - ScreenComponentGroup = db.relationship('ScreenComponentGroup', primaryjoin='ScreenComponent.screenComponentGroupId == 
ScreenComponentGroup.screenComponentGroupId') - - - -class ScreenComponentGroup(db.Model): - __tablename__ = 'ScreenComponentGroup' - - screenComponentGroupId = db.Column(db.Integer, primary_key=True) - screenId = db.Column(db.ForeignKey('Screen.screenId'), nullable=False, index=True) - position = db.Column(db.SmallInteger) - - Screen = db.relationship('Screen', primaryjoin='ScreenComponentGroup.screenId == Screen.screenId') - - - -class Screening(db.Model): - __tablename__ = 'Screening' - - screeningId = db.Column(db.Integer, primary_key=True) - dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - bltimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - programVersion = db.Column(db.String(45)) - comments = db.Column(db.String(255)) - shortComments = db.Column(db.String(20)) - diffractionPlanId = db.Column(db.Integer, index=True, info='references DiffractionPlan') - dataCollectionGroupId = db.Column(db.ForeignKey('DataCollectionGroup.dataCollectionGroupId'), index=True) - xmlSampleInformation = db.Column(db.LONGBLOB) - - DataCollectionGroup = db.relationship('DataCollectionGroup', primaryjoin='Screening.dataCollectionGroupId == DataCollectionGroup.dataCollectionGroupId') - DataCollection = db.relationship('DataCollection', primaryjoin='Screening.dataCollectionId == DataCollection.dataCollectionId') - - - -class ScreeningInput(db.Model): - __tablename__ = 'ScreeningInput' - - screeningInputId = db.Column(db.Integer, primary_key=True) - screeningId = db.Column(db.ForeignKey('Screening.screeningId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - beamX = db.Column(db.Float) - beamY = db.Column(db.Float) - rmsErrorLimits = db.Column(db.Float) - minimumFractionIndexed = db.Column(db.Float) - maximumFractionRejected = db.Column(db.Float) - minimumSignalToNoise = db.Column(db.Float) - diffractionPlanId = db.Column(db.Integer, info='references DiffractionPlan table') - xmlSampleInformation = db.Column(db.LONGBLOB) - - Screening = db.relationship('Screening', primaryjoin='ScreeningInput.screeningId == Screening.screeningId') - - - -class ScreeningOutput(db.Model): - __tablename__ = 'ScreeningOutput' - - screeningOutputId = db.Column(db.Integer, primary_key=True) - screeningId = db.Column(db.ForeignKey('Screening.screeningId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - statusDescription = db.Column(db.String(1024)) - rejectedReflections = db.Column(db.Integer) - resolutionObtained = db.Column(db.Float) - spotDeviationR = db.Column(db.Float) - spotDeviationTheta = db.Column(db.Float) - beamShiftX = db.Column(db.Float) - beamShiftY = db.Column(db.Float) - numSpotsFound = db.Column(db.Integer) - numSpotsUsed = db.Column(db.Integer) - numSpotsRejected = db.Column(db.Integer) - mosaicity = db.Column(db.Float) - iOverSigma = db.Column(db.Float) - diffractionRings = db.Column(db.Integer) - SCREENINGSUCCESS = db.Column(db.Integer, server_default=db.FetchedValue(), info='Column to be deleted') - mosaicityEstimated = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue()) - rankingResolution = db.Column(db.Float(asdecimal=True)) - program = db.Column(db.String(45)) - doseTotal = db.Column(db.Float(asdecimal=True)) - totalExposureTime = db.Column(db.Float(asdecimal=True)) - totalRotationRange = db.Column(db.Float(asdecimal=True)) - totalNumberOfImages = 
db.Column(db.Integer) - rFriedel = db.Column(db.Float(asdecimal=True)) - indexingSuccess = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue()) - strategySuccess = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue()) - alignmentSuccess = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue()) - - Screening = db.relationship('Screening', primaryjoin='ScreeningOutput.screeningId == Screening.screeningId') - - - -class ScreeningOutputLattice(db.Model): - __tablename__ = 'ScreeningOutputLattice' - - screeningOutputLatticeId = db.Column(db.Integer, primary_key=True) - screeningOutputId = db.Column(db.ForeignKey('ScreeningOutput.screeningOutputId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - spaceGroup = db.Column(db.String(45)) - pointGroup = db.Column(db.String(45)) - bravaisLattice = db.Column(db.String(45)) - rawOrientationMatrix_a_x = db.Column(db.Float) - rawOrientationMatrix_a_y = db.Column(db.Float) - rawOrientationMatrix_a_z = db.Column(db.Float) - rawOrientationMatrix_b_x = db.Column(db.Float) - rawOrientationMatrix_b_y = db.Column(db.Float) - rawOrientationMatrix_b_z = db.Column(db.Float) - rawOrientationMatrix_c_x = db.Column(db.Float) - rawOrientationMatrix_c_y = db.Column(db.Float) - rawOrientationMatrix_c_z = db.Column(db.Float) - unitCell_a = db.Column(db.Float) - unitCell_b = db.Column(db.Float) - unitCell_c = db.Column(db.Float) - unitCell_alpha = db.Column(db.Float) - unitCell_beta = db.Column(db.Float) - unitCell_gamma = db.Column(db.Float) - bltimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - labelitIndexing = db.Column(db.Integer, server_default=db.FetchedValue()) - - ScreeningOutput = db.relationship('ScreeningOutput', primaryjoin='ScreeningOutputLattice.screeningOutputId == ScreeningOutput.screeningOutputId') - - - -class ScreeningRank(db.Model): - __tablename__ = 'ScreeningRank' - - screeningRankId = db.Column(db.Integer, primary_key=True) - screeningRankSetId = db.Column(db.ForeignKey('ScreeningRankSet.screeningRankSetId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - screeningId = db.Column(db.ForeignKey('Screening.screeningId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - rankValue = db.Column(db.Float) - rankInformation = db.Column(db.String(1024)) - - Screening = db.relationship('Screening', primaryjoin='ScreeningRank.screeningId == Screening.screeningId') - ScreeningRankSet = db.relationship('ScreeningRankSet', primaryjoin='ScreeningRank.screeningRankSetId == ScreeningRankSet.screeningRankSetId') - - - -class ScreeningRankSet(db.Model): - __tablename__ = 'ScreeningRankSet' - - screeningRankSetId = db.Column(db.Integer, primary_key=True) - rankEngine = db.Column(db.String(255)) - rankingProjectFileName = db.Column(db.String(255)) - rankingSummaryFileName = db.Column(db.String(255)) - - - -class ScreeningStrategy(db.Model): - __tablename__ = 'ScreeningStrategy' - - screeningStrategyId = db.Column(db.Integer, primary_key=True) - screeningOutputId = db.Column(db.ForeignKey('ScreeningOutput.screeningOutputId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - phiStart = db.Column(db.Float) - phiEnd = db.Column(db.Float) - rotation = db.Column(db.Float) - exposureTime = db.Column(db.Float) - resolution = db.Column(db.Float) - completeness = 
db.Column(db.Float) - multiplicity = db.Column(db.Float) - anomalous = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue()) - program = db.Column(db.String(45)) - rankingResolution = db.Column(db.Float) - transmission = db.Column(db.Float, info='Transmission for the strategy as given by the strategy program.') - - ScreeningOutput = db.relationship('ScreeningOutput', primaryjoin='ScreeningStrategy.screeningOutputId == ScreeningOutput.screeningOutputId') - - - -class ScreeningStrategySubWedge(db.Model): - __tablename__ = 'ScreeningStrategySubWedge' - - screeningStrategySubWedgeId = db.Column(db.Integer, primary_key=True, info='Primary key') - screeningStrategyWedgeId = db.Column(db.ForeignKey('ScreeningStrategyWedge.screeningStrategyWedgeId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='Foreign key to parent table') - subWedgeNumber = db.Column(db.Integer, info='The number of this subwedge within the wedge') - rotationAxis = db.Column(db.String(45), info='Angle where subwedge starts') - axisStart = db.Column(db.Float, info='Angle where subwedge ends') - axisEnd = db.Column(db.Float, info='Exposure time for subwedge') - exposureTime = db.Column(db.Float, info='Transmission for subwedge') - transmission = db.Column(db.Float) - oscillationRange = db.Column(db.Float) - completeness = db.Column(db.Float) - multiplicity = db.Column(db.Float) - RESOLUTION = db.Column(db.Float) - doseTotal = db.Column(db.Float, info='Total dose for this subwedge') - numberOfImages = db.Column(db.Integer, info='Number of images for this subwedge') - comments = db.Column(db.String(255)) - - ScreeningStrategyWedge = db.relationship('ScreeningStrategyWedge', primaryjoin='ScreeningStrategySubWedge.screeningStrategyWedgeId == ScreeningStrategyWedge.screeningStrategyWedgeId') - - - -class ScreeningStrategyWedge(db.Model): - __tablename__ = 'ScreeningStrategyWedge' - - screeningStrategyWedgeId = db.Column(db.Integer, primary_key=True, info='Primary key') - screeningStrategyId = db.Column(db.ForeignKey('ScreeningStrategy.screeningStrategyId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='Foreign key to parent table') - wedgeNumber = db.Column(db.Integer, info='The number of this wedge within the strategy') - resolution = db.Column(db.Float) - completeness = db.Column(db.Float) - multiplicity = db.Column(db.Float) - doseTotal = db.Column(db.Float, info='Total dose for this wedge') - numberOfImages = db.Column(db.Integer, info='Number of images for this wedge') - phi = db.Column(db.Float) - kappa = db.Column(db.Float) - chi = db.Column(db.Float) - comments = db.Column(db.String(255)) - wavelength = db.Column(db.Float(asdecimal=True)) - - ScreeningStrategy = db.relationship('ScreeningStrategy', primaryjoin='ScreeningStrategyWedge.screeningStrategyId == ScreeningStrategy.screeningStrategyId') - - - -class SessionType(db.Model): - __tablename__ = 'SessionType' - - sessionTypeId = db.Column(db.Integer, primary_key=True) - sessionId = db.Column(db.ForeignKey('BLSession.sessionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - typeName = db.Column(db.String(31), nullable=False) - - BLSession = db.relationship('BLSession', primaryjoin='SessionType.sessionId == BLSession.sessionId') - - - -class SessionHasPerson(db.Model): - __tablename__ = 'Session_has_Person' - - sessionId = db.Column(db.ForeignKey('BLSession.sessionId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True, server_default=db.FetchedValue()) - personId = 
db.Column(db.ForeignKey('Person.personId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True, server_default=db.FetchedValue()) - role = db.Column(db.Enum('Local Contact', 'Local Contact 2', 'Staff', 'Team Leader', 'Co-Investigator', 'Principal Investigator', 'Alternate Contact', 'Data Access', 'Team Member')) - remote = db.Column(db.Integer, server_default=db.FetchedValue()) - - Person = db.relationship('Person', primaryjoin='SessionHasPerson.personId == Person.personId') - BLSession = db.relationship('BLSession', primaryjoin='SessionHasPerson.sessionId == BLSession.sessionId') - - - -class Shipping(db.Model): - __tablename__ = 'Shipping' - - shippingId = db.Column(db.Integer, primary_key=True) - proposalId = db.Column(db.ForeignKey('Proposal.proposalId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - shippingName = db.Column(db.String(45), index=True) - deliveryAgent_agentName = db.Column(db.String(45)) - deliveryAgent_shippingDate = db.Column(db.Date) - deliveryAgent_deliveryDate = db.Column(db.Date) - deliveryAgent_agentCode = db.Column(db.String(45)) - deliveryAgent_flightCode = db.Column(db.String(45)) - shippingStatus = db.Column(db.String(45), index=True) - bltimeStamp = db.Column(db.DateTime) - laboratoryId = db.Column(db.Integer, index=True) - isStorageShipping = db.Column(db.Integer, server_default=db.FetchedValue()) - creationDate = db.Column(db.DateTime, index=True) - comments = db.Column(db.String(255)) - sendingLabContactId = db.Column(db.ForeignKey('LabContact.labContactId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - returnLabContactId = db.Column(db.ForeignKey('LabContact.labContactId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - returnCourier = db.Column(db.String(45)) - dateOfShippingToUser = db.Column(db.DateTime) - shippingType = db.Column(db.String(45)) - SAFETYLEVEL = db.Column(db.String(8)) - deliveryAgent_flightCodeTimestamp = db.Column(db.DateTime, info='Date flight code created, if automatic') - deliveryAgent_label = db.Column(db.Text, info='Base64 encoded pdf of airway label') - readyByTime = db.Column(db.Time, info='Time shipment will be ready') - closeTime = db.Column(db.Time, info='Time after which shipment cannot be picked up') - physicalLocation = db.Column(db.String(50), info='Where shipment can be picked up from: i.e. 
Stores') - deliveryAgent_pickupConfirmationTimestamp = db.Column(db.DateTime, info='Date picked confirmed') - deliveryAgent_pickupConfirmation = db.Column(db.String(10), info='Confirmation number of requested pickup') - deliveryAgent_readyByTime = db.Column(db.Time, info='Confirmed ready-by time') - deliveryAgent_callinTime = db.Column(db.Time, info='Confirmed courier call-in time') - deliveryAgent_productcode = db.Column(db.String(10), info='A code that identifies which shipment service was used') - deliveryAgent_flightCodePersonId = db.Column(db.ForeignKey('Person.personId'), index=True, info='The person who created the AWB (for auditing)') - - Person = db.relationship('Person', primaryjoin='Shipping.deliveryAgent_flightCodePersonId == Person.personId') - Proposal = db.relationship('Proposal', primaryjoin='Shipping.proposalId == Proposal.proposalId') - LabContact = db.relationship('LabContact', primaryjoin='Shipping.returnLabContactId == LabContact.labContactId') - LabContact1 = db.relationship('LabContact', primaryjoin='Shipping.sendingLabContactId == LabContact.labContactId') - - - -t_ShippingHasSession = db.Table( - 'ShippingHasSession', - db.Column('shippingId', db.ForeignKey('Shipping.shippingId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True), - db.Column('sessionId', db.ForeignKey('BLSession.sessionId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -class Sleeve(db.Model): - __tablename__ = 'Sleeve' - - sleeveId = db.Column(db.Integer, primary_key=True, info='The unique sleeve id 1...255 which also identifies its home location in the freezer') - location = db.Column(db.Integer, info='NULL == freezer, 1...255 for local storage locations') - lastMovedToFreezer = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - lastMovedFromFreezer = db.Column(db.DateTime, server_default=db.FetchedValue()) - - - -class SpaceGroup(db.Model): - __tablename__ = 'SpaceGroup' - - spaceGroupId = db.Column(db.Integer, primary_key=True, info='Primary key') - spaceGroupNumber = db.Column(db.Integer, info='ccp4 number pr IUCR') - spaceGroupShortName = db.Column(db.String(45), index=True, info='short name without blank') - spaceGroupName = db.Column(db.String(45), info='verbose name') - bravaisLattice = db.Column(db.String(45), info='short name') - bravaisLatticeName = db.Column(db.String(45), info='verbose name') - pointGroup = db.Column(db.String(45), info='point group') - geometryClassnameId = db.Column(db.ForeignKey('GeometryClassname.geometryClassnameId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - MX_used = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue(), info='1 if used in the crystal form') - - GeometryClassname = db.relationship('GeometryClassname', primaryjoin='SpaceGroup.geometryClassnameId == GeometryClassname.geometryClassnameId') - - - -class Speciman(db.Model): - __tablename__ = 'Specimen' - - specimenId = db.Column(db.Integer, primary_key=True) - BLSESSIONID = db.Column(db.Integer) - bufferId = db.Column(db.ForeignKey('Buffer.bufferId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - macromoleculeId = db.Column(db.ForeignKey('Macromolecule.macromoleculeId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - samplePlatePositionId = db.Column(db.ForeignKey('SamplePlatePosition.samplePlatePositionId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - safetyLevelId = db.Column(db.ForeignKey('SafetyLevel.safetyLevelId', 
ondelete='CASCADE', onupdate='CASCADE'), index=True) - stockSolutionId = db.Column(db.ForeignKey('StockSolution.stockSolutionId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - code = db.Column(db.String(255)) - concentration = db.Column(db.String(45)) - volume = db.Column(db.String(45)) - experimentId = db.Column(db.ForeignKey('Experiment.experimentId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - comments = db.Column(db.String(5120)) - - Buffer = db.relationship('Buffer', primaryjoin='Speciman.bufferId == Buffer.bufferId') - Experiment = db.relationship('Experiment', primaryjoin='Speciman.experimentId == Experiment.experimentId') - Macromolecule = db.relationship('Macromolecule', primaryjoin='Speciman.macromoleculeId == Macromolecule.macromoleculeId') - SafetyLevel = db.relationship('SafetyLevel', primaryjoin='Speciman.safetyLevelId == SafetyLevel.safetyLevelId') - SamplePlatePosition = db.relationship('SamplePlatePosition', primaryjoin='Speciman.samplePlatePositionId == SamplePlatePosition.samplePlatePositionId') - StockSolution = db.relationship('StockSolution', primaryjoin='Speciman.stockSolutionId == StockSolution.stockSolutionId') - - - -class StockSolution(db.Model): - __tablename__ = 'StockSolution' - - stockSolutionId = db.Column(db.Integer, primary_key=True) - BLSESSIONID = db.Column(db.Integer) - bufferId = db.Column(db.ForeignKey('Buffer.bufferId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - macromoleculeId = db.Column(db.ForeignKey('Macromolecule.macromoleculeId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - instructionSetId = db.Column(db.ForeignKey('InstructionSet.instructionSetId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - boxId = db.Column(db.Integer) - name = db.Column(db.String(45)) - storageTemperature = db.Column(db.String(55)) - volume = db.Column(db.String(55)) - concentration = db.Column(db.String(55)) - comments = db.Column(db.String(255)) - proposalId = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue()) - - Buffer = db.relationship('Buffer', primaryjoin='StockSolution.bufferId == Buffer.bufferId') - InstructionSet = db.relationship('InstructionSet', primaryjoin='StockSolution.instructionSetId == InstructionSet.instructionSetId') - Macromolecule = db.relationship('Macromolecule', primaryjoin='StockSolution.macromoleculeId == Macromolecule.macromoleculeId') - - - -class Stoichiometry(db.Model): - __tablename__ = 'Stoichiometry' - - stoichiometryId = db.Column(db.Integer, primary_key=True) - hostMacromoleculeId = db.Column(db.ForeignKey('Macromolecule.macromoleculeId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - macromoleculeId = db.Column(db.ForeignKey('Macromolecule.macromoleculeId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - ratio = db.Column(db.String(45)) - - Macromolecule = db.relationship('Macromolecule', primaryjoin='Stoichiometry.hostMacromoleculeId == Macromolecule.macromoleculeId') - Macromolecule1 = db.relationship('Macromolecule', primaryjoin='Stoichiometry.macromoleculeId == Macromolecule.macromoleculeId') - - - -class Structure(db.Model): - __tablename__ = 'Structure' - - structureId = db.Column(db.Integer, primary_key=True) - macromoleculeId = db.Column(db.ForeignKey('Macromolecule.macromoleculeId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - PDB = db.Column(db.String(45)) - structureType = db.Column(db.String(45)) - fromResiduesBases = db.Column(db.String(45)) - 
toResiduesBases = db.Column(db.String(45)) - sequence = db.Column(db.String(45)) - - Macromolecule = db.relationship('Macromolecule', primaryjoin='Structure.macromoleculeId == Macromolecule.macromoleculeId') - - - -class SubstructureDetermination(db.Model): - __tablename__ = 'SubstructureDetermination' - - substructureDeterminationId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - phasingAnalysisId = db.Column(db.ForeignKey('PhasingAnalysis.phasingAnalysisId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related phasing analysis item') - phasingProgramRunId = db.Column(db.ForeignKey('PhasingProgramRun.phasingProgramRunId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related program item') - spaceGroupId = db.Column(db.ForeignKey('SpaceGroup.spaceGroupId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='Related spaceGroup') - method = db.Column(db.Enum('SAD', 'MAD', 'SIR', 'SIRAS', 'MR', 'MIR', 'MIRAS', 'RIP', 'RIPAS'), info='phasing method') - lowRes = db.Column(db.Float(asdecimal=True)) - highRes = db.Column(db.Float(asdecimal=True)) - recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time') - - PhasingAnalysi = db.relationship('PhasingAnalysi', primaryjoin='SubstructureDetermination.phasingAnalysisId == PhasingAnalysi.phasingAnalysisId') - PhasingProgramRun = db.relationship('PhasingProgramRun', primaryjoin='SubstructureDetermination.phasingProgramRunId == PhasingProgramRun.phasingProgramRunId') - SpaceGroup = db.relationship('SpaceGroup', primaryjoin='SubstructureDetermination.spaceGroupId == SpaceGroup.spaceGroupId') - - - -class Subtraction(db.Model): - __tablename__ = 'Subtraction' - - subtractionId = db.Column(db.Integer, primary_key=True) - dataCollectionId = db.Column(db.ForeignKey('SaxsDataCollection.dataCollectionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - rg = db.Column(db.String(45)) - rgStdev = db.Column(db.String(45)) - I0 = db.Column(db.String(45)) - I0Stdev = db.Column(db.String(45)) - firstPointUsed = db.Column(db.String(45)) - lastPointUsed = db.Column(db.String(45)) - quality = db.Column(db.String(45)) - isagregated = db.Column(db.String(45)) - concentration = db.Column(db.String(45)) - gnomFilePath = db.Column(db.String(255)) - rgGuinier = db.Column(db.String(45)) - rgGnom = db.Column(db.String(45)) - dmax = db.Column(db.String(45)) - total = db.Column(db.String(45)) - volume = db.Column(db.String(45)) - creationTime = db.Column(db.DateTime) - kratkyFilePath = db.Column(db.String(255)) - scatteringFilePath = db.Column(db.String(255)) - guinierFilePath = db.Column(db.String(255)) - SUBTRACTEDFILEPATH = db.Column(db.String(255)) - gnomFilePathOutput = db.Column(db.String(255)) - substractedFilePath = db.Column(db.String(255)) - - SaxsDataCollection = db.relationship('SaxsDataCollection', primaryjoin='Subtraction.dataCollectionId == SaxsDataCollection.dataCollectionId') - - - -class SubtractionToAbInitioModel(db.Model): - __tablename__ = 'SubtractionToAbInitioModel' - - subtractionToAbInitioModelId = db.Column(db.Integer, primary_key=True) - abInitioId = db.Column(db.ForeignKey('AbInitioModel.abInitioModelId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - subtractionId = db.Column(db.ForeignKey('Subtraction.subtractionId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - - AbInitioModel = db.relationship('AbInitioModel', primaryjoin='SubtractionToAbInitioModel.abInitioId == 
AbInitioModel.abInitioModelId') - Subtraction = db.relationship('Subtraction', primaryjoin='SubtractionToAbInitioModel.subtractionId == Subtraction.subtractionId') - - - -class UserGroup(db.Model): - __tablename__ = 'UserGroup' - - userGroupId = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(31), nullable=False, unique=True) - - - -t_UserGroup_has_Permission = db.Table( - 'UserGroup_has_Permission', - db.Column('userGroupId', db.ForeignKey('UserGroup.userGroupId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False), - db.Column('permissionId', db.ForeignKey('Permission.permissionId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -t_UserGroup_has_Person = db.Table( - 'UserGroup_has_Person', - db.Column('userGroupId', db.ForeignKey('UserGroup.userGroupId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False), - db.Column('personId', db.ForeignKey('Person.personId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -class Workflow(db.Model): - __tablename__ = 'Workflow' - - workflowId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - workflowTitle = db.Column(db.String(255)) - workflowType = db.Column(db.Enum('Undefined', 'BioSAXS Post Processing', 'EnhancedCharacterisation', 'LineScan', 'MeshScan', 'Dehydration', 'KappaReorientation', 'BurnStrategy', 'XrayCentering', 'DiffractionTomography', 'TroubleShooting', 'VisualReorientation', 'HelicalCharacterisation', 'GroupedProcessing', 'MXPressE', 'MXPressO', 'MXPressL', 'MXScore', 'MXPressI', 'MXPressM', 'MXPressA')) - workflowTypeId = db.Column(db.Integer) - comments = db.Column(db.String(1024)) - status = db.Column(db.String(255)) - resultFilePath = db.Column(db.String(255)) - logFilePath = db.Column(db.String(255)) - recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time') - workflowDescriptionFullPath = db.Column(db.String(255), info='Full file path to a json description of the workflow') - - - -class WorkflowMesh(db.Model): - __tablename__ = 'WorkflowMesh' - - workflowMeshId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - workflowId = db.Column(db.ForeignKey('Workflow.workflowId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related workflow') - bestPositionId = db.Column(db.ForeignKey('MotorPosition.motorPositionId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - bestImageId = db.Column(db.ForeignKey('Image.imageId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - value1 = db.Column(db.Float(asdecimal=True)) - value2 = db.Column(db.Float(asdecimal=True)) - value3 = db.Column(db.Float(asdecimal=True), info='N value') - value4 = db.Column(db.Float(asdecimal=True)) - cartographyPath = db.Column(db.String(255)) - recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time') - - Image = db.relationship('Image', primaryjoin='WorkflowMesh.bestImageId == Image.imageId') - MotorPosition = db.relationship('MotorPosition', primaryjoin='WorkflowMesh.bestPositionId == MotorPosition.motorPositionId') - Workflow = db.relationship('Workflow', primaryjoin='WorkflowMesh.workflowId == Workflow.workflowId') - - - -class WorkflowStep(db.Model): - __tablename__ = 'WorkflowStep' - - workflowStepId = db.Column(db.Integer, primary_key=True) - workflowId = 
db.Column(db.ForeignKey('Workflow.workflowId'), nullable=False, index=True) - type = db.Column(db.String(45)) - status = db.Column(db.String(45)) - folderPath = db.Column(db.String(1024)) - imageResultFilePath = db.Column(db.String(1024)) - htmlResultFilePath = db.Column(db.String(1024)) - resultFilePath = db.Column(db.String(1024)) - comments = db.Column(db.String(2048)) - crystalSizeX = db.Column(db.String(45)) - crystalSizeY = db.Column(db.String(45)) - crystalSizeZ = db.Column(db.String(45)) - maxDozorScore = db.Column(db.String(45)) - recordTimeStamp = db.Column(db.DateTime) - - Workflow = db.relationship('Workflow', primaryjoin='WorkflowStep.workflowId == Workflow.workflowId') - - - -class WorkflowType(db.Model): - __tablename__ = 'WorkflowType' - - workflowTypeId = db.Column(db.Integer, primary_key=True) - workflowTypeName = db.Column(db.String(45)) - comments = db.Column(db.String(2048)) - recordTimeStamp = db.Column(db.DateTime) - - - -class XFEFluorescenceSpectrum(db.Model): - __tablename__ = 'XFEFluorescenceSpectrum' - - xfeFluorescenceSpectrumId = db.Column(db.Integer, primary_key=True) - sessionId = db.Column(db.ForeignKey('BLSession.sessionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - blSampleId = db.Column(db.ForeignKey('BLSample.blSampleId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - jpegScanFileFullPath = db.Column(db.String(255)) - startTime = db.Column(db.DateTime) - endTime = db.Column(db.DateTime) - filename = db.Column(db.String(255)) - exposureTime = db.Column(db.Float) - axisPosition = db.Column(db.Float) - beamTransmission = db.Column(db.Float) - annotatedPymcaXfeSpectrum = db.Column(db.String(255)) - fittedDataFileFullPath = db.Column(db.String(255)) - scanFileFullPath = db.Column(db.String(255)) - energy = db.Column(db.Float) - beamSizeVertical = db.Column(db.Float) - beamSizeHorizontal = db.Column(db.Float) - crystalClass = db.Column(db.String(20)) - comments = db.Column(db.String(1024)) - blSubSampleId = db.Column(db.ForeignKey('BLSubSample.blSubSampleId'), index=True) - flux = db.Column(db.Float(asdecimal=True), info='flux measured before the xrfSpectra') - flux_end = db.Column(db.Float(asdecimal=True), info='flux measured after the xrfSpectra') - workingDirectory = db.Column(db.String(512)) - - BLSample = db.relationship('BLSample', primaryjoin='XFEFluorescenceSpectrum.blSampleId == BLSample.blSampleId') - BLSubSample = db.relationship('BLSubSample', primaryjoin='XFEFluorescenceSpectrum.blSubSampleId == BLSubSample.blSubSampleId') - BLSession = db.relationship('BLSession', primaryjoin='XFEFluorescenceSpectrum.sessionId == BLSession.sessionId') - - - -class XRFFluorescenceMapping(db.Model): - __tablename__ = 'XRFFluorescenceMapping' - - xrfFluorescenceMappingId = db.Column(db.Integer, primary_key=True) - xrfFluorescenceMappingROIId = db.Column(db.ForeignKey('XRFFluorescenceMappingROI.xrfFluorescenceMappingROIId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - imageNumber = db.Column(db.Integer, nullable=False) - counts = db.Column(db.Integer, nullable=False) - - DataCollection = db.relationship('DataCollection', primaryjoin='XRFFluorescenceMapping.dataCollectionId == DataCollection.dataCollectionId') - XRFFluorescenceMappingROI = db.relationship('XRFFluorescenceMappingROI', primaryjoin='XRFFluorescenceMapping.xrfFluorescenceMappingROIId == 
XRFFluorescenceMappingROI.xrfFluorescenceMappingROIId')
-
-
-
-class XRFFluorescenceMappingROI(db.Model):
-    __tablename__ = 'XRFFluorescenceMappingROI'
-
-    xrfFluorescenceMappingROIId = db.Column(db.Integer, primary_key=True)
-    startEnergy = db.Column(db.Float, nullable=False)
-    endEnergy = db.Column(db.Float, nullable=False)
-    element = db.Column(db.String(2))
-    edge = db.Column(db.String(2), info='In future may be changed to enum(K, L)')
-    r = db.Column(db.Integer, info='R colour component')
-    g = db.Column(db.Integer, info='G colour component')
-    b = db.Column(db.Integer, info='B colour component')
-
-
-
-class XrayCentringResult(db.Model):
-    __tablename__ = 'XrayCentringResult'
-
-    xrayCentringResultId = db.Column(db.Integer, primary_key=True)
-    gridInfoId = db.Column(db.ForeignKey('GridInfo.gridInfoId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    method = db.Column(db.String(15), info='Type of X-ray centering calculation')
-    status = db.Column(db.Enum('success', 'failure', 'pending'), nullable=False, server_default=db.FetchedValue())
-    x = db.Column(db.Float, info='position in number of boxes in direction of the fast scan within GridInfo grid')
-    y = db.Column(db.Float, info='position in number of boxes in direction of the slow scan within GridInfo grid')
-
-    GridInfo = db.relationship('GridInfo', primaryjoin='XrayCentringResult.gridInfoId == GridInfo.gridInfoId')
-
-
-
-class VRun(db.Model):
-    __tablename__ = 'v_run'
-    __table_args__ = (
-        db.Index('v_run_idx1', 'startDate', 'endDate'),
-    )
-
-    runId = db.Column(db.Integer, primary_key=True)
-    run = db.Column(db.String(7), nullable=False, server_default=db.FetchedValue())
-    startDate = db.Column(db.DateTime)
-    endDate = db.Column(db.DateTime)
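The `init_app` hunk below only trims a docstring; the dynamic loader it keeps is easy to miss in context lines, so here is a standalone approximation. It imports every sibling `.py` file so each module can register itself at startup. The trailing `module.init_app` call and the `hasattr` guard are assumptions, since the hunk's context ends before that point:

```python
# Sketch of the dynamic module-loading pattern kept by the hunk below.
import os
from importlib import import_module


def init_app(app, **kwargs):
    """Init modules."""
    # Import every non-dunder .py file next to this __init__.py and,
    # assuming each exposes an init_app hook, hand it the app object.
    for module_name in os.listdir(os.path.dirname(__file__)):
        if not module_name.startswith("__") and module_name.endswith(".py"):
            module = import_module(".%s" % module_name[:-3], package=__name__)
            if hasattr(module, "init_app"):  # hook name is an assumption
                module.init_app(app, **kwargs)
```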
- - Args: - app (Flask app): [description] - """ - + """Init modules.""" for module_name in os.listdir(os.path.dirname(__file__)): if not module_name.startswith("__") and module_name.endswith(".py"): module = import_module(".%s" % module_name[:-3], package=__name__) diff --git a/pyispyb/core/modules/admin/__init__.py b/pyispyb/core/modules/admin/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/pyispyb/core/modules/admin/activity.py b/pyispyb/core/modules/admin/activity.py new file mode 100644 index 00000000..89cd810b --- /dev/null +++ b/pyispyb/core/modules/admin/activity.py @@ -0,0 +1,24 @@ +from typing import Optional + +from ispyb import models + +from ....app.extensions.database.middleware import db +from ....app.extensions.database.utils import Paged, page +from ...schemas.admin.activity import ActionType + + +def get_activity( + skip: int, limit: int, action_type: Optional[ActionType] = None +) -> Paged[models.AdminActivity]: + """Get admin activity""" + query = db.session.query(models.AdminActivity).order_by( + models.AdminActivity.dateTime.desc() + ) + + if action_type: + query = query.filter(models.AdminActivity.action == action_type.value) + + total = query.count() + query = page(query, skip=skip, limit=limit) + + return Paged(total=total, results=query.all(), skip=skip, limit=limit) diff --git a/pyispyb/core/modules/admin/groups.py b/pyispyb/core/modules/admin/groups.py new file mode 100644 index 00000000..ed83733f --- /dev/null +++ b/pyispyb/core/modules/admin/groups.py @@ -0,0 +1,240 @@ +from typing import Optional + +from sqlalchemy import func, distinct +from sqlalchemy.orm import joinedload +from ispyb import models + +from ....app.extensions.database.utils import Paged, page, with_metadata +from ....app.extensions.database.middleware import db +from ...schemas.admin import groups as schema + + +def get_groups( + skip: int, + limit: int, + userGroupId: Optional[int] = None, +) -> Paged[models.UserGroup]: + metadata = { + "permissions": func.count(distinct(models.Permission.permissionId)), + "people": func.count(distinct(models.Person.personId)), + } + + query = ( + db.session.query(models.UserGroup, *metadata.values()) + .outerjoin(models.UserGroup.Permission) + .outerjoin(models.UserGroup.Person) + .group_by(models.UserGroup.userGroupId) + ) + + if userGroupId: + query = query.filter(models.UserGroup.userGroupId == userGroupId) + + total = query.count() + query = page(query, skip=skip, limit=limit) + results = with_metadata(query.all(), list(metadata.keys())) + + return Paged(total=total, results=results, skip=skip, limit=limit) + + +def add_group(group: schema.NewUserGroup) -> Optional[models.UserGroup]: + check = ( + db.session.query(models.UserGroup) + .filter(models.UserGroup.name == group.name) + .first() + ) + if check: + raise AttributeError(f"UserGroup `{group.name}` already exists") + + userGroup = models.UserGroup(**group.dict()) + db.session.add(userGroup) + db.session.commit() + + userGroups = get_groups(userGroupId=userGroup.userGroupId, skip=0, limit=1) + return userGroups.first + + +def update_group(userGroupId: int, group: schema.UserGroup): + userGroup = ( + db.session.query(models.UserGroup) + .filter(models.UserGroup.userGroupId == userGroupId) + .first() + ) + + if not userGroup: + raise FileNotFoundError() + + group_dict = group.dict(exclude_unset=True) + for key in ["name"]: + if key in group_dict: + setattr(userGroup, key, group_dict[key]) + + db.session.commit() + + updatedUserGroups = 
get_groups(userGroupId=userGroup.userGroupId, skip=0, limit=1) + return updatedUserGroups.first + + +def add_person_to_group(personId: int, userGroupId: int) -> None: + person = ( + db.session.query(models.Person) + .filter(models.Person.personId == personId) + .first() + ) + userGroup = ( + db.session.query(models.UserGroup) + .options(joinedload(models.UserGroup.Person)) + .filter(models.UserGroup.userGroupId == userGroupId) + .first() + ) + + if not person: + raise AttributeError(f"Person `{personId}` does not exist") + + if not userGroup: + raise AttributeError(f"UserGroup `{userGroupId}` does not exist") + + if person in userGroup.Person: + raise AttributeError( + f"UserGroup `{userGroupId}` already contains person `{personId}`" + ) + + userGroup.Person.append(person) + db.session.commit() + + +def remove_person_from_group(personId: int, userGroupId: int) -> None: + person = ( + db.session.query(models.Person) + .filter(models.Person.personId == personId) + .first() + ) + userGroup = ( + db.session.query(models.UserGroup) + .options(joinedload(models.UserGroup.Person)) + .filter(models.UserGroup.userGroupId == userGroupId) + .first() + ) + + if not person: + raise AttributeError(f"Person `{personId}` does not exist") + + if not userGroup: + raise AttributeError(f"UserGroup `{userGroupId}` does not exist") + + userGroup.Person.remove(person) + db.session.commit() + + +def add_permission_to_group(permissionId: int, userGroupId: int) -> None: + permission = ( + db.session.query(models.Permission) + .filter(models.Permission.permissionId == permissionId) + .first() + ) + userGroup = ( + db.session.query(models.UserGroup) + .options(joinedload(models.UserGroup.Permission)) + .filter(models.UserGroup.userGroupId == userGroupId) + .first() + ) + + if not permission: + raise AttributeError(f"Permission `{permissionId}` does not exist") + + if not userGroup: + raise AttributeError(f"UserGroup `{userGroupId}` does not exist") + + if permission in userGroup.Permission: + raise AttributeError( + f"UserGroup `{userGroupId}` already contains permission `{permissionId}`" + ) + + userGroup.Permission.append(permission) + db.session.commit() + + +def remove_permission_from_group(permissionId: int, userGroupId: int) -> None: + permission = ( + db.session.query(models.Permission) + .filter(models.Permission.permissionId == permissionId) + .first() + ) + userGroup = ( + db.session.query(models.UserGroup) + .options(joinedload(models.UserGroup.Permission)) + .filter(models.UserGroup.userGroupId == userGroupId) + .first() + ) + + if not permission: + raise AttributeError(f"Permission `{permissionId}` does not exist") + + if not userGroup: + raise AttributeError(f"UserGroup `{userGroupId}` does not exist") + + userGroup.Permission.remove(permission) + db.session.commit() + + +def get_permissions( + skip: int, + limit: int, + permissionId: Optional[int] = None, + userGroupId: Optional[int] = None, + search: Optional[str] = None, +): + query = db.session.query(models.Permission) + + if permissionId: + query = query.filter(models.Permission.permissionId == permissionId) + + if userGroupId: + query = query.join(models.Permission.UserGroup).filter( + models.UserGroup.userGroupId == userGroupId + ) + + if search: + query = query.filter(models.Permission.type.like(f"%{search}%")) + + total = query.count() + query = page(query, skip=skip, limit=limit) + + return Paged(total=total, results=query.all(), skip=skip, limit=limit) + + +def add_permission(permission: schema.NewPermission) -> models.Permission: + check = ( +
db.session.query(models.Permission) + .filter(models.Permission.type == permission.type) + .first() + ) + if check: + raise AttributeError(f"Permission type `{permission.type}` already exists") + + permissionModel = models.Permission(**permission.dict()) + db.session.add(permissionModel) + db.session.commit() + + return permissionModel + + +def update_permission( + permissionId: int, permission: schema.Permission +) -> models.Permission: + permissionModel = ( + db.session.query(models.Permission) + .filter(models.Permission.permissionId == permissionId) + .first() + ) + + if not permissionModel: + raise FileNotFoundError() + + permission_dict = permission.dict(exclude_unset=True) + for key in ["type", "description"]: + if key in permission_dict: + setattr(permissionModel, key, permission_dict[key]) + + db.session.commit() + + return permissionModel diff --git a/pyispyb/core/modules/api.py b/pyispyb/core/modules/api.py deleted file mode 100644 index 56b6ce4a..00000000 --- a/pyispyb/core/modules/api.py +++ /dev/null @@ -1,41 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from flask import Blueprint - -from pyispyb.app.extensions import api - - -def init_app(app, **kwargs): - """ - Inits api. - - Args: - app ([type]): [description] - """ - # pylint: disable=unused-argument - api_v1_blueprint = Blueprint("api", __name__, url_prefix=app.config["API_ROOT"]) - api.api_v1.init_app(api_v1_blueprint) - app.register_blueprint(api_v1_blueprint, url_prefix=app.config["API_ROOT"]) diff --git a/pyispyb/core/modules/auto_proc.py b/pyispyb/core/modules/auto_proc.py deleted file mode 100644 index 18913372..00000000 --- a/pyispyb/core/modules/auto_proc.py +++ /dev/null @@ -1,306 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - -import os -import time -import zipfile -from io import BytesIO - - -from pyispyb.app.extensions import db -from pyispyb.core import models, schemas - - -def get_auto_procs(request): - """ - Returns auto_proc entries. 
- - Returns: - [type]: [description] - """ - query_params = request.args.to_dict() - - return db.get_db_items( - models.AutoProc, - schemas.auto_proc.dict_schema, - schemas.auto_proc.ma_schema, - query_params, - ) - - -def get_auto_proc_by_id(auto_proc_id): - """ - Returns auto_proc by its id - - Args: - auto_proc_id (int): corresponds to autoProcId in db - - Returns: - dict: info about auto_proc as dict - """ - data_dict = {"autoProcId": auto_proc_id} - return db.get_db_item_by_params( - models.AutoProc, schemas.auto_proc.ma_schema, data_dict - ) - - -def add_auto_proc(data_dict): - """ - Adds a auto_proc to db. - - Args: - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item(models.AutoProc, schemas.auto_proc.ma_schema, data_dict) - - -def get_auto_proc_status(request): - """ - Returns auto_proc_status entries. - - Returns: - [type]: [description] - """ - query_params = request.args.to_dict() - - return db.get_db_items( - models.AutoProcStatus, - schemas.auto_proc_status.dict_schema, - schemas.auto_proc_status.ma_schema, - query_params, - ) - - -def get_auto_proc_status_by_id(auto_proc_status_id): - """ - Returns auto_proc_status by its auto_proc_statusId. - - Args: - auto_proc_status (int): corresponds to auto_proc_statusId in db - - Returns: - dict: info about auto_proc_status as dict - """ - data_dict = {"auto_proc_statusId": auto_proc_status_id} - return db.get_db_item_by_params( - models.AutoProcStatus, schemas.auto_proc_status.ma_schema, data_dict - ) - - -def add_auto_proc_status(data_dict): - """ - Adds a auto_proc_status to db. - - Args: - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item( - models.AutoProcStatus, schemas.auto_proc_status.ma_schema, data_dict - ) - - -def get_auto_proc_programs(request): - """ - Returns auto_proc_program entries. - - Returns: - [type]: [description] - """ - query_params = request.args.to_dict() - - return db.get_db_items( - models.AutoProcProgram, - schemas.auto_proc_program.dict_schema, - schemas.auto_proc_program.ma_schema, - query_params, - ) - - -def get_auto_proc_program_by_id(auto_proc_program_id): - """ - Returns auto_proc_program by its auto_proc_programId. - - Args: - auto_proc_program (int): corresponds to auto_proc_programId in db - - Returns: - dict: info about auto_proc_program as dict - """ - data_dict = {"autoProcProgramId": auto_proc_program_id} - return db.get_db_item_by_params( - models.AutoProcProgram, schemas.auto_proc_program.ma_schema, data_dict - ) - - -def add_auto_proc_program(data_dict): - """ - Adds a auto_proc_program to db. - - Args: - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item( - models.AutoProcProgram, schemas.auto_proc_program.ma_schema, data_dict - ) - - -def get_attachments_by_query(query_params): - """ - Returns auto_proc_program_attachment entries. - - Returns: - [type]: [description] - """ - return db.get_db_items( - models.AutoProcProgramAttachment, - schemas.auto_proc_program_attachment.dict_schema, - schemas.auto_proc_program_attachment.ma_schema, - query_params, - ) - - -def get_auto_proc_program_attachment_by_id(auto_proc_program_attachment_id): - """ - Returns auto_proc_program_attachment by its auto_proc_program_attachmentId. 
- - Args: - auto_proc_program_attachment (int): corresponds to autoProcProgramAttachmentId - - Returns: - dict: info about auto_proc_program_attachment as dict - """ - data_dict = {"autoProcProgramAttachmentId": auto_proc_program_attachment_id} - return db.get_db_item_by_params( - models.AutoProcProgramAttachment, - schemas.auto_proc_program_attachment.ma_schema, - data_dict, - ) - - -def add_auto_proc_program_attachment(data_dict): - """ - Adds a auto_proc_program_attachment to db. - - Args: - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item( - models.AutoProcProgramAttachment, - schemas.auto_proc_program_attachment.ma_schema, - data_dict, - ) - - -def get_auto_proc_program_messages(request): - """ - Returns auto_proc_program_message entries. - - Returns: - [type]: [description] - """ - query_params = request.args.to_dict() - - return db.get_db_items( - models.AutoProcProgramMessage, - schemas.auto_proc_program_message.dict_schema, - schemas.auto_proc_program_message.ma_schema, - query_params, - ) - -def get_attachment_zip_by_program_id(program_id): - attachment_list = get_attachments_by_query( - {"autoProcProgramId": program_id} - )["data"]["rows"] - - memory_file = None - msg = "" - - if attachment_list: - memory_file = BytesIO() - with zipfile.ZipFile(memory_file, 'w') as zf: - for attachm_file_dict in attachment_list: - data = zipfile.ZipInfo( - os.path.join( - attachm_file_dict['fileName'] - ) - ) - data.date_time = time.localtime(time.time())[:6] - data.compress_type = zipfile.ZIP_DEFLATED - attachm_file = open(os.path.join( - attachm_file_dict["filePath"], - attachm_file_dict['fileName'] - ), - "rb" - ) - zf.writestr(data, attachm_file.read()) - attachm_file.close() - memory_file.seek(0) - else: - msg = "No attachment with autoproc program id %d found" % program_id - - return memory_file, msg - - -def get_auto_proc_program_message_by_id(auto_proc_program_message_id): - """ - Returns auto_proc_program_message by its autoProcProgramMessageId. - - Args: - auto_proc_program_message (int): corresponds to autoProcProgramMessageId - - Returns: - dict: info about auto_proc_program_message as dict - """ - data_dict = {"autoProcProgramMessageId": auto_proc_program_message_id} - return db.get_db_item_by_params( - models.AutoProcProgramMessage, - schemas.auto_proc_program_message.ma_schema, - data_dict, - ) - - -def add_auto_proc_program_message(data_dict): - """ - Adds a auto_proc_program_message to db. - - Args: - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item( - models.AutoProcProgramMessage, - schemas.auto_proc_program_message.ma_schema, - data_dict, - ) diff --git a/pyispyb/core/modules/beamline_setup.py b/pyispyb/core/modules/beamline_setup.py deleted file mode 100644 index b7f471fa..00000000 --- a/pyispyb/core/modules/beamline_setup.py +++ /dev/null @@ -1,126 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. 
- -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - -__license__ = "LGPLv3+" - - -from pyispyb.app.extensions import db - -from pyispyb.core import models, schemas - - -def get_beamline_setups(request): - """ - Returns beamline_setup items based on query parameters. - - Args: - query_dict (dict): [description] - - Returns: - [type]: [description] - """ - query_dict = request.args.to_dict() - - return db.get_db_items( - models.BeamLineSetup, - schemas.beamline_setup.dict_schema, - schemas.beamline_setup.ma_schema, - query_dict, - ) - - -def add_beamline_setup(data_dict): - """ - Adds data collection item. - - Args: - beamline_setup_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item( - models.BeamLineSetup, schemas.beamline_setup.ma_schema, data_dict - ) - - -def get_beamline_setup_by_id(beamline_setup_id): - """ - Returns beamline_setup by its beamline_setupId. - - Args: - beamline_setup_id (int): corresponds to beamlineSetupId in db - - Returns: - dict: info about beamline_setup as dict - """ - data_dict = {"beamLineSetupId": beamline_setup_id} - return db.get_db_item( - models.BeamLineSetup, schemas.beamline_setup.ma_schema, data_dict - ) - - -def update_beamline_setup(beamline_setup_id, data_dict): - """ - Updates beamline_setup. - - Args: - beamline_setup_id ([type]): [description] - beamline_setup_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"beamLineSetupId": beamline_setup_id} - return db.update_db_item( - models.BeamLineSetup, schemas.beamline_setup.ma_schema, id_dict, data_dict - ) - - -def patch_beamline_setup(beamline_setup_id, data_dict): - """ - Patch a beamline_setup - - Args: - beamline_setup_id ([type]): [description] - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"beamLineSetupId": beamline_setup_id} - return db.patch_db_item( - models.BeamLineSetup, schemas.beamline_setup.ma_schema, id_dict, data_dict - ) - - -def delete_beamline_setup(beamline_setup_id): - """Deletes beamline_setup item from db. - - Args: - beamline_setup_id (int): beamline_setupId column in db - - Returns: - bool: True if the beamline_setup exists and deleted successfully, - otherwise return False - """ - id_dict = {"beamLineSetupId": beamline_setup_id} - return db.delete_db_item(models.BeamLineSetup, id_dict) diff --git a/pyispyb/core/modules/component.py b/pyispyb/core/modules/component.py deleted file mode 100644 index 217a55d4..00000000 --- a/pyispyb/core/modules/component.py +++ /dev/null @@ -1,123 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - -from pyispyb.app.extensions import db -from pyispyb.core import models, schemas - - -def get_component_types(request): - """ - Returns component_type entries. 
- - Returns: - [type]: [description] - """ - query_dict = request.args.to_dict() - - return db.get_db_items( - models.ComponentType, - schemas.component_type.dict_schema, - schemas.component_type.ma_schema, - query_dict, - ) - - -def get_component_type_by_id(component_type_id): - """ - Returns component_type by its component_typeId. - - Args: - component_type_id (int): corresponds to component_typeId in db - - Returns: - dict: info about component_type as dict - """ - data_dict = {"componentTypeId": component_type_id} - return db.get_db_item( - models.ComponentType, schemas.component_type.ma_schema, data_dict - ) - - -def add_component_type(data_dict): - """ - Adds a component_type to db. - - Args: - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item( - models.ComponentType, schemas.component_type.ma_schema, data_dict - ) - - -def update_component_type(component_type_id, data_dict): - """ - Updates component_type. - - Args: - component_type_id ([type]): [description] - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"componentTypeId": component_type_id} - return db.update_db_item( - models.ComponentType, schemas.component_type.ma_schema, id_dict, data_dict - ) - - -def patch_component_type(component_type_id, data_dict): - """ - Patch a component_type. - - Args: - component_type_id ([type]): [description] - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"componentTypeId": component_type_id} - return db.patch_db_item( - models.ComponentType, schemas.component_type.ma_schema, id_dict, data_dict - ) - - -def delete_component_type(component_type_id): - """ - Deletes component_type item from db. - - Args: - component_type_id (int): componentTypeId column in db - - Returns: - bool: True if the component_type exists and deleted successfully, - otherwise return False - """ - id_dict = {"componentTypeId": component_type_id} - return db.delete_db_item(models.ComponentType, id_dict) diff --git a/pyispyb/core/modules/contacts.py b/pyispyb/core/modules/contacts.py deleted file mode 100644 index 4e310fc8..00000000 --- a/pyispyb/core/modules/contacts.py +++ /dev/null @@ -1,360 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - -from pyispyb.app.extensions import db -from pyispyb.app.extensions.auth import auth_provider - -from pyispyb.core import models, schemas -from pyispyb.core.modules import proposal, sample, protein, crystal - - -def get_person_by_id(person_id): - id_dict = {"personId": person_id} - return db.get_db_item( - models.Person, schemas.person.ma_schema, id_dict - ) - -def get_person_info(request): - user_info = auth_provider.get_user_info_from_auth_header( - request.headers.get("Authorization") - ) - query_dict = request.args.to_dict() - if "login_name" in query_dict: - #Return info about requested login name - person_id = get_person_id_by_login(query_dict["login_name"]) - else: - person_id = get_person_id_by_login(user_info["sub"]) - - if person_id: - person_info = get_person_info_by_params({"personId": person_id}) - person_info["personId"] = person_id - user_info.update(person_info) - - return user_info - -def get_person_info_by_params(param_dict): - """Returns person by its id. - - Args: - param_dict (dict): corresponds to personId in db - - Returns: - dict: info about person as dict - """ - person_info_dict = db.get_db_item( - models.Person, schemas.person.ma_schema, param_dict - ) - proposal_id_list = proposal.get_proposal_ids_by_person_id( - person_info_dict["personId"] - ) - person_info_dict["proposal_ids"] = proposal_id_list - - person_protein_list = [] - for proposal_id in proposal_id_list: - protein_list = protein.get_proteins_by_query({"proposalId": proposal_id}) - for protein_dict in protein_list["data"]["rows"]: - person_protein_list.append(protein_dict["proteinId"]) - - # sample_list = sample.get_samples_by_request - person_info_dict["protein_ids"] = person_protein_list - - return person_info_dict - -def get_persons_by_query(query_dict): - """Returns person by its id. - - Args: - param_dict (dict): corresponds to personId in db - - Returns: - dict: info about person as dict - """ - return db.get_db_items( - models.Person, - schemas.person.dict_schema, - schemas.person.ma_schema, - query_dict, - ) - -def get_person_id_by_login(login_name): - """Returns person by login name. - - Args: - login_name ([type]): [description] - - Returns: - [type]: [description] - """ - if login_name: - persons = get_persons_by_query({"login": login_name})["data"]["rows"] - if persons: - return persons[0]["personId"] - - -def add_person(data_dict): - """Adds person item to db. - - Args: - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item(models.Person, schemas.person.ma_schema, data_dict) - - -def update_person(person_id, data_dict): - """ - Updates person. - - Args: - person_id ([type]): [description] - person_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"personId": person_id} - return db.update_db_item( - models.Person, schemas.person.ma_schema, id_dict, data_dict - ) - - -def patch_person(person_id, person_dict): - """ - Patch a person. - - Args: - person_id ([type]): [description] - person_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"personId": person_id} - return db.patch_db_item( - models.Person, schemas.person.ma_schema, id_dict, person_dict - ) - - -def delete_person(person_id): - """Deletes person item from db. 
- - Args: - person_id (int): personId column in db - - Returns: - bool: True if the person exists and deleted successfully, - otherwise return False - """ - id_dict = {"personId": person_id} - return db.delete_db_item(models.Person, id_dict) - - -def get_lab_contacts(request): - """Returns lab contact by query parameters""" - - query_dict = request.args.to_dict() - - return db.get_db_items( - models.LabContact, - schemas.lab_contact.dict_schema, - schemas.lab_contact.ma_schema, - query_dict, - ) - - -def get_lab_contact_by_params(param_dict): - """ - Returns lab contacts by params. - - Args: - param_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.get_db_item( - models.LabContact, schemas.lab_contact.ma_schema, param_dict - ) - - -def add_lab_contact(data_dict): - """ - Adds a new lab contact. - - Args: - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item(models.LabContact, schemas.lab_contact.ma_schema, data_dict) - - -def update_lab_contact(lab_contact_id, data_dict): - """ - Updates lab contact. - - Args: - lab_contact_id (int): [description] - data_dict (dict): [description] - - Returns: - dict: [description] - """ - id_dict = {"labContactId": lab_contact_id} - return db.update_db_item( - models.LabContact, schemas.lab_contact.ma_schema, id_dict, data_dict - ) - - -def patch_lab_contact(lab_contact_id, data_dict): - """ - Patches lab contact db item. - - Args: - lab_contact_id (int): [description] - data_dict (dict): [description] - - Returns: - dict: [description] - """ - id_dict = {"labContactId": lab_contact_id} - return db.patch_db_item( - models.LabContact, schemas.lab_contact.ma_schema, id_dict, data_dict - ) - - -def delete_lab_contact(lab_contact_id): - """ - Deletes lab contact. - - Args: - lab_contact_id ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"labContactId": lab_contact_id} - return db.delete_db_item(models.LabContact, id_dict) - - -def get_laboratories(request): - """ - Returns laboratories based on query parameters. - - Args: - query_dict ([type]): [description] - - Returns: - [type]: [description] - """ - query_dict = request.args.to_dict() - - return db.get_db_items( - models.Laboratory, - schemas.laboratory.dict_schema, - schemas.laboratory.ma_schema, - query_dict, - ) - - -def add_laboratory(data_dict): - """ - Adds new laboratory. - - Args: - laboratory_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item(models.Laboratory, schemas.laboratory.ma_schema, data_dict) - - -def get_laboratory_by_id(laboratory_id): - """ - Returns laboratory info by its laboratoryId. - - Args: - laboratory_id (int): corresponds to laboratoryId in db - - Returns: - dict: info about laboratory as dict - """ - data_dict = {"laboratoryId": laboratory_id} - return db.get_db_item( - models.Laboratory, schemas.laboratory.ma_schema, data_dict - ) - - -def patch_laboratory(laboratory_id, data_dict): - """ - Patch a laboratory. - - Args: - laboratory_id ([type]): [description] - laboratory_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"laboratoryId": laboratory_id} - return db.patch_db_item( - models.Laboratory, schemas.laboratory.ma_schema, id_dict, data_dict - ) - - -def update_laboratory(laboratory_id, data_dict): - """ - Updates laboratory db item. 
- - Args: - laboratory_id ([type]): [description] - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"laboratoryId": laboratory_id} - return db.update_db_item( - models.Laboratory, schemas.laboratory.ma_schema, id_dict, data_dict - ) - - -def delete_laboratory(laboratory_id): - """ - Deletes laboratory item from db. - - Args: - laboratory_id (int): laboratoryId column in db - - Returns: - bool: True if the laboratory exists and deleted successfully, - otherwise return False - """ - id_dict = {"laboratoryId": laboratory_id} - return db.delete_db_item(models.Laboratory, id_dict) diff --git a/pyispyb/core/modules/container.py b/pyispyb/core/modules/container.py deleted file mode 100644 index 2db1dcb0..00000000 --- a/pyispyb/core/modules/container.py +++ /dev/null @@ -1,123 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from pyispyb.app.extensions import db -from pyispyb.core import models, schemas - - -def get_containers(request): - """ - Returns all containers. - - Returns: - dict: list with dewars] - """ - - query_dict = request.args.to_dict() - - return db.get_db_items( - models.Container, - schemas.dewar.dict_schema, - schemas.dewar.ma_schema, - query_dict, - ) - - -def get_container_by_id(container_id): - """ - Returns container by its container_id. - - Args: - container_id (int): corresponds to containerId in db - - Returns: - dict: info about container as dict - """ - id_dict = {"containerId": container_id} - return db.get_db_item( - models.Container, schemas.container.ma_schema, id_dict - ) - - -def add_container(data_dict): - """ - Adds a container to db. - - Args: - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item(models.Container, schemas.container.ma_schema, data_dict) - - -def update_container(container_id, data_dict): - """ - Updates container. - - Args: - container_id ([type]): [description] - container_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"containerId": container_id} - return db.update_db_item( - models.Container, schemas.container.ma_schema, id_dict, data_dict - ) - - -def patch_container(container_id, data_dict): - """ - Patch a container. - - Args: - container_id ([type]): [description] - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"containerId": container_id} - return db.patch_db_item( - models.Container, schemas.container.ma_schema, id_dict, data_dict - ) - - -def delete_container(container_id): - """ - Deletes container item from db. 
- - Args: - container_id (int): containerId column in db - - Returns: - bool: True if the container exists and deleted successfully, - otherwise return False - """ - id_dict = {"containerId": container_id} - return db.delete_db_item(models.Container, id_dict) diff --git a/pyispyb/core/modules/containers.py b/pyispyb/core/modules/containers.py new file mode 100644 index 00000000..194c7fae --- /dev/null +++ b/pyispyb/core/modules/containers.py @@ -0,0 +1,80 @@ +from typing import Optional + +from sqlalchemy import distinct, func +from sqlalchemy.orm import joinedload +from ispyb import models + +from ...app.extensions.database.definitions import with_authorization +from ...app.extensions.database.utils import Paged, page, update_model, with_metadata +from ...app.extensions.database.middleware import db +from ..schemas import containers as schema + + +def get_containers( + skip: int, + limit: int, + proteinId: Optional[int] = None, + containerId: Optional[int] = None, + dewarId: Optional[int] = None, + proposal: Optional[str] = None, + proposalId: Optional[int] = None, + withAuthorization: bool = True, +) -> Paged[models.Container]: + metadata = {"samples": func.count(distinct(models.BLSample.blSampleId))} + + query = ( + db.session.query(models.Container, *metadata.values()) + .join(models.Dewar) + .options(joinedload(models.Container.Dewar)) + .join(models.Shipping) + .join(models.BLSample) + .join(models.Crystal) + .join(models.Proposal, models.Proposal.proposalId == models.Shipping.proposalId) + .group_by(models.Container.containerId) + ) + + if containerId: + query = query.filter(models.Container.containerId == containerId) + + if dewarId: + query = query.filter(models.Container.dewarId == dewarId) + + if proteinId: + query = query.filter(models.Crystal.proteinId == proteinId) + + if proposal: + query = query.filter(models.Proposal.proposal == proposal) + + if proposalId: + query = query.filter(models.Proposal.proposalId == proposalId) + + if withAuthorization: + query = with_authorization(query) + + total = query.count() + query = page(query, skip=skip, limit=limit) + results = with_metadata(query.all(), list(metadata.keys())) + + return Paged(total=total, results=results, skip=skip, limit=limit) + + +def create_container(container: schema.ContainerCreate) -> models.Container: + container_dict = container.dict() + container = models.Container(**container_dict) + db.session.add(container) + db.session.commit() + + new_container = get_containers(containerId=container.containerId, skip=0, limit=1) + return new_container.first + + +def update_container( + containerId: int, container: schema.ContainerCreate +) -> models.Container: + container_dict = container.dict(exclude_unset=True) + new_container = get_containers(containerId=containerId, skip=0, limit=1).first + + update_model(new_container, container_dict) + db.session.commit() + + return get_containers(containerId=containerId, skip=0, limit=1).first diff --git a/pyispyb/core/modules/crystal.py b/pyispyb/core/modules/crystal.py deleted file mode 100644 index 0bf7ea76..00000000 --- a/pyispyb/core/modules/crystal.py +++ /dev/null @@ -1,137 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version.
- -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - -import os - -from pyispyb.app.extensions import db -from pyispyb.core import models, schemas - - -def get_crystals_by_query(query_dict): - """ - Returns crystal entries. - - Returns: - [type]: [description] - """ - return db.get_db_items( - models.Crystal, - schemas.crystal.dict_schema, - schemas.crystal.ma_schema, - query_dict, - ) - - -def get_crystal_by_id(crystal_id): - """ - Returns crystal by its crystalId. - - Args: - crystal_id (int): corresponds to crystalId in db - - Returns: - dict: info about crystal as dict - """ - data_dict = {"crystalId": crystal_id} - return db.get_db_item( - models.Crystal, schemas.crystal.ma_schema, data_dict - ) - - -def add_crystal(data_dict): - """ - Adds a crystal to db. - - Args: - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item(models.Crystal, schemas.crystal.ma_schema, data_dict) - - -def update_crystal(crystal_id, data_dict): - """ - Updates crystal. - - Args: - crystal_id ([type]): [description] - crystal_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"crystalId": crystal_id} - return db.update_db_item( - models.Crystal, schemas.crystal.ma_schema, id_dict, data_dict - ) - - -def patch_crystal(crystal_id, data_dict): - """ - Patch a crystal. - - Args: - crystal_id ([type]): [description] - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.patch_db_item( - models.Crystal, - schemas.crystal.ma_schema, - {"crystalId" : crystal_id}, - data_dict - ) - - -def delete_crystal(crystal_id): - """ - Deletes crystal item from db. 
- - Args: - crystal_id (int): crystalId column in db - - Returns: - bool: True if the crystal exists and deleted successfully, - otherwise return False - """ - id_dict = {"crystalId": crystal_id} - return db.delete_db_item(models.Crystal, id_dict) - - -def get_crystal_pdb_by_id(crystal_id): - crystal_dict = get_crystal_by_id(crystal_id) - if crystal_dict: - return crystal_dict["pdbFilePath"], crystal_dict["pdbFileName"] - -def patch_crystal_pdb_by_id(crystal_id, query_dict): - return db.patch_db_item( - models.Crystal, - schemas.crystal.ma_schema, - {"crystalId": crystal_id}, - query_dict, - ) diff --git a/pyispyb/core/modules/data.py b/pyispyb/core/modules/data.py new file mode 100644 index 00000000..27b15042 --- /dev/null +++ b/pyispyb/core/modules/data.py @@ -0,0 +1,327 @@ +import logging +import math +import os +from typing import Optional, Tuple + +from fabio.cbfimage import CbfImage +import h5grove +from h5grove.content import DatasetContent +from h5grove.utils import get_array_stats +import h5py +from ispyb import models +import numpy as np + +from ...config import settings +from ...core.modules.events import get_events +from ...core.modules.processings import get_processing_attachments +from ..schemas import data as schema + +logger = logging.getLogger(__name__) + + +def get_image( + dataCollectionId: int, + imageNumber: int, + header: bool = False, +) -> np.ndarray: + datacollections = get_events(dataCollectionId=dataCollectionId, skip=0, limit=1) + try: + dc: models.DataCollection = datacollections.first["Item"] + except IndexError: + return None + + if imageNumber > dc.numberOfImages: + logger.warning( + f"Requested image {imageNumber} which is greater than total {dc.numberOfImages}" + ) + return None + + file_path = os.path.join(dc.imageDirectory, dc.fileTemplate) + if settings.path_map: + file_path = settings.path_map + file_path + + ext = get_file_ext(dc.fileTemplate) + if ext in ["h5", "H5", "hdf5", "HDF5"]: + if not os.path.exists(file_path): + return None + + _header, data = HDF5FormatHandler.preload( + path=file_path, imageNumber=imageNumber + ) + + elif ext in ["cbf", "CBF"]: + file_path = file_path % imageNumber + if "%" in file_path: + file_path = file_path.format(imageNumber) + + if not os.path.exists(file_path): + logger.warning( + f"Requested image {imageNumber} for dataCollection: {dataCollectionId} with path {file_path} does not exist on disk" + ) + return None + + _header, data = CBFFormatHandler.preload(path=file_path) + + if header: + return _header + + return data + + +def get_image_histogram( + dataCollectionId: int, + imageNumber: int, +) -> schema.ImageHistogram: + data = get_image(dataCollectionId, imageNumber) + if data is None: + return None + + hist, bins = _compute_histogram(data) + + return { + "values": hist.astype(np.float32).tolist(), + "bins": bins, + "shape": hist.shape, + "max": np.max(hist).item() if hist.size > 0 else 0, + } + + +class CBFFormatHandler: + @staticmethod + def preload(path: str) -> Tuple[dict, np.ndarray, bytes]: + cbf_image = CbfImage(fname=path) + float_data = cbf_image.data.astype(np.float32) + + parsed_ext_hdr, braggy_hdr = CBFFormatHandler._parse_header( + cbf_image, float_data + ) + + img_hdr = {} + img_hdr["parsed_ext_hdr"] = parsed_ext_hdr + img_hdr["braggy_hdr"] = braggy_hdr + + return img_hdr, float_data.flatten() + + @staticmethod + def _parse_header(cbf_image: CbfImage, np_array: np.ndarray) -> Tuple[dict, dict]: + height, width = cbf_image.shape + + hdr = cbf_image.header + parsed_ext_hdr = {} + braggy_hdr = {} + + 
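# The block below parses the CBF mini-header carried in + # "_array_data.header_contents": a run of '#'-prefixed lines such as this + # hypothetical excerpt (illustrative values, not taken from the source): + # # Wavelength 0.9763 A + # # Detector_distance 0.1985 m + # # Beam_xy (1082.00, 1135.00) pixels + # # Pixel_size 172e-6 m x 172e-6 m + # Each line is stripped of its leading '#' and split into key and value + # before the typed Braggy header fields are extracted. +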
_ext_hdr = hdr.get("_array_data.header_contents", "").split("\r\n") + for data in _ext_hdr: + # Ignore empty lines coming from multiple line-breaks + if data == "": + continue + + key_value = data.strip("#").strip().split() + + key = key_value[0].strip(":").strip() + value = " ".join(key_value[1:]) + parsed_ext_hdr[key] = value + try: + w = float(parsed_ext_hdr.get("Wavelength", "0").strip("A ")) + d = float(parsed_ext_hdr.get("Detector_distance", "0").strip("m ")) + + bcx, bcy = parsed_ext_hdr["Beam_xy"].split(",") + bcx, bcy = float(bcx.strip("pixels() ")), float(bcy.strip("pixels() ")) + + px_size_x, px_size_y = parsed_ext_hdr.get("Pixel_size", "0").split("x") + px_size_x, px_size_y = ( + float(px_size_x.strip("m ")), + float(px_size_y.strip("m ")), + ) + + dr = math.sqrt((px_size_x * width) ** 2 + (px_size_y * height) ** 2) / 2 + + # Remove invalid values (-1) + clean_np_array = np_array[np_array >= 0] + + braggy_hdr = { + "wavelength": w, + "detector_distance": d, + "beam_cx": bcx, + "beam_cy": bcy, + "beam_ocx": (width / 2) - bcx, + "beam_ocy": (height / 2) - bcy, + "detector_radius": dr, + "pixel_size_x": px_size_x, + "pixel_size_y": px_size_y, + "img_width": width, + "img_height": height, + "pxxpm": 1 / px_size_x, + "pxypm": 1 / px_size_y, + **get_array_stats( + clean_np_array if clean_np_array.size > 0 else np_array + ), + } + except (KeyError, IndexError): + logging.info("Could not create Braggy header from CBF header") + + return parsed_ext_hdr, braggy_hdr + + +class HDF5FormatHandler: + @staticmethod + def preload( + path: str, + imageNumber: int, + ) -> Tuple[dict, np.ndarray, bytes]: + h5path, image_index = HDF5FormatHandler._find_path(path, imageNumber) + if not h5path: + return None, None + + with h5py.File(path, "r") as h5file: + data = _get_dataset_data(h5file, h5path, str(image_index)) + + np_array = data.astype(np.float32) + img_hdr = HDF5FormatHandler._get_hdr(path, np_array) + + return img_hdr, np_array + + @staticmethod + def _get_hdr(path: str, np_array: np.ndarray) -> dict[str, dict]: + with h5py.File(path, "r") as h5file: + wavelength = _get_instrument_param(h5file, "beam/incident_wavelength") + detector = _get_instrument_param(h5file, "detector/detector_distance") + + pixel_size_x = _get_instrument_param(h5file, "detector/x_pixel_size") + pixel_size_y = _get_instrument_param(h5file, "detector/y_pixel_size") + width = _get_instrument_param( + h5file, "detector/detectorSpecific/x_pixels_in_detector" + ) + height = _get_instrument_param( + h5file, "detector/detectorSpecific/y_pixels_in_detector" + ) + + beam_cx = _get_instrument_param(h5file, "detector/beam_center_x") + beam_cy = _get_instrument_param(h5file, "detector/beam_center_y") + + # Remove invalid values (SATURATION VALUES) + clean_np_array = np_array[np_array != np.max(np_array)] + + braggy_hdr = { + "wavelength": wavelength, + "detector_distance": detector, + "beam_cx": beam_cx, + "beam_cy": beam_cy, + "beam_ocx": (width / 2) - beam_cx, + "beam_ocy": (height / 2) - beam_cy, + "detector_radius": (width * pixel_size_x) / 2, + "pixel_size_x": pixel_size_x, + "pixel_size_y": pixel_size_y, + "img_width": width, + "img_height": height, + "pxxpm": 1 / pixel_size_x, + "pxypm": 1 / pixel_size_y, + **get_array_stats(clean_np_array if clean_np_array.size > 0 else np_array), + } + + return {"braggy_hdr": braggy_hdr} + + @staticmethod + def _find_path(path: str, imageNumber: int) -> Tuple[str, int]: + """Lookup correct entry for requested imageNumber""" + with h5py.File(path, "r") as h5file: + dset_content = 
h5grove.create_content(h5file, "/entry/data") + for child in dset_content.metadata()["children"]: + child_path = "/entry/data/" + child["name"] + image_nr_low = _get_dataset_attr(h5file, child_path)["image_nr_low"] + image_nr_high = _get_dataset_attr(h5file, child_path)["image_nr_high"] + + if imageNumber >= image_nr_low and imageNumber <= image_nr_high: + image_index = imageNumber - image_nr_low.item() + logger.info( + f"Found imageNumber `{imageNumber}` in `{path}` with path `{child_path}` index `{image_index}`" + ) + + return child_path, image_index + + logger.warning( + f"Could not find requested imageNumber `{imageNumber}` in `{path}` (max imageNumber `{image_nr_high}`)" + ) + return None, None + + +def _get_dataset_attr(h5file: h5py.File, dset_path: str): + dset_content = h5grove.create_content(h5file, dset_path) + assert isinstance(dset_content, DatasetContent) # nosec + return dset_content.attributes() + + +def _get_dataset_data( + h5file: h5py.File, dset_path: str, selection: Optional[str] = None +): + dset_content = h5grove.create_content(h5file, dset_path) + assert isinstance(dset_content, DatasetContent) # nosec + return dset_content.data(selection) + + +def _get_instrument_param(h5file: h5py.File, param_path: str): + data = _get_dataset_data(h5file, f"/entry/instrument/{param_path}") + assert isinstance(data, np.generic) # nosec + return data.item() + + +def _compute_histogram(data: np.ndarray) -> Tuple[np.ndarray, list]: + std = 3 * np.std(data) + mean = np.mean(data) + clean_data = data[data < mean + std] + + if clean_data.size == 0: + return np.ndarray([]), [] + + hist, bins = np.histogram( + clean_data.flatten(), + bins=np.arange(np.min(clean_data), np.max(clean_data), 1) + if np.max(clean_data) <= 300 + else 300, + ) + return hist, bins.tolist() + + +def get_file_ext(file_name: str): + _, ext = os.path.splitext(file_name) + return ext[1:] # Remove leading dot + + +def get_h5_file( + dataCollectionId: Optional[int] = None, + autoProcProgramAttachmentId: Optional[int] = None, + robotActionId: Optional[int] = None, +): + """Find the relevant hdf5 file from the parameters""" + + # Direct datacollection hdf5 + if dataCollectionId is not None: + datacollections = get_events(dataCollectionId=dataCollectionId, skip=0, limit=1) + dc = datacollections.first["Item"] + return os.path.join(dc.imageDirectory, dc.fileTemplate) + + #  Directly from autoprocprogramattachmentid + elif autoProcProgramAttachmentId is not None: + attachments = get_processing_attachments( + autoProcProgramAttachmentId=autoProcProgramAttachmentId, skip=0, limit=1 + ) + attachment = attachments.first + ext = get_file_ext(attachment.fileName).lower() + if attachment.fileType == "Result" and ext in ["h5", "hdf5"]: + return attachment.fileFullPath + + # From a sample action + elif robotActionId is not None: + sampleactions = get_events(robotActionId, skip=0, limit=1) + sampleaction = sampleactions.first + return sampleaction.resultFilePath + + return None + + +def get_h5_path_mapped(**kwargs) -> str: + file_path = get_h5_file(**kwargs) + if file_path: + if settings.path_map: + return settings.path_map + file_path + return file_path diff --git a/pyispyb/core/modules/data_collection.py b/pyispyb/core/modules/data_collection.py deleted file mode 100644 index 74bf6bc6..00000000 --- a/pyispyb/core/modules/data_collection.py +++ /dev/null @@ -1,135 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. 
- -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from pyispyb.app.extensions import db - -from pyispyb.core import models, schemas - - -def get_data_collections(query_dict): - """ - Returns data collection items based on query parameters. - - Args: - query_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.get_db_items( - models.DataCollection, - schemas.data_collection.dict_schema, - schemas.data_collection.ma_schema, - query_dict, - ) - - -def add_data_collection(data_dict): - """ - Adds data collection item. - - Args: - data_collection_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item( - models.DataCollection, - schemas.data_collection.ma_schema, - data_dict - ) - - -def get_data_collection_by_id(data_collection_id): - """ - Returns data_collection by its id. - - Args: - data_collection_id (int): corresponds to dataCollectionId in db - - Returns: - dict: info about data_collection as dict - """ - return db.get_db_item( - models.DataCollection, - schemas.data_collection.ma_schema, - {"dataCollectionId": data_collection_id} - ) - - -def get_data_collection_groups(request): - """ - Returns data collection group items based on query parameters. - - Args: - query_dict ([type]): [description] - - Returns: - [type]: [description] - """ - - query_dict = request.args.to_dict() - - return db.get_db_items( - models.DataCollectionGroup, - schemas.data_collection_group.dict_schema, - schemas.data_collection_group.ma_schema, - query_dict, - ) - - -def add_data_collection_group(data_dict): - """ - Adds data collection item. - - Args: - data_collection_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item( - models.DataCollectionGroup, - schemas.data_collection_group.ma_schema, - data_dict - ) - - -def get_data_collection_group_by_id(data_collection_group_id): - """ - Returns data collection group by its id. 
- - Args: - data_collection_group_id (int): corresponds to dataCollectionGroupId - - Returns: - dict: info about data collection group as dict - """ - return db.get_db_item( - models.DataCollectionGroup, - schemas.data_collection_group.ma_schema, - {"dataCollectionGroupId": data_collection_group_id} - ) diff --git a/pyispyb/core/modules/datacollections.py b/pyispyb/core/modules/datacollections.py new file mode 100644 index 00000000..addb775e --- /dev/null +++ b/pyispyb/core/modules/datacollections.py @@ -0,0 +1,263 @@ +import logging +import os +from typing import Optional + +from sqlalchemy import func +from ispyb import models + +from ...app.extensions.database.definitions import ( + with_authorization, +) +from ...app.extensions.database.utils import Paged, page, with_metadata +from ...app.extensions.database.middleware import db +from .events import get_events +from ..schemas import datacollections as schema +from ...config import settings + +logger = logging.getLogger(__name__) + + +def get_datacollection_diffraction_image_path( + dataCollectionId: int, + snapshot: bool = False, +) -> Optional[str]: + query = ( + db.session.query( + ( + models.Image.jpegThumbnailFileFullPath + if snapshot + else models.Image.jpegFileFullPath + ).label("imagePath") + ) + .filter(models.Image.imageNumber == 1) + .filter(models.Image.dataCollectionId == dataCollectionId) + .join(models.DataCollection) + .join(models.DataCollectionGroup) + .join(models.BLSession) + .join(models.Proposal) + ) + + query = with_authorization(query, joinBLSession=False) + first_image = query.first() + + if first_image: + if not os.path.exists(first_image.imagePath): + logger.warning( + f"Diffraction image {first_image.imagePath} for dataCollectionId {dataCollectionId} does not exist on disk" + ) + return None + + return first_image.imagePath + + +def get_datacollection_snapshot_path( + dataCollectionId: int, + imageId: int = 1, + snapshot: bool = False, +) -> Optional[str]: + datacollections = get_events( + dataCollectionId=dataCollectionId, + skip=0, + limit=1, + ) + try: + dc = datacollections.first["Item"] + except IndexError: + return None + + images = [ + "xtalSnapshotFullPath1", + "xtalSnapshotFullPath2", + "xtalSnapshotFullPath3", + "xtalSnapshotFullPath4", + ] + + image_path: str = getattr(dc, images[imageId - 1]) + if image_path is None: + return None + + if settings.path_map: + image_path = settings.path_map + image_path + + if snapshot: + ext = os.path.splitext(image_path)[1][1:].strip() + image_path_tmp = image_path.replace(f".{ext}", f"t.{ext}") + + # fall back in case the snapshot doesn't exist + if os.path.exists(image_path_tmp): + image_path = image_path_tmp + + if not os.path.exists(image_path): + logger.warning( + f"{images[imageId - 1]} [{image_path}] for dataCollectionId {dataCollectionId} does not exist on disk" + ) + return None + + return image_path + + +def get_datacollection_analysis_image_path( + dataCollectionId: int, +) -> Optional[str]: + datacollections = get_events( + dataCollectionId=dataCollectionId, + skip=0, + limit=1, + ) + try: + dc = datacollections.first["Item"] + except IndexError: + return None + + image_path: str = dc.imageQualityIndicatorsPlotPath + if image_path is None: + return None + + if settings.path_map: + image_path = settings.path_map + image_path + + if not os.path.exists(image_path): + logger.warning( + f"imageQualityIndicatorsPlotPath [{dc.imageQualityIndicatorsPlotPath}] for dataCollectionId {dataCollectionId} does not exist on disk" + ) + return None + + return image_path + +
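+# Note on the convention used by the path helpers above: when +# `settings.path_map` is configured it is prepended verbatim to every path +# read from the database before the file is touched. Hypothetical example +# (illustrative values): path_map = "/mnt/beamline" maps the stored path +# "/data/visitor/mx0000/snapshot1.jpeg" to +# "/mnt/beamline/data/visitor/mx0000/snapshot1.jpeg" on disk. If the mapped +# file is missing, the helpers log a warning and return None. +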
+def get_datacollection_attachments( + skip: int, + limit: int, + dataCollectionId: Optional[int] = None, + dataCollectionGroupId: Optional[int] = None, + dataCollectionFileAttachmentId: Optional[int] = None, +) -> Paged[models.DataCollectionFileAttachment]: + metadata = { + "url": func.concat( + f"{settings.api_root}/datacollections/attachments/", + models.DataCollectionFileAttachment.dataCollectionFileAttachmentId, + ) + } + + query = ( + db.session.query(models.DataCollectionFileAttachment, *metadata.values()) + .join(models.DataCollection) + .join(models.DataCollectionGroup) + .join(models.BLSession) + .join(models.Proposal) + .group_by(models.DataCollectionFileAttachment.dataCollectionFileAttachmentId) + ) + + if dataCollectionId: + query = query.filter( + models.DataCollectionFileAttachment.dataCollectionId == dataCollectionId + ) + + if dataCollectionGroupId: + query = query.filter( + models.DataCollectionGroup.dataCollectionGroupId == dataCollectionGroupId + ) + + if dataCollectionFileAttachmentId: + query = query.filter( + models.DataCollectionFileAttachment.dataCollectionFileAttachmentId + == dataCollectionFileAttachmentId + ) + + query = with_authorization(query, joinBLSession=False) + + total = query.count() + query = page(query, skip=skip, limit=limit) + results = with_metadata(query.all(), list(metadata.keys())) + + for result in results: + result._metadata["fileName"] = os.path.basename(result.fileFullPath) + + return Paged(total=total, results=results, skip=skip, limit=limit) + + +def get_per_image_analysis( + skip: int, + limit: int, + dataCollectionId: Optional[int] = None, + dataCollectionGroupId: Optional[int] = None, +) -> Paged[schema.PerImageAnalysis]: + query = ( + db.session.query( + models.ImageQualityIndicators.imageNumber, + models.ImageQualityIndicators.totalIntegratedSignal, + models.ImageQualityIndicators.method2Res, + models.ImageQualityIndicators.goodBraggCandidates, + ) + .join( + models.DataCollection, + models.ImageQualityIndicators.dataCollectionId + == models.DataCollection.dataCollectionId, + ) + .join(models.DataCollectionGroup) + .join(models.BLSession) + .join(models.Proposal) + ) + + if dataCollectionId: + query = query.filter(models.DataCollection.dataCollectionId == dataCollectionId) + + if dataCollectionGroupId: + query = query.filter( + models.DataCollectionGroup.dataCollectionGroupId == dataCollectionGroupId + ) + + query = with_authorization(query, joinBLSession=False) + total = query.count() + query = page(query, skip=skip, limit=limit) + + results = {"dataCollectionId": dataCollectionId} + for row in [r._asdict() for r in query.all()]: + for key in [ + "imageNumber", + "totalIntegratedSignal", + "method2Res", + "goodBraggCandidates", + ]: + if key not in results: + results[key] = [] + if row[key] is not None: + results[key].append(row[key]) + + return Paged(total=total, results=[results], skip=skip, limit=limit) + + +def get_workflow_steps( + skip: int, + limit: int, + workflowId: Optional[int] = None, + workflowStepId: Optional[int] = None, +) -> Paged[models.WorkflowStep]: + query = ( + db.session.query(models.WorkflowStep) + .join(models.Workflow) + .join(models.DataCollectionGroup) + .join(models.BLSession) + .join(models.Proposal) + ) + + if workflowId: + query = query.filter(models.WorkflowStep.workflowId == workflowId) + + if workflowStepId: + query = query.filter(models.WorkflowStep.workflowStepId == workflowStepId) + + query = with_authorization(query, joinBLSession=False) + + total = query.count() + query = page(query, skip=skip,
+
+
+def get_workflow_steps(
+    skip: int,
+    limit: int,
+    workflowId: Optional[int] = None,
+    workflowStepId: Optional[int] = None,
+) -> Paged[models.WorkflowStep]:
+    query = (
+        db.session.query(models.WorkflowStep)
+        .join(models.Workflow)
+        .join(models.DataCollectionGroup)
+        .join(models.BLSession)
+        .join(models.Proposal)
+    )
+
+    if workflowId:
+        query = query.filter(models.WorkflowStep.workflowId == workflowId)
+
+    if workflowStepId:
+        query = query.filter(models.WorkflowStep.workflowStepId == workflowStepId)
+
+    query = with_authorization(query, joinBLSession=False)
+
+    total = query.count()
+    query = page(query, skip=skip, limit=limit)
+    results = query.all()
+
+    for result in results:
+        result._metadata["attachments"] = {}
+        for file in ["imageResultFilePath", "resultFilePath", "htmlResultFilePath"]:
+            path = getattr(result, file)
+            # File paths can be NULL in the database; os.path.exists(None)
+            # would raise, so treat missing values as absent attachments
+            result._metadata["attachments"][file] = (
+                os.path.exists(path) if path else False
+            )
+
+    return Paged(total=total, results=results, skip=skip, limit=limit)
diff --git a/pyispyb/core/modules/detector.py b/pyispyb/core/modules/detector.py
deleted file mode 100644
index ea197861..00000000
--- a/pyispyb/core/modules/detector.py
+++ /dev/null
@@ -1,125 +0,0 @@
-"""
-Project: py-ispyb.
-
-https://github.com/ispyb/py-ispyb
-
-This file is part of py-ispyb software.
-
-py-ispyb is free software: you can redistribute it and/or modify
-it under the terms of the GNU Lesser General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
-
-py-ispyb is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU Lesser General Public License for more details.
-
-You should have received a copy of the GNU Lesser General Public License
-along with py-ispyb. If not, see .
-"""
-
-
-__license__ = "LGPLv3+"
-
-
-from pyispyb.app.extensions import db
-from pyispyb.core import models, schemas
-
-
-def get_detectors(request):
-    """
-    Returns detector items based on query parameters.
-
-    Args:
-        query_dict (dict): [description]
-
-    Returns:
-        [type]: [description]
-    """
-    query_dict = request.args.to_dict()
-
-    return db.get_db_items(
-        models.Detector,
-        schemas.detector.dict_schema,
-        schemas.detector.ma_schema,
-        query_dict,
-    )
-
-
-def add_detector(data_dict):
-    """
-    Adds data collection item.
-
-    Args:
-        detector_dict ([type]): [description]
-
-    Returns:
-        [type]: [description]
-    """
-    return db.add_db_item(models.Detector, schemas.detector.ma_schema, data_dict)
-
-
-def get_detector_by_id(detector_id):
-    """
-    Returns detector by its detectorId.
-
-    Args:
-        detector_id (int): corresponds to detectorId in db
-
-    Returns:
-        dict: info about detector as dict
-    """
-    data_dict = {"detectorId": detector_id}
-    return db.get_db_item(
-        models.Detector, schemas.detector.ma_schema, data_dict
-    )
-
-
-def update_detector(detector_id, data_dict):
-    """
-    Updates detector.
-
-    Args:
-        detector_id ([type]): [description]
-        detector_dict ([type]): [description]
-
-    Returns:
-        [type]: [description]
-    """
-    id_dict = {"detectorId": detector_id}
-    return db.update_db_item(
-        models.Detector, schemas.detector.ma_schema, id_dict, data_dict
-    )
-
-
-def patch_detector(detector_id, data_dict):
-    """
-    Patch a detector.
-
-    Args:
-        detector_id ([type]): [description]
-        data_dict ([type]): [description]
-
-    Returns:
-        [type]: [description]
-    """
-    id_dict = {"detectorId": detector_id}
-    return db.patch_db_item(
-        models.Detector, schemas.detector.ma_schema, id_dict, data_dict
-    )
-
-
-def delete_detector(detector_id):
-    """
-    Deletes detector item from db.
-
-    Args:
-        detector_id (int): detectorId column in db
-
-    Returns:
-        bool: True if the detector exists and deleted successfully,
-        otherwise return False
-    """
-    id_dict = {"detectorId": detector_id}
-    return db.delete_db_item(models.Detector, id_dict)
diff --git a/pyispyb/core/modules/dewar.py b/pyispyb/core/modules/dewar.py
deleted file mode 100644
index f2659c24..00000000
--- a/pyispyb/core/modules/dewar.py
+++ /dev/null
@@ -1,118 +0,0 @@
-"""
-Project: py-ispyb.
- -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from pyispyb.app.extensions import db, report -from pyispyb.core import models, schemas -from pyispyb.core.modules import contacts, proposal, shipping - - -def get_dewars_by_query(query_dict): - """ - Returns all dewars. - - Returns: - dict: list with dewars] - """ - return db.get_db_items( - models.Dewar, schemas.dewar.dict_schema, schemas.dewar.ma_schema, query_dict - ) - - -def get_dewar_by_id(dewar_id): - """ - Returns dewar by its dewar_id. - - Args: - dewar_id (int): corresponds to dewarId in db - - Returns: - dict: info about dewar as dict - """ - id_dict = {"dewarId": dewar_id} - return db.get_db_item(models.Dewar, schemas.dewar.ma_schema, id_dict) - - -def get_dewar_labels_by_id(dewar_id): - dewar_dict = get_dewar_by_id(dewar_id) - shipping_info_dict = shipping.get_shipping_info_by_id(dewar_dict["shippingId"]) - return report.create_dewar_labels(shipping_info_dict, dewar_dict) - - -def add_dewar(data_dict): - """ - Adds a dewar to db. - - Args: - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item(models.Dewar, schemas.dewar.ma_schema, data_dict) - - -def update_dewar(dewar_id, data_dict): - """ - Updates dewar. - - Args: - dewar_id ([type]): [description] - dewar_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"dewarId": dewar_id} - return db.update_db_item(models.Dewar, schemas.dewar.ma_schema, id_dict, data_dict) - - -def patch_dewar(dewar_id, data_dict): - """ - Patch a dewar. - - Args: - dewar_id ([type]): [description] - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"dewarId": dewar_id} - return db.patch_db_item(models.Dewar, schemas.dewar.ma_schema, id_dict, data_dict) - - -def delete_dewar(dewar_id): - """ - Deletes dewar item from db. 
- - Args: - dewar_id (int): dewarId column in db - - Returns: - bool: True if the dewar exists and deleted successfully, - otherwise return False - """ - id_dict = {"dewarId": dewar_id} - return db.delete_db_item(models.Dewar, id_dict) diff --git a/pyispyb/core/modules/dewars.py b/pyispyb/core/modules/dewars.py new file mode 100644 index 00000000..50f52e66 --- /dev/null +++ b/pyispyb/core/modules/dewars.py @@ -0,0 +1,73 @@ +from typing import Optional + +from sqlalchemy import distinct, func +from sqlalchemy.orm import joinedload +from ispyb import models + +from ...app.extensions.database.definitions import with_authorization +from ...app.extensions.database.utils import Paged, page, update_model, with_metadata +from ...app.extensions.database.middleware import db +from ..schemas import dewars as schema + + +def get_dewars( + skip: int, + limit: int, + dewarId: Optional[int] = None, + shippingId: Optional[int] = None, + proposal: str = None, + proposalId: Optional[int] = None, + withAuthorization: bool = True, +) -> Paged[models.Dewar]: + metadata = {"containers": func.count(distinct(models.Container.containerId))} + + query = ( + db.session.query(models.Dewar, *metadata.values()) + .join(models.Dewar.Shipping) + .options(joinedload(models.Dewar.Shipping)) + .join(models.Proposal, models.Proposal.proposalId == models.Shipping.proposalId) + .outerjoin(models.Container) + .group_by(models.Dewar.dewarId) + .order_by(models.Dewar.dewarId) + ) + + if dewarId: + query = query.filter(models.Dewar.dewarId == dewarId) + + if shippingId: + query = query.filter(models.Shipping.shippingId == shippingId) + + if proposal: + query = query.filter(models.Proposal.proposal == proposal) + + if proposalId: + query = query.filter(models.Proposal.proposalId == proposalId) + + if withAuthorization: + query = with_authorization(query) + + total = query.count() + query = page(query, skip=skip, limit=limit) + results = with_metadata(query.all(), list(metadata.keys())) + + return Paged(total=total, results=results, skip=skip, limit=limit) + + +def create_dewar(dewar: schema.DewarCreate) -> models.Dewar: + dewar_dict = dewar.dict() + dewar = models.Dewar(**dewar_dict) + db.session.add(dewar) + db.session.commit() + + new_dewar = get_dewars(dewarId=dewar.dewarId, skip=0, limit=1) + return new_dewar.first + + +def update_dewar(dewarId: int, dewar: schema.DewarCreate) -> models.Dewar: + dewar_dict = dewar.dict(exclude_unset=True) + new_dewar = get_dewars(dewarId=dewarId, skip=0, limit=1).first + + update_model(new_dewar, dewar_dict) + db.session.commit() + + return get_dewars(dewarId=dewarId, skip=0, limit=1).first diff --git a/pyispyb/core/modules/diffraction_plan.py b/pyispyb/core/modules/diffraction_plan.py deleted file mode 100644 index 768d3f76..00000000 --- a/pyispyb/core/modules/diffraction_plan.py +++ /dev/null @@ -1,124 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. 
If not, see . -""" - - -__license__ = "LGPLv3+" - - -from pyispyb.app.extensions import db -from pyispyb.core import models, schemas - - -def get_diffraction_plans(request): - """ - Returns diffraction_plan entries. - - Returns: - [type]: [description] - """ - query_dict = request.args.to_dict() - - return db.get_db_items( - models.DiffractionPlan, - schemas.diffraction_plan.dict_schema, - schemas.diffraction_plan.ma_schema, - query_dict, - ) - - -def get_diffraction_plan_by_id(diffraction_plan_id): - """ - Returns diffraction_plan by its diffraction_planId. - - Args: - diffraction_plan_id (int): corresponds to diffraction_planId in db - - Returns: - dict: info about diffraction_plan as dict - """ - data_dict = {"diffractionPlanId": diffraction_plan_id} - return db.get_db_item( - models.DiffractionPlan, schemas.diffraction_plan.ma_schema, data_dict - ) - - -def add_diffraction_plan(data_dict): - """ - Adds a diffraction_plan to db. - - Args: - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item( - models.DiffractionPlan, schemas.diffraction_plan.ma_schema, data_dict - ) - - -def update_diffraction_plan(diffraction_plan_id, data_dict): - """ - Updates diffraction_plan. - - Args: - diffraction_plan_id ([type]): [description] - diffraction_plan_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"diffractionPlanId": diffraction_plan_id} - return db.update_db_item( - models.DiffractionPlan, schemas.diffraction_plan.ma_schema, id_dict, data_dict - ) - - -def patch_diffraction_plan(diffraction_plan_id, data_dict): - """ - Patch a diffraction_plan. - - Args: - diffraction_plan_id ([type]): [description] - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"diffractionPlanId": diffraction_plan_id} - return db.patch_db_item( - models.DiffractionPlan, schemas.diffraction_plan.ma_schema, id_dict, data_dict - ) - - -def delete_diffraction_plan(diffraction_plan_id): - """ - Deletes diffraction_plan item from db. - - Args: - diffraction_plan_id (int): diffraction_planId column in db - - Returns: - bool: True if the diffraction_plan exists and deleted successfully, - otherwise return False - """ - id_dict = {"diffractionPlanId": diffraction_plan_id} - return db.delete_db_item(models.DiffractionPlan, id_dict) diff --git a/pyispyb/core/modules/energy_scan.py b/pyispyb/core/modules/energy_scan.py deleted file mode 100644 index bb80729e..00000000 --- a/pyispyb/core/modules/energy_scan.py +++ /dev/null @@ -1,37 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -from pyispyb.core import models, schemas - - -__license__ = "LGPLv3+" - - -def get_energy_scans(): - """ - Returns list of energy scans. 
-
-    Returns:
-        [type]: [description]
-    """
-    energy_scan_list = models.EnergyScan.query.all()
-    return schemas.energy_scan.ma_schema.dump(energy_scan_list)
diff --git a/pyispyb/core/modules/events.py b/pyispyb/core/modules/events.py
new file mode 100644
index 00000000..cd04c1a1
--- /dev/null
+++ b/pyispyb/core/modules/events.py
@@ -0,0 +1,638 @@
+from dataclasses import dataclass, field
+import enum
+from typing import Any, List, Optional
+import os
+from fastapi import HTTPException
+
+import sqlalchemy
+from sqlalchemy import or_
+from sqlalchemy.orm import contains_eager
+from sqlalchemy.sql.expression import literal_column
+from ispyb import models
+
+from ...app.extensions.database.definitions import (
+    with_authorization,
+    _session,
+    _proposal,
+)
+from ...app.extensions.database.utils import Paged, page
+from ...app.extensions.database.middleware import db
+from ..schemas import events as schema
+from ...config import settings
+
+
+@dataclass
+class EntityType:
+    # The entity, e.g. `DataCollection` or `EnergyScan`
+    entity: sqlalchemy.orm.decl_api.DeclarativeMeta
+    # How the entity joins to `BLSample`, e.g. `DataCollection.blSampleId`
+    sampleId: "sqlalchemy.Column[Any]"
+    # Its primary key, e.g. `dataCollectionId`
+    key: str
+    # Any joined entities, e.g. `DataCollectionGroup`
+    joined: Optional[List[sqlalchemy.orm.decl_api.DeclarativeMeta]] = field(
+        default_factory=list
+    )
+
+
+ENTITY_TYPES: dict[str, EntityType] = {
+    "dc": EntityType(
+        models.DataCollection,
+        models.DataCollectionGroup.blSampleId,
+        "dataCollectionId",
+        [
+            models.DataCollection.DataCollectionGroup,
+            models.DataCollection.GridInfo,
+            [
+                models.DataCollection.DataCollectionGroup,
+                models.DataCollectionGroup.Workflow,
+            ],
+        ],
+    ),
+    "robot": EntityType(
+        models.RobotAction, models.RobotAction.blsampleId, "robotActionId"
+    ),
+    "xrf": EntityType(
+        models.XFEFluorescenceSpectrum,
+        models.XFEFluorescenceSpectrum.blSampleId,
+        "xfeFluorescenceSpectrumId",
+    ),
+    "es": EntityType(models.EnergyScan, models.EnergyScan.blSampleId, "energyScanId"),
+}
+
+
+def with_sample(
+    query: "sqlalchemy.orm.Query[Any]",
+    column: "sqlalchemy.Column[Any]",
+    blSampleId: Optional[int] = None,
+    proteinId: Optional[int] = None,
+) -> "sqlalchemy.orm.Query[Any]":
+    query = (
+        query.outerjoin(models.BLSample, models.BLSample.blSampleId == column)
+        .add_columns(models.BLSample.name.label("blSample"))
+        .add_columns(models.BLSample.blSampleId.label("blSampleId"))
+    )
+
+    if blSampleId:
+        query = query.filter(column == blSampleId)
+
+    if proteinId:
+        query = (
+            query.join(models.Crystal)
+            .join(models.Protein)
+            .filter(models.Protein.proteinId == proteinId)
+        )
+
+    return query
+
+
+class EventStatus(str, enum.Enum):
+    success = "success"
+    failed = "failed"
+    processed = "processed"
+    processerror = "processerror"
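+
+
+# Illustrative call (ids hypothetical): page through everything that happened
+# in one session, newest first. Each result dict carries the union columns
+# plus the fully loaded entity under "Item":
+#
+#     events = get_events(skip=0, limit=25, sessionId=5678)
+#     for event in events.results:
+#         print(event["type"], event["startTime"], event["Item"])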
+
+
+def get_events(
+    skip: int,
+    limit: int,
+    session: Optional[str] = None,
+    sessionId: Optional[int] = None,
+    proposal: Optional[str] = None,
+    proposalId: Optional[int] = None,
+    beamLineName: Optional[str] = None,
+    dataCollectionId: Optional[int] = None,
+    dataCollectionGroupId: Optional[int] = None,
+    blSampleId: Optional[int] = None,
+    blSubSampleId: Optional[int] = None,
+    proteinId: Optional[int] = None,
+    status: Optional[EventStatus] = None,
+    eventType: Optional[str] = None,
+) -> Paged[schema.Event]:
+    queries = {}
+
+    _dataCollectionId = models.DataCollection.dataCollectionId
+    startTime = models.DataCollection.startTime
+    endTime = models.DataCollection.endTime
+    duration = (
+        sqlalchemy.func.timestampdiff(
+            sqlalchemy.text("SECOND"),
+            models.DataCollection.startTime,
+            models.DataCollection.endTime,
+        )
+        / 60
+    )
+    dataCollectionCount = literal_column("1")
+
+    if dataCollectionGroupId is None:
+        duration = sqlalchemy.func.sum(duration) / (
+            sqlalchemy.func.count(models.DataCollection.dataCollectionId)
+            / sqlalchemy.func.count(
+                sqlalchemy.distinct(models.DataCollection.dataCollectionId)
+            )
+        )
+        # Return a single representative (the highest) dataCollectionId per group
+        _dataCollectionId = sqlalchemy.func.max(models.DataCollection.dataCollectionId)
+        startTime = sqlalchemy.func.min(models.DataCollection.startTime)
+        endTime = sqlalchemy.func.max(models.DataCollection.endTime)
+        dataCollectionCount = sqlalchemy.func.count(
+            sqlalchemy.func.distinct(models.DataCollection.dataCollectionId)
+        )
+
+    queries["dc"] = (
+        db.session.query(
+            _dataCollectionId.label("id"),
+            startTime.label("startTime"),
+            endTime.label("endTime"),
+            duration.label("duration"),
+            literal_column("'dc'").label("type"),
+            dataCollectionCount.label("count"),
+            sqlalchemy.func.count(
+                sqlalchemy.distinct(
+                    models.DataCollectionFileAttachment.dataCollectionFileAttachmentId
+                )
+            ).label("attachments"),
+        )
+        .join(
+            models.DataCollectionGroup,
+            models.DataCollectionGroup.dataCollectionGroupId
+            == models.DataCollection.dataCollectionGroupId,
+        )
+        .join(
+            models.BLSession,
+            models.BLSession.sessionId == models.DataCollectionGroup.sessionId,
+        )
+        .join(
+            models.Proposal, models.Proposal.proposalId == models.BLSession.proposalId
+        )
+        .outerjoin(models.DataCollectionFileAttachment)
+    )
+
+    queries["robot"] = (
+        db.session.query(
+            models.RobotAction.robotActionId.label("id"),
+            models.RobotAction.startTimestamp.label("startTime"),
+            models.RobotAction.endTimestamp.label("endTime"),
+            (
+                sqlalchemy.func.timestampdiff(
+                    sqlalchemy.text("SECOND"),
+                    models.RobotAction.startTimestamp,
+                    models.RobotAction.endTimestamp,
+                )
+                / 60
+            ).label("duration"),
+            literal_column("'robot'").label("type"),
+            literal_column("1").label("count"),
+            literal_column("0").label("attachments"),
+        )
+        .join(
+            models.BLSession,
+            models.BLSession.sessionId == models.RobotAction.blsessionId,
+        )
+        .join(
+            models.Proposal, models.Proposal.proposalId == models.BLSession.proposalId
+        )
+        .group_by(models.RobotAction.robotActionId)
+    )
+    queries["xrf"] = (
+        db.session.query(
+            models.XFEFluorescenceSpectrum.xfeFluorescenceSpectrumId.label("id"),
+            models.XFEFluorescenceSpectrum.startTime.label("startTime"),
+            models.XFEFluorescenceSpectrum.endTime.label("endTime"),
+            (
+                sqlalchemy.func.timestampdiff(
+                    sqlalchemy.text("SECOND"),
+                    models.XFEFluorescenceSpectrum.startTime,
+                    models.XFEFluorescenceSpectrum.endTime,
+                )
+                / 60
+            ).label("duration"),
+            literal_column("'xrf'").label("type"),
+            literal_column("1").label("count"),
+            literal_column("0").label("attachments"),
+        )
+        .join(
+            models.BLSession,
+            models.BLSession.sessionId == models.XFEFluorescenceSpectrum.sessionId,
+        )
+        .join(
+            models.Proposal, models.Proposal.proposalId == models.BLSession.proposalId
+        )
+        .group_by(models.XFEFluorescenceSpectrum.xfeFluorescenceSpectrumId)
+    )
+    queries["es"] = (
+        db.session.query(
+            models.EnergyScan.energyScanId.label("id"),
+            models.EnergyScan.startTime.label("startTime"),
+            models.EnergyScan.endTime.label("endTime"),
+            (
+                sqlalchemy.func.timestampdiff(
+                    sqlalchemy.text("SECOND"),
+                    models.EnergyScan.startTime,
+                    models.EnergyScan.endTime,
+                )
+                / 60
+            ).label("duration"),
+            literal_column("'es'").label("type"),
+            literal_column("1").label("count"),
+            literal_column("0").label("attachments"),
+        )
+        .join(
+            models.BLSession, models.BLSession.sessionId == models.EnergyScan.sessionId
+        )
+        .join(
+            models.Proposal, models.Proposal.proposalId == models.BLSession.proposalId
+        )
+        .group_by(models.EnergyScan.energyScanId)
+    )
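+
+    # The four sub-queries deliberately share one column shape
+    # (id, startTime, endTime, duration, type, count, attachments) so that
+    # they can be UNIONed into a single chronological stream further down.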
+
+    if session:
+        session_row = (
+            db.session.query(models.BLSession)
+            .join(models.Proposal)
+            .filter(models.BLSession.session == session)
+            .first()
+        )
+
+    if proposal:
+        proposal_row = (
+            db.session.query(models.Proposal)
+            .filter(models.Proposal.proposal == proposal)
+            .first()
+        )
+
+    # Join sample information
+    for key in queries.keys():
+        # Add proposal, session
+        queries[key] = queries[key].add_columns(
+            _proposal, _session, models.BLSession.sessionId.label("sessionId")
+        )
+
+        # Add sample
+        queries[key] = with_sample(
+            queries[key], ENTITY_TYPES[key].sampleId, blSampleId, proteinId
+        )
+
+        # Apply permissions
+        queries[key] = with_authorization(queries[key], joinBLSession=False)
+
+        # Filter by session
+        if session:
+            if session_row:
+                queries[key] = queries[key].filter(
+                    models.BLSession.sessionId == session_row.sessionId
+                )
+
+        if sessionId:
+            queries[key] = queries[key].filter(models.BLSession.sessionId == sessionId)
+
+        # Filter by proposal
+        if proposal:
+            if proposal_row:
+                queries[key] = queries[key].filter(
+                    models.Proposal.proposalId == proposal_row.proposalId
+                )
+
+        if proposalId:
+            queries[key] = queries[key].filter(models.Proposal.proposalId == proposalId)
+
+        # Filter by beamLineName
+        if beamLineName:
+            queries[key] = queries[key].filter(
+                models.BLSession.beamLineName == beamLineName
+            )
+
+    # Filter a single dataCollection
+    if dataCollectionId:
+        queries["dc"] = queries["dc"].filter(
+            models.DataCollection.dataCollectionId == dataCollectionId
+        )
+        queries["robot"] = queries["robot"].filter(
+            models.RobotAction.robotActionId == 0
+        )
+        queries["xrf"] = queries["xrf"].filter(
+            models.XFEFluorescenceSpectrum.xfeFluorescenceSpectrumId == 0
+        )
+        queries["es"] = queries["es"].filter(models.EnergyScan.energyScanId == 0)
+
+    # Ungroup a dataCollectionGroup
+    if dataCollectionGroupId:
+        queries["dc"] = (
+            queries["dc"]
+            .filter(
+                models.DataCollectionGroup.dataCollectionGroupId
+                == dataCollectionGroupId
+            )
+            .group_by(models.DataCollection.dataCollectionId)
+        )
+        queries["robot"] = queries["robot"].filter(
+            models.RobotAction.robotActionId == 0
+        )
+        queries["xrf"] = queries["xrf"].filter(
+            models.XFEFluorescenceSpectrum.xfeFluorescenceSpectrumId == 0
+        )
+        queries["es"] = queries["es"].filter(models.EnergyScan.energyScanId == 0)
+    else:
+        queries["dc"] = queries["dc"].group_by(
+            models.DataCollectionGroup.dataCollectionGroupId
+        )
+
+    # Filter by blSubSample
+    if blSubSampleId:
+        queries["dc"] = queries["dc"].filter(
+            models.DataCollection.blSubSampleId == blSubSampleId
+        )
+        queries["robot"] = queries["robot"].filter(
+            models.RobotAction.robotActionId == 0
+        )
+        queries["xrf"] = queries["xrf"].filter(
+            models.XFEFluorescenceSpectrum.xfeFluorescenceSpectrumId == 0
+        )
+        queries["es"] = queries["es"].filter(models.EnergyScan.energyScanId == 0)
+
+    # Filter by status
+    if status:
+        if status == EventStatus.success:
+            queries["dc"] = queries["dc"].filter(
+                models.DataCollection.runStatus.like("%success%")
+            )
+            queries["robot"] = queries["robot"].filter(
+                models.RobotAction.status.like("%success%")
+            )
+        elif status == EventStatus.failed:
+            queries["dc"] = queries["dc"].filter(
+                models.DataCollection.runStatus.notlike("%success%")
+            )
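+            # runStatus/status are free-text columns; matching on the
+            # substring "success" mirrors the success branch above and is a
+            # heuristic rather than a strict state machine.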
+ queries["robot"] = queries["robot"].filter( + models.RobotAction.status.notlike("%success%") + ) + elif status == EventStatus.processed: + queries["dc"] = ( + queries["dc"] + .join(models.AutoProcIntegration) + .join(models.AutoProcProgram) + .filter(models.AutoProcProgram.processingStatus == 1) + ) + queries["robot"] = queries["robot"].filter( + models.RobotAction.robotActionId == 0 + ) + elif status == EventStatus.processerror: + if not hasattr(models, "AutoProcProgramMessage"): + raise HTTPException( + status_code=500, + detail="Database does not have `AutoProcProgramMessage`", + ) + queries["dc"] = ( + queries["dc"] + .join(models.AutoProcIntegration) + # .outerjoin(models.ProcessingJob) + .join( + models.AutoProcProgram, + or_( + # models.ProcessingJob.processingJobId + # == models.AutoProcProgram.processingJobId, + models.AutoProcIntegration.autoProcProgramId + == models.AutoProcProgram.autoProcProgramId, + ), + ) + .join(models.AutoProcProgramMessage) + .filter( + or_( + models.AutoProcProgramMessage.severity == "WARNING", + models.AutoProcProgramMessage.severity == "ERROR", + ) + ) + ) + queries["robot"] = queries["robot"].filter( + models.RobotAction.robotActionId == 0 + ) + + queries["xrf"] = queries["xrf"].filter( + models.XFEFluorescenceSpectrum.xfeFluorescenceSpectrumId == 0 + ) + queries["es"] = queries["es"].filter(models.EnergyScan.energyScanId == 0) + + # Filter by eventType + if eventType: + filters = { + "dc": queries["dc"].filter(models.DataCollection.dataCollectionId == 0), + "robot": queries["robot"].filter(models.RobotAction.robotActionId == 0), + "xrf": queries["xrf"].filter( + models.XFEFluorescenceSpectrum.xfeFluorescenceSpectrumId == 0 + ), + "es": queries["es"].filter(models.EnergyScan.energyScanId == 0), + } + + tableFilters = ["robot", "es", "xrf"] + if eventType in tableFilters: + for query in tableFilters: + if eventType == query: + filters[query] = queries[query] + else: + filters["dc"] = queries["dc"].filter( + models.DataCollectionGroup.experimentType == eventType + ) + + for key, query_filter in filters.items(): + queries[key] = query_filter + + # Now union the four queries + query: sqlalchemy.orm.Query[Any] = queries["dc"].union( + queries["robot"], queries["xrf"], queries["es"] + ) + + total = query.count() + query = query.order_by(sqlalchemy.desc("startTime")) + query = page(query, skip=skip, limit=limit) + + # Results contains an index of type / id + results = query.all() + results = [r._asdict() for r in results] + + # Build a list of ids to load based on type, i.e. a list of `dataCollectionId`s + entity_ids: dict[str, list[int]] = {} + for result in results: + for name in ENTITY_TYPES.keys(): + if result["type"] == name: + if name not in entity_ids: + entity_ids[name] = [] + entity_ids[name].append(result["id"]) + + # Now load the related entities, i.e. 
load the `DataCollection` or `EnergyScan` + entity_type_map = {} + for name, entity_type in ENTITY_TYPES.items(): + if name in entity_ids: + column = getattr(entity_type.entity, entity_type.key) + query = db.session.query(entity_type.entity).filter( + column.in_(entity_ids[name]) + ) + + # If there are joined entities load those too + if entity_type.joined: + for joined_entity in entity_type.joined: + if isinstance(joined_entity, list): + query = query.outerjoin(joined_entity[-1]).options( + contains_eager(*joined_entity) + ) + else: + query = query.outerjoin(joined_entity).options( + contains_eager(joined_entity) + ) + + entity_type_map[name] = { + getattr(entity, entity_type.key): entity for entity in query.all() + } + + # Merge the loaded entities back into the index's `Item` + for result in results: + for entity_type_name in ENTITY_TYPES.keys(): + if result["type"] == entity_type_name: + if entity_type_name in entity_type_map: + result["Item"] = entity_type_map[entity_type_name][result["id"]] + + if entity_type_name == "dc": + _check_snapshots(result["Item"]) + + return Paged(total=total, results=results, skip=skip, limit=limit) + + +def _check_snapshots(datacollection: models.DataCollection) -> models.DataCollection: + snapshot_statuses = {} + for i, snapshot in enumerate( + [ + "xtalSnapshotFullPath1", + "xtalSnapshotFullPath2", + "xtalSnapshotFullPath3", + "xtalSnapshotFullPath4", + ] + ): + snapshot_path = getattr(datacollection, snapshot) + if snapshot_path: + if settings.path_map: + snapshot_path = settings.path_map + snapshot_path + snapshot_statuses[i + 1] = ( + os.path.exists(snapshot_path) if snapshot_path is not None else False + ) + else: + snapshot_statuses[i + 1] = False + + datacollection._metadata["snapshots"] = snapshot_statuses + + analysis = False + if hasattr(datacollection, "imageQualityIndicatorsPlotPath"): + if datacollection.imageQualityIndicatorsPlotPath: + analysis = os.path.exists(datacollection.imageQualityIndicatorsPlotPath) + datacollection._metadata["snapshots"]["analysis"] = analysis + + # diffraction_row: models.Image = ( + # db.session.query(models.Image) + # .filter(models.Image.imageNumber == 1) + # .filter(models.Image.dataCollectionId == datacollection.dataCollectionId) + # .first() + # ) + + # diffraction = False + # if diffraction_row: + # if diffraction_row.jpegThumbnailFileFullPath: + # diffraction = os.path.exists(diffraction_row.jpegThumbnailFileFullPath) + # datacollection._metadata["snapshots"]["diffraction"] = diffraction + + return datacollection + + +def get_event_types( + session: Optional[str] = None, + sessionId: Optional[int] = None, + blSampleId: Optional[int] = None, + proteinId: Optional[int] = None, +) -> Paged[schema.EventType]: + queries = {} + queries["dc"] = db.session.query( + sqlalchemy.distinct(models.DataCollectionGroup.experimentType).label( + "experimentType" + ), + ).join( + models.BLSession, + models.BLSession.sessionId == models.DataCollectionGroup.sessionId, + ) + + queries["robot"] = db.session.query( + sqlalchemy.func.count(models.RobotAction.robotActionId).label("count") + ).join( + models.BLSession, + models.BLSession.sessionId == models.RobotAction.blsessionId, + ) + queries["xrf"] = db.session.query( + sqlalchemy.func.count( + models.XFEFluorescenceSpectrum.xfeFluorescenceSpectrumId + ).label("count") + ).join( + models.BLSession, + models.BLSession.sessionId == models.XFEFluorescenceSpectrum.sessionId, + ) + queries["es"] = db.session.query( + 
sqlalchemy.func.count(models.EnergyScan.energyScanId).label("count") + ).join(models.BLSession, models.BLSession.sessionId == models.EnergyScan.sessionId) + + if session: + session_row = ( + db.session.query(models.BLSession) + .join(models.Proposal) + .filter(models.BLSession.session == session) + .first() + ) + + for key in queries.keys(): + queries[key] = queries[key].join( + models.Proposal, + models.Proposal.proposalId == models.BLSession.proposalId, + ) + + if session: + if session_row: + queries[key] = queries[key].filter( + models.BLSession.sessionId == session_row.sessionId + ) + + if sessionId: + queries[key] = queries[key].filter(models.BLSession.sessionId == sessionId) + + if blSampleId: + queries[key] = queries[key].filter( + models.DataCollectionGroup.blSampleId == blSampleId + ) + + if proteinId: + queries[key] = ( + queries[key] + .join(models.BLSample) + .join(models.Crystal) + .join(models.Protein) + .filter(models.Protein.proteinId == proteinId) + ) + + queries[key] = with_authorization(queries[key], joinBLSession=False) + + queries[key] = [result._asdict() for result in queries[key].all()] + + eventTypes = [] + for eventType in queries["dc"]: + if eventType["experimentType"]: + eventTypes.append( + { + "eventType": eventType["experimentType"], + "eventTypeName": eventType["experimentType"], + } + ) + + for table, name in { + "robot": "Sample Actions", + "xrf": "XRF Spectrum", + "es": "Energy Scan", + }.items(): + if queries[table][0]["count"] > 0: + eventTypes.append({"eventType": table, "eventTypeName": name}) + + return Paged( + total=len(eventTypes), results=eventTypes, skip=0, limit=len(eventTypes) + ) diff --git a/pyispyb/core/modules/image_quality_indicators.py b/pyispyb/core/modules/image_quality_indicators.py deleted file mode 100644 index a0f4a91f..00000000 --- a/pyispyb/core/modules/image_quality_indicators.py +++ /dev/null @@ -1,39 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from pyispyb.core import models, schemas - - -def get_image_quality_indicators(): - """ - Returns image quality indicators. 
-
-    Returns:
-        [type]: [description]
-    """
-    image_quality_indicators_list = models.ImageQualityIndicator.query.all()
-    return schemas.image_quality_indicators.ma_schema.dump(
-        image_quality_indicators_list
-    )
diff --git a/pyispyb/core/modules/labcontacts.py b/pyispyb/core/modules/labcontacts.py
new file mode 100644
index 00000000..34475b8b
--- /dev/null
+++ b/pyispyb/core/modules/labcontacts.py
@@ -0,0 +1,84 @@
+from typing import Optional
+
+from sqlalchemy.orm import joinedload
+from ispyb import models
+
+from ...app.extensions.database.definitions import with_authorization
+from ...app.extensions.database.utils import Paged, page, update_model
+from ...app.extensions.database.middleware import db
+from ..schemas import labcontacts as schema
+from .proposals import get_proposals
+
+
+def get_labcontacts(
+    skip: int,
+    limit: int,
+    labContactId: Optional[int] = None,
+    proposal: str = None,
+    proposalId: Optional[int] = None,
+    withAuthorization: bool = True,
+) -> Paged[models.LabContact]:
+    query = (
+        db.session.query(models.LabContact)
+        .options(joinedload(models.LabContact.Person))
+        .options(joinedload(models.LabContact.Person, models.Person.Laboratory))
+        .join(
+            models.Proposal, models.Proposal.proposalId == models.LabContact.proposalId
+        )
+        .group_by(models.LabContact.labContactId)
+    )
+
+    if labContactId:
+        query = query.filter(models.LabContact.labContactId == labContactId)
+
+    if proposal:
+        query = query.filter(models.Proposal.proposal == proposal)
+
+    if proposalId:
+        query = query.filter(models.LabContact.proposalId == proposalId)
+
+    if withAuthorization:
+        query = with_authorization(query)
+
+    total = query.count()
+    query = page(query, skip=skip, limit=limit)
+
+    return Paged(total=total, results=query.all(), skip=skip, limit=limit)
+
+
+def create_labcontact(labcontact: schema.LabContactCreate) -> models.LabContact:
+    labcontact_dict = labcontact.dict()
+    person_dict = labcontact_dict.pop("Person")
+    laboratory_dict = person_dict.pop("Laboratory")
+
+    # Validate that the proposal exists (and is visible to the caller) before
+    # creating anything; `.first` fails on an empty result
+    proposals = get_proposals(proposalId=labcontact.proposalId, skip=0, limit=1)
+    proposals.first
+
+    laboratory = models.Laboratory(**laboratory_dict)
+    db.session.add(laboratory)
+    db.session.commit()
+
+    person = models.Person(laboratoryId=laboratory.laboratoryId, **person_dict)
+    db.session.add(person)
+    db.session.commit()
+
+    contact = models.LabContact(personId=person.personId, **labcontact_dict)
+    db.session.add(contact)
+    db.session.commit()
+
+    new_labcontact = get_labcontacts(
+        labContactId=int(contact.labContactId), skip=0, limit=1
+    )
+    return new_labcontact.first
+
+
+def update_labcontact(
+    labContactId: int, labContact: schema.LabContactCreate
+) -> models.LabContact:
+    labcontact_dict = labContact.dict(exclude_unset=True)
+    labcontact = get_labcontacts(labContactId=labContactId, skip=0, limit=1).first
+
+    update_model(labcontact, labcontact_dict)
+    db.session.commit()
+
+    return get_labcontacts(labContactId=labContactId, skip=0, limit=1).first
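+
+
+# Illustrative payload (field names are assumptions; the authoritative schema
+# lives in pyispyb/core/schemas/labcontacts.py):
+#
+#     create_labcontact(schema.LabContactCreate(
+#         cardName="Jane (home)",
+#         proposalId=1,
+#         Person={
+#             "givenName": "Jane",
+#             "familyName": "Doe",
+#             "Laboratory": {"name": "Crystallography Lab"},
+#         },
+#     ))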
diff --git a/pyispyb/core/modules/laboratories.py b/pyispyb/core/modules/laboratories.py
new file mode 100644
index 00000000..ead4cc9e
--- /dev/null
+++ b/pyispyb/core/modules/laboratories.py
@@ -0,0 +1,51 @@
+from typing import Optional
+from ispyb import models
+from pyispyb.app.extensions.database.utils import Paged, page
+from pyispyb.app.extensions.database.middleware import db
+from pyispyb.core.schemas import laboratories as schema
+
+
+def get_laboratories(
+    skip: int,
+    limit: int,
+    laboratoryId: Optional[int] = None,
+    name: Optional[str] = None,
+    city: Optional[str] = None,
+    country: Optional[str] = None,
+    laboratoryExtPk: Optional[int] = None,
+) -> Paged[models.Laboratory]:
+    query = db.session.query(models.Laboratory)
+
+    if laboratoryId:
+        query = query.filter(models.Laboratory.laboratoryId == laboratoryId)
+
+    if name:
+        query = query.filter(models.Laboratory.name == name)
+
+    if city:
+        query = query.filter(models.Laboratory.city == city)
+
+    if country:
+        query = query.filter(models.Laboratory.country == country)
+
+    if laboratoryExtPk:
+        query = query.filter(models.Laboratory.laboratoryExtPk == laboratoryExtPk)
+
+    total = query.count()
+    query = page(query, skip=skip, limit=limit)
+
+    return Paged(total=total, results=query.all(), skip=skip, limit=limit)
+
+
+def create_laboratory(laboratory: schema.Laboratory) -> models.Laboratory:
+    laboratory_dict = laboratory.dict()
+
+    laboratory = models.Laboratory(**laboratory_dict)
+    db.session.add(laboratory)
+    db.session.commit()
+
+    new_laboratory = get_laboratories(
+        laboratoryId=int(laboratory.laboratoryId), skip=0, limit=1
+    )
+    return new_laboratory.first
diff --git a/pyispyb/core/modules/legacy/__init__.py b/pyispyb/core/modules/legacy/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pyispyb/core/modules/legacy/data_collections.py b/pyispyb/core/modules/legacy/data_collections.py
new file mode 100644
index 00000000..91a65f61
--- /dev/null
+++ b/pyispyb/core/modules/legacy/data_collections.py
@@ -0,0 +1,42 @@
+# Project: py-ispyb.
+
+# https://github.com/ispyb/py-ispyb
+
+# This file is part of py-ispyb software.
+
+# py-ispyb is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# py-ispyb is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+
+# You should have received a copy of the GNU Lesser General Public License
+# along with py-ispyb. If not, see .
+
+
+__license__ = "LGPLv3+"
+
+from pyispyb.app.utils import get_sql_query, queryresult_to_dict
+from pyispyb.app.extensions.database.middleware import db
+
+
+def get_data_collections_groups(session_id):
+    """Get data collection groups for session.
+
+    Args:
+        session_id (str): session id
+
+    Returns:
+        dict: Data collection groups
+    """
+    sql = get_sql_query(
+        "dataCollection/groups",
+        append=" where DataCollectionGroup_sessionId = :sessionId group by v_datacollection_summary.DataCollectionGroup_dataCollectionGroupId order by DataCollection_startTime desc",
+    )
+    sql = sql.bindparams(sessionId=session_id)
+    group_list = db.session.execute(sql)
+    return queryresult_to_dict(group_list)
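+
+
+# Illustrative usage (session id hypothetical); the heavy lifting happens in
+# the site-provided SQL template under dataCollection/groups:
+#
+#     groups = get_data_collections_groups(session_id=1234)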
diff --git a/pyispyb/core/modules/legacy/em.py b/pyispyb/core/modules/legacy/em.py
new file mode 100644
index 00000000..5dff5587
--- /dev/null
+++ b/pyispyb/core/modules/legacy/em.py
@@ -0,0 +1,173 @@
+# Project: py-ispyb.
+
+# https://github.com/ispyb/py-ispyb
+
+# This file is part of py-ispyb software.
+
+# py-ispyb is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# py-ispyb is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+
+# You should have received a copy of the GNU Lesser General Public License
+# along with py-ispyb. If not, see .
+
+
+__license__ = "LGPLv3+"
+
+from pyispyb.app.utils import get_sql_query, queryresult_to_dict
+from pyispyb.app.extensions.database.middleware import db
+from pyispyb.core.modules.legacy import data_collections
+
+############################
+#          MOVIES          #
+############################
+
+
+def get_movies_data_by_datacollection_id(proposal_id, datacollection_id):
+    """Get movies data for datacollection.
+
+    Args:
+        proposal_id (str): proposal id
+        datacollection_id (str): datacollection id
+
+    Returns:
+        dict: movies data
+    """
+    sql = get_sql_query(
+        "em/movie",
+        append=" where Movie_dataCollectionId = :dataCollectionId and Proposal_proposalId=:proposalId",
+    )
+    sql = sql.bindparams(dataCollectionId=datacollection_id, proposalId=proposal_id)
+    res = db.session.execute(sql)
+    return queryresult_to_dict(res)
+
+
+def get_movie_thumbnails(proposal_id, movie_id):
+    """Get movie thumbnails.
+
+    Args:
+        proposal_id (str): proposal id
+        movie_id (str): movie id
+
+    Returns:
+        dict: thumbnails object
+    """
+    sql = get_sql_query("em/movie_thumbnails")
+    sql = sql.bindparams(movieId=movie_id, proposalId=proposal_id)
+    res = db.session.execute(sql)
+    res = queryresult_to_dict(res)
+    # `res` has already been converted above; return the first row directly
+    if len(res) > 0:
+        return res[0]
+    return None
+
+
+############################
+#          STATS           #
+############################
+
+
+def get_stats_by_session_id(session_id):
+    """Get stats for session.
+
+    Args:
+        session_id (str): session id
+
+    Returns:
+        dict: stats
+    """
+    sql = get_sql_query("em/sessionStats", append=" where sessionId = :sessionId")
+    sql = sql.bindparams(sessionId=session_id)
+    res = db.session.execute(sql)
+    return queryresult_to_dict(res)
+
+
+def get_stats_by_data_collections_ids(proposal_id, data_collection_ids):
+    """Get stats for data collections.
+
+    Args:
+        proposal_id (str): proposal id
+        data_collection_ids (str): comma-separated data collection ids
+
+    Returns:
+        dict: stats
+    """
+    sql = get_sql_query(
+        "em/dataCollectionsStats",
+        append=" where dataCollectionId in (:dataCollectionIdList) and BLSession.proposalId=:proposalId",
+    )
+    sql = sql.bindparams(
+        dataCollectionIdList=data_collection_ids, proposalId=proposal_id
+    )
+    res = db.session.execute(sql)
+    return queryresult_to_dict(res)
+
+
+def get_stats_by_data_collections_group_id(proposal_id, data_collection_group_id):
+    """Get stats for datacollection group.
+
+    Args:
+        proposal_id (str): proposal id
+        data_collection_group_id (str): data collection group id
+
+    Returns:
+        dict: stats
+    """
+    sql = get_sql_query(
+        "em/dataCollectionsStats",
+        append=" where DataCollection.dataCollectionGroupId=:dataCollectionGroupId and BLSession.proposalId=:proposalId",
+    )
+    sql = sql.bindparams(
+        dataCollectionGroupId=data_collection_group_id, proposalId=proposal_id
+    )
+    res = db.session.execute(sql)
+    return queryresult_to_dict(res)
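+
+
+# Illustrative usage (ids hypothetical); note that, matching the raw SQL
+# template, the id list is passed as a single comma-separated string:
+#
+#     stats = get_stats_by_data_collections_ids("1", "101,102,103")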
+
+
+############################
+#     DATA COLLECTION      #
+############################
+
+
+def get_data_collections_groups(proposal_id, session_id):
+    """
+    Get data collection groups for session.
+
+    Args:
+        proposal_id (str): proposal id
+        session_id (str): session id
+
+    Returns:
+        list: datacollection groups
+    """
+    res = data_collections.get_data_collections_groups(session_id)
+    for row in res:
+        row["stats"] = get_stats_by_data_collections_group_id(
+            proposal_id, row["DataCollectionGroup_dataCollectionGroupId"]
+        )
+    return res
+
+
+############################
+#      CLASSIFICATION      #
+############################
+
+
+def get_classification_by_session_id(session_id):
+    """Get classification for session.
+
+    Args:
+        session_id (str): session id
+
+    Returns:
+        dict: classification
+    """
+    sql = get_sql_query("em/classification", append=" where sessionId = :sessionId")
+    sql = sql.bindparams(sessionId=session_id)
+    res = db.session.execute(sql)
+    return queryresult_to_dict(res)
diff --git a/pyispyb/core/modules/legacy/proposal.py b/pyispyb/core/modules/legacy/proposal.py
new file mode 100644
index 00000000..0d6ffdb6
--- /dev/null
+++ b/pyispyb/core/modules/legacy/proposal.py
@@ -0,0 +1,115 @@
+"""
+Project: py-ispyb.
+
+https://github.com/ispyb/py-ispyb
+
+This file is part of py-ispyb software.
+
+py-ispyb is free software: you can redistribute it and/or modify
+it under the terms of the GNU Lesser General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+py-ispyb is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public License
+along with py-ispyb. If not, see .
+"""
+
+
+__license__ = "LGPLv3+"
+
+from ispyb import models
+
+from pyispyb.app.utils import get_sql_query, queryresult_to_dict
+from pyispyb.app.extensions.database.middleware import db
+
+
+def get_proposals_infos_login(login):
+    """
+    Get info for all proposals that the user can access.
+
+    Args:
+        login (str): user login
+
+    Returns:
+        dict: proposal info
+    """
+    sql = get_sql_query("proposal/proposalsInfosLogin")
+    sql = sql.bindparams(login=login)
+    res = db.session.execute(sql)
+    return queryresult_to_dict(res)
+
+
+def get_proposals_infos_all():
+    """Get info for all proposals.
+
+    Returns:
+        dict: proposal info
+    """
+    sql = get_sql_query("proposal/proposalsInfosAll")
+    res = db.session.execute(sql)
+    return queryresult_to_dict(res)
+
+
+def get_proposal_infos(proposal_id):
+    """Get proposal info.
+
+    Args:
+        proposal_id (str): proposal id
+
+    Returns:
+        dict: proposal info
+    """
+    return {
+        "proposal": (
+            db.session.query(models.Proposal)
+            .filter(models.Proposal.proposalId == proposal_id)
+            .first()
+        )
+    }
+
+
+def login_authorized_for_proposal(login, proposal_id):
+    """Verify that login is authorized for proposal.
+
+    Args:
+        login (str): user login
+        proposal_id (str): proposal id
+
+    Returns:
+        boolean: authorization
+    """
+    sql = get_sql_query("proposal/loginAuthorizedProposal")
+    sql = sql.bindparams(login=login, proposalId=proposal_id)
+    is_authorized = db.session.execute(sql)
+    return is_authorized.first()[0] > 0
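+
+
+# Illustrative guard (values hypothetical; HTTPException would come from
+# fastapi in a real route):
+#
+#     if not login_authorized_for_proposal("jdoe", 123):
+#         raise HTTPException(status_code=403)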
+
+
+def find_proposal_id(id_or_name):
+    """Convert proposal name to id. If already an id, return it unchanged.
+
+    Args:
+        id_or_name (str): proposal id or name
+
+    Raises:
+        Exception: More than one proposal found for name
+        Exception: No proposal found for name
+
+    Returns:
+        str: proposal id
+    """
+    sql = get_sql_query("proposal/findProposalId")
+    sql = sql.bindparams(name=id_or_name)
+    res = db.session.execute(sql)
+    res = queryresult_to_dict(res)
+    if len(res) == 1:
+        return res[0]["proposalId"]
+    if len(res) > 1:
+        raise Exception(f"More than one proposal found for {id_or_name}")
+    raise Exception(f"No proposal found for {id_or_name}")
diff --git a/pyispyb/core/modules/legacy/session.py b/pyispyb/core/modules/legacy/session.py
new file mode 100644
index 00000000..3f8b1221
--- /dev/null
+++ b/pyispyb/core/modules/legacy/session.py
@@ -0,0 +1,165 @@
+"""
+Project: py-ispyb.
+
+https://github.com/ispyb/py-ispyb
+
+This file is part of py-ispyb software.
+
+py-ispyb is free software: you can redistribute it and/or modify
+it under the terms of the GNU Lesser General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+py-ispyb is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public License
+along with py-ispyb. If not, see .
+"""
+
+
+__license__ = "LGPLv3+"
+
+
+from pyispyb.app.utils import get_sql_query, queryresult_to_dict
+from pyispyb.app.extensions.database.middleware import db
+
+
+def get_session_infos_login(login):
+    """Get info for sessions that user can access.
+
+    Args:
+        login (str): user login
+
+    Returns:
+        list: sessions infos
+    """
+    sql = get_sql_query("session/sessionsInfosLogin")
+    sql = sql.bindparams(login=login)
+    res = db.session.execute(sql)
+    return queryresult_to_dict(res)
+
+
+def get_session_infos_all():
+    """Get info for all sessions.
+
+    Returns:
+        list: sessions infos
+    """
+    sql = get_sql_query("session/sessionsInfosAll")
+    res = db.session.execute(sql)
+    return queryresult_to_dict(res)
+
+
+def get_session_infos_login_proposal(login, proposal_id):
+    """Get info for sessions in proposal that user can access.
+
+    Args:
+        login (str): user login
+        proposal_id (str): proposal id
+
+    Returns:
+        list: sessions infos
+    """
+    sql = get_sql_query(
+        "session/sessionsInfosLogin", append=" and proposalId = :proposalId"
+    )
+    sql = sql.bindparams(login=login, proposalId=proposal_id)
+    res = db.session.execute(sql)
+    return queryresult_to_dict(res)
+
+
+def get_session_infos_all_proposal(proposal_id):
+    """Get info for all sessions in proposal.
+
+    Args:
+        proposal_id (str): proposal id
+
+    Returns:
+        list: sessions infos
+    """
+    sql = get_sql_query(
+        "session/sessionsInfosAll", append=" where proposalId = :proposalId"
+    )
+    sql = sql.bindparams(proposalId=proposal_id)
+    res = db.session.execute(sql)
+    return queryresult_to_dict(res)
+
+
+def get_session_infos_login_dates(login, start_date, end_date):
+    """Get info for sessions between dates that user can access.
+ + Args: + login (str): user login + start_date (str): start_date + end_date (str): end_date + + Returns: + list: sessions infos + """ + sql = get_sql_query( + "session/sessionsInfosLogin", + append=""" + and ( + (BLSession_startDate >= :startDate and BLSession_startDate <= :endDate) + or + (BLSession_endDate >= :startDate and BLSession_endDate <= :endDate) + or + (BLSession_endDate >= :endDate and BLSession_startDate <= :startDate) + or + (BLSession_endDate <= :endDate and BLSession_startDate >= :startDate) + ) + order by v_session.sessionId DESC + """, + ) + sql = sql.bindparams(login=login, startDate=start_date, endDate=end_date) + res = db.session.execute(sql) + return queryresult_to_dict(res) + + +def get_session_infos_all_dates(start_date, end_date): + """Get info for all sessions between dates. + + Args: + start_date (str): start_date + end_date (str): end_date + + Returns: + list: sessions infos + """ + sql = get_sql_query( + "session/sessionsInfosAll", + append=""" + where ( + (BLSession_startDate >= :startDate and BLSession_startDate <= :endDate) + or + (BLSession_endDate >= :startDate and BLSession_endDate <= :endDate) + or + (BLSession_endDate >= :endDate and BLSession_startDate <= :startDate) + or + (BLSession_endDate <= :endDate and BLSession_startDate >= :startDate) + ) + order by v_session.sessionId DESC + """, + ) + sql = sql.bindparams(startDate=start_date, endDate=end_date) + res = db.session.execute(sql) + return queryresult_to_dict(res) + + +def login_authorized_for_session(login, session_id): + """Verify that login is authorized to access session. + + Args: + login (str): login + session_id (str): session id + + Returns: + boolean: authorization + """ + sql = get_sql_query("session/loginAuthorizedSession") + sql = sql.bindparams(login=login, sessionId=session_id) + is_authorized = db.session.execute(sql) + return is_authorized.first()[0] > 0 diff --git a/pyispyb/core/modules/mapping.py b/pyispyb/core/modules/mapping.py new file mode 100644 index 00000000..178d328e --- /dev/null +++ b/pyispyb/core/modules/mapping.py @@ -0,0 +1,234 @@ +import io +import gzip +import json + +import matplotlib as mpl +import matplotlib.cm as cm +import numpy as np +from PIL import Image +from sqlalchemy import func, or_ +from sqlalchemy.orm import joinedload +from ispyb import models + +from ...config import settings +from ...app.extensions.database.definitions import with_authorization +from ...app.extensions.database.utils import Paged, page, with_metadata +from ...app.extensions.database.middleware import db +from ..schemas import mapping as schema + + +def get_maps( + skip: int, + limit: int, + xrfFluorescenceMappingId: int = None, + dataCollectionId: int = None, + dataCollectionGroupId: int = None, + blSampleId: int = None, + blSubSampleId: int = None, + withAuthorization: bool = True, +) -> Paged[models.XRFFluorescenceMapping]: + metadata = { + "url": func.concat( + f"{settings.api_root}/mapping/", + models.XRFFluorescenceMapping.xrfFluorescenceMappingId, + ), + "blSubSampleId": models.DataCollection.blSubSampleId, + "blSampleId": models.DataCollectionGroup.blSampleId, + "dataCollectionId": models.DataCollection.dataCollectionId, + } + + query = ( + db.session.query(models.XRFFluorescenceMapping, *metadata.values()) + .join(models.XRFFluorescenceMappingROI) + .options(joinedload(models.XRFFluorescenceMapping.XRFFluorescenceMappingROI)) + .join(models.GridInfo) + .options(joinedload(models.XRFFluorescenceMapping.GridInfo)) + .join(models.DataCollection) + 
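+        # Walk up to BLSession/Proposal so with_authorization() below can
+        # restrict the rows to what the caller is allowed to see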
+        .join(models.DataCollectionGroup)
+        .join(models.BLSession)
+        .join(models.Proposal)
+        .group_by(models.XRFFluorescenceMapping.xrfFluorescenceMappingId)
+        .order_by(models.XRFFluorescenceMapping.xrfFluorescenceMappingId)
+    )
+
+    if xrfFluorescenceMappingId:
+        query = query.filter(
+            models.XRFFluorescenceMapping.xrfFluorescenceMappingId
+            == xrfFluorescenceMappingId
+        )
+
+    if dataCollectionId:
+        query = query.filter(models.DataCollection.dataCollectionId == dataCollectionId)
+
+    if dataCollectionGroupId:
+        query = query.filter(
+            models.DataCollectionGroup.dataCollectionGroupId == dataCollectionGroupId
+        )
+
+    if blSampleId:
+        query = query.filter(
+            or_(
+                models.DataCollectionGroup.blSampleId == blSampleId,
+                # Hacky legacy support
+                models.DataCollection.BLSAMPLEID == blSampleId,
+            )
+        )
+
+    if blSubSampleId:
+        query = query.filter(models.DataCollection.blSubSampleId == blSubSampleId)
+
+    if withAuthorization:
+        query = with_authorization(query, joinBLSession=False)
+
+    total = query.count()
+    query = page(query, skip=skip, limit=limit)
+    results = with_metadata(query.all(), list(metadata.keys()))
+
+    return Paged(total=total, results=results, skip=skip, limit=limit)
+
+
+def get_map_rois(
+    skip: int,
+    limit: int,
+    xrfFluorescenceMappingROIId: int,
+    blSampleId: int = None,
+    withAuthorization: bool = True,
+) -> Paged[models.XRFFluorescenceMappingROI]:
+    query = (
+        db.session.query(models.XRFFluorescenceMappingROI)
+        .join(models.BLSample)
+        .join(models.Crystal)
+        .join(models.Protein)
+        .join(models.Proposal)
+        .group_by(models.XRFFluorescenceMappingROI.xrfFluorescenceMappingROIId)
+        .order_by(models.XRFFluorescenceMappingROI.xrfFluorescenceMappingROIId)
+    )
+
+    if xrfFluorescenceMappingROIId:
+        query = query.filter(
+            models.XRFFluorescenceMappingROI.xrfFluorescenceMappingROIId
+            == xrfFluorescenceMappingROIId
+        )
+
+    if blSampleId:
+        # Filter on the joined BLSample (DataCollectionGroup is not part of
+        # this query)
+        query = query.filter(models.BLSample.blSampleId == blSampleId)
+
+    if withAuthorization:
+        query = with_authorization(query)
+
+    total = query.count()
+    query = page(query, skip=skip, limit=limit)
+    results = query.all()
+
+    return Paged(total=total, results=results, skip=skip, limit=limit)
+
+
+def shape_map(map_: schema.Map) -> np.ndarray:
+    """Shapes a 1d map array into the correct 2d image
+
+    Reorders the data if need be for snaked collections
+    Reshapes if the data was collected vertically
+
+    Returns:
+        data (ndarray): The XRF map data
+    """
+    if map_.dataFormat == "json+gzip":
+        data = gunzip_json(map_.data)
+        data = np.array(data)
+    else:
+        data = np.array(map_.data)
+
+    # TODO: Catch raise
+    if map_.GridInfo.orientation == "vertical":
+        data = data.reshape(int(map_.GridInfo.steps_x), int(map_.GridInfo.steps_y))
+        data = np.rot90(data)
+        data = np.flipud(data)
+    else:
+        data = data.reshape(int(map_.GridInfo.steps_y), int(map_.GridInfo.steps_x))
+
+    # For snaked collection every other row is reversed
+    if map_.GridInfo.snaked:
+        data[1::2, :] = data[1::2, ::-1]
+
+    return data
+
+
+def generate_map_image(map_: schema.Map, image_format: str = "PNG") -> io.BytesIO:
+    """Generates a PIL Image from an XRF map
+
+    -1 placeholder values are converted to a transparent pixel
+
+    Returns:
+        image (io.Bytes): Bytes of PIL Image
+    """
+    data = shape_map(map_)
+    norm = mpl.colors.Normalize(vmin=map_.min, vmax=map_.max)
+
+    # Fall back to viridis when no (or an unknown) colour map is requested
+    colourmap = map_.colourMap or "viridis"
+    if not hasattr(cm, colourmap):
+        colourmap = "viridis"
+
+    cmap = getattr(cm, colourmap)
+
+    m = cm.ScalarMappable(norm=norm, cmap=cmap)
+    img_data = m.to_rgba(data, bytes=True)
+
+    mask = data == -1
+    img_data[mask, :] = [255, 255, 255, 0]
+
+    image = Image.fromarray(img_data, "RGBA")
+    img_io = io.BytesIO()
+    image.save(img_io, image_format, quality=100)
+    img_io.seek(0)
+
+    return img_io
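+
+
+# Illustrative FastAPI usage (route and names hypothetical):
+#
+#     from fastapi.responses import StreamingResponse
+#
+#     img = generate_map_image(map_)
+#     return StreamingResponse(img, media_type="image/png")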
+
+
+def generate_histogram(map_):
+    """Generates a histogram of map data
+
+    Args:
+        map_: An XRF map row (e.g. from get_maps)
+
+    Returns:
+        data (dict): The histogram, bin centers, and widths
+    """
+    data = shape_map(map_)
+    ndata = np.array(data)
+    rdata = np.where(ndata == -1, 0, ndata)
+
+    try:
+        hist, bins = np.histogram(rdata, bins=50)
+        center = (bins[:-1] + bins[1:]) / 2
+        width = np.diff(bins)
+
+    # TODO: This should not happen
+    except (OverflowError, ValueError):
+        # Use empty arrays so that .tolist() below still works
+        hist = np.array([])
+        center = np.array([])
+        width = np.array([])
+
+    return {
+        "xrfFluorescenceMappingId": map_.xrfFluorescenceMappingId,
+        "hist": hist.tolist(),
+        "bins": center.tolist(),
+        "width": width.tolist(),
+    }
+
+
+def gunzip_json(bytes_obj: bytes):
+    """Un-gzips a bytes object and loads it as JSON
+
+    Returns:
+        data (dict): The decoded JSON as a python object
+    """
+    if not bytes_obj:
+        return []
+
+    in_ = io.BytesIO()
+    in_.write(bytes_obj)
+    in_.seek(0)
+    with gzip.GzipFile(fileobj=in_, mode="rb") as fo:
+        gunzipped_bytes_obj = fo.read()
+
+    return json.loads(gunzipped_bytes_obj.decode())
diff --git a/pyispyb/core/modules/persons.py b/pyispyb/core/modules/persons.py
new file mode 100644
index 00000000..e7e29d2d
--- /dev/null
+++ b/pyispyb/core/modules/persons.py
@@ -0,0 +1,116 @@
+from typing import Optional
+
+from sqlalchemy import and_, or_, func
+from sqlalchemy.orm import contains_eager, aliased
+from ispyb import models
+
+from pyispyb.dependencies import has_permission
+
+from ...app.extensions.database.utils import Paged, page, with_metadata
+from ...app.extensions.database.middleware import db
+from ...core.modules.utils import encode_external_id
+from ...app.extensions.database.definitions import with_authorization
+
+
+def get_persons(
+    skip: int,
+    limit: int,
+    personId: Optional[int] = None,
+    proposal: Optional[str] = None,
+    sessionId: Optional[int] = None,
+    externalId: Optional[int] = None,
+    familyName: Optional[str] = None,
+    givenName: Optional[str] = None,
+    login: Optional[str] = None,
+    emailAddress: Optional[str] = None,
+    withLaboratory: Optional[bool] = False,
+    withAuthorization: bool = False,
+    showAll: bool = False,
+) -> Paged[models.Person]:
+    metadata = {}
+
+    query = (
+        db.session.query(models.Person)
+        .select_from(models.Person)
+        .filter(models.Person.login != None)  # noqa
+        .group_by(models.Person.personId)
+    )
+
+    if personId:
+        query = query.filter(models.Person.personId == personId)
+
+    if externalId:
+        externalId = encode_external_id(externalId)
+        query = query.filter(models.Person.externalId == externalId)
+
+    if familyName:
+        query = query.filter(models.Person.familyName == familyName)
+
+    if givenName:
+        query = query.filter(models.Person.givenName == givenName)
+
+    if login:
+        query = query.filter(models.Person.login == login)
+
+    if emailAddress:
+        query = query.filter(models.Person.emailAddress == emailAddress)
+
+    if withLaboratory:
+        query = query.join(models.Person.Laboratory).options(
+            contains_eager(models.Person.Laboratory),
+        )
+        query = query.populate_existing()
+
+    if proposal:
+        query = query.filter(models.Proposal.proposal == proposal)
+
+    if sessionId:
+        metadata["sessions"] = func.count(models.BLSession.sessionId)
+        metadata["lastSession"] = func.max(models.BLSession.startDate)
+        metadata["remote"] = models.SessionHasPerson.remote
metadata["role"] = models.SessionHasPerson.role + + shp2 = aliased(models.SessionHasPerson) + bls2 = aliased(models.BLSession) + query = ( + query.join(models.SessionHasPerson) + .join(models.BLSession) + .join(models.Proposal) + .outerjoin(shp2, shp2.personId == models.Person.personId) + .outerjoin( + bls2, + and_( + models.BLSession.sessionId == shp2.sessionId, + bls2.startDate < models.BLSession.startDate, + ), + ) + .add_columns( + metadata["sessions"], + metadata["lastSession"], + metadata["remote"], + metadata["role"], + ) + ) + + if withAuthorization: + if not (has_permission("manage_persons") and showAll): + if sessionId: + query = with_authorization(query, joinBLSession=False) + else: + query = query.outerjoin(models.ProposalHasPerson) + query = query.outerjoin(models.LabContact) + query = query.outerjoin( + models.Proposal, + or_( + models.LabContact.proposalId == models.Proposal.proposalId, + models.ProposalHasPerson.proposalId + == models.Proposal.proposalId, + ), + ) + query = with_authorization(query) + + total = query.count() + query = page(query, skip=skip, limit=limit) + results = with_metadata(query.all(), list(metadata.keys())) + + return Paged(total=total, results=results, skip=skip, limit=limit) diff --git a/pyispyb/core/modules/phasing.py b/pyispyb/core/modules/phasing.py deleted file mode 100644 index 5f7b46a3..00000000 --- a/pyispyb/core/modules/phasing.py +++ /dev/null @@ -1,64 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from flask_restx._http import HTTPStatus - -from pyispyb.app.extensions import db, auth_provider -from pyispyb.app.utils import create_response_item - -from pyispyb.core import models, schemas - -# from pyispyb.core.schemas import phasing_view - - -def get_phasing_results(request): - """Returns phasing_results_results by query parameters""" - - query_dict = request.args.to_dict() - - query_arg_list = ("dataCollectionId", "autoProcScalingId", "phasingStepId") - - # print(query_arg_list) - # return db.get_db_items_by_view( - # models.t_v_datacollection_summary_phasing, - # phasing_view.dict_schema, - # phasing_view.ma_schema, - # query_dict, - # ) - - -def add_phasing_results(data_dict): - """ - Adds a phasing_results. 
- - Args: - phasing_results_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return "Ok" - # return db.add_db_item(models.phasing_results, - # schemas.phasing_results.ma_schema, data_dict) diff --git a/pyispyb/core/modules/processings.py b/pyispyb/core/modules/processings.py new file mode 100644 index 00000000..b81497e8 --- /dev/null +++ b/pyispyb/core/modules/processings.py @@ -0,0 +1,601 @@ +from sqlalchemy import func, and_, or_, distinct +from sqlalchemy.sql.expression import literal_column +from sqlalchemy.orm import contains_eager +from ispyb import models + +from ...config import settings +from ...app.extensions.database.utils import Paged, page, with_metadata +from ...app.extensions.database.definitions import ( + with_authorization, +) +from ...app.extensions.database.middleware import db +from ..schemas import processings as schema + + +def get_processing_status( + dataCollectionIds: list[int], +) -> schema.ProcessingStatusesList: + queries = {} + queries["screening"] = ( + db.session.query( + models.DataCollection.dataCollectionId, + models.Screening.programVersion.label("program"), + models.ScreeningOutput.indexingSuccess, + models.ScreeningOutput.strategySuccess.label("status"), + ) + .select_from(models.DataCollection) + .join( + models.DataCollectionGroup, + models.DataCollection.dataCollectionGroupId + == models.DataCollectionGroup.dataCollectionGroupId, + ) + .join( + models.Screening, + or_( + models.Screening.dataCollectionId + == models.DataCollection.dataCollectionId, + models.Screening.dataCollectionGroupId + == models.DataCollection.dataCollectionGroupId, + ), + ) + .join(models.ScreeningOutput) + .group_by(models.DataCollection.dataCollectionId, models.Screening.screeningId) + ) + + if hasattr(models, "XrayCentringResult"): + queries["xrc"] = ( + db.session.query( + models.DataCollection.dataCollectionId, + models.XrayCentringResult.status, + literal_column("'xrc'").label("program"), + ) + .join(models.DataCollectionGroup) + .join( + models.GridInfo, + models.GridInfo.dataCollectionId + == models.DataCollection.dataCollectionId, + ) + .join(models.XrayCentringResult) + ) + + queries["autoIntegration"] = ( + db.session.query( + models.DataCollection.dataCollectionId, + models.AutoProcProgram.autoProcProgramId, + models.AutoProcProgram.processingPrograms.label("program"), + models.AutoProcProgram.processingStatus.label("status"), + ) + .select_from(models.DataCollection) + .join(models.DataCollectionGroup) + .join(models.AutoProcIntegration) + .join(models.AutoProcProgram) + ) + + if hasattr(models, "ProcessingJob"): + queries["processing"] = ( + db.session.query( + models.DataCollection.dataCollectionId, + models.AutoProcProgram.autoProcProgramId, + models.AutoProcProgram.processingPrograms.label("program"), + models.AutoProcProgram.processingStatus.label("status"), + ) + .select_from(models.DataCollection) + .join(models.DataCollectionGroup) + .join(models.ProcessingJob) + .join(models.AutoProcProgram) + .outerjoin(models.AutoProcIntegration) + .filter( + and_( + models.AutoProcIntegration.autoProcIntegrationId == None, # noqa + models.ProcessingJob.automatic == 1, + ) + ) + ) + + if hasattr(models.CTF, "CTFid"): + ctf_column = models.CTF.CTFid + else: + ctf_column = models.CTF.ctfId + + queries["em"] = ( + db.session.query( + models.DataCollection.dataCollectionId, + func.count(distinct(models.Movie.movieId)).label("movie"), + func.count(distinct(models.MotionCorrection.motionCorrectionId)).label( + "motionCorrection" + ), + 
func.count(distinct(ctf_column)).label("ctf"), + ) + .select_from(models.DataCollection) + .join(models.DataCollectionGroup) + .join(models.Movie) + .join(models.MotionCorrection) + .outerjoin( + models.CTF, + models.MotionCorrection.motionCorrectionId == models.CTF.motionCorrectionId, + ) + .group_by(models.DataCollection.dataCollectionId) + ) + + for key in queries.keys(): + queries[key] = queries[key].filter( + models.DataCollection.dataCollectionId.in_(dataCollectionIds) + ) + queries[key] = queries[key].join(models.BLSession).join(models.Proposal) + queries[key] = with_authorization(queries[key], joinBLSession=False) + + results = {} + for key in queries.keys(): + results[key] = [r._asdict() for r in queries[key].all()] + + statuses = {} + for key, rows in results.items(): + for row in rows: + if row["dataCollectionId"] not in statuses: + statuses[row["dataCollectionId"]] = {} + + if key not in statuses[row["dataCollectionId"]]: + statuses[row["dataCollectionId"]][key] = {} + + if key == "em": + for em_key in ["motionCorrection", "ctf", "movie"]: + statuses[row["dataCollectionId"]][key][em_key] = row[em_key] + else: + if row["program"] not in statuses[row["dataCollectionId"]][key]: + statuses[row["dataCollectionId"]][key][row["program"]] = [] + + statuses[row["dataCollectionId"]][key][row["program"]].append(row) + + return {"statuses": statuses}
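For reference, the nested structure assembled above maps each `dataCollectionId` to a per-category summary. A hypothetical illustration of the shape (all values invented):

```python
# Hypothetical shape of get_processing_status() output (values invented):
# dataCollectionId -> category -> program -> list of status rows,
# except "em", which carries plain per-stage counts instead.
{
    "statuses": {
        1234: {
            "screening": {
                "EDNA": [{"dataCollectionId": 1234, "program": "EDNA", "indexingSuccess": 1, "status": 1}]
            },
            "autoIntegration": {
                "xia2": [{"dataCollectionId": 1234, "autoProcProgramId": 1, "program": "xia2", "status": "SUCCESS"}]
            },
            "em": {"movie": 120, "motionCorrection": 120, "ctf": 118},
        }
    }
}
```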
+ + +def get_processing_message_status( + dataCollectionIds: list[int], +) -> schema.AutoProcProgramMessageStatuses: + # Bail out early if either model is missing; both are referenced below + if not hasattr(models, "AutoProcProgramMessage") or not hasattr( + models, "ProcessingJob" + ): + return {"statuses": {}} + queries = {} + columns = [ + models.DataCollection.dataCollectionId.label("dataCollectionId"), + func.sum( + func.IF(models.AutoProcProgramMessage.severity == "ERROR", 1, 0) + ).label("errors"), + func.sum( + func.IF(models.AutoProcProgramMessage.severity == "WARNING", 1, 0) + ).label("warnings"), + func.sum(func.IF(models.AutoProcProgramMessage.severity == "INFO", 1, 0)).label( + "info" + ), + ] + + queries["autoIntegration"] = ( + db.session.query(*columns) + .select_from(models.AutoProcProgramMessage) + .join(models.AutoProcProgram) + .join(models.AutoProcIntegration) + .join(models.DataCollection) + .join(models.DataCollectionGroup) + .group_by(models.DataCollection.dataCollectionId) + ) + + queries["processing"] = ( + db.session.query(*columns) + .select_from(models.AutoProcProgramMessage) + .join(models.AutoProcProgram) + .join( + models.ProcessingJob, + models.ProcessingJob.processingJobId + == models.AutoProcProgram.processingJobId, + ) + .join( + models.DataCollection, + models.ProcessingJob.dataCollectionId + == models.DataCollection.dataCollectionId, + ) + .join(models.DataCollectionGroup) + .outerjoin( + models.AutoProcIntegration, + models.AutoProcIntegration.dataCollectionId + == models.DataCollection.dataCollectionId, + ) + .filter( + and_( + models.AutoProcIntegration.autoProcIntegrationId == None, # noqa + models.ProcessingJob.automatic == 1, + ) + ) + .group_by(models.DataCollection.dataCollectionId) + ) + + for key in queries.keys(): + queries[key] = queries[key].filter( + models.DataCollection.dataCollectionId.in_(dataCollectionIds) + ) + queries[key] = queries[key].join(models.BLSession).join(models.Proposal) + queries[key] = with_authorization(queries[key], joinBLSession=False) + + subquery = queries["autoIntegration"].union_all(queries["processing"]).subquery() + query = db.session.query( + subquery.c.dataCollectionId, + subquery.c.errors, + subquery.c.warnings, + subquery.c.info, + ).group_by(subquery.c.dataCollectionId) + + results = [r._asdict() for r in query.all()] + return {"statuses": {row["dataCollectionId"]: row for row in results}} + + +def get_processing_messages( + skip: int = 0, + limit: int = 25, + dataCollectionId: int = None, + autoProcProgramId: int = None, + autoProcProgramMessageId: int = None, +) -> Paged[schema.AutoProcProgramMessage]: + # Both models are referenced below, so guard against either being absent + if not hasattr(models, "AutoProcProgramMessage") or not hasattr( + models, "ProcessingJob" + ): + return Paged(total=0, results=[], skip=skip, limit=limit) + + queries = {} + queries["autoIntegration"] = ( + db.session.query(models.AutoProcProgramMessage) + .join(models.AutoProcProgram) + .join(models.AutoProcIntegration) + .join(models.DataCollection) + .join(models.DataCollectionGroup) + ) + + queries["processing"] = ( + db.session.query(models.AutoProcProgramMessage) + .join(models.AutoProcProgram) + .join(models.ProcessingJob) + .join(models.DataCollection) + .join(models.DataCollectionGroup) + ) + + for key in queries.keys(): + queries[key] = queries[key].join(models.BLSession).join(models.Proposal) + queries[key] = with_authorization(queries[key], joinBLSession=False) + + if autoProcProgramMessageId: + queries[key] = queries[key].filter( + models.AutoProcProgramMessage.autoProcProgramMessageId + == autoProcProgramMessageId + ) + + if dataCollectionId: + queries[key] = queries[key].filter( + models.DataCollection.dataCollectionId == dataCollectionId + ) + + if autoProcProgramId: + queries[key] = queries[key].filter( + models.AutoProcProgram.autoProcProgramId == autoProcProgramId + ) + + query = ( + queries["autoIntegration"] + .union_all(queries["processing"]) + .group_by(models.AutoProcProgramMessage.autoProcProgramMessageId) + ) + + # Count before paging so `total` reflects all matching rows + total = query.count() + query = page(query, skip=skip, limit=limit) + return Paged(total=total, results=query.all(), skip=skip, limit=limit)
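As with the other module functions, `get_processing_messages` returns the shared `Paged` container. A hypothetical usage sketch (assumes a configured database session and an existing data collection; the id is invented):

```python
# Hypothetical call: `total` counts all matching rows, while
# `results` holds at most `limit` of them, offset by `skip`.
paged = get_processing_messages(skip=0, limit=25, dataCollectionId=1234)
print(paged.total, paged.skip, paged.limit)
for message in paged.results:
    print(message.severity)
```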
+ + +def get_screening_results( + skip: int = 0, + limit: int = 25, + dataCollectionId: int = None, + screeningId: int = None, +) -> Paged[models.Screening]: + query = ( + db.session.query(models.Screening) + .join(models.ScreeningOutput) + .options(contains_eager(models.Screening.ScreeningOutput)) + .join(models.ScreeningStrategy) + .options( + contains_eager( + models.Screening.ScreeningOutput, + models.ScreeningOutput.ScreeningStrategy, + ) + ) + .join(models.ScreeningStrategyWedge) + .options( + contains_eager( + models.Screening.ScreeningOutput, + models.ScreeningOutput.ScreeningStrategy, + models.ScreeningStrategy.ScreeningStrategyWedge, + ) + ) + .outerjoin(models.ScreeningStrategySubWedge) + .options( + contains_eager( + models.Screening.ScreeningOutput, + models.ScreeningOutput.ScreeningStrategy, + models.ScreeningStrategy.ScreeningStrategyWedge, + models.ScreeningStrategyWedge.ScreeningStrategySubWedge, + ) + ) + .outerjoin(models.ScreeningOutputLattice) + .options( + contains_eager( + models.Screening.ScreeningOutput, + models.ScreeningOutput.ScreeningStrategy, + models.ScreeningStrategy.ScreeningOutput, + models.ScreeningOutput.ScreeningOutputLattice, + ) + ) + # Support linkage via both `dataCollectionId` and `dataCollectionGroupId` + .join( + models.DataCollection, + or_( + models.DataCollection.dataCollectionId + == models.Screening.dataCollectionId, + models.DataCollection.dataCollectionGroupId + == models.Screening.dataCollectionGroupId, + ), + ) + .join( + models.DataCollectionGroup, + models.DataCollection.dataCollectionGroupId + == models.DataCollectionGroup.dataCollectionGroupId, + ) + .join(models.BLSession) + .join(models.Proposal) + ) + + if dataCollectionId: + query = query.filter(models.DataCollection.dataCollectionId == dataCollectionId) + + if screeningId: + query = query.filter(models.Screening.screeningId == screeningId) + + query = with_authorization(query, joinBLSession=False) + + total = query.count() + results = query.all() + return Paged(total=total, results=results, skip=skip, limit=limit) + + +def get_processing_results( + skip: int = 0, + limit: int = 25, + dataCollectionId: int = None, + autoProcProgramId: int = None, +) -> Paged[models.AutoProcProgram]: + metadata = { + "attachments": func.count( + distinct(models.AutoProcProgramAttachment.autoProcProgramAttachmentId) + ) + } + + query = ( + db.session.query(models.AutoProcProgram, *metadata.values()) + .join(models.ProcessingJob) + .options(contains_eager(models.AutoProcProgram.ProcessingJob)) + .outerjoin(models.ProcessingJobParameter) + .options( + contains_eager( + models.AutoProcProgram.ProcessingJob, + models.ProcessingJob.ProcessingJobParameters, + ) + ) + .outerjoin(models.AutoProcProgramAttachment) + .join(models.DataCollection) + .join(models.DataCollectionGroup) + .join(models.BLSession) + .join(models.Proposal) + .outerjoin( + models.AutoProcIntegration, + models.AutoProcIntegration.autoProcProgramId + == models.AutoProcProgram.autoProcProgramId, + ) + .group_by(models.AutoProcProgram.autoProcProgramId) + .filter(models.AutoProcIntegration.autoProcIntegrationId == None) # noqa + ) + + if dataCollectionId: + query = query.filter(models.ProcessingJob.dataCollectionId == dataCollectionId) + + if autoProcProgramId: + query = query.filter( + models.AutoProcProgram.autoProcProgramId == autoProcProgramId + ) + + query = with_authorization(query, joinBLSession=False) + + # Count before paging so `total` reflects all matching rows + total = query.count() + query = page(query, skip=skip, limit=limit) + results = with_metadata(query.all(), list(metadata.keys())) + + messages = get_processing_messages( + skip=0, + limit=9999, + dataCollectionId=dataCollectionId, + ) + + for result in results: + result._metadata["autoProcProgramMessages"] = [ + message + for message in messages.results + if message.autoProcProgramId == result.autoProcProgramId + ] + + return Paged(total=total, results=results, skip=skip, limit=limit)
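`get_processing_results` illustrates the metadata-column pattern used throughout these modules: aggregate expressions are selected alongside the entity and then folded into each row's `_metadata` by `with_metadata`. A condensed sketch of just that pattern, reusing the names above with the joins elided:

```python
from sqlalchemy import func, distinct

# Condensed sketch of the metadata-column pattern (joins elided):
# aggregates ride along in the SELECT, and with_metadata() attaches
# them to each returned row under result._metadata.
metadata = {
    "attachments": func.count(
        distinct(models.AutoProcProgramAttachment.autoProcProgramAttachmentId)
    ),
}
query = (
    db.session.query(models.AutoProcProgram, *metadata.values())
    .outerjoin(models.AutoProcProgramAttachment)
    .group_by(models.AutoProcProgram.autoProcProgramId)
)
results = with_metadata(query.all(), list(metadata.keys()))
for result in results:
    print(result.autoProcProgramId, result._metadata["attachments"])
```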
+ + +def get_processing_attachments( + skip: int = 0, + limit: int = 25, + autoProcProgramId: int = None, + autoProcProgramAttachmentId: int = None, +) -> Paged[models.AutoProcProgramAttachment]: + metadata = { + "url": func.concat( + f"{settings.api_root}/processings/attachments/", + models.AutoProcProgramAttachment.autoProcProgramAttachmentId, + ) + } + + queries = {} + queries["api"] = ( + db.session.query(models.AutoProcProgramAttachment, *metadata.values()) + .join(models.AutoProcProgram) + .join(models.AutoProcIntegration) + ) + + if hasattr(models, "ProcessingJob"): + queries["pj"] = ( + db.session.query(models.AutoProcProgramAttachment, *metadata.values()) + .join(models.AutoProcProgram) + .join(models.ProcessingJob) + ) + + for key in queries.keys(): + if autoProcProgramAttachmentId: + queries[key] = queries[key].filter( + models.AutoProcProgramAttachment.autoProcProgramAttachmentId + == autoProcProgramAttachmentId + ) + + if autoProcProgramId: + queries[key] = queries[key].filter( + models.AutoProcProgramAttachment.autoProcProgramId == autoProcProgramId + ) + + queries[key] = ( + queries[key] + .join(models.DataCollection) + .join(models.DataCollectionGroup) + .join(models.BLSession) + .join(models.Proposal) + ) + queries[key] = with_authorization(queries[key], joinBLSession=False) + queries[key] = page(queries[key], skip=skip, limit=limit) + + if hasattr(models, "ProcessingJob"): + query_all = queries["api"].union_all(queries["pj"]) + else: + query_all = queries["api"] + total = query_all.count() + results = with_metadata(query_all.all(), list(metadata.keys())) + + return Paged(total=total, results=results, skip=skip, limit=limit) + + +def get_autointegration_results( + skip: int = 0, + limit: int = 25, + dataCollectionId: int = None, + autoProcProgramId: int = None, +) -> Paged[models.AutoProcProgram]: + metadata = { + "attachments": func.count( + distinct(models.AutoProcProgramAttachment.autoProcProgramAttachmentId) + ), + } + query = ( + db.session.query(models.AutoProcProgram, *metadata.values()) + .outerjoin(models.AutoProcProgramAttachment) + .join(models.AutoProcIntegration) + .options(contains_eager(models.AutoProcProgram.AutoProcIntegration)) + .outerjoin(models.AutoProcScalingHasInt) + .options( + contains_eager( + models.AutoProcProgram.AutoProcIntegration, + models.AutoProcIntegration.AutoProcScalingHasInt, + ) + ) + .outerjoin(models.AutoProcScaling) + .options( + contains_eager( + models.AutoProcProgram.AutoProcIntegration, + models.AutoProcIntegration.AutoProcScalingHasInt, + models.AutoProcScalingHasInt.AutoProcScaling, + ) + ) + .outerjoin(models.AutoProc) + .options( + contains_eager( + models.AutoProcProgram.AutoProcIntegration, + models.AutoProcIntegration.AutoProcScalingHasInt, + models.AutoProcScalingHasInt.AutoProcScaling, + models.AutoProcScaling.AutoProc, + ) + ) + .outerjoin(models.AutoProcScalingStatistics) + .options( + contains_eager( + models.AutoProcProgram.AutoProcIntegration, + models.AutoProcIntegration.AutoProcScalingHasInt, + models.AutoProcScalingHasInt.AutoProcScaling, + models.AutoProcScaling.AutoProcScalingStatistics, + ) + ) + .join( + models.DataCollection, + models.DataCollection.dataCollectionId + == models.AutoProcIntegration.dataCollectionId, + ) + .options( + contains_eager( + models.AutoProcProgram.AutoProcIntegration, + models.AutoProcIntegration.DataCollection, + ) + ) + .join(models.DataCollectionGroup) + .join(models.BLSession) + .join(models.Proposal) + .group_by(models.AutoProcProgram.autoProcProgramId) + ) + + if hasattr(models, "ProcessingJob"): + metadata["imageSweepCount"] = func.count( + distinct(models.ProcessingJobImageSweep.processingJobImageSweepId) + ) + query = ( + query.outerjoin( + models.ProcessingJob, + models.ProcessingJob.processingJobId + == models.AutoProcProgram.processingJobId, + ) + .outerjoin(models.ProcessingJobImageSweep) + .add_columns(metadata["imageSweepCount"]) + ) + + if dataCollectionId: + query = query.filter( + models.AutoProcIntegration.dataCollectionId == dataCollectionId + ) + + if autoProcProgramId: + query = query.filter( + models.AutoProcProgram.autoProcProgramId == autoProcProgramId + ) + + query = with_authorization(query, joinBLSession=False) + + # Apply distinct and count before paging so `total` reflects all matching rows + query = query.distinct() + total = query.count() + query = page(query, skip=skip, limit=limit) + results = with_metadata(query.all(), list(metadata.keys())) + + messages = get_processing_messages( + skip=0, + limit=9999, + dataCollectionId=dataCollectionId, + ) + + for result in results: + result._metadata["autoProcProgramMessages"] = [ + message + for message in messages.results + if message.autoProcProgramId == result.autoProcProgramId + ] + + return Paged(total=total, results=results, skip=skip, limit=limit) diff --git a/pyispyb/core/modules/proposal.py b/pyispyb/core/modules/proposal.py deleted file mode 100644 index 0865fabe..00000000 --- a/pyispyb/core/modules/proposal.py +++ /dev/null @@
-1,193 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from flask_restx._http import HTTPStatus - -from pyispyb.app.extensions import db, auth_provider -from pyispyb.app.utils import create_response_item - -from pyispyb.core import models, schemas -from pyispyb.core.modules import contacts, session - - -def get_proposals_by_query(query_dict): - """Returns proposal db items - - Args: - query_dict (dict, optional): [description]. Defaults to {}. - - Returns: - [type]: [description] - """ - return db.get_db_items( - models.Proposal, - schemas.proposal.dict_schema, - schemas.proposal.ma_schema, - query_dict, - ) - -def get_proposals_has_person_by_query(query_dict): - return db.get_db_items( - models.ProposalHasPerson, - schemas.proposal_has_person.dict_schema, - schemas.proposal_has_person.ma_schema, - query_dict, - ) - -def get_proposal_by_id(proposal_id): - """ - Returns proposal by its proposalId. - - Args: - proposal_id (int): corresponds to proposalId in db - - Returns: - dict: info about proposal as dict - """ - id_dict = {"proposalId": proposal_id} - return db.get_db_item( - models.Proposal, schemas.proposal.ma_schema, id_dict - ) - - -def get_proposal_info_by_id(proposal_id): - """ - Returns proposal by its proposalId. - - Args: - proposal_id (int): corresponds to proposalId in db - - Returns: - dict: info about proposal as dict - """ - proposal_json = get_proposal_by_id(proposal_id) - - person_json = contacts.get_person_by_id(proposal_json["personId"]) - proposal_json["person"] = person_json - - sessions_json = session.get_sessions({"proposalId": proposal_id}) - proposal_json["sessions"] = sessions_json - - return proposal_json - - -def add_proposal(data_dict): - """ - Adds a proposal. - - Args: - proposal_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item(models.Proposal, schemas.proposal.ma_schema, data_dict) - - -def update_proposal(proposal_id, data_dict): - """ - Updates proposal. - - Args: - proposal_id ([type]): [description] - proposal_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"proposalId": proposal_id} - return db.update_db_item( - models.Proposal, schemas.proposal.ma_schema, id_dict, data_dict - ) - - -def patch_proposal(proposal_id, proposal_dict): - """ - Patch a proposal. - - Args: - proposal_id ([type]): [description] - proposal_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"proposalId": proposal_id} - return db.patch_db_item( - models.Proposal, schemas.proposal.ma_schema, id_dict, proposal_dict - ) - - -def delete_proposal(proposal_id): - """ - Deletes proposal item from db. 
- - Args: - proposal_id (int): proposalId column in db - - Returns: - bool: True if the proposal exists and deleted successfully, - otherwise return False - """ - id_dict = {"proposalId": proposal_id} - return db.delete_db_item(models.Proposal, id_dict) - - -def get_proposal_ids_by_person_id(person_id): - proposal_id_list = [] - proposal_dict = get_proposals_by_query({"personId": person_id}) - if proposal_dict["data"]["rows"]: - for proposal in proposal_dict["data"]["rows"]: - proposal_id_list.append(proposal["proposalId"]) - proposal_has_person_dict = get_proposals_has_person_by_query({"personId": person_id}) - if proposal_has_person_dict["data"]["rows"]: - for proposal in proposal_has_person_dict["data"]["rows"]: - proposal_id_list.append(proposal["proposalId"]) - return proposal_id_list - -def get_proposal_ids(request): - """ - Checks if user can run query. - Manager role allows to run query without restrictions. - Otherwise proposal with proposalId in the query parameters should belong - to the user calling the requests - - Args: - request (request): [description] - - Returns: - bool, str: true if user can run query, if False then msg describes the reason - """ - - user_info = auth_provider.get_user_info_from_auth_header( - request.headers.get("Authorization") - ) - proposal_id_list = [] - - user_proposals = get_proposals_by_query(request.args.to_dict()) - for user_proposal in user_proposals["data"]["rows"]: - proposal_id_list.append(user_proposal.get("proposalId")) - - return user_info.get("is_admin"), proposal_id_list diff --git a/pyispyb/core/modules/proposals.py b/pyispyb/core/modules/proposals.py new file mode 100644 index 00000000..0d486fbe --- /dev/null +++ b/pyispyb/core/modules/proposals.py @@ -0,0 +1,97 @@ +from typing import Optional + +from sqlalchemy import or_, func, distinct +from sqlalchemy.orm import joinedload +from ispyb import models + +from ...app.extensions.database.utils import Paged, page, with_metadata +from ...app.extensions.database.middleware import db +from ...app.extensions.database.definitions import ( + groups_from_beamlines, + with_authorization, +) + + +def get_proposals( + skip: int, + limit: int, + proposalId: Optional[int] = None, + proposalCode: Optional[str] = None, + proposalNumber: Optional[str] = None, + proposal: Optional[str] = None, + search: Optional[str] = None, + withAuthorization: bool = True, +) -> Paged[models.Proposal]: + metadata = { + "persons": func.count(distinct(models.ProposalHasPerson.personId)), + "sessions": func.count(distinct(models.BLSession.sessionId)), + "beamLines": func.group_concat(distinct(models.BLSession.beamLineName)), + } + + query = ( + db.session.query(models.Proposal, *metadata.values()) + .options(joinedload(models.Proposal.Person)) + .outerjoin(models.BLSession) + .outerjoin(models.ProposalHasPerson) + .order_by(models.Proposal.proposalId.desc()) + .group_by(models.Proposal.proposalId) + ) + + if proposalId: + query = query.filter(models.Proposal.proposalId == proposalId) + + if proposal: + query = query.filter(models.Proposal.proposal == proposal) + + if proposalCode and proposalNumber: + query = query.filter(models.Proposal.proposalCode == proposalCode) + query = query.filter(models.Proposal.proposalNumber == proposalNumber) + + if search: + query = query.filter( + or_( + models.Proposal.title.like(f"%{search}%"), + models.BLSession.beamLineName.like(search), + models.Proposal.proposal.like(f"%{search}%"), + ) + ) + + if withAuthorization: + query = with_authorization(query, joinBLSession=False) + + total 
= query.count() + query = page(query, skip=skip, limit=limit) + results = with_metadata(query.all(), list(metadata.keys())) + + for result in results: + result._metadata["beamLines"] = ( + result._metadata["beamLines"].split(",") + if result._metadata["beamLines"] + else [] + ) + + result._metadata["uiGroups"] = groups_from_beamlines( + result._metadata["beamLines"] + ) + + return Paged(total=total, results=results, skip=skip, limit=limit) + + +def get_proposalHasPerson( + skip: int, + limit: int, + proposalId: Optional[int] = None, +) -> Paged[models.ProposalHasPerson]: + + query = db.session.query(models.ProposalHasPerson).options( + joinedload(models.ProposalHasPerson.Person) + ) + + if proposalId: + query = query.filter(models.ProposalHasPerson.proposalId == proposalId) + + query_distinct = query.distinct() + total = query_distinct.count() + query = page(query_distinct, skip=skip, limit=limit) + + return Paged(total=total, results=query.all(), skip=skip, limit=limit) diff --git a/pyispyb/core/modules/protein.py b/pyispyb/core/modules/protein.py deleted file mode 100644 index 8fd9ec63..00000000 --- a/pyispyb/core/modules/protein.py +++ /dev/null @@ -1,152 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from pyispyb.app.extensions import db -from pyispyb.app.utils import create_response_item - -from pyispyb.core import models, schemas -from pyispyb.core.modules import proposal - - -def get_proteins_by_request(request): - """ - Returns protein entries. - - Returns: - [type]: [description] - """ - query_dict = request.args.to_dict() - - is_admin, proposal_id_list = proposal.get_proposal_ids(request) - - run_query = False - if is_admin: - run_query = True - else: - if not proposal_id_list: - msg = "No sessions returned. User has no proposals." - else: - if "proposalId" in query_dict.keys(): - if query_dict["proposalId"] in proposal_id_list: - run_query = True - else: - msg = ( - "Proposal with id %s is not associated with user" - % query_dict["proposalId"] - ) - else: - query_dict["proposalId"] = proposal_id_list - - if run_query: - return get_proteins_by_query(query_dict) - else: - return create_response_item(msg=msg) - - -def get_proteins_by_query(query_dict): - return db.get_db_items( - models.Protein, - schemas.protein.dict_schema, - schemas.protein.ma_schema, - query_dict, - ) - - -def get_protein_by_id(protein_id): - """ - Returns protein by its proteinId. - - Args: - protein (int): corresponds to proteinId in db - - Returns: - dict: info about protein as dict - """ - data_dict = {"proteinId": protein_id} - return db.get_db_item( - models.Protein, schemas.protein.ma_schema, data_dict - ) - - -def add_protein(data_dict): - """ - Adds a protein to db. 
- - Args: - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item(models.Protein, schemas.protein.ma_schema, data_dict) - - -def update_protein(protein_id, data_dict): - """ - Updates protein. - - Args: - protein_id ([type]): [description] - protein_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"proteinId": protein_id} - return db.update_db_item( - models.Protein, schemas.protein.ma_schema, id_dict, data_dict - ) - - -def patch_protein(protein_id, data_dict): - """ - Patch a protein. - - Args: - protein_id ([type]): [description] - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"proteinId": protein_id} - return db.patch_db_item( - models.Protein, schemas.protein.ma_schema, id_dict, data_dict - ) - - -def delete_protein(protein_id): - """ - Deletes protein item from db. - - Args: - protein_id (int): proteinId column in db - - Returns: - bool: True if the protein exists and deleted successfully, - otherwise return False - """ - id_dict = {"proteinId": protein_id} - return db.delete_db_item(models.Protein, id_dict) diff --git a/pyispyb/core/modules/proteins.py b/pyispyb/core/modules/proteins.py new file mode 100644 index 00000000..8ba66c76 --- /dev/null +++ b/pyispyb/core/modules/proteins.py @@ -0,0 +1,123 @@ +from typing import Optional + +from sqlalchemy import or_, func, distinct +from sqlalchemy.orm import joinedload, contains_eager +from ispyb import models + + +from ...app.extensions.database.utils import Paged, page, with_metadata, order +from ...app.extensions.database.middleware import db +from ...app.extensions.database.definitions import with_authorization +from ...core.modules.utils import encode_external_id + + +ORDER_BY_MAP = { + "proteinId": models.Protein.proteinId, + "acronym": models.Protein.acronym, + "name": models.Protein.name, +} + + +def get_proteins( + skip: int, + limit: int, + proteinId: Optional[int] = None, + proposalId: Optional[int] = None, + proposal: Optional[str] = None, + externalId: Optional[int] = None, + name: Optional[str] = None, + acronym: Optional[str] = None, + search: Optional[str] = None, + sort_order: Optional[dict[str, str]] = None, + withAuthorization: bool = True, +) -> Paged[models.Protein]: + metadata = { + "pdbs": func.count(distinct(models.ProteinHasPDB.proteinid)), + "samples": func.count(distinct(models.BLSample.blSampleId)), + "crystals": func.count(distinct(models.Crystal.crystalId)), + } + + query = ( + db.session.query(models.Protein, *metadata.values()) + .options(joinedload(models.Protein.Proposal)) + .join(models.Proposal) + # .outerjoin( + # models.ConcentrationType, + # models.ConcentrationType.concentrationTypeId + # == models.Protein.concentrationTypeId, + # ) + # .options(contains_eager(models.Protein.ConcentrationType)) + .outerjoin(models.ComponentType) + .options(contains_eager(models.Protein.ComponentType)) + .outerjoin(models.ProteinHasPDB) + .outerjoin(models.Crystal) + .outerjoin(models.BLSample) + .group_by(models.Protein.proteinId) + ) + + if withAuthorization: + query = with_authorization(query) + + if proteinId: + query = query.filter(models.Protein.proteinId == proteinId) + + if name: + query = query.filter(models.Protein.name == name) + + if acronym: + query = query.filter(models.Protein.acronym == acronym) + + if proposalId: + query = query.filter(models.Protein.proposalId == proposalId) + + if proposal: + query = query.filter(models.Proposal.proposal == proposal) + + if externalId: + 
externalId = encode_external_id(externalId) + query = query.filter(models.Protein.externalId == externalId) + + if search: + query = query.filter( + or_( + models.Protein.name.like(f"%{search}%"), + models.Protein.acronym.like(f"%{search}%"), + ) + ) + + if sort_order: + query = order(query, ORDER_BY_MAP, sort_order) + + total = query.count() + query = page(query, skip=skip, limit=limit) + + results = with_metadata(query.all(), list(metadata.keys())) + + protein_ids = [result.proteinId for result in results] + dc_query = ( + db.session.query( + models.Protein.proteinId, + func.count(distinct(models.DataCollection.dataCollectionId)).label( + "datacollections" + ), + ) + .join(models.Crystal) + .join(models.BLSample) + .join( + models.DataCollectionGroup, + models.BLSample.blSampleId == models.DataCollectionGroup.blSampleId, + ) + .join(models.DataCollection) + .filter(models.Protein.proteinId.in_(protein_ids)) + .group_by(models.Protein.proteinId) + ) + + dc_counts = {} + for dc in dc_query.all(): + row = dc._asdict() + dc_counts[row["proteinId"]] = row["datacollections"] + + for result in results: + result._metadata["datacollections"] = dc_counts.get(result.proteinId, 0) + + return Paged(total=total, results=results, skip=skip, limit=limit) diff --git a/pyispyb/core/modules/robot_action.py b/pyispyb/core/modules/robot_action.py deleted file mode 100644 index ebcd36a6..00000000 --- a/pyispyb/core/modules/robot_action.py +++ /dev/null @@ -1,125 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from pyispyb.app.extensions import db -from pyispyb.core import models, schemas - - -def get_robot_actions(request): - """ - Returns robot_action items based on query parameters. - - Args: - query_dict (dict): [description] - - Returns: - [type]: [description] - """ - query_dict = request.args.to_dict() - - return db.get_db_items( - models.RobotAction, - schemas.robot_action.dict_schema, - schemas.robot_action.ma_schema, - query_dict, - ) - - -def add_robot_action(data_dict): - """ - Adds data collection item. - - Args: - robot_action_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item(models.RobotAction, schemas.robot_action.ma_schema, data_dict) - - -def get_robot_action_by_id(robot_action_id): - """ - Returns robot_action by its robot_actionId. - - Args: - robot_action_id (int): corresponds to beamlineSetupId in db - - Returns: - dict: info about robot_action as dict - """ - data_dict = {"robotActionId": robot_action_id} - return db.get_db_item( - models.RobotAction, schemas.robot_action.ma_schema, data_dict - ) - - -def update_robot_action(robot_action_id, data_dict): - """ - Updates robot_action. 
- - Args: - robot_action_id ([type]): [description] - robot_action_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"robotActionId": robot_action_id} - return db.update_db_item( - models.RobotAction, schemas.robot_action.ma_schema, id_dict, data_dict - ) - - -def patch_robot_action(robot_action_id, data_dict): - """ - Patch a robot_action. - - Args: - robot_action_id ([type]): [description] - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"robotActionId": robot_action_id} - return db.patch_db_item( - models.RobotAction, schemas.robot_action.ma_schema, id_dict, data_dict - ) - - -def delete_robot_action(robot_action_id): - """ - Deletes robot_action item from db. - - Args: - robot_action_id (int): robot_actionId column in db - - Returns: - bool: True if the robot_action exists and deleted successfully, - otherwise return False - """ - id_dict = {"robotActionId": robot_action_id} - return db.delete_db_item(models.RobotAction, id_dict) diff --git a/pyispyb/core/modules/sample.py b/pyispyb/core/modules/sample.py deleted file mode 100644 index 774bb839..00000000 --- a/pyispyb/core/modules/sample.py +++ /dev/null @@ -1,122 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from pyispyb.app.extensions import db -from pyispyb.core import models, schemas - - -def get_samples_by_request(request): - """ - Returns sample entries. - - Returns: - [type]: [description] - """ - query_dict = request.args.to_dict() - - return db.get_db_items( - models.BLSample, - schemas.sample.dict_schema, - schemas.sample.ma_schema, - query_dict, - ) - - -def get_sample_by_id(sample_id): - """ - Returns sample by its sampleId. - - Args: - sample (int): corresponds to sampleId in db - - Returns: - dict: info about sample as dict - """ - data_dict = {"blSampleId": sample_id} - return db.get_db_item( - models.BLSample, schemas.sample.ma_schema, data_dict - ) - - -def add_sample(data_dict): - """ - Adds a sample to db. - - Args: - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item(models.BLSample, schemas.sample.ma_schema, data_dict) - - -def update_sample(sample_id, data_dict): - """ - Updates sample. - - Args: - sample_id ([type]): [description] - sample_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"blSampleId": sample_id} - return db.update_db_item( - models.BLSample, schemas.sample.ma_schema, id_dict, data_dict - ) - - -def patch_sample(sample_id, data_dict): - """ - Patch a sample. 
- - Args: - sample_id ([type]): [description] - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"blSampleId": sample_id} - return db.patch_db_item( - models.BLSample, schemas.sample.ma_schema, id_dict, data_dict - ) - - -def delete_sample(sample_id): - """ - Deletes sample item from db. - - Args: - sample_id (int): blSampleId column in db - - Returns: - bool: True if the sample exists and deleted successfully, - otherwise return False - """ - id_dict = {"blSampleId": sample_id} - return db.delete_db_item(models.BLSample, id_dict) diff --git a/pyispyb/core/modules/samples.py b/pyispyb/core/modules/samples.py new file mode 100644 index 00000000..4ed91f1d --- /dev/null +++ b/pyispyb/core/modules/samples.py @@ -0,0 +1,424 @@ +import enum +from typing import Optional + +from sqlalchemy.orm import contains_eager, aliased, joinedload +from sqlalchemy.sql.expression import func, distinct, and_, literal_column +from ispyb import models + +from ...config import settings +from ...app.extensions.database.definitions import with_authorization +from ...app.extensions.database.middleware import db +from ...app.extensions.database.utils import ( + Paged, + page, + with_metadata, + order, + update_model, +) + +from ..schemas import samples as schema + + +SAMPLE_ORDER_BY_MAP = { + "blSampleId": models.BLSample.blSampleId, + "name": models.BLSample.name, + "location": models.BLSample.location, + "datacollections": func.count(distinct(models.DataCollection.dataCollectionId)), +} + + +SAMPLE_STATUS_FILTERS = { + "Sample Action": func.count(models.RobotAction.robotActionId), + "Data Collected": func.count(models.DataCollection.dataCollectionId), + "Strategy": func.count(models.Screening.screeningId), + "Auto Integrated": func.count(models.AutoProcIntegration.autoProcIntegrationId), +} + +if hasattr(models, "ProcessingJob"): + SAMPLE_STATUS_FILTERS["Processed"] = func.count( + models.ProcessingJob.processingJobId + ) + +SAMPLE_STATUS_ENUM = enum.Enum( + "SampleStatus", {k: k for k in SAMPLE_STATUS_FILTERS.keys()} +) + + +def get_samples( + skip: int, + limit: int, + search: Optional[str] = None, + blSampleId: Optional[int] = None, + proteinId: Optional[int] = None, + proposal: Optional[str] = None, + containerId: Optional[int] = None, + beamLineName: Optional[str] = None, + sort_order: Optional[dict[str, str]] = None, + status: Optional[SAMPLE_STATUS_ENUM] = None, +) -> Paged[models.BLSample]: + metadata = { + "subsamples": func.count(distinct(models.BLSubSample.blSubSampleId)), + "datacollections": func.count(distinct(models.DataCollection.dataCollectionId)), + "types": func.group_concat(distinct(models.DataCollectionGroup.experimentType)), + "strategies": func.count(distinct(models.ScreeningOutput.screeningOutputId)), + "autoIntegrations": func.count( + distinct(models.AutoProcIntegration.autoProcIntegrationId) + ), + "integratedResolution": func.min( + models.AutoProcScalingStatistics.resolutionLimitHigh + ), + "proposal": models.Proposal.proposal, + } + + query = ( + db.session.query(models.BLSample, *metadata.values()) + .join(models.BLSample.Crystal) + .options( + contains_eager(models.BLSample.Crystal).load_only( + models.Crystal.cell_a, + models.Crystal.cell_b, + models.Crystal.cell_c, + models.Crystal.cell_alpha, + models.Crystal.cell_beta, + models.Crystal.cell_gamma, + ) + ) + .join(models.Crystal.Protein) + .options( + contains_eager(models.BLSample.Crystal, models.Crystal.Protein).load_only( + "name", "acronym" + ), + ) + .outerjoin( + models.BLSubSample, + 
models.BLSubSample.blSampleId == models.BLSample.blSampleId, + ) + .outerjoin( + models.DataCollectionGroup, + models.DataCollectionGroup.blSampleId == models.BLSample.blSampleId, + ) + .outerjoin( + models.DataCollection, + models.DataCollectionGroup.dataCollectionGroupId + == models.DataCollection.dataCollectionGroupId, + ) + .outerjoin(models.Screening) + .outerjoin( + models.ScreeningOutput, + and_( + models.Screening.screeningId == models.ScreeningOutput.screeningId, + models.ScreeningOutput.strategySuccess == 1, + ), + ) + .outerjoin(models.AutoProcIntegration) + .outerjoin(models.AutoProcScalingHasInt) + .outerjoin( + models.AutoProcScalingStatistics, + models.AutoProcScalingHasInt.autoProcScalingId + == models.AutoProcScalingStatistics.autoProcScalingId, + ) + .join( + models.Container, + models.BLSample.containerId == models.Container.containerId, + ) + .options( + contains_eager(models.BLSample.Container).load_only( + models.Container.code, + ) + ) + .join(models.Dewar, models.Container.dewarId == models.Dewar.dewarId) + .options( + contains_eager( + models.BLSample.Container, + models.Container.Dewar, + ).load_only( + models.Dewar.code, + ) + ) + .join(models.Shipping, models.Dewar.shippingId == models.Shipping.shippingId) + .options( + contains_eager( + models.BLSample.Container, models.Container.Dewar, models.Dewar.Shipping + ).load_only( + models.Shipping.shippingName, + ) + ) + .join(models.Proposal, models.Proposal.proposalId == models.Shipping.proposalId) + .group_by(models.BLSample.blSampleId) + ) + + if hasattr(models.ContainerQueueSample, "dataCollectionPlanId") and hasattr( + models.ContainerQueueSample, "blSampleId" + ): + query = query.outerjoin( + models.ContainerQueueSample, + models.BLSample.blSampleId == models.ContainerQueueSample.blSampleId, + ) + DataCollectionQueued: models.DataCollection = aliased(models.DataCollection) + query = query.outerjoin( + DataCollectionQueued, + models.ContainerQueueSample.dataCollectionPlanId + == DataCollectionQueued.dataCollectionPlanId, + ) + metadata["queued"] = func.IF( + func.count(models.ContainerQueueSample.containerQueueSampleId) + > func.count(DataCollectionQueued.dataCollectionId), + True, + False, + ) + + query = query.add_columns(metadata["queued"]) + else: + metadata["queued"] = literal_column("0") + query = query.add_columns(metadata["queued"]) + + if search: + query = query.filter( + models.BLSample.name.like(f"%{search}%"), + ) + + query = with_authorization(query) + + if blSampleId: + query = query.filter(models.BLSample.blSampleId == blSampleId) + + if proteinId: + query = query.filter(models.Protein.proteinId == proteinId) + + if containerId: + query = query.filter(models.Container.containerId == containerId) + + if proposal: + proposal_row = ( + db.session.query(models.Proposal) + .filter(models.Proposal.proposal == proposal) + .first() + ) + if proposal_row: + query = query.filter(models.Proposal.proposalId == proposal_row.proposalId) + + if beamLineName: + query = query.filter( + and_( + models.Dewar.dewarStatus == "processing", + models.Container.beamlineLocation == beamLineName, + models.Container.sampleChangerLocation != "", + ) + ) + + if status: + if hasattr(models, "ProcessingJob"): + if status == SAMPLE_STATUS_ENUM.Processed: + query = query.join(models.ProcessingJob) + + if status.value == "Sample Action": + query = query.join( + models.RobotAction, + models.RobotAction.blsampleId == models.BLSample.blSampleId, + ) + + query = query.having(SAMPLE_STATUS_FILTERS[status.value] > 0) + + if sort_order: + 
query = order( + query, + SAMPLE_ORDER_BY_MAP, + sort_order, + {"order_by": "blSampleId", "order": "desc"}, + ) + + total = query.count() + query = page(query, skip=skip, limit=limit) + results = with_metadata(query.all(), list(metadata.keys())) + + for result in results: + if result._metadata["types"]: + result._metadata["types"] = result._metadata["types"].split(",") + + return Paged(total=total, results=results, skip=skip, limit=limit) + + +def create_sample(sample: schema.SampleCreate) -> models.BLSample: + sample_dict = sample.dict() + db_sample = models.BLSample(**sample_dict) + db.session.add(db_sample) + db.session.commit() + + new_sample = get_samples(blSampleId=db_sample.blSampleId, skip=0, limit=1) + return new_sample.first + + +def update_sample(sampleId: int, sample: schema.SampleCreate) -> models.BLSample: + sample_dict = sample.dict(exclude_unset=True) + existing_sample = get_samples(blSampleId=sampleId, skip=0, limit=1).first + + update_model(existing_sample, sample_dict) + db.session.commit() + + return get_samples(blSampleId=sampleId, skip=0, limit=1).first + + +SUBSAMPLE_ORDER_BY_MAP = { + "blSubSampleId": models.BLSubSample.blSubSampleId, + "datacollections": func.count(distinct(models.DataCollection.dataCollectionId)), +} + +if hasattr(models.BLSubSample, "type"): + SUBSAMPLE_ORDER_BY_MAP["type"] = models.BLSubSample.type + + +def get_subsamples( + skip: int, + limit: int, + blSubSampleId: Optional[int] = None, + blSampleId: Optional[int] = None, + proteinId: Optional[int] = None, + proposal: Optional[str] = None, + containerId: Optional[int] = None, + sort_order: Optional[dict[str, str]] = None, +) -> Paged[models.BLSubSample]: + metadata = { + "datacollections": func.count(distinct(models.DataCollection.dataCollectionId)), + "types": func.group_concat(distinct(models.DataCollectionGroup.experimentType)), + } + + query = ( + db.session.query(models.BLSubSample, *metadata.values()) + .join(models.BLSubSample.BLSample) + .join(models.BLSample.Crystal) + .options( + contains_eager(models.BLSubSample.BLSample).load_only( + models.BLSample.name, + ) + ) + .join(models.Crystal.Protein) + .options( + contains_eager( + models.BLSubSample.BLSample, + models.BLSample.Crystal, + models.Crystal.Protein, + ).load_only("name", "acronym"), + ) + .outerjoin( + models.DataCollection, + models.DataCollection.blSubSampleId == models.BLSubSample.blSubSampleId, + ) + .outerjoin( + models.DataCollectionGroup, + models.DataCollectionGroup.dataCollectionGroupId + == models.DataCollection.dataCollectionGroupId, + ) + .options( + joinedload(models.BLSubSample.Position1).load_only( + models.Position.posX, + models.Position.posY, + ) + ) + .options( + joinedload(models.BLSubSample.Position2).load_only( + models.Position.posX, + models.Position.posY, + ) + ) + .join( + models.Container, + models.BLSample.containerId == models.Container.containerId, + ) + .join(models.Dewar, models.Container.dewarId == models.Dewar.dewarId) + .join(models.Shipping, models.Dewar.shippingId == models.Shipping.shippingId) + .join(models.Proposal, models.Proposal.proposalId == models.Protein.proposalId) + .group_by(models.BLSubSample.blSubSampleId) + ) + + if hasattr(models.ContainerQueueSample, "dataCollectionPlanId"): + query = query.outerjoin( + models.ContainerQueueSample, + models.BLSubSample.blSubSampleId + == models.ContainerQueueSample.blSubSampleId, + ) + DataCollectionQueued: models.DataCollection = aliased(models.DataCollection) + query = query.outerjoin( + DataCollectionQueued, + models.ContainerQueueSample.dataCollectionPlanId + ==
DataCollectionQueued.dataCollectionPlanId, + ) + metadata["queued"] = func.IF( + func.count(models.ContainerQueueSample.containerQueueSampleId) + > func.count(DataCollectionQueued.dataCollectionId), + True, + False, + ) + query = query.add_columns(metadata["queued"]) + else: + metadata["queued"] = literal_column("0") + query = query.add_columns(metadata["queued"]) + + query = with_authorization(query) + + if blSubSampleId: + query = query.filter(models.BLSubSample.blSubSampleId == blSubSampleId) + + if blSampleId: + query = query.filter(models.BLSample.blSampleId == blSampleId) + + if proteinId: + query = query.filter(models.Protein.proteinId == proteinId) + + if containerId: + query = query.filter(models.Container.containerId == containerId) + + if proposal: + query = query.filter(models.Proposal.proposal == proposal) + + query = order(query, SUBSAMPLE_ORDER_BY_MAP, sort_order) + + total = query.count() + query = page(query, skip=skip, limit=limit) + results = with_metadata(query.all(), list(metadata.keys())) + + for result in results: + if result._metadata["types"]: + result._metadata["types"] = result._metadata["types"].split(",") + else: + result._metadata["types"] = [] + + return Paged(total=total, results=results, skip=skip, limit=limit) + + +def get_sample_images( + skip: int, + limit: int, + blSampleId: Optional[int] = None, + blSampleImageId: Optional[int] = None, +) -> Paged[models.BLSampleImage]: + metadata = { + "url": func.concat( + f"{settings.api_root}/samples/images/", + models.BLSampleImage.blSampleImageId, + ) + } + + query = ( + db.session.query(models.BLSampleImage, *metadata.values()) + .join(models.BLSample) + .join( + models.Container, + models.BLSample.containerId == models.Container.containerId, + ) + .join(models.Dewar, models.Container.dewarId == models.Dewar.dewarId) + .join(models.Shipping, models.Dewar.shippingId == models.Shipping.shippingId) + ) + + if blSampleId: + query = query.filter(models.BLSample.blSampleId == blSampleId) + + if blSampleImageId: + query = query.filter(models.BLSampleImage.blSampleImageId == blSampleImageId) + + query = with_authorization(query, proposalColumn=models.Shipping.proposalId) + + total = query.count() + query = page(query, skip=skip, limit=limit) + results = with_metadata(query.all(), list(metadata.keys())) + + return Paged(total=total, results=results, skip=skip, limit=limit) diff --git a/pyispyb/core/modules/screening.py b/pyispyb/core/modules/screening.py deleted file mode 100644 index b5345e3e..00000000 --- a/pyispyb/core/modules/screening.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" diff --git a/pyispyb/core/modules/session.py b/pyispyb/core/modules/session.py deleted file mode 100644 index 58925d0f..00000000 --- a/pyispyb/core/modules/session.py +++ /dev/null @@ -1,299 +0,0 @@ -""" -Project: py-ispyb. 
- -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from pyispyb.app.extensions import db -from pyispyb.app.extensions.auth import auth_provider -from pyispyb.app.utils import create_response_item - -from pyispyb.core import models, schemas - -from pyispyb.core.modules import beamline_setup, session, proposal - - -def get_sessions(request): - """ - Returns session based on query parameters. - - Args: - query_dict ([type]): [description] - - Returns: - [type]: [description] - """ - query_dict = request.args.to_dict() - - is_admin, proposal_id_list = proposal.get_proposal_ids(request) - - run_query = False - if is_admin: - run_query = True - else: - if not proposal_id_list: - msg = "No sessions returned. User has no proposals." - else: - if "proposalId" in query_dict.keys(): - if query_dict["proposalId"] in proposal_id_list: - run_query = True - else: - msg = ( - "Proposal with id %s is not associated with user" - % query_dict["proposalId"] - ) - else: - query_dict["proposalId"] = proposal_id_list - - if run_query: - return db.get_db_items( - models.BLSession, - schemas.session.dict_schema, - schemas.session.ma_schema, - query_dict, - ) - else: - return create_response_item(msg=msg) - - -def add_session(data_dict): - """ - Adds new session. - - Args: - session_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item(models.BLSession, schemas.session.ma_schema, data_dict) - - -def get_session_by_id(session_id): - """ - Returns session info by its sessionId. - - Args: - session_id (int): corresponds to sessionId in db - - Returns: - dict: info about session as dict - """ - data_dict = {"sessionId": session_id} - return db.get_db_item( - models.BLSession, schemas.session.ma_schema, data_dict - ) - - -def get_session_info_by_id(session_id): - """ - Returns session info by its sessionId. - - Args: - session_id (int): corresponds to sessionId in db - - Returns: - dict: info about session as dict - """ - session_json = get_session_by_id(session_id) - if session_json: - session_json["local_contact"] = session.get_session_by_id( - session_json["sessionId"] - ) - session_json["beamline_setup"] = beamline_setup.get_beamline_setup_by_id( - session_json["beamLineSetupId"] - ) - # session_json["data_collections_groups"] = data_collection. - # get_data_collection_groups({"sessionId" : session_id})["data"]["rows"] - - return session_json - - -def get_sessions_by_date(start_date=None, end_date=None, beamline=None): - """ - Returns list of sessions by start_date, end_date and beamline. - - Args: - start_date (datetime, optional): start date. Defaults to None. - end_date (datetime, optional): end date. Defaults to None. - beamline (str, optional): beamline name. Defaults to None. 
- - Returns: - list: list of session dicts - """ - query = models.BLSession.query - if start_date: - query = query.filter(models.BLSession.startDate >= start_date) - if end_date: - query = query.filter(models.BLSession.endDate <= end_date) - if beamline: - query = query.filter(models.BLSession.beamLineName == beamline) - return schemas.session.ma_schema.dump(query, many=True) - - -def update_session(session_id, data_dict): - """ - Updates session. - - Args: - session_id ([type]): [description] - session_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"sessionId": session_id} - return db.update_db_item( - models.BLSession, schemas.session.ma_schema, id_dict, data_dict - ) - - -def patch_session(session_id, data_dict): - """ - Patch a session. - - Args: - session_id ([type]): [description] - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"sessionId": session_id} - return db.patch_db_item( - models.BLSession, schemas.session.ma_schema, id_dict, data_dict - ) - - -def delete_session(session_id): - """ - Deletes session item from db. - - Args: - session_id (int): sessionId column in db - - Returns: - bool: True if the session exists and deleted successfully, - otherwise return False - """ - id_dict = {"sessionId": session_id} - return db.delete_db_item(models.BLSession, id_dict) - - -def get_beam_calendars(request): - """ - Returns beam_calendar items based on query parameters. - - Args: - query_dict (dict): [description] - - Returns: - [type]: [description] - """ - query_dict = request.args.to_dict() - - return db.get_db_items( - models.BeamCalendar, - schemas.beam_calendar.dict_schema, - schemas.beam_calendar.ma_schema, - query_dict, - ) - - -def add_beam_calendar(data_dict): - """ - Adds data collection item. - - Args: - beam_calendar_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item( - models.BeamCalendar, schemas.beam_calendar.ma_schema, data_dict - ) - - -def get_beam_calendar_by_id(beam_calendar_id): - """ - Returns beam_calendar by its beam_calendarId. - - Args: - beam_calendar_id (int): corresponds to beamlineSetupId in db - - Returns: - dict: info about beam_calendar as dict - """ - data_dict = {"beamCalendarId": beam_calendar_id} - return db.get_db_item( - models.BeamCalendar, schemas.beam_calendar.ma_schema, data_dict - ) - - -def update_beam_calendar(beam_calendar_id, data_dict): - """ - Updates beam_calendar. - - Args: - beam_calendar_id ([type]): [description] - beam_calendar_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"beamCalendarId": beam_calendar_id} - return db.update_db_item( - models.BeamCalendar, schemas.beam_calendar.ma_schema, id_dict, data_dict - ) - - -def patch_beam_calendar(beam_calendar_id, data_dict): - """ - Patch a beam_calendar. - - Args: - beam_calendar_id ([type]): [description] - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"beamCalendarId": beam_calendar_id} - return db.patch_db_item( - models.BeamCalendar, schemas.beam_calendar.ma_schema, id_dict, data_dict - ) - - -def delete_beam_calendar(beam_calendar_id): - """ - Deletes beam_calendar item from db. 
- - Args: - beam_calendar_id (int): beam_calendarId column in db - - Returns: - bool: True if the beam_calendar exists and deleted successfully, - otherwise return False - """ - id_dict = {"beamCalendarId": beam_calendar_id} - return db.delete_db_item(models.BeamCalendar, id_dict) diff --git a/pyispyb/core/modules/sessions.py b/pyispyb/core/modules/sessions.py new file mode 100644 index 00000000..2c249ec2 --- /dev/null +++ b/pyispyb/core/modules/sessions.py @@ -0,0 +1,211 @@ +from datetime import datetime, timedelta +from typing import Optional + +from ispyb import models +from sqlalchemy import func, and_, or_, extract, distinct +from sqlalchemy.orm import joinedload, contains_eager + +from ...app.extensions.database.definitions import ( + beamlines_from_group, + groups_from_beamlines, + with_authorization, +) +from ...app.extensions.database.utils import Paged, page, with_metadata +from ...app.extensions.database.middleware import db +from ...core.modules.utils import encode_external_id + + +def get_sessions( + skip: int, + limit: int, + sessionId: Optional[int] = None, + externalId: Optional[int] = None, + expSessionPk: Optional[int] = None, + proposalId: Optional[int] = None, + proposal: Optional[str] = None, + session: Optional[str] = None, + beamLineName: Optional[str] = None, + beamLineGroup: Optional[str] = None, + scheduled: Optional[bool] = None, + upcoming: Optional[bool] = None, + previous: Optional[bool] = None, + sessionType: Optional[str] = None, + month: Optional[int] = None, + year: Optional[int] = None, + withAuthorization: bool = True, +) -> Paged[models.BLSession]: + metadata = { + "active": func.IF( + and_( + models.BLSession.startDate <= datetime.now(), + models.BLSession.endDate >= datetime.now(), + ), + True, + False, + ), + "active_soon": func.IF( + and_( + models.BLSession.startDate <= datetime.now() - timedelta(minutes=20), + models.BLSession.endDate >= datetime.now() + timedelta(minutes=20), + ), + True, + False, + ), + "sessionTypes": func.group_concat(distinct(models.SessionType.typeName)), + "persons": func.count(models.SessionHasPerson.personId), + } + + query = ( + db.session.query(models.BLSession, *metadata.values()) + .outerjoin(models.SessionType) + .options(joinedload(models.BLSession.BeamLineSetup)) + .join(models.Proposal) + .outerjoin(models.SessionHasPerson) + .options(contains_eager(models.BLSession.Proposal)) + .order_by(models.BLSession.startDate.desc()) + .group_by(models.BLSession.sessionId) + ) + + if sessionId: + query = query.filter(models.BLSession.sessionId == sessionId) + + if session: + query = query.filter(models.BLSession.session == session) + + if externalId: + externalId = encode_external_id(externalId) + query = query.filter(models.BLSession.externalId == externalId) + + if expSessionPk: + query = query.filter(models.BLSession.expSessionPk == expSessionPk) + + if proposalId: + query = query.filter(models.BLSession.proposalId == proposalId) + + if proposal: + query = query.filter(models.Proposal.proposal == proposal) + + if beamLineName: + query = query.filter(models.BLSession.beamLineName == beamLineName) + + if scheduled: + query = query.filter(models.BLSession.scheduled == 1) + + if upcoming: + query = query.filter(models.BLSession.endDate >= datetime.now()) + query = query.order_by(models.BLSession.startDate) + + if previous: + query = query.filter(models.BLSession.endDate < datetime.now()) + + if sessionType: + query = query.filter(models.SessionType.typeName == sessionType) + + if month: + query = query.filter( + or_( + 
extract("month", models.BLSession.startDate) == month, + extract("month", models.BLSession.endDate) == month, + ) + ) + + if year: + query = query.filter( + or_( + extract("year", models.BLSession.startDate) == year, + extract("year", models.BLSession.endDate) == year, + ) + ) + + if beamLineGroup: + query = query.filter( + models.BLSession.beamLineName.in_(beamlines_from_group(beamLineGroup)) + ) + + if withAuthorization: + query = with_authorization(query, joinBLSession=False) + + total = query.count() + query = page(query, skip=skip, limit=limit) + results = with_metadata(query.all(), list(metadata.keys())) + + dataCollections = ( + db.session.query( + func.count(models.DataCollection.dataCollectionId).label("count"), + models.DataCollectionGroup.sessionId, + ) + .join(models.DataCollectionGroup) + .filter( + models.DataCollectionGroup.sessionId.in_( + [result.sessionId for result in results] + ) + ) + .group_by(models.DataCollectionGroup.sessionId) + .all() + ) + dataCollectionCount = {} + for dataCollection in dataCollections: + dataCollectionDict = dataCollection._asdict() + dataCollectionCount[dataCollectionDict["sessionId"]] = dataCollectionDict[ + "count" + ] + + for result in results: + result._metadata["uiGroups"] = groups_from_beamlines([result.beamLineName]) + result._metadata["datacollections"] = dataCollectionCount.get( + result.sessionId, 0 + ) + result._metadata["sessionTypes"] = ( + result._metadata["sessionTypes"].split(",") + if result._metadata["sessionTypes"] + else [] + ) + + return Paged(total=total, results=results, skip=skip, limit=limit) + + +def get_sessions_for_beamline_group( + beamLineGroup: Optional[str], + upcoming: Optional[bool] = None, + previous: Optional[bool] = None, + sessionType: Optional[str] = None, +) -> Paged[models.BLSession]: + beamLines = beamlines_from_group(beamLineGroup) + if not beamLines: + return Paged(total=0, results=[], skip=0, limit=0) + + sessions = [] + for beamLine in beamLines: + beamline_sessions = get_sessions( + skip=0, + limit=1, + beamLineName=beamLine, + upcoming=upcoming, + previous=previous, + sessionType=sessionType, + ) + + sessions.extend(beamline_sessions.results) + + return Paged(total=len(sessions), results=sessions, skip=0, limit=len(sessions)) + + +def get_sessionHasPerson( + skip: int, + limit: int, + sessionId: Optional[int] = None, +) -> Paged[models.SessionHasPerson]: + + query = db.session.query(models.SessionHasPerson).options( + joinedload(models.SessionHasPerson.Person) + ) + + if sessionId: + query = query.filter(models.SessionHasPerson.sessionId == sessionId) + + query_distinct = query.distinct() + total = query_distinct.count() + + query = page(query_distinct, skip=skip, limit=limit) + + return Paged(total=total, results=query.all(), skip=skip, limit=limit) diff --git a/pyispyb/core/modules/shipping.py b/pyispyb/core/modules/shipping.py index b8329c43..bdc067a7 100644 --- a/pyispyb/core/modules/shipping.py +++ b/pyispyb/core/modules/shipping.py @@ -1,157 +1,74 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from pyispyb.app.extensions import db -from pyispyb.app.utils import create_response_item - -from pyispyb.core import models, schemas -from pyispyb.core.modules import contacts, proposal, dewar - - -def get_shippings(request): - """Returns shippings by query parameters""" - - query_dict = request.args.to_dict() - - run_query, msg = proposal.get_proposal_ids(request) - - if run_query: - return db.get_db_items( - models.Shipping, - schemas.shipping.dict_schema, - schemas.shipping.ma_schema, - query_dict, - ) - else: - return create_response_item(msg=msg) - - -def get_shipping_by_id(shipping_id): - """ - Returns shipping by its shippingId. - - Args: - shipping_id (int): corresponds to shippingId in db - - Returns: - dict: info about shipping as dict - """ - id_dict = {"shippingId": shipping_id} - return db.get_db_item( - models.Shipping, schemas.shipping.ma_schema, id_dict - ) - - -def get_shipping_info_by_id(shipping_id): - """ - Returns shipping by its shippingId. - - Args: - shipping_id (int): corresponds to shippingId in db - - Returns: - dict: info about shipping as dict - """ - shipping_dict = {} - - shipping_dict["shipping"] = get_shipping_by_id(shipping_id) - shipping_dict["proposal"] = proposal.get_proposal_by_id( - shipping_dict["shipping"]["proposalId"] +from datetime import datetime +from typing import Optional + +from sqlalchemy import distinct, func +from sqlalchemy.orm import joinedload +from ispyb import models + +from ...app.extensions.database.definitions import with_authorization +from ...app.extensions.database.utils import Paged, page, update_model, with_metadata +from ...app.extensions.database.middleware import db +from ..schemas import shipping as schema + + +def get_shippings( + skip: int, + limit: int, + shippingId: Optional[int] = None, + proposal: str = None, + proposalId: Optional[int] = None, + withAuthorization: bool = True, +) -> Paged[models.Shipping]: + metadata = {"dewars": func.count(distinct(models.Dewar.dewarId))} + + query = ( + db.session.query(models.Shipping, *metadata.values()) + .options(joinedload(models.Shipping.LabContact)) + .options(joinedload(models.Shipping.LabContact1)) + .join(models.Proposal, models.Proposal.proposalId == models.Shipping.proposalId) + .outerjoin(models.Dewar) + .group_by(models.Shipping.shippingId) ) - shipping_dict["send_lab_contact"] = contacts.get_lab_contact_by_params( - {"labContactId": shipping_dict["shipping"]["sendingLabContactId"]} - ) - shipping_dict["send_person"] = contacts.get_person_by_id( - shipping_dict["send_lab_contact"]["personId"] - ) - shipping_dict["send_lab"] = contacts.get_laboratory_by_id( - shipping_dict["send_person"]["laboratoryId"] - ) - dewars = dewar.get_dewars_by_query({"shippingId": shipping_id}) - shipping_dict["dewars"] = dewars["data"]["rows"] - - return shipping_dict + if shippingId: + query = query.filter(models.Shipping.shippingId == shippingId) -def add_shipping(data_dict): - """ - Adds new shipping + if proposal: + query = query.filter(models.Proposal.proposal == proposal) - Args: - data_dict ([type]): [description] + if proposalId: + query = query.filter(models.Proposal.proposalId == proposalId) - Returns: - [type]: [description] - """ - return db.add_db_item(models.Shipping, schemas.shipping.ma_schema, data_dict) + if withAuthorization: + query = with_authorization(query) 
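# Illustrative sketch (editor's aside, not part of this changeset): calling
# the new get_shippings() defined in this file. The import path follows the
# file path in this diff; the proposal name is invented, and `_metadata` is
# the per-row dict attached by with_metadata() a few lines below.
from pyispyb.core.modules.shipping import get_shippings

paged = get_shippings(skip=0, limit=25, proposal="MX1234")
for shipping in paged.results:
    # "dewars" is the distinct dewar count selected alongside the model
    print(shipping.shippingId, shipping._metadata["dewars"])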
+ total = query.count() + query = page(query, skip=skip, limit=limit) + results = with_metadata(query.all(), list(metadata.keys())) -def update_shipping(shipping_id, data_dict): - """ - Updates shipping. + return Paged(total=total, results=results, skip=skip, limit=limit) - Args: - shipping_id ([type]): [description] - shipping_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"shippingId": shipping_id} - return db.update_db_item( - models.Shipping, schemas.shipping.ma_schema, id_dict, data_dict - ) +def create_shipping(shipping: schema.ShippingCreate) -> models.Shipping: + shipping_dict = shipping.dict() + shipping_dict["safetyLevel"] = shipping.safetyLevel.value + shipping_dict["bltimeStamp"] = datetime.now() -def patch_shipping(shipping_id, data_dict): - """ - Partialy updates shipping. + shipping = models.Shipping(**shipping_dict) + db.session.add(shipping) + db.session.commit() - Args: - shipping_id ([type]): [description] - shipping_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"shippingId": shipping_id} - return db.patch_db_item( - models.Shipping, schemas.shipping.ma_schema, id_dict, data_dict - ) + new_shipping = get_shippings(shippingId=shipping.shippingId, skip=0, limit=1) + return new_shipping.first -def delete_shipping(shipping_id): - """ - Deletes shipping item from db. +def update_shipping( + shippingId: int, shipping: schema.ShippingCreate +) -> models.Shipping: + shipping_dict = shipping.dict(exclude_unset=True) + new_shipping = get_shippings(shippingId=shippingId, skip=0, limit=1).first - Args: - shipping_id (int): shippingId column in db + update_model(new_shipping, shipping_dict) + db.session.commit() - Returns: - bool: True if the shipping exists and deleted successfully, - otherwise return False - """ - id_dict = {"shippingId": shipping_id} - return db.delete_db_item(models.Shipping, id_dict) + return get_shippings(shippingId=shippingId, skip=0, limit=1).first diff --git a/pyispyb/core/modules/stats.py b/pyispyb/core/modules/stats.py new file mode 100644 index 00000000..3c390375 --- /dev/null +++ b/pyispyb/core/modules/stats.py @@ -0,0 +1,912 @@ +from collections import Counter +from dataclasses import dataclass +from difflib import SequenceMatcher +import os +from typing import Any, Optional + +from ispyb import models +import sqlalchemy +from sqlalchemy import func, and_, or_, text, extract, distinct, Date, cast + + +from ...config import settings +from ...core.modules.utils import get_last_line, to_energy +from ...app.extensions.database.utils import Paged, page +from ...app.extensions.database.definitions import ( + beamlines_from_group, + with_authorization, +) +from ...app.extensions.database.middleware import db +from ...core.schemas import stats as schema + + +def get_sessionId(session: Optional[str]) -> Optional[int]: + if not session: + return + + session_row = ( + db.session.query(models.BLSession) + .join(models.Proposal) + .filter(models.BLSession.session == session) + ).first() + if session_row: + return session_row.sessionId + + +def filter_query( + query: "sqlalchemy.orm.Query[Any]", + runId: str = None, + beamLineName: str = None, + sessionId: int = None, +) -> "sqlalchemy.orm.Query[Any]": + if runId: + query = query.join( + models.VRun, + models.BLSession.startDate.between( + models.VRun.startDate, models.VRun.endDate + ), + ) + query = query.filter(models.VRun.runId == runId) + + if beamLineName: + query = query.filter(models.BLSession.beamLineName == beamLineName) + + if 
sessionId: + query = query.filter(models.BLSession.sessionId == sessionId) + + query = with_authorization(query, joinBLSession=False) + + return query + + +def get_breakdown( + session: Optional[str] = None, + sessionId: Optional[int] = None, + beamLineName: Optional[str] = None, + runId: Optional[str] = None, +) -> schema.Breakdown: + + if session or sessionId: + info = db.session.query( + models.BLSession.startDate, + models.BLSession.endDate, + models.BLSession.beamLineName, + models.BLSession.session, + models.BLSession.sessionId, + ( + func.timestampdiff( + text("SECOND"), + models.BLSession.startDate, + models.BLSession.endDate, + ) + / 3600 + ).label("duration"), + ).join(models.Proposal) + + if session: + info = info.filter(models.BLSession.session == session) + else: + info = info.filter(models.BLSession.sessionId == sessionId) + + info = info.first() + else: + info = ( + db.session.query( + models.VRun.startDate, + models.VRun.endDate, + models.VRun.run, + ( + func.timestampdiff( + text("SECOND"), + models.VRun.startDate, + models.VRun.endDate, + ) + / 3600 + ).label("duration"), + ) + .filter(models.VRun.runId == runId) + .first() + ) + + queries = {} + queries["dc"] = ( + db.session.query( + models.DataCollection.dataCollectionId, + models.DataCollection.startTime, + models.DataCollection.endTime, + models.DataCollection.runStatus.label("status"), + models.DataCollectionGroup.experimentType.label("subType"), + models.DataCollection.wavelength, + models.DataCollection.beamSizeAtSampleX, + models.DataCollection.beamSizeAtSampleY, + models.DataCollection.chiStart, + models.DataCollection.kappaStart, + models.DataCollection.phiStart, + ) + .select_from(models.DataCollection) + .join(models.DataCollectionGroup) + .outerjoin( + models.BLSample, + models.DataCollectionGroup.blSampleId == models.BLSample.blSampleId, + ) + .filter( + and_( + models.DataCollection.startTime != None, # noqa + models.DataCollection.endTime != None, # noqa + ) + ) + .group_by(models.DataCollection.dataCollectionId) + .order_by(models.DataCollection.startTime) + ) + + queries["robot"] = ( + db.session.query( + models.RobotAction.robotActionId, + models.RobotAction.startTimestamp.label("startTime"), + models.RobotAction.endTimestamp.label("endTime"), + models.RobotAction.actionType.label("subType"), + models.RobotAction.status, + ) + .outerjoin(models.BLSample) + .group_by(models.RobotAction.robotActionId) + .order_by(models.RobotAction.endTimestamp) + ) + + queries["edge"] = ( + db.session.query( + models.EnergyScan.energyScanId, + models.EnergyScan.startTime, + models.EnergyScan.endTime, + ) + .outerjoin(models.BLSample) + .order_by(models.EnergyScan.endTime) + .group_by(models.EnergyScan.energyScanId) + ) + + queries["xrf"] = ( + db.session.query( + models.XFEFluorescenceSpectrum.xfeFluorescenceSpectrumId, + models.XFEFluorescenceSpectrum.startTime, + models.XFEFluorescenceSpectrum.endTime, + ) + .outerjoin(models.BLSample) + .order_by(models.XFEFluorescenceSpectrum.endTime) + .group_by(models.XFEFluorescenceSpectrum.xfeFluorescenceSpectrumId) + ) + + queries["fault"] = ( + db.session.query( + models.BFFault.faultId, + models.BFFault.title, + models.BFFault.beamtimelost_starttime.label("startTime"), + models.BFFault.beamtimelost_endtime.label("endTime"), + ) + .filter(models.BFFault.beamtimelost == 1) + .order_by(models.BFFault.beamtimelost_endtime) + .group_by(models.BFFault.faultId) + ) + + if session or sessionId: + queries["strategy"] = ( + db.session.query( + 
models.DataCollection.endTime.label("startTime"), + func.max(models.Screening.bltimeStamp).label("endTime"), + ) + .join( + models.Screening, + or_( + models.Screening.dataCollectionId + == models.DataCollection.dataCollectionId, + models.Screening.dataCollectionGroupId + == models.DataCollection.dataCollectionGroupId, + ), + ) + .join(models.DataCollectionGroup) + .order_by(models.DataCollection.endTime) + .group_by( + models.DataCollection.dataCollectionId, models.DataCollection.endTime + ) + ) + + queries["centring"] = ( + db.session.query( + models.RobotAction.endTimestamp.label("startTime"), + func.min(models.DataCollection.startTime).label("endTime"), + func.timestampdiff( + text("SECOND"), + cast(models.RobotAction.endTimestamp, Date), + func.min(models.DataCollection.startTime), + ).label("duration"), + ) + .select_from(models.RobotAction) + .join( + models.DataCollectionGroup, + models.DataCollectionGroup.blSampleId == models.RobotAction.blsampleId, + ) + .join( + models.DataCollection, + and_( + models.DataCollection.dataCollectionGroupId + == models.DataCollectionGroup.dataCollectionGroupId, + models.RobotAction.endTimestamp < models.DataCollection.startTime, + ), + ) + .join( + models.BLSession, + models.BLSession.sessionId == models.DataCollectionGroup.sessionId, + ) + .order_by(models.RobotAction.endTimestamp) + .group_by(models.RobotAction.endTimestamp) + ) + + else: + queries["sessions"] = ( + db.session.query( + models.BLSession.session, + models.BLSession.sessionId, + models.BLSession.startDate.label("startTime"), + models.BLSession.endDate.label("endTime"), + models.BLSession.scheduled, + models.Proposal.title, + ) + .order_by(models.BLSession.startDate) + .group_by(models.BLSession.sessionId) + ) + + if not sessionId: + sessionId = get_sessionId(session) + results = {} + for key in queries.keys(): + if key not in ["fault", "strategy", "centring", "sessions"]: + queries[key] = ( + queries[key] + .add_columns( + models.BLSample.name.label("sample"), + models.Protein.name.label("protein"), + ) + .outerjoin(models.Crystal) + .outerjoin(models.Protein) + ) + + if key not in ["sessions", "centring"]: + queries[key] = queries[key].join(models.BLSession) + + queries[key] = queries[key].join(models.Proposal) + + queries[key] = filter_query(queries[key], runId, beamLineName, sessionId) + + if key == "centring": + subquery = queries[key].subquery() + queries[key] = db.session.query( + subquery.c.startTime.label("startTime"), + subquery.c.endTime.label("endTime"), + ).filter(subquery.c.duration < 1000) + + results[key] = [r._asdict() for r in queries[key].all()] + + history = [] + for key in ["dc", "robot", "edge", "xrf", "centring", "strategy"]: + if key in results: + for row in results[key]: + if row["endTime"]: + history.append(schema.BreakdownPoint(eventType=key, **row)) + + if "sessions" in results: + for row in results["sessions"]: + if row["scheduled"]: + history.append(schema.BreakdownPoint(eventType="session", **row)) + + series = [] + for plottable in [ + "wavelength", + "beamSizeAtSampleX", + "beamSizeAtSampleY", + "chiStart", + "phiStart", + "kappaStart", + ]: + series.append( + schema.BreakdownPlottable( + title="energy" if plottable == "wavelength" else plottable, + data=[ + to_energy(row[plottable]) + if plottable == "wavelength" + else row[plottable] + for row in results["dc"] + ], + ) + ) + + overview = ( + schema.BreakdownOverviewSession + if (session or sessionId) + else schema.BreakdownOverviewRun + ) + return { + "overview": overview( + 
counts=schema.BreakdownOverviewCounts( + datacollections=len(results["dc"]), + failed=0, + datacollectionTypes=Counter([row["subType"] for row in results["dc"]]), + robot=len(results["robot"]), + edge=len(results["edge"]), + xrf=len(results["xrf"]), + ), + **info, + ), + "history": history, + "plottables": schema.BreakdownPlottables( + time=[row["startTime"] for row in results["dc"]], series=series + ), + } + + +def get_times( + session: Optional[str] = None, + sessionId: Optional[int] = None, + proposal: Optional[str] = None, + beamLineName: Optional[str] = None, + runId: Optional[str] = None, +) -> schema.Times: + """Get proportions of time used in a session""" + queries = {} + queries["dc"] = ( + db.session.query( + func.min(models.BLSession.startDate).label("start"), + func.max(models.BLSession.endDate).label("end"), + models.BLSession.sessionId, + ( + func.timestampdiff( + text("SECOND"), + func.min(models.BLSession.startDate), + func.max(models.BLSession.endDate), + ) + / 3600 + ).label("duration"), + ( + func.sum( + func.timestampdiff( + text("SECOND"), + models.DataCollection.startTime, + models.DataCollection.endTime, + ) + ) + / ( + 3600 + * ( + func.count(models.DataCollection.dataCollectionId) + / func.count(distinct(models.DataCollection.dataCollectionId)) + ) + ) + ).label("datacollection"), + func.max(models.DataCollection.endTime).label("last"), + func.greatest( + func.timestampdiff( + text("SECOND"), + func.min(models.BLSession.startDate), + func.min(models.DataCollection.startTime), + ) + / 3600, + 0, + ).label("startup"), + func.greatest( + func.timestampdiff( + text("SECOND"), + func.max(models.DataCollection.endTime), + func.max(models.BLSession.endDate), + ) + / 3600, + 0, + ).label("remaining"), + ) + .select_from(models.DataCollection) + .join(models.DataCollectionGroup) + .join(models.BLSession) + .join(models.Proposal) + .group_by(models.BLSession.sessionId) + .order_by(models.BLSession.startDate.desc()) + ) + + queries["robot"] = ( + db.session.query( + ( + func.timestampdiff( + text("SECOND"), + models.RobotAction.startTimestamp, + models.RobotAction.endTimestamp, + ) + / 3600 + ).label("robot"), + models.BLSession.sessionId, + ) + .select_from(models.RobotAction) + .join(models.BLSession) + .join(models.Proposal) + .group_by(models.BLSession.sessionId) + ) + + queries["edge"] = ( + db.session.query( + ( + func.timestampdiff( + text("SECOND"), + models.EnergyScan.startTime, + models.EnergyScan.endTime, + ) + / 3600 + ).label("edge"), + models.BLSession.sessionId, + ) + .select_from(models.EnergyScan) + .join(models.BLSession) + .join(models.Proposal) + .group_by(models.BLSession.sessionId) + ) + + queries["xrf"] = ( + db.session.query( + ( + func.timestampdiff( + text("SECOND"), + models.XFEFluorescenceSpectrum.startTime, + models.XFEFluorescenceSpectrum.endTime, + ) + / 3600 + ).label("xrf"), + models.BLSession.sessionId, + ) + .select_from(models.XFEFluorescenceSpectrum) + .join(models.BLSession) + .join(models.Proposal) + .group_by(models.BLSession.sessionId) + ) + + queries["strategy"] = ( + db.session.query( + ( + func.timestampdiff( + text("SECOND"), + models.DataCollection.endTime, + func.max(models.Screening.bltimeStamp), + ) + / 3600 + ).label("strategy"), + models.BLSession.sessionId, + ) + .select_from(models.DataCollection) + .join( + models.DataCollectionGroup, + models.DataCollectionGroup.dataCollectionGroupId + == models.DataCollection.dataCollectionGroupId, + ) + .join( + models.Screening, + or_( + models.Screening.dataCollectionId + == 
models.DataCollection.dataCollectionId, + models.Screening.dataCollectionGroupId + == models.DataCollection.dataCollectionGroupId, + ), + ) + .join(models.BLSession) + .join(models.Proposal) + .group_by( + models.DataCollection.dataCollectionId, + models.BLSession.sessionId, + models.DataCollection.endTime, + ) + ) + + queries["centring"] = ( + db.session.query( + ( + func.timestampdiff( + text("SECOND"), + cast(models.RobotAction.endTimestamp, Date), + func.min(models.DataCollection.startTime), + ) + / 3600 + ).label("centring"), + models.BLSession.sessionId, + ) + .select_from(models.RobotAction) + .join( + models.DataCollection, + models.RobotAction.endTimestamp < models.DataCollection.startTime, + ) + .join( + models.DataCollectionGroup, + and_( + models.DataCollectionGroup.dataCollectionGroupId + == models.DataCollection.dataCollectionGroupId, + models.RobotAction.blsampleId == models.DataCollectionGroup.blSampleId, + ), + ) + .join( + models.BLSession, + models.BLSession.sessionId == models.DataCollectionGroup.sessionId, + ) + .join( + models.Proposal, models.Proposal.proposalId == models.BLSession.proposalId + ) + .group_by( + models.DataCollection.dataCollectionId, + models.BLSession.sessionId, + models.DataCollection.endTime, + ) + ) + + queries["fault"] = ( + db.session.query( + ( + func.timestampdiff( + text("SECOND"), + models.BFFault.beamtimelost_starttime, + models.BFFault.beamtimelost_endtime, + ) + / 3600 + ).label("fault"), + models.BLSession.sessionId, + ) + .select_from(models.BFFault) + .join(models.BLSession) + .join(models.Proposal) + .group_by(models.BLSession.sessionId) + ) + if not sessionId: + sessionId = get_sessionId(session) + proposalId = None + if proposal: + proposal_row = ( + db.session.query(models.Proposal).filter( + models.Proposal.proposal == proposal + ) + ).first() + proposalId = proposal_row.proposalId + + for key in queries.keys(): + queries[key] = filter_query(queries[key], runId, beamLineName, sessionId) + if proposalId: + queries[key] = queries[key].filter(models.Proposal.proposalId == proposalId) + + strategy = queries["strategy"].subquery() + queries["strategy"] = db.session.query( + func.sum(strategy.c.strategy).label("strategy"), + strategy.c.sessionId.label("sessionId"), + ).group_by(strategy.c.sessionId) + + centring = queries["centring"].subquery() + queries["centring"] = ( + db.session.query( + func.sum(centring.c.centring).label("centring"), + centring.c.sessionId.label("sessionId"), + ) + .filter(centring.c.centring < 0.25) + .group_by(centring.c.sessionId) + ) + + results = {} + for key in queries.keys(): + results[key] = [r._asdict() for r in queries[key].all()] + + session_lookup = {} + for key in ["robot", "strategy", "edge", "xrf", "centring", "fault"]: + if key not in session_lookup: + session_lookup[key] = {} + + for row in results[key]: + session_lookup[key][row["sessionId"]] = row[key] if row[key] else 0 + + sessions = [] + for row in results["dc"]: + for key in ["datacollection", "remaining"]: + if row[key] is None: + row[key] = 0 + session_time = schema.SessionTimeEntry( + **row, + robot=session_lookup["robot"].get(row["sessionId"], 0), + strategy=session_lookup["strategy"].get(row["sessionId"], 0), + edge=session_lookup["edge"].get(row["sessionId"], 0), + xrf=session_lookup["xrf"].get(row["sessionId"], 0), + centring=session_lookup["centring"].get(row["sessionId"], 0), + fault=session_lookup["fault"].get(row["sessionId"], 0), + ) + session_time.thinking = session_time.calc_thinking() + sessions.append(session_time) + + 
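# Illustrative sketch (editor's aside, not part of this changeset): the loop
# above stitches one number per time category into each SessionTimeEntry,
# keyed by sessionId. The same merge in isolation, with invented values:
session_lookup = {"robot": {101: 1.5}, "edge": {101: 0.2}, "xrf": {}}
merged = {key: table.get(101, 0) for key, table in session_lookup.items()}
assert merged == {"robot": 1.5, "edge": 0.2, "xrf": 0}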
average = schema.AverageTimeEntry() + average.average(*sessions) + return {"sessions": sessions, "average": average} + + +def get_errors( + session: Optional[str] = None, + sessionId: Optional[int] = None, + beamLineName: Optional[str] = None, + runId: Optional[str] = None, +) -> schema.Errors: + """Get proportion of success and errors for data collection types + along with their error message frequency""" + queries = {} + queries["total"] = ( + db.session.query( + func.count(distinct(models.DataCollection.dataCollectionId)).label("count"), + models.DataCollectionGroup.experimentType, + ) + .join(models.DataCollectionGroup) + .join(models.BLSession) + .join(models.Proposal) + .group_by(models.DataCollectionGroup.experimentType) + ) + + queries["dc"] = ( + db.session.query( + models.DataCollection.dataCollectionId, + models.DataCollection.runStatus, + models.DataCollectionGroup.experimentType, + models.DataCollectionFileAttachment.fileFullPath.label("logFile"), + ) + .outerjoin( + models.DataCollectionFileAttachment, + and_( + models.DataCollectionFileAttachment.dataCollectionId + == models.DataCollection.dataCollectionId, + models.DataCollectionFileAttachment.fileType == "log", + models.DataCollectionFileAttachment.fileFullPath.like("%err%"), + ), + ) + .join(models.DataCollectionGroup) + .join(models.BLSession) + .join(models.Proposal) + .group_by(models.DataCollection.dataCollectionId) + .filter(models.DataCollection.runStatus.notlike("%success%")) + ) + + if not sessionId: + sessionId = get_sessionId(session) + for key in queries.keys(): + queries[key] = filter_query(queries[key], runId, beamLineName, sessionId) + + totals_rows = [r._asdict() for r in queries["total"].all()] + totals: dict[str, schema.ExperimentTypeGroup] = {} + for row in totals_rows: + totals[row["experimentType"]] = schema.ExperimentTypeGroupPrepare( + experimentType=row["experimentType"], + total=row["count"], + failed=0, + aborted=0, + messages={}, + ) + + datacollections = [r._asdict() for r in queries["dc"].all()] + for row in datacollections: + if "aborted" in row["runStatus"].lower(): + totals[row["experimentType"]].aborted += 1 + else: + totals[row["experimentType"]].failed += 1 + + if row["logFile"]: + log_path = row["logFile"] + if settings.path_map: + log_path = settings.path_map + log_path + + if os.path.exists(log_path): + last_line = get_last_line(log_path) + if last_line: + if last_line not in totals[row["experimentType"]].messages: + + replaced = False + for message in totals[ + row["experimentType"] + ].messages.keys(): + s = SequenceMatcher(None, last_line, message) + if s.ratio() > 0.8: + last_line = message + replaced = True + + if not replaced: + totals[row["experimentType"]].messages[ + last_line + ] = schema.ExperimentTypeMessages( + message=last_line, + count=0, + ) + + totals[row["experimentType"]].messages[last_line].count += 1 + + for row in totals.values(): + row.failedPercent = round(row.failed / row.total * 100, 1) + row.abortedPercent = round(row.aborted / row.total * 100, 1) + row.messages = list(row.messages.values()) + row.messages = sorted(row.messages, key=lambda d: d.count) + + return {"totals": list(totals.values())} + + +def get_hourlies( + session: Optional[str] = None, + sessionId: Optional[int] = None, + proposal: Optional[str] = None, + beamLineName: Optional[str] = None, + runId: Optional[str] = None, +) -> schema.Hourlies: + """Get hourly statistics""" + queries = {} + + queries["datacollections"] = ( + db.session.query( + 
func.count(distinct(models.DataCollection.dataCollectionId)).label("count"), + extract("HOUR", models.DataCollection.startTime).label("hour"), + ) + .join(models.DataCollectionGroup) + .group_by( + func.concat( + extract("DAY", models.DataCollection.startTime), + extract("HOUR", models.DataCollection.startTime), + ) + ) + ) + + queries["loaded"] = ( + db.session.query( + func.count(distinct(models.RobotAction.robotActionId)).label("count"), + extract("HOUR", models.RobotAction.startTimestamp).label("hour"), + ) + .filter(models.RobotAction.actionType.like("load")) + .group_by( + func.concat( + extract("DAY", models.RobotAction.startTimestamp), + extract("HOUR", models.RobotAction.startTimestamp), + ) + ) + ) + if not sessionId: + sessionId = get_sessionId(session) + hourlies = {} + for key in queries.keys(): + queries[key] = queries[key].join(models.BLSession).join(models.Proposal) + + queries[key] = filter_query(queries[key], runId, beamLineName, sessionId) + + if proposal: + queries[key] = queries[key].filter(models.Proposal.proposal == proposal) + + subquery = queries[key].subquery() + queries[key] = db.session.query( + func.avg(subquery.c.count).label("average"), subquery.c.hour + ).group_by(subquery.c.hour) + + results = [r._asdict() for r in queries[key].all()] + hour_map = {} + for row in results: + hour_map[row["hour"]] = row["average"] + + hourlies[key] = schema.Hourly( + hour=[hour for hour in range(24)], + average=[hour_map.get(hour, 0) for hour in range(24)], + ) + + return hourlies + + +@dataclass +class HistogramParameter: + unit: str + start: float + end: float + bin_size: int + column: "sqlalchemy.Column[Any]" + count_column: "sqlalchemy.Column[Any]" + + +def get_parameter_histogram( + session: Optional[str] = None, + sessionId: Optional[int] = None, + beamLineName: Optional[str] = None, + runId: Optional[str] = None, + beamLineGroup: Optional[str] = None, + parameter: str = "energy", +) -> schema.ParameterHistograms: + """Get a parameter histogram""" + parameters: dict[str, HistogramParameter] = { + "energy": HistogramParameter( + unit="eV", + start=1000, + end=25000, + bin_size=200, + column=(1.98644568e-25 / (models.DataCollection.wavelength * 1e-10)) + / 1.60217646e-19, + count_column=models.DataCollection.wavelength, + ), + "beamsizex": HistogramParameter( + unit="um", + start=0, + end=150, + bin_size=5, + column=models.DataCollection.beamSizeAtSampleX * 1000, + count_column=models.DataCollection.beamSizeAtSampleX, + ), + "beamsizey": HistogramParameter( + unit="um", + start=0, + end=150, + bin_size=5, + column=models.DataCollection.beamSizeAtSampleY * 1000, + count_column=models.DataCollection.beamSizeAtSampleY, + ), + "exposuretime": HistogramParameter( + unit="ms", + start=0, + end=5000, + bin_size=50, + column=models.DataCollection.exposureTime * 1000, + count_column=models.DataCollection.exposureTime, + ), + } + + if parameter not in parameters: + raise IndexError(f"Unknown parameter `{parameter}`") + + param = parameters[parameter] + query = ( + db.session.query( + ((param.column / param.bin_size) * param.bin_size).label("x"), + func.count(param.count_column).label("y"), + models.BLSession.beamLineName, + ) + .select_from(models.DataCollection) + .join(models.DataCollectionGroup) + .join(models.BLSession) + .join(models.Proposal) + .group_by(models.BLSession.beamLineName, text("x")) + .order_by(models.BLSession.beamLineName, text("x")) + ) + + if not sessionId: + sessionId = get_sessionId(session) + query = filter_query(query, runId, beamLineName, sessionId) + + 
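# Illustrative sketch (editor's aside, not part of this changeset): the
# "energy" column above is E = h*c / lambda converted to eV, with
# h*c ~= 1.98644568e-25 J*m, the wavelength scaled from angstroms to metres
# by 1e-10, and the result divided by the electron charge in J/eV. Check:
hc = 1.98644568e-25            # Planck constant times speed of light, J*m
joules_per_ev = 1.60217646e-19
energy_ev = hc / (1.0 * 1e-10) / joules_per_ev  # a 1 angstrom photon
assert round(energy_ev) == 12398                # ~12.4 keV, as expected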
if beamLineGroup: + beamLines = beamlines_from_group(beamLineGroup) + query = query.filter(models.BLSession.beamLineName.in_(beamLines)) + + results = [r._asdict() for r in query.all()] + histogram_lookup = {} + for row in results: + if row["beamLineName"] not in histogram_lookup: + histogram_lookup[row["beamLineName"]] = {} + histogram_lookup[row["beamLineName"]][round(row["x"])] = row["y"] + + histograms = {} + for beamline in histogram_lookup.keys(): + histograms[beamline] = {} + for histogram_bin in range(param.start, param.end, param.bin_size): + if histogram_bin not in histogram_lookup[beamline]: + histograms[beamline][histogram_bin] = 0 + else: + histograms[beamline][histogram_bin] = histogram_lookup[beamline][ + histogram_bin + ] + + return schema.ParameterHistograms( + parameter=parameter, + unit=param.unit, + beamLines=[ + schema.ParameterHistogram( + beamLineName=beamline, + bin=list(histogram.keys()), + frequency=list(histogram.values()), + ) + for beamline, histogram in histograms.items() + ], + ) + + +def get_runs(skip: int, limit: int) -> Paged[schema.VRun]: + if not hasattr(models, "VRun"): + return Paged(total=0, results=[], skip=skip, limit=limit) + + query = db.session.query(models.VRun).order_by(models.VRun.startDate.desc()) + total = query.count() + query = page(query, skip=skip, limit=limit) + return Paged(total=total, results=query.all(), skip=skip, limit=limit) diff --git a/pyispyb/core/modules/userportalsync.py b/pyispyb/core/modules/userportalsync.py new file mode 100644 index 00000000..8dc53aeb --- /dev/null +++ b/pyispyb/core/modules/userportalsync.py @@ -0,0 +1,777 @@ +import copy +import logging +import time +from typing import Any +from datetime import datetime, timezone +from sqlalchemy.orm import Session, joinedload +from ispyb import models +from pyispyb.app.extensions.database.session import engine +from ..modules.persons import get_persons +from ..schemas import userportalsync as schema +from pyispyb.app.utils import timed +from pyispyb.core.modules.utils import encode_external_id, decode_external_id + + +logger = logging.getLogger("ispyb") + + +def sync_proposal(proposal: schema.UserPortalProposalSync) -> float: + """ + Initialize a transactional session to be able to rollback if something goes wrong + https://docs.sqlalchemy.org/en/14/orm/session_transaction.html + """ + start = time.time() + # Using the same sqlalchemy engine from pyispyb.app.extensions.database.session + # Creating a different Session since nothing should be committed until the end of the UserPortalSync process + # session.flush is used everywhere to get new auto-increment IDs + session = Session(engine) + + # Initialize UserPortalSync class + user_portal_sync = UserPortalSync(session) + + # Get full proposal dict + full_dict = proposal.dict() + # Get source entity dicts + source_proposal = full_dict.pop("proposal") + source_proposal_persons = source_proposal.pop("persons") + source_proposal_labcontacts = source_proposal.pop("labcontacts") + source_sessions = full_dict.pop("sessions") + source_proteins = full_dict.pop("proteins") + + try: + # Process the proposal Persons + user_portal_sync.process_persons(source_proposal_persons, "proposal") + # At this point all Person/Laboratory entities related to the proposal have been either updated or created + + # Process the Proposal + # The first Person in the list will be the one having the relation to the proposal table + user_portal_sync.process_proposal(source_proposal, source_proposal_persons[0]) + # At this point the Proposal entity 
has been either updated or created, and we have a proposalID + + # Process the LabContacts + user_portal_sync.process_labcontacts(source_proposal_labcontacts) + + # Process Sessions + user_portal_sync.process_sessions(source_sessions) + + # Process Proteins + user_portal_sync.process_proteins(source_proteins) + + # Session commit is only applied once at the end of the whole process to commit all changes + # https://stackoverflow.com/questions/65699977/fastapi-sqlalchemy-how-to-manage-transaction-session-and-multiple-commits + session.commit() + except Exception as e: + logger.debug(f"sync_proposal exception: {e}") + session.rollback() + raise Exception(e) + finally: + session.close() + took = round(time.time() - start, 3) + return took + + +class UserPortalSync(object): + def __init__(self, session): + self.session = session + self.proposalId = None + # List of persons to be checked/added to ProposalHasPerson table + self.proposal_person_ids = [] + # Lab contact Person ID to be processed + self.labcontact_person_id = None + # List of persons to be checked/added to Session_has_Person table + self.session_person_ids = [] + # Dict of sessionIds with related personIds to be checked/added to Session_has_Person table + self.session_ids = {} + + def get_ispyb_proposals(self): + proposals = self.session.query( + models.Proposal.proposalId, + models.Proposal.title, + models.Proposal.proposalCode, + models.Proposal.proposalNumber, + models.Proposal.proposalType, + # Decode binary 16 externalId field so it can be compared against Integer + decode_external_id(models.Proposal.externalId).label("externalId"), + ) + ispyb_proposals = [p._asdict() for p in proposals.all()] + return ispyb_proposals + + def get_ispyb_persons(self): + persons = self.session.query( + models.Person.personId, + models.Person.givenName, + models.Person.familyName, + models.Person.emailAddress, + models.Person.phoneNumber, + models.Person.login, + decode_external_id(models.Person.externalId).label("externalId"), + ) + ispyb_persons = [p._asdict() for p in persons.all()] + return ispyb_persons + + def get_ispyb_laboratories(self): + laboratories = self.session.query( + models.Laboratory.laboratoryId, + models.Laboratory.laboratoryExtPk, + models.Laboratory.name, + models.Laboratory.address, + models.Laboratory.city, + models.Laboratory.country, + ) + ispyb_laboratories = [p._asdict() for p in laboratories.all()] + return ispyb_laboratories + + @timed + def process_proposal( + self, sourceProposal: dict[str, Any], sourcePerson: dict[str, Any] + ): + # First check to update proposal existing in the DB + to_add_proposal = self.check_proposal(sourceProposal, sourcePerson) + if to_add_proposal: + self.add_proposal(sourceProposal, sourcePerson) + # Second add/update here the relations between proposalId and personId (proposalHasPerson) + self.process_proposal_has_person() + + def process_proposal_has_person(self): + # First check the entry in ProposalHasPerson does not exist already in DB + # if that is the case create it + for personId in self.proposal_person_ids: + proposal_person = ( + self.session.query(models.ProposalHasPerson) + .filter(models.ProposalHasPerson.personId == personId) + .filter(models.ProposalHasPerson.proposalId == self.proposalId) + .first() + ) + if not proposal_person: + # Add the relation + proposal_has_person = models.ProposalHasPerson( + personId=personId, proposalId=self.proposalId + ) + self.session.add(proposal_has_person) + # Flush to get the new proposalId + self.session.flush() + logger.debug( + 
f"ProposalHasPerson with proposalId {self.proposalId} and {personId} added in DB" + ) + else: + logger.debug( + f"ProposalHasPerson with proposalId {self.proposalId} and {personId} already in DB" + ) + + def check_proposal( + self, sourceProposal: dict[str, Any], sourceProposer: dict[str, Any] + ) -> dict[str, Any]: + """Updates the proposal if it needed and exists on the DB""" + target_proposals = self.get_ispyb_proposals() + to_add_proposal = [] + for tar in target_proposals: + # Iterate over all the target proposals + # Check if the Proposal already exist in the DB + # by comparing against the proposalCode and proposalNumber or externalId + if ( + ( + tar["externalId"] is not None + and tar["externalId"] == sourceProposal["externalId"] + ) + or ( + tar["proposalCode"] is not None + and tar["proposalCode"] == sourceProposal["proposalCode"] + ) + and ( + tar["proposalNumber"] is not None + and tar["proposalNumber"] == sourceProposal["proposalNumber"] + ) + ): + update = False + logger.debug( + f"Proposal with code {tar['proposalCode']}{tar['proposalNumber']} or " + f"externalId {tar['externalId']} found in DB with proposalId {tar['proposalId']}" + ) + # Set the proposalId to be used to link other entities (sessions, proteins, etc) + self.proposalId = tar["proposalId"] + # Check which Proposal values should we inspect to see if they changed + for k in ["title"]: + if tar[k] != sourceProposal[k]: + logger.debug( + f"Field {k} to update for proposal {tar['proposalId']}" + ) + update = True + + if update: + # Update the existing proposal with new values + logger.debug(f"Updating proposal {tar['proposalId']}") + self.update_proposal(tar["proposalId"], sourceProposal) + break + else: + to_add_proposal.append(sourceProposal) + return to_add_proposal + + def add_proposal( + self, sourceProposal: dict[str, Any], sourceProposer: dict[str, Any] + ): + """Add a new proposal.""" + # First get the person that will be associated to the proposal + pers = ( + self.session.query(models.Person) + .filter(models.Person.login == sourceProposer["login"]) + .first() + ) + # Taken from https://gitlab.esrf.fr/ui/replicator/-/blob/master/replicator/impl/ispyb.py#L338 + if "externalId" in sourceProposal: + if sourceProposal["externalId"] is not None: + # Encode the externalId + sourceProposal["externalId"] = encode_external_id( + sourceProposal["externalId"] + ) + proposal = models.Proposal(**sourceProposal) + proposal.personId = pers.personId + logger.debug( + f"Proposal with code {proposal.proposalCode}{proposal.proposalNumber}" + f" does not exist. 
Creating it" + ) + self.session.add(proposal) + # Flush to get the new proposalId + self.session.flush() + # Set the proposalId to be used to link other entities (sessions, proteins, etc) + self.proposalId = proposal.proposalId + return proposal.proposalId + + def update_proposal( + self, proposalId: int, sourceProposal: dict[str, Any] + ) -> models.Proposal: + """Updates a Proposal entity.""" + prop = ( + self.session.query(models.Proposal) + .filter(models.Proposal.proposalId == proposalId) + .first() + ) + if prop: + prop.title = sourceProposal["title"] + # Do not update the proposal until the commit is done + self.session.flush() + return prop + + def add_person( + self, + sourcePerson: dict[str, Any], + laboratoryId: int = None, + person_type: str = None, + ) -> int: + """Add a new person together with relation to a laboratory if passed.""" + # Make a deep copy to session_options original values from self.session_ids, so they are not removed + copy_source_person = copy.deepcopy(sourcePerson) + if person_type == "session": + if ( + copy_source_person["session_options"] + or copy_source_person["session_options"] is None + ): + copy_source_person.pop("session_options") + + if laboratoryId: + copy_source_person["laboratoryId"] = laboratoryId + + if "externalId" in sourcePerson: + if sourcePerson["externalId"] is not None: + # Encode the externalId + copy_source_person["externalId"] = encode_external_id( + sourcePerson["externalId"] + ) + person = models.Person(**copy_source_person) + self.session.add(person) + # Flush to get the new personId + self.session.flush() + # Add the personId to a list to be used later to create the relation to proposalHasPerson/Session_has_Person + if person_type == "proposal": + self.proposal_person_ids.append(person.personId) + elif person_type == "session": + person_ids = dict() + person_ids["personId"] = person.personId + person_ids["login"] = person.login + person_ids["externalId"] = sourcePerson["externalId"] + self.session_person_ids.append(person_ids) + elif person_type == "labcontact": + self.labcontact_person_id = person.personId + return person.personId + + def update_person( + self, personId: int, sourcePerson: dict[str, Any] + ) -> models.Person: + """Updates a Person entity.""" + pers = ( + self.session.query(models.Person) + .options(joinedload(models.Person.Laboratory)) + .filter(models.Person.personId == personId) + .first() + ) + + if pers: + pers.givenName = sourcePerson["givenName"] + pers.familyName = sourcePerson["familyName"] + pers.emailAddress = sourcePerson["emailAddress"] + pers.phoneNumber = sourcePerson["phoneNumber"] + # Do not update the person until the commit is done + self.session.flush() + return pers + + def add_laboratory(self, sourceLaboratory: dict[str, Any]) -> int: + """Add a new laboratory.""" + laboratory = models.Laboratory(**sourceLaboratory) + self.session.add(laboratory) + # Flush to get the new laboratoryId + self.session.flush() + return laboratory.laboratoryId + + def update_laboratory( + self, laboratoryId: int, sourceLaboratory: dict[str, Any] + ) -> models.Laboratory: + """Updates a Laboratory entity.""" + lab = ( + self.session.query(models.Laboratory) + .filter(models.Laboratory.laboratoryId == laboratoryId) + .first() + ) + if lab: + lab.name = sourceLaboratory["name"] + lab.address = sourceLaboratory["address"] + lab.city = sourceLaboratory["city"] + lab.country = sourceLaboratory["country"] + # Do not update the laboratory until the commit is done + self.session.flush() + return lab + + @timed + def 
+    @timed
+    def process_persons(
+        self, sourcePersons: list[dict[str, Any]], person_type: str = None
+    ):
+        """Process the creation or update of Persons"""
+        # Make sure the list of sourcePersons has unique logins to avoid duplicates
+        # https://stackoverflow.com/questions/11092511/list-of-unique-dictionaries
+        sourcePersons = list({v["login"]: v for v in sourcePersons}.values())
+        # First update the persons that already exist in the DB
+        to_add_persons = self.check_persons(sourcePersons, person_type)
+        # Second add the new persons
+        if to_add_persons:
+            logger.debug(
+                f"There are {len(to_add_persons)} person/s to add for type {person_type}"
+            )
+            self.create_persons(to_add_persons, person_type)
+
+    def create_persons(
+        self, sourcePersons: list[dict[str, Any]], person_type: str = None
+    ):
+        """Process the creation of Persons"""
+        for new_person in sourcePersons:
+            # Add a new person
+            laboratory_id = None
+            src_lab = new_person.pop("laboratory")
+            if src_lab:
+                target_laboratories = self.get_ispyb_laboratories()
+                # Check if the laboratory attached to the Person is in ISPyB
+                for tar_lab in target_laboratories:
+                    if (
+                        tar_lab["laboratoryExtPk"] is not None
+                        and tar_lab["laboratoryExtPk"] == src_lab["laboratoryExtPk"]
+                    ) or (
+                        tar_lab["name"] == src_lab["name"]
+                        and tar_lab["city"] == src_lab["city"]
+                        and tar_lab["country"] == src_lab["country"]
+                    ):
+                        update = False
+                        laboratory_id = tar_lab["laboratoryId"]
+                        # Check which Laboratory values we should inspect to see if they changed
+                        for k in ["name", "address", "city", "country"]:
+                            if tar_lab[k] != src_lab[k]:
+                                logger.debug(
+                                    f"Field {k} to update for laboratory {laboratory_id}"
+                                )
+                                update = True
+
+                        if update:
+                            # Update the existing laboratory with new values
+                            logger.debug(
+                                f"Updating laboratory {tar_lab['laboratoryId']}"
+                            )
+                            self.update_laboratory(tar_lab["laboratoryId"], src_lab)
+
+                        break
+
+                else:
+                    # New laboratory to add if not found in ISPyB DB
+                    laboratory_id = self.add_laboratory(src_lab)
+
+            self.add_person(new_person, laboratory_id, person_type)
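# Illustrative sketch (editor's aside, not part of this changeset):
# process_persons() above de-duplicates the incoming list by login with a
# dict comprehension, where the last entry for a login wins and insertion
# order is kept. The same trick in isolation, with invented values:
people = [{"login": "ada", "n": 1}, {"login": "bob", "n": 2}, {"login": "ada", "n": 3}]
unique = list({p["login"]: p for p in people}.values())
assert unique == [{"login": "ada", "n": 3}, {"login": "bob", "n": 2}]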
logger.debug(f"Updating person {tar['personId']}") + self.update_person(tar["personId"], src) + # Check if the person laboratory has changed + self.update_person_laboratory(tar["personId"], src) + break + else: + # New persons to be added + to_add_persons.append(src) + return to_add_persons + + def update_person_laboratory(self, personId: int, sourcePerson: dict[str, Any]): + """Updates person relation to a laboratory if needed. + Logic is based on the laboratoryExtPk (User Portal laboratory/institution entity ID) + """ + person = ( + self.session.query(models.Person) + .options(joinedload(models.Person.Laboratory)) + .filter(models.Person.personId == personId) + .first() + ) + src_lab = sourcePerson.pop("laboratory") + # If the person has a Laboratory + if src_lab: + try: + # Check if the Person has a related laboratory already + person_laboratoryExtPk = person.Laboratory.laboratoryExtPk + except AttributeError: + person_laboratoryExtPk = None + + # If the relation to a laboratory changed + if person_laboratoryExtPk != src_lab["laboratoryExtPk"]: + logger.debug(f"Updating Laboratory relation for {personId}") + # Check if source laboratoryExtPk exists already in ISPyB + laboratory = ( + self.session.query(models.Laboratory) + .filter( + models.Laboratory.laboratoryExtPk == src_lab["laboratoryExtPk"] + ) + .first() + ) + if laboratory: + # Update person link to laboratory + logger.debug(f"Updating LaboratoryId for {personId}") + person.laboratoryId = laboratory.laboratoryId + self.session.flush() + else: + # Add a new laboratory and link it to person + logger.debug( + f"Creating and linking a new Laboratory for {personId}" + ) + laboratory_id = self.add_laboratory(src_lab) + person.laboratoryId = laboratory_id + self.session.flush() + else: + # The relation to laboratory has not changed but other laboratory fields might have changed + # Check which Laboratory values should we inspect to see if they changed + for k in ["name", "address", "city", "country"]: + if getattr(person.Laboratory, k) != src_lab[k]: + logger.debug( + f"Field {k} to update for Laboratory with laboratoryId {person.Laboratory.laboratoryId}" + ) + self.update_laboratory(person.Laboratory.laboratoryId, src_lab) + + @timed + def process_labcontacts(self, sourceLabContacts: dict[str, Any]): + """Process the creation or update of LabContacts""" + if sourceLabContacts: + self.check_lab_contacts(sourceLabContacts) + + def check_lab_contacts(self, sourceLabContacts: dict[str, Any]): + for lab_contact in sourceLabContacts: + # Check first if lab contact is already on DB + lb = ( + self.session.query(models.LabContact) + .filter(models.LabContact.proposalId == self.proposalId) + .filter(models.LabContact.cardName == lab_contact["cardName"]) + .first() + ) + if not lb: + # Get the lab contact person + lab_contact_person = lab_contact.pop("person") + # First check to create/update persons existing in the DB + to_add_person = self.check_persons([lab_contact_person], "labcontact") + if to_add_person: + self.create_persons(to_add_person, "labcontact") + # At this point labcontact_person_id has been populated either by + # the creation of a new person or by an existing person entity + # Add a new LabContact a link it to Person and Proposal + logger.debug( + f"Creating and linking a new LabContact for personID {self.labcontact_person_id}" + ) + self.add_lab_contact(self.labcontact_person_id, lab_contact) + + def add_lab_contact(self, personId: int, sourceLabContact: dict[str, Any]): + """Add a new Lab Contact.""" + lab_contact = 
+    def add_lab_contact(self, personId: int, sourceLabContact: dict[str, Any]):
+        """Add a new Lab Contact."""
+        lab_contact = models.LabContact(**sourceLabContact)
+        lab_contact.proposalId = self.proposalId
+        lab_contact.personId = personId
+        self.session.add(lab_contact)
+        # Flush to get the new labContactId
+        self.session.flush()
+        return lab_contact.labContactId
+
+    @timed
+    def process_proteins(self, sourceProteins: list[dict[str, Any]]):
+        """Process the creation or update of Proteins"""
+        if sourceProteins:
+            # Check if the proteins exist in the DB
+            self.check_proteins(sourceProteins)
+
+    def check_proteins(self, sourceProteins: list[dict[str, Any]]):
+        """Check Protein entities to see if they already exist; create them if not."""
+        for protein in sourceProteins:
+            externalId = None
+            if protein["externalId"] is not None:
+                logger.debug("Finding protein by externalId")
+                externalId = protein["externalId"]
+                protein["externalId"] = encode_external_id(protein["externalId"])
+                prot = (
+                    self.session.query(models.Protein)
+                    .filter(models.Protein.proposalId == self.proposalId)
+                    .filter(models.Protein.externalId == protein["externalId"])
+                    .first()
+                )
+            else:
+                logger.debug("Finding protein by acronym")
+                prot = (
+                    self.session.query(models.Protein)
+                    .filter(models.Protein.proposalId == self.proposalId)
+                    .filter(models.Protein.acronym == protein["acronym"])
+                    .first()
+                )
+            if not prot:
+                # If no macromolecule matches the externalId (or acronym)
+                # and proposalId, it will be created
+                logger.debug(
+                    f"Protein with externalId {externalId} or acronym {protein['acronym']}"
+                    f" for Proposal {self.proposalId} does not exist. Creating it"
+                )
+                self.add_protein(protein)
+            else:
+                # If a Protein with the externalId or acronym already exists, update it
+                logger.debug(
+                    f"Protein with externalId {externalId} or acronym {protein['acronym']} found in DB for proposalId {self.proposalId}"
+                )
+                del protein["person"]
+                if externalId:
+                    logger.debug("Updating protein by externalId")
+                    self.session.query(models.Protein).filter(
+                        models.Protein.proposalId == self.proposalId
+                    ).filter(models.Protein.externalId == protein["externalId"]).update(
+                        protein
+                    )
+                else:
+                    logger.debug("Updating protein by acronym")
+                    self.session.query(models.Protein).filter(
+                        models.Protein.proposalId == self.proposalId
+                    ).filter(models.Protein.acronym == protein["acronym"]).update(
+                        protein
+                    )
+                self.session.flush()
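
check_proteins above compares externalId in its encoded form; encode_external_id and its SQL counterpart decode_external_id are added in pyispyb/core/modules/utils.py further down in this diff. The round trip, with a made-up id:

# encode_external_id stores the integer id as a 16-byte big-endian blob;
# decode_external_id reverses this on the SQL side with CONV(HEX(column), 16, 10).
external_id = 1234567890
encoded = external_id.to_bytes(16, byteorder="big")
assert len(encoded) == 16
assert int.from_bytes(encoded, byteorder="big") == external_id
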
+    def add_protein(self, sourceProtein) -> int:
+        """Add a new Protein.
+
+        Here we assume the person related to the protein will always be a proposal
+        participant, meaning the person is already in the DB or pending commit.
+        """
+        persons = get_persons(
+            skip=0,
+            limit=10,
+            login=sourceProtein["person"]["login"],
+            withLaboratory=False,
+        )
+
+        if persons.total > 0:
+            pers = persons.results[0]
+            del sourceProtein["person"]
+            protein = models.Protein(**sourceProtein)
+            protein.proposalId = self.proposalId
+            protein.personId = pers.personId
+            self.session.add(protein)
+            # Flush to get the new proteinId
+            self.session.flush()
+            return protein.proteinId
+
+    @timed
+    def process_sessions(self, sourceSessions: list[dict[str, Any]]):
+        """Process the creation or update of Sessions"""
+        if sourceSessions:
+            # First process the sessions
+            self.check_sessions(sourceSessions)
+            # Second process the relation between sessions and persons (Session_has_Person)
+            self.process_session_has_person()
+
+    def process_session_has_person(self):
+        # Iterate over all the session_ids
+        logger.debug("Executing process_session_has_person")
+        for session_id in self.session_ids:
+            self.session_person_ids = []
+            # Create/Update new persons if needed first
+            logger.debug("Checking persons for session id " + str(session_id))
+            self.process_persons(self.session_ids[session_id], "session")
+            # Check whether the Session_has_Person entry already exists in the DB
+            for dict_person in self.session_person_ids:
+                session_person = (
+                    self.session.query(models.SessionHasPerson)
+                    .filter(models.SessionHasPerson.sessionId == session_id)
+                    .filter(models.SessionHasPerson.personId == dict_person["personId"])
+                    .first()
+                )
+
+                role = None
+                remote = 0
+                person_found_in_session = None
+                for p in self.session_ids[session_id]:
+                    if (
+                        p["login"] == dict_person["login"]
+                        or p["externalId"] == dict_person["externalId"]
+                    ):
+                        person_found_in_session = p
+                        break
+
+                try:
+                    # Get the session options (role, remote)
+                    session_options = person_found_in_session["session_options"]
+                    if session_options:
+                        if session_options["role"]:
+                            role = session_options["role"]
+                        if session_options["remote"]:
+                            remote = session_options["remote"]
+                except (KeyError, TypeError) as e:
+                    # KeyError: no session_options for this person
+                    # TypeError: no matching person was found above
+                    logger.debug(
+                        f"session_options not found for login {dict_person['login']}: {e}"
+                    )
+
+                if not session_person:
+                    # Add the relation between sessionId and personId
+                    session_has_person = models.SessionHasPerson(
+                        sessionId=session_id,
+                        personId=dict_person["personId"],
+                        role=role,
+                        remote=remote,
+                    )
+                    self.session.add(session_has_person)
+                    self.session.flush()
+                    logger.debug(
+                        f"Session_has_Person with sessionId {session_id} and personId {dict_person['personId']} added in DB"
+                    )
+                else:
+                    # Update the Session_has_Person relation with the JSON values
+                    self.session.query(models.SessionHasPerson).filter(
+                        models.SessionHasPerson.personId == dict_person["personId"]
+                    ).filter(models.SessionHasPerson.sessionId == session_id).update(
+                        {"role": role, "remote": remote}
+                    )
+                    self.session.flush()
+                    logger.debug(
+                        f"Session_has_Person with sessionId {session_id} and personId {dict_person['personId']} updated in DB"
+                    )
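
The role/remote extraction in process_session_has_person above has to tolerate both a missing session_options key and the case where no matching person was found. An equivalent, exception-free lookup written with dict.get defaults, as a sketch only (the data shape is taken from the code above):

# Sketch: same defaults (role=None, remote=0) without relying on exceptions.
person = {"login": "jdoe", "session_options": {"role": "Co-Investigator", "remote": 1}}
options = (person or {}).get("session_options") or {}
role = options.get("role") or None
remote = options.get("remote") or 0
assert (role, remote) == ("Co-Investigator", 1)
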
+    def check_sessions(self, sourceSessions):
+        """Check Session entities to see if they already exist; create them if not."""
+        for session in sourceSessions:
+            # Get the session persons
+            session_persons = session.pop("persons")
+            externalId = None
+            # If externalId is present encode it for comparison in the DB
+            if session["externalId"] is not None:
+                externalId = session["externalId"]
+                session["externalId"] = encode_external_id(session["externalId"])
+                # Check if the session is already in the DB by using externalId.
+                sess = (
+                    self.session.query(models.BLSession)
+                    .filter(models.BLSession.proposalId == self.proposalId)
+                    .filter(models.BLSession.externalId == session["externalId"])
+                    .first()
+                )
+            else:
+                # Check if the session is already in the DB by using expSessionPk.
+                # The expSessionPk field might be deprecated later.
+                sess = (
+                    self.session.query(models.BLSession)
+                    .filter(models.BLSession.proposalId == self.proposalId)
+                    .filter(models.BLSession.expSessionPk == session["expSessionPk"])
+                    .first()
+                )
+            if not sess:
+                # Create the session if it does not exist
+                logger.debug(
+                    f"Session with externalId {externalId} or expSessionPk {session['expSessionPk']}"
+                    f" for Proposal {self.proposalId} does not exist. Creating it"
+                )
+                sessionId = self.add_session(session)
+                # Set the new session id with the related session persons
+                self.session_ids[sessionId] = session_persons
+            else:
+                # If the session already exists we just update all the values
+                logger.debug(
+                    f"Session with externalId {externalId} or expSessionPk {session['expSessionPk']}"
+                    f" found in DB for proposalId {self.proposalId}"
+                )
+                if externalId:
+                    self.session.query(models.BLSession).filter(
+                        models.BLSession.proposalId == self.proposalId
+                    ).filter(
+                        models.BLSession.externalId == session["externalId"]
+                    ).update(session)
+                else:
+                    self.session.query(models.BLSession).filter(
+                        models.BLSession.proposalId == self.proposalId
+                    ).filter(
+                        models.BLSession.expSessionPk == session["expSessionPk"]
+                    ).update(session)
+                self.session.flush()
+                # Set the session id with the related session persons
+                self.session_ids[sess.sessionId] = session_persons
+
+    def add_session(self, sourceSession) -> int:
+        """Add a new Session"""
+        if not sourceSession["lastUpdate"]:
+            # When adding a new session, if lastUpdate is not present, set it to
+            # now() in UTC. Needed for backward compatibility with the Java API,
+            # since it does not like lastUpdate = 0000-00-00 00:00:00
+            sourceSession["lastUpdate"] = datetime.now(timezone.utc)
+        # externalId is encoded already at this point if present
+        session = models.BLSession(**sourceSession)
+        session.proposalId = self.proposalId
+        self.session.add(session)
+        # Flush to get the new sessionId
+        self.session.flush()
+        return session.sessionId
diff --git a/pyispyb/core/modules/utils.py b/pyispyb/core/modules/utils.py
new file mode 100644
index 00000000..c707e334
--- /dev/null
+++ b/pyispyb/core/modules/utils.py
@@ -0,0 +1,46 @@
+import os
+
+from pint import UnitRegistry
+from sqlalchemy.sql.expression import cast
+from sqlalchemy import (
+    func,
+    Integer,
+)
+
+ureg = UnitRegistry()
+
+
+# Taken from https://gitlab.esrf.fr/ui/replicator/-/blob/master/replicator/impl/ispyb.py#L116
+def decode_external_id(column):
+    return cast(func.CONV(func.HEX(column), 16, 10), Integer)
+
+
+def encode_external_id(column):
+    return column.to_bytes(16, byteorder="big")
+
+
+def to_energy(wavelength: float, round_value: bool = True) -> float:
+    """Convert from wavelength in Angstroms to energy in eV"""
+    if wavelength is None:
+        return None
+
+    energy = (
+        ((ureg.planck_constant * ureg.c) / (wavelength * ureg.angstrom))
+        .to(ureg.eV)
+        .magnitude
+    )
+    return round(energy) if round_value else energy
+
+
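to_energy above evaluates E = hc/Îģ with pint; for Îģ in Ångstrom and E in eV the constant hc works out to about 12398.4 eV¡Å, so a quick sanity check without pint is:

# E[eV] ≈ 12398.42 / Îģ[Å]; hc expressed in eV¡Å.
HC_EV_ANGSTROM = 12398.42


def to_energy_approx(wavelength: float) -> float:
    return HC_EV_ANGSTROM / wavelength


assert round(to_energy_approx(0.9762)) == 12701  # a typical MX wavelength
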
+def get_last_line(file: str) -> str:
+    """Get the last line of the file
+
+    https://stackoverflow.com/questions/46258499/how-to-read-the-last-line-of-a-file-in-python
+    """
+    with open(file, "rb") as f:
+        try:
+            f.seek(-2, os.SEEK_END)
+            while f.read(1) != b"\n":
+                f.seek(-2, os.SEEK_CUR)
+        except OSError:
+            f.seek(0)
+        return f.readline().decode().strip()
diff --git a/pyispyb/core/routes/__init__.py b/pyispyb/core/routes/__init__.py
index d9fadab4..519ca4bb 100644
--- a/pyispyb/core/routes/__init__.py
+++ b/pyispyb/core/routes/__init__.py
@@ -19,21 +19,35 @@
 along with py-ispyb. If not, see .
 """
 
+import logging
 import os
 from importlib import import_module
-
+from fastapi import FastAPI
 
 __license__ = "LGPLv3+"
 
+logger = logging.getLogger(__name__)
+
 
-def init_app(app, **kwargs):
-    """Inits routes
-
-    Args:
-        app ([type]): [description]
-    """
+def init_app(app: FastAPI, prefix: str = None, **kwargs):
+    """Init app routes."""
     for module_name in os.listdir(os.path.dirname(__file__)):
         if not module_name.startswith("__") and module_name.endswith(".py"):
-            module = import_module(".%s" % module_name[:-3], package=__name__)
-            if hasattr(module, "init_app"):
-                module.init_app(app, **kwargs)
+            try:
+                logger.info(f"Importing {module_name}")
+                module = import_module(".%s" % module_name[:-3], package=__name__)
+                if hasattr(module, "router"):
+                    app.include_router(module.router, prefix=prefix)
+            except Exception:
+                logger.exception(f"Could not import module `{module_name}`")
+
+    from .legacy import init_app
+
+    init_app(app, prefix=prefix)
+
+    from .admin import init_app
+
+    init_app(app, prefix=prefix)
+
+    from .webservices import init_app
+
+    init_app(app, prefix=prefix)
diff --git a/pyispyb/core/routes/admin/__init__.py b/pyispyb/core/routes/admin/__init__.py
new file mode 100644
index 00000000..5e353191
--- /dev/null
+++ b/pyispyb/core/routes/admin/__init__.py
@@ -0,0 +1,22 @@
+import os
+import logging
+from importlib import import_module
+from fastapi import FastAPI
+
+from .base import router
+
+logger = logging.getLogger(__name__)
+
+
+def init_app(app: FastAPI, prefix: str = None, **kwargs):
+    for module_name in os.listdir(os.path.dirname(__file__)):
+        if not module_name.startswith("__") and module_name.endswith(".py"):
+            try:
+                logger.info(f"Importing {module_name}")
+                module = import_module(".%s" % module_name[:-3], package=__name__)
+                if hasattr(module, "router"):
+                    app.include_router(module.router, prefix=prefix)
+            except Exception:
+                logger.exception(f"Could not import module `{module_name}`")
+
+    app.include_router(router, prefix=prefix)
diff --git a/pyispyb/core/routes/admin/activity.py b/pyispyb/core/routes/admin/activity.py
new file mode 100644
index 00000000..e56f897e
--- /dev/null
+++ b/pyispyb/core/routes/admin/activity.py
@@ -0,0 +1,25 @@
+from typing import Optional
+
+from fastapi import Depends
+
+from ispyb import models
+
+from ....dependencies import pagination, permission
+from ....app.extensions.database.utils import Paged
+from ....core.schemas.utils import paginated
+from ...modules.admin import activity as crud
+from ...schemas.admin.activity import AdminActivity, ActionType
+from .base import router
+
+
+@router.get(
+    "/activity",
+    response_model=paginated(AdminActivity),
+)
+def get_activity(
+    page: dict[str, int] = Depends(pagination),
+    action_type: Optional[ActionType] = None,
+    depends=Depends(permission("view_activity")),
+) -> Paged[models.AdminActivity]:
+    """Get a list of admin activity"""
+    return crud.get_activity(action_type=action_type, **page)
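
The /admin/activity route above combines the shared pagination dependency with an optional ActionType filter. A hedged client-side sketch; the base URL, token, pagination parameter names and filter value are all placeholders, since the pagination dependency and the ActionType enum are defined outside this diff:

import httpx  # any HTTP client would do

resp = httpx.get(
    "http://localhost:8000/admin/activity",  # prefix depends on deployment config
    params={"skip": 0, "limit": 25},  # parameter names assumed from `pagination`
    headers={"Authorization": "Bearer <token>"},
)
resp.raise_for_status()
activity = resp.json()  # a paginated(AdminActivity) payload
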
diff --git a/pyispyb/core/routes/admin/base.py b/pyispyb/core/routes/admin/base.py
new file mode 100644
index 00000000..033b4a49
--- /dev/null
+++ b/pyispyb/core/routes/admin/base.py
@@ -0,0 +1,3 @@
+from ....app.base import AuthenticatedAPIRouter
+
+router = AuthenticatedAPIRouter(prefix="/admin", tags=["Admin"])
diff --git a/pyispyb/core/routes/admin/groups.py b/pyispyb/core/routes/admin/groups.py
new file mode 100644
index 00000000..2f4ea3a1
--- /dev/null
+++ b/pyispyb/core/routes/admin/groups.py
@@ -0,0 +1,215 @@
+from fastapi import Depends, HTTPException, status
+
+from ....dependencies import pagination, permission
+from ....core.schemas.utils import paginated
+from ...modules.admin import groups as crud
+from ...schemas.admin import groups as schema
+from ...schemas.utils import make_optional
+from .... import filters
+from .base import router
+
+
+# Groups
+@router.get(
+    "/groups",
+    response_model=paginated(schema.UserGroup),
+)
+def get_groups(
+    userGroupId: int = Depends(filters.userGroupId),
+    page: dict[str, int] = Depends(pagination),
+    depends=Depends(permission("manage_groups")),
+):
+    """Get a list of UserGroups"""
+    return crud.get_groups(userGroupId=userGroupId, **page)
+
+
+@router.post(
+    "/groups",
+    response_model=schema.UserGroup,
+)
+def add_group(group: schema.NewUserGroup, depends=Depends(permission("manage_groups"))):
+    """Add a new UserGroup"""
+    try:
+        return crud.add_group(group)
+    except Exception as e:
+        raise HTTPException(
+            status_code=400, detail=f"Could not add new group: `{str(e)}`"
+        )
+
+
+@router.patch(
+    "/groups/{userGroupId}",
+    response_model=schema.UserGroup,
+    responses={
+        404: {"description": "No such group"},
+        400: {"description": "Could not update group"},
+    },
+)
+def update_group(
+    userGroupId: int,
+    userGroup: make_optional(schema.NewUserGroup),
+    depends=Depends(permission("manage_groups")),
+):
+    """Update a UserGroup"""
+    try:
+        return crud.update_group(userGroupId, userGroup)
+    except FileNotFoundError:
+        raise HTTPException(status_code=404, detail=f"No such group: `{userGroupId}`")
+    except Exception as e:
+        raise HTTPException(
+            status_code=400, detail=f"Could not update group: `{str(e)}`"
+        )
+
+
+# Group Permissions
+@router.post(
+    "/groups/{userGroupId}/permission",
+    status_code=status.HTTP_204_NO_CONTENT,
+    responses={
+        204: {"description": "Added permission to group"},
+        400: {"description": "Could not add permission to group"},
+    },
+)
+def add_permission_to_group(
+    userGroupId: int,
+    permission_info: schema.NewUserGroupPermission,
+    depends=Depends(permission("manage_groups")),
+):
+    """Add a Permission to a UserGroup"""
+    try:
+        crud.add_permission_to_group(permission_info.permissionId, userGroupId)
+        return status.HTTP_204_NO_CONTENT
+    except Exception as e:
+        raise HTTPException(
+            status_code=400,
+            detail=f"Could not add permission `{permission_info.permissionId}` to group `{userGroupId}`: `{str(e)}`",
+        )
+
+
+@router.delete(
+    "/groups/{userGroupId}/permission/{permissionId}",
+    status_code=status.HTTP_204_NO_CONTENT,
+    responses={
+        204: {"description": "Permission removed from group"},
+        400: {"description": "Could not remove permission from group"},
+    },
+)
+def remove_permission_from_group(
+    userGroupId: int, permissionId: int, depends=Depends(permission("manage_groups"))
+):
+    """Remove a Permission from a UserGroup"""
+    try:
+        crud.remove_permission_from_group(permissionId, userGroupId)
+        return status.HTTP_204_NO_CONTENT
+    except Exception as e:
+        raise HTTPException(
+            status_code=400,
+            detail=f"Could not remove permission `{permissionId}` from group `{userGroupId}`: `{str(e)}`",
+        )
+
+
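update_group above takes make_optional(schema.NewUserGroup) so a PATCH body may carry any subset of the group's fields. make_optional itself lives in core/schemas/utils.py, outside this diff; one plausible pydantic v1 implementation of such a helper, as a sketch only (the helper name is hypothetical):

from typing import Optional

from pydantic import BaseModel, create_model


def make_all_optional(model: type[BaseModel]) -> type[BaseModel]:
    # Sketch: clone `model` with every field made Optional and defaulted to None,
    # so validation accepts partial PATCH payloads (pydantic v1 API).
    fields = {
        name: (Optional[field.outer_type_], None)
        for name, field in model.__fields__.items()
    }
    return create_model(f"Optional{model.__name__}", **fields)
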
+# Group People
+@router.post(
+    "/groups/{userGroupId}/person",
+    status_code=status.HTTP_204_NO_CONTENT,
+    responses={
+        204: {"description": "Added person to group"},
+        400: {"description": "Could not add person to group"},
+    },
+)
+def add_person_to_group(
+    userGroupId: int,
+    person_info: schema.NewUserGroupPerson,
+    depends=Depends(permission("manage_groups")),
+):
+    """Add a Person to a UserGroup"""
+    try:
+        crud.add_person_to_group(person_info.personId, userGroupId)
+        return status.HTTP_204_NO_CONTENT
+    except Exception as e:
+        raise HTTPException(
+            status_code=400,
+            detail=f"Could not add person `{person_info.personId}` to group `{userGroupId}`: `{str(e)}`",
+        )
+
+
+@router.delete(
+    "/groups/{userGroupId}/person/{personId}",
+    status_code=status.HTTP_204_NO_CONTENT,
+    responses={
+        204: {"description": "Person removed from group"},
+        400: {"description": "Could not remove person from group"},
+    },
+)
+def remove_person_from_group(
+    userGroupId: int, personId: int, depends=Depends(permission("manage_groups"))
+):
+    """Remove a Person from a UserGroup"""
+    try:
+        crud.remove_person_from_group(personId, userGroupId)
+        return status.HTTP_204_NO_CONTENT
+    except Exception as e:
+        raise HTTPException(
+            status_code=400,
+            detail=f"Could not remove person `{personId}` from group `{userGroupId}`: `{str(e)}`",
+        )
+
+
+# Permissions
+@router.get(
+    "/permissions",
+    response_model=paginated(schema.Permission),
+)
+def get_permissions(
+    permissionId: int = Depends(filters.permissionId),
+    userGroupId: int = Depends(filters.userGroupId),
+    search: str = Depends(filters.search),
+    page: dict[str, int] = Depends(pagination),
+    depends=Depends(permission("manage_perms")),
+):
+    """Get a list of Permissions"""
+    return crud.get_permissions(
+        permissionId=permissionId, userGroupId=userGroupId, search=search, **page
+    )
+
+
+@router.post(
+    "/permissions",
+    response_model=schema.Permission,
+)
+def add_permission(
+    permission: schema.NewPermission, depends=Depends(permission("manage_perms"))
+):
+    """Add a new Permission"""
+    try:
+        return crud.add_permission(permission)
+    except Exception as e:
+        raise HTTPException(
+            status_code=400, detail=f"Could not add new permission: `{str(e)}`"
+        )
+
+
+@router.patch(
+    "/permissions/{permissionId}",
+    response_model=schema.Permission,
+    responses={
+        404: {"description": "No such permission"},
+        400: {"description": "Could not update permission"},
+    },
+)
+def update_permission(
+    permissionId: int,
+    permission: make_optional(schema.NewPermission),
+    depends=Depends(permission("manage_perms")),
+):
+    """Update a Permission"""
+    try:
+        return crud.update_permission(permissionId, permission)
+    except FileNotFoundError:
+        raise HTTPException(
+            status_code=404, detail=f"No such permission: `{permissionId}`"
+        )
+    except Exception as e:
+        raise HTTPException(
+            status_code=400, detail=f"Could not update permission: `{str(e)}`"
+        )
diff --git a/pyispyb/core/routes/auto_proc.py b/pyispyb/core/routes/auto_proc.py
deleted file mode 100644
index ede5d42d..00000000
--- a/pyispyb/core/routes/auto_proc.py
+++ /dev/null
@@ -1,275 +0,0 @@
-"""
-Project: py-ispyb.
-
-https://github.com/ispyb/py-ispyb
-
-This file is part of py-ispyb software.
-
-py-ispyb is free software: you can redistribute it and/or modify
-it under the terms of the GNU Lesser General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
- -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - -import os -from flask import request, send_file, abort -from pyispyb.flask_restx_patched import Resource, HTTPStatus - -from pyispyb.app.extensions.api import api_v1, Namespace -from pyispyb.app.extensions.auth import token_required, role_required - -from pyispyb.core.schemas import auto_proc as auto_proc_schemas -from pyispyb.core.schemas import auto_proc_program as auto_proc_program_schemas -from pyispyb.core.schemas import ( - auto_proc_program_attachment as auto_proc_program_attachment_schemas, -) - -# from pyispyb.core.schemas import ( -# auto_proc_program_message as auto_proc_program_message_schemas, -# ) -from pyispyb.core.schemas import auto_proc_status as auto_proc_status_schemas -from pyispyb.core.modules import auto_proc - - -__license__ = "LGPLv3+" - - -api = Namespace( - "Auto processing", description="Auto processing related namespace", path="/autoproc" -) - -api_v1.add_namespace(api) - - -@api.route("", endpoint="auto_procs") -@api.doc(security="apikey") -class AutoProcs(Resource): - """Allows to get all auto proc entries""" - - @token_required - @role_required - def get(self): - """Returns auto proc entries""" - return auto_proc.get_auto_procs(request) - - @api.expect(auto_proc_schemas.f_schema) - @api.marshal_with(auto_proc_schemas.f_schema, code=201) - # @api.errorhandler(FakeException) - # TODO add custom exception handling - @token_required - @role_required - def post(self): - """Adds a new auto proc""" - return auto_proc.add_auto_proc(api.payload) - - -@api.route("/", endpoint="auto_proc_by_id") -@api.param("auto_proc_id", "auto_proc id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="auto_proc not found.") -class AutoProcById(Resource): - """Allows to get/set/delete a auto_proc""" - - @api.doc(description="auto_proc_id should be an integer ") - @api.marshal_with(auto_proc_schemas.f_schema, skip_none=False, code=HTTPStatus.OK) - @token_required - @role_required - def get(self, auto_proc_id): - """Returns a auto_proc by auto_procId""" - return auto_proc.get_auto_proc_by_id(auto_proc_id) - - -@api.route("/status", endpoint="auto_proc_status") -@api.doc(security="apikey") -class AutoProcStatus(Resource): - """Allows to get all auto proc status entries""" - - @token_required - @role_required - def get(self): - """Returns all auto_proc_status entries""" - return auto_proc.get_auto_proc_status(request) - - @token_required - @role_required - @api.expect(auto_proc_program_schemas.f_schema) - @api.marshal_with(auto_proc_program_schemas.f_schema, code=201) - # @api.errorhandler(FakeException) - # TODO add custom exception handling - def post(self): - """Adds a new auto proc program""" - return auto_proc.add_auto_proc_status(api.payload) - - -@api.route("/status/", endpoint="auto_proc_status_by_id") -@api.param("status_id", "status id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="auto_proc_status not found.") -class AutoProcStatusById(Resource): - """Allows to get/set/delete a auto_proc_status""" - - @token_required - @role_required - @api.doc(description="status_id should be an integer ") - @api.marshal_with( - 
auto_proc_status_schemas.f_schema, skip_none=False, code=HTTPStatus.OK - ) - def get(self, status_id): - """Returns a auto_proc by auto_procId""" - return auto_proc.get_auto_proc_status_by_id(status_id) - - -@api.route("/programs", endpoint="auto_proc_programs") -@api.doc(security="apikey") -class AutoProcPrograms(Resource): - """Allows to get all auto proc program entries""" - - @token_required - @role_required - def get(self): - """Returns all auto_proc_program entries""" - return auto_proc.get_auto_proc_programs(request) - - @token_required - @role_required - @api.expect(auto_proc_program_schemas.f_schema) - @api.marshal_with(auto_proc_program_schemas.f_schema, code=201) - def post(self): - """Adds a new auto proc program""" - return auto_proc.add_auto_proc_program(api.payload) - -@api.route("/programs/", endpoint="program_by_id") -@api.param("program_id", "program id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="auto_proc_program not found.") -class AutoProcProgramById(Resource): - """Allows to get/set/delete a auto_proc_program""" - - @token_required - @role_required - @api.doc(description="program_id should be an integer ") - @api.marshal_with( - auto_proc_program_schemas.f_schema, skip_none=False, code=HTTPStatus.OK - ) - def get(self, program_id): - """Returns a auto_proc by auto_procId""" - return auto_proc.get_auto_proc_program_by_id(program_id) - - -@api.route("/attachments", endpoint="auto_proc_program_attachments") -@api.doc(security="apikey") -class Attachments(Resource): - """Allows to get all auto proc program attachment entries""" - - @token_required - @role_required - def get(self): - """Returns all auto_proc_program attachemnt entries""" - query_dict = request.args.to_dict() - return auto_proc.get_attachments_by_query(query_dict) - - @token_required - @role_required - @api.expect(auto_proc_program_attachment_schemas.f_schema) - @api.marshal_with(auto_proc_program_attachment_schemas.f_schema, code=201) - def post(self): - """Adds a new auto proc program""" - return auto_proc.add_auto_proc_program_attachment(api.payload) - -@api.route("/programs//attachments", endpoint="attachments_by_program_id") -@api.param("program_id", "program id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="auto_proc_program not found.") -class AttachmentsByAutoProcProgramId(Resource): - """Return auto proc program attachments by auto proc program id""" - - @token_required - @role_required - @api.doc(description="program_id should be an integer ") - def get(self, program_id): - """Returns list of autoproc program attachments""" - return auto_proc.get_attachments_by_query({"autoProcProgramId": program_id}) - -@api.route( - "/programs//attachments/download", - endpoint="download_attachments_by_program_id") -@api.param("program_id", "program id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="auto_proc_program not found.") -class DownloadAttachmentsByAutoProcProgramId(Resource): - - @token_required - @role_required - @api.doc(description="program_id should be an integer ") - def get(self, program_id): - """Downloads zip file with auto proc attachment files""" - attachment_file_zip, msg = auto_proc.get_attachment_zip_by_program_id(program_id) - - if attachment_file_zip: - return send_file( - attachment_file_zip, - attachment_filename='auto_proc_attachments_%d.zip' % program_id, - as_attachment=True - ) - else: - abort(HTTPStatus.NO_CONTENT, msg) - 
-@api.route("/attachments/", endpoint="attachment_by_id") -@api.param("attachment_id", "attachment id (integer)") -@api.doc(security="apikey") -@api.response( - code=HTTPStatus.NOT_FOUND, description="auto_proc_program_attachment not found." -) -class AttachmentById(Resource): - """Allows to get/set/delete a auto_proc_program""" - - @token_required - @role_required - @api.doc(description="attachment_id should be an integer ") - @api.marshal_with( - auto_proc_program_attachment_schemas.f_schema, - skip_none=False, - code=HTTPStatus.OK, - ) - def get(self, attachment_id): - """Returns a auto_proc by attachment_id""" - return auto_proc.get_auto_proc_program_attachment_by_id(attachment_id) - - -@api.route("/attachments//download", endpoint="attachment_download_by_id") -@api.param("attachment_id", "attachment id (integer)") -@api.doc(security="apikey") -@api.response( - code=HTTPStatus.NOT_FOUND, description="auto_proc_program_attachment not found." -) -class AttachmenDownloadById(Resource): - """Downloads autoproc program attachment file""" - - @token_required - @role_required - @api.doc(description="attachment_id should be an integer ") - def get(self, attachment_id): - """Downloads autoproc program attachment file by attachment_id""" - attach_dict = auto_proc.get_auto_proc_program_attachment_by_id(attachment_id) - path = os.path.join( - attach_dict["filePath"], - attach_dict["fileName"] - ) - if os.path.exists(path): - return send_file( - path, - as_attachment=True - ) - else: - abort( - HTTPStatus.NOT_FOUND, - "Autoproc program attachment %s not found" % path - ) diff --git a/pyispyb/core/routes/beamline.py b/pyispyb/core/routes/beamline.py deleted file mode 100644 index b38850b6..00000000 --- a/pyispyb/core/routes/beamline.py +++ /dev/null @@ -1,224 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - -import logging - -from flask import request -from pyispyb.flask_restx_patched import Resource, HTTPStatus - -from pyispyb.app.extensions.api import api_v1, Namespace -from pyispyb.app.extensions.auth import token_required, role_required - -from pyispyb.core.schemas import beamline_setup as beamline_setup_schemas -from pyispyb.core.schemas import robot_action as robot_action_schemas -from pyispyb.core.schemas import detector as detector_schemas -from pyispyb.core.modules import beamline_setup, robot_action, detector - - -__license__ = "LGPLv3+" - -log = logging.getLogger(__name__) -api = Namespace("Beamline", description="Beamline related namespace", path="/beamline") -api_v1.add_namespace(api) - - -@api.route("/setups", endpoint="beamline_setup") -@api.doc(security="apikey") -class BeamlineSetups(Resource): - """Allows to get all beamline_setups and insert a new one""" - - @token_required - @role_required - def get(self): - """Returns list of beamline_setups""" - return beamline_setup.get_beamline_setups(request) - - @token_required - @role_required - @api.expect(beamline_setup_schemas.f_schema) - @api.marshal_with(beamline_setup_schemas.f_schema, code=201) - def post(self): - """Adds a new beamline_setup""" - log.info("Inserts a new beamline_setup") - return beamline_setup.add_beamline_setup(api.payload) - - -@api.route("/setups/", endpoint="beamline_setup_by_id") -@api.param("beamline_setup_id", "beamline_setup id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="beamline_setup not found.") -class BeamlineSetupById(Resource): - """Allows to get/set/delete a beamline_setup""" - - @token_required - @role_required - @api.doc(description="beamline_setup_id should be an integer ") - @api.marshal_with( - beamline_setup_schemas.f_schema, skip_none=False, code=HTTPStatus.OK - ) - def get(self, beamline_setup_id): - """Returns a beamline_setup by beamline_setupId""" - return beamline_setup.get_beamline_setup_by_id(beamline_setup_id) - - @token_required - @role_required - @api.expect(beamline_setup_schemas.f_schema) - @api.marshal_with(beamline_setup_schemas.f_schema, code=HTTPStatus.CREATED) - def put(self, beamline_setup_id): - """Fully updates beamline_setup with beamline_setup_id""" - return beamline_setup.update_beamline_setup(beamline_setup_id, api.payload) - - @token_required - @role_required - @api.expect(beamline_setup_schemas.f_schema) - @api.marshal_with(beamline_setup_schemas.f_schema, code=HTTPStatus.CREATED) - def patch(self, beamline_setup_id): - """Partially updates beamline_setup with id beamline_setupId""" - return beamline_setup.patch_beamline_setup(beamline_setup_id, api.payload) - - @token_required - @role_required - def delete(self, beamline_setup_id): - """Deletes a beamline_setup by beamline_setupId""" - return beamline_setup.delete_beamline_setup(beamline_setup_id) - - -@api.route("/robot_actions", endpoint="robot_actions") -@api.doc(security="apikey") -class RobotActions(Resource): - """Allows to get robot action db items and insert a new one""" - - @token_required - @role_required - def get(self): - """Returns list of robot_actions""" - return robot_action.get_robot_actions(request) - - @token_required - @role_required - @api.expect(robot_action_schemas.f_schema) - @api.marshal_with(robot_action_schemas.f_schema, code=201) - # @api.errorhandler(FakeException) - # TODO add custom exception handling - def post(self): - """Adds a new robot_action""" - return robot_action.add_robot_action(api.payload) - - 
-@api.route("/robot_actions/", endpoint="robot_action_by_id") -@api.param("robot_action_id", "robot_action id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="robot_action not found.") -class RobotActionById(Resource): - """Allows to get/set/delete a robot_action""" - - @token_required - @role_required - @api.doc(description="robot_action_id should be an integer ") - @api.marshal_with( - robot_action_schemas.f_schema, skip_none=False, code=HTTPStatus.OK - ) - def get(self, robot_action_id): - """Returns a robot_action by robot_action_id""" - return robot_action.get_robot_action_by_id(robot_action_id) - - @token_required - @role_required - @api.expect(robot_action_schemas.f_schema) - @api.marshal_with(robot_action_schemas.f_schema, code=HTTPStatus.CREATED) - def put(self, robot_action_id): - """Fully updates robot_action with robot_action_id""" - return robot_action.update_robot_action(robot_action_id, api.payload) - - @token_required - @role_required - @api.expect(robot_action_schemas.f_schema) - @api.marshal_with(robot_action_schemas.f_schema, code=HTTPStatus.CREATED) - def patch(self, robot_action_id): - """Partially updates robot_action with robot_action_id""" - return robot_action.patch_robot_action(robot_action_id, api.payload) - - @token_required - @role_required - def delete(self, robot_action_id): - """Deletes a robot_action by robot_action_id""" - return robot_action.delete_robot_action(robot_action_id) - - -@api.route("/detectors", endpoint="detectors") -@api.doc(security="apikey") -class Detectors(Resource): - """Allows to get all detectors and insert a new one""" - - @token_required - @role_required - def get(self): - """Returns list of detectors""" - return detector.get_detectors(request) - - @token_required - @role_required - @api.expect(detector_schemas.f_schema) - @api.marshal_with(detector_schemas.f_schema, code=201) - # @api.errorhandler(FakeException) - # TODO add custom exception handling - def post(self): - """Adds a new detector""" - log.info("Inserts a new detector") - return detector.add_detector(api.payload) - - -@api.route("/detectors/", endpoint="detector_by_id") -@api.param("detector_id", "detector id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="detector not found.") -class DetectorById(Resource): - """Allows to get/set/delete a detector""" - - @token_required - @role_required - @api.doc(description="detector_id should be an integer ") - @api.marshal_with(detector_schemas.f_schema, skip_none=False, code=HTTPStatus.OK) - def get(self, detector_id): - """Returns a detector by detectorId""" - return detector.get_detector_by_id(detector_id) - - @token_required - @role_required - @api.expect(detector_schemas.f_schema) - @api.marshal_with(detector_schemas.f_schema, code=HTTPStatus.CREATED) - def put(self, detector_id): - """Fully updates detector with detector_id""" - return detector.update_detector(detector_id, api.payload) - - @token_required - @role_required - @api.expect(detector_schemas.f_schema) - @api.marshal_with(detector_schemas.f_schema, code=HTTPStatus.CREATED) - def patch(self, detector_id): - """Partially updates detector with id detectorId""" - return detector.patch_detector(detector_id, api.payload) - - @token_required - @role_required - def delete(self, detector_id): - """Deletes a detector by detectorId""" - return detector.delete_detector(detector_id) diff --git a/pyispyb/core/routes/contacts.py b/pyispyb/core/routes/contacts.py deleted file mode 100644 
index af09bf5e..00000000 --- a/pyispyb/core/routes/contacts.py +++ /dev/null @@ -1,222 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - -from flask import request -from pyispyb.flask_restx_patched import Resource, HTTPStatus - -from pyispyb.app.extensions.api import api_v1, Namespace -from pyispyb.app.extensions.auth import token_required, role_required - -from pyispyb.core.schemas import person as person_schemas -from pyispyb.core.schemas import lab_contact as lab_contact_schemas -from pyispyb.core.schemas import laboratory as laboratory_schemas -from pyispyb.core.modules import contacts - - -__license__ = "LGPLv3+" - - -api = Namespace("Contacts", description="Contact related namespace", path="/contacts") -api_v1.add_namespace(api) - - -@api.route("/persons", endpoint="persons") -@api.doc(security="apikey") -class Persons(Resource): - """Allows to get all persons""" - - @token_required - @role_required - def get(self): - """Returns all persons""" - return contacts.get_persons_by_query(request.args.to_dict()) - - @token_required - @role_required - @api.expect(person_schemas.f_schema) - @api.marshal_with(person_schemas.f_schema, code=201) - def post(self): - return contacts.add_person(api.payload) - - -@api.route("/persons/", endpoint="person_by_id") -@api.doc(security="apikey") -class PersonById(Resource): - """Allows to get/set/delete a person""" - - @token_required - @role_required - @api.doc(description="person_id should be an integer ") - @api.marshal_with(person_schemas.f_schema) - def get(self, person_id): - """Returns a person by personId""" - return contacts.get_person_by_id(person_id) - - @token_required - @role_required - @api.expect(person_schemas.f_schema) - @api.marshal_with(person_schemas.f_schema, code=HTTPStatus.CREATED) - def put(self, person_id): - """Fully updates person with id person_id""" - return contacts.update_person(person_id, api.payload) - - @token_required - @role_required - @api.expect(person_schemas.f_schema) - @api.marshal_with(person_schemas.f_schema, code=HTTPStatus.CREATED) - def patch(self, person_id): - """Partially updates person with id person_id""" - return contacts.patch_person(person_id, api.payload) - - @token_required - @role_required - def delete(self, person_id): - """Deletes person by person_id""" - return contacts.delete_person(person_id) - - -@api.route("/persons//info", endpoint="person_info_by_login") -@api.doc(security="apikey") -class PersonInfoByLoginName(Resource): - """Returns info about the person""" - - @token_required - @role_required - @api.doc(description="person_login should be a string") - def get(self, person_login): - """Returns info about a person by login""" - params = {"login": person_login} - return contacts.get_person_info_by_params(params) - - -@api.route("/lab_contacts", endpoint="lab_contacts") -@api.doc(security="apikey") -class LabContacts(Resource): - 
"""Allows to get all local contacts""" - - @token_required - @role_required - def get(self): - """Returns list of local contacts.""" - return contacts.get_lab_contacts(request), HTTPStatus.OK - - @token_required - @role_required - @api.expect(lab_contact_schemas.f_schema) - @api.marshal_with(lab_contact_schemas.f_schema, code=201) - def post(self): - """Adds a new lab contact""" - return contacts.add_lab_contact(api.payload) - - -@api.route("/lab_contacts/", endpoint="lab_contact_by_id") -@api.doc(security="apikey") -class LabContactById(Resource): - """Allows to get/set/delete a lab_contact""" - - @token_required - @role_required - @api.doc(description="lab_contact_id should be an integer ") - @api.marshal_with(lab_contact_schemas.f_schema) - def get(self, lab_contact_id): - """Returns a lab contact by lab_contact_id""" - params = {"labContactId": lab_contact_id} - return contacts.get_lab_contact_by_params(params) - - @token_required - @role_required - @api.expect(lab_contact_schemas.f_schema) - @api.marshal_with(lab_contact_schemas.f_schema, code=HTTPStatus.CREATED) - def put(self, lab_contact_id): - """Fully updates person with id lab_contact_id""" - return contacts.update_lab_contact(lab_contact_id, api.payload) - - @token_required - @role_required - @api.expect(lab_contact_schemas.f_schema) - @api.marshal_with(lab_contact_schemas.f_schema, code=HTTPStatus.CREATED) - def patch(self, lab_contact_id): - """Partially updates person with id lab_contact_id""" - return contacts.patch_lab_contact(lab_contact_id, api.payload) - - @token_required - @role_required - def delete(self, lab_contact_id): - """Deletes lab contact by lab_contact_id""" - return contacts.delete_lab_contact(lab_contact_id) - - -@api.route("/labs", endpoint="labs") -@api.doc(security="apikey") -class Laboratories(Resource): - """Allows to get all laboratory items""" - - @token_required - @role_required - def get(self): - """Returns all laboratory entries.""" - return contacts.get_laboratories(request) - - @token_required - @role_required - @api.expect(laboratory_schemas.f_schema) - @api.marshal_with(laboratory_schemas.f_schema, code=201) - def post(self): - """Adds a new laboratory""" - return contacts.add_laboratory(api.payload) - - -@api.route("/labs/", endpoint="laboratory_by_id") -@api.param("laboratory_id", "laboratory_id id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="Laboratory not found.") -class LaboratoryById(Resource): - """Allows to get/set/delete a laboratory item""" - - @token_required - @role_required - @api.doc(description="lab_id should be an integer ") - @api.marshal_with(laboratory_schemas.f_schema, skip_none=False, code=HTTPStatus.OK) - def get(self, laboratory_id): - """Returns a laboratory by laboratoryId""" - return contacts.get_laboratory_by_id(laboratory_id) - - @token_required - @role_required - @api.expect(laboratory_schemas.f_schema) - @api.marshal_with(laboratory_schemas.f_schema, code=HTTPStatus.CREATED) - def put(self, laboratory_id): - """Fully updates laboratory with id laboratory_id.""" - return contacts.update_laboratory(laboratory_id, api.payload) - - @token_required - @role_required - @api.expect(laboratory_schemas.f_schema) - @api.marshal_with(laboratory_schemas.f_schema, code=HTTPStatus.CREATED) - def patch(self, laboratory_id): - """Partially updates laboratory with id laboratory_id.""" - return contacts.patch_laboratory(laboratory_id, api.payload) - - @token_required - @role_required - def delete(self, laboratory_id): - """Deletes 
laboratory by laboratory_id.""" - return contacts.delete_laboratory(laboratory_id) diff --git a/pyispyb/core/routes/containers.py b/pyispyb/core/routes/containers.py new file mode 100644 index 00000000..c8cd878d --- /dev/null +++ b/pyispyb/core/routes/containers.py @@ -0,0 +1,86 @@ +import logging + +from fastapi import Depends, HTTPException, status +from ispyb import models + +from ...dependencies import pagination +from ...app.extensions.database.utils import Paged +from ...app.base import AuthenticatedAPIRouter +from ... import filters + +from ..modules import containers as crud +from ..schemas import containers as schema +from ..schemas.utils import paginated, make_optional + + +logger = logging.getLogger(__name__) +router = AuthenticatedAPIRouter(prefix="/containers", tags=["Containers"]) + + +@router.get("", response_model=paginated(schema.Container)) +def get_containers( + proposal: str = Depends(filters.proposal), + page: dict[str, int] = Depends(pagination), +) -> Paged[models.Container]: + """Get a list of containers""" + return crud.get_containers(proposal=proposal, **page) + + +@router.get( + "/{containerId}", + response_model=schema.Container, + responses={404: {"description": "No such container"}}, +) +def get_container(containerId: int) -> models.Container: + """Get a container""" + container = crud.get_containers( + containerId=containerId, + skip=0, + limit=1, + ) + try: + return container.first + except IndexError: + raise HTTPException(status_code=404, detail="Container not found") + + +@router.post( + "", + response_model=schema.Container, + status_code=status.HTTP_201_CREATED, +) +def create_container(container: schema.ContainerCreate) -> models.Container: + """Create a new container""" + return crud.create_container( + container=container, + ) + + +CONTAINER_UPDATE_EXCLUDED = {} + + +@router.patch( + "/{containerId}", + response_model=schema.Container, + responses={ + 404: {"description": "No such container"}, + 400: {"description": "Could not update container"}, + }, +) +def update_container( + containerId: int, + container: make_optional( + schema.ContainerCreate, + exclude=CONTAINER_UPDATE_EXCLUDED, + ), +): + """Update a Container""" + try: + return crud.update_container(containerId, container) + except IndexError: + raise HTTPException(status_code=404, detail="Container not found") + except Exception: + logger.exception( + f"Could not update container `{containerId}` with payload `{container}`" + ) + raise HTTPException(status_code=400, detail="Could not update container") diff --git a/pyispyb/core/routes/data.py b/pyispyb/core/routes/data.py new file mode 100644 index 00000000..8b1b0c41 --- /dev/null +++ b/pyispyb/core/routes/data.py @@ -0,0 +1,199 @@ +import logging +from typing import Optional + +from fastapi import Depends, HTTPException, Response, Query +from h5grove.content import ( + DatasetContent, + ResolvedEntityContent, + get_content_from_file, +) +from h5grove.encoders import encode +from h5grove.models import LinkResolution +from h5grove.utils import parse_link_resolution_arg +from pydantic import conint + +from ... 
import filters +from ...app.base import AuthenticatedAPIRouter +from ..modules import data as crud +from ..schemas import data as schema + +logger = logging.getLogger(__name__) +router = AuthenticatedAPIRouter(prefix="/data", tags=["Data"]) + + +@router.get("/images") +def get_image( + imageNumber: conint(gt=0), + dataCollectionId: int = Depends(filters.dataCollectionId), +): + """Get raw image data""" + image = crud.get_image( + dataCollectionId=dataCollectionId, + imageNumber=imageNumber, + ) + + if image is None: + raise HTTPException(status_code=404, detail="Image not found") + + return Response(image.tobytes(), media_type="application/octet-stream") + + +@router.get("/images/header") +def get_image_header( + imageNumber: int, + dataCollectionId: int = Depends(filters.dataCollectionId), +): + """Get image header""" + header = crud.get_image( + dataCollectionId=dataCollectionId, imageNumber=imageNumber, header=True + ) + + if not header: + raise HTTPException(status_code=404, detail="Image not found") + + return header + + +@router.get("/images/histogram", response_model=schema.ImageHistogram) +def get_image_histogram( + imageNumber: int, + dataCollectionId: int = Depends(filters.dataCollectionId), +): + """Get image histogram data""" + histogram = crud.get_image_histogram( + dataCollectionId=dataCollectionId, + imageNumber=imageNumber, + ) + + if not histogram: + raise HTTPException(status_code=404, detail="Image not found") + + return histogram + + +class H5GroveException(Exception): + def __init__(self, status_code: int, message: str) -> None: + self.status_code = status_code + self.message = message + + +def create_error(status_code, message): + return H5GroveException(status_code, message) + + +@router.get("/h5grove/attr/") +async def get_attr( + path: str = "/", + attr_keys: Optional[list[str]] = Query(default=None), + dataCollectionId: Optional[int] = Depends(filters.dataCollectionId), + autoProcProgramAttachmentId: Optional[int] = Query( + None, title="AutoProcProgramAttachment id" + ), + robotActionId: Optional[int] = Query(None, title="RobotAction id"), +): + """h5grove `/attr/` endpoint handler""" + file = crud.get_h5_path_mapped( + dataCollectionId=dataCollectionId, + autoProcProgramAttachmentId=autoProcProgramAttachmentId, + robotActionId=robotActionId, + ) + if file is None: + raise HTTPException(status_code=404, detail="File not found") + + with get_content_from_file(file, path, create_error) as content: + if not isinstance(content, ResolvedEntityContent): + raise HTTPException(status_code=500, detail="Wrong file type") + h5grove_response = encode(content.attributes(attr_keys), "json") + return Response( + content=h5grove_response.content, headers=h5grove_response.headers + ) + + +@router.get("/h5grove/data/") +async def get_data( + path: str = "/", + dtype: str = "origin", + format: str = "json", + flatten: bool = False, + selection=None, + dataCollectionId: Optional[int] = Depends(filters.dataCollectionId), + autoProcProgramAttachmentId: Optional[int] = Query( + None, title="AutoProcProgramAttachment id" + ), + robotActionId: Optional[int] = Query(None, title="RobotAction id"), +): + """h5grove `/data/` endpoint handler""" + file = crud.get_h5_path_mapped( + dataCollectionId=dataCollectionId, + autoProcProgramAttachmentId=autoProcProgramAttachmentId, + robotActionId=robotActionId, + ) + if file is None: + raise HTTPException(status_code=404, detail="File not found") + + with get_content_from_file(file, path, create_error) as content: + if not isinstance(content, 
DatasetContent): + raise HTTPException(status_code=500, detail="Wrong file type") + data = content.data(selection, flatten, dtype) + h5grove_response = encode(data, format) + return Response( + content=h5grove_response.content, headers=h5grove_response.headers + ) + + +@router.get("/h5grove/meta/") +async def get_meta( + path: str = "/", + resolve_links: str = "only_valid", + dataCollectionId: Optional[int] = Depends(filters.dataCollectionId), + autoProcProgramAttachmentId: Optional[int] = Query( + None, title="AutoProcProgramAttachment id" + ), + robotActionId: Optional[int] = Query(None, title="RobotAction id"), +): + """h5grove `/meta/` endpoint handler""" + file = crud.get_h5_path_mapped( + dataCollectionId=dataCollectionId, + autoProcProgramAttachmentId=autoProcProgramAttachmentId, + robotActionId=robotActionId, + ) + if file is None: + raise HTTPException(status_code=404, detail="File not found") + + resolve_links = parse_link_resolution_arg( + resolve_links, + fallback=LinkResolution.ONLY_VALID, + ) + with get_content_from_file(file, path, create_error, resolve_links) as content: + h5grove_response = encode(content.metadata(), "json") + return Response( + content=h5grove_response.content, headers=h5grove_response.headers + ) + + +@router.get("/h5grove/stats/") +async def get_stats( + path: str = "/", + selection=None, + dataCollectionId: Optional[int] = Depends(filters.dataCollectionId), + autoProcProgramAttachmentId: Optional[int] = Query( + None, title="AutoProcProgramAttachment id" + ), + robotActionId: Optional[int] = Query(None, title="RobotAction id"), +): + """h5grove `/stats/` endpoint handler""" + file = crud.get_h5_path_mapped( + dataCollectionId=dataCollectionId, + autoProcProgramAttachmentId=autoProcProgramAttachmentId, + robotActionId=robotActionId, + ) + if file is None: + raise HTTPException(status_code=404, detail="File not found") + + with get_content_from_file(file, path, create_error) as content: + if not isinstance(content, DatasetContent): + raise HTTPException(status_code=500, detail="Wrong file type") + h5grove_response = encode(content.data_stats(selection), "json") + return Response( + content=h5grove_response.content, headers=h5grove_response.headers + ) diff --git a/pyispyb/core/routes/data_collections.py b/pyispyb/core/routes/data_collections.py deleted file mode 100644 index 17da5b33..00000000 --- a/pyispyb/core/routes/data_collections.py +++ /dev/null @@ -1,208 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - -import os -from flask import request, send_file, abort - -from pyispyb.flask_restx_patched import Resource, HTTPStatus - -from pyispyb.app.extensions.api import api_v1, Namespace -from pyispyb.app.extensions.auth import token_required, role_required - -from pyispyb.core.schemas import data_collection as data_collection_schemas -from pyispyb.core.schemas import data_collection_group as data_collection_group_schemas -from pyispyb.core.modules import data_collection - - -__license__ = "LGPLv3+" - - -api = Namespace( - "Data collections", - description="Data collection related namespace", - path="/data_collections", -) -api_v1.add_namespace(api) - - -@api.route("") -@api.doc(security="apikey") -class DataColletions(Resource): - """Allows to get all data_collections""" - - @token_required - @role_required - @api.marshal_list_with(data_collection_schemas.f_schema, skip_none=False, code=HTTPStatus.OK) - def get(self): - """Returns list of data_collections""" - query_dict = request.args.to_dict() - return data_collection.get_data_collections(query_dict) - - @token_required - @role_required - @api.expect(data_collection_schemas.f_schema) - @api.marshal_with(data_collection_schemas.f_schema, code=201) - def post(self): - """Adds a new session""" - return data_collection.add_data_collection(api.payload) - - -@api.route("/") -@api.param("data_collection_id", "Data collection id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="data collection not found.") -class DataCollectionById(Resource): - """Allows to get/set/delete a data_collection""" - - @token_required - @role_required - @api.doc(description="data_collection_id should be an integer ") - @api.marshal_with( - data_collection_schemas.f_schema, - skip_none=False, - code=HTTPStatus.OK, - ) - def get(self, data_collection_id): - """Returns a data_collection by data_collectionId""" - return data_collection.get_data_collection_by_id(data_collection_id) - -@api.route("//snapshot/") -@api.param("data_collection_id", "data_collection_id (integer)") -@api.param("snapshot_index", "snapshot_index (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="data collection not found.") -class DataCollectionSnapshot(Resource): - """Allows to download snapshots associated to the data collection""" - - @token_required - @role_required - @api.doc(description="data_collection_id and snapshot_id should be an integer") - def get(self, data_collection_id, snapshot_index): - """Downloads data collection attribute by id and attribute_name""" - data_collection_dict = data_collection.get_data_collection_by_id( - data_collection_id - ) - if data_collection_dict: - snapshot_path = data_collection_dict.get("xtalSnapshotFullPath%d" % snapshot_index) - if snapshot_path: - if os.path.exists(snapshot_path): - return send_file( - snapshot_path, - attachment_filename=os.path.basename(snapshot_path), - as_attachment=True - ) - else: - abort( - HTTPStatus.NOT_FOUND, - "File %s do not exist" % snapshot_path - ) - else: - abort( - HTTPStatus.NOT_FOUND, - "No file name associated with xtalSnapshotFullPath%d" % snapshot_index - ) - - -@api.route("//file") -@api.param("data_collection_id", "data_collection_id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="data collection not found.") -class DataCollectionFile(Resource): - """Allows to download files associated to the data collection""" - - @token_required - @role_required - 
@api.doc(description="data_collection_id should be an integer ") - def get(self, data_collection_id): - """Downloads data collection attribute by id and attribute_name""" - data_collection_dict = data_collection.get_data_collection_by_id( - data_collection_id - ) - if data_collection_dict: - query_dict = request.args.to_dict() - if "attribute_name" in query_dict: - attribute_file_path = data_collection_dict.get( - query_dict["attribute_name"] - ) - if attribute_file_path: - if os.path.exists(attribute_file_path): - return send_file( - attribute_file_path, - attachment_filename=os.path.basename(attribute_file_path), - as_attachment=True - ) - else: - abort( - HTTPStatus.NOT_FOUND, - "File %s do not exist" % attribute_file_path - ) - else: - abort( - HTTPStatus.NOT_FOUND, - "No file associated with attribute %s" % - query_dict["attribute_name"] - ) - - else: - abort( - HTTPStatus.NOT_FOUND, - "No attribute_name in query parameters" - ) - - -@api.route("/groups") -@api.doc(security="apikey") -class DataCollectionGroups(Resource): - """Allows to get all data collection groups and add a new one""" - - @token_required - @role_required - @api.marshal_list_with(data_collection_schemas.f_schema, skip_none=False, code=HTTPStatus.OK) - def get(self): - """Returns list of data_collection_groups""" - return data_collection.get_data_collection_groups(request) - - @token_required - @role_required - @api.expect(data_collection_group_schemas.f_schema) - @api.marshal_with(data_collection_group_schemas.f_schema, code=201) - def post(self): - """Adds a new session""" - return data_collection.add_data_collection_group(api.payload) - -@api.route("/groups/") -@api.param("data_collection_group_id", "data_collection group_id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="data collection group not found.") -class DataCollectionGroupById(Resource): - """Allows to get/set/delete a data collection group""" - - @token_required - @role_required - @api.doc(description="data_collection_group_id should be an integer ") - @api.marshal_with( - data_collection_group_schemas.f_schema, - skip_none=False, - code=HTTPStatus.OK, - ) - def get(self, data_collection_group_id): - """Returns a data_collection group by dataCollection_group_id""" - return data_collection.get_data_collection_group_by_id(data_collection_group_id) diff --git a/pyispyb/core/routes/datacollections.py b/pyispyb/core/routes/datacollections.py new file mode 100644 index 00000000..2482c7ab --- /dev/null +++ b/pyispyb/core/routes/datacollections.py @@ -0,0 +1,184 @@ +import logging +import os +from typing import Optional + +from fastapi import Depends, HTTPException, Query +from fastapi.responses import FileResponse +from pydantic import conint +from ispyb import models + +from ...config import settings +from ...dependencies import pagination +from ...app.extensions.database.utils import Paged +from ... 
import filters +from ...app.base import AuthenticatedAPIRouter + +from ..modules import datacollections as crud +from ..schemas import datacollections as schema +from ..schemas.utils import paginated + + +logger = logging.getLogger(__name__) +router = AuthenticatedAPIRouter(prefix="/datacollections", tags=["Data Collections"]) + + +@router.get("/images/diffraction/{dataCollectionId}", response_class=FileResponse) +def get_datacollection_diffraction_image( + dataCollectionId: int, + snapshot: bool = Query(False, description="Get snapshot image"), +) -> str: + """Get a data collection diffraction image""" + path = crud.get_datacollection_diffraction_image_path( + dataCollectionId, + snapshot, + ) + if not path: + raise HTTPException(status_code=404, detail="Image not found") + + return path + + +@router.get("/images/quality/{dataCollectionId}", response_class=FileResponse) +def get_datacollection_analysis_image( + dataCollectionId: int, +) -> str: + """Get a data collection per image analysis image""" + path = crud.get_datacollection_analysis_image_path( + dataCollectionId, + ) + if not path: + raise HTTPException(status_code=404, detail="Image not found") + + return path + + +@router.get("/images/{dataCollectionId}", response_class=FileResponse) +def get_datacollection_image( + dataCollectionId: int, + imageId: conint(ge=1, le=4) = Query(1, description="Image 1-4 to return"), + snapshot: bool = Query(False, description="Get snapshot image"), +) -> str: + """Get a data collection image""" + path = crud.get_datacollection_snapshot_path( + dataCollectionId, + imageId, + snapshot, + ) + if not path: + raise HTTPException(status_code=404, detail="Image not found") + + return path + + +@router.get( + "/attachments", response_model=paginated(schema.DataCollectionFileAttachment) +) +def get_datacollection_attachments( + page: dict[str, int] = Depends(pagination), + dataCollectionId: int = Depends(filters.dataCollectionId), + dataCollectionGroupId: int = Depends(filters.dataCollectionGroupId), +) -> Paged[models.DataCollectionFileAttachment]: + """Get a list of data collection attachments""" + return crud.get_datacollection_attachments( + dataCollectionId=dataCollectionId, + dataCollectionGroupId=dataCollectionGroupId, + **page, + ) + + +@router.get( + "/attachments/{dataCollectionFileAttachmentId}", + response_class=FileResponse, + responses={404: {"description": "No such data collection attachment"}}, +) +def get_datacollection_attachment( + dataCollectionFileAttachmentId: int, +): + """Get a data collection attachment""" + attachments = crud.get_datacollection_attachments( + dataCollectionFileAttachmentId=dataCollectionFileAttachmentId, + skip=0, + limit=1, + ) + + try: + attachment = attachments.first + file_path = attachment.fileFullPath + if settings.path_map: + file_path = settings.path_map + file_path + + if not os.path.exists(file_path): + logger.warning( + f"dataCollectionFileAttachmentId `{attachment.dataCollectionFileAttachmentId}` file `{file_path}` does not exist on disk" + ) + raise IndexError + return FileResponse(file_path, filename=attachment._metadata["fileName"]) + except IndexError: + raise HTTPException( + status_code=404, detail="Data collection attachment not found" + ) + + +@router.get( + "/quality", + response_model=paginated(schema.PerImageAnalysis), + responses={404: {"description": "No per image analysis found"}}, +) +def get_per_image_analysis( + page: dict[str, int] = Depends(pagination), + dataCollectionId: int = Depends(filters.dataCollectionId), + 
dataCollectionGroupId: int = Depends(filters.dataCollectionGroupId), +) -> Paged[schema.PerImageAnalysis]: + """Get a list of per image/point analysis""" + return crud.get_per_image_analysis( + dataCollectionId=dataCollectionId, + dataCollectionGroupId=dataCollectionGroupId, + **page, + ) + + +@router.get("/workflows/steps", response_model=paginated(schema.WorkflowStep)) +def get_workflow_steps( + page: dict[str, int] = Depends(pagination), + workflowId: Optional[int] = Query(None, title="Workflow id"), + workflowStepId: Optional[int] = Query(None, title="Workflow step id"), +) -> Paged[models.WorkflowStep]: + """Get a list of workflow steps""" + return crud.get_workflow_steps( + workflowId=workflowId, + workflowStepId=workflowStepId, + **page, + ) + + +@router.get( + "/workflows/steps/{workflowStepId}", + response_class=FileResponse, + responses={404: {"description": "No such workflow step attachment"}}, +) +def get_workflow_step_attachment( + workflowStepId: int, attachmentType: schema.WorkflowStepAttachment +): + """Get a workflow step attachment""" + steps = crud.get_workflow_steps( + workflowStepId=workflowStepId, + skip=0, + limit=1, + ) + + try: + steps: models.WorkflowStep = steps.first + file_path = getattr(steps, attachmentType) + if settings.path_map: + file_path = settings.path_map + file_path + + if not os.path.exists(file_path): + logger.warning( + f"workflowStep.{attachmentType} `{workflowStepId}` file `{file_path}` does not exist on disk" + ) + raise IndexError + return FileResponse(file_path, filename=os.path.basename(file_path)) + except IndexError: + raise HTTPException( + status_code=404, detail="Workflow step attachment not found" + ) diff --git a/pyispyb/core/routes/dewars.py b/pyispyb/core/routes/dewars.py new file mode 100644 index 00000000..b4980564 --- /dev/null +++ b/pyispyb/core/routes/dewars.py @@ -0,0 +1,85 @@ +import logging + +from fastapi import Depends, HTTPException, status +from ispyb import models + +from ...dependencies import pagination +from ...app.extensions.database.utils import Paged +from ...app.base import AuthenticatedAPIRouter +from ... 
import filters + +from ..modules import dewars as crud +from ..schemas import dewars as schema +from ..schemas.utils import paginated, make_optional + + +logger = logging.getLogger(__name__) +router = AuthenticatedAPIRouter(prefix="/dewars", tags=["Dewars"]) + + +@router.get("", response_model=paginated(schema.Dewar)) +def get_dewars( + shippingId: int = Depends(filters.shippingId), + proposal: str = Depends(filters.proposal), + page: dict[str, int] = Depends(pagination), +) -> Paged[models.Dewar]: + """Get a list of dewars""" + return crud.get_dewars(proposal=proposal, shippingId=shippingId, **page) + + +@router.get( + "/{dewarId}", + response_model=schema.Dewar, + responses={404: {"description": "No such dewar"}}, +) +def get_dewar(dewarId: int) -> models.Dewar: + """Get a dewar""" + dewar = crud.get_dewars( + dewarId=dewarId, + skip=0, + limit=1, + ) + try: + return dewar.first + except IndexError: + raise HTTPException(status_code=404, detail="Dewar not found") + + +@router.post( + "", + response_model=schema.Dewar, + status_code=status.HTTP_201_CREATED, +) +def create_dewar(dewar: schema.DewarCreate) -> models.Dewar: + """Create a new dewar""" + return crud.create_dewar( + dewar=dewar, + ) + + +DEWAR_UPDATE_EXCLUDED = {"shippingId": True} + + +@router.patch( + "/{dewarId}", + response_model=schema.Dewar, + responses={ + 404: {"description": "No such dewar"}, + 400: {"description": "Could not update dewar"}, + }, +) +def update_dewar( + dewarId: int, + dewar: make_optional( + schema.DewarCreate, + exclude=DEWAR_UPDATE_EXCLUDED, + ), +): + """Update a Dewar""" + try: + return crud.update_dewar(dewarId, dewar) + except IndexError: + raise HTTPException(status_code=404, detail="Dewar not found") + except Exception: + logger.exception(f"Could not update dewar `{dewarId}` with payload `{dewar}`") + raise HTTPException(status_code=400, detail="Could not update dewar") diff --git a/pyispyb/core/routes/events.py b/pyispyb/core/routes/events.py new file mode 100644 index 00000000..4f0a73ea --- /dev/null +++ b/pyispyb/core/routes/events.py @@ -0,0 +1,70 @@ +from typing import Optional +from fastapi import Depends + +from ...app.extensions.database.utils import Paged +from ...dependencies import pagination +from ... 
import filters +from ...app.base import AuthenticatedAPIRouter + +from ..modules import events as crud +from ..schemas import events as schema +from ..schemas.utils import paginated + +router = AuthenticatedAPIRouter(prefix="/events", tags=["Events"]) + + +@router.get( + "", + response_model=paginated(schema.Event), + responses={404: {"description": "Entity not found"}}, +) +def get_events( + page: dict[str, int] = Depends(pagination), + session: str = Depends(filters.session), + sessionId: int = Depends(filters.sessionId), + proposal: str = Depends(filters.proposal), + proposalId: str = Depends(filters.proposalId), + beamLineName: str = Depends(filters.beamLineName), + dataCollectionId: int = Depends(filters.dataCollectionId), + dataCollectionGroupId: int = Depends(filters.dataCollectionGroupId), + blSampleId: int = Depends(filters.blSampleId), + blSubSampleId: int = Depends(filters.blSubSampleId), + proteinId: int = Depends(filters.proteinId), + status: crud.EventStatus = None, + eventType: Optional[str] = None, +) -> Paged[schema.Event]: + """Get a list of events""" + return crud.get_events( + session=session, + sessionId=sessionId, + proposal=proposal, + proposalId=proposalId, + beamLineName=beamLineName, + dataCollectionId=dataCollectionId, + dataCollectionGroupId=dataCollectionGroupId, + blSampleId=blSampleId, + blSubSampleId=blSubSampleId, + proteinId=proteinId, + status=status, + eventType=eventType, + **page + ) + + +@router.get( + "/types", + response_model=paginated(schema.EventType), +) +def get_event_types( + session: str = Depends(filters.session), + sessionId: int = Depends(filters.sessionId), + blSampleId: int = Depends(filters.blSampleId), + proteinId: int = Depends(filters.proteinId), +) -> Paged[schema.EventType]: + """Get a list of event types""" + return crud.get_event_types( + session=session, + sessionId=sessionId, + blSampleId=blSampleId, + proteinId=proteinId, + ) diff --git a/pyispyb/core/routes/labcontacts.py b/pyispyb/core/routes/labcontacts.py new file mode 100644 index 00000000..57ae1cc9 --- /dev/null +++ b/pyispyb/core/routes/labcontacts.py @@ -0,0 +1,96 @@ +import logging + +from fastapi import Depends, HTTPException, status +from ispyb import models + +from pyispyb.dependencies import pagination +from pyispyb.app.extensions.database.utils import Paged +from pyispyb.app.base import AuthenticatedAPIRouter +from pyispyb import filters + +from ..modules import labcontacts as crud +from ..schemas import labcontacts as schema +from ..schemas.utils import paginated, make_optional + + +logger = logging.getLogger(__name__) +router = AuthenticatedAPIRouter(prefix="/labcontacts", tags=["Lab Contacts"]) + + +@router.get("", response_model=paginated(schema.LabContact)) +def get_lab_contacts( + proposal: str = Depends(filters.proposal), + page: dict[str, int] = Depends(pagination), +) -> Paged[models.LabContact]: + """Get a list of lab contacts""" + return crud.get_labcontacts(proposal=proposal, **page) + + +@router.get( + "/{labContactId}", + response_model=schema.LabContact, + responses={404: {"description": "No such contact"}}, +) +def get_lab_contact(labContactId: int) -> models.LabContact: + """Get a lab contact""" + users = crud.get_labcontacts( + labContactId=labContactId, + skip=0, + limit=1, + ) + try: + return users.first + except IndexError: + raise HTTPException(status_code=404, detail="Lab contact not found") + + +@router.post( + "", + response_model=schema.LabContact, + status_code=status.HTTP_201_CREATED, +) +def create_lab_contact(labcontact: 
schema.LabContactCreate) -> models.LabContact: + """Create a new lab contact""" + try: + return crud.create_labcontact( + labcontact=labcontact, + ) + except IndexError: + raise HTTPException(status_code=404, detail="No such proposal") + + +LABCONTACT_UPDATE_EXCLUDED = { + "proposalId": True, + "Person": { + "givenName": True, + "familyName": True, + "Laboratory": {"laboratoryExtPk": True}, + }, +} + + +@router.patch( + "/{labContactId}", + response_model=schema.LabContact, + responses={ + 404: {"description": "No such lab contact"}, + 400: {"description": "Could not update lab contact"}, + }, +) +def update_lab_contact( + labContactId: int, + labContact: make_optional( + schema.LabContactCreate, + exclude=LABCONTACT_UPDATE_EXCLUDED, + ), +): + """Update a Lab Contact""" + try: + return crud.update_labcontact(labContactId, labContact) + except IndexError: + raise HTTPException(status_code=404, detail="Lab contact not found") + except Exception: + logger.exception( + f"Could not update labcontact `{labContactId}` with payload `{labContact}`" + ) + raise HTTPException(status_code=400, detail="Could not update lab contact") diff --git a/pyispyb/core/routes/legacy/__init__.py b/pyispyb/core/routes/legacy/__init__.py new file mode 100644 index 00000000..369e5a6a --- /dev/null +++ b/pyispyb/core/routes/legacy/__init__.py @@ -0,0 +1,27 @@ +import os +import logging +from importlib import import_module +from fastapi import FastAPI + +from .base import router + +logger = logging.getLogger(__name__) + + +def init_app(app: FastAPI, prefix: str = None, **kwargs): + """Init app routes.""" + if not app.db_options.enable_legacy_routes: + logger.info("Legacy routes disabled") + return + + for module_name in os.listdir(os.path.dirname(__file__)): + if not module_name.startswith("__") and module_name.endswith(".py"): + try: + logger.info(f"importing {module_name}") + module = import_module(".%s" % module_name[:-3], package=__name__) + if hasattr(module, "router"): + app.include_router(module.router, prefix=prefix) + except Exception: + logger.exception(f"Could not import module `{module_name}`") + + app.include_router(router, prefix=prefix) diff --git a/pyispyb/core/routes/legacy/base.py b/pyispyb/core/routes/legacy/base.py new file mode 100644 index 00000000..1eb8959f --- /dev/null +++ b/pyispyb/core/routes/legacy/base.py @@ -0,0 +1,35 @@ +from fastapi import Depends, HTTPException +from fastapi.routing import APIRoute + +from pyispyb.app.base import BaseRouter +from pyispyb.app.extensions.auth.token import set_token_data +from pyispyb.app.extensions.auth.bearer import verify_jwt + + +async def token(token: str): + decoded = verify_jwt(token) + if not decoded: + raise HTTPException(status_code=401, detail="Invalid token or expired token.") + + set_token_data(decoded) + return token + + +def custom_generate_unique_id(route: APIRoute): + res = f"{route.name}-legacy_token" + return res + + +class LegacyAPIRouter(BaseRouter): + def __init__(self, *args, **kwargs): + super().__init__( + *args, + dependencies=[Depends(token)], + **kwargs, + generate_unique_id_function=custom_generate_unique_id, + ) + + +router = LegacyAPIRouter( + prefix="/legacy", tags=["Legacy with token in path ⚠️ only for compatibility ⚠️"] +)
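A standalone sketch of the token mechanism behind LegacyAPIRouter above: because the router-level dependency takes a parameter named token, FastAPI binds it to the {token} placeholder in each legacy route path (the equality check below is a stand-in for the real verify_jwt call):

from fastapi import Depends, FastAPI, HTTPException

app = FastAPI()

async def token(token: str):
    # Stand-in validation; the router above calls verify_jwt() and set_token_data().
    if token != "expected-token":
        raise HTTPException(status_code=401, detail="Invalid token or expired token.")
    return token

@app.get("/legacy/{token}/example", dependencies=[Depends(token)])
def example():
    return {"ok": True}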
diff --git a/pyispyb/em/modules/__init__.py b/pyispyb/core/routes/legacy/data_collections.py similarity index 52% rename from pyispyb/em/modules/__init__.py rename to pyispyb/core/routes/legacy/data_collections.py index 1e33360b..5fab0bcc 100644 --- a/pyispyb/em/modules/__init__.py +++ b/pyispyb/core/routes/legacy/data_collections.py @@ -19,25 +19,30 @@ along with py-ispyb. If not, see . """ +from fastapi import Depends +from pyispyb.app.base import AuthenticatedAPIRouter +from pyispyb.core.modules.legacy import data_collections +from pyispyb.core.routes.legacy.dependencies import session_authorisation -__license__ = "LGPLv3+" +from .base import router as legacy_router +router = AuthenticatedAPIRouter( + prefix="/legacy/data_collections", + tags=["Data collections - legacy with header token"], +) -import os -from importlib import import_module +__license__ = "LGPLv3+" -def init_app(app, **kwargs): - """ - Inits extensions. +@legacy_router.get( + "/{token}/proposal/session/{session_id}/list", +) +@router.get("/groups/session/{session_id}") +def get(session_id: int = Depends(session_authorisation)): + """Get data collection groups for session. Args: - app (Flask app): [description] + session_id (str): session id """ - - for module_name in os.listdir(os.path.dirname(__file__)): - if not module_name.startswith("__") and module_name.endswith(".py"): - module = import_module(".%s" % module_name[:-3], package=__name__) - if hasattr(module, "init_app"): - module.init_app(app, **kwargs) + return data_collections.get_data_collections_groups(session_id) diff --git a/pyispyb/core/routes/legacy/dependencies.py b/pyispyb/core/routes/legacy/dependencies.py new file mode 100644 index 00000000..4667be2f --- /dev/null +++ b/pyispyb/core/routes/legacy/dependencies.py @@ -0,0 +1,78 @@ +from fastapi import HTTPException +from pyispyb.core.modules.legacy.proposal import ( + find_proposal_id, + login_authorized_for_proposal, +) +from pyispyb.core.modules.legacy.session import login_authorized_for_session +from pyispyb.app.globals import g + + +def proposal_authorisation(proposal_id: str): + proposal_id = find_proposal_id(proposal_id) + + permissions = g.permissions + login = g.login + + msg = "" + + if "all_proposals" in permissions: + return proposal_id + elif "own_proposals" in permissions: + is_authorized = login_authorized_for_proposal(login, proposal_id) + if is_authorized: + return proposal_id + else: + msg = ( + "User %s (permissions assigned: %s) is not authorized to access proposal %s." + % ( + login, + str(permissions), + str(proposal_id), + ) + ) + else: + msg = ( + "User %s (permissions assigned: %s) has no appropriate permissions (%s) to execute method." + % ( + login, + str(permissions), + str(["all_proposals", "own_proposals"]), + ) + ) + + raise HTTPException(status_code=403, detail=msg) + + +def session_authorisation(session_id: str): + + permissions = g.permissions + login = g.login + + msg = "" + + if "all_sessions" in permissions: + return session_id + elif "own_sessions" in permissions: + is_authorized = login_authorized_for_session(login, session_id) + if is_authorized: + return session_id + else: + msg = ( + "User %s (permissions assigned: %s) is not authorized to access session %s." + % ( + login, + str(permissions), + str(session_id), + ) + ) + else: + msg = ( + "User %s (permissions assigned: %s) has no appropriate permissions (%s) to execute method." + % ( + login, + str(permissions), + str(["all_sessions", "own_sessions"]), + ) + ) + + raise HTTPException(status_code=403, detail=msg)
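The two dependencies above follow one rule: an "all_*" permission grants access, an "own_*" permission requires a per-entity check, and anything else is a 403. A condensed, pure-function sketch of that rule (how permissions and login are obtained from the request context is left out):

from typing import Callable
from fastapi import HTTPException

def authorise(
    entity_id: int,
    permissions: list[str],
    login: str,
    all_perm: str,
    own_perm: str,
    check: Callable[[str, int], bool],
) -> int:
    # e.g. all_perm="all_sessions", own_perm="own_sessions",
    # check=login_authorized_for_session
    if all_perm in permissions:
        return entity_id
    if own_perm in permissions and check(login, entity_id):
        return entity_id
    raise HTTPException(
        status_code=403, detail=f"User {login} is not authorized to access {entity_id}"
    )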
+ % ( + login, + str(permissions), + str(["all_sessions", "own_sessions"]), + ) + ) + + raise HTTPException(status_code=403, detail=msg) diff --git a/pyispyb/core/routes/legacy/em.py b/pyispyb/core/routes/legacy/em.py new file mode 100644 index 00000000..7b0a83af --- /dev/null +++ b/pyispyb/core/routes/legacy/em.py @@ -0,0 +1,234 @@ +__license__ = "LGPLv3+" + +from fastapi import Depends, HTTPException +from fastapi.responses import FileResponse +from pyispyb.app.base import AuthenticatedAPIRouter +from pyispyb.core.modules.legacy.proposal import find_proposal_id + +from pyispyb.core.modules.legacy import em +from pyispyb.core.routes.legacy.dependencies import ( + proposal_authorisation, + session_authorisation, +) + +from .base import router as legacy_router + +router = AuthenticatedAPIRouter( + prefix="/legacy/em", tags=["EM - legacy with header token"] +) + + +############################ +# MOVIES # +############################ + + +@legacy_router.get( + "/{token}/proposal/{proposal_id}/em/datacollection/{datacollection_id}/movie/all", +) +@router.get("/proposal/{proposal_id}/datacollection/{datacollection_id}/movies") +def get_movies( + datacollection_id: int, proposal_id: str = Depends(proposal_authorisation) +): + """Get movie data for a data collection. + + Args: + proposal_id (str): proposal id or name + datacollection_id (str): data collection id + """ + proposal_id = find_proposal_id(proposal_id) + return em.get_movies_data_by_datacollection_id(proposal_id, datacollection_id) + + +@legacy_router.get( + "/{token}/proposal/{proposal_id}/em/datacollection/{datacollection_id}/movie/{movie_id}/thumbnail", + response_class=FileResponse, +) +@router.get("/proposal/{proposal_id}/movie/{movie_id}/thumbnail") +def get_movie_thumbnail( + movie_id: int, proposal_id: str = Depends(proposal_authorisation) +): + """Get thumbnails for movie. + + Args: + proposal_id (str): proposal id or name + movie_id (str): movie id + """ + proposal_id = find_proposal_id(proposal_id) + path = em.get_movie_thumbnails(proposal_id, movie_id) + if path: + path = path["movie_thumbnail"] + if path: + return path + else: + raise HTTPException(status_code=404, detail="Movie thumbnail not found") + + +@legacy_router.get( + "/{token}/proposal/{proposal_id}/em/datacollection/{datacollection_id}/movie/{movie_id}/motioncorrection/thumbnail", + response_class=FileResponse, +) +@router.get("/proposal/{proposal_id}/movie/{movie_id}/thumbnail/motioncorrection") +def get_motion_thumbnail( + movie_id: int, proposal_id: str = Depends(proposal_authorisation) +): + """Get motion correction thumbnail for movie. + + Args: + proposal_id (str): proposal id or name + movie_id (str): movie id + """ + proposal_id = find_proposal_id(proposal_id) + path = em.get_movie_thumbnails(proposal_id, movie_id) + if path: + path = path["motion_correction_thumbnail"] + if path: + return path + else: + raise HTTPException(status_code=404, detail="Motion correction thumbnail not found")
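The thumbnail handlers in this file differ only in which key of the em.get_movie_thumbnails() result they serve; a shared helper along these lines could collapse them (a sketch assuming the result is a dict or None, as the handlers above suggest):

from typing import Callable
from fastapi import HTTPException

def thumbnail_path(
    get_thumbnails: Callable, proposal_id: int, movie_id: int, key: str
) -> str:
    # get_thumbnails stands in for em.get_movie_thumbnails; it may return None.
    paths = get_thumbnails(proposal_id, movie_id)
    path = paths.get(key) if paths else None
    if not path:
        raise HTTPException(status_code=404, detail="Movie thumbnail not found")
    return path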
+ + +@legacy_router.get( + "/{token}/proposal/{proposal_id}/em/datacollection/{datacollection_id}/movie/{movie_id}/ctf/thumbnail", + response_class=FileResponse, +) +@router.get("/proposal/{proposal_id}/movie/{movie_id}/thumbnail/ctf") +def get_ctf_thumbnail( + movie_id: int, proposal_id: str = Depends(proposal_authorisation) +): + """Get CTF thumbnail for movie. + + Args: + proposal_id (str): proposal id or name + movie_id (str): movie id + """ + proposal_id = find_proposal_id(proposal_id) + path = em.get_movie_thumbnails(proposal_id, movie_id) + if path: + path = path["ctf_thumbnail"] + if path: + return path + else: + raise HTTPException(status_code=404, detail="CTF thumbnail not found") + + +@legacy_router.get( + "/{token}/proposal/{proposal_id}/em/datacollection/{datacollection_id}/movie/{movie_id}/motioncorrection/drift", + response_class=FileResponse, +) +@router.get("/proposal/{proposal_id}/movie/{movie_id}/plot/motioncorrectiondrift") +def get_motion_drift_thumbnail( + movie_id: int, proposal_id: str = Depends(proposal_authorisation) +): + """Get motion correction drift thumbnail for movie. + + Args: + proposal_id (str): proposal id or name + movie_id (str): movie id + """ + proposal_id = find_proposal_id(proposal_id) + path = em.get_movie_thumbnails(proposal_id, movie_id) + if path: + path = path["motion_correction_drift"] + if path: + return path + else: + raise HTTPException(status_code=404, detail="Motion correction drift plot not found") + + +############################ +# STATS # +############################ + + +@legacy_router.get( + "/{token}/proposal/{proposal}/em/session/{session_id}/stats", +) +@router.get("/session/{session_id}/stats") +def get_stats_session(session_id: int = Depends(session_authorisation)): + """Get stats for session. + + Args: + session_id (str): session id + """ + return em.get_stats_by_session_id(session_id) + + +@legacy_router.get( + "/proposal/{proposal_id}/data_collections/{data_collections_ids}/stats", +) +@router.get("/proposal/{proposal_id}/data_collections/{data_collections_ids}/stats") +def get_stats_dcids( + data_collections_ids: str, proposal_id: str = Depends(proposal_authorisation) +): + """Get stats for data collection ids. + + Args: + proposal_id (str): proposal id or name + data_collections_ids (str): comma-separated datacollection ids + """ + proposal_id = find_proposal_id(proposal_id) + return em.get_stats_by_data_collections_ids(proposal_id, data_collections_ids) + + +@legacy_router.get( + "/proposal/{proposal_id}/data_collections_group/{data_collections_group_id}/stats", +) +@router.get( + "/proposal/{proposal_id}/data_collections_group/{data_collections_group_id}/stats" +) +def get_stats_group( + data_collections_group_id: int, proposal_id: str = Depends(proposal_authorisation) +): + """Get stats for datacollection group. + + Args: + proposal_id (str): proposal id or name + data_collections_group_id (str): data collection group id + """ + proposal_id = find_proposal_id(proposal_id) + return em.get_stats_by_data_collections_group_id( + proposal_id, data_collections_group_id + )
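get_stats_dcids above takes data_collections_ids as one comma-separated path segment and hands it to the em module as-is; a defensive parse of that format would look roughly like this (illustrative only, not code from this diff):

from fastapi import HTTPException

def parse_id_list(raw: str) -> list[int]:
    # "1001,1002" -> [1001, 1002]; reject empty and non-numeric input.
    try:
        ids = [int(part) for part in raw.split(",") if part.strip()]
    except ValueError:
        raise HTTPException(status_code=422, detail="Ids must be comma-separated integers")
    if not ids:
        raise HTTPException(status_code=422, detail="No ids supplied")
    return ids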
+ + +############################ +# DATA COLLECTION # +############################ + + +@legacy_router.get( + "/{token}/proposal/{proposal_id}/em/datacollection/session/{session_id}/list", +) +@router.get("/proposal/{proposal_id}/session/{session_id}/data_collections/groups") +def get_groups_for_session( + proposal_id: str = Depends(proposal_authorisation), + session_id: int = Depends(session_authorisation), +): + """Get datacollection groups for session. + + Args: + proposal_id (str): proposal id or name + session_id (str): session id + """ + proposal_id = find_proposal_id(proposal_id) + return em.get_data_collections_groups(proposal_id, session_id) + + +############################ +# CLASSIFICATION # +############################ + + +@legacy_router.get( + "/{token}/proposal/{proposal_id}/em/session/{session_id}/classification" +) +@router.get("/session/{session_id}/classification") +def get_classification( + session_id: int = Depends(session_authorisation), +): + """Get classification for session. + + Args: + session_id (str): session id + """ + return em.get_classification_by_session_id(session_id) diff --git a/pyispyb/core/routes/legacy/proposals.py b/pyispyb/core/routes/legacy/proposals.py new file mode 100644 index 00000000..c7771c0c --- /dev/null +++ b/pyispyb/core/routes/legacy/proposals.py @@ -0,0 +1,67 @@ +""" +Project: py-ispyb. + +https://github.com/ispyb/py-ispyb + +This file is part of py-ispyb software. + +py-ispyb is free software: you can redistribute it and/or modify +it under the terms of the GNU Lesser General Public License as published by +the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +py-ispyb is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public License +along with py-ispyb. If not, see . +""" + + +__license__ = "LGPLv3+" + +from fastapi import Depends +from pyispyb.app.base import AuthenticatedAPIRouter +from pyispyb.app.extensions.auth.bearer import permission_required +from pyispyb.app.globals import g +from pyispyb.core.modules.legacy import proposal +from pyispyb.core.routes.legacy.dependencies import proposal_authorisation + +from .base import router as legacy_router + +router = AuthenticatedAPIRouter( + prefix="/legacy/proposals", tags=["Proposals - legacy with header token"] +) + + +@legacy_router.get( + "/{token}/proposal/list", +) +@router.get( + "", +) +def get_proposals( + permissions=Depends(permission_required("any", ["own_proposals", "all_proposals"])) +): + """Get all proposals that the user is allowed to access.""" + if "all_proposals" in permissions: + return proposal.get_proposals_infos_all() + return proposal.get_proposals_infos_login(g.login) + + +@legacy_router.get( + "/{token}/proposal/{proposal_id}/info/get", +) +@router.get( + "/{proposal_id}", +) +def get_proposal(proposal_id: str = Depends(proposal_authorisation)): + """Get proposal information. + + Args: + proposal_id (str): proposal id or name + """ + proposal_id = proposal.find_proposal_id(proposal_id) + return proposal.get_proposal_infos(proposal_id) diff --git a/pyispyb/core/routes/legacy/sessions.py b/pyispyb/core/routes/legacy/sessions.py new file mode 100644 index 00000000..8c991668 --- /dev/null +++ b/pyispyb/core/routes/legacy/sessions.py @@ -0,0 +1,94 @@ +""" +Project: py-ispyb. + +https://github.com/ispyb/py-ispyb + +This file is part of py-ispyb software. + +py-ispyb is free software: you can redistribute it and/or modify +it under the terms of the GNU Lesser General Public License as published by +the Free Software Foundation, either version 3 of the License, or +(at your option) any later version.
+ +py-ispyb is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public License +along with py-ispyb. If not, see . +""" + +from fastapi import Depends +from pyispyb.app.base import AuthenticatedAPIRouter +from pyispyb.app.extensions.auth.bearer import permission_required +from pyispyb.core.modules.legacy.proposal import find_proposal_id +from pyispyb.core.modules.legacy import session +from pyispyb.app.globals import g + +__license__ = "LGPLv3+" + + +from .base import router as legacy_router + +router = AuthenticatedAPIRouter( + prefix="/legacy/sessions", tags=["Sessions - legacy with header token"] +) + + +@legacy_router.get( + "/{token}/session/list", +) +@router.get( + "", +) +def get_sessions( + permissions=Depends(permission_required("any", ["own_sessions", "all_sessions"])) +): + """Get all sessions that user is allowed to access.""" + if "all_sessions" in permissions: + return session.get_session_infos_all() + return session.get_session_infos_login(g.login) + + +@legacy_router.get( + "/{token}/proposal/session/date/{start_date}/{end_date}/list", +) +@router.get( + "/date/{start_date}/{end_date}", +) +def get_sessions_by_dates( + start_date: str, + end_date: str, + permissions=Depends(permission_required("any", ["own_sessions", "all_sessions"])), +): + """Get all sessions between two dates that user is allowed to access. + + Args: + start_date (str): start date + end_date (str): end date + """ + if "all_sessions" in permissions: + return session.get_session_infos_all_dates(start_date, end_date) + return session.get_session_infos_login_dates(g.login, start_date, end_date) + + +@legacy_router.get( + "/{token}/proposal/{proposal_id}/session/list", +) +@router.get( + "/proposal/{proposal_id}", +) +def get_sessions_for_proposal( + proposal_id: str, + permissions=Depends(permission_required("any", ["own_sessions", "all_sessions"])), +): + """Get all sessions for proposal that user is allowed to access. + + Args: + proposal_id (str): proposal id or name + """ + proposal_id = find_proposal_id(proposal_id) + if "all_sessions" in permissions: + return session.get_session_infos_all_proposal(proposal_id) + return session.get_session_infos_login_proposal(g.login, proposal_id) diff --git a/pyispyb/core/routes/mapping.py b/pyispyb/core/routes/mapping.py new file mode 100644 index 00000000..506156c5 --- /dev/null +++ b/pyispyb/core/routes/mapping.py @@ -0,0 +1,115 @@ +import logging +from typing import Optional + +from fastapi import Depends, HTTPException, Query +from fastapi.responses import StreamingResponse +from ispyb import models + +from ...dependencies import pagination +from ...app.extensions.database.utils import Paged +from ...app.base import AuthenticatedAPIRouter +from ... 
import filters + +from ..modules import mapping as crud +from ..schemas import mapping as schema +from ..schemas.utils import paginated + + +logger = logging.getLogger(__name__) +router = AuthenticatedAPIRouter(prefix="/mapping", tags=["Mapping"]) + + +@router.get("/rois", response_model=paginated(schema.MapROI)) +def get_map_rois( + blSampleId: int = Depends(filters.blSampleId), + xrfFluorescenceMappingROIId: Optional[int] = Query( + None, title="xrfFluorescenceMapping ROI id" + ), + page: dict[str, int] = Depends(pagination), +) -> Paged[models.XRFFluorescenceMappingROI]: + """Get a list of map rois""" + return crud.get_map_rois( + blSampleId=blSampleId, + xrfFluorescenceMappingROIId=xrfFluorescenceMappingROIId, + **page + ) + + +@router.get("", response_model=paginated(schema.Map)) +def get_maps( + dataCollectionId: int = Depends(filters.dataCollectionId), + dataCollectionGroupId: int = Depends(filters.dataCollectionGroupId), + blSampleId: int = Depends(filters.blSampleId), + blSubSampleId: int = Depends(filters.blSubSampleId), + xrfFluorescenceMappingId: int = Query(None, title="XrfFluorescenceMapping id"), + page: dict[str, int] = Depends(pagination), +) -> Paged[models.XRFFluorescenceMapping]: + """Get a list of maps""" + return crud.get_maps( + blSampleId=blSampleId, + blSubSampleId=blSubSampleId, + dataCollectionId=dataCollectionId, + dataCollectionGroupId=dataCollectionGroupId, + xrfFluorescenceMappingId=xrfFluorescenceMappingId, + **page + ) + + +@router.get("/histogram/{xrfFluorescenceMappingId}", response_model=schema.MapHistogram) +def get_map_histogram( + xrfFluorescenceMappingId: int, +): + """Get a map histogram""" + maps = crud.get_maps( + xrfFluorescenceMappingId=xrfFluorescenceMappingId, skip=0, limit=1 + ) + try: + map_ = maps.first + except IndexError: + raise HTTPException(status_code=404, detail="Map not found") + + return crud.generate_histogram(map_) + + +@router.get("/pixel/{xrfFluorescenceMappingId}", response_model=schema.MapPixelValue) +def get_map_pixel_value( + xrfFluorescenceMappingId: int, + x: int = Query(None, title="X position"), + y: int = Query(None, title="Y position"), +): + """Get a map pixel value""" + maps = crud.get_maps( + xrfFluorescenceMappingId=xrfFluorescenceMappingId, skip=0, limit=1 + ) + try: + map_ = maps.first + except IndexError: + raise HTTPException(status_code=404, detail="Map not found") + + data = crud.shape_map(map_) + + if y < len(data): + if x < len(data[y]): + return { + "xrfFluorescenceMappingId": map_.xrfFluorescenceMappingId, + "x": x, + "y": y, + "value": data[y][x], + } + + raise HTTPException(status_code=404, detail="Pixel position out of range") + + +@router.get("/{xrfFluorescenceMappingId}") +def get_map( + xrfFluorescenceMappingId: int, +): + """Get a map in image format""" + maps = crud.get_maps( + xrfFluorescenceMappingId=xrfFluorescenceMappingId, skip=0, limit=1 + ) + try: + map_ = maps.first + except IndexError: + raise HTTPException(status_code=404, detail="Map not found") + + image = crud.generate_map_image(map_) + return StreamingResponse(image, media_type="image/png") diff --git a/pyispyb/core/routes/persons.py b/pyispyb/core/routes/persons.py new file mode 100644 index 00000000..00399a6b --- /dev/null +++ b/pyispyb/core/routes/persons.py @@ -0,0 +1,35 @@ +import logging +from typing import Optional + +from fastapi import Depends +from ispyb import models + +from ...dependencies import pagination +from ...app.extensions.database.utils import Paged +from ...app.base import AuthenticatedAPIRouter +from ... 
import filters + +from ..modules import persons as crud +from ..schemas import persons as schema +from ..schemas.utils import paginated + + +logger = logging.getLogger(__name__) +router = AuthenticatedAPIRouter(prefix="/persons", tags=["People"]) + + +@router.get("", response_model=paginated(schema.Person)) +def get_persons( + proposal: str = Depends(filters.proposal), + sessionId: int = Depends(filters.sessionId), + showAll: Optional[bool] = False, + page: dict[str, int] = Depends(pagination), +) -> Paged[models.Person]: + """Get a list of people""" + return crud.get_persons( + proposal=proposal, + sessionId=sessionId, + showAll=showAll, + withAuthorization=True, + **page + ) diff --git a/pyispyb/core/routes/phasing.py b/pyispyb/core/routes/phasing.py deleted file mode 100644 index 3826f6af..00000000 --- a/pyispyb/core/routes/phasing.py +++ /dev/null @@ -1,64 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . - -""" - - -__license__ = "LGPLv3+" - -from flask import request, current_app -from flask_restx._http import HTTPStatus - -from pyispyb.flask_restx_patched import Resource - -from pyispyb.app.extensions.api import api_v1, Namespace -from pyispyb.app.extensions.auth import token_required, role_required - -# from pyispyb.core.schemas import phasing_program_run as phasing_program_run_schemas -from pyispyb.core.modules import phasing - - -api = Namespace("Phasing", description="Phasing related namespace", path="/phasing") -api_v1.add_namespace(api) - - -@api.route("", endpoint="phasing_results") -@api.doc(security="apikey") -class PhasingResults(Resource): - """Allows to get all phasing_results""" - - @token_required - @role_required - def get(self): - """Returns phasing_results based on query parameters""" - - api.logger.info("Get all phasing_results") - return phasing.get_phasing_results(request) - - @token_required - @role_required - # @api.expect(phasing_result_schemas.f_schema) - # @api.marshal_with(phasing_result_schemas.f_schema, code=201) - # @api.errorhandler(FakeException) - def post(self): - """Adds a new phasing_result""" - - api.logger.info("Inserts a new phasing_result") - return phasing.add_phasing_results(api.payload) diff --git a/pyispyb/core/routes/processings.py b/pyispyb/core/routes/processings.py new file mode 100644 index 00000000..de280f47 --- /dev/null +++ b/pyispyb/core/routes/processings.py @@ -0,0 +1,162 @@ +import logging +import os + +from fastapi import Depends, HTTPException, Query +from fastapi.responses import FileResponse +from ispyb import models +from pydantic import BaseModel, parse_obj_as +from pydantic.types import Json + +from ...config import settings +from ...app.base import AuthenticatedAPIRouter +from ...app.extensions.database.utils import Paged +from ..schemas.utils import paginated +from ...dependencies import pagination +from ... 
import filters + +from ..modules import processings as crud +from ..schemas import processings as schema + + +logger = logging.getLogger(__name__) +router = AuthenticatedAPIRouter( + prefix="/processings", tags=["Processing Status and Results"] +) + + +class DataCollectionIds(BaseModel): + dataCollectionIds: list[int] + + +def dataCollectionIds( + dataCollectionIds: Json = Query( + "", title="List of data collection ids (JSON encoded)" + ) +) -> list[int]: + try: + obj: DataCollectionIds = parse_obj_as( + DataCollectionIds, {"dataCollectionIds": dataCollectionIds} + ) + if not len(obj.dataCollectionIds): + raise + + return obj.dataCollectionIds + except Exception: + logger.exception("Couldn't parse dataCollectionIds") + raise HTTPException(status_code=422, detail="Couldn't parse dataCollectionIds") + + +@router.get("/status", response_model=schema.ProcessingStatusesList) +def get_processing_statuses( + dataCollectionIds: list[int] = Depends(dataCollectionIds), +) -> schema.ProcessingStatusesList: + """Get processing statuses for a group of data collections""" + return crud.get_processing_status( + dataCollectionIds=dataCollectionIds, + ) + + +@router.get("/screenings", response_model=paginated(schema.Screening)) +def get_screening_results( + dataCollectionId: int = Depends(filters.dataCollectionId), + page: dict[str, int] = Depends(pagination), +) -> Paged[models.Screening]: + """Get a list of screening results from `Screening`""" + return crud.get_screening_results( + dataCollectionId=dataCollectionId, + **page, + ) + + +@router.get("", response_model=paginated(schema.AutoProcProgram)) +def get_processing_results( + dataCollectionId: int = Depends(filters.dataCollectionId), + page: dict[str, int] = Depends(pagination), +) -> Paged[models.AutoProcProgram]: + """Get a list of processing results from `ProcessingJob`""" + return crud.get_processing_results( + dataCollectionId=dataCollectionId, + **page, + ) + + +@router.get( + "/auto-integrations", response_model=paginated(schema.AutoProcProgramIntegration) +) +def get_auto_integration_results( + dataCollectionId: int = Depends(filters.dataCollectionId), + page: dict[str, int] = Depends(pagination), +) -> Paged[models.AutoProcProgram]: + """Get a list of auto-integration results from `AutoProcIntegration`""" + return crud.get_autointegration_results( + dataCollectionId=dataCollectionId, + **page, + ) + + +@router.get("/messages", response_model=paginated(schema.AutoProcProgramMessage)) +def get_processing_messages( + dataCollectionId: int = Depends(filters.dataCollectionId), + autoProcProgramMessageId: int = None, + page: dict[str, int] = Depends(pagination), +) -> Paged[schema.AutoProcProgramMessage]: + """Get a list of processing messages""" + return crud.get_processing_messages( + dataCollectionId=dataCollectionId, + autoProcProgramMessageId=autoProcProgramMessageId, + **page, + ) + + +@router.get("/messages/status", response_model=schema.AutoProcProgramMessageStatuses) +def get_processing_messages_status( + dataCollectionIds: list[int] = Depends(dataCollectionIds), +) -> schema.AutoProcProgramMessageStatuses: + """Get the processing messages status""" + return crud.get_processing_message_status( + dataCollectionIds=dataCollectionIds, + ) + + +@router.get("/attachments", response_model=paginated(schema.AutoProcProgramAttachment)) +def get_processing_attachments( + autoProcProgramId: int = None, + autoProcProgramAttachmentId: int = None, + page: dict[str, int] = Depends(pagination), +) -> Paged[models.AutoProcProgramAttachment]: + """Get a 
list of auto processing attachments""" + return crud.get_processing_attachments( + autoProcProgramId=autoProcProgramId, + autoProcProgramAttachmentId=autoProcProgramAttachmentId, + **page, + ) + + +@router.get("/attachments/{autoProcProgramAttachmentId}", response_class=FileResponse) +def get_processing_attachment( + autoProcProgramAttachmentId: int, +): + """Get an auto processing attachment""" + attachments = crud.get_processing_attachments( + autoProcProgramAttachmentId=autoProcProgramAttachmentId, + skip=0, + limit=1, + ) + + try: + attachment = attachments.first + file_path = os.path.join(attachment.filePath, attachment.fileName) + + if settings.path_map: + file_path = settings.path_map + file_path + + if not os.path.exists(file_path): + logger.warning( + f"autoProcProgramAttachmentId `{attachment.autoProcProgramAttachmentId}` file `{file_path}` does not exist on disk" + ) + raise IndexError + return FileResponse(file_path, filename=attachment.fileName) + except IndexError: + raise HTTPException( + status_code=404, detail="Autoproc program attachment not found" + ) diff --git a/pyispyb/core/routes/proposals.py b/pyispyb/core/routes/proposals.py index 5de96960..73e400cc 100644 --- a/pyispyb/core/routes/proposals.py +++ b/pyispyb/core/routes/proposals.py @@ -1,156 +1,44 @@ -""" -Project: py-ispyb. +from fastapi import Depends, HTTPException +from ispyb import models -https://github.com/ispyb/py-ispyb +from pyispyb.dependencies import pagination +from pyispyb.app.extensions.database.utils import Paged +from pyispyb import filters +from pyispyb.app.base import AuthenticatedAPIRouter -This file is part of py-ispyb software. +from ..modules import proposals as crud +from ..schemas import proposals as schema +from ..schemas.utils import paginated -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. +router = AuthenticatedAPIRouter(prefix="/proposals", tags=["Proposals"]) -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -Proposal namespace with enpoint allowing to access proposals. 
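Every new router in this diff takes page: dict[str, int] = Depends(pagination) and splats it into the crud layer. The dependency itself is outside this diff; given the skip/limit keywords used throughout, a plausible shape (defaults here are guesses) is:

from fastapi import Query

def pagination(
    skip: int = Query(0, ge=0, description="Results to skip"),
    limit: int = Query(25, ge=1, description="Maximum results to return"),
) -> dict[str, int]:
    # Handlers splat this into crud calls, e.g. crud.get_proposals(skip=..., limit=...).
    return {"skip": skip, "limit": limit}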
+@router.get("", response_model=paginated(schema.Proposal)) +def get_proposals( + search: str = Depends(filters.search), + page: dict[str, int] = Depends(pagination), +) -> Paged[models.Proposal]: + """Get a list of proposals""" + return crud.get_proposals(search=search, **page) -Example routes: -[GET] /ispyb/api/v1/proposals - Retrieves a list of proposals -[GET] /ispyb/api/v1/proposals?proposalType=MX - Retrieves a list of MX proposals -[POST] /ispyb/api/v1/proposals - Creates a new proposal - -[GET] /ispyb/api/v1/proposals/1 - Retrieves proposal #1 -[PUT] /ispyb/api/v1/proposals/1 - Updates proposal #1 -[PATCH] /ispyb/api/v1/proposals/1 - Partially updates proposal #1 -[DELETE]/ispyb/api/v1/proposals/1 - Deletes proposal #1 -""" - - -__license__ = "LGPLv3+" - -from flask import request, current_app, abort -from flask_restx._http import HTTPStatus - -from pyispyb.flask_restx_patched import Resource - -from pyispyb.app.extensions.api import api_v1, Namespace -from pyispyb.app.extensions.auth import token_required, role_required -from pyispyb.core.schemas import proposal as proposal_schemas -from pyispyb.core.modules import contacts, proposal - - -api = Namespace( - "Proposals", description="Proposal related namespace", path="/proposals" +@router.get( + "/{proposal}", + response_model=schema.Proposal, + responses={404: {"description": "No such proposal"}}, ) -api_v1.add_namespace(api) - - -@api.route("", endpoint="proposals") -@api.doc(security="apikey") -class Proposals(Resource): - """Allows to get all proposals""" - - @token_required - @role_required - def get(self): - """Returns proposals based on query parameters""" - api.logger.info("Get all proposals") - user_info = contacts.get_person_info(request) - query_dict = request.args.to_dict() - if not user_info["is_admin"]: - proposal_ids = proposal.get_proposal_ids_by_person_id(user_info["personId"]) - query_dict["proposalId"] = proposal_ids - return proposal.get_proposals_by_query(query_dict) - - @token_required - @role_required - @api.expect(proposal_schemas.f_schema) - @api.marshal_with(proposal_schemas.f_schema, code=201) - def post(self): - """Adds a new proposal""" - api.logger.info("Inserts a new proposal") - return proposal.add_proposal(api.payload) - - -@api.route("/", endpoint="proposal_by_id") -@api.param("proposal_id", "Proposal id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.FOUND, description="Proposal found.", model=proposal_schemas.f_schema) -@api.response(code=HTTPStatus.NOT_FOUND, description="Proposal not found.") -class ProposalById(Resource): - """Allows to get/set/delete a proposal""" - - @token_required - @role_required - @api.doc(description="proposal_id should be an integer ") - @api.marshal_with(proposal_schemas.f_schema, skip_none=False, code=HTTPStatus.OK) - def get(self, proposal_id): - """Returns a proposal by proposalId""" - user_info = contacts.get_person_info(request) - if user_info["is_admin"] or proposal_id in user_info["proposal_ids"]: - return proposal.get_proposal_by_id(proposal_id) - else: - abort( - HTTPStatus.METHOD_NOT_ALLOWED, - "Permission denied. 
Proposal %d is not assigned to user %s" % ( - proposal_id, - user_info["login_name"] - ) - ) - - @token_required - @role_required - @api.expect(proposal_schemas.f_schema) - @api.marshal_with(proposal_schemas.f_schema, code=HTTPStatus.CREATED) - def put(self, proposal_id): - """Fully updates proposal with id proposal_id""" - current_app.logger.info("Update proposal %d" % proposal_id) - return proposal.update_proposal(proposal_id, api.payload) - - @token_required - @role_required - @api.expect(proposal_schemas.f_schema) - @api.marshal_with(proposal_schemas.f_schema, code=HTTPStatus.CREATED) - def patch(self, proposal_id): - """Partially updates proposal with id proposal_id""" - return proposal.patch_proposal(proposal_id, api.payload) - - @token_required - @role_required - def delete(self, proposal_id): - """Deletes a proposal by proposal_id""" - return proposal.delete_proposal(proposal_id) - - -@api.route("//info", endpoint="proposal_info_by_id") -@api.param("proposal_id", "Proposal id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.FOUND, description="Proposal info found.") -@api.response(code=HTTPStatus.NOT_FOUND, description="Proposal info not found.") -class ProposalInfoById(Resource): - """Returns full information of a proposal""" - - @token_required - @role_required - @api.doc(description="proposal_id should be an integer ") - # @api.marshal_with(proposal_desc_f_schema) - def get(self, proposal_id): - """Returns a full description of a proposal by proposalId""" - user_info = contacts.get_person_info(request) - if user_info["is_admin"] or proposal_id in user_info["proposal_ids"]: - return proposal.get_proposal_info_by_id(proposal_id) - else: - abort( - HTTPStatus.METHOD_NOT_ALLOWED, - "Permission denied. Proposal %d is not assigned to user %s" % ( - proposal_id, - user_info["login_name"] - ) - ) \ No newline at end of file +def get_proposal( + proposal: str = Depends(filters.proposal), +) -> models.Proposal: + """Get a proposal""" + proposals = crud.get_proposals( + proposal=proposal, + skip=0, + limit=1, + ) + + try: + return proposals.first + except IndexError: + raise HTTPException(status_code=404, detail="Proposal not found") diff --git a/pyispyb/core/routes/proteins.py b/pyispyb/core/routes/proteins.py new file mode 100644 index 00000000..079a6369 --- /dev/null +++ b/pyispyb/core/routes/proteins.py @@ -0,0 +1,53 @@ +from fastapi import Depends, HTTPException +from ispyb import models + +from ...dependencies import order_by_factory, pagination +from ...app.extensions.database.utils import Paged +from ... 
import filters +from ...app.base import AuthenticatedAPIRouter + +from ..modules import proteins as crud +from ..schemas import protein as schema +from ..schemas.utils import paginated + + +router = AuthenticatedAPIRouter(prefix="/proteins", tags=["Proteins"]) + + +@router.get("", response_model=paginated(schema.Protein)) +def get_proteins( + page: dict[str, int] = Depends(pagination), + proteinId: int = Depends(filters.proteinId), + proposal: str = Depends(filters.proposal), + search: str = Depends(filters.search), + sort_order: dict = Depends(order_by_factory(crud.ORDER_BY_MAP, "ProteinOrder")), +) -> Paged[models.Protein]: + """Get a list of proteins""" + return crud.get_proteins( + proteinId=proteinId, + proposal=proposal, + search=search, + sort_order=sort_order, + **page + ) + + +@router.get( + "/{proteinId}", + response_model=schema.Protein, + responses={404: {"description": "No such protein"}}, +) +def get_protein( + proteinId: int = Depends(filters.proteinId), +) -> models.Protein: + """Get a protein""" + proteins = crud.get_proteins( + proteinId=proteinId, + skip=0, + limit=1, + ) + + try: + return proteins.first + except IndexError: + raise HTTPException(status_code=404, detail="Protein not found") diff --git a/pyispyb/core/routes/responses.py b/pyispyb/core/routes/responses.py new file mode 100644 index 00000000..67aef5c5 --- /dev/null +++ b/pyispyb/core/routes/responses.py @@ -0,0 +1,14 @@ +from pydantic import BaseModel + + +class HTTPError(BaseModel): + detail: str + + class Config: + schema_extra = { + "example": {"detail": "HTTPException raised."}, + } + + +class Message(BaseModel): + message: str diff --git a/pyispyb/core/routes/samples.py b/pyispyb/core/routes/samples.py index 603cf3e0..b770940a 100644 --- a/pyispyb/core/routes/samples.py +++ b/pyispyb/core/routes/samples.py @@ -1,375 +1,149 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
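Nearly every detail route in this diff repeats the same block: fetch a one-item page, take .first, and map IndexError to a 404. A shared helper would express the pattern once (sketch only; Paged.first raising IndexError when empty is inferred from the handlers themselves):

from fastapi import HTTPException

def first_or_404(paged, detail: str):
    # paged is the Paged[...] container returned by the crud helpers.
    try:
        return paged.first
    except IndexError:
        raise HTTPException(status_code=404, detail=detail)

Typical call: return first_or_404(crud.get_proteins(proteinId=proteinId, skip=0, limit=1), "Protein not found").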
-""" -import io +import logging import os -from flask import request, send_file, current_app, abort -from pyispyb.flask_restx_patched import Resource, HTTPStatus - - -from pyispyb.app.utils import download_pdb_file -from pyispyb.app.extensions.api import api_v1, Namespace -from pyispyb.app.extensions.auth import token_required, role_required - - -from pyispyb.core.schemas import sample as sample_schemas -from pyispyb.core.schemas import crystal as crystal_schemas -from pyispyb.core.schemas import protein as protein_schemas -from pyispyb.core.schemas import diffraction_plan as diffraction_plan_schemas -from pyispyb.core.modules import sample, crystal, diffraction_plan, protein - - -__license__ = "LGPLv3+" - - -api = Namespace("Samples", description="Sample related namespace", path="/samples") -api_v1.add_namespace(api) - - -@api.route("", endpoint="samples") -@api.doc(security="apikey") -class Sample(Resource): - """Sample resource""" - - @token_required - @role_required - def get(self): - """Returns all sample items""" - return sample.get_samples_by_request(request) - - @token_required - @role_required - @api.expect(sample_schemas.f_schema) - @api.marshal_with(sample_schemas.f_schema, code=201) - def post(self): - """Adds a new sample item""" - return sample.add_sample(api.payload) - - -@api.route("/", endpoint="sample_by_id") -@api.param("sample_id", "Sample id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="Sample not found.") -class SampleById(Resource): - """Allows to get/set/delete a sample item""" - - @token_required - @role_required - @api.doc(description="sample_id should be an integer ") - @api.marshal_with(sample_schemas.f_schema, skip_none=False, code=HTTPStatus.OK) - def get(self, sample_id): - """Returns a sample by sampleId""" - return sample.get_sample_by_id(sample_id) - - @token_required - @role_required - @api.expect(sample_schemas.f_schema) - @api.marshal_with(sample_schemas.f_schema, code=HTTPStatus.CREATED) - def put(self, sample_id): - """Fully updates sample with sample_id""" - return sample.update_sample(sample_id, api.payload) - - @token_required - @role_required - @api.expect(sample_schemas.f_schema) - @api.marshal_with(sample_schemas.f_schema, code=HTTPStatus.CREATED) - def patch(self, sample_id): - """Partially updates sample with id sampleId""" - return sample.patch_sample(sample_id, api.payload) - - @token_required - @role_required - def delete(self, sample_id): - """Deletes a sample by sampleId""" - return sample.delete_sample(sample_id) - - -@api.route("/crystals", endpoint="crystals") -@api.doc(security="apikey") -class Crystals(Resource): - """Crystal resource""" - - @token_required - @role_required - def get(self): - """Returns all crystal items""" - query_dict = request.args.to_dict() - return crystal.get_crystals_by_query(query_dict) - - @token_required - @role_required - @api.expect(crystal_schemas.f_schema) - @api.marshal_with(crystal_schemas.f_schema, code=201) - def post(self): - """Adds a new crystal item""" - return crystal.add_crystal(api.payload) - - -@api.route("/crystals/", endpoint="crystal_by_id") -@api.param("crystal_id", "Crystal id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="Crystal not found.") -class CrystalById(Resource): - """Allows to get/set/delete a crystal item""" - - @token_required - @role_required - @api.doc(description="crystal_id should be an integer ") - @api.marshal_with(crystal_schemas.f_schema, skip_none=False, 
code=HTTPStatus.OK) - def get(self, crystal_id): - """Returns a crystal by crystalId""" - return crystal.get_crystal_by_id(crystal_id) - - @token_required - @role_required - @api.expect(crystal_schemas.f_schema) - @api.marshal_with(crystal_schemas.f_schema, code=HTTPStatus.CREATED) - def put(self, crystal_id): - """Fully updates crystal with crystal_id""" - return crystal.update_crystal(crystal_id, api.payload) - - @token_required - @role_required - @api.expect(crystal_schemas.f_schema) - @api.marshal_with(crystal_schemas.f_schema, code=HTTPStatus.CREATED) - def patch(self, crystal_id): - """Partially updates crystal with id crystalId""" - return crystal.patch_crystal(crystal_id, api.payload) +from typing import Optional + +from fastapi import Depends, HTTPException +from fastapi.responses import FileResponse +from ispyb import models + +from ...config import settings +from ...dependencies import pagination, order_by_factory +from ...app.extensions.database.utils import Paged +from ... import filters +from ...app.base import AuthenticatedAPIRouter +from ..modules import samples as crud +from ..schemas import samples as schema +from ..schemas.utils import paginated + +logger = logging.getLogger(__name__) +router = AuthenticatedAPIRouter(prefix="/samples", tags=["Samples"]) + + +@router.get("/sub", response_model=paginated(schema.SubSample)) +def get_subsamples( + page: dict[str, int] = Depends(pagination), + blSampleId: int = Depends(filters.blSampleId), + proteinId: int = Depends(filters.proteinId), + proposal: str = Depends(filters.proposal), + containerId: int = Depends(filters.containerId), + sort_order: dict = Depends( + order_by_factory(crud.SUBSAMPLE_ORDER_BY_MAP, "SubSampleOrder") + ), +) -> Paged[models.BLSubSample]: + """Get a list of sub samples""" + return crud.get_subsamples( + blSampleId=blSampleId, + proteinId=proteinId, + proposal=proposal, + containerId=containerId, + sort_order=sort_order, + **page, + ) - @token_required - @role_required - def delete(self, crystal_id): - """Deletes a crystal by crystalId""" - return crystal.delete_crystal(crystal_id) +@router.get( + "/sub/{blSubSampleId}", + response_model=schema.SubSample, + responses={404: {"description": "No such sub sample"}}, +) +def get_subsample( + blSubSampleId: int = Depends(filters.blSubSampleId), +) -> models.BLSubSample: + """Get a sub sample""" + subsamples = crud.get_subsamples( + blSubSampleId=blSubSampleId, + skip=0, + limit=1, + ) -@api.route("/crystals//pdb", endpoint="crystal_pdb_by_id") -@api.param("crystal_id", "Crystal id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="Crystal not found.") -class CrystalPdbById(Resource): - """Allows to get/set/delete crystal pdb item""" + try: + return subsamples.first + except IndexError: + raise HTTPException(status_code=404, detail="Sub sample not found") + + +@router.get("/images", response_model=paginated(schema.SampleImage)) +def get_sample_images( + page: dict[str, int] = Depends(pagination), + blSampleId: int = Depends(filters.blSampleId), +) -> Paged[models.BLSampleImage]: + """Get a list of sample images""" + return crud.get_sample_images( + blSampleId=blSampleId, + **page, + ) - #@token_required - #@role_required - @api.doc(description="crystal_id should be an integer ") - def get(self, crystal_id): - """Returns pdb file by crystalId""" - query_dict = request.args.to_dict() - pdb_file_path, pdb_file_name = crystal.get_crystal_pdb_by_id(crystal_id) - if pdb_file_path and pdb_file_name: - if os.path.exists( - 
os.path.join( - pdb_file_path, - pdb_file_name - ) - ): - return send_file( - os.path.join( - pdb_file_path, - pdb_file_name - ), - as_attachment=True) - if pdb_file_name: - query_dict["pdbFileName"] = pdb_file_path +@router.get("/images/{blSampleImageId}", response_class=FileResponse) +def get_sample_image( + blSampleImageId: int, +): + """Get a sample image""" + sampleimages = crud.get_sample_images( + blSampleImageId=blSampleImageId, + limit=1, + skip=0, + ) - # If no pdb file in the data base exists, then try to get one from pdb - if "pdbFileName" in query_dict: - if not query_dict["pdbFileName"].endswith(".pdb"): - query_dict["pdbFileName"] += ".pdb" - pdb_file = download_pdb_file(query_dict["pdbFileName"]) - if pdb_file: - return send_file( - io.BytesIO(pdb_file), - mimetype="text/plain", - as_attachment=True, - attachment_filename=query_dict["pdbFileName"], - ) - else: - abort( - HTTPStatus.NOT_FOUND, - "Pdb entry %s not found in %s" - % ( - query_dict["pdbFileName"], - current_app.config["PDB_URI"], - ), - ) + try: + sampleimage = sampleimages.first + image_path = sampleimage.imageFullPath + if settings.path_map: + image_path = settings.path_map + image_path - else: - abort( - HTTPStatus.NOT_FOUND, - "No pdb file or entry associated with crystal %d" % crystal_id, + if not os.path.exists(image_path): + logger.warning( + f"blSampleImageId `{sampleimage.blSampleImageId}` file `{image_path}` does not exist on disk" ) - - @token_required - @role_required - def patch(self, crystal_id): - """Fully updates crystal with crystal_id""" - query_dict = request.args.to_dict() - - if "file" not in request.files: - # No file submitted. Check if the pdb entry name exists - return crystal.patch_crystal_pdb_by_id(crystal_id, query_dict) - else: - request_file = request.files["file"] - if request_file.filename.endswith(".pdb"): - if "pdbFileName" not in query_dict: - query_dict["pdbFileName"] = request_file.filename - query_dict["pdbFilePath"] = current_app.config["UPLOAD_FOLDER"] - request_file.save( - os.path.join( - current_app.config["UPLOAD_FOLDER"], request_file.filename - ) - ) - return crystal.patch_crystal_pdb_by_id(crystal_id, query_dict) - else: - return abort( - HTTPStatus.FORBIDDEN, "Pdb file should end with extension .pdb" - ) - # return crystal.update_crystal_pdb(crystal_id, api.payload) - - @token_required - @role_required - def delete(self, crystal_id): - """Deletes a crystal pdb file by crystalId""" - # return crystal.delete_crystal_pdb(crystal_id) - - -@api.route("/proteins", endpoint="proteins") -@api.doc(security="apikey") -class Proteins(Resource): - """Proteins resource""" - - @token_required - @role_required - def get(self): - """Returns all protein items""" - return protein.get_proteins_by_request(request) - - @token_required - @role_required - @api.expect(protein_schemas.f_schema) - @api.marshal_with(protein_schemas.f_schema, code=201) - def post(self): - """Adds a new protein item""" - return protein.add_protein(api.payload) - - -@api.route("/proteins/", endpoint="protein_by_id") -@api.param("protein_id", "protein id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="protein not found.") -class ProteinById(Resource): - """Allows to get/set/delete a protein""" - - @token_required - @role_required - @api.doc(description="protein_id should be an integer ") - @api.marshal_with(protein_schemas.f_schema, skip_none=False, code=HTTPStatus.OK) - def get(self, protein_id): - """Returns a protein by proteinId""" - return 
protein.get_protein_by_id(protein_id) - - @token_required - @role_required - @api.expect(protein_schemas.f_schema) - @api.marshal_with(protein_schemas.f_schema, code=HTTPStatus.CREATED) - def put(self, protein_id): - """Fully updates protein with proteinId""" - return protein.update_protein(protein_id, api.payload) - - @token_required - @role_required - @api.expect(protein_schemas.f_schema) - @api.marshal_with(protein_schemas.f_schema, code=HTTPStatus.CREATED) - def patch(self, protein_id): - """Partially updates protein with proteinId""" - return protein.patch_protein(protein_id, api.payload) - - @token_required - @role_required - def delete(self, protein_id): - """Deletes a protein by proteinId""" - return protein.delete_protein(protein_id) - - -@api.route("/diffraction_plans", endpoint="diffraction_plans") -@api.doc(security="apikey") -class DiffractionPlans(Resource): - """Allows to get all diffraction_plans and insert a new one""" - - @token_required - @role_required - def get(self): - """Returns list of diffraction_plans""" - return diffraction_plan.get_diffraction_plans(request) - - @token_required - @role_required - @api.expect(diffraction_plan_schemas.f_schema) - @api.marshal_with(diffraction_plan_schemas.f_schema, code=201) - def post(self): - """Adds a new diffraction_plan""" - return diffraction_plan.add_diffraction_plan(api.payload) + raise IndexError + return image_path + + except IndexError: + raise HTTPException(status_code=404, detail="Sample image not found") + + +@router.get("", response_model=paginated(schema.Sample)) +def get_samples( + page: dict[str, int] = Depends(pagination), + search: str = Depends(filters.search), + proteinId: int = Depends(filters.proteinId), + proposal: str = Depends(filters.proposal), + containerId: int = Depends(filters.containerId), + beamLineName: str = Depends(filters.beamLineName), + status: Optional[crud.SAMPLE_STATUS_ENUM] = None, + sort_order: dict = Depends( + order_by_factory(crud.SAMPLE_ORDER_BY_MAP, "SampleOrder") + ), +) -> Paged[models.BLSample]: + """Get a list of samples""" + return crud.get_samples( + search=search, + proteinId=proteinId, + proposal=proposal, + containerId=containerId, + beamLineName=beamLineName, + status=status, + sort_order=sort_order, + **page, + ) -@api.route( - "/diffraction_plans/", endpoint="diffraction_plan_by_id" +@router.get( + "/{blSampleId}", + response_model=schema.Sample, + responses={404: {"description": "No such sample"}}, ) -@api.param("diffraction_plan_id", "diffraction_plan id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="diffraction_plan not found.") -class DiffractionPlanById(Resource): - """Allows to get/set/delete a diffraction_plan""" - - @token_required - @role_required - @api.doc(description="diffraction_plan_id should be an integer ") - @api.marshal_with( - diffraction_plan_schemas.f_schema, skip_none=False, code=HTTPStatus.OK +def get_sample( + blSampleId: int = Depends(filters.blSampleId), +) -> models.BLSample: + """Get a sample""" + samples = crud.get_samples( + blSampleId=blSampleId, + skip=0, + limit=1, ) - def get(self, diffraction_plan_id): - """Returns a diffraction_plan by diffraction_planId""" - return diffraction_plan.get_diffraction_plan_by_id(diffraction_plan_id) - - @token_required - @role_required - @api.expect(diffraction_plan_schemas.f_schema) - @api.marshal_with(diffraction_plan_schemas.f_schema, code=HTTPStatus.CREATED) - def put(self, diffraction_plan_id): - """Fully updates diffraction_plan with 
diffraction_plan_id""" - return diffraction_plan.update_diffraction_plan( - diffraction_plan_id, api.payload - ) - - @token_required - @role_required - @api.expect(diffraction_plan_schemas.f_schema) - @api.marshal_with(diffraction_plan_schemas.f_schema, code=HTTPStatus.CREATED) - def patch(self, diffraction_plan_id): - """Partially updates diffraction_plan with id diffraction_planId""" - return diffraction_plan.patch_diffraction_plan(diffraction_plan_id, api.payload) - @token_required - @role_required - def delete(self, diffraction_plan_id): - """Deletes a diffraction_plan by diffraction_planId""" - return diffraction_plan.delete_diffraction_plan(diffraction_plan_id) + try: + return samples.first + except IndexError: + raise HTTPException(status_code=404, detail="Sample not found") diff --git a/pyispyb/core/routes/schemas.py b/pyispyb/core/routes/schemas.py deleted file mode 100644 index f52e14aa..00000000 --- a/pyispyb/core/routes/schemas.py +++ /dev/null @@ -1,83 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -import importlib - -from flask import current_app -from pyispyb.flask_restx_patched import Resource, HTTPStatus -from pyispyb.app.extensions.api import api_v1, Namespace -from pyispyb.app.extensions.auth import token_required, role_required -from pyispyb.core import schemas - - -__license__ = "LGPLv3+" - - -api = Namespace("Schemas", description="Schemas related namespace", path="/schemas") -api_v1.add_namespace(api) - - -@api.route("/available_names", endpoint="available_schemas_names") -class SchemasList(Resource): - - #@token_required - #@role_required - def get(self): - """Returns list of available schemas - - Returns: - list: list of names - """ - current_app.logger.info("Get all schemas") - # TODO I guess there is oneliner fancy code that can do this... - result = [] - for item in dir(schemas): - if not item.startswith("__"): - result.append(item) - - return result - - -@api.route("/", endpoint="schema_by_name") -@api.param("name", "name (string)") -@api.doc(description="name should be a string") -class Schemas(Resource): - - #token_required - #@role_required - def get(self, name): - """Returns json schema - - Args: - name (string): schema name - - Returns: - json: schema as json - """ - try: - schemas_module = importlib.import_module("pyispyb.core.schemas." + name) - return getattr(schemas_module, "json_schema") - except Exception as ex: - return ( - "Unable to return schema with name %s (%s)" % (name, str(ex)), - HTTPStatus.NOT_FOUND, - ) diff --git a/pyispyb/core/routes/sessions.py b/pyispyb/core/routes/sessions.py index bd34f507..455dfd3e 100644 --- a/pyispyb/core/routes/sessions.py +++ b/pyispyb/core/routes/sessions.py @@ -1,199 +1,96 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. 
- -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - -import logging -from datetime import datetime - -from flask import request -from pyispyb.flask_restx_patched import Resource, HTTPStatus, abort - -from pyispyb.app.extensions.api import api_v1, Namespace -from pyispyb.app.extensions.auth import token_required, role_required - -from pyispyb.core.schemas import session as session_schemas -from pyispyb.core.schemas import beam_calendar as beam_calendar_schemas -from pyispyb.core.modules import session - - -__license__ = "LGPLv3+" - -log = logging.getLogger(__name__) -api = Namespace("Sessions", description="Session related namespace", path="/sessions") -api_v1.add_namespace(api) - - -@api.route("", endpoint="sessions") -@api.doc(security="apikey") -class Sessions(Resource): - """Allows to get all sessions and insert a new one""" - - @token_required - @role_required - def get(self): - """Returns list of sessions""" - return session.get_sessions(request) - - @token_required - @role_required - @api.expect(session_schemas.f_schema) - @api.marshal_with(session_schemas.f_schema, code=201) - def post(self): - """Adds a new session""" - log.info("Inserts a new session") - return session.add_session(api.payload) - - -@api.route("/", endpoint="session_by_id") -@api.param("session_id", "Session id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.FOUND, description="Session found :)", model=session_schemas.f_schema) -@api.response(code=HTTPStatus.NOT_FOUND, description="Session not found :(") -class SessionById(Resource): - """Allows to get/set/delete a session""" - - @token_required - @role_required - @api.doc(description="session_id should be an integer ") - @api.marshal_with(session_schemas.f_schema, skip_none=True, code=HTTPStatus.OK) - def get(self, session_id): - """Returns a session by sessionId""" - return session.get_session_by_id(session_id) - - - @token_required - @role_required - @api.expect(session_schemas.f_schema) - @api.marshal_with(session_schemas.f_schema, code=HTTPStatus.CREATED) - def put(self, session_id): - """Fully updates session with session_id""" - return session.update_session(session_id, api.payload) - - @token_required - @role_required - @api.expect(session_schemas.f_schema) - @api.marshal_with(session_schemas.f_schema, code=HTTPStatus.CREATED) - def patch(self, session_id): - """Partially updates session with id sessionId""" - return session.patch_session(session_id, api.payload) - - @token_required - @role_required - def delete(self, session_id): - """Deletes a session by sessionId""" - return session.delete_session(session_id) - - -@api.route("//info", endpoint="session_info_by_id") -@api.param("session_id", "session id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="session not found.") -class SessionInfoById(Resource): - """Returns full information of a session""" - - @token_required - @role_required - @api.doc(description="session_id should be an 
integer ") - def get(self, session_id): - """Returns a full description of a session by sessionId""" - return session.get_session_info_by_id(session_id) - - -@api.route("/date", endpoint="sessions_by_date") -@api.doc(security="apikey") -class SessionsByDateBeamline(Resource): - """Allows to get all sessions by date and beamline""" - - @token_required - @role_required - def get(self): - """Returns list of sessions by start_date, end_date and beamline.""" - - query_dict = request.args.to_dict() - start_date = query_dict.get("start_date") - end_date = query_dict.get("end_date") - beamline = query_dict.get("beamline") - - if start_date is None and end_date is None: - abort( - HTTPStatus.NOT_ACCEPTABLE, "No start_date or end_date argument provided" - ) - - if start_date: - try: - start_date = datetime.strptime(start_date, "%Y%m%d") - except ValueError as ex: - abort( - HTTPStatus.NOT_ACCEPTABLE, - "start_date should be in YYYYMMDD format (%s)" % str(ex), - ) - - if end_date: - try: - end_date = datetime.strptime(end_date, "%Y%m%d") - except ValueError as ex: - abort( - HTTPStatus.NOT_ACCEPTABLE, - "end_date should be in YYYYMMDD format (%s)" % str(ex), - ) - - return session.get_sessions_by_date(start_date, end_date, beamline) - - -# getSessionsByDateAndBeamline(startDate, endDate, beamline) -# getSessionsBybeam_calendarAndDate(startDate, endDate, beam_calendar) - - -@api.route("/beam_calendars", endpoint="beam_calendars") -@api.doc(security="apikey") -class BeamCalendars(Resource): - """Allows to get all beam_calendars""" - - @token_required - @role_required - def get(self): - """Returns beam_calendars based on query parameters""" - return session.get_beam_calendars(request) +from typing import Optional + +from fastapi import Depends, HTTPException, Query +from ispyb import models + +from pyispyb.dependencies import pagination +from pyispyb.app.extensions.database.utils import Paged +from pyispyb import filters +from pyispyb.app.base import AuthenticatedAPIRouter + +from ..modules import sessions as crud +from ..schemas import sessions as schema +from ..schemas.utils import paginated + + +router = AuthenticatedAPIRouter(prefix="/sessions", tags=["Sessions"]) +PaginatedSession = paginated(schema.Session) + + +@router.get("", response_model=PaginatedSession) +def get_sessions( + proposal: str = Depends(filters.proposal), + beamLineName: str = Depends(filters.beamLineName), + beamLineGroup: Optional[str] = Query( + None, description="Show sessions for a beamLineGroup" + ), + scheduled: bool = Query(None, description="Get scheduled sessions only"), + upcoming: Optional[bool] = Query(False, description="Get the upcoming sessions"), + previous: Optional[bool] = Query( + False, description="Get the recently finished sessions" + ), + sessionType=Query( + None, description="Filter by session type, i.e. 
commissioning, remote" + ), + month: int = Depends(filters.month), + year: int = Depends(filters.year), + page: dict[str, int] = Depends(pagination), +) -> Paged[models.BLSession]: + """Get a list of sessions""" + return crud.get_sessions( + proposal=proposal, + beamLineName=beamLineName, + beamLineGroup=beamLineGroup, + scheduled=scheduled, + upcoming=upcoming, + previous=previous, + sessionType=sessionType, + month=month, + year=year, + **page + ) - @token_required - @role_required - @api.expect(beam_calendar_schemas.f_schema) - @api.marshal_with(beam_calendar_schemas.f_schema, code=201) - def post(self): - """Adds a new beam_calendar""" - return session.add_beam_calendar(api.payload) +@router.get("/group", response_model=PaginatedSession) +def get_sessions_for_beamline_group( + beamLineGroup: str = Query( + None, description="Beamline group to display session for" + ), + upcoming: Optional[bool] = Query(False, description="Get the upcoming sessions"), + previous: Optional[bool] = Query( + False, description="Get the recently finished sessions" + ), + sessionType=Query( + None, description="Filter by session type, i.e. commissioning, remote" + ), +): + """Get a list of sessions for a beamline group + Displays one session per beamline + """ + return crud.get_sessions_for_beamline_group( + beamLineGroup=beamLineGroup, + upcoming=upcoming, + previous=previous, + sessionType=sessionType, + ) -@api.route("/beam_calendar/", endpoint="beam_calendar_by_id") -@api.param("beam_calendar_id", "beam_calendar id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="beam_calendar not found.") -class beam_calendarById(Resource): - """Allows to get/set/delete a beam_calendar""" - @token_required - @role_required - @api.doc(description="beam_calendar_id should be an integer ") - @api.marshal_with( - beam_calendar_schemas.f_schema, skip_none=False, code=HTTPStatus.OK +@router.get( + "/{sessionId}", + response_model=schema.Session, + responses={404: {"description": "No such session"}}, +) +def get_session( + sessionId: str = Depends(filters.sessionId), +) -> models.BLSession: + """Get a session""" + sessions = crud.get_sessions( + sessionId=sessionId, + skip=0, + limit=1, ) - def get(self, beam_calendar_id): - """Returns a beam_calendar by beam_calendarId""" - return session.get_beam_calendar_by_id(beam_calendar_id) + + try: + return sessions.first + except IndexError: + raise HTTPException(status_code=404, detail="Session not found") diff --git a/pyispyb/core/routes/shipping.py b/pyispyb/core/routes/shipping.py index aa2ba134..a84acaed 100644 --- a/pyispyb/core/routes/shipping.py +++ b/pyispyb/core/routes/shipping.py @@ -1,257 +1,86 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" import logging -from flask import current_app, request, send_from_directory -from flask_restx._http import HTTPStatus - -from pyispyb.flask_restx_patched import Resource -from pyispyb.app.extensions.auth import token_required, role_required -from pyispyb.app.extensions.api import api_v1, Namespace - -from pyispyb.core.modules import container, dewar, shipping -from pyispyb.core.schemas import container as container_schemas -from pyispyb.core.schemas import dewar as dewar_schemas -from pyispyb.core.schemas import shipping as shipping_schemas - - -__license__ = "LGPLv3+" - -log = logging.getLogger(__name__) - -api = Namespace( - "Shippings", description="shipping related namespace", path="/shippings" -) -api_v1.add_namespace(api) - - -@api.route("", endpoint="shippings") -@api.doc(security="apikey") -class Shippings(Resource): - """Allows to get all shippings""" - - @token_required - @role_required - def get(self): - """Returns list of shippings""" - return shipping.get_shippings(request), HTTPStatus.OK - - @token_required - @role_required - @api.expect(shipping_schemas.f_schema) - @api.marshal_with(shipping_schemas.f_schema, code=201) - def post(self): - """Adds a new shipping""" - return shipping.add_shipping(api.payload) - - -@api.route("/", endpoint="shipping_by_id") -@api.param("shipping_id", "shipping id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="shipping not found.") -class ShippingById(Resource): - """Allows to get/set/delete a shipping""" - - @token_required - @role_required - @api.doc(description="shipping_id should be an integer ") - @api.marshal_with(shipping_schemas.f_schema, skip_none=False, code=HTTPStatus.OK) - def get(self, shipping_id): - """Returns a shipping by shippingId""" - return shipping.get_shipping_by_id(shipping_id) - - @token_required - @role_required - @api.expect(shipping_schemas.f_schema) - @api.marshal_with(shipping_schemas.f_schema, code=HTTPStatus.CREATED) - def put(self, shipping_id): - """Fully updates shipping with id shipping_id""" - return shipping.update_shipping(shipping_id, api.payload) +from fastapi import Depends, HTTPException, status +from ispyb import models - @token_required - @role_required - @api.expect(shipping_schemas.f_schema) - @api.marshal_with(shipping_schemas.f_schema, code=HTTPStatus.CREATED) - def patch(self, shipping_id): - """Partially updates shipping with id shipping_id""" - return shipping.patch_shipping(shipping_id, api.payload) +from pyispyb.dependencies import pagination +from pyispyb.app.extensions.database.utils import Paged +from pyispyb.app.base import AuthenticatedAPIRouter +from pyispyb import filters - @token_required - @role_required - def delete(self, shipping_id): - """Deletes shipping by shipping_id""" - return shipping.delete_shipping(shipping_id) +from ..modules import shipping as crud +from ..schemas import shipping as schema +from ..schemas.utils import paginated, make_optional -@api.route("//info", endpoint="shipping_info_by_id") -@api.param("shipping_id", "shipping id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="shipping not found.") -class ShippingInfoById(Resource): - """Returns full information of a shipping""" +logger = logging.getLogger(__name__) +router = AuthenticatedAPIRouter(prefix="/shippings", tags=["Shipping"]) - @token_required - @role_required - @api.doc(description="shipping_id should be an integer ") - # @api.marshal_with(shipping_desc_f_schema) - def get(self, shipping_id): - 
"""Returns a full description of a shipping by shippingId""" - return shipping.get_shipping_info_by_id(shipping_id) +@router.get("", response_model=paginated(schema.Shipping)) +def get_shippings( + proposal: str = Depends(filters.proposal), + page: dict[str, int] = Depends(pagination), +) -> Paged[models.Shipping]: + """Get a list of shipments""" + return crud.get_shippings(proposal=proposal, **page) -@api.route("/dewars", endpoint="dewars") -@api.doc(security="apikey") -class Dewars(Resource): - """Dewars resource""" - @token_required - @role_required - def get(self): - """Returns all dewars items""" - query_dict = request.args.to_dict() - return dewar.get_dewars_by_query(query_dict) - - @token_required - @role_required - @api.expect(dewar_schemas.f_schema) - @api.marshal_with(dewar_schemas.f_schema, code=201) - def post(self): - """Adds a new dewar item""" - return dewar.add_dewar(api.payload) - - -@api.route("/dewars/", endpoint="dewar_by_id") -@api.param("dewar_id", "Dewar id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="Dewar not found.") -class DewarById(Resource): - """Allows to get/set/delete a dewar item""" - - @token_required - @role_required - @api.doc(description="dewar_id should be an integer ") - @api.marshal_with(dewar_schemas.f_schema, skip_none=False, code=HTTPStatus.OK) - def get(self, dewar_id): - """Returns a dewar by dewarId""" - return dewar.get_dewar_by_id(dewar_id) - - @token_required - @role_required - @api.expect(dewar_schemas.f_schema) - @api.marshal_with(dewar_schemas.f_schema, code=HTTPStatus.CREATED) - def put(self, dewar_id): - """Fully updates dewar with dewar_id""" - return dewar.update_dewar(dewar_id, api.payload) - - @token_required - @role_required - @api.expect(dewar_schemas.f_schema) - @api.marshal_with(dewar_schemas.f_schema, code=HTTPStatus.CREATED) - def patch(self, dewar_id): - """Partially updates dewar with id dewarId""" - return dewar.patch_dewar(dewar_id, api.payload) +@router.get( + "/{shippingId}", + response_model=schema.Shipping, + responses={404: {"description": "No such shipment"}}, +) +def get_shipping(shippingId: int) -> models.Shipping: + """Get a shipment""" + shipping = crud.get_shippings( + shippingId=shippingId, + skip=0, + limit=1, + ) + try: + return shipping.first + except IndexError: + raise HTTPException(status_code=404, detail="Shipment not found") + + +@router.post( + "", + response_model=schema.Shipping, + status_code=status.HTTP_201_CREATED, +) +def create_shipping(shipping: schema.ShippingCreate) -> models.Shipping: + """Create a new shipment""" + return crud.create_shipping( + shipping=shipping, + ) - @token_required - @role_required - def delete(self, dewar_id): - """Deletes a dewar by dewarId""" - return dewar.delete_dewar(dewar_id) +SHIPPING_UPDATE_EXCLUDED = {} -@api.route("/dewars//labels", endpoint="dewar_labels_by_id") -@api.param("dewar_id", "Dewar id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="Dewar not found.") -class DewarLabelsById(Resource): - """Returns dewar label pdf""" - #@token_required - #@role_required - @api.doc(description="dewar_id should be an integer ") - def get(self, dewar_id): - """Returns a dewar labels by dewarId""" - log.info("Generating pdf labels for dewar %d" % dewar_id) - html_labels_filename, pdf_labels_filename = dewar.get_dewar_labels_by_id( - dewar_id - ) - return send_from_directory( - current_app.config["TEMP_FOLDER"], - pdf_labels_filename, - as_attachment=True +@router.patch( + 
"/{shippingId}", + response_model=schema.Shipping, + responses={ + 404: {"description": "No such shipment"}, + 400: {"description": "Could not update shipment"}, + }, +) +def update_shipping( + shippingId: int, + shipping: make_optional( + schema.ShippingCreate, + exclude=SHIPPING_UPDATE_EXCLUDED, + ), +): + """Update a Shipment""" + try: + return crud.update_shipping(shippingId, shipping) + except IndexError: + raise HTTPException(status_code=404, detail="Shipment not found") + except Exception: + logger.exception( + f"Could not update shipping `{shippingId}` with payload `{shipping}`" ) - - -@api.route("/containers", endpoint="containers") -@api.doc(security="apikey") -class Containers(Resource): - """Containers resource""" - - @token_required - @role_required - def get(self): - """Returns all container items""" - return container.get_containers(request) - - @token_required - @role_required - @api.expect(container_schemas.f_schema) - @api.marshal_with(container_schemas.f_schema, code=201) - def post(self): - """Adds a new container item""" - return container.add_container(api.payload) - - -@api.route("/containers/", endpoint="container_by_id") -@api.param("container_id", "Container id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="Container not found.") -class ContainerById(Resource): - """Allows to get/set/delete a container item""" - - @token_required - @role_required - @api.doc(description="container_id should be an integer ") - @api.marshal_with(container_schemas.f_schema, skip_none=False, code=HTTPStatus.OK) - def get(self, container_id): - """Returns a container by container_id""" - return container.get_container_by_id(container_id) - - @token_required - @role_required - @api.expect(container_schemas.f_schema) - @api.marshal_with(container_schemas.f_schema, code=HTTPStatus.CREATED) - def put(self, container_id): - """Fully updates container with container_id""" - return container.update_container(container_id, api.payload) - - @token_required - @role_required - @api.expect(container_schemas.f_schema) - @api.marshal_with(container_schemas.f_schema, code=HTTPStatus.CREATED) - def patch(self, container_id): - """Partially updates container with id containerId""" - return container.patch_container(container_id, api.payload) - - @token_required - @role_required - def delete(self, container_id): - """Deletes a container by containerId""" - return container.delete_container(container_id) + raise HTTPException(status_code=400, detail="Could not update shipment") diff --git a/pyispyb/core/routes/stats.py b/pyispyb/core/routes/stats.py new file mode 100644 index 00000000..4d92a45f --- /dev/null +++ b/pyispyb/core/routes/stats.py @@ -0,0 +1,136 @@ +from fastapi import Depends, HTTPException + +from ...app.base import AuthenticatedAPIRouter +from ...app.extensions.database.utils import Paged +from ...core.schemas.utils import paginated +from ...dependencies import pagination +from ... 
import filters
+
+from ..modules import stats as crud
+from ..schemas import stats as schema
+
+
+router = AuthenticatedAPIRouter(prefix="/stats", tags=["Stats"])
+
+
+@router.get("/breakdown", response_model=schema.Breakdown)
+def get_breakdown(
+    beamLineName: str = Depends(filters.beamLineName),
+    session: str = Depends(filters.session),
+    sessionId: str = Depends(filters.sessionId),
+    runId: str = Depends(filters.runId),
+) -> schema.Breakdown:
+    """Get stats breakdown for a session or run"""
+    if not (sessionId or session or (beamLineName and runId)):
+        raise HTTPException(
+            status_code=422,
+            detail="Please provide either `session` or (`beamLineName` and `runId`)",
+        )
+
+    return crud.get_breakdown(
+        session=session,
+        sessionId=sessionId,
+        beamLineName=beamLineName,
+        runId=runId,
+    )
+
+
+@router.get("/times", response_model=schema.Times)
+def get_times(
+    beamLineName: str = Depends(filters.beamLineName),
+    session: str = Depends(filters.session),
+    sessionId: str = Depends(filters.sessionId),
+    proposal: str = Depends(filters.proposal),
+    runId: str = Depends(filters.runId),
+) -> schema.Times:
+    """Get total times for a session, proposal, or run"""
+    if not (proposal or sessionId or session or (beamLineName and runId)):
+        raise HTTPException(
+            status_code=422,
+            detail="Please provide either `proposal` or `session` or (`beamLineName` and `runId`)",
+        )
+
+    return crud.get_times(
+        session=session,
+        sessionId=sessionId,
+        proposal=proposal,
+        beamLineName=beamLineName,
+        runId=runId,
+    )
+
+
+@router.get("/errors", response_model=schema.Errors)
+def get_errors(
+    beamLineName: str = Depends(filters.beamLineName),
+    session: str = Depends(filters.session),
+    sessionId: str = Depends(filters.sessionId),
+    runId: str = Depends(filters.runId),
+) -> schema.Errors:
+    """Get the errors for a session or run"""
+    if not (sessionId or session or (beamLineName and runId)):
+        raise HTTPException(
+            status_code=422,
+            detail="Please provide either `session` or (`beamLineName` and `runId`)",
+        )
+
+    return crud.get_errors(
+        session=session,
+        sessionId=sessionId,
+        beamLineName=beamLineName,
+        runId=runId,
+    )
+
+
+@router.get("/hourlies", response_model=schema.Hourlies)
+def get_hourlies(
+    beamLineName: str = Depends(filters.beamLineName),
+    session: str = Depends(filters.session),
+    sessionId: str = Depends(filters.sessionId),
+    proposal: str = Depends(filters.proposal),
+    runId: str = Depends(filters.runId),
+) -> schema.Hourlies:
+    """Get the hourly stats for a session or run"""
+    if not (proposal or sessionId or session or (beamLineName and runId)):
+        raise HTTPException(
+            status_code=422,
+            detail="Please provide either `proposal` or `session` or (`beamLineName` and `runId`)",
+        )
+
+    return crud.get_hourlies(
+        session=session,
+        sessionId=sessionId,
+        proposal=proposal,
+        beamLineName=beamLineName,
+        runId=runId,
+    )
+
+
+@router.get("/parameters/histogram", response_model=schema.ParameterHistograms)
+def get_parameter_histogram(
+    beamLineName: str = Depends(filters.beamLineName),
+    session: str = Depends(filters.session),
+    sessionId: str = Depends(filters.sessionId),
+    runId: str = Depends(filters.runId),
+) -> schema.ParameterHistograms:
+    """Get histogram of parameters for a session or run"""
+    if not (sessionId or session or (beamLineName and runId)):
+        raise HTTPException(
+            status_code=422, detail="Please provide either `session` or (`beamLineName` and `runId`)"
+        )
+
+    return crud.get_parameter_histogram(
+        session=session,
+        sessionId=sessionId,
+        beamLineName=beamLineName,
+        
runId=runId, + ) + + +@router.get("/runs", response_model=paginated(schema.VRun)) +def get_runs( + page: dict[str, int] = Depends(pagination), +) -> Paged[schema.VRun]: + """Get a list of runs""" + return crud.get_runs( + **page, + ) diff --git a/pyispyb/core/routes/user.py b/pyispyb/core/routes/user.py new file mode 100644 index 00000000..c4d1e01d --- /dev/null +++ b/pyispyb/core/routes/user.py @@ -0,0 +1,63 @@ +from typing import Optional +from pydantic import BaseModel, Field + +from ...app.extensions.database.definitions import get_current_person, get_options +from ...app.extensions.auth.onetime import generate_onetime_token +from ...app.extensions.options.schema import BeamLineGroup +from ...app.base import AuthenticatedAPIRouter +from ...app.globals import g + +router = AuthenticatedAPIRouter(prefix="/user", tags=["Current User"]) + + +class CurrentUser(BaseModel): + givenName: Optional[str] + familyName: Optional[str] + login: str + Permissions: list[str] + personId: int + beamLineGroups: list[str] + beamLines: list[str] + + +@router.get( + "/current", + response_model=CurrentUser, +) +def current_user() -> CurrentUser: + person = get_current_person(g.login) + db_options = get_options() + beamLineGroups: list[BeamLineGroup] = db_options.beamLineGroups + groups = [] + beamLines = [] + for beamLineGroup in beamLineGroups: + if beamLineGroup.permission in g.permissions: + groups.append(beamLineGroup.groupName) + beamLines.extend( + [beamLine.beamLineName for beamLine in beamLineGroup.beamLines] + ) + + return { + "personId": person.personId, + "givenName": person.givenName, + "familyName": person.familyName, + "login": person.login, + "Permissions": g.permissions, + "beamLineGroups": groups, + "beamLines": list(set(beamLines)), + } + + +class OneTimeToken(BaseModel): + validity: str = Field(description="The url to sign") + token: Optional[str] + + +@router.post( + "/sign", + response_model=OneTimeToken, +) +def sign_url(token_request: OneTimeToken) -> OneTimeToken: + """Sign a url with a one time token""" + token = generate_onetime_token(token_request.validity, g.personId) + return OneTimeToken(token=token, validity=token_request.validity) diff --git a/pyispyb/core/routes/user_office.py b/pyispyb/core/routes/user_office.py deleted file mode 100644 index 8824dfef..00000000 --- a/pyispyb/core/routes/user_office.py +++ /dev/null @@ -1,77 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -from flask_restx._http import HTTPStatus - -from pyispyb.flask_restx_patched import Resource - -from pyispyb.app.extensions.api import api_v1, Namespace -from pyispyb.app.extensions.auth import token_required, role_required -from pyispyb.app.extensions.user_office import user_office - - -__license__ = "LGPLv3+" - - -api = Namespace( - "User office", description="User office related namespace", path="/user_office" -) -api_v1.add_namespace(api) - - -@api.route("/sync_all", endpoint="user_office_sync_all") -@api.doc(security="apikey") -class SyncAll(Resource): - - """Sync with user office""" - - @token_required - @role_required - def post(self): - """Sync with user office""" - - api.logger.info("Sync with uer office") - user_office.sync_all() - return HTTPStatus.OK, {"message": "Done!"} - - -@api.route( - "/sync_proposal/", - endpoint="user_office_sync_proposal", -) -@api.param("proposal_code", "Proposal code (string)") -@api.param("proposal_number", "Proposal number (integer)") -@api.doc(security="apikey") -class UpdateProposal(Resource): - """Sync with user office""" - - @token_required - @role_required - @api.doc( - description="proposal_code should be a string, proposal_number should be an integer" - ) - def post(self, proposal_code, proposal_number): - """Sync with user office""" - - api.logger.info("Updates proposal %s%d" % (proposal_code, proposal_number)) - user_office.sync_proposal(proposal_code, proposal_number) - return HTTPStatus.OK, {"message": "Done!"} diff --git a/pyispyb/core/routes/webservices/__init__.py b/pyispyb/core/routes/webservices/__init__.py new file mode 100644 index 00000000..5e38b5a7 --- /dev/null +++ b/pyispyb/core/routes/webservices/__init__.py @@ -0,0 +1,27 @@ +import os +import logging +from importlib import import_module +from fastapi import FastAPI + +from .base import router + +logger = logging.getLogger(__name__) + + +def init_app(app: FastAPI, prefix: str = None, **kwargs): + """Init app routes.""" + if not app.db_options.enable_webservice_routes: + logger.info("Webservice routes disabled") + return + + for module_name in os.listdir(os.path.dirname(__file__)): + if not module_name.startswith("__") and module_name.endswith(".py"): + try: + logger.info(f"importing {module_name}") + module = import_module(".%s" % module_name[:-3], package=__name__) + if hasattr(module, "router"): + app.include_router(module.router, prefix=prefix) + except Exception: + logger.exception(f"Could not import module `{module_name}`") + + app.include_router(router, prefix=prefix) diff --git a/pyispyb/core/routes/webservices/base.py b/pyispyb/core/routes/webservices/base.py new file mode 100644 index 00000000..9dd58855 --- /dev/null +++ b/pyispyb/core/routes/webservices/base.py @@ -0,0 +1,6 @@ +from ....app.base import AuthenticatedAPIRouter + + +router = AuthenticatedAPIRouter( + prefix="/webservices", tags=["Webservices - Used by external applications"] +) diff --git a/pyispyb/core/routes/webservices/userportalsync.py b/pyispyb/core/routes/webservices/userportalsync.py new file mode 100644 index 00000000..ba6aa1be --- /dev/null +++ b/pyispyb/core/routes/webservices/userportalsync.py @@ -0,0 +1,33 @@ +import logging +from fastapi import HTTPException, Depends +from ...modules import userportalsync as crud +from ...schemas import userportalsync as schema +from ....dependencies import permission +from ..responses import Message +from .base import router + + +logger = logging.getLogger("ispyb") + + +@router.post( + "/userportalsync/sync_proposal", + response_model=Message, + 
responses={400: {"description": "The input data does not follow the schema"}},
+)
+def sync_proposal(
+    proposal: schema.UserPortalProposalSync,
+    depends: bool = Depends(permission("uportal_sync")),
+):
+    """Create/Update a proposal from the User Portal and all its related entities"""
+    try:
+        execution_time = crud.sync_proposal(proposal=proposal)
+        proposal_dict = proposal.dict()
+        return {
+            "message": f"The proposal {proposal_dict['proposal']['proposalCode']}"
+            f"-{proposal_dict['proposal']['proposalNumber']} has been synchronized in {execution_time}"
+        }
+
+    except Exception as e:
+        logger.debug(e)
+        raise HTTPException(status_code=400, detail=str(e))
diff --git a/pyispyb/core/schemas/__init__.py b/pyispyb/core/schemas/__init__.py
index 70192fd5..3d8be7f0 100644
--- a/pyispyb/core/schemas/__init__.py
+++ b/pyispyb/core/schemas/__init__.py
@@ -14,9 +14,3 @@
 You should have received a copy of the GNU Lesser General Public License
 along with py-ispyb. If not, see .
 """
-
-from . import proposal_has_person
-
-#TODO for some reason one has to import proposal_has_person here
-# So other modules can use it. Othwerwise no attribute error is raised
-# Remove this import
\ No newline at end of file
diff --git a/pyispyb/core/schemas/admin/__init__.py b/pyispyb/core/schemas/admin/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pyispyb/core/schemas/admin/activity.py b/pyispyb/core/schemas/admin/activity.py
new file mode 100644
index 00000000..7c3e1e93
--- /dev/null
+++ b/pyispyb/core/schemas/admin/activity.py
@@ -0,0 +1,19 @@
+from datetime import datetime
+import enum
+
+from pydantic import BaseModel
+
+
+class ActionType(str, enum.Enum):
+    db_options = "db_options"
+    online = "online"
+
+
+class AdminActivity(BaseModel):
+    username: str
+    action: str
+    comments: str
+    dateTime: datetime
+
+    class Config:
+        orm_mode = True
diff --git a/pyispyb/core/schemas/admin/groups.py b/pyispyb/core/schemas/admin/groups.py
new file mode 100644
index 00000000..a0451bec
--- /dev/null
+++ b/pyispyb/core/schemas/admin/groups.py
@@ -0,0 +1,45 @@
+from typing import Optional
+
+from pydantic import BaseModel, Field, constr
+
+
+class UserGroupMetaData(BaseModel):
+    permissions: int = Field(description="Number of permissions")
+    people: int = Field(description="Number of people")
+
+
+class NewUserGroup(BaseModel):
+    name: str = Field(title="Name", description="The name of the group")
+
+
+class UserGroup(NewUserGroup):
+    userGroupId: int
+
+    metadata: UserGroupMetaData = Field(alias="_metadata")
+
+    class Config:
+        orm_mode = True
+
+
+class NewUserGroupPermission(BaseModel):
+    permissionId: int = Field(title="Permission")
+
+
+class NewUserGroupPerson(BaseModel):
+    personId: int = Field(title="Person")
+
+
+class NewPermission(BaseModel):
+    type: constr(max_length=15) = Field(
+        title="Permission", description="The permission identifier"
+    )
+    description: Optional[constr(max_length=100)] = Field(
+        title="Description", description="Description of this permission"
+    )
+
+
+class Permission(NewPermission):
+    permissionId: int
+
+    class Config:
+        orm_mode = True
diff --git a/pyispyb/core/schemas/auto_proc.py b/pyispyb/core/schemas/auto_proc.py
deleted file mode 100644
index 60f71699..00000000
--- a/pyispyb/core/schemas/auto_proc.py
+++ /dev/null
@@ -1,62 +0,0 @@
-"""
-Project: py-ispyb.
-
-https://github.com/ispyb/py-ispyb
-
-This file is part of py-ispyb software.
- -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'autoProcId': f_fields.Integer(required=True, description='Primary key (auto-incremented)'), - 'autoProcProgramId': f_fields.Integer(required=False, description='Related program item'), - 'spaceGroup': f_fields.String(required=False, description='Space group'), - 'refinedCell_a': f_fields.Float(required=False, description='Refined cell'), - 'refinedCell_b': f_fields.Float(required=False, description='Refined cell'), - 'refinedCell_c': f_fields.Float(required=False, description='Refined cell'), - 'refinedCell_alpha': f_fields.Float(required=False, description='Refined cell'), - 'refinedCell_beta': f_fields.Float(required=False, description='Refined cell'), - 'refinedCell_gamma': f_fields.Float(required=False, description='Refined cell'), - 'recordTimeStamp': f_fields.DateTime(required=False, description='Creation or last update date/time'), - } - -class AutoProcSchema(Schema): - """Marshmallows schema class representing AutoProc table""" - - autoProcId = ma_fields.Integer() - autoProcProgramId = ma_fields.Integer() - spaceGroup = ma_fields.String() - refinedCell_a = ma_fields.Float() - refinedCell_b = ma_fields.Float() - refinedCell_c = ma_fields.Float() - refinedCell_alpha = ma_fields.Float() - refinedCell_beta = ma_fields.Float() - refinedCell_gamma = ma_fields.Float() - recordTimeStamp = ma_fields.DateTime() - -f_schema = api.model('AutoProc', dict_schema) -ma_schema = AutoProcSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/auto_proc_integration.py b/pyispyb/core/schemas/auto_proc_integration.py deleted file mode 100644 index 46fc750e..00000000 --- a/pyispyb/core/schemas/auto_proc_integration.py +++ /dev/null @@ -1,86 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'autoProcIntegrationId': f_fields.Integer(required=True, description='Primary key (auto-incremented)'), - 'dataCollectionId': f_fields.Integer(required=True, description='DataCollection item'), - 'autoProcProgramId': f_fields.Integer(required=False, description='Related program item'), - 'startImageNumber': f_fields.Integer(required=False, description='start image number'), - 'endImageNumber': f_fields.Integer(required=False, description='end image number'), - 'refinedDetectorDistance': f_fields.Float(required=False, description='Refined DataCollection.detectorDistance'), - 'refinedXBeam': f_fields.Float(required=False, description='Refined DataCollection.xBeam'), - 'refinedYBeam': f_fields.Float(required=False, description='Refined DataCollection.yBeam'), - 'rotationAxisX': f_fields.Float(required=False, description='Rotation axis'), - 'rotationAxisY': f_fields.Float(required=False, description='Rotation axis'), - 'rotationAxisZ': f_fields.Float(required=False, description='Rotation axis'), - 'beamVectorX': f_fields.Float(required=False, description='Beam vector'), - 'beamVectorY': f_fields.Float(required=False, description='Beam vector'), - 'beamVectorZ': f_fields.Float(required=False, description='Beam vector'), - 'cell_a': f_fields.Float(required=False, description='Unit cell'), - 'cell_b': f_fields.Float(required=False, description='Unit cell'), - 'cell_c': f_fields.Float(required=False, description='Unit cell'), - 'cell_alpha': f_fields.Float(required=False, description='Unit cell'), - 'cell_beta': f_fields.Float(required=False, description='Unit cell'), - 'cell_gamma': f_fields.Float(required=False, description='Unit cell'), - 'recordTimeStamp': f_fields.DateTime(required=False, description='Creation or last update date/time'), - 'anomalous': f_fields.Integer(required=False, description='boolean type:0 noanoum - 1 anoum'), - } - -class AutoProcIntegrationSchema(Schema): - """Marshmallows schema class representing AutoProcIntegration table""" - - autoProcIntegrationId = ma_fields.Integer() - dataCollectionId = ma_fields.Integer() - autoProcProgramId = ma_fields.Integer() - startImageNumber = ma_fields.Integer() - endImageNumber = ma_fields.Integer() - refinedDetectorDistance = ma_fields.Float() - refinedXBeam = ma_fields.Float() - refinedYBeam = ma_fields.Float() - rotationAxisX = ma_fields.Float() - rotationAxisY = ma_fields.Float() - rotationAxisZ = ma_fields.Float() - beamVectorX = ma_fields.Float() - beamVectorY = ma_fields.Float() - beamVectorZ = ma_fields.Float() - cell_a = ma_fields.Float() - cell_b = ma_fields.Float() - cell_c = ma_fields.Float() - cell_alpha = ma_fields.Float() - cell_beta = ma_fields.Float() - cell_gamma = ma_fields.Float() - recordTimeStamp = ma_fields.DateTime() - anomalous = ma_fields.Integer() - -f_schema = api.model('AutoProcIntegration', dict_schema) -ma_schema = AutoProcIntegrationSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/auto_proc_program.py b/pyispyb/core/schemas/auto_proc_program.py deleted file mode 100644 index 33b0e6f8..00000000 --- a/pyispyb/core/schemas/auto_proc_program.py +++ /dev/null @@ -1,64 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. 
- -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'autoProcProgramId': f_fields.Integer(required=True, description='Primary key (auto-incremented)'), - 'processingCommandLine': f_fields.String(required=False, description='Command line for running the automatic processing'), - 'processingPrograms': f_fields.String(required=False, description='Processing programs (comma separated)'), - 'processingStatus': f_fields.Integer(required=False, description='success (1) / fail (0)'), - 'processingMessage': f_fields.String(required=False, description='warning, error,...'), - 'processingStartTime': f_fields.DateTime(required=False, description='Processing start time'), - 'processingEndTime': f_fields.DateTime(required=False, description='Processing end time'), - 'processingEnvironment': f_fields.String(required=False, description='Cpus, Nodes,...'), - 'recordTimeStamp': f_fields.DateTime(required=False, description='Creation or last update date/time'), - 'processingJobId': f_fields.Integer(required=False, description=''), - 'dataCollectionId': f_fields.Integer(required=False, description=''), - } - -class AutoProcProgramSchema(Schema): - """Marshmallows schema class representing AutoProcProgram table""" - - autoProcProgramId = ma_fields.Integer() - processingCommandLine = ma_fields.String() - processingPrograms = ma_fields.String() - processingStatus = ma_fields.Integer() - processingMessage = ma_fields.String() - processingStartTime = ma_fields.DateTime() - processingEndTime = ma_fields.DateTime() - processingEnvironment = ma_fields.String() - recordTimeStamp = ma_fields.DateTime() - processingJobId = ma_fields.Integer() - dataCollectionId = ma_fields.Integer() - -f_schema = api.model('AutoProcProgram', dict_schema) -ma_schema = AutoProcProgramSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/auto_proc_program_attachment.py b/pyispyb/core/schemas/auto_proc_program_attachment.py deleted file mode 100644 index 333da2b6..00000000 --- a/pyispyb/core/schemas/auto_proc_program_attachment.py +++ /dev/null @@ -1,56 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. 
- -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'autoProcProgramAttachmentId': f_fields.Integer(required=True, description='Primary key (auto-incremented)'), - 'autoProcProgramId': f_fields.Integer(required=True, description='Related autoProcProgram item'), - 'fileType': f_fields.String(required=False, description='Type of file Attachmentenum(Log,Result,Graph,Debug)'), - 'fileName': f_fields.String(required=False, description='Attachment filename'), - 'filePath': f_fields.String(required=False, description='Attachment filepath to disk storage'), - 'recordTimeStamp': f_fields.DateTime(required=False, description='Creation or last update date/time'), - 'importanceRank': f_fields.Integer(required=False, description='For the particular autoProcProgramId and fileType, indicate the importance of the attachment. Higher numbers are more important'), - } - -class AutoProcProgramAttachmentSchema(Schema): - """Marshmallows schema class representing AutoProcProgramAttachment table""" - - autoProcProgramAttachmentId = ma_fields.Integer() - autoProcProgramId = ma_fields.Integer() - fileType = ma_fields.String() - fileName = ma_fields.String() - filePath = ma_fields.String() - recordTimeStamp = ma_fields.DateTime() - importanceRank = ma_fields.Integer() - -f_schema = api.model('AutoProcProgramAttachment', dict_schema) -ma_schema = AutoProcProgramAttachmentSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/auto_proc_program_message.py b/pyispyb/core/schemas/auto_proc_program_message.py deleted file mode 100644 index beb17d8d..00000000 --- a/pyispyb/core/schemas/auto_proc_program_message.py +++ /dev/null @@ -1,54 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
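[editor's sketch, not part of this patch] As documented on importanceRank above, higher numbers mark more important attachments within a given autoProcProgramId and fileType. A hedged sketch of that access pattern; the attachment objects and their attributes are assumed row-like, not defined by this patch.

```python
from itertools import groupby


def top_attachments(attachments):
    """Pick the highest-ranked attachment per fileType (assumed row objects)."""
    by_type = sorted(attachments, key=lambda a: a.fileType)
    return {
        file_type: max(group, key=lambda a: a.importanceRank or 0)
        for file_type, group in groupby(by_type, key=lambda a: a.fileType)
    }
```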
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'autoProcProgramMessageId': f_fields.Integer(required=True, description=''), - 'autoProcProgramId': f_fields.Integer(required=False, description=''), - 'recordTimeStamp': f_fields.DateTime(required=True, description=''), - 'severity': f_fields.String(required=False, description='enum(ERROR,WARNING,INFO)'), - 'message': f_fields.String(required=False, description=''), - 'description': f_fields.String(required=False, description=''), - } - -class AutoProcProgramMessageSchema(Schema): - """Marshmallows schema class representing AutoProcProgramMessage table""" - - autoProcProgramMessageId = ma_fields.Integer() - autoProcProgramId = ma_fields.Integer() - recordTimeStamp = ma_fields.DateTime() - severity = ma_fields.String() - message = ma_fields.String() - description = ma_fields.String() - -f_schema = api.model('AutoProcProgramMessage', dict_schema) -ma_schema = AutoProcProgramMessageSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/auto_proc_scaling.py b/pyispyb/core/schemas/auto_proc_scaling.py deleted file mode 100644 index ea6797a1..00000000 --- a/pyispyb/core/schemas/auto_proc_scaling.py +++ /dev/null @@ -1,48 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'autoProcScalingId': f_fields.Integer(required=True, description='Primary key (auto-incremented)'), - 'autoProcId': f_fields.Integer(required=False, description='Related autoProc item (used by foreign key)'), - 'recordTimeStamp': f_fields.DateTime(required=False, description='Creation or last update date/time'), - } - -class AutoProcScalingSchema(Schema): - """Marshmallows schema class representing AutoProcScaling table""" - - autoProcScalingId = ma_fields.Integer() - autoProcId = ma_fields.Integer() - recordTimeStamp = ma_fields.DateTime() - -f_schema = api.model('AutoProcScaling', dict_schema) -ma_schema = AutoProcScalingSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/auto_proc_scaling_statistics.py b/pyispyb/core/schemas/auto_proc_scaling_statistics.py deleted file mode 100644 index 335ab036..00000000 --- a/pyispyb/core/schemas/auto_proc_scaling_statistics.py +++ /dev/null @@ -1,88 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. 
- -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'autoProcScalingStatisticsId': f_fields.Integer(required=True, description='Primary key (auto-incremented)'), - 'autoProcScalingId': f_fields.Integer(required=False, description='Related autoProcScaling item (used by foreign key)'), - 'scalingStatisticsType': f_fields.String(required=True, description='Scaling statistics typeenum(overall,innerShell,outerShell)'), - 'comments': f_fields.String(required=False, description='Comments...'), - 'resolutionLimitLow': f_fields.Float(required=False, description='Low resolution limit'), - 'resolutionLimitHigh': f_fields.Float(required=False, description='High resolution limit'), - 'rMerge': f_fields.Float(required=False, description='Rmerge'), - 'rMeasWithinIPlusIMinus': f_fields.Float(required=False, description='Rmeas (within I+/I-)'), - 'rMeasAllIPlusIMinus': f_fields.Float(required=False, description='Rmeas (all I+ & I-)'), - 'rPimWithinIPlusIMinus': f_fields.Float(required=False, description='Rpim (within I+/I-) '), - 'rPimAllIPlusIMinus': f_fields.Float(required=False, description='Rpim (all I+ & I-)'), - 'fractionalPartialBias': f_fields.Float(required=False, description='Fractional partial bias'), - 'nTotalObservations': f_fields.Integer(required=False, description='Total number of observations'), - 'nTotalUniqueObservations': f_fields.Integer(required=False, description='Total number unique'), - 'meanIOverSigI': f_fields.Float(required=False, description='Mean((I)/sd(I))'), - 'completeness': f_fields.Float(required=False, description='Completeness'), - 'multiplicity': f_fields.Float(required=False, description='Multiplicity'), - 'anomalousCompleteness': f_fields.Float(required=False, description='Anomalous completeness'), - 'anomalousMultiplicity': f_fields.Float(required=False, description='Anomalous multiplicity'), - 'recordTimeStamp': f_fields.DateTime(required=False, description='Creation or last update date/time'), - 'anomalous': f_fields.Integer(required=False, description='boolean type:0 noanoum - 1 anoum'), - 'ccHalf': f_fields.Float(required=False, description='information from XDS'), - 'ccAnomalous': f_fields.Float(required=False, description=''), - } - -class AutoProcScalingStatisticsSchema(Schema): - """Marshmallows schema class representing AutoProcScalingStatistics table""" - - autoProcScalingStatisticsId = ma_fields.Integer() - autoProcScalingId = ma_fields.Integer() - scalingStatisticsType = ma_fields.String() - comments = ma_fields.String() - resolutionLimitLow = ma_fields.Float() - resolutionLimitHigh = ma_fields.Float() - rMerge = ma_fields.Float() - rMeasWithinIPlusIMinus = ma_fields.Float() - rMeasAllIPlusIMinus = ma_fields.Float() - rPimWithinIPlusIMinus = 
ma_fields.Float() - rPimAllIPlusIMinus = ma_fields.Float() - fractionalPartialBias = ma_fields.Float() - nTotalObservations = ma_fields.Integer() - nTotalUniqueObservations = ma_fields.Integer() - meanIOverSigI = ma_fields.Float() - completeness = ma_fields.Float() - multiplicity = ma_fields.Float() - anomalousCompleteness = ma_fields.Float() - anomalousMultiplicity = ma_fields.Float() - recordTimeStamp = ma_fields.DateTime() - anomalous = ma_fields.Integer() - ccHalf = ma_fields.Float() - ccAnomalous = ma_fields.Float() - -f_schema = api.model('AutoProcScalingStatistics', dict_schema) -ma_schema = AutoProcScalingStatisticsSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/auto_proc_status.py b/pyispyb/core/schemas/auto_proc_status.py deleted file mode 100644 index deca09fb..00000000 --- a/pyispyb/core/schemas/auto_proc_status.py +++ /dev/null @@ -1,54 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'autoProcStatusId': f_fields.Integer(required=True, description='Primary key (auto-incremented)'), - 'autoProcIntegrationId': f_fields.Integer(required=True, description=''), - 'step': f_fields.String(required=True, description='autoprocessing stepenum(Indexing,Integration,Correction,Scaling,Importing)'), - 'status': f_fields.String(required=True, description='autoprocessing statusenum(Launched,Successful,Failed)'), - 'comments': f_fields.String(required=False, description='comments'), - 'bltimeStamp': f_fields.DateTime(required=True, description=''), - } - -class AutoProcStatusSchema(Schema): - """Marshmallows schema class representing AutoProcStatus table""" - - autoProcStatusId = ma_fields.Integer() - autoProcIntegrationId = ma_fields.Integer() - step = ma_fields.String() - status = ma_fields.String() - comments = ma_fields.String() - bltimeStamp = ma_fields.DateTime() - -f_schema = api.model('AutoProcStatus', dict_schema) -ma_schema = AutoProcStatusSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/beam_calendar.py b/pyispyb/core/schemas/beam_calendar.py deleted file mode 100644 index c627dc14..00000000 --- a/pyispyb/core/schemas/beam_calendar.py +++ /dev/null @@ -1,52 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. 
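[editor's sketch, not part of this patch] AutoProcScalingStatistics above stores one row per scalingStatisticsType (overall, innerShell, outerShell), so a common access pattern is to index the rows by shell; `rows` is assumed to be an iterable of row-like objects.

```python
def stats_by_shell(rows):
    shells = {row.scalingStatisticsType: row for row in rows}
    overall = shells.get("overall")
    if overall is not None:
        # meanIOverSigI is Mean((I)/sd(I)); completeness is a percentage
        print(f"I/sig(I)={overall.meanIOverSigI}, completeness={overall.completeness}%")
    return shells
```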
- -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'beamCalendarId': f_fields.Integer(required=True, description=''), - 'run': f_fields.String(required=True, description=''), - 'beamStatus': f_fields.String(required=True, description=''), - 'startDate': f_fields.DateTime(required=True, description=''), - 'endDate': f_fields.DateTime(required=True, description=''), - } - -class BeamCalendarSchema(Schema): - """Marshmallows schema class representing BeamCalendar table""" - - beamCalendarId = ma_fields.Integer() - run = ma_fields.String() - beamStatus = ma_fields.String() - startDate = ma_fields.DateTime() - endDate = ma_fields.DateTime() - -f_schema = api.model('BeamCalendar', dict_schema) -ma_schema = BeamCalendarSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/beamline_setup.py b/pyispyb/core/schemas/beamline_setup.py deleted file mode 100644 index bc4bd2e4..00000000 --- a/pyispyb/core/schemas/beamline_setup.py +++ /dev/null @@ -1,134 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
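[editor's sketch, not part of this patch] BeamCalendar above pairs a startDate/endDate per run; if it were ported to the pydantic style introduced by this patch, a cross-field consistency check could be added with a pydantic v1 validator:

```python
import datetime

from pydantic import BaseModel, validator


class BeamCalendar(BaseModel):
    beamCalendarId: int
    run: str
    beamStatus: str
    startDate: datetime.datetime
    endDate: datetime.datetime

    @validator("endDate")
    def end_after_start(cls, v, values):
        # reject calendar entries that end before they start
        if "startDate" in values and v < values["startDate"]:
            raise ValueError("endDate must not be before startDate")
        return v

    class Config:
        orm_mode = True
```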
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'beamLineSetupId': f_fields.Integer(required=True, description=''), - 'detectorId': f_fields.Integer(required=False, description=''), - 'synchrotronMode': f_fields.String(required=False, description=''), - 'undulatorType1': f_fields.String(required=False, description=''), - 'undulatorType2': f_fields.String(required=False, description=''), - 'undulatorType3': f_fields.String(required=False, description=''), - 'focalSpotSizeAtSample': f_fields.Float(required=False, description=''), - 'focusingOptic': f_fields.String(required=False, description=''), - 'beamDivergenceHorizontal': f_fields.Float(required=False, description=''), - 'beamDivergenceVertical': f_fields.Float(required=False, description=''), - 'polarisation': f_fields.Float(required=False, description=''), - 'monochromatorType': f_fields.String(required=False, description=''), - 'setupDate': f_fields.DateTime(required=False, description=''), - 'synchrotronName': f_fields.String(required=False, description=''), - 'maxExpTimePerDataCollection': f_fields.String(required=False, description=''), - 'maxExposureTimePerImage': f_fields.Float(required=False, description='unit: seconds'), - 'minExposureTimePerImage': f_fields.String(required=False, description=''), - 'goniostatMaxOscillationSpeed': f_fields.String(required=False, description=''), - 'goniostatMaxOscillationWidth': f_fields.String(required=False, description='unit: degrees'), - 'goniostatMinOscillationWidth': f_fields.String(required=False, description=''), - 'maxTransmission': f_fields.String(required=False, description='unit: percentage'), - 'minTransmission': f_fields.String(required=False, description=''), - 'recordTimeStamp': f_fields.DateTime(required=True, description='Creation or last update date/time'), - 'CS': f_fields.Float(required=False, description='Spherical Aberration, Units: mm?'), - 'beamlineName': f_fields.String(required=False, description='Beamline that this setup relates to'), - 'beamSizeXMin': f_fields.Float(required=False, description='unit: um'), - 'beamSizeXMax': f_fields.Float(required=False, description='unit: um'), - 'beamSizeYMin': f_fields.Float(required=False, description='unit: um'), - 'beamSizeYMax': f_fields.Float(required=False, description='unit: um'), - 'energyMin': f_fields.Float(required=False, description='unit: eV'), - 'energyMax': f_fields.Float(required=False, description='unit: eV'), - 'omegaMin': f_fields.Float(required=False, description='unit: degrees'), - 'omegaMax': f_fields.Float(required=False, description='unit: degrees'), - 'kappaMin': f_fields.Float(required=False, description='unit: degrees'), - 'kappaMax': f_fields.Float(required=False, description='unit: degrees'), - 'phiMin': f_fields.Float(required=False, description='unit: degrees'), - 'phiMax': f_fields.Float(required=False, description='unit: degrees'), - 'active': f_fields.Integer(required=True, description=''), - 'numberOfImagesMax': f_fields.Integer(required=False, description=''), - 'numberOfImagesMin': f_fields.Integer(required=False, description=''), - 'boxSizeXMin': f_fields.String(required=False, description='For gridscans, unit: um'), - 'boxSizeXMax': f_fields.String(required=False, description='For gridscans, unit: um'), - 'boxSizeYMin': f_fields.String(required=False, description='For gridscans, 
unit: um'), - 'boxSizeYMax': f_fields.String(required=False, description='For gridscans, unit: um'), - 'monoBandwidthMin': f_fields.String(required=False, description='unit: percentage'), - 'monoBandwidthMax': f_fields.String(required=False, description='unit: percentage'), - } - -class BeamLineSetupSchema(Schema): - """Marshmallows schema class representing BeamLineSetup table""" - - beamLineSetupId = ma_fields.Integer() - detectorId = ma_fields.Integer() - synchrotronMode = ma_fields.String() - undulatorType1 = ma_fields.String() - undulatorType2 = ma_fields.String() - undulatorType3 = ma_fields.String() - focalSpotSizeAtSample = ma_fields.Float() - focusingOptic = ma_fields.String() - beamDivergenceHorizontal = ma_fields.Float() - beamDivergenceVertical = ma_fields.Float() - polarisation = ma_fields.Float() - monochromatorType = ma_fields.String() - setupDate = ma_fields.DateTime() - synchrotronName = ma_fields.String() - maxExpTimePerDataCollection = ma_fields.String() - maxExposureTimePerImage = ma_fields.Float() - minExposureTimePerImage = ma_fields.String() - goniostatMaxOscillationSpeed = ma_fields.String() - goniostatMaxOscillationWidth = ma_fields.String() - goniostatMinOscillationWidth = ma_fields.String() - maxTransmission = ma_fields.String() - minTransmission = ma_fields.String() - recordTimeStamp = ma_fields.DateTime() - CS = ma_fields.Float() - beamlineName = ma_fields.String() - beamSizeXMin = ma_fields.Float() - beamSizeXMax = ma_fields.Float() - beamSizeYMin = ma_fields.Float() - beamSizeYMax = ma_fields.Float() - energyMin = ma_fields.Float() - energyMax = ma_fields.Float() - omegaMin = ma_fields.Float() - omegaMax = ma_fields.Float() - kappaMin = ma_fields.Float() - kappaMax = ma_fields.Float() - phiMin = ma_fields.Float() - phiMax = ma_fields.Float() - active = ma_fields.Integer() - numberOfImagesMax = ma_fields.Integer() - numberOfImagesMin = ma_fields.Integer() - boxSizeXMin = ma_fields.String() - boxSizeXMax = ma_fields.String() - boxSizeYMin = ma_fields.String() - boxSizeYMax = ma_fields.String() - monoBandwidthMin = ma_fields.String() - monoBandwidthMax = ma_fields.String() - -f_schema = api.model('BeamLineSetup', dict_schema) -ma_schema = BeamLineSetupSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/component_type.py b/pyispyb/core/schemas/component_type.py deleted file mode 100644 index 2b16165d..00000000 --- a/pyispyb/core/schemas/component_type.py +++ /dev/null @@ -1,46 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
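[editor's sketch, not part of this patch] BeamLineSetup above is largely min/max pairs (energy in eV, omega/kappa/phi in degrees, beam sizes in um), and a natural consumer is a range check on requested acquisition values; `setup` is assumed to be a row-like object with the energyMin/energyMax columns shown above.

```python
def check_energy(setup, energy_ev: float) -> float:
    """Validate a requested energy against the BeamLineSetup limits (eV)."""
    if setup.energyMin is not None and energy_ev < setup.energyMin:
        raise ValueError(f"{energy_ev} eV is below the beamline minimum {setup.energyMin}")
    if setup.energyMax is not None and energy_ev > setup.energyMax:
        raise ValueError(f"{energy_ev} eV is above the beamline maximum {setup.energyMax}")
    return energy_ev
```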
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'componentTypeId': f_fields.Integer(required=True, description=''), - 'name': f_fields.String(required=True, description=''), - } - -class ComponentTypeSchema(Schema): - """Marshmallows schema class representing ComponentType table""" - - componentTypeId = ma_fields.Integer() - name = ma_fields.String() - -f_schema = api.model('ComponentType', dict_schema) -ma_schema = ComponentTypeSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/container.py b/pyispyb/core/schemas/container.py deleted file mode 100644 index eaef19d9..00000000 --- a/pyispyb/core/schemas/container.py +++ /dev/null @@ -1,84 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'containerId': f_fields.Integer(required=True, description=''), - 'dewarId': f_fields.Integer(required=False, description=''), - 'code': f_fields.String(required=False, description=''), - 'containerType': f_fields.String(required=False, description=''), - 'capacity': f_fields.Integer(required=False, description=''), - 'sampleChangerLocation': f_fields.String(required=False, description=''), - 'containerStatus': f_fields.String(required=False, description=''), - 'bltimeStamp': f_fields.DateTime(required=False, description=''), - 'beamlineLocation': f_fields.String(required=False, description=''), - 'screenId': f_fields.Integer(required=False, description=''), - 'scheduleId': f_fields.Integer(required=False, description=''), - 'barcode': f_fields.String(required=False, description=''), - 'imagerId': f_fields.Integer(required=False, description=''), - 'sessionId': f_fields.Integer(required=False, description=''), - 'ownerId': f_fields.Integer(required=False, description=''), - 'requestedImagerId': f_fields.Integer(required=False, description=''), - 'requestedReturn': f_fields.Integer(required=False, description='True for requesting return, False means container will be disposed'), - 'comments': f_fields.String(required=False, description=''), - 'experimentType': f_fields.String(required=False, description=''), - 'storageTemperature': f_fields.Float(required=False, description=''), - 'containerRegistryId': f_fields.Integer(required=False, description=''), - } - -class ContainerSchema(Schema): - """Marshmallows schema class representing Container table""" - - containerId = ma_fields.Integer() - dewarId = ma_fields.Integer() - code = ma_fields.String() - 
containerType = ma_fields.String() - capacity = ma_fields.Integer() - sampleChangerLocation = ma_fields.String() - containerStatus = ma_fields.String() - bltimeStamp = ma_fields.DateTime() - beamlineLocation = ma_fields.String() - screenId = ma_fields.Integer() - scheduleId = ma_fields.Integer() - barcode = ma_fields.String() - imagerId = ma_fields.Integer() - sessionId = ma_fields.Integer() - ownerId = ma_fields.Integer() - requestedImagerId = ma_fields.Integer() - requestedReturn = ma_fields.Integer() - comments = ma_fields.String() - experimentType = ma_fields.String() - storageTemperature = ma_fields.Float() - containerRegistryId = ma_fields.Integer() - -f_schema = api.model('Container', dict_schema) -ma_schema = ContainerSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/containers.py b/pyispyb/core/schemas/containers.py new file mode 100644 index 00000000..e56daab9 --- /dev/null +++ b/pyispyb/core/schemas/containers.py @@ -0,0 +1,39 @@ +from typing import Optional +from pydantic import BaseModel, Field + +from ispyb import models + +from .dewars import DewarShipping + +c = models.Container + + +class ContainerDewar(BaseModel): + code: str = Field(title="Name") + + Shipping: DewarShipping + + class Config: + orm_mode = True + + +class ContainerCreate(BaseModel): + code: str = Field(title="Name") + dewarId: int + containerType: str + + sampleChangerLocation: Optional[str] = Field( + description="Position in sample change" + ) + beamlineLocation: Optional[str] = Field( + description="Beamline if container is assigned" + ) + + +class Container(ContainerCreate): + containerId: int + + Dewar: ContainerDewar + + class Config: + orm_mode = True diff --git a/pyispyb/core/schemas/crystal.py b/pyispyb/core/schemas/crystal.py index 961be726..f7c018e6 100644 --- a/pyispyb/core/schemas/crystal.py +++ b/pyispyb/core/schemas/crystal.py @@ -22,67 +22,31 @@ __license__ = "LGPLv3+" +from typing import Optional +from pydantic import BaseModel, Field +from ispyb import models -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'crystalId': f_fields.Integer(required=True, description=''), - 'diffractionPlanId': f_fields.Integer(required=False, description=''), - 'proteinId': f_fields.Integer(required=True, description=''), - 'crystalUUID': f_fields.String(required=False, description=''), - 'name': f_fields.String(required=False, description=''), - 'spaceGroup': f_fields.String(required=False, description=''), - 'morphology': f_fields.String(required=False, description=''), - 'color': f_fields.String(required=False, description=''), - 'size_X': f_fields.String(required=False, description=''), - 'size_Y': f_fields.String(required=False, description=''), - 'size_Z': f_fields.String(required=False, description=''), - 'cell_a': f_fields.String(required=False, description=''), - 'cell_b': f_fields.String(required=False, description=''), - 'cell_c': f_fields.String(required=False, description=''), - 'cell_alpha': f_fields.String(required=False, description=''), - 'cell_beta': f_fields.String(required=False, description=''), - 'cell_gamma': f_fields.String(required=False, description=''), - 'comments': f_fields.String(required=False, description=''), - 'pdbFileName': f_fields.String(required=False, description='pdb file name'), - 'pdbFilePath': f_fields.String(required=False, description='pdb file path'), - 
'recordTimeStamp': f_fields.DateTime(required=True, description='Creation or last update date/time'), - 'abundance': f_fields.Float(required=False, description=''), - 'theoreticalDensity': f_fields.Float(required=False, description=''), - } - -class CrystalSchema(Schema): - """Marshmallows schema class representing Crystal table""" - - crystalId = ma_fields.Integer() - diffractionPlanId = ma_fields.Integer() - proteinId = ma_fields.Integer() - crystalUUID = ma_fields.String() - name = ma_fields.String() - spaceGroup = ma_fields.String() - morphology = ma_fields.String() - color = ma_fields.String() - size_X = ma_fields.String() - size_Y = ma_fields.String() - size_Z = ma_fields.String() - cell_a = ma_fields.String() - cell_b = ma_fields.String() - cell_c = ma_fields.String() - cell_alpha = ma_fields.String() - cell_beta = ma_fields.String() - cell_gamma = ma_fields.String() - comments = ma_fields.String() - pdbFileName = ma_fields.String() - pdbFilePath = ma_fields.String() - recordTimeStamp = ma_fields.DateTime() - abundance = ma_fields.Float() - theoreticalDensity = ma_fields.Float() - -f_schema = api.model('Crystal', dict_schema) -ma_schema = CrystalSchema() -json_schema = JSONSchema().dump(ma_schema) +from .protein import Protein + +c = models.Crystal + + +class CrystalBase(BaseModel): + cell_a: Optional[float] = Field(title="Cell A", nullable=True) + cell_b: Optional[float] = Field(title="Cell B", nullable=True) + cell_c: Optional[float] = Field(title="Cell C", nullable=True) + cell_alpha: Optional[float] = Field(title="Cell Alpha", nullable=True) + cell_beta: Optional[float] = Field(title="Cell Beta", nullable=True) + cell_gamma: Optional[float] = Field(title="Cell Gamma", nullable=True) + Protein: Protein + + +class Crystal(CrystalBase): + crystalId: int + proteinId: int = Field(title="Protein") + + Protein: Protein + + class Config: + orm_mode = True diff --git a/pyispyb/core/schemas/data.py b/pyispyb/core/schemas/data.py new file mode 100644 index 00000000..c4e7cb82 --- /dev/null +++ b/pyispyb/core/schemas/data.py @@ -0,0 +1,12 @@ +from pydantic import BaseModel + + +class ImageHeader(BaseModel): + pass + + +class ImageHistogram(BaseModel): + values: list[int] + bins: list[int] + shape: tuple + max: float diff --git a/pyispyb/core/schemas/data_collection.py b/pyispyb/core/schemas/data_collection.py deleted file mode 100644 index 6dad9335..00000000 --- a/pyispyb/core/schemas/data_collection.py +++ /dev/null @@ -1,246 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
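[editor's sketch, not part of this patch] The new data.py above defines ImageHistogram with values/bins/shape/max. One way to populate it from a detector image is with numpy; numpy is an assumption here, and treating `bins` as the integer left edge of each bin is an interpretation, since the model does not say.

```python
import numpy as np

from pyispyb.core.schemas.data import ImageHistogram  # added by this patch


def image_histogram(image: np.ndarray, nbins: int = 256) -> ImageHistogram:
    counts, edges = np.histogram(image, bins=nbins)  # len(edges) == nbins + 1
    return ImageHistogram(
        values=counts.tolist(),
        bins=edges[:-1].astype(int).tolist(),  # assumed: left edge of each bin
        shape=image.shape,
        max=float(image.max()),
    )
```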
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'dataCollectionId': f_fields.Integer(required=True, description='Primary key (auto-incremented)'), - 'BLSAMPLEID': f_fields.Integer(required=False, description=''), - 'SESSIONID': f_fields.Integer(required=False, description=''), - 'experimenttype': f_fields.String(required=False, description=''), - 'dataCollectionNumber': f_fields.Integer(required=False, description=''), - 'startTime': f_fields.DateTime(required=False, description='Start time of the dataCollection'), - 'endTime': f_fields.DateTime(required=False, description='end time of the dataCollection'), - 'runStatus': f_fields.String(required=False, description=''), - 'axisStart': f_fields.Float(required=False, description=''), - 'axisEnd': f_fields.Float(required=False, description=''), - 'axisRange': f_fields.Float(required=False, description=''), - 'overlap': f_fields.Float(required=False, description=''), - 'numberOfImages': f_fields.Integer(required=False, description=''), - 'startImageNumber': f_fields.Integer(required=False, description=''), - 'numberOfPasses': f_fields.Integer(required=False, description=''), - 'exposureTime': f_fields.Float(required=False, description=''), - 'imageDirectory': f_fields.String(required=False, description='The directory where files reside - should end with a slash'), - 'imagePrefix': f_fields.String(required=False, description=''), - 'imageSuffix': f_fields.String(required=False, description=''), - 'imageContainerSubPath': f_fields.String(required=False, description='Internal path of a HDF5 file pointing to the data for this data collection'), - 'fileTemplate': f_fields.String(required=False, description=''), - 'wavelength': f_fields.Float(required=False, description=''), - 'resolution': f_fields.Float(required=False, description=''), - 'detectorDistance': f_fields.Float(required=False, description=''), - 'xBeam': f_fields.Float(required=False, description=''), - 'yBeam': f_fields.Float(required=False, description=''), - 'comments': f_fields.String(required=False, description=''), - 'printableForReport': f_fields.Integer(required=False, description=''), - 'CRYSTALCLASS': f_fields.String(required=False, description=''), - 'slitGapVertical': f_fields.Float(required=False, description=''), - 'slitGapHorizontal': f_fields.Float(required=False, description=''), - 'transmission': f_fields.Float(required=False, description=''), - 'synchrotronMode': f_fields.String(required=False, description=''), - 'xtalSnapshotFullPath1': f_fields.String(required=False, description=''), - 'xtalSnapshotFullPath2': f_fields.String(required=False, description=''), - 'xtalSnapshotFullPath3': f_fields.String(required=False, description=''), - 'xtalSnapshotFullPath4': f_fields.String(required=False, description=''), - 'rotationAxis': f_fields.String(required=False, description='enum(Omega,Kappa,Phi)'), - 'phiStart': f_fields.Float(required=False, description=''), - 'kappaStart': f_fields.Float(required=False, description=''), - 'omegaStart': f_fields.Float(required=False, description=''), - 'chiStart': f_fields.Float(required=False, description=''), - 'resolutionAtCorner': f_fields.Float(required=False, description=''), - 'detector2Theta': f_fields.Float(required=False, description=''), - 'DETECTORMODE': f_fields.String(required=False, description=''), - 
'undulatorGap1': f_fields.Float(required=False, description=''), - 'undulatorGap2': f_fields.Float(required=False, description=''), - 'undulatorGap3': f_fields.Float(required=False, description=''), - 'beamSizeAtSampleX': f_fields.Float(required=False, description=''), - 'beamSizeAtSampleY': f_fields.Float(required=False, description=''), - 'centeringMethod': f_fields.String(required=False, description=''), - 'averageTemperature': f_fields.Float(required=False, description=''), - 'ACTUALSAMPLEBARCODE': f_fields.String(required=False, description=''), - 'ACTUALSAMPLESLOTINCONTAINER': f_fields.Integer(required=False, description=''), - 'ACTUALCONTAINERBARCODE': f_fields.String(required=False, description=''), - 'ACTUALCONTAINERSLOTINSC': f_fields.Integer(required=False, description=''), - 'actualCenteringPosition': f_fields.String(required=False, description=''), - 'beamShape': f_fields.String(required=False, description=''), - 'dataCollectionGroupId': f_fields.Integer(required=True, description='references DataCollectionGroup table'), - 'POSITIONID': f_fields.Integer(required=False, description=''), - 'detectorId': f_fields.Integer(required=False, description='references Detector table'), - 'FOCALSPOTSIZEATSAMPLEX': f_fields.Float(required=False, description=''), - 'POLARISATION': f_fields.Float(required=False, description=''), - 'FOCALSPOTSIZEATSAMPLEY': f_fields.Float(required=False, description=''), - 'APERTUREID': f_fields.Integer(required=False, description=''), - 'screeningOrigId': f_fields.Integer(required=False, description=''), - 'startPositionId': f_fields.Integer(required=False, description=''), - 'endPositionId': f_fields.Integer(required=False, description=''), - 'flux': f_fields.String(required=False, description=''), - 'strategySubWedgeOrigId': f_fields.Integer(required=False, description='references ScreeningStrategySubWedge table'), - 'blSubSampleId': f_fields.Integer(required=False, description=''), - 'flux_end': f_fields.String(required=False, description='flux measured after the collect'), - 'bestWilsonPlotPath': f_fields.String(required=False, description=''), - 'processedDataFile': f_fields.String(required=False, description=''), - 'datFullPath': f_fields.String(required=False, description=''), - 'magnification': f_fields.Float(required=False, description='Calibrated magnification, Units: dimensionless'), - 'totalAbsorbedDose': f_fields.Float(required=False, description='Unit: e-/A^2 for EM'), - 'binning': f_fields.Integer(required=False, description='1 or 2. Number of pixels to process as 1. 
(Use mean value.)'), - 'particleDiameter': f_fields.Float(required=False, description='Unit: nm'), - 'boxSize_CTF': f_fields.Float(required=False, description='Unit: pixels'), - 'minResolution': f_fields.Float(required=False, description='Unit: A'), - 'minDefocus': f_fields.Float(required=False, description='Unit: A'), - 'maxDefocus': f_fields.Float(required=False, description='Unit: A'), - 'defocusStepSize': f_fields.Float(required=False, description='Unit: A'), - 'amountAstigmatism': f_fields.Float(required=False, description='Unit: A'), - 'extractSize': f_fields.Float(required=False, description='Unit: pixels'), - 'bgRadius': f_fields.Float(required=False, description='Unit: nm'), - 'voltage': f_fields.Float(required=False, description='Unit: kV'), - 'objAperture': f_fields.Float(required=False, description='Unit: um'), - 'c1aperture': f_fields.Float(required=False, description='Unit: um'), - 'c2aperture': f_fields.Float(required=False, description='Unit: um'), - 'c3aperture': f_fields.Float(required=False, description='Unit: um'), - 'c1lens': f_fields.Float(required=False, description='Unit: %'), - 'c2lens': f_fields.Float(required=False, description='Unit: %'), - 'c3lens': f_fields.Float(required=False, description='Unit: %'), - 'totalExposedDose': f_fields.Float(required=False, description='Units: e-/A^2'), - 'nominalMagnification': f_fields.Float(required=False, description='Nominal magnification: Units: dimensionless'), - 'nominalDefocus': f_fields.Float(required=False, description='Nominal defocus, Units: A'), - 'imageSizeX': f_fields.Integer(required=False, description='Image size in x, incase crop has been used, Units: pixels'), - 'imageSizeY': f_fields.Integer(required=False, description='Image size in y, Units: pixels'), - 'pixelSizeOnImage': f_fields.Float(required=False, description='Pixel size on image, calculated from magnification, duplicate? 
Units: um?'), - 'phasePlate': f_fields.Integer(required=False, description='Whether the phase plate was used'), - } - -class DataCollectionSchema(Schema): - """Marshmallows schema class representing DataCollection table""" - - dataCollectionId = ma_fields.Integer() - BLSAMPLEID = ma_fields.Integer() - SESSIONID = ma_fields.Integer() - experimenttype = ma_fields.String() - dataCollectionNumber = ma_fields.Integer() - startTime = ma_fields.DateTime() - endTime = ma_fields.DateTime() - runStatus = ma_fields.String() - axisStart = ma_fields.Float() - axisEnd = ma_fields.Float() - axisRange = ma_fields.Float() - overlap = ma_fields.Float() - numberOfImages = ma_fields.Integer() - startImageNumber = ma_fields.Integer() - numberOfPasses = ma_fields.Integer() - exposureTime = ma_fields.Float() - imageDirectory = ma_fields.String() - imagePrefix = ma_fields.String() - imageSuffix = ma_fields.String() - imageContainerSubPath = ma_fields.String() - fileTemplate = ma_fields.String() - wavelength = ma_fields.Float() - resolution = ma_fields.Float() - detectorDistance = ma_fields.Float() - xBeam = ma_fields.Float() - yBeam = ma_fields.Float() - comments = ma_fields.String() - printableForReport = ma_fields.Integer() - CRYSTALCLASS = ma_fields.String() - slitGapVertical = ma_fields.Float() - slitGapHorizontal = ma_fields.Float() - transmission = ma_fields.Float() - synchrotronMode = ma_fields.String() - xtalSnapshotFullPath1 = ma_fields.String() - xtalSnapshotFullPath2 = ma_fields.String() - xtalSnapshotFullPath3 = ma_fields.String() - xtalSnapshotFullPath4 = ma_fields.String() - rotationAxis = ma_fields.String() - phiStart = ma_fields.Float() - kappaStart = ma_fields.Float() - omegaStart = ma_fields.Float() - chiStart = ma_fields.Float() - resolutionAtCorner = ma_fields.Float() - detector2Theta = ma_fields.Float() - DETECTORMODE = ma_fields.String() - undulatorGap1 = ma_fields.Float() - undulatorGap2 = ma_fields.Float() - undulatorGap3 = ma_fields.Float() - beamSizeAtSampleX = ma_fields.Float() - beamSizeAtSampleY = ma_fields.Float() - centeringMethod = ma_fields.String() - averageTemperature = ma_fields.Float() - ACTUALSAMPLEBARCODE = ma_fields.String() - ACTUALSAMPLESLOTINCONTAINER = ma_fields.Integer() - ACTUALCONTAINERBARCODE = ma_fields.String() - ACTUALCONTAINERSLOTINSC = ma_fields.Integer() - actualCenteringPosition = ma_fields.String() - beamShape = ma_fields.String() - dataCollectionGroupId = ma_fields.Integer() - POSITIONID = ma_fields.Integer() - detectorId = ma_fields.Integer() - FOCALSPOTSIZEATSAMPLEX = ma_fields.Float() - POLARISATION = ma_fields.Float() - FOCALSPOTSIZEATSAMPLEY = ma_fields.Float() - APERTUREID = ma_fields.Integer() - screeningOrigId = ma_fields.Integer() - startPositionId = ma_fields.Integer() - endPositionId = ma_fields.Integer() - flux = ma_fields.String() - strategySubWedgeOrigId = ma_fields.Integer() - blSubSampleId = ma_fields.Integer() - flux_end = ma_fields.String() - bestWilsonPlotPath = ma_fields.String() - processedDataFile = ma_fields.String() - datFullPath = ma_fields.String() - magnification = ma_fields.Float() - totalAbsorbedDose = ma_fields.Float() - binning = ma_fields.Integer() - particleDiameter = ma_fields.Float() - boxSize_CTF = ma_fields.Float() - minResolution = ma_fields.Float() - minDefocus = ma_fields.Float() - maxDefocus = ma_fields.Float() - defocusStepSize = ma_fields.Float() - amountAstigmatism = ma_fields.Float() - extractSize = ma_fields.Float() - bgRadius = ma_fields.Float() - voltage = ma_fields.Float() - objAperture = ma_fields.Float() - 
c1aperture = ma_fields.Float() - c2aperture = ma_fields.Float() - c3aperture = ma_fields.Float() - c1lens = ma_fields.Float() - c2lens = ma_fields.Float() - c3lens = ma_fields.Float() - totalExposedDose = ma_fields.Float() - nominalMagnification = ma_fields.Float() - nominalDefocus = ma_fields.Float() - imageSizeX = ma_fields.Integer() - imageSizeY = ma_fields.Integer() - pixelSizeOnImage = ma_fields.Float() - phasePlate = ma_fields.Integer() - -f_schema = api.model('DataCollection', dict_schema) -ma_schema = DataCollectionSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/data_collection_group.py b/pyispyb/core/schemas/data_collection_group.py deleted file mode 100644 index aae03408..00000000 --- a/pyispyb/core/schemas/data_collection_group.py +++ /dev/null @@ -1,74 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'dataCollectionGroupId': f_fields.Integer(required=True, description='Primary key (auto-incremented)'), - 'sessionId': f_fields.Integer(required=True, description='references Session table'), - 'comments': f_fields.String(required=False, description='comments'), - 'blSampleId': f_fields.Integer(required=False, description='references BLSample table'), - 'experimentType': f_fields.String(required=False, description='Standard: Routine structure determination experiment. Time Resolved: Investigate the change of a system over time. 
Custom: Special or non-standard data collection.enum(SAD,SAD - Inverse Beam,OSC,Collect - Multiwedge,MAD,Helical,Multi-positional,Mesh,Burn,MAD - Inverse Beam,Characterization,Dehydration,tomo,experiment,EM,PDF,PDF+Bragg,Bragg,single particle,Serial Fixed,Serial Jet,Standard,Time Resolved,Diamond Anvil High Pressure,Custom)'), - 'startTime': f_fields.DateTime(required=False, description='Start time of the dataCollectionGroup'), - 'endTime': f_fields.DateTime(required=False, description='end time of the dataCollectionGroup'), - 'crystalClass': f_fields.String(required=False, description='Crystal Class for industrials users'), - 'detectorMode': f_fields.String(required=False, description='Detector mode'), - 'actualSampleBarcode': f_fields.String(required=False, description='Actual sample barcode'), - 'actualSampleSlotInContainer': f_fields.Integer(required=False, description='Actual sample slot number in container'), - 'actualContainerBarcode': f_fields.String(required=False, description='Actual container barcode'), - 'actualContainerSlotInSC': f_fields.Integer(required=False, description='Actual container slot number in sample changer'), - 'workflowId': f_fields.Integer(required=False, description=''), - 'xtalSnapshotFullPath': f_fields.String(required=False, description=''), - 'scanParameters': f_fields.String(required=False, description=''), - } - -class DataCollectionGroupSchema(Schema): - """Marshmallows schema class representing DataCollectionGroup table""" - - dataCollectionGroupId = ma_fields.Integer() - sessionId = ma_fields.Integer() - comments = ma_fields.String() - blSampleId = ma_fields.Integer() - experimentType = ma_fields.String() - startTime = ma_fields.DateTime() - endTime = ma_fields.DateTime() - crystalClass = ma_fields.String() - detectorMode = ma_fields.String() - actualSampleBarcode = ma_fields.String() - actualSampleSlotInContainer = ma_fields.Integer() - actualContainerBarcode = ma_fields.String() - actualContainerSlotInSC = ma_fields.Integer() - workflowId = ma_fields.Integer() - xtalSnapshotFullPath = ma_fields.String() - scanParameters = ma_fields.String() - -f_schema = api.model('DataCollectionGroup', dict_schema) -ma_schema = DataCollectionGroupSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/datacollections.py b/pyispyb/core/schemas/datacollections.py new file mode 100644 index 00000000..8dc73e50 --- /dev/null +++ b/pyispyb/core/schemas/datacollections.py @@ -0,0 +1,186 @@ +# import datetime + +import enum +from typing import Optional + +from pydantic import BaseModel, Field + + +class Workflow(BaseModel): + workflowId: int + comments: Optional[str] + status: Optional[str] + workflowTitle: Optional[str] + workflowType: Optional[str] + + class Config: + orm_mode = True + + +class WorkflowStepAttachment(str, enum.Enum): + imageResultFilePath = "imageResultFilePath" + # htmlResultFilePath = "htmlResultFilePath" + resultFilePath = "resultFilePath" + + +class WorkflowStepMetaData(BaseModel): + attachments: dict[str, bool] = Field(description="Attachment statuses") + + +class WorkflowStep(BaseModel): + workflowId: int + workflowStepId: int + workflowStepType: Optional[str] + status: Optional[str] + comments: Optional[str] + + metadata: WorkflowStepMetaData = Field(alias="_metadata") + + class Config: + orm_mode = True + + +class DataCollectionGroup(BaseModel): + dataCollectionGroupId: int + experimentType: Optional[str] + + Workflow: Optional[Workflow] + + class Config: + orm_mode = True + + +class GridInfo(BaseModel): + 
gridInfoId: int + + xOffset: Optional[float] + yOffset: Optional[float] + dx_mm: Optional[float] + dy_mm: Optional[float] + steps_x: Optional[float] + steps_y: Optional[float] + meshAngle: Optional[float] + orientation: Optional[str] + pixelsPerMicronX: Optional[float] + pixelsPerMicronY: Optional[float] + snapshot_offsetXPixel: Optional[float] + snapshot_offsetYPixel: Optional[float] + snaked: Optional[bool] + + class Config: + orm_mode = True + + +class DataCollectionMetaData(BaseModel): + snapshots: dict[str, bool] = Field(description="Snapshot statuses with ids 1-4") + + +class RotationAxis(str, enum.Enum): + omega = "omega" + phi = "phi" + + +class DataCollectionBase(BaseModel): + runStatus: Optional[str] = Field( + title="Status", description="`Successful` on success" + ) + + imageDirectory: Optional[str] = Field( + title="Directory", description="Directory where the data is saved" + ) + fileTemplate: Optional[str] = Field( + title="Data File Template", description="File template for data" + ) + imageContainerSubPath: Optional[str] = Field( + title="Image Sub Path", description="For hdf5 files, path to the images" + ) + numberOfImages: Optional[int] = Field(title="Number of Images / Points") + numberOfPasses: Optional[int] = Field(title="Number of Passes / Repeats") + + wavelength: Optional[float] = Field(title="Wavelength", unit="Å") + exposureTime: Optional[float] = Field(title="Exposure Time", unit="s") + flux: Optional[float] = Field(title="Flux", unit="ph/s") + xBeam: Optional[float] = Field(title="Beam Position (Horizontal)", unit="pixels") + yBeam: Optional[float] = Field(title="Beam Position (Vertical)", unit="pixels") + beamSizeAtSampleX: Optional[float] = Field( + title="Beam Size at Sample (Horizontal)", unit="mm" + ) + beamSizeAtSampleY: Optional[float] = Field( + title="Beam Size at Sample (Vertical)", unit="mm" + ) + transmission: Optional[float] = Field(title="Beam Transmision", unit="%") + resolution: Optional[float] = Field( + title="Resolution", description="At edge of detector", unit="Å" + ) + detectorDistance: Optional[float] = Field(title="Detector Distance", unit="mm") + + axisStart: Optional[float] = Field(title="Rotation Axis Start", unit="°") + axisEnd: Optional[float] = Field(title="Rotation Axis End", unit="°") + axisRange: Optional[float] = Field(title="Rotation Axis Oscillation", unit="°") + rotationAxis: Optional[str] = Field(title="Rotation Axis Motor") + overlap: Optional[float] = Field(title="Rotation Axis Overlap", unit="°") + + phiStart: Optional[float] = Field(title="Phi Start", unit="°") + kappaStart: Optional[float] = Field(title="Kappa Start", unit="°") + omegaStart: Optional[float] = Field(title="Omega Start", unit="°") + chiStart: Optional[float] = Field(title="Chi Start", unit="°") + + xBeamPix: Optional[float] = Field(title="Beam size X", unit="pixels") + yBeamPix: Optional[float] = Field(title="Beam size Y", unit="pixels") + + # EM + magnification: Optional[int] = Field(title="Magnification", unit="x") + binning: Optional[int] = Field(title="Binning") + particleDiameter: Optional[float] = Field(title="Particle Diameter", unit="nm") + # boxSize_CTF: Optional[float] = Field(unit="pixels") + # minResolution: Optional[float] = Field(unit="A") + # minDefocus: Optional[float] = Field(unit="A") + # maxDefocus: Optional[float] = Field(unit="A") + defocusStepSize: Optional[float] = Field(unit="A") + amountAstigmatism: Optional[float] = Field(unit="A") + # extractSize: Optional[float] = Field(unit="pixels") + # bgRadius: Optional[float] = 
Field(unit="nm") + voltage: Optional[float] = Field(unit="kV") + objAperture: Optional[float] = Field(unit="um") + # c1aperture: Optional[float] = Field(unit="um") + # c2aperture: Optional[float] = Field(unit="um") + # c3aperture: Optional[float] = Field(unit="um") + # c1lens: Optional[float] = Field(unit="%") + # c2lens: Optional[float] = Field(unit="%") + # c3lens: Optional[float] = Field(unit="%") + + +class DataCollection(DataCollectionBase): + dataCollectionId: int + + DataCollectionGroup: DataCollectionGroup + GridInfo: Optional[list[GridInfo]] + + metadata: DataCollectionMetaData = Field(alias="_metadata") + + class Config: + orm_mode = True + + +class DataCollectionFileAttachmentMetaData(BaseModel): + url: str = Field(description="Url to data collection file attachment") + fileName: str = Field(description="File name") + + +class DataCollectionFileAttachment(BaseModel): + dataCollectionFileAttachmentId: int + dataCollectionId: int + fileType: str + + metadata: DataCollectionFileAttachmentMetaData = Field(alias="_metadata") + + class Config: + orm_mode = True + + +class PerImageAnalysis(BaseModel): + dataCollectionId: Optional[int] + imageNumber: Optional[list[int]] = Field(description="Scan point") + totalIntegratedSignal: Optional[list[float]] = Field(description="Total signal") + goodBraggCandidates: Optional[list[int]] = Field(description="Number of spots") + method2Res: Optional[list[float]] = Field(description="Estimated resolution") diff --git a/pyispyb/core/schemas/detector.py b/pyispyb/core/schemas/detector.py deleted file mode 100644 index 15442097..00000000 --- a/pyispyb/core/schemas/detector.py +++ /dev/null @@ -1,92 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'detectorId': f_fields.Integer(required=True, description='Primary key (auto-incremented)'), - 'detectorType': f_fields.String(required=False, description=''), - 'detectorManufacturer': f_fields.String(required=False, description=''), - 'detectorModel': f_fields.String(required=False, description=''), - 'detectorPixelSizeHorizontal': f_fields.Float(required=False, description=''), - 'detectorPixelSizeVertical': f_fields.Float(required=False, description=''), - 'DETECTORMAXRESOLUTION': f_fields.Float(required=False, description=''), - 'DETECTORMINRESOLUTION': f_fields.Float(required=False, description=''), - 'detectorSerialNumber': f_fields.String(required=False, description=''), - 'detectorDistanceMin': f_fields.String(required=False, description=''), - 'detectorDistanceMax': f_fields.String(required=False, description=''), - 'trustedPixelValueRangeLower': f_fields.String(required=False, description=''), - 'trustedPixelValueRangeUpper': f_fields.String(required=False, description=''), - 'sensorThickness': f_fields.Float(required=False, description=''), - 'overload': f_fields.Float(required=False, description=''), - 'XGeoCorr': f_fields.String(required=False, description=''), - 'YGeoCorr': f_fields.String(required=False, description=''), - 'detectorMode': f_fields.String(required=False, description=''), - 'density': f_fields.Float(required=False, description=''), - 'composition': f_fields.String(required=False, description=''), - 'numberOfPixelsX': f_fields.Integer(required=False, description='Detector number of pixels in x'), - 'numberOfPixelsY': f_fields.Integer(required=False, description='Detector number of pixels in y'), - 'detectorRollMin': f_fields.String(required=False, description='unit: degrees'), - 'detectorRollMax': f_fields.String(required=False, description='unit: degrees'), - 'localName': f_fields.String(required=False, description='Colloquial name for the detector'), - } - -class DetectorSchema(Schema): - """Marshmallows schema class representing Detector table""" - - detectorId = ma_fields.Integer() - detectorType = ma_fields.String() - detectorManufacturer = ma_fields.String() - detectorModel = ma_fields.String() - detectorPixelSizeHorizontal = ma_fields.Float() - detectorPixelSizeVertical = ma_fields.Float() - DETECTORMAXRESOLUTION = ma_fields.Float() - DETECTORMINRESOLUTION = ma_fields.Float() - detectorSerialNumber = ma_fields.String() - detectorDistanceMin = ma_fields.String() - detectorDistanceMax = ma_fields.String() - trustedPixelValueRangeLower = ma_fields.String() - trustedPixelValueRangeUpper = ma_fields.String() - sensorThickness = ma_fields.Float() - overload = ma_fields.Float() - XGeoCorr = ma_fields.String() - YGeoCorr = ma_fields.String() - detectorMode = ma_fields.String() - density = ma_fields.Float() - composition = ma_fields.String() - numberOfPixelsX = ma_fields.Integer() - numberOfPixelsY = ma_fields.Integer() - detectorRollMin = ma_fields.String() - detectorRollMax = ma_fields.String() - localName = ma_fields.String() - -f_schema = api.model('Detector', dict_schema) -ma_schema = DetectorSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/dewar.py b/pyispyb/core/schemas/dewar.py deleted file mode 100644 index 068badd1..00000000 --- 
a/pyispyb/core/schemas/dewar.py +++ /dev/null @@ -1,78 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'dewarId': f_fields.Integer(required=True, description=''), - 'shippingId': f_fields.Integer(required=False, description=''), - 'code': f_fields.String(required=False, description=''), - 'comments': f_fields.String(required=False, description=''), - 'storageLocation': f_fields.String(required=False, description=''), - 'dewarStatus': f_fields.String(required=False, description=''), - 'bltimeStamp': f_fields.DateTime(required=False, description=''), - 'isStorageDewar': f_fields.Integer(required=False, description=''), - 'barCode': f_fields.String(required=False, description=''), - 'firstExperimentId': f_fields.Integer(required=False, description=''), - 'customsValue': f_fields.Integer(required=False, description=''), - 'transportValue': f_fields.Integer(required=False, description=''), - 'trackingNumberToSynchrotron': f_fields.String(required=False, description=''), - 'trackingNumberFromSynchrotron': f_fields.String(required=False, description=''), - 'type': f_fields.String(required=True, description='enum(Dewar,Toolbox)'), - 'FACILITYCODE': f_fields.String(required=False, description=''), - 'weight': f_fields.Float(required=False, description='dewar weight in kg'), - 'deliveryAgent_barcode': f_fields.String(required=False, description='Courier piece barcode (not the airway bill)'), - } - -class DewarSchema(Schema): - """Marshmallows schema class representing Dewar table""" - - dewarId = ma_fields.Integer() - shippingId = ma_fields.Integer() - code = ma_fields.String() - comments = ma_fields.String() - storageLocation = ma_fields.String() - dewarStatus = ma_fields.String() - bltimeStamp = ma_fields.DateTime() - isStorageDewar = ma_fields.Integer() - barCode = ma_fields.String() - firstExperimentId = ma_fields.Integer() - customsValue = ma_fields.Integer() - transportValue = ma_fields.Integer() - trackingNumberToSynchrotron = ma_fields.String() - trackingNumberFromSynchrotron = ma_fields.String() - type = ma_fields.String() - FACILITYCODE = ma_fields.String() - weight = ma_fields.Float() - deliveryAgent_barcode = ma_fields.String() - -f_schema = api.model('Dewar', dict_schema) -ma_schema = DewarSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/dewars.py b/pyispyb/core/schemas/dewars.py new file mode 100644 index 00000000..8ca2f247 --- /dev/null +++ b/pyispyb/core/schemas/dewars.py @@ -0,0 +1,29 @@ +from typing import Optional +from pydantic import BaseModel, Field + +from ispyb import models + +d = models.Dewar + + +class 
DewarShipping(BaseModel): + proposalId: int + shippingName: str = Field(title="Name") + + class Config: + orm_mode = True + + +class DewarCreate(BaseModel): + shippingId: int + code: str = Field(title="Name") + dewarType: Optional[str] + + +class Dewar(DewarCreate): + dewarId: int + + Shipping: DewarShipping + + class Config: + orm_mode = True diff --git a/pyispyb/core/schemas/diffraction_plan.py b/pyispyb/core/schemas/diffraction_plan.py deleted file mode 100644 index 89371171..00000000 --- a/pyispyb/core/schemas/diffraction_plan.py +++ /dev/null @@ -1,146 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'diffractionPlanId': f_fields.Integer(required=True, description=''), - 'name': f_fields.String(required=False, description=''), - 'experimentKind': f_fields.String(required=False, description='enum(Default,MXPressE,MXPressO,MXPressE_SAD,MXScore,MXPressM,MAD,SAD,Fixed,Ligand binding,Refinement,OSC,MAD - Inverse Beam,SAD - Inverse Beam,MESH,XFE,Stepped transmission)'), - 'observedResolution': f_fields.Float(required=False, description=''), - 'minimalResolution': f_fields.Float(required=False, description=''), - 'exposureTime': f_fields.Float(required=False, description=''), - 'oscillationRange': f_fields.Float(required=False, description=''), - 'maximalResolution': f_fields.Float(required=False, description=''), - 'screeningResolution': f_fields.Float(required=False, description=''), - 'radiationSensitivity': f_fields.Float(required=False, description=''), - 'anomalousScatterer': f_fields.String(required=False, description=''), - 'preferredBeamSizeX': f_fields.Float(required=False, description=''), - 'preferredBeamSizeY': f_fields.Float(required=False, description=''), - 'preferredBeamDiameter': f_fields.Float(required=False, description=''), - 'comments': f_fields.String(required=False, description=''), - 'DIFFRACTIONPLANUUID': f_fields.String(required=False, description=''), - 'aimedCompleteness': f_fields.String(required=False, description=''), - 'aimedIOverSigmaAtHighestRes': f_fields.String(required=False, description=''), - 'aimedMultiplicity': f_fields.String(required=False, description=''), - 'aimedResolution': f_fields.String(required=False, description=''), - 'anomalousData': f_fields.Integer(required=False, description=''), - 'complexity': f_fields.String(required=False, description=''), - 'estimateRadiationDamage': f_fields.Integer(required=False, description=''), - 'forcedSpaceGroup': f_fields.String(required=False, description=''), - 'requiredCompleteness': f_fields.String(required=False, description=''), - 'requiredMultiplicity': f_fields.String(required=False, 
description=''), - 'requiredResolution': f_fields.String(required=False, description=''), - 'strategyOption': f_fields.String(required=False, description=''), - 'kappaStrategyOption': f_fields.String(required=False, description=''), - 'numberOfPositions': f_fields.Integer(required=False, description=''), - 'minDimAccrossSpindleAxis': f_fields.String(required=False, description='minimum dimension accross the spindle axis'), - 'maxDimAccrossSpindleAxis': f_fields.String(required=False, description='maximum dimension accross the spindle axis'), - 'radiationSensitivityBeta': f_fields.String(required=False, description=''), - 'radiationSensitivityGamma': f_fields.String(required=False, description=''), - 'minOscWidth': f_fields.Float(required=False, description=''), - 'recordTimeStamp': f_fields.DateTime(required=True, description='Creation or last update date/time'), - 'monochromator': f_fields.String(required=False, description='DMM or DCM'), - 'energy': f_fields.Float(required=False, description='eV'), - 'transmission': f_fields.Float(required=False, description='Decimal fraction in range [0,1]'), - 'boxSizeX': f_fields.Float(required=False, description='microns'), - 'boxSizeY': f_fields.Float(required=False, description='microns'), - 'kappaStart': f_fields.Float(required=False, description='degrees'), - 'axisStart': f_fields.Float(required=False, description='degrees'), - 'axisRange': f_fields.Float(required=False, description='degrees'), - 'numberOfImages': f_fields.Integer(required=False, description='The number of images requested'), - 'presetForProposalId': f_fields.Integer(required=False, description='Indicates this plan is available to all sessions on given proposal'), - 'beamLineName': f_fields.String(required=False, description='Indicates this plan is available to all sessions on given beamline'), - 'detectorId': f_fields.Integer(required=False, description=''), - 'distance': f_fields.String(required=False, description=''), - 'orientation': f_fields.String(required=False, description=''), - 'monoBandwidth': f_fields.String(required=False, description=''), - 'centringMethod': f_fields.String(required=False, description='enum(xray,loop,diffraction,optical)'), - } - -class DiffractionPlanSchema(Schema): - """Marshmallows schema class representing DiffractionPlan table""" - - diffractionPlanId = ma_fields.Integer() - name = ma_fields.String() - experimentKind = ma_fields.String() - observedResolution = ma_fields.Float() - minimalResolution = ma_fields.Float() - exposureTime = ma_fields.Float() - oscillationRange = ma_fields.Float() - maximalResolution = ma_fields.Float() - screeningResolution = ma_fields.Float() - radiationSensitivity = ma_fields.Float() - anomalousScatterer = ma_fields.String() - preferredBeamSizeX = ma_fields.Float() - preferredBeamSizeY = ma_fields.Float() - preferredBeamDiameter = ma_fields.Float() - comments = ma_fields.String() - DIFFRACTIONPLANUUID = ma_fields.String() - aimedCompleteness = ma_fields.String() - aimedIOverSigmaAtHighestRes = ma_fields.String() - aimedMultiplicity = ma_fields.String() - aimedResolution = ma_fields.String() - anomalousData = ma_fields.Integer() - complexity = ma_fields.String() - estimateRadiationDamage = ma_fields.Integer() - forcedSpaceGroup = ma_fields.String() - requiredCompleteness = ma_fields.String() - requiredMultiplicity = ma_fields.String() - requiredResolution = ma_fields.String() - strategyOption = ma_fields.String() - kappaStrategyOption = ma_fields.String() - numberOfPositions = ma_fields.Integer() - 
minDimAccrossSpindleAxis = ma_fields.String() - maxDimAccrossSpindleAxis = ma_fields.String() - radiationSensitivityBeta = ma_fields.String() - radiationSensitivityGamma = ma_fields.String() - minOscWidth = ma_fields.Float() - recordTimeStamp = ma_fields.DateTime() - monochromator = ma_fields.String() - energy = ma_fields.Float() - transmission = ma_fields.Float() - boxSizeX = ma_fields.Float() - boxSizeY = ma_fields.Float() - kappaStart = ma_fields.Float() - axisStart = ma_fields.Float() - axisRange = ma_fields.Float() - numberOfImages = ma_fields.Integer() - presetForProposalId = ma_fields.Integer() - beamLineName = ma_fields.String() - detectorId = ma_fields.Integer() - distance = ma_fields.String() - orientation = ma_fields.String() - monoBandwidth = ma_fields.String() - centringMethod = ma_fields.String() - -f_schema = api.model('DiffractionPlan', dict_schema) -ma_schema = DiffractionPlanSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/energy_scan.py b/pyispyb/core/schemas/energy_scan.py deleted file mode 100644 index 3310dc6b..00000000 --- a/pyispyb/core/schemas/energy_scan.py +++ /dev/null @@ -1,110 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
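Usage note, stepping back to pyispyb/core/schemas/dewars.py added above: the write payload (DewarCreate) is split from the read model (Dewar), which adds the primary key plus the nested Shipping relation and enables Config.orm_mode. A rough sketch with invented values:

from pyispyb.core.schemas.dewars import Dewar, DewarCreate

# Validate an incoming creation payload; dewarType is optional.
payload = DewarCreate(shippingId=7, code="DEWAR-001", dewarType="Dewar")

# Read models are normally built from ORM rows via Dewar.from_orm(row)
# (Config.orm_mode); a plain dict works too:
dewar = Dewar.parse_obj(
    {
        **payload.dict(),
        "dewarId": 42,
        "Shipping": {"proposalId": 1, "shippingName": "Shipment 1"},
    }
)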
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'energyScanId': f_fields.Integer(required=True, description=''), - 'sessionId': f_fields.Integer(required=True, description=''), - 'blSampleId': f_fields.Integer(required=False, description=''), - 'fluorescenceDetector': f_fields.String(required=False, description=''), - 'scanFileFullPath': f_fields.String(required=False, description=''), - 'jpegChoochFileFullPath': f_fields.String(required=False, description=''), - 'element': f_fields.String(required=False, description=''), - 'startEnergy': f_fields.Float(required=False, description=''), - 'endEnergy': f_fields.Float(required=False, description=''), - 'transmissionFactor': f_fields.Float(required=False, description=''), - 'exposureTime': f_fields.Float(required=False, description=''), - 'axisPosition': f_fields.Float(required=False, description=''), - 'synchrotronCurrent': f_fields.Float(required=False, description=''), - 'temperature': f_fields.Float(required=False, description=''), - 'peakEnergy': f_fields.Float(required=False, description=''), - 'peakFPrime': f_fields.Float(required=False, description=''), - 'peakFDoublePrime': f_fields.Float(required=False, description=''), - 'inflectionEnergy': f_fields.Float(required=False, description=''), - 'inflectionFPrime': f_fields.Float(required=False, description=''), - 'inflectionFDoublePrime': f_fields.Float(required=False, description=''), - 'xrayDose': f_fields.Float(required=False, description=''), - 'startTime': f_fields.DateTime(required=False, description=''), - 'endTime': f_fields.DateTime(required=False, description=''), - 'edgeEnergy': f_fields.String(required=False, description=''), - 'filename': f_fields.String(required=False, description=''), - 'beamSizeVertical': f_fields.Float(required=False, description=''), - 'beamSizeHorizontal': f_fields.Float(required=False, description=''), - 'choochFileFullPath': f_fields.String(required=False, description=''), - 'crystalClass': f_fields.String(required=False, description=''), - 'comments': f_fields.String(required=False, description=''), - 'flux': f_fields.String(required=False, description='flux measured before the energyScan'), - 'flux_end': f_fields.String(required=False, description='flux measured after the energyScan'), - 'workingDirectory': f_fields.String(required=False, description=''), - 'blSubSampleId': f_fields.Integer(required=False, description=''), - } - -class EnergyScanSchema(Schema): - """Marshmallows schema class representing EnergyScan table""" - - energyScanId = ma_fields.Integer() - sessionId = ma_fields.Integer() - blSampleId = ma_fields.Integer() - fluorescenceDetector = ma_fields.String() - scanFileFullPath = ma_fields.String() - jpegChoochFileFullPath = ma_fields.String() - element = ma_fields.String() - startEnergy = ma_fields.Float() - endEnergy = ma_fields.Float() - transmissionFactor = ma_fields.Float() - exposureTime = ma_fields.Float() - axisPosition = ma_fields.Float() - synchrotronCurrent = ma_fields.Float() - temperature = ma_fields.Float() - peakEnergy = ma_fields.Float() - peakFPrime = ma_fields.Float() - peakFDoublePrime = ma_fields.Float() - inflectionEnergy = ma_fields.Float() - inflectionFPrime = ma_fields.Float() - inflectionFDoublePrime = ma_fields.Float() - xrayDose = ma_fields.Float() - startTime = ma_fields.DateTime() - endTime = 
ma_fields.DateTime() - edgeEnergy = ma_fields.String() - filename = ma_fields.String() - beamSizeVertical = ma_fields.Float() - beamSizeHorizontal = ma_fields.Float() - choochFileFullPath = ma_fields.String() - crystalClass = ma_fields.String() - comments = ma_fields.String() - flux = ma_fields.String() - flux_end = ma_fields.String() - workingDirectory = ma_fields.String() - blSubSampleId = ma_fields.Integer() - -f_schema = api.model('EnergyScan', dict_schema) -ma_schema = EnergyScanSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/energyscan.py b/pyispyb/core/schemas/energyscan.py new file mode 100644 index 00000000..8d7b88ea --- /dev/null +++ b/pyispyb/core/schemas/energyscan.py @@ -0,0 +1,8 @@ +from pydantic import BaseModel + + +class EnergyScan(BaseModel): + energyScanId: int + + class Config: + orm_mode = True diff --git a/pyispyb/core/schemas/events.py b/pyispyb/core/schemas/events.py new file mode 100644 index 00000000..9f30f390 --- /dev/null +++ b/pyispyb/core/schemas/events.py @@ -0,0 +1,36 @@ +from datetime import datetime +from typing import Union, Optional + +from pydantic import BaseModel, Field + +from .datacollections import DataCollection +from .robotactions import RobotAction +from .energyscan import EnergyScan +from .xfefluorescencespectrum import XFEFluorescenceSpectrum + + +class EventBase(BaseModel): + id: int + type: str + startTime: Optional[datetime] = Field(title="Start Time") + endTime: Optional[datetime] = Field(title="End Time") + duration: Optional[float] = Field(title="Duration", unit="min") + count: int + session: Optional[str] + sessionId: int + proposal: str + blSample: Optional[str] = Field(description="Sample Name") + blSampleId: Optional[int] = Field(description="Sample Id") + attachments: Optional[int] = Field(description="No. of attachments") + + Item: Union[DataCollection, RobotAction, XFEFluorescenceSpectrum, EnergyScan] + + +class Event(EventBase): + class Config: + orm_mode = True + + +class EventType(BaseModel): + eventTypeName: str + eventType: str diff --git a/pyispyb/core/schemas/image_quality_indicators.py b/pyispyb/core/schemas/image_quality_indicators.py deleted file mode 100644 index c19f73c5..00000000 --- a/pyispyb/core/schemas/image_quality_indicators.py +++ /dev/null @@ -1,78 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
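Usage note on events.py above: Event.Item is a Union, so under pydantic v1 semantics each member model is tried left to right until one validates. A hedged sketch, assuming no earlier union member accepts an object carrying only energyScanId; ids and names are placeholders:

from pyispyb.core.schemas.energyscan import EnergyScan
from pyispyb.core.schemas.events import Event

event = Event(
    id=1,
    type="energyScan",        # placeholder type label
    count=1,
    sessionId=55,
    proposal="mx1234",        # hypothetical proposal name
    Item=EnergyScan(energyScanId=99),
)
# Left-to-right union matching should land on EnergyScan here:
print(type(event.Item).__name__)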
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'dataCollectionId': f_fields.Integer(required=True, description=''), - 'imageNumber': f_fields.Integer(required=True, description=''), - 'imageId': f_fields.Integer(required=False, description=''), - 'autoProcProgramId': f_fields.Integer(required=False, description='Foreign key to the AutoProcProgram table'), - 'spotTotal': f_fields.Integer(required=False, description='Total number of spots'), - 'inResTotal': f_fields.Integer(required=False, description='Total number of spots in resolution range'), - 'goodBraggCandidates': f_fields.Integer(required=False, description='Total number of Bragg diffraction spots'), - 'iceRings': f_fields.Integer(required=False, description='Number of ice rings identified'), - 'method1Res': f_fields.Float(required=False, description='Resolution estimate 1 (see publication)'), - 'method2Res': f_fields.Float(required=False, description='Resolution estimate 2 (see publication)'), - 'maxUnitCell': f_fields.Float(required=False, description='Estimation of the largest possible unit cell edge'), - 'pctSaturationTop50Peaks': f_fields.Float(required=False, description='The fraction of the dynamic range being used'), - 'inResolutionOvrlSpots': f_fields.Integer(required=False, description='Number of spots overloaded'), - 'binPopCutOffMethod2Res': f_fields.Float(required=False, description='Cut off used in resolution limit calculation'), - 'recordTimeStamp': f_fields.DateTime(required=False, description='Creation or last update date/time'), - 'totalIntegratedSignal': f_fields.String(required=False, description=''), - 'dozor_score': f_fields.String(required=False, description='dozor_score'), - 'driftFactor': f_fields.Float(required=False, description='EM movie drift factor'), - } - -class ImageQualityIndicatorsSchema(Schema): - """Marshmallows schema class representing ImageQualityIndicators table""" - - dataCollectionId = ma_fields.Integer() - imageNumber = ma_fields.Integer() - imageId = ma_fields.Integer() - autoProcProgramId = ma_fields.Integer() - spotTotal = ma_fields.Integer() - inResTotal = ma_fields.Integer() - goodBraggCandidates = ma_fields.Integer() - iceRings = ma_fields.Integer() - method1Res = ma_fields.Float() - method2Res = ma_fields.Float() - maxUnitCell = ma_fields.Float() - pctSaturationTop50Peaks = ma_fields.Float() - inResolutionOvrlSpots = ma_fields.Integer() - binPopCutOffMethod2Res = ma_fields.Float() - recordTimeStamp = ma_fields.DateTime() - totalIntegratedSignal = ma_fields.String() - dozor_score = ma_fields.String() - driftFactor = ma_fields.Float() - -f_schema = api.model('ImageQualityIndicators', dict_schema) -ma_schema = ImageQualityIndicatorsSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/lab_contact.py b/pyispyb/core/schemas/lab_contact.py deleted file mode 100644 index d86ea4a8..00000000 --- a/pyispyb/core/schemas/lab_contact.py +++ /dev/null @@ -1,62 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. 
- -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'labContactId': f_fields.Integer(required=True, description=''), - 'personId': f_fields.Integer(required=True, description=''), - 'cardName': f_fields.String(required=True, description=''), - 'proposalId': f_fields.Integer(required=True, description=''), - 'defaultCourrierCompany': f_fields.String(required=False, description=''), - 'courierAccount': f_fields.String(required=False, description=''), - 'billingReference': f_fields.String(required=False, description=''), - 'dewarAvgCustomsValue': f_fields.Integer(required=True, description=''), - 'dewarAvgTransportValue': f_fields.Integer(required=True, description=''), - 'recordTimeStamp': f_fields.DateTime(required=True, description='Creation or last update date/time'), - } - -class LabContactSchema(Schema): - """Marshmallows schema class representing LabContact table""" - - labContactId = ma_fields.Integer() - personId = ma_fields.Integer() - cardName = ma_fields.String() - proposalId = ma_fields.Integer() - defaultCourrierCompany = ma_fields.String() - courierAccount = ma_fields.String() - billingReference = ma_fields.String() - dewarAvgCustomsValue = ma_fields.Integer() - dewarAvgTransportValue = ma_fields.Integer() - recordTimeStamp = ma_fields.DateTime() - -f_schema = api.model('LabContact', dict_schema) -ma_schema = LabContactSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/labcontacts.py b/pyispyb/core/schemas/labcontacts.py new file mode 100644 index 00000000..65db84f8 --- /dev/null +++ b/pyispyb/core/schemas/labcontacts.py @@ -0,0 +1,60 @@ +from typing import Optional +import datetime + +from pydantic import BaseModel, Field +from pyispyb.core.schemas.laboratories import Laboratory, LaboratoryCreate + + +class PersonBase(BaseModel): + givenName: str = Field(title="First Name") + familyName: str = Field(title="Surname") + emailAddress: Optional[str] = Field(title="Email Address", nullable=True) + phoneNumber: Optional[str] = Field(title="Phone Number", nullable=True) + + Laboratory: Optional[Laboratory] + + +class PersonCreate(PersonBase): + Laboratory: Optional[LaboratoryCreate] = Field(title="Laboratory") + + +class Person(PersonBase): + # personId: int + + class Config: + title = "Contact Person" + orm_mode = True + + +class LabContactBase(BaseModel): + proposalId: int + cardName: str = Field( + title="Card Name", description="The name for this lab contact" + ) + defaultCourrierCompany: Optional[str] = Field( + title="Courrier Company", nullable=True + ) + courierAccount: Optional[str] = Field(title="Account No.", nullable=True) + billingReference: Optional[str] = Field(title="Billing Reference", nullable=True) + dewarAvgCustomsValue: Optional[int] = Field(title="Avg Customs Value", unit="Eur") + dewarAvgTransportValue: Optional[int] = Field( + title="Avg Transport Value", unit="Eur" + ) + + Person: Person + + +class LabContactCreate(LabContactBase): + Person: PersonCreate = 
Field(title="Person") + + +class LabContact(LabContactBase): + labContactId: int + personId: int + recordTimeStamp: datetime.datetime = Field( + description="Time lab contact was created" + ) + + class Config: + orm_mode = True + json_encoders = {datetime.datetime: lambda obj: obj.isoformat() + "+00:00"} diff --git a/pyispyb/core/schemas/laboratories.py b/pyispyb/core/schemas/laboratories.py new file mode 100644 index 00000000..a2a65ac1 --- /dev/null +++ b/pyispyb/core/schemas/laboratories.py @@ -0,0 +1,43 @@ +import datetime +from typing import Optional + +# from pyispyb.core.schemas.validators import WordDashSpace +from pydantic import BaseModel, Field + + +class LaboratoryCreate(BaseModel): + name: str = Field(title="Laboratory Name", description="The Laboratory name") + address: str = Field(title="Address", description="The Laboratory Address") + city: str = Field(title="City", description="The Laboratory City") + country: str = Field(title="Country", description="The Laboratory Country") + url: Optional[str] = Field( + title="URL", description="The Laboratory optional URL", nullable=True + ) + laboratoryExtPk: Optional[int] = Field( + title="laboratoryExtPk", + description="External Id from the User Portal", + nullable=True, + ) + + +class Laboratory(LaboratoryCreate): + laboratoryId: int + + # Could be missing in db + name: Optional[str] = Field( + None, title="Laboratory Name", description="The Laboratory name" + ) + address: Optional[str] = Field( + None, title="Address", description="The Laboratory Address" + ) + city: Optional[str] = Field(None, title="City", description="The Laboratory City") + country: Optional[str] = Field( + None, title="Country", description="The Laboratory Country" + ) + recordTimeStamp: Optional[datetime.datetime] = Field( + title="recordTimeStamp", + description="Time Laboratory was created", + ) + + class Config: + orm_mode = True diff --git a/pyispyb/core/schemas/laboratory.py b/pyispyb/core/schemas/laboratory.py deleted file mode 100644 index 4ff4c607..00000000 --- a/pyispyb/core/schemas/laboratory.py +++ /dev/null @@ -1,64 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
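Usage note on labcontacts.py above: LabContactCreate nests PersonCreate, which may itself nest LaboratoryCreate, so one payload validates the whole contact; the full LabContact read model additionally pins JSON timestamps to an explicit +00:00 offset via Config.json_encoders. A minimal sketch with invented values:

from pyispyb.core.schemas.labcontacts import LabContactCreate

contact = LabContactCreate.parse_obj(
    {
        "proposalId": 1,
        "cardName": "Home lab card",
        "Person": {
            "givenName": "Jane",
            "familyName": "Doe",
            "emailAddress": "jane.doe@example.com",
            "Laboratory": {
                "name": "Structural Biology Lab",
                "address": "1 Example Road",
                "city": "Grenoble",
                "country": "France",
            },
        },
    }
)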
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'laboratoryId': f_fields.Integer(required=True, description=''), - 'laboratoryUUID': f_fields.String(required=False, description=''), - 'name': f_fields.String(required=False, description=''), - 'address': f_fields.String(required=False, description=''), - 'city': f_fields.String(required=False, description=''), - 'country': f_fields.String(required=False, description=''), - 'url': f_fields.String(required=False, description=''), - 'organization': f_fields.String(required=False, description=''), - 'recordTimeStamp': f_fields.DateTime(required=True, description='Creation or last update date/time'), - 'laboratoryPk': f_fields.Integer(required=False, description=''), - 'postcode': f_fields.String(required=False, description=''), - } - -class LaboratorySchema(Schema): - """Marshmallows schema class representing Laboratory table""" - - laboratoryId = ma_fields.Integer() - laboratoryUUID = ma_fields.String() - name = ma_fields.String() - address = ma_fields.String() - city = ma_fields.String() - country = ma_fields.String() - url = ma_fields.String() - organization = ma_fields.String() - recordTimeStamp = ma_fields.DateTime() - laboratoryPk = ma_fields.Integer() - postcode = ma_fields.String() - -f_schema = api.model('Laboratory', dict_schema) -ma_schema = LaboratorySchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/mapping.py b/pyispyb/core/schemas/mapping.py new file mode 100644 index 00000000..4868a106 --- /dev/null +++ b/pyispyb/core/schemas/mapping.py @@ -0,0 +1,62 @@ +from typing import Optional +from pydantic import BaseModel, Field + + +class MapROI(BaseModel): + xrfFluorescenceMappingROIId: int + element: Optional[str] + edge: Optional[str] + scalar: Optional[str] + startEnergy: float + endEnergy: float + + class Config: + orm_mode = True + + +class MapGridInfo(BaseModel): + gridInfoId: int + steps_x: int + steps_y: int + snaked: bool + orientation: str + + class Config: + orm_mode = True + + +class MapMetaData(BaseModel): + url: str = Field(description="Url to map image") + blSubSampleId: Optional[int] + blSampleId: Optional[int] + dataCollectionId: Optional[int] + + +class Map(BaseModel): + xrfFluorescenceMappingId: int + colourMap: Optional[str] + opacity: Optional[float] + points: Optional[int] + dataFormat: str + + metadata: MapMetaData = Field(alias="_metadata") + + GridInfo: MapGridInfo + XRFFluorescenceMappingROI: MapROI + + class Config: + orm_mode = True + + +class MapHistogram(BaseModel): + xrfFluorescenceMappingId: int + hist: list[int] + bins: list[float] + width: list[float] + + +class MapPixelValue(BaseModel): + xrfFluorescenceMappingId: int + x: int + y: int + value: float diff --git a/pyispyb/core/schemas/person.py b/pyispyb/core/schemas/person.py deleted file mode 100644 index 4fa91b7a..00000000 --- a/pyispyb/core/schemas/person.py +++ /dev/null @@ -1,70 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. 
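Usage note on mapping.py above: Map exposes server-computed fields through metadata, aliased to "_metadata", so wire payloads carry the underscored key while Python code reads .metadata. A sketch; every value below is invented:

from pyispyb.core.schemas.mapping import Map

xrf_map = Map.parse_obj(
    {
        "xrfFluorescenceMappingId": 3,
        "dataFormat": "json",                  # placeholder format string
        "_metadata": {"url": "/maps/3"},       # populated via the field alias
        "GridInfo": {
            "gridInfoId": 5,
            "steps_x": 10,
            "steps_y": 10,
            "snaked": True,
            "orientation": "horizontal",       # placeholder orientation
        },
        "XRFFluorescenceMappingROI": {
            "xrfFluorescenceMappingROIId": 2,
            "startEnergy": 6.3,
            "endEnergy": 7.1,
        },
    }
)
print(xrf_map.dict(by_alias=True)["_metadata"])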
- -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'personId': f_fields.Integer(required=True, description=''), - 'laboratoryId': f_fields.Integer(required=False, description=''), - 'siteId': f_fields.Integer(required=False, description=''), - 'personUUID': f_fields.String(required=False, description=''), - 'familyName': f_fields.String(required=False, description=''), - 'givenName': f_fields.String(required=False, description=''), - 'title': f_fields.String(required=False, description=''), - 'emailAddress': f_fields.String(required=False, description=''), - 'phoneNumber': f_fields.String(required=False, description=''), - 'login': f_fields.String(required=False, description=''), - 'faxNumber': f_fields.String(required=False, description=''), - 'recordTimeStamp': f_fields.DateTime(required=True, description='Creation or last update date/time'), - 'cache': f_fields.String(required=False, description=''), - 'externalId': f_fields.Integer(required=False, description=''), - } - -class PersonSchema(Schema): - """Marshmallows schema class representing Person table""" - - personId = ma_fields.Integer() - laboratoryId = ma_fields.Integer() - siteId = ma_fields.Integer() - personUUID = ma_fields.String() - familyName = ma_fields.String() - givenName = ma_fields.String() - title = ma_fields.String() - emailAddress = ma_fields.String() - phoneNumber = ma_fields.String() - login = ma_fields.String() - faxNumber = ma_fields.String() - recordTimeStamp = ma_fields.DateTime() - cache = ma_fields.String() - externalId = ma_fields.Integer() - -f_schema = api.model('Person', dict_schema) -ma_schema = PersonSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/persons.py b/pyispyb/core/schemas/persons.py new file mode 100644 index 00000000..255eeb97 --- /dev/null +++ b/pyispyb/core/schemas/persons.py @@ -0,0 +1,24 @@ +from datetime import datetime +from typing import Optional + +from pydantic import BaseModel, Field + + +class PersonMetaData(BaseModel): + sessions: Optional[int] = Field( + description="Number of sessions this person has been on" + ) + lastSession: Optional[datetime] = Field(description="Last session date") + role: Optional[str] + remote: Optional[bool] + + +class Person(BaseModel): + personId: int + givenName: str + familyName: str + + metadata: Optional[PersonMetaData] = Field(alias="_metadata") + + class Config: + orm_mode = True diff --git a/pyispyb/core/schemas/phasing.py b/pyispyb/core/schemas/phasing.py deleted file mode 100644 index ad20919a..00000000 --- a/pyispyb/core/schemas/phasing.py +++ /dev/null @@ -1,62 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. 
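Usage note on persons.py above: PersonMetaData rides along under the same "_metadata" alias convention and is entirely optional. Sketch with invented values:

from pyispyb.core.schemas.persons import Person

person = Person.parse_obj(
    {
        "personId": 10,
        "givenName": "Jane",
        "familyName": "Doe",
        "_metadata": {"sessions": 4, "remote": False},
    }
)
# Serialising with by_alias=True restores the underscored key:
assert "_metadata" in person.dict(by_alias=True)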
- -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'phasingId': f_fields.Integer(required=True, description='Primary key (auto-incremented)'), - 'phasingAnalysisId': f_fields.Integer(required=True, description='Related phasing analysis item'), - 'phasingProgramRunId': f_fields.Integer(required=True, description='Related program item'), - 'spaceGroupId': f_fields.Integer(required=False, description='Related spaceGroup'), - 'method': f_fields.String(required=False, description='phasing methodenum(solvent flattening,solvent flipping)'), - 'solventContent': f_fields.String(required=False, description=''), - 'enantiomorph': f_fields.Integer(required=False, description='0 or 1'), - 'lowRes': f_fields.String(required=False, description=''), - 'highRes': f_fields.String(required=False, description=''), - 'recordTimeStamp': f_fields.DateTime(required=False, description=''), - } - -class PhasingSchema(Schema): - """Marshmallows schema class representing Phasing table""" - - phasingId = ma_fields.Integer() - phasingAnalysisId = ma_fields.Integer() - phasingProgramRunId = ma_fields.Integer() - spaceGroupId = ma_fields.Integer() - method = ma_fields.String() - solventContent = ma_fields.String() - enantiomorph = ma_fields.Integer() - lowRes = ma_fields.String() - highRes = ma_fields.String() - recordTimeStamp = ma_fields.DateTime() - -f_schema = api.model('Phasing', dict_schema) -ma_schema = PhasingSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/phasing_has_scaling.py b/pyispyb/core/schemas/phasing_has_scaling.py deleted file mode 100644 index ffec560e..00000000 --- a/pyispyb/core/schemas/phasing_has_scaling.py +++ /dev/null @@ -1,52 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'phasingHasScalingId': f_fields.Integer(required=True, description='Primary key (auto-incremented)'), - 'phasingAnalysisId': f_fields.Integer(required=True, description='Related phasing analysis item'), - 'autoProcScalingId': f_fields.Integer(required=True, description='Related autoProcScaling item'), - 'datasetNumber': f_fields.Integer(required=False, description='serial number of the dataset and always reserve 0 for the reference'), - 'recordTimeStamp': f_fields.DateTime(required=False, description=''), - } - -class Phasing_has_ScalingSchema(Schema): - """Marshmallows schema class representing Phasing_has_Scaling table""" - - phasingHasScalingId = ma_fields.Integer() - phasingAnalysisId = ma_fields.Integer() - autoProcScalingId = ma_fields.Integer() - datasetNumber = ma_fields.Integer() - recordTimeStamp = ma_fields.DateTime() - -f_schema = api.model('Phasing_has_Scaling', dict_schema) -ma_schema = Phasing_has_ScalingSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/phasing_program_attachment.py b/pyispyb/core/schemas/phasing_program_attachment.py deleted file mode 100644 index be6a0446..00000000 --- a/pyispyb/core/schemas/phasing_program_attachment.py +++ /dev/null @@ -1,54 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'phasingProgramAttachmentId': f_fields.Integer(required=True, description='Primary key (auto-incremented)'), - 'phasingProgramRunId': f_fields.Integer(required=True, description='Related program item'), - 'fileType': f_fields.String(required=False, description='file typeenum(Map,Logfile,PDB,CSV,INS,RES,TXT)'), - 'fileName': f_fields.String(required=False, description='file name'), - 'filePath': f_fields.String(required=False, description='file path'), - 'recordTimeStamp': f_fields.DateTime(required=False, description='Creation or last update date/time'), - } - -class PhasingProgramAttachmentSchema(Schema): - """Marshmallows schema class representing PhasingProgramAttachment table""" - - phasingProgramAttachmentId = ma_fields.Integer() - phasingProgramRunId = ma_fields.Integer() - fileType = ma_fields.String() - fileName = ma_fields.String() - filePath = ma_fields.String() - recordTimeStamp = ma_fields.DateTime() - -f_schema = api.model('PhasingProgramAttachment', dict_schema) -ma_schema = PhasingProgramAttachmentSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/phasing_program_run.py b/pyispyb/core/schemas/phasing_program_run.py deleted file mode 100644 index 00709f41..00000000 --- a/pyispyb/core/schemas/phasing_program_run.py +++ /dev/null @@ -1,60 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'phasingProgramRunId': f_fields.Integer(required=True, description='Primary key (auto-incremented)'), - 'phasingCommandLine': f_fields.String(required=False, description='Command line for phasing'), - 'phasingPrograms': f_fields.String(required=False, description='Phasing programs (comma separated)'), - 'phasingStatus': f_fields.Integer(required=False, description='success (1) / fail (0)'), - 'phasingMessage': f_fields.String(required=False, description='warning, error,...'), - 'phasingStartTime': f_fields.DateTime(required=False, description='Processing start time'), - 'phasingEndTime': f_fields.DateTime(required=False, description='Processing end time'), - 'phasingEnvironment': f_fields.String(required=False, description='Cpus, Nodes,...'), - 'recordTimeStamp': f_fields.DateTime(required=False, description=''), - } - -class PhasingProgramRunSchema(Schema): - """Marshmallows schema class representing PhasingProgramRun table""" - - phasingProgramRunId = ma_fields.Integer() - phasingCommandLine = ma_fields.String() - phasingPrograms = ma_fields.String() - phasingStatus = ma_fields.Integer() - phasingMessage = ma_fields.String() - phasingStartTime = ma_fields.DateTime() - phasingEndTime = ma_fields.DateTime() - phasingEnvironment = ma_fields.String() - recordTimeStamp = ma_fields.DateTime() - -f_schema = api.model('PhasingProgramRun', dict_schema) -ma_schema = PhasingProgramRunSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/phasing_statistics.py b/pyispyb/core/schemas/phasing_statistics.py deleted file mode 100644 index af1f87b3..00000000 --- a/pyispyb/core/schemas/phasing_statistics.py +++ /dev/null @@ -1,66 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'phasingStatisticsId': f_fields.Integer(required=True, description='Primary key (auto-incremented)'), - 'phasingHasScalingId1': f_fields.Integer(required=True, description='the dataset in question'), - 'phasingHasScalingId2': f_fields.Integer(required=False, description='if this is MIT or MAD, which scaling are being compared, null otherwise'), - 'phasingStepId': f_fields.Integer(required=False, description=''), - 'numberOfBins': f_fields.Integer(required=False, description='the total number of bins'), - 'binNumber': f_fields.Integer(required=False, description='binNumber, 999 for overall'), - 'lowRes': f_fields.String(required=False, description='low resolution cutoff of this binfloat'), - 'highRes': f_fields.String(required=False, description='high resolution cutoff of this binfloat'), - 'metric': f_fields.String(required=False, description='metricenum(Rcullis,Average Fragment Length,Chain Count,Residues Count,CC,PhasingPower,FOM,,Best CC,CC(1/2),Weak CC,CFOM,Pseudo_free_CC,CC of partial model)'), - 'statisticsValue': f_fields.String(required=False, description='the statistics value'), - 'nReflections': f_fields.Integer(required=False, description=''), - 'recordTimeStamp': f_fields.DateTime(required=False, description=''), - } - -class PhasingStatisticsSchema(Schema): - """Marshmallows schema class representing PhasingStatistics table""" - - phasingStatisticsId = ma_fields.Integer() - phasingHasScalingId1 = ma_fields.Integer() - phasingHasScalingId2 = ma_fields.Integer() - phasingStepId = ma_fields.Integer() - numberOfBins = ma_fields.Integer() - binNumber = ma_fields.Integer() - lowRes = ma_fields.String() - highRes = ma_fields.String() - metric = ma_fields.String() - statisticsValue = ma_fields.String() - nReflections = ma_fields.Integer() - recordTimeStamp = ma_fields.DateTime() - -f_schema = api.model('PhasingStatistics', dict_schema) -ma_schema = PhasingStatisticsSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/phasing_step.py b/pyispyb/core/schemas/phasing_step.py deleted file mode 100644 index 6ffe0255..00000000 --- a/pyispyb/core/schemas/phasing_step.py +++ /dev/null @@ -1,68 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'phasingStepId': f_fields.Integer(required=True, description=''), - 'previousPhasingStepId': f_fields.Integer(required=False, description=''), - 'programRunId': f_fields.Integer(required=False, description=''), - 'spaceGroupId': f_fields.Integer(required=False, description=''), - 'autoProcScalingId': f_fields.Integer(required=False, description=''), - 'phasingAnalysisId': f_fields.Integer(required=False, description=''), - 'phasingStepType': f_fields.String(required=False, description='enum(PREPARE,SUBSTRUCTUREDETERMINATION,PHASING,MODELBUILDING)'), - 'method': f_fields.String(required=False, description=''), - 'solventContent': f_fields.String(required=False, description=''), - 'enantiomorph': f_fields.String(required=False, description=''), - 'lowRes': f_fields.String(required=False, description=''), - 'highRes': f_fields.String(required=False, description=''), - 'recordTimeStamp': f_fields.DateTime(required=True, description=''), - } - -class PhasingStepSchema(Schema): - """Marshmallows schema class representing PhasingStep table""" - - phasingStepId = ma_fields.Integer() - previousPhasingStepId = ma_fields.Integer() - programRunId = ma_fields.Integer() - spaceGroupId = ma_fields.Integer() - autoProcScalingId = ma_fields.Integer() - phasingAnalysisId = ma_fields.Integer() - phasingStepType = ma_fields.String() - method = ma_fields.String() - solventContent = ma_fields.String() - enantiomorph = ma_fields.String() - lowRes = ma_fields.String() - highRes = ma_fields.String() - recordTimeStamp = ma_fields.DateTime() - -f_schema = api.model('PhasingStep', dict_schema) -ma_schema = PhasingStepSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/phasing_view.py b/pyispyb/core/schemas/phasing_view.py deleted file mode 100644 index abb0ac47..00000000 --- a/pyispyb/core/schemas/phasing_view.py +++ /dev/null @@ -1,100 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'phasingStepId': f_fields.Integer(required=False, description=''), - 'previousPhasingStepId': f_fields.Integer(required=False, description=''), - 'phasingAnalysisId': f_fields.Integer(required=False, description=''), - 'autoProcIntegrationId': f_fields.Integer(required=True, description='Primary key (auto-incremented)'), - 'dataCollectionId': f_fields.Integer(required=True, description='DataCollection item'), - 'anomalous': f_fields.Integer(required=False, description='boolean type:0 noanoum - 1 anoum'), - 'spaceGroup': f_fields.String(required=False, description='Space group'), - 'autoProcId': f_fields.Integer(required=False, description='Primary key (auto-incremented)'), - 'phasingStepType': f_fields.String(required=False, description='enum(PREPARE,SUBSTRUCTUREDETERMINATION,PHASING,MODELBUILDING)'), - 'method': f_fields.String(required=False, description=''), - 'solventContent': f_fields.String(required=False, description=''), - 'enantiomorph': f_fields.String(required=False, description=''), - 'lowRes': f_fields.String(required=False, description=''), - 'highRes': f_fields.String(required=False, description=''), - 'autoProcScalingId': f_fields.Integer(required=False, description='Primary key (auto-incremented)'), - 'spaceGroupShortName': f_fields.String(required=False, description='short name without blank'), - 'processingPrograms': f_fields.String(required=False, description='Processing programs (comma separated)'), - 'processingStatus': f_fields.Integer(required=False, description='success (1) / fail (0)'), - 'phasingPrograms': f_fields.String(required=False, description='Phasing programs (comma separated)'), - 'phasingStatus': f_fields.Integer(required=False, description='success (1) / fail (0)'), - 'phasingStartTime': f_fields.DateTime(required=False, description='Processing start time'), - 'phasingEndTime': f_fields.DateTime(required=False, description='Processing end time'), - 'sessionId': f_fields.Integer(required=False, description='references Session table'), - 'proposalId': f_fields.Integer(required=False, description=''), - 'blSampleId': f_fields.Integer(required=False, description=''), - 'name': f_fields.String(required=False, description=''), - 'code': f_fields.String(required=False, description=''), - 'acronym': f_fields.String(required=False, description=''), - 'proteinId': f_fields.Integer(required=False, description=''), - } - -class DatacollectionSummaryPhasingViewSchema(Schema): - """Marshmallows schema class representing v_datacollection_summary_phasing table""" - - phasingStepId = ma_fields.Integer() - previousPhasingStepId = ma_fields.Integer() - phasingAnalysisId = ma_fields.Integer() - autoProcIntegrationId = ma_fields.Integer() - dataCollectionId = ma_fields.Integer() - anomalous = ma_fields.Integer() - spaceGroup = ma_fields.String() - autoProcId = ma_fields.Integer() - phasingStepType = ma_fields.String() - method = ma_fields.String() - solventContent = ma_fields.String() - enantiomorph = ma_fields.String() - lowRes = ma_fields.String() - highRes = ma_fields.String() - autoProcScalingId = ma_fields.Integer() - spaceGroupShortName = ma_fields.String() - processingPrograms = ma_fields.String() - processingStatus = ma_fields.Integer() - phasingPrograms = ma_fields.String() - phasingStatus = 
ma_fields.Integer() - phasingStartTime = ma_fields.DateTime() - phasingEndTime = ma_fields.DateTime() - sessionId = ma_fields.Integer() - proposalId = ma_fields.Integer() - blSampleId = ma_fields.Integer() - name = ma_fields.String() - code = ma_fields.String() - acronym = ma_fields.String() - proteinId = ma_fields.Integer() - -f_schema = api.model('v_datacollection_summary_phasing', dict_schema) -ma_schema = DatacollectionSummaryPhasingViewSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/processings.py b/pyispyb/core/schemas/processings.py new file mode 100644 index 00000000..3dc3b8d8 --- /dev/null +++ b/pyispyb/core/schemas/processings.py @@ -0,0 +1,326 @@ +import enum +from datetime import datetime +from typing import Optional + +from pydantic import BaseModel, Field, validator + + +class StatusEnum(enum.Enum): + RUNNING = None + FAILED = 0 + SUCCESS = 1 + DIDNTRUN = 2 + + +class ProcessingStatus(BaseModel): + status: Optional[StatusEnum] + + @validator("status", pre=True) + def check_status(cls, status): + if status == "SUCCESS": + return 1 + if status == "FAILED": + return 0 + if status == "RUNNING": + return None + + return status + + +class ProcessingProcessingStatus(ProcessingStatus): + autoProcProgramId: int + + +class ScreeningProcesingStatus(ProcessingStatus): + indexingSuccess: StatusEnum + + +class EMProcessingStatus(BaseModel): + movie: int + motionCorrection: int + ctf: int + + +class ProcessingStatuses(BaseModel): + screening: Optional[dict[str, list[ScreeningProcesingStatus]]] + xrc: Optional[dict[str, list[ProcessingStatus]]] + processing: Optional[dict[str, list[ProcessingProcessingStatus]]] + autoIntegration: Optional[dict[str, list[ProcessingProcessingStatus]]] + em: Optional[EMProcessingStatus] + + +class ProcessingStatusesList(BaseModel): + statuses: dict[int, ProcessingStatuses] + + +class ScreeningStrategySubWedge(BaseModel): + screeningStrategySubWedgeId: int + subWedgeNumber: Optional[int] + rotationAxis: Optional[str] + axisStart: Optional[float] + axisEnd: Optional[float] + exposureTime: Optional[float] + transmission: Optional[float] + oscillationRange: Optional[float] + completeness: Optional[float] + multiplicity: Optional[float] + RESOLUTION: Optional[float] + doseTotal: Optional[float] + numberOfImages: Optional[int] + comments: Optional[str] + + class Config: + orm_mode = True + + +class ScreeningStrategyWedge(BaseModel): + screeningStrategyWedgeId: int + wedgeNumber: Optional[int] + resolution: Optional[float] + completeness: Optional[float] + multiplicity: Optional[float] + doseTotal: Optional[float] + numberOfImages: Optional[int] + phi: Optional[float] + kappa: Optional[float] + chi: Optional[float] + comments: Optional[str] + wavelength: Optional[float] + + ScreeningStrategySubWedge: Optional[list[ScreeningStrategySubWedge]] + + class Config: + orm_mode = True + + +class ScreeningStrategy(BaseModel): + screeningStrategyId: int + rankingResolution: Optional[float] + + ScreeningStrategyWedge: Optional[list[ScreeningStrategyWedge]] + + class Config: + orm_mode = True + + +class ScreeningOutputLattice(BaseModel): + unitCell_a: float + unitCell_b: float + unitCell_c: float + unitCell_alpha: float + unitCell_beta: float + unitCell_gamma: float + spaceGroup: Optional[str] + pointGroup: Optional[str] + + class Config: + orm_mode = True + + +class ScreeningOutput(BaseModel): + screeningOutputId: int + indexingSuccess: int + strategySuccess: int + + ScreeningStrategy: Optional[list[ScreeningStrategy]] + 
ScreeningOutputLattice: Optional[list[ScreeningOutputLattice]] + + class Config: + orm_mode = True + + +class Screening(BaseModel): + screeningId: int + programVersion: str + comments: Optional[str] + shortComments: Optional[str] + + ScreeningOutput: Optional[list[ScreeningOutput]] + + class Config: + orm_mode = True + + +class ProcessingJobParameter(BaseModel): + parameterKey: Optional[str] + parameterValue: Optional[str] + + class Config: + orm_mode = True + + +class ProcessingJob(BaseModel): + processingJobId: int + displayName: Optional[str] + comments: Optional[str] + recordTimestamp: datetime + recipe: Optional[str] + automatic: bool + + ProcessingJobParameters: Optional[list[ProcessingJobParameter]] + + class Config: + orm_mode = True + + +class AutoProcProgramMessageSeverity(str, enum.Enum): + ERROR = "ERROR" + WARNING = "WARNING" + INFO = "INFO" + + +class AutoProcProgramMessage(BaseModel): + autoProcProgramMessageId: int + autoProcProgramId: int + description: str + message: str + severity: AutoProcProgramMessageSeverity + recordTimeStamp: datetime + + class Config: + orm_mode = True + + +class AutoProcProgramMetadata(BaseModel): + attachments: Optional[int] = Field(description="Number of attachments") + autoProcProgramMessages: Optional[list[AutoProcProgramMessage]] + imageSweepCount: Optional[int] + + +class AutoProcProgram(BaseModel): + autoProcProgramId: int + processingCommandLine: Optional[str] + processingPrograms: Optional[str] + processingStatus: Optional[StatusEnum] + processingMessage: Optional[str] + processingStartTime: Optional[datetime] + processingEndTime: Optional[datetime] + processingEnvironment: Optional[str] + recordTimeStamp: datetime + + ProcessingJob: Optional[ProcessingJob] + + metadata: AutoProcProgramMetadata = Field(alias="_metadata") + + @validator("processingStatus", pre=True) + def check_status(cls, status): + if status == "SUCCESS": + return 1 + if status == "FAILED": + return 0 + if status == "RUNNING": + return None + + return status + + class Config: + orm_mode = True + + +class AutoProc(BaseModel): + spaceGroup: str + refinedCell_a: float + refinedCell_b: float + refinedCell_c: float + refinedCell_alpha: float + refinedCell_beta: float + refinedCell_gamma: float + + class Config: + orm_mode = True + + +class ScalingStatisticsType(str, enum.Enum): + overall = "overall" + innerShell = "innerShell" + outerShell = "outerShell" + + +class AutoProcScalingStatistics(BaseModel): + comments: Optional[str] + scalingStatisticsType: Optional[ScalingStatisticsType] + resolutionLimitLow: Optional[float] + resolutionLimitHigh: Optional[float] + rMerge: Optional[float] + rMeasAllIPlusIMinus: Optional[float] + rPimAllIPlusIMinus: Optional[float] + fractionalPartialBias: Optional[float] + nTotalObservations: Optional[int] + nTotalUniqueObservations: Optional[int] + meanIOverSigI: Optional[float] + completeness: Optional[float] + multiplicity: Optional[float] + anomalousCompleteness: Optional[float] + anomalousMultiplicity: Optional[float] + anomalous: Optional[bool] + ccHalf: Optional[float] + ccAnomalous: Optional[float] + resIOverSigI2: Optional[float] + + class Config: + orm_mode = True + + +class AutoProcScaling(BaseModel): + AutoProc: Optional[AutoProc] + AutoProcScalingStatistics: Optional[list[AutoProcScalingStatistics]] + + class Config: + orm_mode = True + + +class AutoProcScalingHasInt(BaseModel): + AutoProcScaling: Optional[AutoProcScaling] + + class Config: + orm_mode = True + + +class AutoProcIntegrationDataCollection(BaseModel): + xBeam: 
Optional[float] + yBeam: Optional[float] + + class Config: + orm_mode = True + + +class AutoProcIntegration(BaseModel): + refinedXBeam: Optional[float] + refinedYBeam: Optional[float] + + AutoProcScalingHasInt: Optional[list[AutoProcScalingHasInt]] + DataCollection: Optional[AutoProcIntegrationDataCollection] + + class Config: + orm_mode = True + + +class AutoProcProgramIntegration(AutoProcProgram): + AutoProcIntegration: Optional[list[AutoProcIntegration]] + + +class AutoProcProgramMessageStatus(BaseModel): + errors: int + warnings: int + info: int + + +class AutoProcProgramMessageStatuses(BaseModel): + statuses: dict[int, AutoProcProgramMessageStatus] + + +class AutoProcProgramAttachmentMetaData(BaseModel): + url: str = Field(description="Url to autoproc program attachment") + + +class AutoProcProgramAttachment(BaseModel): + autoProcProgramAttachmentId: int + autoProcProgramId: int + fileName: str + fileType: str + importanceRank: Optional[int] + + metadata: AutoProcProgramAttachmentMetaData = Field(alias="_metadata") + + class Config: + orm_mode = True diff --git a/pyispyb/core/schemas/proposal.py b/pyispyb/core/schemas/proposal.py deleted file mode 100644 index d52bc1b1..00000000 --- a/pyispyb/core/schemas/proposal.py +++ /dev/null @@ -1,60 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
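The StatusEnum / ProcessingStatus pair in processings.py above normalises the mixed status representations stored in ISPyB: the pre=True validator runs before enum coercion, so textual states are first mapped to 1 / 0 / None and only then turned into StatusEnum members. A minimal illustrative sketch, assuming the module is importable as pyispyb.core.schemas.processings:

    from pyispyb.core.schemas.processings import ProcessingStatus, StatusEnum

    # String inputs are rewritten by the pre-validator, then coerced to the enum.
    assert ProcessingStatus(status="SUCCESS").status is StatusEnum.SUCCESS
    assert ProcessingStatus(status="FAILED").status is StatusEnum.FAILED
    # Raw integers pass straight through to enum coercion.
    assert ProcessingStatus(status=2).status is StatusEnum.DIDNTRUN
    # "RUNNING" maps to None, which Optional[StatusEnum] accepts as-is.
    assert ProcessingStatus(status="RUNNING").status is None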
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'proposalId': f_fields.Integer(required=True, description=''), - 'personId': f_fields.Integer(required=True, description=''), - 'title': f_fields.String(required=False, description=''), - 'proposalCode': f_fields.String(required=False, description=''), - 'proposalNumber': f_fields.String(required=False, description=''), - 'bltimeStamp': f_fields.DateTime(required=True, description=''), - 'proposalType': f_fields.String(required=False, description='Proposal type: MX, BX'), - 'externalId': f_fields.Integer(required=False, description=''), - 'state': f_fields.String(required=False, description='enum(Open,Closed,Cancelled)'), - } - -class ProposalSchema(Schema): - """Marshmallows schema class representing Proposal table""" - - proposalId = ma_fields.Integer() - personId = ma_fields.Integer() - title = ma_fields.String() - proposalCode = ma_fields.String() - proposalNumber = ma_fields.String() - bltimeStamp = ma_fields.DateTime() - proposalType = ma_fields.String() - externalId = ma_fields.Integer() - state = ma_fields.String() - -f_schema = api.model('Proposal', dict_schema) -ma_schema = ProposalSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/proposal_has_person.py b/pyispyb/core/schemas/proposal_has_person.py deleted file mode 100644 index eade9519..00000000 --- a/pyispyb/core/schemas/proposal_has_person.py +++ /dev/null @@ -1,50 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'proposalHasPersonId': f_fields.Integer(required=True, description=''), - 'proposalId': f_fields.Integer(required=True, description=''), - 'personId': f_fields.Integer(required=True, description=''), - 'role': f_fields.String(required=False, description='enum(Co-Investigator,Principal Investigator,Alternate Contact)'), - } - -class ProposalHasPersonSchema(Schema): - """Marshmallows schema class representing ProposalHasPerson table""" - - proposalHasPersonId = ma_fields.Integer() - proposalId = ma_fields.Integer() - personId = ma_fields.Integer() - role = ma_fields.String() - -f_schema = api.model('ProposalHasPerson', dict_schema) -ma_schema = ProposalHasPersonSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/proposals.py b/pyispyb/core/schemas/proposals.py new file mode 100644 index 00000000..47fb95b7 --- /dev/null +++ b/pyispyb/core/schemas/proposals.py @@ -0,0 +1,39 @@ +# import datetime +import enum +from typing import Optional + +from pydantic import BaseModel, Field + + +class ProposalState(str, enum.Enum): + Open = "Open" + Closed = "Closed" + Cancelled = "Cancelled" + + +class ProposalMetaData(BaseModel): + persons: int = Field( + description="Number of people registered on this proposal (via ProposalHasPerson)" + ) + sessions: int = Field(description="Number of sessions") + beamLines: list[str] = Field(description="Beamlines allocated in this proposal") + uiGroups: Optional[list[str]] = Field( + description="UI groups allocated in this proposal" + ) + + +class ProposalBase(BaseModel): + proposalCode: str = Field(title="Proposal Code") + proposalNumber: str = Field(title="Proposal Number") + proposal: str + title: Optional[str] = Field(title="Proposal Title") + state: Optional[ProposalState] = Field(title="Proposal State") + + metadata: ProposalMetaData = Field(alias="_metadata") + + +class Proposal(ProposalBase): + proposalId: int + + class Config: + orm_mode = True diff --git a/pyispyb/core/schemas/protein.py b/pyispyb/core/schemas/protein.py index 56ba112d..82915712 100644 --- a/pyispyb/core/schemas/protein.py +++ b/pyispyb/core/schemas/protein.py @@ -1,76 +1,54 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'proteinId': f_fields.Integer(required=True, description=''), - 'proposalId': f_fields.Integer(required=True, description=''), - 'name': f_fields.String(required=False, description=''), - 'acronym': f_fields.String(required=False, description=''), - 'molecularMass': f_fields.String(required=False, description=''), - 'proteinType': f_fields.String(required=False, description=''), - 'personId': f_fields.Integer(required=False, description=''), - 'bltimeStamp': f_fields.DateTime(required=True, description=''), - 'isCreatedBySampleSheet': f_fields.Integer(required=False, description=''), - 'sequence': f_fields.String(required=False, description=''), - 'MOD_ID': f_fields.String(required=False, description=''), - 'componentTypeId': f_fields.Integer(required=False, description=''), - 'concentrationTypeId': f_fields.Integer(required=False, description=''), - 'Global': f_fields.Integer(required=False, description=''), - 'externalId': f_fields.Integer(required=False, description=''), - 'density': f_fields.Float(required=False, description=''), - 'abundance': f_fields.Float(required=False, description='Deprecated'), - } - -class ProteinSchema(Schema): - """Marshmallows schema class representing Protein table""" - - proteinId = ma_fields.Integer() - proposalId = ma_fields.Integer() - name = ma_fields.String() - acronym = ma_fields.String() - molecularMass = ma_fields.String() - proteinType = ma_fields.String() - personId = ma_fields.Integer() - bltimeStamp = ma_fields.DateTime() - isCreatedBySampleSheet = ma_fields.Integer() - sequence = ma_fields.String() - MOD_ID = ma_fields.String() - componentTypeId = ma_fields.Integer() - concentrationTypeId = ma_fields.Integer() - Global = ma_fields.Integer() - externalId = ma_fields.Integer() - density = ma_fields.Float() - abundance = ma_fields.Float() - -f_schema = api.model('Protein', dict_schema) -ma_schema = ProteinSchema() -json_schema = JSONSchema().dump(ma_schema) +from typing import Optional + +from pydantic import BaseModel, Field + + +class ComponentType(BaseModel): + componentTypeId: int + name: str + + class Config: + orm_mode = True + + +class ConcentrationType(BaseModel): + concentrationTypeId: int + name: str + symbol: str + + class Config: + orm_mode = True + + +class ProteinMetaData(BaseModel): + pdbs: Optional[int] = Field(description="Number of attached pdbs") + crystals: Optional[int] = Field(description="Number of child crystals") + samples: Optional[int] = Field(description="Number of child samples") + datacollections: Optional[int] = Field(description="Number of data collections") + + +class ProteinBase(BaseModel): + name: str + acronym: str = Field(title="Acronym", description="A short name") + proposalId: int + sequence: Optional[str] = Field( + title="Sequence/SMILES", description="Sequence or chemical composition" + ) + density: Optional[float] = Field(title="Density", unit="g/L") + molecularMass: Optional[float] = Field(title="Mass", unit="kDa") + + containmentLevel: Optional[str] + hazardGroup: Optional[str] + safetyLevel: Optional[str] + + ComponentType: Optional[ComponentType] + # ConcentrationType: Optional[ConcentrationType] + + +class Protein(ProteinBase): + proteinId: int + + metadata: Optional[ProteinMetaData] = Field(alias="_metadata") + + class Config: + orm_mode = 
True diff --git a/pyispyb/core/schemas/robot_action.py b/pyispyb/core/schemas/robot_action.py deleted file mode 100644 index f45fc05a..00000000 --- a/pyispyb/core/schemas/robot_action.py +++ /dev/null @@ -1,68 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'robotActionId': f_fields.Integer(required=True, description=''), - 'blsessionId': f_fields.Integer(required=True, description=''), - 'blsampleId': f_fields.Integer(required=False, description=''), - 'actionType': f_fields.String(required=False, description='enum(LOAD,UNLOAD,DISPOSE,STORE,WASH,ANNEAL)'), - 'startTimestamp': f_fields.DateTime(required=True, description=''), - 'endTimestamp': f_fields.DateTime(required=True, description=''), - 'status': f_fields.String(required=False, description='enum(SUCCESS,ERROR,CRITICAL,WARNING,EPICSFAIL,COMMANDNOTSENT)'), - 'message': f_fields.String(required=False, description=''), - 'containerLocation': f_fields.Integer(required=False, description=''), - 'dewarLocation': f_fields.Integer(required=False, description=''), - 'sampleBarcode': f_fields.String(required=False, description=''), - 'xtalSnapshotBefore': f_fields.String(required=False, description=''), - 'xtalSnapshotAfter': f_fields.String(required=False, description=''), - } - -class RobotActionSchema(Schema): - """Marshmallows schema class representing RobotAction table""" - - robotActionId = ma_fields.Integer() - blsessionId = ma_fields.Integer() - blsampleId = ma_fields.Integer() - actionType = ma_fields.String() - startTimestamp = ma_fields.DateTime() - endTimestamp = ma_fields.DateTime() - status = ma_fields.String() - message = ma_fields.String() - containerLocation = ma_fields.Integer() - dewarLocation = ma_fields.Integer() - sampleBarcode = ma_fields.String() - xtalSnapshotBefore = ma_fields.String() - xtalSnapshotAfter = ma_fields.String() - -f_schema = api.model('RobotAction', dict_schema) -ma_schema = RobotActionSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/robotactions.py b/pyispyb/core/schemas/robotactions.py new file mode 100644 index 00000000..a1abb387 --- /dev/null +++ b/pyispyb/core/schemas/robotactions.py @@ -0,0 +1,12 @@ +from typing import Optional + +from pydantic import BaseModel + + +class RobotAction(BaseModel): + actionType: str + status: Optional[str] + message: Optional[str] + + class Config: + orm_mode = True diff --git a/pyispyb/core/schemas/sample.py b/pyispyb/core/schemas/sample.py deleted file mode 100644 index 17417dea..00000000 --- a/pyispyb/core/schemas/sample.py +++ /dev/null @@ -1,118 +0,0 @@ -""" -Project: py-ispyb. 
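The replacement models above (Protein, RobotAction, and most of the others in this changeset) set orm_mode, so in practice they are populated from SQLAlchemy rows via from_orm rather than from dicts. A minimal sketch of that mechanism, using a stand-in object instead of a real ispyb.models row; the values are illustrative:

    from types import SimpleNamespace

    from pyispyb.core.schemas.protein import Protein

    # orm_mode reads attributes instead of dict keys, so any attribute-bearing
    # object can stand in for an ORM row here.
    row = SimpleNamespace(proteinId=42, name="Lysozyme", acronym="LYS", proposalId=1)
    protein = Protein.from_orm(row)
    assert protein.acronym == "LYS"
    assert protein.metadata is None  # the "_metadata" alias is optional on this model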
- -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'blSampleId': f_fields.Integer(required=True, description=''), - 'diffractionPlanId': f_fields.Integer(required=False, description=''), - 'crystalId': f_fields.Integer(required=False, description=''), - 'containerId': f_fields.Integer(required=False, description=''), - 'name': f_fields.String(required=False, description=''), - 'code': f_fields.String(required=False, description=''), - 'location': f_fields.String(required=False, description=''), - 'holderLength': f_fields.String(required=False, description=''), - 'loopLength': f_fields.String(required=False, description=''), - 'loopType': f_fields.String(required=False, description=''), - 'wireWidth': f_fields.String(required=False, description=''), - 'comments': f_fields.String(required=False, description=''), - 'completionStage': f_fields.String(required=False, description=''), - 'structureStage': f_fields.String(required=False, description=''), - 'publicationStage': f_fields.String(required=False, description=''), - 'publicationComments': f_fields.String(required=False, description=''), - 'blSampleStatus': f_fields.String(required=False, description=''), - 'isInSampleChanger': f_fields.Integer(required=False, description=''), - 'lastKnownCenteringPosition': f_fields.String(required=False, description=''), - 'POSITIONID': f_fields.Integer(required=False, description=''), - 'recordTimeStamp': f_fields.DateTime(required=True, description='Creation or last update date/time'), - 'SMILES': f_fields.String(required=False, description='the symbolic description of the structure of a chemical compound'), - 'blSubSampleId': f_fields.Integer(required=False, description=''), - 'lastImageURL': f_fields.String(required=False, description=''), - 'screenComponentGroupId': f_fields.Integer(required=False, description=''), - 'volume': f_fields.Float(required=False, description=''), - 'dimension1': f_fields.String(required=False, description=''), - 'dimension2': f_fields.String(required=False, description=''), - 'dimension3': f_fields.String(required=False, description=''), - 'shape': f_fields.String(required=False, description=''), - 'packingFraction': f_fields.Float(required=False, description=''), - 'preparationTemeprature': f_fields.Integer(required=False, description='Sample preparation temperature, Units: kelvin'), - 'preparationHumidity': f_fields.Float(required=False, description='Sample preparation humidity, Units: %'), - 'blottingTime': f_fields.Integer(required=False, description='Blotting time, Units: sec'), - 'blottingForce': f_fields.Float(required=False, description='Force used when blotting sample, Units: N?'), - 
'blottingDrainTime': f_fields.Integer(required=False, description='Time sample left to drain after blotting, Units: sec'), - 'support': f_fields.String(required=False, description='Sample support material'), - 'subLocation': f_fields.Integer(required=False, description='Indicates the samples location on a multi-sample pin, where 1 is closest to the pin base'), - } - -class SampleSchema(Schema): - """Marshmallows schema class representing Sample table""" - - blSampleId = ma_fields.Integer() - diffractionPlanId = ma_fields.Integer() - crystalId = ma_fields.Integer() - containerId = ma_fields.Integer() - name = ma_fields.String() - code = ma_fields.String() - location = ma_fields.String() - holderLength = ma_fields.String() - loopLength = ma_fields.String() - loopType = ma_fields.String() - wireWidth = ma_fields.String() - comments = ma_fields.String() - completionStage = ma_fields.String() - structureStage = ma_fields.String() - publicationStage = ma_fields.String() - publicationComments = ma_fields.String() - blSampleStatus = ma_fields.String() - isInSampleChanger = ma_fields.Integer() - lastKnownCenteringPosition = ma_fields.String() - POSITIONID = ma_fields.Integer() - recordTimeStamp = ma_fields.DateTime() - SMILES = ma_fields.String() - blSubSampleId = ma_fields.Integer() - lastImageURL = ma_fields.String() - screenComponentGroupId = ma_fields.Integer() - volume = ma_fields.Float() - dimension1 = ma_fields.String() - dimension2 = ma_fields.String() - dimension3 = ma_fields.String() - shape = ma_fields.String() - packingFraction = ma_fields.Float() - preparationTemeprature = ma_fields.Integer() - preparationHumidity = ma_fields.Float() - blottingTime = ma_fields.Integer() - blottingForce = ma_fields.Float() - blottingDrainTime = ma_fields.Integer() - support = ma_fields.String() - subLocation = ma_fields.Integer() - -f_schema = api.model('Sample', dict_schema) -ma_schema = SampleSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/samples.py b/pyispyb/core/schemas/samples.py new file mode 100644 index 00000000..32fa6043 --- /dev/null +++ b/pyispyb/core/schemas/samples.py @@ -0,0 +1,136 @@ +# import datetime + +from typing import Optional + +from pydantic import BaseModel, Field + +from .crystal import Crystal + + +class Position(BaseModel): + posX: int + posY: int + + class Config: + orm_mode = True + + +class SampleMetaData(BaseModel): + subsamples: int = Field(description="Number of sub samples") + datacollections: int = Field(description="Number of data collections") + types: Optional[list[str]] = Field( + description="Types of data collections", nullable=True + ) + queued: Optional[bool] = Field( + description="Whether this sample is queued for data collection" + ) + strategies: Optional[int] = Field(description="Number of successful strategies") + autoIntegrations: Optional[int] = Field( + description="Number of successful auto-integrations" + ) + integratedResolution: Optional[float] = Field( + description="Highest integration resolution", nullable=True + ) + proposal: Optional[str] = Field(description="The associated proposal") + + +class SampleCrystalCreate(BaseModel): + proteinId: int + + +class SampleCreate(BaseModel): + name: str + comments: Optional[str] = Field(title="Comments", nullable=True) + location: Optional[int] = Field( + title="Location", description="Location in container" + ) + containerId: Optional[int] + Crystal: SampleCrystalCreate + + +class SampleProtein(BaseModel): + proposalId: str + name: str + acronym: str + + class 
Config: + orm_mode = True + + +class SampleCrystal(Crystal): + Protein: SampleProtein = Field(title="Protein") + + +class SampleContainer(BaseModel): + code: str + + sampleChangerLocation: Optional[str] = Field( + title="Sample Changer Location", description="Position in sample change" + ) + beamlineLocation: Optional[str] = Field( + title="Beamline Location", description="Beamline if container is assigned" + ) + + class Config: + orm_mode = True + + +class Sample(SampleCreate): + blSampleId: int + + Crystal: SampleCrystal = Field(title="Crystal") + Container: Optional[SampleContainer] = Field(title="Container") + + metadata: Optional[SampleMetaData] = Field(alias="_metadata") + + class Config: + orm_mode = True + + +class SampleImageMetaData(BaseModel): + url: str = Field(description="Url to sample image") + + +class SampleImage(BaseModel): + blSampleImageId: int + blSampleId: int + micronsPerPixelX: float + micronsPerPixelY: float + offsetX: int + offsetY: int + + metadata: SampleImageMetaData = Field(alias="_metadata") + + class Config: + orm_mode = True + + +class SubSampleSample(BaseModel): + name: str + + class Config: + orm_mode = True + + +class SubSampleMetaData(BaseModel): + datacollections: int = Field(description="Number of data collections") + types: Optional[list[str]] = Field(description="Types of data collections") + + +class SubSampleBase(BaseModel): + type: Optional[str] = Field(title="Subsample Type") + comments: Optional[str] = Field(title="Comments", nullable=True) + blSampleId: int + + metadata: SubSampleMetaData = Field(alias="_metadata") + + +class SubSample(SubSampleBase): + blSubSampleId: int + + BLSample: SubSampleSample + Position1: Optional[Position] + Position2: Optional[Position] + + class Config: + orm_mode = True diff --git a/pyispyb/core/schemas/screening.py b/pyispyb/core/schemas/screening.py deleted file mode 100644 index 1ee8ab77..00000000 --- a/pyispyb/core/schemas/screening.py +++ /dev/null @@ -1,60 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
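In samples.py above, SampleCreate doubles as the request body for creating samples, with the nested SampleCrystalCreate requiring only the parent proteinId at creation time. A sketch of a hypothetical create payload:

    from pyispyb.core.schemas.samples import SampleCreate

    # Hypothetical POST body; only `name` and the nested Crystal are mandatory.
    new_sample = SampleCreate.parse_obj(
        {
            "name": "sample-1",
            "location": 3,
            "containerId": 10,
            "Crystal": {"proteinId": 42},
        }
    )
    assert new_sample.Crystal.proteinId == 42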
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'screeningId': f_fields.Integer(required=True, description=''), - 'dataCollectionId': f_fields.Integer(required=False, description=''), - 'bltimeStamp': f_fields.DateTime(required=True, description=''), - 'programVersion': f_fields.String(required=False, description=''), - 'comments': f_fields.String(required=False, description=''), - 'shortComments': f_fields.String(required=False, description=''), - 'diffractionPlanId': f_fields.Integer(required=False, description='references DiffractionPlan'), - 'dataCollectionGroupId': f_fields.Integer(required=False, description=''), - 'xmlSampleInformation': f_fields.String(required=False, description=''), - } - -class ScreeningSchema(Schema): - """Marshmallows schema class representing Screening table""" - - screeningId = ma_fields.Integer() - dataCollectionId = ma_fields.Integer() - bltimeStamp = ma_fields.DateTime() - programVersion = ma_fields.String() - comments = ma_fields.String() - shortComments = ma_fields.String() - diffractionPlanId = ma_fields.Integer() - dataCollectionGroupId = ma_fields.Integer() - xmlSampleInformation = ma_fields.String() - -f_schema = api.model('Screening', dict_schema) -ma_schema = ScreeningSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/session.py b/pyispyb/core/schemas/session.py deleted file mode 100644 index 376d97c7..00000000 --- a/pyispyb/core/schemas/session.py +++ /dev/null @@ -1,94 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'sessionId': f_fields.Integer(required=True, description=''), - 'beamLineSetupId': f_fields.Integer(required=False, description=''), - 'proposalId': f_fields.Integer(required=True, description=''), - 'beamCalendarId': f_fields.Integer(required=False, description=''), - 'projectCode': f_fields.String(required=False, description=''), - 'startDate': f_fields.DateTime(required=False, description=''), - 'endDate': f_fields.DateTime(required=False, description=''), - 'beamLineName': f_fields.String(required=False, description=''), - 'scheduled': f_fields.Integer(required=False, description=''), - 'nbShifts': f_fields.Integer(required=False, description=''), - 'comments': f_fields.String(required=False, description=''), - 'beamLineOperator': f_fields.String(required=False, description=''), - 'bltimeStamp': f_fields.DateTime(required=True, description=''), - 'visit_number': f_fields.Integer(required=False, description=''), - 'usedFlag': f_fields.Integer(required=False, description='indicates if session has Datacollections or XFE or EnergyScans attached'), - 'sessionTitle': f_fields.String(required=False, description='fx accounts only'), - 'structureDeterminations': f_fields.Float(required=False, description=''), - 'dewarTransport': f_fields.Float(required=False, description=''), - 'databackupFrance': f_fields.Float(required=False, description='data backup and express delivery France'), - 'databackupEurope': f_fields.Float(required=False, description='data backup and express delivery Europe'), - 'expSessionPk': f_fields.Integer(required=False, description='smis session Pk '), - 'operatorSiteNumber': f_fields.String(required=False, description='matricule site'), - 'lastUpdate': f_fields.DateTime(required=True, description='last update timestamp: by default the end of the session, the last collect...'), - 'protectedData': f_fields.String(required=False, description='indicates if the data are protected or not'), - 'externalId': f_fields.Integer(required=False, description=''), - 'archived': f_fields.Integer(required=False, description='The data for the session is archived and no longer available on disk'), - } - -class SessionSchema(Schema): - """Marshmallows schema class representing Session table""" - - sessionId = ma_fields.Integer() - beamLineSetupId = ma_fields.Integer() - proposalId = ma_fields.Integer() - beamCalendarId = ma_fields.Integer() - projectCode = ma_fields.String() - startDate = ma_fields.DateTime() - endDate = ma_fields.DateTime() - beamLineName = ma_fields.String() - scheduled = ma_fields.Integer() - nbShifts = ma_fields.Integer() - comments = ma_fields.String() - beamLineOperator = ma_fields.String() - bltimeStamp = ma_fields.DateTime() - visit_number = ma_fields.Integer() - usedFlag = ma_fields.Integer() - sessionTitle = ma_fields.String() - structureDeterminations = ma_fields.Float() - dewarTransport = ma_fields.Float() - databackupFrance = ma_fields.Float() - databackupEurope = ma_fields.Float() - expSessionPk = ma_fields.Integer() - operatorSiteNumber = ma_fields.String() - lastUpdate = ma_fields.DateTime() - protectedData = ma_fields.String() - externalId = ma_fields.Integer() - archived = ma_fields.Integer() - -f_schema = api.model('Session', dict_schema) -ma_schema = SessionSchema() -json_schema = 
JSONSchema().dump(ma_schema) diff --git a/pyispyb/core/schemas/sessions.py b/pyispyb/core/schemas/sessions.py new file mode 100644 index 00000000..b2fd87b9 --- /dev/null +++ b/pyispyb/core/schemas/sessions.py @@ -0,0 +1,73 @@ +# import datetime +from datetime import datetime +from typing import Optional + +from pydantic import BaseModel, Field + + +class SessionMetaData(BaseModel): + datacollections: Optional[int] = Field(description="Number of datacollections") + uiGroups: Optional[list[str]] = Field(description="UI groups for this session") + persons: int = Field( + description="Number of people registered on this session (via SessionHasPerson)" + ) + active: bool = Field(description="Whether this session is active") + active_soon: bool = Field( + description="Whether this session is due to start soon or has ended recently (+/-20 min)" + ) + sessionTypes: list[str] = Field(description="Session types for this session") + + +class BeamLineSetup(BaseModel): + beamLineSetupId: int + synchrotronMode: Optional[str] + undulatorType1: Optional[str] + undulatorType2: Optional[str] + undulatorType3: Optional[str] + focalSpotSizeAtSample: Optional[float] + focusingOptic: Optional[str] + beamDivergenceHorizontal: Optional[float] + beamDivergenceVertical: Optional[float] + polarisation: Optional[float] + monochromatorType: Optional[str] + setupDate: Optional[datetime] + synchrotronName: Optional[str] + maxExpTimePerDataCollection: Optional[float] + minExposureTimePerImage: Optional[float] + goniostatMaxOscillationSpeed: Optional[float] + goniostatMinOscillationWidth: Optional[float] + minTransmission: Optional[float] + CS: Optional[float] + + class Config: + orm_mode = True + + +class SessionBase(BaseModel): + sessionId: int + proposalId: int + session: str + proposal: str + + BeamLineSetup: Optional[BeamLineSetup] + + visit_number: Optional[int] + startDate: datetime + endDate: datetime + beamLineName: str + beamLineOperator: Optional[str] + scheduled: Optional[bool] + comments: Optional[str] + nbReimbDewars: Optional[int] + + class Config: + orm_mode = True + + +class Session(SessionBase): + sessionId: int + + metadata: SessionMetaData = Field(alias="_metadata") + + class Config: + orm_mode = True diff --git a/pyispyb/core/schemas/shipping.py b/pyispyb/core/schemas/shipping.py index 93c8c98a..063c964c 100644 --- a/pyispyb/core/schemas/shipping.py +++ b/pyispyb/core/schemas/shipping.py @@ -1,104 +1,87 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
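In sessions.py above, SessionBase carries the raw session columns while Session layers the computed "_metadata" on top; datetime fields parse from ISO strings as usual in pydantic. A small illustrative construction (all values invented):

    from pyispyb.core.schemas.sessions import SessionBase

    session = SessionBase.parse_obj(
        {
            "sessionId": 1,
            "proposalId": 1,
            "session": "MX1234-1",
            "proposal": "MX1234",
            "startDate": "2022-01-01T09:00:00",
            "endDate": "2022-01-02T09:00:00",
            "beamLineName": "BL01",
        }
    )
    assert session.visit_number is None  # optional columns default to None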
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'shippingId': f_fields.Integer(required=True, description=''), - 'proposalId': f_fields.Integer(required=True, description=''), - 'shippingName': f_fields.String(required=False, description=''), - 'deliveryAgent_agentName': f_fields.String(required=False, description=''), - 'deliveryAgent_shippingDate': f_fields.String(required=False, description=''), - 'deliveryAgent_deliveryDate': f_fields.String(required=False, description=''), - 'deliveryAgent_agentCode': f_fields.String(required=False, description=''), - 'deliveryAgent_flightCode': f_fields.String(required=False, description=''), - 'shippingStatus': f_fields.String(required=False, description=''), - 'bltimeStamp': f_fields.DateTime(required=False, description=''), - 'laboratoryId': f_fields.Integer(required=False, description=''), - 'isStorageShipping': f_fields.Integer(required=False, description=''), - 'creationDate': f_fields.DateTime(required=False, description=''), - 'comments': f_fields.String(required=False, description=''), - 'sendingLabContactId': f_fields.Integer(required=False, description=''), - 'returnLabContactId': f_fields.Integer(required=False, description=''), - 'returnCourier': f_fields.String(required=False, description=''), - 'dateOfShippingToUser': f_fields.DateTime(required=False, description=''), - 'shippingType': f_fields.String(required=False, description=''), - 'SAFETYLEVEL': f_fields.String(required=False, description=''), - 'deliveryAgent_flightCodeTimestamp': f_fields.DateTime(required=False, description='Date flight code created, if automatic'), - 'deliveryAgent_label': f_fields.String(required=False, description='Base64 encoded pdf of airway label'), - 'readyByTime': f_fields.String(required=False, description='Time shipment will be ready'), - 'closeTime': f_fields.String(required=False, description='Time after which shipment cannot be picked up'), - 'physicalLocation': f_fields.String(required=False, description='Where shipment can be picked up from: i.e. 
Stores'), - 'deliveryAgent_pickupConfirmationTimestamp': f_fields.DateTime(required=False, description='Date picked confirmed'), - 'deliveryAgent_pickupConfirmation': f_fields.String(required=False, description='Confirmation number of requested pickup'), - 'deliveryAgent_readyByTime': f_fields.String(required=False, description='Confirmed ready-by time'), - 'deliveryAgent_callinTime': f_fields.String(required=False, description='Confirmed courier call-in time'), - 'deliveryAgent_productcode': f_fields.String(required=False, description='A code that identifies which shipment service was used'), - 'deliveryAgent_flightCodePersonId': f_fields.Integer(required=False, description='The person who created the AWB (for auditing)'), - } - -class ShippingSchema(Schema): - """Marshmallows schema class representing Shipping table""" - - shippingId = ma_fields.Integer() - proposalId = ma_fields.Integer() - shippingName = ma_fields.String() - deliveryAgent_agentName = ma_fields.String() - deliveryAgent_shippingDate = ma_fields.String() - deliveryAgent_deliveryDate = ma_fields.String() - deliveryAgent_agentCode = ma_fields.String() - deliveryAgent_flightCode = ma_fields.String() - shippingStatus = ma_fields.String() - bltimeStamp = ma_fields.DateTime() - laboratoryId = ma_fields.Integer() - isStorageShipping = ma_fields.Integer() - creationDate = ma_fields.DateTime() - comments = ma_fields.String() - sendingLabContactId = ma_fields.Integer() - returnLabContactId = ma_fields.Integer() - returnCourier = ma_fields.String() - dateOfShippingToUser = ma_fields.DateTime() - shippingType = ma_fields.String() - SAFETYLEVEL = ma_fields.String() - deliveryAgent_flightCodeTimestamp = ma_fields.DateTime() - deliveryAgent_label = ma_fields.String() - readyByTime = ma_fields.String() - closeTime = ma_fields.String() - physicalLocation = ma_fields.String() - deliveryAgent_pickupConfirmationTimestamp = ma_fields.DateTime() - deliveryAgent_pickupConfirmation = ma_fields.String() - deliveryAgent_readyByTime = ma_fields.String() - deliveryAgent_callinTime = ma_fields.String() - deliveryAgent_productcode = ma_fields.String() - deliveryAgent_flightCodePersonId = ma_fields.Integer() - -f_schema = api.model('Shipping', dict_schema) -ma_schema = ShippingSchema() -json_schema = JSONSchema().dump(ma_schema) +from datetime import datetime +import enum +from typing import Optional +from pydantic import BaseModel, Field + +from ispyb import models + +d = models.Dewar + + +class SafetyLevelEnum(str, enum.Enum): + GREEN = "GREEN" + YELLOW = "YELLOW" + RED = "RED" + + +class ShippingCreate(BaseModel): + proposalId: int + shippingName: str = Field(title="Name") + sendingLabContactId: int = Field(title="Sending Lab Contact") + returnLabContactId: int = Field(title="Return Lab Contact") + safetyLevel: SafetyLevelEnum = Field(title="Safety Level", default="GREEN") + comments: Optional[str] = Field(title="Comments") + + +class ShippingMetaData(BaseModel): + dewars: int = Field(description="Number of dewars") + + +class ShippingLabContactPerson(BaseModel): + givenName: str + familyName: str + + class Config: + orm_mode = True + + +class ShippingLabContact(BaseModel): + cardName: str + + Person: ShippingLabContactPerson + + class Config: + orm_mode = True + + +class Shipping(ShippingCreate): + shippingId: int + bltimeStamp: Optional[datetime] = Field(title="Created at") + + sendingLabContactId: Optional[int] = Field(title="Sending Lab Contact") + returnLabContactId: Optional[int] = Field(title="Return Lab Contact") + safetyLevel: 
Optional[SafetyLevelEnum] = Field(title="Safety Level") + + metadata: Optional[ShippingMetaData] = Field(alias="_metadata") + + LabContact: Optional[ShippingLabContact] = Field(title="Return Lab Contact") + LabContact1: Optional[ShippingLabContact] = Field(title="Sending Lab Contact") + + class Config: + orm_mode = True + json_encoders = {datetime: lambda obj: obj.isoformat() + "+00:00"} + + +class Dewar(BaseModel): + code: str = Field(title="Name") + + Shipping: Shipping + + class Config: + orm_mode = True + + +class Container(BaseModel): + code: str = Field(title="Name") + + sampleChangerLocation: Optional[str] = Field( + description="Position in sample change" + ) + beamlineLocation: Optional[str] = Field( + description="Beamline if container is assigned" + ) + + Dewar: Dewar + + class Config: + orm_mode = True diff --git a/pyispyb/core/schemas/stats.py b/pyispyb/core/schemas/stats.py new file mode 100644 index 00000000..4eae09bf --- /dev/null +++ b/pyispyb/core/schemas/stats.py @@ -0,0 +1,172 @@ +from datetime import datetime +from typing import Optional, Union + +from pydantic import BaseModel, Field + + +class BreakdownOverviewCounts(BaseModel): + datacollections: int + failed: int + datacollectionTypes: dict[str | None, int] + robot: int + edge: int + xrf: int + + +class BreakdownOverviewSession(BaseModel): + startDate: datetime + endDate: datetime + duration: float + beamLineName: str + sessionId: str + session: Optional[str] + counts: BreakdownOverviewCounts + + +class BreakdownOverviewRun(BaseModel): + startDate: datetime + endDate: datetime + duration: float + run: str + counts: BreakdownOverviewCounts + + +class BreakdownPoint(BaseModel): + eventType: str + startTime: datetime + endTime: datetime + + protein: Optional[str] + sample: Optional[str] + + subType: Optional[str] + status: Optional[str] + + title: Optional[str] + session: Optional[str] + sessionId: Optional[str] + + +class BreakdownPlottable(BaseModel): + title: str + unit: Optional[str] + data: list + + +class BreakdownPlottables(BaseModel): + time: list[datetime] + series: list[BreakdownPlottable] + + +class Breakdown(BaseModel): + overview: Union[BreakdownOverviewSession, BreakdownOverviewRun] + history: list[BreakdownPoint] + plottables: BreakdownPlottables + + +class TimeEntry(BaseModel): + duration: float = Field(0, title="Total time") + startup: float = Field(0, title="Time before first data collection") + datacollection: float = Field(0, title="Time used for data collections") + edge: float = Field(0, title="Time used for energy scans") + xrf: float = Field(0, title="Total used for xrf scans") + robot: float = Field(0, title="Total used for robot / sample actions") + strategy: float = Field(0, title="Time waiting for strategy") + centring: float = Field(0, title="Total waiting for centring") + fault: float = Field(0, title="Time taken with faults") + remaining: float = Field(0, title="Time remaining") + thinking: float = Field(0, title="Time not used by other types") + + def calc_thinking(self) -> float: + thinking = ( + self.duration + - self.startup + - self.datacollection + - self.edge + - self.xrf + - self.robot + - self.strategy + - self.centring + - self.fault + ) + + return thinking if thinking >= 0 else 0 + + +class AverageTimeEntry(TimeEntry): + count: int = 0 + + def average(self, *models: TimeEntry): + for field in TimeEntry.__fields__.keys(): + if len(models) > 0: + setattr( + self, + field, + sum([getattr(model, field) for model in models]) / len(models), + ) + + +class 
SessionTimeEntry(TimeEntry): + sessionId: int = Field(title="The session id") + + +class Times(BaseModel): + average: TimeEntry = Field(title="The average times") + sessions: list[SessionTimeEntry] = Field(title="Times per session") + + +class Hourly(BaseModel): + hour: list[int] + average: list[float] + + +class Hourlies(BaseModel): + datacollections: Hourly + loaded: Hourly + + +class ExperimentTypeMessages(BaseModel): + count: int = Field(title="Frequency of this error message") + message: str = Field(title="The error message") + + +class ExperimentTypeGroup(BaseModel): + experimentType: str = Field(title="Experiment type") + total: int = Field(title="Total data collections") + failed: int = Field(title="Failed data collections") + failedPercent: float = 0 + aborted: int = Field(title="Aborted data collections") + abortedPercent: float = 0 + messages: list[ExperimentTypeMessages] = Field(title="Error Messages") + + +class ExperimentTypeGroupPrepare(ExperimentTypeGroup): + """Used to prepare the Error data""" + + messages: dict[str, ExperimentTypeMessages] + + +class Errors(BaseModel): + totals: list[ExperimentTypeGroup] + + +class ParameterHistogram(BaseModel): + beamLineName: str + bin: list[int] + frequency: list[int] + + +class ParameterHistograms(BaseModel): + parameter: str + unit: str + beamLines: list[ParameterHistogram] + + +class VRun(BaseModel): + run: str + runId: int + startDate: datetime + endDate: datetime + + class Config: + orm_mode = True diff --git a/pyispyb/core/schemas/userportalsync.py b/pyispyb/core/schemas/userportalsync.py new file mode 100644 index 00000000..d76f3cbc --- /dev/null +++ b/pyispyb/core/schemas/userportalsync.py @@ -0,0 +1,143 @@ +from typing import List, Optional, Literal +from pydantic import BaseModel, conlist, root_validator +from pydantic_sqlalchemy import sqlalchemy_to_pydantic +from pyispyb.core.schemas.laboratories import LaboratoryCreate +from ispyb import models +from datetime import datetime + +PydanticPerson = sqlalchemy_to_pydantic( + models.Person, exclude={"personId", "laboratoryId", "recordTimeStamp"} +) +PydanticProposal = sqlalchemy_to_pydantic( + models.Proposal, exclude={"proposalId", "personId", "bltimeStamp"} +) +PydanticSession = sqlalchemy_to_pydantic( + models.BLSession, exclude={"sessionId", "proposalId", "bltimeStamp", "comments"} +) + +""" +Excluding _global field to avoid a Pydantic RuntimeWarning +RuntimeWarning: fields may not start with an underscore, ignoring "_global" +""" +PydanticProtein = sqlalchemy_to_pydantic( + models.Protein, + exclude={"proteinId", "proposalId", "personId", "bltimeStamp", "_global"}, +) +PydanticSessionHasPerson = sqlalchemy_to_pydantic( + models.SessionHasPerson, exclude={"sessionId", "personId"} +) +PydanticLabContact = sqlalchemy_to_pydantic( + models.LabContact, + exclude={"labContactId", "proposalId", "personId", "recordTimeStamp"}, +) + + +class UPPerson(PydanticPerson): + # At least login or externalId required to be able to check for existing Person in DB (to update or create) + login: Optional[str] = None + externalId: Optional[int] = None + + # https://github.com/samuelcolvin/pydantic/issues/506 + @root_validator() + def check_login_or_externalId(cls, values): + if (values.get("login") is None) and (values.get("externalId") is None): + raise ValueError( + "either login or externalId is required for a Person entity" + ) + return values + + class Config: + orm_mode = True + + +class PersonSessionOptions(PydanticSessionHasPerson): + role: Optional[ + Literal[ + "Local Contact", + 
"Local Contact 2", + "Staff", + "Team Leader", + "Co-Investigator", + "Principal Investigator", + "Alternate Contact", + ] + ] + + +class PersonProposalLaboratory(UPPerson): + laboratory: Optional[LaboratoryCreate] + + +class PersonSessionLaboratory(UPPerson): + laboratory: Optional[LaboratoryCreate] + # Optional section to be used in Session_has_Person + session_options: Optional[PersonSessionOptions] + + +class UPLabContact(PydanticLabContact): + # Person is required for a LabContact + person: PersonProposalLaboratory + # Make dewarAvgCustomsValue and dewarAvgTransportValue optional fields + # Somehow they are required by default + dewarAvgCustomsValue: Optional[int] + dewarAvgTransportValue: Optional[int] + + class Config: + orm_mode = True + + +class UPProposal(PydanticProposal): + # proposalCode and proposalNumber required + proposalCode: str + proposalNumber: str + # externailId is an optional Integer and conversions to bynary 16 are done internally + externalId: Optional[int] = None + # Here we need minimum 1 Person to be related to the Proposal (foreign key constraint) + persons: conlist(PersonProposalLaboratory, min_items=1) + # LabContacts are always related to a proposal + labcontacts: Optional[List[UPLabContact]] + + class Config: + orm_mode = True + + +class UPProtein(PydanticProtein): + # It may sync by checking protein acronym and proposalId in DB + acronym: str + # Can also use externalId to be able to check for existing protein in DB (to update or create) + externalId: Optional[int] + person: PersonProposalLaboratory + + class Config: + orm_mode = True + + +class UPSession(PydanticSession): + # expSessionPk or externalId to be able to check for existing session in DB (to update or create) + # The expSessionPk field might be deprecated later + expSessionPk: Optional[int] + externalId: Optional[int] + lastUpdate: Optional[datetime] + # persons related to sessions is optional + persons: Optional[List[PersonSessionLaboratory]] + + @root_validator() + def check_expSessionPk_or_externalId(cls, values): + if (values.get("expSessionPk") is None) and (values.get("externalId") is None): + raise ValueError( + "either expSessionPk or externalId is required for a Session entity" + ) + return values + + class Config: + orm_mode = True + + +class PydanticProposal(BaseModel): + proposal: UPProposal + sessions: Optional[List[UPSession]] + proteins: Optional[List[UPProtein]] + + +class UserPortalProposalSync(PydanticProposal): + pass diff --git a/pyispyb/core/schemas/utils.py b/pyispyb/core/schemas/utils.py new file mode 100644 index 00000000..641764ea --- /dev/null +++ b/pyispyb/core/schemas/utils.py @@ -0,0 +1,53 @@ +from typing import Optional + +from pydantic import BaseModel, create_model +from pydantic.main import ModelMetaclass + + +def paginated(model: ModelMetaclass) -> ModelMetaclass: + class PaginatedModel(BaseModel): + total: int + results: list[model] # type: ignore + skip: int + limit: int + + cls_name = f"Paginated<{model.__name__}>" + PaginatedModel.__name__ = cls_name + PaginatedModel.__qualname__ = cls_name + + return PaginatedModel + + +def make_optional(baseclass: BaseModel, *, exclude: dict[str, any] = {}) -> BaseModel: + """Make a pydantic models fields optional (for patch requests) + + Optionally exclude some fields (with nesting): + ``` + exclude={ + "proposalId": True, + "Person": { + "givenName": True, + "Laboratory": {"laboratoryExtPk": True}, + }, + } + ``` + """ + # https://stackoverflow.com/questions/67699451/make-every-fields-as-optional-with-pydantic + fields 
= baseclass.__fields__ + + validators = {"__validators__": baseclass.__validators__} + optional_fields = { + key: (Optional[item.type_], None) + for key, item in fields.items() + if exclude.get(key, None) is not True + } + new_model = create_model( + f"{baseclass.__name__}Optional", **optional_fields, __validators__=validators + ) + + # Deal with nested models + for key, item in new_model.__fields__.items(): + if item.is_complex(): + item.type_ = make_optional(item.type_, exclude=exclude.get(key, {})) + + return new_model diff --git a/pyispyb/core/schemas/v_datacollection_summary_phasing.py b/pyispyb/core/schemas/v_datacollection_summary_phasing.py deleted file mode 100644 index 3e4cc4ee..00000000 --- a/pyispyb/core/schemas/v_datacollection_summary_phasing.py +++ /dev/null @@ -1,100 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'phasingStepId': f_fields.Integer(required=False, description=''), - 'previousPhasingStepId': f_fields.Integer(required=False, description=''), - 'phasingAnalysisId': f_fields.Integer(required=False, description=''), - 'autoProcIntegrationId': f_fields.Integer(required=True, description='Primary key (auto-incremented)'), - 'dataCollectionId': f_fields.Integer(required=True, description='DataCollection item'), - 'anomalous': f_fields.Integer(required=False, description='boolean type:0 noanoum - 1 anoum'), - 'spaceGroup': f_fields.String(required=False, description='Space group'), - 'autoProcId': f_fields.Integer(required=False, description='Primary key (auto-incremented)'), - 'phasingStepType': f_fields.String(required=False, description='enum(PREPARE,SUBSTRUCTUREDETERMINATION,PHASING,MODELBUILDING)'), - 'method': f_fields.String(required=False, description=''), - 'solventContent': f_fields.String(required=False, description=''), - 'enantiomorph': f_fields.String(required=False, description=''), - 'lowRes': f_fields.String(required=False, description=''), - 'highRes': f_fields.String(required=False, description=''), - 'autoProcScalingId': f_fields.Integer(required=False, description='Primary key (auto-incremented)'), - 'spaceGroupShortName': f_fields.String(required=False, description='short name without blank'), - 'processingPrograms': f_fields.String(required=False, description='Processing programs (comma separated)'), - 'processingStatus': f_fields.Integer(required=False, description='success (1) / fail (0)'), - 'phasingPrograms': f_fields.String(required=False, description='Phasing programs (comma separated)'), - 'phasingStatus': f_fields.Integer(required=False, description='success (1) / fail (0)'), - 'phasingStartTime': 
diff --git a/pyispyb/core/schemas/v_datacollection_summary_phasing.py b/pyispyb/core/schemas/v_datacollection_summary_phasing.py
deleted file mode 100644
index 3e4cc4ee..00000000
--- a/pyispyb/core/schemas/v_datacollection_summary_phasing.py
+++ /dev/null
@@ -1,100 +0,0 @@
-"""
-Project: py-ispyb.
-
-https://github.com/ispyb/py-ispyb
-
-This file is part of py-ispyb software.
-
-py-ispyb is free software: you can redistribute it and/or modify
-it under the terms of the GNU Lesser General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
-
-py-ispyb is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU Lesser General Public License for more details.
-
-You should have received a copy of the GNU Lesser General Public License
-along with py-ispyb. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-
-__license__ = "LGPLv3+"
-
-
-
-from marshmallow import Schema, fields as ma_fields
-from flask_restx import fields as f_fields
-from marshmallow_jsonschema import JSONSchema
-
-from pyispyb.app.extensions.api import api_v1 as api
-
-dict_schema = {
-    'phasingStepId': f_fields.Integer(required=False, description=''),
-    'previousPhasingStepId': f_fields.Integer(required=False, description=''),
-    'phasingAnalysisId': f_fields.Integer(required=False, description=''),
-    'autoProcIntegrationId': f_fields.Integer(required=True, description='Primary key (auto-incremented)'),
-    'dataCollectionId': f_fields.Integer(required=True, description='DataCollection item'),
-    'anomalous': f_fields.Integer(required=False, description='boolean type:0 noanoum - 1 anoum'),
-    'spaceGroup': f_fields.String(required=False, description='Space group'),
-    'autoProcId': f_fields.Integer(required=False, description='Primary key (auto-incremented)'),
-    'phasingStepType': f_fields.String(required=False, description='enum(PREPARE,SUBSTRUCTUREDETERMINATION,PHASING,MODELBUILDING)'),
-    'method': f_fields.String(required=False, description=''),
-    'solventContent': f_fields.String(required=False, description=''),
-    'enantiomorph': f_fields.String(required=False, description=''),
-    'lowRes': f_fields.String(required=False, description=''),
-    'highRes': f_fields.String(required=False, description=''),
-    'autoProcScalingId': f_fields.Integer(required=False, description='Primary key (auto-incremented)'),
-    'spaceGroupShortName': f_fields.String(required=False, description='short name without blank'),
-    'processingPrograms': f_fields.String(required=False, description='Processing programs (comma separated)'),
-    'processingStatus': f_fields.Integer(required=False, description='success (1) / fail (0)'),
-    'phasingPrograms': f_fields.String(required=False, description='Phasing programs (comma separated)'),
-    'phasingStatus': f_fields.Integer(required=False, description='success (1) / fail (0)'),
-    'phasingStartTime': f_fields.DateTime(required=False, description='Processing start time'),
-    'phasingEndTime': f_fields.DateTime(required=False, description='Processing end time'),
-    'sessionId': f_fields.Integer(required=False, description='references Session table'),
-    'proposalId': f_fields.Integer(required=False, description=''),
-    'blSampleId': f_fields.Integer(required=False, description=''),
-    'name': f_fields.String(required=False, description=''),
-    'code': f_fields.String(required=False, description=''),
-    'acronym': f_fields.String(required=False, description=''),
-    'proteinId': f_fields.Integer(required=False, description=''),
-    }
-
-class v_datacollection_summary_phasingSchema(Schema):
-    """Marshmallows schema class representing v_datacollection_summary_phasing table"""
-
-    phasingStepId = ma_fields.Integer()
-    previousPhasingStepId = ma_fields.Integer()
-    phasingAnalysisId = ma_fields.Integer()
-    autoProcIntegrationId = ma_fields.Integer()
-    dataCollectionId = ma_fields.Integer()
-    anomalous = ma_fields.Integer()
-    spaceGroup = ma_fields.String()
-    autoProcId = ma_fields.Integer()
-    phasingStepType = ma_fields.String()
-    method = ma_fields.String()
-    solventContent = ma_fields.String()
-    enantiomorph = ma_fields.String()
-    lowRes = ma_fields.String()
-    highRes = ma_fields.String()
-    autoProcScalingId = ma_fields.Integer()
-    spaceGroupShortName = ma_fields.String()
-    processingPrograms = ma_fields.String()
-    processingStatus = ma_fields.Integer()
-    phasingPrograms = ma_fields.String()
-    phasingStatus = ma_fields.Integer()
-    phasingStartTime = ma_fields.DateTime()
-    phasingEndTime = ma_fields.DateTime()
-    sessionId = ma_fields.Integer()
-    proposalId = ma_fields.Integer()
-    blSampleId = ma_fields.Integer()
-    name = ma_fields.String()
-    code = ma_fields.String()
-    acronym = ma_fields.String()
-    proteinId = ma_fields.Integer()
-
-f_schema = api.model('v_datacollection_summary_phasing', dict_schema)
-ma_schema = v_datacollection_summary_phasingSchema()
-json_schema = JSONSchema().dump(ma_schema)
diff --git a/pyispyb/core/schemas/validators.py b/pyispyb/core/schemas/validators.py
new file mode 100644
index 00000000..ace82b1a
--- /dev/null
+++ b/pyispyb/core/schemas/validators.py
@@ -0,0 +1,20 @@
+from typing import Any
+from pydantic import Field
+
+
+def WordDash(*args: Any, **kwargs: Any) -> Any:
+    return Field(*args, regex=r"^([\w-])+$", **kwargs)
+
+
+def WordDashSpace(*args: Any, **kwargs: Any) -> Any:
+    return Field(*args, regex=r"^([\w\s-])+$", **kwargs)
+
+
+def WordDashSpaceNC(*args: Any, **kwargs: Any) -> Any:
+    """Word, Space, New Line, Comma, Dash"""
+    return Field(*args, regex=r"^([\w\s\n,-])+$", **kwargs)
+
+
+def WordDashSpaceBC(*args: Any, **kwargs: Any) -> Any:
+    """Word, Space, Brackets, Quote, Dash"""
+    return Field(*args, regex=r"^([\w\s\(\)'-])+$", **kwargs)
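These factories return pydantic Fields whose regex rejects anything outside a conservative character set. A minimal sketch of their intended use (the SampleName model is hypothetical):

```python
from pydantic import BaseModel, ValidationError

class SampleName(BaseModel):
    # WordDash: word characters and dashes only
    name: str = WordDash(description="Sample name")

SampleName(name="lysozyme-01")  # matches ^([\w-])+$

try:
    SampleName(name="lysozyme 01!")  # space and "!" are rejected
except ValidationError as exc:
    print(exc)
```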
diff --git a/pyispyb/core/schemas/xfefluorescencespectrum.py b/pyispyb/core/schemas/xfefluorescencespectrum.py
new file mode 100644
index 00000000..dee6c944
--- /dev/null
+++ b/pyispyb/core/schemas/xfefluorescencespectrum.py
@@ -0,0 +1,8 @@
+from pydantic import BaseModel
+
+
+class XFEFluorescenceSpectrum(BaseModel):
+    xfeFluorescenceSpectrumId: int
+
+    class Config:
+        orm_mode = True
diff --git a/pyispyb/dependencies.py b/pyispyb/dependencies.py
new file mode 100644
index 00000000..83a1b61b
--- /dev/null
+++ b/pyispyb/dependencies.py
@@ -0,0 +1,66 @@
+import enum
+import logging
+from typing import Callable, Optional, Any
+
+from fastapi import HTTPException, Query
+from pydantic import conint
+
+from .app.globals import g
+
+
+logger = logging.getLogger(__name__)
+
+
+class Order(str, enum.Enum):
+    asc = "asc"
+    desc = "desc"
+
+
+def pagination(
+    skip: Optional[conint(ge=0)] = Query(0, description="Results to skip"),
+    limit: Optional[conint(gt=0)] = Query(25, description="Number of results to show"),
+) -> dict[str, int]:
+    return {"skip": skip, "limit": limit}
+
+
+def order_by_factory(columns: dict[str, Any], enumName: str) -> Callable:
+    order_by_enum = enum.Enum(enumName, {k: k for k in columns.keys()})
+
+    def order_by(
+        order_by: Optional[order_by_enum] = Query(
+            None, description="Field to order by"
+        ),
+        order: Optional[Order] = Query(None, description="Order direction"),
+    ) -> dict[str, Any]:
+        order_fields = {"order_by": order_by}
+        order_fields["order"] = order
+
+        return order_fields
+
+    return order_by
+
+
+def filter(filter: str) -> str:
+    return filter
+
+
+def permission(permission: str):
+    """Requires the user to have the specified permission"""
+
+    async def with_permission() -> bool:
+        if permission not in g.permissions:
+            logger.info(
+                f"User {g.login} tried to access route with required permission {permission}"
+            )
+            raise HTTPException(
+                status_code=403,
+                detail="Not Authorized",
+            )
+
+        return True
+
+    return with_permission
+
+
+def has_permission(permission: str):
+    return permission in g.permissions
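A sketch of how these dependencies plug into a FastAPI route (the path, permission name and columns are hypothetical; only pagination, order_by_factory and permission come from the module above):

```python
from fastapi import APIRouter, Depends

router = APIRouter()
order_by = order_by_factory({"proposalId": None, "title": None}, "ProposalOrder")

@router.get("/proposals", dependencies=[Depends(permission("view_proposals"))])
def list_proposals(
    page: dict[str, int] = Depends(pagination),
    order: dict = Depends(order_by),
):
    # page defaults to {"skip": 0, "limit": 25};
    # order carries the validated order_by/order enum values
    ...
```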
diff --git a/pyispyb/em/__init__.py b/pyispyb/em/__init__.py
deleted file mode 100644
index a5863f2d..00000000
--- a/pyispyb/em/__init__.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# encoding: utf-8
-# pylint: disable=no-member
-#
-# Project: py-ispyb
-# https://github.com/ispyb/py-ispyb
-#
-# This file is part of py-ispyb software.
-#
-# py-ispyb is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# py-ispyb is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with py-ispyb. If not, see <http://www.gnu.org/licenses/>.
-
-
-__license__ = "LGPLv3+"
-
-
-def init_app(app):
-
-    from . import modules
-
-    modules.init_app(app)
-
-    from . import routes
-
-    routes.init_app(app)
-
-    print("ispyb-em loaded")
-    # app.logger.debug("ISPyB server started")
diff --git a/pyispyb/em/models.py b/pyispyb/em/models.py
deleted file mode 100644
index eb425d45..00000000
--- a/pyispyb/em/models.py
+++ /dev/null
@@ -1,3501 +0,0 @@
-# coding: utf-8
-from sqlalchemy import BINARY, Column, Date, DateTime, Float, ForeignKey, Index, Integer, LargeBinary, Numeric, SmallInteger, String, Table, Text, Time
-from sqlalchemy.orm import relationship
-from sqlalchemy.schema import FetchedValue
-from sqlalchemy.dialects.mysql.enumerated import ENUM
-from sqlalchemy.dialects.mysql.types import LONGBLOB
-from flask_sqlalchemy import SQLAlchemy
-
-
-from pyispyb.app.extensions import db
-
-
-
-class AbInitioModel(db.Model):
-    __tablename__ = 'AbInitioModel'
-
-    abInitioModelId = db.Column(db.Integer, primary_key=True)
-    modelListId = db.Column(db.ForeignKey('ModelList.modelListId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    averagedModelId = db.Column(db.ForeignKey('Model.modelId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    rapidShapeDeterminationModelId = db.Column(db.ForeignKey('Model.modelId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    shapeDeterminationModelId = db.Column(db.ForeignKey('Model.modelId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    comments = db.Column(db.String(512))
-    creationTime = db.Column(db.DateTime)
-
-    Model = db.relationship('Model', primaryjoin='AbInitioModel.averagedModelId == Model.modelId')
-    ModelList = db.relationship('ModelList', primaryjoin='AbInitioModel.modelListId == ModelList.modelListId')
-    Model1 = db.relationship('Model', primaryjoin='AbInitioModel.rapidShapeDeterminationModelId == Model.modelId')
-    Model2 = db.relationship('Model', primaryjoin='AbInitioModel.shapeDeterminationModelId == Model.modelId')
-
-
-
-class Additive(db.Model):
-    __tablename__ = 'Additive'
-
-    additiveId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(45))
-    additiveType = db.Column(db.String(45))
-    comments = db.Column(db.String(512))
-
-
-
-class AdminActivity(db.Model):
-    __tablename__ = 'AdminActivity'
-
-    adminActivityId = db.Column(db.Integer, primary_key=True)
-    username = db.Column(db.String(45), nullable=False, unique=True, server_default=db.FetchedValue())
-    action = db.Column(db.String(45), index=True)
-    comments = db.Column(db.String(100))
-    dateTime = db.Column(db.DateTime)
-
-
-
-class AdminVar(db.Model):
-    __tablename__ = 'AdminVar'
-
-    varId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(32), index=True)
-    value = db.Column(db.String(1024), index=True)
-
-
-
-class Aperture(db.Model):
-    __tablename__ = 'Aperture'
-
-    apertureId = db.Column(db.Integer, primary_key=True)
-    sizeX = db.Column(db.Float)
-
-
-
-class Assembly(db.Model):
-    __tablename__ = 'Assembly'
-
-    assemblyId = db.Column(db.Integer, primary_key=True)
-    macromoleculeId = db.Column(db.ForeignKey('Macromolecule.macromoleculeId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    creationDate = db.Column(db.DateTime)
-    comments = db.Column(db.String(255))
-
-    Macromolecule = db.relationship('Macromolecule', primaryjoin='Assembly.macromoleculeId == Macromolecule.macromoleculeId')
-
-
-
-class AssemblyHasMacromolecule(db.Model):
-    __tablename__ = 'AssemblyHasMacromolecule'
-
-    AssemblyHasMacromoleculeId = db.Column(db.Integer, primary_key=True)
-    assemblyId = db.Column(db.ForeignKey('Assembly.assemblyId', ondelete='CASCADE', onupdate='CASCADE'),
nullable=False, index=True) - macromoleculeId = db.Column(db.ForeignKey('Macromolecule.macromoleculeId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - - Assembly = db.relationship('Assembly', primaryjoin='AssemblyHasMacromolecule.assemblyId == Assembly.assemblyId') - Macromolecule = db.relationship('Macromolecule', primaryjoin='AssemblyHasMacromolecule.macromoleculeId == Macromolecule.macromoleculeId') - - - -class AssemblyRegion(db.Model): - __tablename__ = 'AssemblyRegion' - - assemblyRegionId = db.Column(db.Integer, primary_key=True) - assemblyHasMacromoleculeId = db.Column(db.ForeignKey('AssemblyHasMacromolecule.AssemblyHasMacromoleculeId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - assemblyRegionType = db.Column(db.String(45)) - name = db.Column(db.String(45)) - fromResiduesBases = db.Column(db.String(45)) - toResiduesBases = db.Column(db.String(45)) - - AssemblyHasMacromolecule = db.relationship('AssemblyHasMacromolecule', primaryjoin='AssemblyRegion.assemblyHasMacromoleculeId == AssemblyHasMacromolecule.AssemblyHasMacromoleculeId') - - - -class AutoProc(db.Model): - __tablename__ = 'AutoProc' - - autoProcId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - autoProcProgramId = db.Column(db.Integer, index=True, info='Related program item') - spaceGroup = db.Column(db.String(45), info='Space group') - refinedCell_a = db.Column(db.Float, info='Refined cell') - refinedCell_b = db.Column(db.Float, info='Refined cell') - refinedCell_c = db.Column(db.Float, info='Refined cell') - refinedCell_alpha = db.Column(db.Float, info='Refined cell') - refinedCell_beta = db.Column(db.Float, info='Refined cell') - refinedCell_gamma = db.Column(db.Float, info='Refined cell') - recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time') - - - -class AutoProcIntegration(db.Model): - __tablename__ = 'AutoProcIntegration' - - autoProcIntegrationId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='DataCollection item') - autoProcProgramId = db.Column(db.ForeignKey('AutoProcProgram.autoProcProgramId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='Related program item') - startImageNumber = db.Column(db.Integer, info='start image number') - endImageNumber = db.Column(db.Integer, info='end image number') - refinedDetectorDistance = db.Column(db.Float, info='Refined DataCollection.detectorDistance') - refinedXBeam = db.Column(db.Float, info='Refined DataCollection.xBeam') - refinedYBeam = db.Column(db.Float, info='Refined DataCollection.yBeam') - rotationAxisX = db.Column(db.Float, info='Rotation axis') - rotationAxisY = db.Column(db.Float, info='Rotation axis') - rotationAxisZ = db.Column(db.Float, info='Rotation axis') - beamVectorX = db.Column(db.Float, info='Beam vector') - beamVectorY = db.Column(db.Float, info='Beam vector') - beamVectorZ = db.Column(db.Float, info='Beam vector') - cell_a = db.Column(db.Float, info='Unit cell') - cell_b = db.Column(db.Float, info='Unit cell') - cell_c = db.Column(db.Float, info='Unit cell') - cell_alpha = db.Column(db.Float, info='Unit cell') - cell_beta = db.Column(db.Float, info='Unit cell') - cell_gamma = db.Column(db.Float, info='Unit cell') - recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time') - anomalous = db.Column(db.Integer, 
server_default=db.FetchedValue(), info='boolean type:0 noanoum - 1 anoum') - - AutoProcProgram = db.relationship('AutoProcProgram', primaryjoin='AutoProcIntegration.autoProcProgramId == AutoProcProgram.autoProcProgramId') - DataCollection = db.relationship('DataCollection', primaryjoin='AutoProcIntegration.dataCollectionId == DataCollection.dataCollectionId') - - - -class AutoProcProgram(db.Model): - __tablename__ = 'AutoProcProgram' - - autoProcProgramId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - processingCommandLine = db.Column(db.String(255), info='Command line for running the automatic processing') - processingPrograms = db.Column(db.String(255), info='Processing programs (comma separated)') - processingStatus = db.Column(db.Integer, info='success (1) / fail (0)') - processingMessage = db.Column(db.String(255), info='warning, error,...') - processingStartTime = db.Column(db.DateTime, info='Processing start time') - processingEndTime = db.Column(db.DateTime, info='Processing end time') - processingEnvironment = db.Column(db.String(255), info='Cpus, Nodes,...') - recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time') - processingJobId = db.Column(db.ForeignKey('ProcessingJob.processingJobId'), index=True) - dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId'), index=True) - - DataCollection = db.relationship('DataCollection', primaryjoin='AutoProcProgram.dataCollectionId == DataCollection.dataCollectionId') - ProcessingJob = db.relationship('ProcessingJob', primaryjoin='AutoProcProgram.processingJobId == ProcessingJob.processingJobId') - - - -class AutoProcProgramAttachment(db.Model): - __tablename__ = 'AutoProcProgramAttachment' - - autoProcProgramAttachmentId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - autoProcProgramId = db.Column(db.ForeignKey('AutoProcProgram.autoProcProgramId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related autoProcProgram item') - fileType = db.Column(db.ENUM('Log', 'Result', 'Graph', 'Debug'), info='Type of file Attachment') - fileName = db.Column(db.String(255), info='Attachment filename') - filePath = db.Column(db.String(255), info='Attachment filepath to disk storage') - recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time') - importanceRank = db.Column(db.Integer, info='For the particular autoProcProgramId and fileType, indicate the importance of the attachment. 
Higher numbers are more important') - - AutoProcProgram = db.relationship('AutoProcProgram', primaryjoin='AutoProcProgramAttachment.autoProcProgramId == AutoProcProgram.autoProcProgramId') - - - -class AutoProcProgramMessage(db.Model): - __tablename__ = 'AutoProcProgramMessage' - - autoProcProgramMessageId = db.Column(db.Integer, primary_key=True) - autoProcProgramId = db.Column(db.ForeignKey('AutoProcProgram.autoProcProgramId'), index=True) - recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - severity = db.Column(db.ENUM('ERROR', 'WARNING', 'INFO')) - message = db.Column(db.String(200)) - description = db.Column(db.Text) - - AutoProcProgram = db.relationship('AutoProcProgram', primaryjoin='AutoProcProgramMessage.autoProcProgramId == AutoProcProgram.autoProcProgramId') - - - -class AutoProcScaling(db.Model): - __tablename__ = 'AutoProcScaling' - __table_args__ = ( - db.Index('AutoProcScalingIdx1', 'autoProcScalingId', 'autoProcId'), - ) - - autoProcScalingId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - autoProcId = db.Column(db.ForeignKey('AutoProc.autoProcId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='Related autoProc item (used by foreign key)') - recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time') - - AutoProc = db.relationship('AutoProc', primaryjoin='AutoProcScaling.autoProcId == AutoProc.autoProcId') - - - -class AutoProcScalingStatistic(db.Model): - __tablename__ = 'AutoProcScalingStatistics' - - autoProcScalingStatisticsId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - autoProcScalingId = db.Column(db.ForeignKey('AutoProcScaling.autoProcScalingId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='Related autoProcScaling item (used by foreign key)') - scalingStatisticsType = db.Column(db.ENUM('overall', 'innerShell', 'outerShell'), nullable=False, index=True, server_default=db.FetchedValue(), info='Scaling statistics type') - comments = db.Column(db.String(255), info='Comments...') - resolutionLimitLow = db.Column(db.Float, info='Low resolution limit') - resolutionLimitHigh = db.Column(db.Float, info='High resolution limit') - rMerge = db.Column(db.Float, info='Rmerge') - rMeasWithinIPlusIMinus = db.Column(db.Float, info='Rmeas (within I+/I-)') - rMeasAllIPlusIMinus = db.Column(db.Float, info='Rmeas (all I+ & I-)') - rPimWithinIPlusIMinus = db.Column(db.Float, info='Rpim (within I+/I-) ') - rPimAllIPlusIMinus = db.Column(db.Float, info='Rpim (all I+ & I-)') - fractionalPartialBias = db.Column(db.Float, info='Fractional partial bias') - nTotalObservations = db.Column(db.Integer, info='Total number of observations') - nTotalUniqueObservations = db.Column(db.Integer, info='Total number unique') - meanIOverSigI = db.Column(db.Float, info='Mean((I)/sd(I))') - completeness = db.Column(db.Float, info='Completeness') - multiplicity = db.Column(db.Float, info='Multiplicity') - anomalousCompleteness = db.Column(db.Float, info='Anomalous completeness') - anomalousMultiplicity = db.Column(db.Float, info='Anomalous multiplicity') - recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time') - anomalous = db.Column(db.Integer, server_default=db.FetchedValue(), info='boolean type:0 noanoum - 1 anoum') - ccHalf = db.Column(db.Float, info='information from XDS') - ccAnomalous = db.Column(db.Float) - - AutoProcScaling = db.relationship('AutoProcScaling', 
primaryjoin='AutoProcScalingStatistic.autoProcScalingId == AutoProcScaling.autoProcScalingId') - - - -class AutoProcScalingHasInt(db.Model): - __tablename__ = 'AutoProcScaling_has_Int' - __table_args__ = ( - db.Index('AutoProcScalingHasInt_FKIndex3', 'autoProcScalingId', 'autoProcIntegrationId'), - ) - - autoProcScaling_has_IntId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - autoProcScalingId = db.Column(db.ForeignKey('AutoProcScaling.autoProcScalingId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='AutoProcScaling item') - autoProcIntegrationId = db.Column(db.ForeignKey('AutoProcIntegration.autoProcIntegrationId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='AutoProcIntegration item') - recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time') - - AutoProcIntegration = db.relationship('AutoProcIntegration', primaryjoin='AutoProcScalingHasInt.autoProcIntegrationId == AutoProcIntegration.autoProcIntegrationId') - AutoProcScaling = db.relationship('AutoProcScaling', primaryjoin='AutoProcScalingHasInt.autoProcScalingId == AutoProcScaling.autoProcScalingId') - - - -class AutoProcStatus(db.Model): - __tablename__ = 'AutoProcStatus' - - autoProcStatusId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - autoProcIntegrationId = db.Column(db.ForeignKey('AutoProcIntegration.autoProcIntegrationId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - step = db.Column(db.ENUM('Indexing', 'Integration', 'Correction', 'Scaling', 'Importing'), nullable=False, info='autoprocessing step') - status = db.Column(db.ENUM('Launched', 'Successful', 'Failed'), nullable=False, info='autoprocessing status') - comments = db.Column(db.String(1024), info='comments') - bltimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - - AutoProcIntegration = db.relationship('AutoProcIntegration', primaryjoin='AutoProcStatus.autoProcIntegrationId == AutoProcIntegration.autoProcIntegrationId') - - - -class BFComponent(db.Model): - __tablename__ = 'BF_component' - - componentId = db.Column(db.Integer, primary_key=True) - systemId = db.Column(db.ForeignKey('BF_system.systemId'), index=True) - name = db.Column(db.String(100)) - description = db.Column(db.String(200)) - - BF_system = db.relationship('BFSystem', primaryjoin='BFComponent.systemId == BFSystem.systemId') - - - -class BFComponentBeamline(db.Model): - __tablename__ = 'BF_component_beamline' - - component_beamlineId = db.Column(db.Integer, primary_key=True) - componentId = db.Column(db.ForeignKey('BF_component.componentId'), index=True) - beamlinename = db.Column(db.String(20)) - - BF_component = db.relationship('BFComponent', primaryjoin='BFComponentBeamline.componentId == BFComponent.componentId') - - - -class BFFault(db.Model): - __tablename__ = 'BF_fault' - - faultId = db.Column(db.Integer, primary_key=True) - sessionId = db.Column(db.ForeignKey('BLSession.sessionId'), nullable=False, index=True) - owner = db.Column(db.String(50)) - subcomponentId = db.Column(db.ForeignKey('BF_subcomponent.subcomponentId'), index=True) - starttime = db.Column(db.DateTime) - endtime = db.Column(db.DateTime) - beamtimelost = db.Column(db.Integer) - beamtimelost_starttime = db.Column(db.DateTime) - beamtimelost_endtime = db.Column(db.DateTime) - title = db.Column(db.String(200)) - description = db.Column(db.Text) - resolved = db.Column(db.Integer) - resolution = db.Column(db.Text) - attachment = 
db.Column(db.String(200)) - eLogId = db.Column(db.Integer) - assignee = db.Column(db.String(50)) - personId = db.Column(db.ForeignKey('Person.personId'), index=True) - assigneeId = db.Column(db.ForeignKey('Person.personId'), index=True) - - Person = db.relationship('Person', primaryjoin='BFFault.assigneeId == Person.personId') - Person1 = db.relationship('Person', primaryjoin='BFFault.personId == Person.personId') - BLSession = db.relationship('BLSession', primaryjoin='BFFault.sessionId == BLSession.sessionId') - BF_subcomponent = db.relationship('BFSubcomponent', primaryjoin='BFFault.subcomponentId == BFSubcomponent.subcomponentId') - - - -class BFSubcomponent(db.Model): - __tablename__ = 'BF_subcomponent' - - subcomponentId = db.Column(db.Integer, primary_key=True) - componentId = db.Column(db.ForeignKey('BF_component.componentId'), index=True) - name = db.Column(db.String(100)) - description = db.Column(db.String(200)) - - BF_component = db.relationship('BFComponent', primaryjoin='BFSubcomponent.componentId == BFComponent.componentId') - - - -class BFSubcomponentBeamline(db.Model): - __tablename__ = 'BF_subcomponent_beamline' - - subcomponent_beamlineId = db.Column(db.Integer, primary_key=True) - subcomponentId = db.Column(db.ForeignKey('BF_subcomponent.subcomponentId'), index=True) - beamlinename = db.Column(db.String(20)) - - BF_subcomponent = db.relationship('BFSubcomponent', primaryjoin='BFSubcomponentBeamline.subcomponentId == BFSubcomponent.subcomponentId') - - - -class BFSystem(db.Model): - __tablename__ = 'BF_system' - - systemId = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(100)) - description = db.Column(db.String(200)) - - - -class BFSystemBeamline(db.Model): - __tablename__ = 'BF_system_beamline' - - system_beamlineId = db.Column(db.Integer, primary_key=True) - systemId = db.Column(db.ForeignKey('BF_system.systemId'), index=True) - beamlineName = db.Column(db.String(20)) - - BF_system = db.relationship('BFSystem', primaryjoin='BFSystemBeamline.systemId == BFSystem.systemId') - - - -class BLSample(db.Model): - __tablename__ = 'BLSample' - __table_args__ = ( - db.Index('crystalId', 'crystalId', 'containerId'), - ) - - blSampleId = db.Column(db.Integer, primary_key=True) - diffractionPlanId = db.Column(db.ForeignKey('DiffractionPlan.diffractionPlanId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - crystalId = db.Column(db.ForeignKey('Crystal.crystalId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - containerId = db.Column(db.ForeignKey('Container.containerId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - name = db.Column(db.String(45), index=True) - code = db.Column(db.String(45)) - location = db.Column(db.String(45)) - holderLength = db.Column(db.Float(asdecimal=True)) - loopLength = db.Column(db.Float(asdecimal=True)) - loopType = db.Column(db.String(45)) - wireWidth = db.Column(db.Float(asdecimal=True)) - comments = db.Column(db.String(1024)) - completionStage = db.Column(db.String(45)) - structureStage = db.Column(db.String(45)) - publicationStage = db.Column(db.String(45)) - publicationComments = db.Column(db.String(255)) - blSampleStatus = db.Column(db.String(20), index=True) - isInSampleChanger = db.Column(db.Integer) - lastKnownCenteringPosition = db.Column(db.String(255)) - POSITIONID = db.Column(db.Integer) - recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time') - SMILES = db.Column(db.String(400), info='the symbolic description of the 
structure of a chemical compound') - blSubSampleId = db.Column(db.ForeignKey('BLSubSample.blSubSampleId'), index=True) - lastImageURL = db.Column(db.String(255)) - screenComponentGroupId = db.Column(db.ForeignKey('ScreenComponentGroup.screenComponentGroupId'), index=True) - volume = db.Column(db.Float) - dimension1 = db.Column(db.Float(asdecimal=True)) - dimension2 = db.Column(db.Float(asdecimal=True)) - dimension3 = db.Column(db.Float(asdecimal=True)) - shape = db.Column(db.String(15)) - packingFraction = db.Column(db.Float) - preparationTemeprature = db.Column(db.Integer, info='Sample preparation temperature, Units: kelvin') - preparationHumidity = db.Column(db.Float, info='Sample preparation humidity, Units: %') - blottingTime = db.Column(db.Integer, info='Blotting time, Units: sec') - blottingForce = db.Column(db.Float, info='Force used when blotting sample, Units: N?') - blottingDrainTime = db.Column(db.Integer, info='Time sample left to drain after blotting, Units: sec') - support = db.Column(db.String(50), info='Sample support material') - subLocation = db.Column(db.SmallInteger, info="Indicates the sample's location on a multi-sample pin, where 1 is closest to the pin base") - - BLSubSample = db.relationship('BLSubSample', primaryjoin='BLSample.blSubSampleId == BLSubSample.blSubSampleId') - Container = db.relationship('Container', primaryjoin='BLSample.containerId == Container.containerId') - Crystal = db.relationship('Crystal', primaryjoin='BLSample.crystalId == Crystal.crystalId') - DiffractionPlan = db.relationship('DiffractionPlan', primaryjoin='BLSample.diffractionPlanId == DiffractionPlan.diffractionPlanId') - ScreenComponentGroup = db.relationship('ScreenComponentGroup', primaryjoin='BLSample.screenComponentGroupId == ScreenComponentGroup.screenComponentGroupId') - Project = db.relationship('Project', secondary='Project_has_BLSample') - - - -class BLSampleGroup(db.Model): - __tablename__ = 'BLSampleGroup' - - blSampleGroupId = db.Column(db.Integer, primary_key=True) - - - -class BLSampleGroupHasBLSample(db.Model): - __tablename__ = 'BLSampleGroup_has_BLSample' - - blSampleGroupId = db.Column(db.ForeignKey('BLSampleGroup.blSampleGroupId'), primary_key=True, nullable=False) - blSampleId = db.Column(db.ForeignKey('BLSample.blSampleId'), primary_key=True, nullable=False, index=True) - groupOrder = db.Column(db.Integer) - type = db.Column(db.ENUM('background', 'container', 'sample', 'calibrant')) - - BLSampleGroup = db.relationship('BLSampleGroup', primaryjoin='BLSampleGroupHasBLSample.blSampleGroupId == BLSampleGroup.blSampleGroupId') - BLSample = db.relationship('BLSample', primaryjoin='BLSampleGroupHasBLSample.blSampleId == BLSample.blSampleId') - - - -class BLSampleImage(db.Model): - __tablename__ = 'BLSampleImage' - - blSampleImageId = db.Column(db.Integer, primary_key=True) - blSampleId = db.Column(db.ForeignKey('BLSample.blSampleId'), nullable=False, index=True) - micronsPerPixelX = db.Column(db.Float) - micronsPerPixelY = db.Column(db.Float) - imageFullPath = db.Column(db.String(255)) - blSampleImageScoreId = db.Column(db.ForeignKey('BLSampleImageScore.blSampleImageScoreId'), index=True) - comments = db.Column(db.String(255)) - blTimeStamp = db.Column(db.DateTime) - containerInspectionId = db.Column(db.ForeignKey('ContainerInspection.containerInspectionId'), index=True) - modifiedTimeStamp = db.Column(db.DateTime) - - BLSample = db.relationship('BLSample', primaryjoin='BLSampleImage.blSampleId == BLSample.blSampleId') - BLSampleImageScore = 
db.relationship('BLSampleImageScore', primaryjoin='BLSampleImage.blSampleImageScoreId == BLSampleImageScore.blSampleImageScoreId') - ContainerInspection = db.relationship('ContainerInspection', primaryjoin='BLSampleImage.containerInspectionId == ContainerInspection.containerInspectionId') - - - -class BLSampleImageAnalysi(db.Model): - __tablename__ = 'BLSampleImageAnalysis' - - blSampleImageAnalysisId = db.Column(db.Integer, primary_key=True) - blSampleImageId = db.Column(db.ForeignKey('BLSampleImage.blSampleImageId'), index=True) - oavSnapshotBefore = db.Column(db.String(255)) - oavSnapshotAfter = db.Column(db.String(255)) - deltaX = db.Column(db.Integer) - deltaY = db.Column(db.Integer) - goodnessOfFit = db.Column(db.Float) - scaleFactor = db.Column(db.Float) - resultCode = db.Column(db.String(15)) - matchStartTimeStamp = db.Column(db.DateTime, server_default=db.FetchedValue()) - matchEndTimeStamp = db.Column(db.DateTime) - - BLSampleImage = db.relationship('BLSampleImage', primaryjoin='BLSampleImageAnalysi.blSampleImageId == BLSampleImage.blSampleImageId') - - - -class BLSampleImageAutoScoreClas(db.Model): - __tablename__ = 'BLSampleImageAutoScoreClass' - - blSampleImageAutoScoreClassId = db.Column(db.Integer, primary_key=True) - blSampleImageAutoScoreSchemaId = db.Column(db.ForeignKey('BLSampleImageAutoScoreSchema.blSampleImageAutoScoreSchemaId'), index=True) - scoreClass = db.Column(db.String(15), nullable=False, info='Thing being scored e.g. crystal, precipitant') - - BLSampleImageAutoScoreSchema = db.relationship('BLSampleImageAutoScoreSchema', primaryjoin='BLSampleImageAutoScoreClas.blSampleImageAutoScoreSchemaId == BLSampleImageAutoScoreSchema.blSampleImageAutoScoreSchemaId') - - - -class BLSampleImageAutoScoreSchema(db.Model): - __tablename__ = 'BLSampleImageAutoScoreSchema' - - blSampleImageAutoScoreSchemaId = db.Column(db.Integer, primary_key=True) - schemaName = db.Column(db.String(25), nullable=False, info='Name of the schema e.g. 
Hampton, MARCO') - enabled = db.Column(db.Integer, server_default=db.FetchedValue(), info='Whether this schema is enabled (could be configurable in the UI)') - - - -class BLSampleImageMeasurement(db.Model): - __tablename__ = 'BLSampleImageMeasurement' - - blSampleImageMeasurementId = db.Column(db.Integer, primary_key=True) - blSampleImageId = db.Column(db.ForeignKey('BLSampleImage.blSampleImageId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - blSubSampleId = db.Column(db.ForeignKey('BLSubSample.blSubSampleId'), index=True) - startPosX = db.Column(db.Float(asdecimal=True)) - startPosY = db.Column(db.Float(asdecimal=True)) - endPosX = db.Column(db.Float(asdecimal=True)) - endPosY = db.Column(db.Float(asdecimal=True)) - blTimeStamp = db.Column(db.DateTime) - - BLSampleImage = db.relationship('BLSampleImage', primaryjoin='BLSampleImageMeasurement.blSampleImageId == BLSampleImage.blSampleImageId') - BLSubSample = db.relationship('BLSubSample', primaryjoin='BLSampleImageMeasurement.blSubSampleId == BLSubSample.blSubSampleId') - - - -class BLSampleImageScore(db.Model): - __tablename__ = 'BLSampleImageScore' - - blSampleImageScoreId = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(45)) - score = db.Column(db.Float) - colour = db.Column(db.String(15)) - - - -class BLSampleImageHasAutoScoreClas(db.Model): - __tablename__ = 'BLSampleImage_has_AutoScoreClass' - - blSampleImageId = db.Column(db.ForeignKey('BLSampleImage.blSampleImageId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False) - blSampleImageAutoScoreClassId = db.Column(db.ForeignKey('BLSampleImageAutoScoreClass.blSampleImageAutoScoreClassId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True) - probability = db.Column(db.Float) - - BLSampleImageAutoScoreClas = db.relationship('BLSampleImageAutoScoreClas', primaryjoin='BLSampleImageHasAutoScoreClas.blSampleImageAutoScoreClassId == BLSampleImageAutoScoreClas.blSampleImageAutoScoreClassId') - BLSampleImage = db.relationship('BLSampleImage', primaryjoin='BLSampleImageHasAutoScoreClas.blSampleImageId == BLSampleImage.blSampleImageId') - - - -class BLSampleTypeHasComponent(db.Model): - __tablename__ = 'BLSampleType_has_Component' - - blSampleTypeId = db.Column(db.ForeignKey('Crystal.crystalId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False) - componentId = db.Column(db.ForeignKey('Protein.proteinId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True) - abundance = db.Column(db.Float) - - Crystal = db.relationship('Crystal', primaryjoin='BLSampleTypeHasComponent.blSampleTypeId == Crystal.crystalId') - Protein = db.relationship('Protein', primaryjoin='BLSampleTypeHasComponent.componentId == Protein.proteinId') - - - -class BLSampleHasDataCollectionPlan(db.Model): - __tablename__ = 'BLSample_has_DataCollectionPlan' - - blSampleId = db.Column(db.ForeignKey('BLSample.blSampleId'), primary_key=True, nullable=False) - dataCollectionPlanId = db.Column(db.ForeignKey('DiffractionPlan.diffractionPlanId'), primary_key=True, nullable=False, index=True) - planOrder = db.Column(db.Integer) - - BLSample = db.relationship('BLSample', primaryjoin='BLSampleHasDataCollectionPlan.blSampleId == BLSample.blSampleId') - DiffractionPlan = db.relationship('DiffractionPlan', primaryjoin='BLSampleHasDataCollectionPlan.dataCollectionPlanId == DiffractionPlan.diffractionPlanId') - - - -class BLSampleHasEnergyScan(db.Model): - __tablename__ = 
'BLSample_has_EnergyScan' - - blSampleId = db.Column(db.ForeignKey('BLSample.blSampleId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - energyScanId = db.Column(db.ForeignKey('EnergyScan.energyScanId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - blSampleHasEnergyScanId = db.Column(db.Integer, primary_key=True) - - BLSample = db.relationship('BLSample', primaryjoin='BLSampleHasEnergyScan.blSampleId == BLSample.blSampleId') - EnergyScan = db.relationship('EnergyScan', primaryjoin='BLSampleHasEnergyScan.energyScanId == EnergyScan.energyScanId') - - - -class BLSession(db.Model): - __tablename__ = 'BLSession' - - sessionId = db.Column(db.Integer, primary_key=True) - beamLineSetupId = db.Column(db.ForeignKey('BeamLineSetup.beamLineSetupId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - proposalId = db.Column(db.ForeignKey('Proposal.proposalId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - beamCalendarId = db.Column(db.ForeignKey('BeamCalendar.beamCalendarId'), index=True) - projectCode = db.Column(db.String(45)) - startDate = db.Column(db.DateTime, index=True) - endDate = db.Column(db.DateTime, index=True) - beamLineName = db.Column(db.String(45), index=True) - scheduled = db.Column(db.Integer) - nbShifts = db.Column(db.Integer) - comments = db.Column(db.String(2000)) - beamLineOperator = db.Column(db.String(45)) - bltimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - visit_number = db.Column(db.Integer, server_default=db.FetchedValue()) - usedFlag = db.Column(db.Integer, info='indicates if session has Datacollections or XFE or EnergyScans attached') - sessionTitle = db.Column(db.String(255), info='fx accounts only') - structureDeterminations = db.Column(db.Float) - dewarTransport = db.Column(db.Float) - databackupFrance = db.Column(db.Float, info='data backup and express delivery France') - databackupEurope = db.Column(db.Float, info='data backup and express delivery Europe') - expSessionPk = db.Column(db.Integer, info='smis session Pk ') - operatorSiteNumber = db.Column(db.String(10), index=True, info='matricule site') - lastUpdate = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='last update timestamp: by default the end of the session, the last collect...') - protectedData = db.Column(db.String(1024), info='indicates if the data are protected or not') - externalId = db.Column(db.BINARY(16)) - archived = db.Column(db.Integer, server_default=db.FetchedValue(), info='The data for the session is archived and no longer available on disk') - - BeamCalendar = db.relationship('BeamCalendar', primaryjoin='BLSession.beamCalendarId == BeamCalendar.beamCalendarId') - BeamLineSetup = db.relationship('BeamLineSetup', primaryjoin='BLSession.beamLineSetupId == BeamLineSetup.beamLineSetupId') - Proposal = db.relationship('Proposal', primaryjoin='BLSession.proposalId == Proposal.proposalId') - Shipping = db.relationship('Shipping', secondary='ShippingHasSession') - - - -class BLSessionHasSCPosition(db.Model): - __tablename__ = 'BLSession_has_SCPosition' - - blsessionhasscpositionid = db.Column(db.Integer, primary_key=True) - blsessionid = db.Column(db.ForeignKey('BLSession.sessionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - scContainer = db.Column(db.SmallInteger, info='Position of container within sample changer') - 
containerPosition = db.Column(db.SmallInteger, info='Position of sample within container') - - BLSession = db.relationship('BLSession', primaryjoin='BLSessionHasSCPosition.blsessionid == BLSession.sessionId') - - - -class BLSubSample(db.Model): - __tablename__ = 'BLSubSample' - - blSubSampleId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - blSampleId = db.Column(db.ForeignKey('BLSample.blSampleId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='sample') - diffractionPlanId = db.Column(db.ForeignKey('DiffractionPlan.diffractionPlanId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='eventually diffractionPlan') - blSampleImageId = db.Column(db.ForeignKey('BLSampleImage.blSampleImageId'), index=True) - positionId = db.Column(db.ForeignKey('Position.positionId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='position of the subsample') - position2Id = db.Column(db.ForeignKey('Position.positionId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - motorPositionId = db.Column(db.ForeignKey('MotorPosition.motorPositionId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='motor position') - blSubSampleUUID = db.Column(db.String(45), info='uuid of the blsubsample') - imgFileName = db.Column(db.String(255), info='image filename') - imgFilePath = db.Column(db.String(1024), info='url image') - comments = db.Column(db.String(1024), info='comments') - recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time') - - BLSample = db.relationship('BLSample', primaryjoin='BLSubSample.blSampleId == BLSample.blSampleId') - BLSampleImage = db.relationship('BLSampleImage', primaryjoin='BLSubSample.blSampleImageId == BLSampleImage.blSampleImageId') - DiffractionPlan = db.relationship('DiffractionPlan', primaryjoin='BLSubSample.diffractionPlanId == DiffractionPlan.diffractionPlanId') - MotorPosition = db.relationship('MotorPosition', primaryjoin='BLSubSample.motorPositionId == MotorPosition.motorPositionId') - Position = db.relationship('Position', primaryjoin='BLSubSample.position2Id == Position.positionId') - Position1 = db.relationship('Position', primaryjoin='BLSubSample.positionId == Position.positionId') - - - -class BeamAperture(db.Model): - __tablename__ = 'BeamApertures' - - beamAperturesid = db.Column(db.Integer, primary_key=True) - beamlineStatsId = db.Column(db.ForeignKey('BeamlineStats.beamlineStatsId', ondelete='CASCADE'), index=True) - flux = db.Column(db.Float(asdecimal=True)) - x = db.Column(db.Float) - y = db.Column(db.Float) - apertureSize = db.Column(db.SmallInteger) - - BeamlineStat = db.relationship('BeamlineStat', primaryjoin='BeamAperture.beamlineStatsId == BeamlineStat.beamlineStatsId') - - - -class BeamCalendar(db.Model): - __tablename__ = 'BeamCalendar' - - beamCalendarId = db.Column(db.Integer, primary_key=True) - run = db.Column(db.String(7), nullable=False) - beamStatus = db.Column(db.String(24), nullable=False) - startDate = db.Column(db.DateTime, nullable=False) - endDate = db.Column(db.DateTime, nullable=False) - - - -class BeamCentre(db.Model): - __tablename__ = 'BeamCentres' - - beamCentresid = db.Column(db.Integer, primary_key=True) - beamlineStatsId = db.Column(db.ForeignKey('BeamlineStats.beamlineStatsId', ondelete='CASCADE'), index=True) - x = db.Column(db.Float) - y = db.Column(db.Float) - zoom = db.Column(db.Integer) - - BeamlineStat = db.relationship('BeamlineStat', 
primaryjoin='BeamCentre.beamlineStatsId == BeamlineStat.beamlineStatsId') - - - -class BeamLineSetup(db.Model): - __tablename__ = 'BeamLineSetup' - - beamLineSetupId = db.Column(db.Integer, primary_key=True) - detectorId = db.Column(db.ForeignKey('Detector.detectorId'), index=True) - synchrotronMode = db.Column(db.String(255)) - undulatorType1 = db.Column(db.String(45)) - undulatorType2 = db.Column(db.String(45)) - undulatorType3 = db.Column(db.String(45)) - focalSpotSizeAtSample = db.Column(db.Float) - focusingOptic = db.Column(db.String(255)) - beamDivergenceHorizontal = db.Column(db.Float) - beamDivergenceVertical = db.Column(db.Float) - polarisation = db.Column(db.Float) - monochromatorType = db.Column(db.String(255)) - setupDate = db.Column(db.DateTime) - synchrotronName = db.Column(db.String(255)) - maxExpTimePerDataCollection = db.Column(db.Float(asdecimal=True)) - maxExposureTimePerImage = db.Column(db.Float, info='unit: seconds') - minExposureTimePerImage = db.Column(db.Float(asdecimal=True)) - goniostatMaxOscillationSpeed = db.Column(db.Float(asdecimal=True)) - goniostatMaxOscillationWidth = db.Column(db.Float(asdecimal=True), info='unit: degrees') - goniostatMinOscillationWidth = db.Column(db.Float(asdecimal=True)) - maxTransmission = db.Column(db.Float(asdecimal=True), info='unit: percentage') - minTransmission = db.Column(db.Float(asdecimal=True)) - recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time') - CS = db.Column(db.Float, info='Spherical Aberration, Units: mm?') - beamlineName = db.Column(db.String(50), info='Beamline that this setup relates to') - beamSizeXMin = db.Column(db.Float, info='unit: um') - beamSizeXMax = db.Column(db.Float, info='unit: um') - beamSizeYMin = db.Column(db.Float, info='unit: um') - beamSizeYMax = db.Column(db.Float, info='unit: um') - energyMin = db.Column(db.Float, info='unit: eV') - energyMax = db.Column(db.Float, info='unit: eV') - omegaMin = db.Column(db.Float, info='unit: degrees') - omegaMax = db.Column(db.Float, info='unit: degrees') - kappaMin = db.Column(db.Float, info='unit: degrees') - kappaMax = db.Column(db.Float, info='unit: degrees') - phiMin = db.Column(db.Float, info='unit: degrees') - phiMax = db.Column(db.Float, info='unit: degrees') - active = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue()) - numberOfImagesMax = db.Column(db.Integer) - numberOfImagesMin = db.Column(db.Integer) - boxSizeXMin = db.Column(db.Float(asdecimal=True), info='For gridscans, unit: um') - boxSizeXMax = db.Column(db.Float(asdecimal=True), info='For gridscans, unit: um') - boxSizeYMin = db.Column(db.Float(asdecimal=True), info='For gridscans, unit: um') - boxSizeYMax = db.Column(db.Float(asdecimal=True), info='For gridscans, unit: um') - monoBandwidthMin = db.Column(db.Float(asdecimal=True), info='unit: percentage') - monoBandwidthMax = db.Column(db.Float(asdecimal=True), info='unit: percentage') - - Detector = db.relationship('Detector', primaryjoin='BeamLineSetup.detectorId == Detector.detectorId') - - - -class BeamlineAction(db.Model): - __tablename__ = 'BeamlineAction' - - beamlineActionId = db.Column(db.Integer, primary_key=True) - sessionId = db.Column(db.ForeignKey('BLSession.sessionId'), index=True) - startTimestamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - endTimestamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - message = db.Column(db.String(255)) - parameter = 
db.Column(db.String(50)) - value = db.Column(db.String(30)) - loglevel = db.Column(db.ENUM('DEBUG', 'CRITICAL', 'INFO')) - status = db.Column(db.ENUM('PAUSED', 'RUNNING', 'TERMINATED', 'COMPLETE', 'ERROR', 'EPICSFAIL')) - - BLSession = db.relationship('BLSession', primaryjoin='BeamlineAction.sessionId == BLSession.sessionId') - - - -class BeamlineStat(db.Model): - __tablename__ = 'BeamlineStats' - - beamlineStatsId = db.Column(db.Integer, primary_key=True) - beamline = db.Column(db.String(10)) - recordTimeStamp = db.Column(db.DateTime) - ringCurrent = db.Column(db.Float) - energy = db.Column(db.Float) - gony = db.Column(db.Float) - beamW = db.Column(db.Float) - beamH = db.Column(db.Float) - flux = db.Column(db.Float(asdecimal=True)) - scanFileW = db.Column(db.String(255)) - scanFileH = db.Column(db.String(255)) - - - -class Buffer(db.Model): - __tablename__ = 'Buffer' - - bufferId = db.Column(db.Integer, primary_key=True) - BLSESSIONID = db.Column(db.Integer) - safetyLevelId = db.Column(db.ForeignKey('SafetyLevel.safetyLevelId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - name = db.Column(db.String(45)) - acronym = db.Column(db.String(45)) - pH = db.Column(db.String(45)) - composition = db.Column(db.String(45)) - comments = db.Column(db.String(512)) - proposalId = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue()) - - SafetyLevel = db.relationship('SafetyLevel', primaryjoin='Buffer.safetyLevelId == SafetyLevel.safetyLevelId') - - - -class BufferHasAdditive(db.Model): - __tablename__ = 'BufferHasAdditive' - - bufferHasAdditiveId = db.Column(db.Integer, primary_key=True) - bufferId = db.Column(db.ForeignKey('Buffer.bufferId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - additiveId = db.Column(db.ForeignKey('Additive.additiveId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - measurementUnitId = db.Column(db.ForeignKey('MeasurementUnit.measurementUnitId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - quantity = db.Column(db.String(45)) - - Additive = db.relationship('Additive', primaryjoin='BufferHasAdditive.additiveId == Additive.additiveId') - Buffer = db.relationship('Buffer', primaryjoin='BufferHasAdditive.bufferId == Buffer.bufferId') - MeasurementUnit = db.relationship('MeasurementUnit', primaryjoin='BufferHasAdditive.measurementUnitId == MeasurementUnit.measurementUnitId') - - - -class CTF(db.Model): - __tablename__ = 'CTF' - - ctfId = db.Column(db.Integer, primary_key=True) - motionCorrectionId = db.Column(db.ForeignKey('MotionCorrection.motionCorrectionId'), index=True) - autoProcProgramId = db.Column(db.ForeignKey('AutoProcProgram.autoProcProgramId'), index=True) - boxSizeX = db.Column(db.Float, info='Box size in x, Units: pixels') - boxSizeY = db.Column(db.Float, info='Box size in y, Units: pixels') - minResolution = db.Column(db.Float, info='Minimum resolution for CTF, Units: A') - maxResolution = db.Column(db.Float, info='Units: A') - minDefocus = db.Column(db.Float, info='Units: A') - maxDefocus = db.Column(db.Float, info='Units: A') - defocusStepSize = db.Column(db.Float, info='Units: A') - astigmatism = db.Column(db.Float, info='Units: A') - astigmatismAngle = db.Column(db.Float, info='Units: deg?') - estimatedResolution = db.Column(db.Float, info='Units: A') - estimatedDefocus = db.Column(db.Float, info='Units: A') - amplitudeContrast = db.Column(db.Float, info='Units: %?') - ccValue = db.Column(db.Float, info='Correlation value') - fftTheoreticalFullPath = db.Column(db.String(255), 
info='Full path to the jpg image of the simulated FFT') - comments = db.Column(db.String(255)) - - AutoProcProgram = db.relationship('AutoProcProgram', primaryjoin='CTF.autoProcProgramId == AutoProcProgram.autoProcProgramId') - MotionCorrection = db.relationship('MotionCorrection', primaryjoin='CTF.motionCorrectionId == MotionCorrection.motionCorrectionId') - - - -class CalendarHash(db.Model): - __tablename__ = 'CalendarHash' - - calendarHashId = db.Column(db.Integer, primary_key=True) - ckey = db.Column(db.String(50)) - hash = db.Column(db.String(128)) - beamline = db.Column(db.Integer) - - - -class ComponentLattice(db.Model): - __tablename__ = 'ComponentLattice' - - componentLatticeId = db.Column(db.Integer, primary_key=True) - componentId = db.Column(db.ForeignKey('Protein.proteinId'), index=True) - spaceGroup = db.Column(db.String(20)) - cell_a = db.Column(db.Float(asdecimal=True)) - cell_b = db.Column(db.Float(asdecimal=True)) - cell_c = db.Column(db.Float(asdecimal=True)) - cell_alpha = db.Column(db.Float(asdecimal=True)) - cell_beta = db.Column(db.Float(asdecimal=True)) - cell_gamma = db.Column(db.Float(asdecimal=True)) - - Protein = db.relationship('Protein', primaryjoin='ComponentLattice.componentId == Protein.proteinId') - - - -class ComponentSubType(db.Model): - __tablename__ = 'ComponentSubType' - - componentSubTypeId = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(31), nullable=False) - hasPh = db.Column(db.Integer, server_default=db.FetchedValue()) - - - -class ComponentType(db.Model): - __tablename__ = 'ComponentType' - - componentTypeId = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(31), nullable=False) - - - -t_Component_has_SubType = db.Table( - 'Component_has_SubType', - db.Column('componentId', db.ForeignKey('Protein.proteinId', ondelete='CASCADE'), primary_key=True, nullable=False), - db.Column('componentSubTypeId', db.ForeignKey('ComponentSubType.componentSubTypeId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -class ConcentrationType(db.Model): - __tablename__ = 'ConcentrationType' - - concentrationTypeId = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(31), nullable=False) - symbol = db.Column(db.String(8), nullable=False) - - - -class Container(db.Model): - __tablename__ = 'Container' - - containerId = db.Column(db.Integer, primary_key=True) - dewarId = db.Column(db.ForeignKey('Dewar.dewarId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - code = db.Column(db.String(45)) - containerType = db.Column(db.String(20)) - capacity = db.Column(db.Integer) - sampleChangerLocation = db.Column(db.String(20)) - containerStatus = db.Column(db.String(45), index=True) - bltimeStamp = db.Column(db.DateTime) - beamlineLocation = db.Column(db.String(20), index=True) - screenId = db.Column(db.ForeignKey('Screen.screenId'), index=True) - scheduleId = db.Column(db.ForeignKey('Schedule.scheduleId'), index=True) - barcode = db.Column(db.String(45), unique=True) - imagerId = db.Column(db.ForeignKey('Imager.imagerId'), index=True) - sessionId = db.Column(db.ForeignKey('BLSession.sessionId', ondelete='SET NULL', onupdate='CASCADE'), index=True) - ownerId = db.Column(db.ForeignKey('Person.personId'), index=True) - requestedImagerId = db.Column(db.ForeignKey('Imager.imagerId'), index=True) - requestedReturn = db.Column(db.Integer, server_default=db.FetchedValue(), info='True for requesting return, False means container will be disposed') - comments = 
db.Column(db.String(255))
-    experimentType = db.Column(db.String(20))
-    storageTemperature = db.Column(db.Float)
-    containerRegistryId = db.Column(db.ForeignKey('ContainerRegistry.containerRegistryId'), index=True)
-
-    ContainerRegistry = db.relationship('ContainerRegistry', primaryjoin='Container.containerRegistryId == ContainerRegistry.containerRegistryId')
-    Dewar = db.relationship('Dewar', primaryjoin='Container.dewarId == Dewar.dewarId')
-    Imager = db.relationship('Imager', primaryjoin='Container.imagerId == Imager.imagerId')
-    Person = db.relationship('Person', primaryjoin='Container.ownerId == Person.personId')
-    Imager1 = db.relationship('Imager', primaryjoin='Container.requestedImagerId == Imager.imagerId')
-    Schedule = db.relationship('Schedule', primaryjoin='Container.scheduleId == Schedule.scheduleId')
-    Screen = db.relationship('Screen', primaryjoin='Container.screenId == Screen.screenId')
-    BLSession = db.relationship('BLSession', primaryjoin='Container.sessionId == BLSession.sessionId')
-
-
-class ContainerHistory(db.Model):
-    __tablename__ = 'ContainerHistory'
-
-    containerHistoryId = db.Column(db.Integer, primary_key=True)
-    containerId = db.Column(db.ForeignKey('Container.containerId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    location = db.Column(db.String(45))
-    blTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
-    status = db.Column(db.String(45))
-    beamlineName = db.Column(db.String(20))
-
-    Container = db.relationship('Container', primaryjoin='ContainerHistory.containerId == Container.containerId')
-
-
-class ContainerInspection(db.Model):
-    __tablename__ = 'ContainerInspection'
-
-    containerInspectionId = db.Column(db.Integer, primary_key=True)
-    containerId = db.Column(db.ForeignKey('Container.containerId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    inspectionTypeId = db.Column(db.ForeignKey('InspectionType.inspectionTypeId'), nullable=False, index=True)
-    imagerId = db.Column(db.ForeignKey('Imager.imagerId'), index=True)
-    temperature = db.Column(db.Float)
-    blTimeStamp = db.Column(db.DateTime)
-    scheduleComponentid = db.Column(db.ForeignKey('ScheduleComponent.scheduleComponentId'), index=True)
-    state = db.Column(db.String(20))
-    priority = db.Column(db.SmallInteger)
-    manual = db.Column(db.Integer)
-    scheduledTimeStamp = db.Column(db.DateTime)
-    completedTimeStamp = db.Column(db.DateTime)
-
-    Container = db.relationship('Container', primaryjoin='ContainerInspection.containerId == Container.containerId')
-    Imager = db.relationship('Imager', primaryjoin='ContainerInspection.imagerId == Imager.imagerId')
-    InspectionType = db.relationship('InspectionType', primaryjoin='ContainerInspection.inspectionTypeId == InspectionType.inspectionTypeId')
-    ScheduleComponent = db.relationship('ScheduleComponent', primaryjoin='ContainerInspection.scheduleComponentid == ScheduleComponent.scheduleComponentId')
-
-
-class ContainerQueue(db.Model):
-    __tablename__ = 'ContainerQueue'
-
-    containerQueueId = db.Column(db.Integer, primary_key=True)
-    containerId = db.Column(db.ForeignKey('Container.containerId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    personId = db.Column(db.ForeignKey('Person.personId', onupdate='CASCADE'), index=True)
-    createdTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
-    completedTimeStamp = db.Column(db.DateTime)
-
-    Container = db.relationship('Container', primaryjoin='ContainerQueue.containerId == Container.containerId')
-    Person = db.relationship('Person', primaryjoin='ContainerQueue.personId == Person.personId')
-
-
-class ContainerQueueSample(db.Model):
-    __tablename__ = 'ContainerQueueSample'
-
-    containerQueueSampleId = db.Column(db.Integer, primary_key=True)
-    containerQueueId = db.Column(db.ForeignKey('ContainerQueue.containerQueueId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    blSubSampleId = db.Column(db.ForeignKey('BLSubSample.blSubSampleId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-
-    BLSubSample = db.relationship('BLSubSample', primaryjoin='ContainerQueueSample.blSubSampleId == BLSubSample.blSubSampleId')
-    ContainerQueue = db.relationship('ContainerQueue', primaryjoin='ContainerQueueSample.containerQueueId == ContainerQueue.containerQueueId')
-
-
-class ContainerRegistry(db.Model):
-    __tablename__ = 'ContainerRegistry'
-
-    containerRegistryId = db.Column(db.Integer, primary_key=True)
-    barcode = db.Column(db.String(20))
-    comments = db.Column(db.String(255))
-    recordTimestamp = db.Column(db.DateTime, server_default=db.FetchedValue())
-
-
-class ContainerRegistryHasProposal(db.Model):
-    __tablename__ = 'ContainerRegistry_has_Proposal'
-    __table_args__ = (
-        db.Index('containerRegistryId', 'containerRegistryId', 'proposalId'),
-    )
-
-    containerRegistryHasProposalId = db.Column(db.Integer, primary_key=True)
-    containerRegistryId = db.Column(db.ForeignKey('ContainerRegistry.containerRegistryId'))
-    proposalId = db.Column(db.ForeignKey('Proposal.proposalId'), index=True)
-    personId = db.Column(db.ForeignKey('Person.personId'), index=True, info='Person registering the container')
-    recordTimestamp = db.Column(db.DateTime, server_default=db.FetchedValue())
-
-    ContainerRegistry = db.relationship('ContainerRegistry', primaryjoin='ContainerRegistryHasProposal.containerRegistryId == ContainerRegistry.containerRegistryId')
-    Person = db.relationship('Person', primaryjoin='ContainerRegistryHasProposal.personId == Person.personId')
-    Proposal = db.relationship('Proposal', primaryjoin='ContainerRegistryHasProposal.proposalId == Proposal.proposalId')
-
-
-class ContainerReport(db.Model):
-    __tablename__ = 'ContainerReport'
-
-    containerReportId = db.Column(db.Integer, primary_key=True)
-    containerRegistryId = db.Column(db.ForeignKey('ContainerRegistry.containerRegistryId'), index=True)
-    personId = db.Column(db.ForeignKey('Person.personId'), index=True, info='Person making report')
-    report = db.Column(db.Text)
-    attachmentFilePath = db.Column(db.String(255))
-    recordTimestamp = db.Column(db.DateTime)
-
-    ContainerRegistry = db.relationship('ContainerRegistry', primaryjoin='ContainerReport.containerRegistryId == ContainerRegistry.containerRegistryId')
-    Person = db.relationship('Person', primaryjoin='ContainerReport.personId == Person.personId')
-
-
-class CourierTermsAccepted(db.Model):
-    __tablename__ = 'CourierTermsAccepted'
-
-    courierTermsAcceptedId = db.Column(db.Integer, primary_key=True)
-    proposalId = db.Column(db.ForeignKey('Proposal.proposalId'), nullable=False, index=True)
-    personId = db.Column(db.ForeignKey('Person.personId'), nullable=False, index=True)
-    shippingName = db.Column(db.String(100))
-    timestamp = db.Column(db.DateTime, server_default=db.FetchedValue())
-    shippingId = db.Column(db.ForeignKey('Shipping.shippingId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-
-    Person = db.relationship('Person', primaryjoin='CourierTermsAccepted.personId == Person.personId')
-    Proposal = db.relationship('Proposal', primaryjoin='CourierTermsAccepted.proposalId == Proposal.proposalId')
-    Shipping = db.relationship('Shipping', primaryjoin='CourierTermsAccepted.shippingId == Shipping.shippingId')
-
-
-class Crystal(db.Model):
-    __tablename__ = 'Crystal'
-
-    crystalId = db.Column(db.Integer, primary_key=True)
-    diffractionPlanId = db.Column(db.ForeignKey('DiffractionPlan.diffractionPlanId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    proteinId = db.Column(db.ForeignKey('Protein.proteinId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue())
-    crystalUUID = db.Column(db.String(45))
-    name = db.Column(db.String(255))
-    spaceGroup = db.Column(db.String(20))
-    morphology = db.Column(db.String(255))
-    color = db.Column(db.String(45))
-    size_X = db.Column(db.Float(asdecimal=True))
-    size_Y = db.Column(db.Float(asdecimal=True))
-    size_Z = db.Column(db.Float(asdecimal=True))
-    cell_a = db.Column(db.Float(asdecimal=True))
-    cell_b = db.Column(db.Float(asdecimal=True))
-    cell_c = db.Column(db.Float(asdecimal=True))
-    cell_alpha = db.Column(db.Float(asdecimal=True))
-    cell_beta = db.Column(db.Float(asdecimal=True))
-    cell_gamma = db.Column(db.Float(asdecimal=True))
-    comments = db.Column(db.String(255))
-    pdbFileName = db.Column(db.String(255), info='pdb file name')
-    pdbFilePath = db.Column(db.String(1024), info='pdb file path')
-    recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time')
-    abundance = db.Column(db.Float)
-    theoreticalDensity = db.Column(db.Float)
-
-    DiffractionPlan = db.relationship('DiffractionPlan', primaryjoin='Crystal.diffractionPlanId == DiffractionPlan.diffractionPlanId')
-    Protein = db.relationship('Protein', primaryjoin='Crystal.proteinId == Protein.proteinId')
-
-
-class CrystalHasUUID(db.Model):
-    __tablename__ = 'Crystal_has_UUID'
-
-    crystal_has_UUID_Id = db.Column(db.Integer, primary_key=True)
-    crystalId = db.Column(db.ForeignKey('Crystal.crystalId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    UUID = db.Column(db.String(45), index=True)
-    imageURL = db.Column(db.String(255))
-
-    Crystal = db.relationship('Crystal', primaryjoin='CrystalHasUUID.crystalId == Crystal.crystalId')
-
-
-class DataAcquisition(db.Model):
-    __tablename__ = 'DataAcquisition'
-
-    dataAcquisitionId = db.Column(db.Integer, primary_key=True)
-    sampleCellId = db.Column(db.Integer, nullable=False)
-    framesCount = db.Column(db.String(45))
-    energy = db.Column(db.String(45))
-    waitTime = db.Column(db.String(45))
-    detectorDistance = db.Column(db.String(45))
-
-
-class DataCollection(db.Model):
-    __tablename__ = 'DataCollection'
-
-    dataCollectionId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    BLSAMPLEID = db.Column(db.Integer, index=True)
-    SESSIONID = db.Column(db.Integer, index=True, server_default=db.FetchedValue())
-    experimenttype = db.Column(db.String(24))
-    dataCollectionNumber = db.Column(db.Integer, index=True)
-    startTime = db.Column(db.DateTime, index=True, info='Start time of the dataCollection')
-    endTime = db.Column(db.DateTime, info='end time of the dataCollection')
-    runStatus = db.Column(db.String(45))
-    axisStart = db.Column(db.Float)
-    axisEnd = db.Column(db.Float)
-    axisRange = db.Column(db.Float)
-    overlap = db.Column(db.Float)
-    numberOfImages = db.Column(db.Integer)
-    startImageNumber = db.Column(db.Integer)
-    numberOfPasses = db.Column(db.Integer)
-    exposureTime = db.Column(db.Float)
-    imageDirectory = db.Column(db.String(255), index=True, info='The directory where files reside - should end with a slash')
-    imagePrefix = db.Column(db.String(45), index=True)
-    imageSuffix = db.Column(db.String(45))
-    imageContainerSubPath = db.Column(db.String(255), info='Internal path of a HDF5 file pointing to the data for this data collection')
-    fileTemplate = db.Column(db.String(255))
-    wavelength = db.Column(db.Float)
-    resolution = db.Column(db.Float)
-    detectorDistance = db.Column(db.Float)
-    xBeam = db.Column(db.Float)
-    yBeam = db.Column(db.Float)
-    comments = db.Column(db.String(1024))
-    printableForReport = db.Column(db.Integer, server_default=db.FetchedValue())
-    CRYSTALCLASS = db.Column(db.String(20))
-    slitGapVertical = db.Column(db.Float)
-    slitGapHorizontal = db.Column(db.Float)
-    transmission = db.Column(db.Float)
-    synchrotronMode = db.Column(db.String(20))
-    xtalSnapshotFullPath1 = db.Column(db.String(255))
-    xtalSnapshotFullPath2 = db.Column(db.String(255))
-    xtalSnapshotFullPath3 = db.Column(db.String(255))
-    xtalSnapshotFullPath4 = db.Column(db.String(255))
-    rotationAxis = db.Column(db.ENUM('Omega', 'Kappa', 'Phi'))
-    phiStart = db.Column(db.Float)
-    kappaStart = db.Column(db.Float)
-    omegaStart = db.Column(db.Float)
-    chiStart = db.Column(db.Float)
-    resolutionAtCorner = db.Column(db.Float)
-    detector2Theta = db.Column(db.Float)
-    DETECTORMODE = db.Column(db.String(255))
-    undulatorGap1 = db.Column(db.Float)
-    undulatorGap2 = db.Column(db.Float)
-    undulatorGap3 = db.Column(db.Float)
-    beamSizeAtSampleX = db.Column(db.Float)
-    beamSizeAtSampleY = db.Column(db.Float)
-    centeringMethod = db.Column(db.String(255))
-    averageTemperature = db.Column(db.Float)
-    ACTUALSAMPLEBARCODE = db.Column(db.String(45))
-    ACTUALSAMPLESLOTINCONTAINER = db.Column(db.Integer)
-    ACTUALCONTAINERBARCODE = db.Column(db.String(45))
-    ACTUALCONTAINERSLOTINSC = db.Column(db.Integer)
-    actualCenteringPosition = db.Column(db.String(255))
-    beamShape = db.Column(db.String(45))
-    dataCollectionGroupId = db.Column(db.ForeignKey('DataCollectionGroup.dataCollectionGroupId'), nullable=False, index=True, info='references DataCollectionGroup table')
-    POSITIONID = db.Column(db.Integer)
-    detectorId = db.Column(db.ForeignKey('Detector.detectorId'), index=True, info='references Detector table')
-    FOCALSPOTSIZEATSAMPLEX = db.Column(db.Float)
-    POLARISATION = db.Column(db.Float)
-    FOCALSPOTSIZEATSAMPLEY = db.Column(db.Float)
-    APERTUREID = db.Column(db.Integer)
-    screeningOrigId = db.Column(db.Integer)
-    startPositionId = db.Column(db.ForeignKey('MotorPosition.motorPositionId'), index=True)
-    endPositionId = db.Column(db.ForeignKey('MotorPosition.motorPositionId'), index=True)
-    flux = db.Column(db.Float(asdecimal=True))
-    strategySubWedgeOrigId = db.Column(db.ForeignKey('ScreeningStrategySubWedge.screeningStrategySubWedgeId'), index=True, info='references ScreeningStrategySubWedge table')
-    blSubSampleId = db.Column(db.ForeignKey('BLSubSample.blSubSampleId'), index=True)
-    flux_end = db.Column(db.Float(asdecimal=True), info='flux measured after the collect')
-    bestWilsonPlotPath = db.Column(db.String(255))
-    processedDataFile = db.Column(db.String(255))
-    datFullPath = db.Column(db.String(255))
-    magnification = db.Column(db.Float, info='Calibrated magnification, Units: dimensionless')
-    totalAbsorbedDose = db.Column(db.Float, info='Unit: e-/A^2 for EM')
-    binning = db.Column(db.Integer, server_default=db.FetchedValue(), info='1 or 2. Number of pixels to process as 1. (Use mean value.)')
-    particleDiameter = db.Column(db.Float, info='Unit: nm')
-    boxSize_CTF = db.Column(db.Float, info='Unit: pixels')
-    minResolution = db.Column(db.Float, info='Unit: A')
-    minDefocus = db.Column(db.Float, info='Unit: A')
-    maxDefocus = db.Column(db.Float, info='Unit: A')
-    defocusStepSize = db.Column(db.Float, info='Unit: A')
-    amountAstigmatism = db.Column(db.Float, info='Unit: A')
-    extractSize = db.Column(db.Float, info='Unit: pixels')
-    bgRadius = db.Column(db.Float, info='Unit: nm')
-    voltage = db.Column(db.Float, info='Unit: kV')
-    objAperture = db.Column(db.Float, info='Unit: um')
-    c1aperture = db.Column(db.Float, info='Unit: um')
-    c2aperture = db.Column(db.Float, info='Unit: um')
-    c3aperture = db.Column(db.Float, info='Unit: um')
-    c1lens = db.Column(db.Float, info='Unit: %')
-    c2lens = db.Column(db.Float, info='Unit: %')
-    c3lens = db.Column(db.Float, info='Unit: %')
-    totalExposedDose = db.Column(db.Float, info='Units: e-/A^2')
-    nominalMagnification = db.Column(db.Float, info='Nominal magnification: Units: dimensionless')
-    nominalDefocus = db.Column(db.Float, info='Nominal defocus, Units: A')
-    imageSizeX = db.Column(db.Integer, info='Image size in x, incase crop has been used, Units: pixels')
-    imageSizeY = db.Column(db.Integer, info='Image size in y, Units: pixels')
-    pixelSizeOnImage = db.Column(db.Float, info='Pixel size on image, calculated from magnification, duplicate? Units: um?')
-    phasePlate = db.Column(db.Integer, info='Whether the phase plate was used')
-
-    BLSubSample = db.relationship('BLSubSample', primaryjoin='DataCollection.blSubSampleId == BLSubSample.blSubSampleId')
-    DataCollectionGroup = db.relationship('DataCollectionGroup', primaryjoin='DataCollection.dataCollectionGroupId == DataCollectionGroup.dataCollectionGroupId')
-    Detector = db.relationship('Detector', primaryjoin='DataCollection.detectorId == Detector.detectorId')
-    MotorPosition = db.relationship('MotorPosition', primaryjoin='DataCollection.endPositionId == MotorPosition.motorPositionId')
-    MotorPosition1 = db.relationship('MotorPosition', primaryjoin='DataCollection.startPositionId == MotorPosition.motorPositionId')
-    ScreeningStrategySubWedge = db.relationship('ScreeningStrategySubWedge', primaryjoin='DataCollection.strategySubWedgeOrigId == ScreeningStrategySubWedge.screeningStrategySubWedgeId')
-
-
-class DataCollectionComment(db.Model):
-    __tablename__ = 'DataCollectionComment'
-
-    dataCollectionCommentId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    personId = db.Column(db.ForeignKey('Person.personId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    comments = db.Column(db.String(4000))
-    createTime = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
-    modTime = db.Column(db.Date)
-
-    DataCollection = db.relationship('DataCollection', primaryjoin='DataCollectionComment.dataCollectionId == DataCollection.dataCollectionId')
-    Person = db.relationship('Person', primaryjoin='DataCollectionComment.personId == Person.personId')
-
-
-class DataCollectionFileAttachment(db.Model):
-    __tablename__ = 'DataCollectionFileAttachment'
-
-    dataCollectionFileAttachmentId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    fileFullPath = db.Column(db.String(255), nullable=False)
-    fileType = db.Column(db.ENUM('snapshot', 'log', 'xy', 'recip', 'pia', 'warning'))
-    createTime = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
-
-    DataCollection = db.relationship('DataCollection', primaryjoin='DataCollectionFileAttachment.dataCollectionId == DataCollection.dataCollectionId')
-
-
-class DataCollectionGroup(db.Model):
-    __tablename__ = 'DataCollectionGroup'
-
-    dataCollectionGroupId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    sessionId = db.Column(db.ForeignKey('BLSession.sessionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='references Session table')
-    comments = db.Column(db.String(1024), info='comments')
-    blSampleId = db.Column(db.ForeignKey('BLSample.blSampleId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='references BLSample table')
-    experimentType = db.Column(db.ENUM('SAD', 'SAD - Inverse Beam', 'OSC', 'Collect - Multiwedge', 'MAD', 'Helical', 'Multi-positional', 'Mesh', 'Burn', 'MAD - Inverse Beam', 'Characterization', 'Dehydration', 'tomo', 'experiment', 'EM', 'PDF', 'PDF+Bragg', 'Bragg', 'single particle', 'Serial Fixed', 'Serial Jet', 'Standard', 'Time Resolved', 'Diamond Anvil High Pressure', 'Custom'), info='Standard: Routine structure determination experiment. Time Resolved: Investigate the change of a system over time. Custom: Special or non-standard data collection.')
-    startTime = db.Column(db.DateTime, info='Start time of the dataCollectionGroup')
-    endTime = db.Column(db.DateTime, info='end time of the dataCollectionGroup')
-    crystalClass = db.Column(db.String(20), info='Crystal Class for industrials users')
-    detectorMode = db.Column(db.String(255), info='Detector mode')
-    actualSampleBarcode = db.Column(db.String(45), info='Actual sample barcode')
-    actualSampleSlotInContainer = db.Column(db.Integer, info='Actual sample slot number in container')
-    actualContainerBarcode = db.Column(db.String(45), info='Actual container barcode')
-    actualContainerSlotInSC = db.Column(db.Integer, info='Actual container slot number in sample changer')
-    workflowId = db.Column(db.ForeignKey('Workflow.workflowId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    xtalSnapshotFullPath = db.Column(db.String(255))
-    scanParameters = db.Column(db.String(collation='utf8mb4_bin'))
-
-    BLSample = db.relationship('BLSample', primaryjoin='DataCollectionGroup.blSampleId == BLSample.blSampleId')
-    BLSession = db.relationship('BLSession', primaryjoin='DataCollectionGroup.sessionId == BLSession.sessionId')
-    Workflow = db.relationship('Workflow', primaryjoin='DataCollectionGroup.workflowId == Workflow.workflowId')
-    Project = db.relationship('Project', secondary='Project_has_DCGroup')
-
-
-class DataCollectionPlanHasDetector(db.Model):
-    __tablename__ = 'DataCollectionPlan_has_Detector'
-    __table_args__ = (
-        db.Index('dataCollectionPlanId', 'dataCollectionPlanId', 'detectorId'),
-    )
-
-    dataCollectionPlanHasDetectorId = db.Column(db.Integer, primary_key=True)
-    dataCollectionPlanId = db.Column(db.ForeignKey('DiffractionPlan.diffractionPlanId'), nullable=False)
-    detectorId = db.Column(db.ForeignKey('Detector.detectorId'), nullable=False, index=True)
-    exposureTime = db.Column(db.Float(asdecimal=True))
-    distance = db.Column(db.Float(asdecimal=True))
-    roll = db.Column(db.Float(asdecimal=True))
-
-    DiffractionPlan = db.relationship('DiffractionPlan', primaryjoin='DataCollectionPlanHasDetector.dataCollectionPlanId == DiffractionPlan.diffractionPlanId')
-    Detector = db.relationship('Detector', primaryjoin='DataCollectionPlanHasDetector.detectorId == Detector.detectorId')
-
-
-class DataReductionStatu(db.Model):
-    __tablename__ = 'DataReductionStatus'
-
-    dataReductionStatusId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.Integer, nullable=False)
-    status = db.Column(db.String(15))
-    filename = db.Column(db.String(255))
-    message = db.Column(db.String(255))
-
-
-class Detector(db.Model):
-    __tablename__ = 'Detector'
-    __table_args__ = (
-        db.Index('Detector_FKIndex1', 'detectorType', 'detectorManufacturer', 'detectorModel', 'detectorPixelSizeHorizontal', 'detectorPixelSizeVertical'),
-    )
-
-    detectorId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    detectorType = db.Column(db.String(255))
-    detectorManufacturer = db.Column(db.String(255))
-    detectorModel = db.Column(db.String(255))
-    detectorPixelSizeHorizontal = db.Column(db.Float)
-    detectorPixelSizeVertical = db.Column(db.Float)
-    DETECTORMAXRESOLUTION = db.Column(db.Float)
-    DETECTORMINRESOLUTION = db.Column(db.Float)
-    detectorSerialNumber = db.Column(db.String(30), unique=True)
-    detectorDistanceMin = db.Column(db.Float(asdecimal=True))
-    detectorDistanceMax = db.Column(db.Float(asdecimal=True))
-    trustedPixelValueRangeLower = db.Column(db.Float(asdecimal=True))
-    trustedPixelValueRangeUpper = db.Column(db.Float(asdecimal=True))
-    sensorThickness = db.Column(db.Float)
-    overload = db.Column(db.Float)
-    XGeoCorr = db.Column(db.String(255))
-    YGeoCorr = db.Column(db.String(255))
-    detectorMode = db.Column(db.String(255))
-    density = db.Column(db.Float)
-    composition = db.Column(db.String(16))
-    numberOfPixelsX = db.Column(db.Integer, info='Detector number of pixels in x')
-    numberOfPixelsY = db.Column(db.Integer, info='Detector number of pixels in y')
-    detectorRollMin = db.Column(db.Float(asdecimal=True), info='unit: degrees')
-    detectorRollMax = db.Column(db.Float(asdecimal=True), info='unit: degrees')
-    localName = db.Column(db.String(40), info='Colloquial name for the detector')
-
-
-class Dewar(db.Model):
-    __tablename__ = 'Dewar'
-
-    dewarId = db.Column(db.Integer, primary_key=True)
-    shippingId = db.Column(db.ForeignKey('Shipping.shippingId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    code = db.Column(db.String(45), index=True)
-    comments = db.Column(db.String)
-    storageLocation = db.Column(db.String(45))
-    dewarStatus = db.Column(db.String(45), index=True)
-    bltimeStamp = db.Column(db.DateTime)
-    isStorageDewar = db.Column(db.Integer, server_default=db.FetchedValue())
-    barCode = db.Column(db.String(45), unique=True)
-    firstExperimentId = db.Column(db.ForeignKey('BLSession.sessionId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    customsValue = db.Column(db.Integer)
-    transportValue = db.Column(db.Integer)
-    trackingNumberToSynchrotron = db.Column(db.String(30))
-    trackingNumberFromSynchrotron = db.Column(db.String(30))
-    type = db.Column(db.ENUM('Dewar', 'Toolbox'), nullable=False, server_default=db.FetchedValue())
-    FACILITYCODE = db.Column(db.String(20))
-    weight = db.Column(db.Float, info='dewar weight in kg')
-    deliveryAgent_barcode = db.Column(db.String(30), info='Courier piece barcode (not the airway bill)')
-
-    BLSession = db.relationship('BLSession', primaryjoin='Dewar.firstExperimentId == BLSession.sessionId')
-    Shipping = db.relationship('Shipping', primaryjoin='Dewar.shippingId == Shipping.shippingId')
-
-
-class DewarLocation(db.Model):
-    __tablename__ = 'DewarLocation'
-
-    eventId = db.Column(db.Integer, primary_key=True)
-    dewarNumber = db.Column(db.String(128), nullable=False, info='Dewar number')
-    userId = db.Column(db.String(128), info='User who locates the dewar')
-    dateTime = db.Column(db.DateTime, info='Date and time of locatization')
-    locationName = db.Column(db.String(128), info='Location of the dewar')
-    courierName = db.Column(db.String(128), info="Carrier name who's shipping back the dewar")
-    courierTrackingNumber = db.Column(db.String(128), info='Tracking number of the shippment')
-
-
-class DewarLocationList(db.Model):
-    __tablename__ = 'DewarLocationList'
-
-    locationId = db.Column(db.Integer, primary_key=True)
-    locationName = db.Column(db.String(128), nullable=False, server_default=db.FetchedValue(), info='Location')
-
-
-class DewarRegistry(db.Model):
-    __tablename__ = 'DewarRegistry'
-
-    facilityCode = db.Column(db.String(20), primary_key=True)
-    proposalId = db.Column(db.ForeignKey('Proposal.proposalId', ondelete='CASCADE'), nullable=False, index=True)
-    labContactId = db.Column(db.ForeignKey('LabContact.labContactId', ondelete='CASCADE'), nullable=False, index=True)
-    purchaseDate = db.Column(db.DateTime)
-    bltimestamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
-
-    LabContact = db.relationship('LabContact', primaryjoin='DewarRegistry.labContactId == LabContact.labContactId')
-    Proposal = db.relationship('Proposal', primaryjoin='DewarRegistry.proposalId == Proposal.proposalId')
-
-
-class DewarReport(db.Model):
-    __tablename__ = 'DewarReport'
-
-    dewarReportId = db.Column(db.Integer, primary_key=True)
-    facilityCode = db.Column(db.ForeignKey('DewarRegistry.facilityCode', ondelete='CASCADE'), nullable=False, index=True)
-    report = db.Column(db.Text)
-    attachment = db.Column(db.String(255))
-    bltimestamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
-
-    DewarRegistry = db.relationship('DewarRegistry', primaryjoin='DewarReport.facilityCode == DewarRegistry.facilityCode')
-
-
-class DewarTransportHistory(db.Model):
-    __tablename__ = 'DewarTransportHistory'
-
-    DewarTransportHistoryId = db.Column(db.Integer, primary_key=True)
-    dewarId = db.Column(db.ForeignKey('Dewar.dewarId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    dewarStatus = db.Column(db.String(45), nullable=False)
-    storageLocation = db.Column(db.String(45), nullable=False)
-    arrivalDate = db.Column(db.DateTime, nullable=False)
-
-    Dewar = db.relationship('Dewar', primaryjoin='DewarTransportHistory.dewarId == Dewar.dewarId')
-
-
-class DiffractionPlan(db.Model):
-    __tablename__ = 'DiffractionPlan'
-
-    diffractionPlanId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(20))
-    experimentKind = db.Column(db.ENUM('Default', 'MXPressE', 'MXPressO', 'MXPressE_SAD', 'MXScore', 'MXPressM', 'MAD', 'SAD', 'Fixed', 'Ligand binding', 'Refinement', 'OSC', 'MAD - Inverse Beam', 'SAD - Inverse Beam', 'MESH', 'XFE', 'Stepped transmission'))
-    observedResolution = db.Column(db.Float)
-    minimalResolution = db.Column(db.Float)
-    exposureTime = db.Column(db.Float)
-    oscillationRange = db.Column(db.Float)
-    maximalResolution = db.Column(db.Float)
-    screeningResolution = db.Column(db.Float)
-    radiationSensitivity = db.Column(db.Float)
-    anomalousScatterer = db.Column(db.String(255))
-    preferredBeamSizeX = db.Column(db.Float)
-    preferredBeamSizeY = db.Column(db.Float)
-    preferredBeamDiameter = db.Column(db.Float)
-    comments = db.Column(db.String(1024))
-    DIFFRACTIONPLANUUID = db.Column(db.String(1000))
-    aimedCompleteness = db.Column(db.Float(asdecimal=True))
-    aimedIOverSigmaAtHighestRes = db.Column(db.Float(asdecimal=True))
-    aimedMultiplicity = db.Column(db.Float(asdecimal=True))
-    aimedResolution = db.Column(db.Float(asdecimal=True))
-    anomalousData = db.Column(db.Integer, server_default=db.FetchedValue())
-    complexity = db.Column(db.String(45))
-    estimateRadiationDamage = db.Column(db.Integer, server_default=db.FetchedValue())
-    forcedSpaceGroup = db.Column(db.String(45))
-    requiredCompleteness = db.Column(db.Float(asdecimal=True))
-    requiredMultiplicity = db.Column(db.Float(asdecimal=True))
-    requiredResolution = db.Column(db.Float(asdecimal=True))
-    strategyOption = db.Column(db.String(45))
-    kappaStrategyOption = db.Column(db.String(45))
-    numberOfPositions = db.Column(db.Integer)
-    minDimAccrossSpindleAxis = db.Column(db.Float(asdecimal=True), info='minimum dimension accross the spindle axis')
-    maxDimAccrossSpindleAxis = db.Column(db.Float(asdecimal=True), info='maximum dimension accross the spindle axis')
-    radiationSensitivityBeta = db.Column(db.Float(asdecimal=True))
-    radiationSensitivityGamma = db.Column(db.Float(asdecimal=True))
-    minOscWidth = db.Column(db.Float)
-    recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time')
-    monochromator = db.Column(db.String(8), info='DMM or DCM')
-    energy = db.Column(db.Float, info='eV')
-    transmission = db.Column(db.Float, info='Decimal fraction in range [0,1]')
-    boxSizeX = db.Column(db.Float, info='microns')
-    boxSizeY = db.Column(db.Float, info='microns')
-    kappaStart = db.Column(db.Float, info='degrees')
-    axisStart = db.Column(db.Float, info='degrees')
-    axisRange = db.Column(db.Float, info='degrees')
-    numberOfImages = db.Column(db.Integer, info='The number of images requested')
-    presetForProposalId = db.Column(db.ForeignKey('Proposal.proposalId'), index=True, info='Indicates this plan is available to all sessions on given proposal')
-    beamLineName = db.Column(db.String(45), info='Indicates this plan is available to all sessions on given beamline')
-    detectorId = db.Column(db.ForeignKey('Detector.detectorId', onupdate='CASCADE'), index=True)
-    distance = db.Column(db.Float(asdecimal=True))
-    orientation = db.Column(db.Float(asdecimal=True))
-    monoBandwidth = db.Column(db.Float(asdecimal=True))
-    centringMethod = db.Column(db.ENUM('xray', 'loop', 'diffraction', 'optical'))
-
-    Detector = db.relationship('Detector', primaryjoin='DiffractionPlan.detectorId == Detector.detectorId')
-    Proposal = db.relationship('Proposal', primaryjoin='DiffractionPlan.presetForProposalId == Proposal.proposalId')
-
-
-class EMMicroscope(db.Model):
-    __tablename__ = 'EMMicroscope'
-
-    emMicroscopeId = db.Column(db.Integer, primary_key=True)
-    instrumentName = db.Column(db.String(100), nullable=False)
-    voltage = db.Column(db.Float)
-    CS = db.Column(db.Float)
-    detectorPixelSize = db.Column(db.Float)
-    C2aperture = db.Column(db.Float)
-    ObjAperture = db.Column(db.Float)
-    C2lens = db.Column(db.Float)
-
-
-class EnergyScan(db.Model):
-    __tablename__ = 'EnergyScan'
-
-    energyScanId = db.Column(db.Integer, primary_key=True)
-    sessionId = db.Column(db.ForeignKey('BLSession.sessionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    blSampleId = db.Column(db.ForeignKey('BLSample.blSampleId'), index=True)
-    fluorescenceDetector = db.Column(db.String(255))
-    scanFileFullPath = db.Column(db.String(255))
-    jpegChoochFileFullPath = db.Column(db.String(255))
-    element = db.Column(db.String(45))
-    startEnergy = db.Column(db.Float)
-    endEnergy = db.Column(db.Float)
-    transmissionFactor = db.Column(db.Float)
-    exposureTime = db.Column(db.Float)
-    axisPosition = db.Column(db.Float)
-    synchrotronCurrent = db.Column(db.Float)
-    temperature = db.Column(db.Float)
-    peakEnergy = db.Column(db.Float)
-    peakFPrime = db.Column(db.Float)
-    peakFDoublePrime = db.Column(db.Float)
-    inflectionEnergy = db.Column(db.Float)
-    inflectionFPrime = db.Column(db.Float)
-    inflectionFDoublePrime = db.Column(db.Float)
-    xrayDose = db.Column(db.Float)
-    startTime = db.Column(db.DateTime)
-    endTime = db.Column(db.DateTime)
-    edgeEnergy = db.Column(db.String(255))
-    filename = db.Column(db.String(255))
-    beamSizeVertical = db.Column(db.Float)
-    beamSizeHorizontal = db.Column(db.Float)
-    choochFileFullPath = db.Column(db.String(255))
-    crystalClass = db.Column(db.String(20))
-    comments = db.Column(db.String(1024))
-    flux = db.Column(db.Float(asdecimal=True), info='flux measured before the energyScan')
-    flux_end = db.Column(db.Float(asdecimal=True), info='flux measured after the energyScan')
-    workingDirectory = db.Column(db.String(45))
-    blSubSampleId = db.Column(db.ForeignKey('BLSubSample.blSubSampleId'), index=True)
-
-    BLSample = db.relationship('BLSample', primaryjoin='EnergyScan.blSampleId == BLSample.blSampleId')
-    BLSubSample = db.relationship('BLSubSample', primaryjoin='EnergyScan.blSubSampleId == BLSubSample.blSubSampleId')
-    BLSession = db.relationship('BLSession', primaryjoin='EnergyScan.sessionId == BLSession.sessionId')
-    Project = db.relationship('Project', secondary='Project_has_EnergyScan')
-
-
-class Experiment(db.Model):
-    __tablename__ = 'Experiment'
-
-    experimentId = db.Column(db.Integer, primary_key=True)
-    proposalId = db.Column(db.Integer, nullable=False)
-    name = db.Column(db.String(255))
-    creationDate = db.Column(db.DateTime)
-    comments = db.Column(db.String(512))
-    experimentType = db.Column(db.String(128))
-    sourceFilePath = db.Column(db.String(256))
-    dataAcquisitionFilePath = db.Column(db.String(256), info='The file path pointing to the data acquisition. Eventually it may be a compressed file with all the files or just the folder')
-    status = db.Column(db.String(45))
-    sessionId = db.Column(db.Integer)
-
-
-class ExperimentKindDetail(db.Model):
-    __tablename__ = 'ExperimentKindDetails'
-
-    experimentKindId = db.Column(db.Integer, primary_key=True)
-    diffractionPlanId = db.Column(db.ForeignKey('DiffractionPlan.diffractionPlanId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    exposureIndex = db.Column(db.Integer)
-    dataCollectionType = db.Column(db.String(45))
-    dataCollectionKind = db.Column(db.String(45))
-    wedgeValue = db.Column(db.Float)
-
-    DiffractionPlan = db.relationship('DiffractionPlan', primaryjoin='ExperimentKindDetail.diffractionPlanId == DiffractionPlan.diffractionPlanId')
-
-
-class Frame(db.Model):
-    __tablename__ = 'Frame'
-
-    frameId = db.Column(db.Integer, primary_key=True)
-    FRAMESETID = db.Column(db.Integer)
-    filePath = db.Column(db.String(255))
-    comments = db.Column(db.String(45))
-
-
-class FrameList(db.Model):
-    __tablename__ = 'FrameList'
-
-    frameListId = db.Column(db.Integer, primary_key=True)
-    comments = db.Column(db.Integer)
-
-
-class FrameSet(db.Model):
-    __tablename__ = 'FrameSet'
-
-    frameSetId = db.Column(db.Integer, primary_key=True)
-    runId = db.Column(db.ForeignKey('Run.runId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    FILEPATH = db.Column(db.String(255))
-    INTERNALPATH = db.Column(db.String(255))
-    frameListId = db.Column(db.ForeignKey('FrameList.frameListId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    detectorId = db.Column(db.Integer)
-    detectorDistance = db.Column(db.String(45))
-
-    FrameList = db.relationship('FrameList', primaryjoin='FrameSet.frameListId == FrameList.frameListId')
-    Run = db.relationship('Run', primaryjoin='FrameSet.runId == Run.runId')
-
-
-class FrameToList(db.Model):
-    __tablename__ = 'FrameToList'
-
-    frameToListId = db.Column(db.Integer, primary_key=True)
-    frameListId = db.Column(db.ForeignKey('FrameList.frameListId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    frameId = db.Column(db.ForeignKey('Frame.frameId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-
-    Frame = db.relationship('Frame', primaryjoin='FrameToList.frameId == Frame.frameId')
-    FrameList = db.relationship('FrameList', primaryjoin='FrameToList.frameListId == FrameList.frameListId')
-
-
-class GeometryClassname(db.Model):
-    __tablename__ = 'GeometryClassname'
-
-    geometryClassnameId = db.Column(db.Integer, primary_key=True)
-    geometryClassname = db.Column(db.String(45))
-    geometryOrder = db.Column(db.Integer, nullable=False)
-
-
-class GridImageMap(db.Model):
-    __tablename__ = 'GridImageMap'
-
-    gridImageMapId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId'), index=True)
-    imageNumber = db.Column(db.Integer, info='Movie number, sequential 1-n in time order')
-    outputFileId = db.Column(db.String(80), info='File number, file 1 may not be movie 1')
-    positionX = db.Column(db.Float, info='X position of stage, Units: um')
-    positionY = db.Column(db.Float, info='Y position of stage, Units: um')
-
-    DataCollection = db.relationship('DataCollection', primaryjoin='GridImageMap.dataCollectionId == DataCollection.dataCollectionId')
-
-
-class GridInfo(db.Model):
-    __tablename__ = 'GridInfo'
-
-    gridInfoId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    xOffset = db.Column(db.Float(asdecimal=True))
-    yOffset = db.Column(db.Float(asdecimal=True))
-    dx_mm = db.Column(db.Float(asdecimal=True))
-    dy_mm = db.Column(db.Float(asdecimal=True))
-    steps_x = db.Column(db.Float(asdecimal=True))
-    steps_y = db.Column(db.Float(asdecimal=True))
-    meshAngle = db.Column(db.Float(asdecimal=True))
-    recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time')
-    workflowMeshId = db.Column(db.ForeignKey('WorkflowMesh.workflowMeshId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    orientation = db.Column(db.ENUM('vertical', 'horizontal'), server_default=db.FetchedValue())
-    dataCollectionGroupId = db.Column(db.ForeignKey('DataCollectionGroup.dataCollectionGroupId'), index=True)
-    pixelsPerMicronX = db.Column(db.Float)
-    pixelsPerMicronY = db.Column(db.Float)
-    snapshot_offsetXPixel = db.Column(db.Float)
-    snapshot_offsetYPixel = db.Column(db.Float)
-    snaked = db.Column(db.Integer, server_default=db.FetchedValue(), info='True: The images associated with the DCG were collected in a snaked pattern')
-
-    DataCollectionGroup = db.relationship('DataCollectionGroup', primaryjoin='GridInfo.dataCollectionGroupId == DataCollectionGroup.dataCollectionGroupId')
-    WorkflowMesh = db.relationship('WorkflowMesh', primaryjoin='GridInfo.workflowMeshId == WorkflowMesh.workflowMeshId')
-
-
-class Image(db.Model):
-    __tablename__ = 'Image'
-    __table_args__ = (
-        db.Index('Image_Index3', 'fileLocation', 'fileName'),
-    )
-
-    imageId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue())
-    imageNumber = db.Column(db.Integer, index=True)
-    fileName = db.Column(db.String(255))
-    fileLocation = db.Column(db.String(255))
-    measuredIntensity = db.Column(db.Float)
-    jpegFileFullPath = db.Column(db.String(255))
-    jpegThumbnailFileFullPath = db.Column(db.String(255))
-    temperature = db.Column(db.Float)
-    cumulativeIntensity = db.Column(db.Float)
-    synchrotronCurrent = db.Column(db.Float)
-    comments = db.Column(db.String(1024))
-    machineMessage = db.Column(db.String(1024))
-    BLTIMESTAMP = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
-    motorPositionId = db.Column(db.ForeignKey('MotorPosition.motorPositionId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time')
-
-    DataCollection = db.relationship('DataCollection', primaryjoin='Image.dataCollectionId == DataCollection.dataCollectionId')
-    MotorPosition = db.relationship('MotorPosition', primaryjoin='Image.motorPositionId == MotorPosition.motorPositionId')
-
-
-class ImageQualityIndicator(db.Model):
-    __tablename__ = 'ImageQualityIndicators'
-
-    dataCollectionId = db.Column(db.Integer, primary_key=True, nullable=False)
-    imageNumber = db.Column(db.Integer, primary_key=True, nullable=False)
-    imageId = db.Column(db.Integer)
-    autoProcProgramId = db.Column(db.Integer, info='Foreign key to the AutoProcProgram table')
-    spotTotal = db.Column(db.Integer, info='Total number of spots')
-    inResTotal = db.Column(db.Integer, info='Total number of spots in resolution range')
-    goodBraggCandidates = db.Column(db.Integer, info='Total number of Bragg diffraction spots')
-    iceRings = db.Column(db.Integer, info='Number of ice rings identified')
-    method1Res = db.Column(db.Float, info='Resolution estimate 1 (see publication)')
-    method2Res = db.Column(db.Float, info='Resolution estimate 2 (see publication)')
-    maxUnitCell = db.Column(db.Float, info='Estimation of the largest possible unit cell edge')
-    pctSaturationTop50Peaks = db.Column(db.Float, info='The fraction of the dynamic range being used')
-    inResolutionOvrlSpots = db.Column(db.Integer, info='Number of spots overloaded')
-    binPopCutOffMethod2Res = db.Column(db.Float, info='Cut off used in resolution limit calculation')
-    recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time')
-    totalIntegratedSignal = db.Column(db.Float(asdecimal=True))
-    dozor_score = db.Column(db.Float(asdecimal=True), info='dozor_score')
-    driftFactor = db.Column(db.Float, info='EM movie drift factor')
-
-
-class Imager(db.Model):
-    __tablename__ = 'Imager'
-
-    imagerId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(45), nullable=False)
-    temperature = db.Column(db.Float)
-    serial = db.Column(db.String(45))
-    capacity = db.Column(db.SmallInteger)
-
-
-class InspectionType(db.Model):
-    __tablename__ = 'InspectionType'
-
-    inspectionTypeId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(45))
-
-
-class Instruction(db.Model):
-    __tablename__ = 'Instruction'
-
-    instructionId = db.Column(db.Integer, primary_key=True)
-    instructionSetId = db.Column(db.ForeignKey('InstructionSet.instructionSetId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    INSTRUCTIONORDER = db.Column(db.Integer)
-    comments = db.Column(db.String(255))
-    order = db.Column(db.Integer, nullable=False)
-
-    InstructionSet = db.relationship('InstructionSet', primaryjoin='Instruction.instructionSetId == InstructionSet.instructionSetId')
-
-
-class InstructionSet(db.Model):
-    __tablename__ = 'InstructionSet'
-
-    instructionSetId = db.Column(db.Integer, primary_key=True)
-    type = db.Column(db.String(50))
-
-
-class IspybCrystalClas(db.Model):
-    __tablename__ = 'IspybCrystalClass'
-
-    crystalClassId = db.Column(db.Integer, primary_key=True)
-    crystalClass_code = db.Column(db.String(20), nullable=False)
-    crystalClass_name = db.Column(db.String(255), nullable=False)
-
-
-class IspybReference(db.Model):
-    __tablename__ = 'IspybReference'
-
-    referenceId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    referenceName = db.Column(db.String(255), info='reference name')
-    referenceUrl = db.Column(db.String(1024), info='url of the reference')
-    referenceBibtext = db.Column(db.LargeBinary, info='bibtext value of the reference')
-    beamline = db.Column(db.ENUM('All', 'ID14-4', 'ID23-1', 'ID23-2', 'ID29', 'XRF', 'AllXRF', 'Mesh'), info='beamline involved')
-
-
-class LabContact(db.Model):
-    __tablename__ = 'LabContact'
-    __table_args__ = (
-        db.Index('personAndProposal', 'personId', 'proposalId'),
-        db.Index('cardNameAndProposal', 'cardName', 'proposalId')
-    )
-
-    labContactId = db.Column(db.Integer, primary_key=True)
-    personId = db.Column(db.ForeignKey('Person.personId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False)
-    cardName = db.Column(db.String(40), nullable=False)
-    proposalId = db.Column(db.ForeignKey('Proposal.proposalId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    defaultCourrierCompany = db.Column(db.String(45))
-    courierAccount = db.Column(db.String(45))
-    billingReference = db.Column(db.String(45))
-    dewarAvgCustomsValue = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
-    dewarAvgTransportValue = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
-    recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time')
-
-    Person = db.relationship('Person', primaryjoin='LabContact.personId == Person.personId')
-    Proposal = db.relationship('Proposal', primaryjoin='LabContact.proposalId == Proposal.proposalId')
-
-
-class Laboratory(db.Model):
-    __tablename__ = 'Laboratory'
-
-    laboratoryId = db.Column(db.Integer, primary_key=True)
-    laboratoryUUID = db.Column(db.String(45))
-    name = db.Column(db.String(45))
-    address = db.Column(db.String(255))
-    city = db.Column(db.String(45))
-    country = db.Column(db.String(45))
-    url = db.Column(db.String(255))
-    organization = db.Column(db.String(45))
-    recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time')
-    laboratoryPk = db.Column(db.Integer)
-    postcode = db.Column(db.String(15))
-
-
-class Log4Stat(db.Model):
-    __tablename__ = 'Log4Stat'
-
-    id = db.Column(db.Integer, primary_key=True)
-    priority = db.Column(db.String(15))
-    LOG4JTIMESTAMP = db.Column(db.DateTime)
-    msg = db.Column(db.String(255))
-    detail = db.Column(db.String(255))
-    value = db.Column(db.String(255))
-    timestamp = db.Column(db.DateTime)
-
-
-class MXMRRun(db.Model):
-    __tablename__ = 'MXMRRun'
-
-    mxMRRunId = db.Column(db.Integer, primary_key=True)
-    autoProcScalingId = db.Column(db.ForeignKey('AutoProcScaling.autoProcScalingId'), nullable=False, index=True)
-    success = db.Column(db.Integer, server_default=db.FetchedValue(), info='Indicates whether the program completed. 1 for success, 0 for failure.')
-    message = db.Column(db.String(255), info='A short summary of the findings, success or failure.')
-    pipeline = db.Column(db.String(50))
-    inputCoordFile = db.Column(db.String(255))
-    outputCoordFile = db.Column(db.String(255))
-    inputMTZFile = db.Column(db.String(255))
-    outputMTZFile = db.Column(db.String(255))
-    runDirectory = db.Column(db.String(255))
-    logFile = db.Column(db.String(255))
-    commandLine = db.Column(db.String(255))
-    rValueStart = db.Column(db.Float)
-    rValueEnd = db.Column(db.Float)
-    rFreeValueStart = db.Column(db.Float)
-    rFreeValueEnd = db.Column(db.Float)
-    starttime = db.Column(db.DateTime)
-    endtime = db.Column(db.DateTime)
-
-    AutoProcScaling = db.relationship('AutoProcScaling', primaryjoin='MXMRRun.autoProcScalingId == AutoProcScaling.autoProcScalingId')
-
-
-class MXMRRunBlob(db.Model):
-    __tablename__ = 'MXMRRunBlob'
-
-    mxMRRunBlobId = db.Column(db.Integer, primary_key=True)
-    mxMRRunId = db.Column(db.ForeignKey('MXMRRun.mxMRRunId'), nullable=False, index=True)
-    view1 = db.Column(db.String(255))
-    view2 = db.Column(db.String(255))
-    view3 = db.Column(db.String(255))
-
-    MXMRRun = db.relationship('MXMRRun', primaryjoin='MXMRRunBlob.mxMRRunId == MXMRRun.mxMRRunId')
-
-
-class Macromolecule(db.Model):
-    __tablename__ = 'Macromolecule'
-
-    macromoleculeId = db.Column(db.Integer, primary_key=True)
-    proposalId = db.Column(db.Integer)
-    safetyLevelId = db.Column(db.ForeignKey('SafetyLevel.safetyLevelId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    name = db.Column(db.String(45))
-    acronym = db.Column(db.String(45))
-    molecularMass = db.Column(db.String(45))
-    extintionCoefficient = db.Column(db.String(45))
-    sequence = db.Column(db.String(1000))
-    creationDate = db.Column(db.DateTime)
-    comments = db.Column(db.String(1024))
-
-    SafetyLevel = db.relationship('SafetyLevel', primaryjoin='Macromolecule.safetyLevelId == SafetyLevel.safetyLevelId')
-
-
-class MacromoleculeRegion(db.Model):
-    __tablename__ = 'MacromoleculeRegion'
-
-    macromoleculeRegionId = db.Column(db.Integer, primary_key=True)
-    macromoleculeId = db.Column(db.ForeignKey('Macromolecule.macromoleculeId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    regionType = db.Column(db.String(45))
-    id = db.Column(db.String(45))
-    count = db.Column(db.String(45))
-    sequence = db.Column(db.String(45))
-
-    Macromolecule = db.relationship('Macromolecule', primaryjoin='MacromoleculeRegion.macromoleculeId == Macromolecule.macromoleculeId')
-
-
-class Measurement(db.Model):
-    __tablename__ = 'Measurement'
-
-    specimenId = db.Column(db.ForeignKey('Specimen.specimenId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    runId = db.Column(db.ForeignKey('Run.runId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    code = db.Column(db.String(100))
-    priorityLevelId = db.Column(db.Integer)
-    exposureTemperature = db.Column(db.String(45))
-    viscosity = db.Column(db.String(45))
-    flow = db.Column(db.Integer)
-    extraFlowTime = db.Column(db.String(45))
-    volumeToLoad = db.Column(db.String(45))
-    waitTime = db.Column(db.String(45))
-    transmission = db.Column(db.String(45))
-    comments = db.Column(db.String(512))
-    measurementId = db.Column(db.Integer, primary_key=True)
-
-    Run = db.relationship('Run', primaryjoin='Measurement.runId == Run.runId')
-    Speciman = db.relationship('Speciman', primaryjoin='Measurement.specimenId == Speciman.specimenId')
-
-
-class MeasurementToDataCollection(db.Model):
-    __tablename__ = 'MeasurementToDataCollection'
-
-    measurementToDataCollectionId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.ForeignKey('SaxsDataCollection.dataCollectionId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    measurementId = db.Column(db.ForeignKey('Measurement.measurementId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    dataCollectionOrder = db.Column(db.Integer)
-
-    SaxsDataCollection = db.relationship('SaxsDataCollection', primaryjoin='MeasurementToDataCollection.dataCollectionId == SaxsDataCollection.dataCollectionId')
-    Measurement = db.relationship('Measurement', primaryjoin='MeasurementToDataCollection.measurementId == Measurement.measurementId')
-
-
-class MeasurementUnit(db.Model):
-    __tablename__ = 'MeasurementUnit'
-
-    measurementUnitId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(45))
-    unitType = db.Column(db.String(45))
-
-
-class Merge(db.Model):
-    __tablename__ = 'Merge'
-
-    mergeId = db.Column(db.Integer, primary_key=True)
-    measurementId = db.Column(db.ForeignKey('Measurement.measurementId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    frameListId = db.Column(db.ForeignKey('FrameList.frameListId', ondelete='CASCADE', onupdate='CASCADE'), index=True)
-    discardedFrameNameList = db.Column(db.String(1024))
-    averageFilePath = db.Column(db.String(255))
-    framesCount = db.Column(db.String(45))
-    framesMerge = db.Column(db.String(45))
-
-    FrameList = db.relationship('FrameList', primaryjoin='Merge.frameListId == FrameList.frameListId')
-    Measurement = db.relationship('Measurement', primaryjoin='Merge.measurementId == Measurement.measurementId')
-
-
-class Model(db.Model):
-    __tablename__ = 'Model'
-
-    modelId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(45))
-    pdbFile = db.Column(db.String(255))
-    fitFile = db.Column(db.String(255))
-    firFile = db.Column(db.String(255))
-    logFile = db.Column(db.String(255))
-    rFactor = db.Column(db.String(45))
-    chiSqrt = db.Column(db.String(45))
-    volume = db.Column(db.String(45))
-    rg = db.Column(db.String(45))
-    dMax = db.Column(db.String(45))
-
-
-class ModelBuilding(db.Model):
-    __tablename__ = 'ModelBuilding'
-
-    modelBuildingId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    phasingAnalysisId = db.Column(db.ForeignKey('PhasingAnalysis.phasingAnalysisId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related phasing analysis item')
-    phasingProgramRunId = db.Column(db.ForeignKey('PhasingProgramRun.phasingProgramRunId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related program item')
-    spaceGroupId = db.Column(db.ForeignKey('SpaceGroup.spaceGroupId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='Related spaceGroup')
-    lowRes = db.Column(db.Float(asdecimal=True))
-    highRes = db.Column(db.Float(asdecimal=True))
-    recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time')
-
-    PhasingAnalysi = db.relationship('PhasingAnalysi', primaryjoin='ModelBuilding.phasingAnalysisId == PhasingAnalysi.phasingAnalysisId')
-    PhasingProgramRun = db.relationship('PhasingProgramRun', primaryjoin='ModelBuilding.phasingProgramRunId == PhasingProgramRun.phasingProgramRunId')
-    SpaceGroup = db.relationship('SpaceGroup', primaryjoin='ModelBuilding.spaceGroupId == SpaceGroup.spaceGroupId')
-
-
-class ModelList(db.Model):
-    __tablename__ = 'ModelList'
-
-    modelListId = db.Column(db.Integer, primary_key=True)
-    nsdFilePath = db.Column(db.String(255))
-    chi2RgFilePath = db.Column(db.String(255))
-
-
-class ModelToList(db.Model):
-    __tablename__ = 'ModelToList'
-
-    modelToListId = db.Column(db.Integer, primary_key=True)
-    modelId = db.Column(db.ForeignKey('Model.modelId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    modelListId = db.Column(db.ForeignKey('ModelList.modelListId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-
-    Model = db.relationship('Model', primaryjoin='ModelToList.modelId == Model.modelId')
-    ModelList = db.relationship('ModelList', primaryjoin='ModelToList.modelListId == ModelList.modelListId')
-
-
-class MotionCorrection(db.Model):
-    __tablename__ = 'MotionCorrection'
-
-    motionCorrectionId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId'), index=True)
-    autoProcProgramId = db.Column(db.ForeignKey('AutoProcProgram.autoProcProgramId'), index=True)
-    imageNumber = db.Column(db.SmallInteger, info='Movie number, sequential in time 1-n')
-    firstFrame = db.Column(db.SmallInteger, info='First frame of movie used')
-    lastFrame = db.Column(db.SmallInteger, info='Last frame of movie used')
-    dosePerFrame = db.Column(db.Float, info='Dose per frame, Units: e-/A^2')
-    doseWeight = db.Column(db.Float, info='Dose weight, Units: dimensionless')
-    totalMotion = db.Column(db.Float, info='Total motion, Units: A')
-    averageMotionPerFrame = db.Column(db.Float, info='Average motion per frame, Units: A')
-    driftPlotFullPath = db.Column(db.String(255), info='Full path to the drift plot')
-    micrographFullPath = db.Column(db.String(255), info='Full path to the micrograph')
-    micrographSnapshotFullPath = db.Column(db.String(255), info='Full path to a snapshot (jpg) of the micrograph')
-    patchesUsedX = db.Column(db.Integer, info='Number of patches used in x (for motioncor2)')
-    patchesUsedY = db.Column(db.Integer, info='Number of patches used in y (for motioncor2)')
-    fftFullPath = db.Column(db.String(255), info='Full path to the jpg image of the raw micrograph FFT')
-    fftCorrectedFullPath = db.Column(db.String(255), info='Full path to the jpg image of the drift corrected micrograph FFT')
-    comments = db.Column(db.String(255))
-    movieId = db.Column(db.ForeignKey('Movie.movieId'), index=True)
-
-    AutoProcProgram = db.relationship('AutoProcProgram', primaryjoin='MotionCorrection.autoProcProgramId == AutoProcProgram.autoProcProgramId')
-    DataCollection = db.relationship('DataCollection', primaryjoin='MotionCorrection.dataCollectionId == DataCollection.dataCollectionId')
-    Movie = db.relationship('Movie', primaryjoin='MotionCorrection.movieId == Movie.movieId')
-
-
-class MotionCorrectionDrift(db.Model):
-    __tablename__ = 'MotionCorrectionDrift'
-
-    motionCorrectionDriftId = db.Column(db.Integer, primary_key=True)
-    motionCorrectionId = db.Column(db.ForeignKey('MotionCorrection.motionCorrectionId'), index=True)
-    frameNumber = db.Column(db.SmallInteger, info='Frame number of the movie these drift values relate to')
-    deltaX = db.Column(db.Float, info='Drift in x, Units: A')
-    deltaY = db.Column(db.Float, info='Drift in y, Units: A')
-
-    MotionCorrection = db.relationship('MotionCorrection', primaryjoin='MotionCorrectionDrift.motionCorrectionId == MotionCorrection.motionCorrectionId')
-
-
-class MotorPosition(db.Model):
-    __tablename__ = 'MotorPosition'
-
-    motorPositionId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    phiX = db.Column(db.Float(asdecimal=True))
-    phiY = db.Column(db.Float(asdecimal=True))
-    phiZ = db.Column(db.Float(asdecimal=True))
-    sampX = db.Column(db.Float(asdecimal=True))
-    sampY = db.Column(db.Float(asdecimal=True))
-    omega = db.Column(db.Float(asdecimal=True))
-    kappa = db.Column(db.Float(asdecimal=True))
-    phi = db.Column(db.Float(asdecimal=True))
-    chi = db.Column(db.Float(asdecimal=True))
-    gridIndexY = db.Column(db.Integer)
-    gridIndexZ = db.Column(db.Integer)
-    recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time')
-
-
-class Movie(db.Model):
-    __tablename__ = 'Movie'
-
-    movieId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId'), index=True)
-    movieNumber = db.Column(db.Integer)
-    movieFullPath = db.Column(db.String(255))
-    createdTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
-    positionX = db.Column(db.Float)
-    positionY = db.Column(db.Float)
-    nominalDefocus = db.Column(db.Float, info='Nominal defocus, Units: A')
-
-    DataCollection = db.relationship('DataCollection', primaryjoin='Movie.dataCollectionId == DataCollection.dataCollectionId')
-
-
-class PDB(db.Model):
-    __tablename__ = 'PDB'
-
-    pdbId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(255))
-    contents = db.Column(db.String)
-    code = db.Column(db.String(4))
-
-
-class PDBEntry(db.Model):
-    __tablename__ = 'PDBEntry'
-
-    pdbEntryId = db.Column(db.Integer, primary_key=True)
-    autoProcProgramId = db.Column(db.ForeignKey('AutoProcProgram.autoProcProgramId', ondelete='CASCADE'), nullable=False, index=True)
-    code = db.Column(db.String(4))
-    cell_a = db.Column(db.Float)
-    cell_b = db.Column(db.Float)
-    cell_c = db.Column(db.Float)
-    cell_alpha = db.Column(db.Float)
-    cell_beta = db.Column(db.Float)
-    cell_gamma = db.Column(db.Float)
-    resolution = db.Column(db.Float)
-    pdbTitle = db.Column(db.String(255))
-    pdbAuthors = db.Column(db.String(600))
-    pdbDate = db.Column(db.DateTime)
-    pdbBeamlineName = db.Column(db.String(50))
-    beamlines = db.Column(db.String(100))
-    distance = db.Column(db.Float)
-    autoProcCount = db.Column(db.SmallInteger)
-    dataCollectionCount = db.Column(db.SmallInteger)
-    beamlineMatch = db.Column(db.Integer)
-    authorMatch = db.Column(db.Integer)
-
-    AutoProcProgram = db.relationship('AutoProcProgram', primaryjoin='PDBEntry.autoProcProgramId == AutoProcProgram.autoProcProgramId')
-
-
-class PDBEntryHasAutoProcProgram(db.Model):
-    __tablename__ = 'PDBEntry_has_AutoProcProgram'
-
-    pdbEntryHasAutoProcId = db.Column(db.Integer, primary_key=True)
-    pdbEntryId = db.Column(db.ForeignKey('PDBEntry.pdbEntryId', ondelete='CASCADE'), nullable=False, index=True)
-    autoProcProgramId = db.Column(db.ForeignKey('AutoProcProgram.autoProcProgramId', ondelete='CASCADE'), nullable=False, index=True)
-    distance = db.Column(db.Float)
-
-    AutoProcProgram = db.relationship('AutoProcProgram', primaryjoin='PDBEntryHasAutoProcProgram.autoProcProgramId == AutoProcProgram.autoProcProgramId')
-    PDBEntry = db.relationship('PDBEntry', primaryjoin='PDBEntryHasAutoProcProgram.pdbEntryId == PDBEntry.pdbEntryId')
-
-
-class PHPSession(db.Model):
-    __tablename__ = 'PHPSession'
-
-    id = db.Column(db.String(50), primary_key=True)
-    accessDate = db.Column(db.DateTime)
-    data = db.Column(db.String(4000))
-
-
-class Particle(db.Model):
-    __tablename__ = 'Particle'
-
-    particleId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True)
-    x = db.Column(db.Float)
-    y = db.Column(db.Float)
-
-    DataCollection = db.relationship('DataCollection', primaryjoin='Particle.dataCollectionId == DataCollection.dataCollectionId')
-
-
-class Permission(db.Model):
-    __tablename__ = 'Permission'
-
-    permissionId = db.Column(db.Integer, primary_key=True)
-    type = db.Column(db.String(15), nullable=False)
-    description = db.Column(db.String(100))
-
-    UserGroup = db.relationship('UserGroup', secondary='UserGroup_has_Permission')
-
-
-class Person(db.Model):
-    __tablename__ = 'Person'
-
-    personId = db.Column(db.Integer, primary_key=True)
-    laboratoryId = db.Column(db.ForeignKey('Laboratory.laboratoryId'), index=True)
-    siteId = db.Column(db.Integer, index=True)
-    personUUID = db.Column(db.String(45))
-    familyName = db.Column(db.String(100), index=True)
-    givenName = db.Column(db.String(45))
-    title = db.Column(db.String(45))
-    emailAddress = db.Column(db.String(60))
-    phoneNumber = db.Column(db.String(45))
-    login = db.Column(db.String(45), unique=True)
-    faxNumber = db.Column(db.String(45))
-    recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time')
-    cache = db.Column(db.Text)
-    externalId = db.Column(db.BINARY(16))
-
-    Laboratory = db.relationship('Laboratory', primaryjoin='Person.laboratoryId == Laboratory.laboratoryId')
-    Project = db.relationship('Project', secondary='Project_has_Person')
-    UserGroup = db.relationship('UserGroup', secondary='UserGroup_has_Person')
-
-
-class Phasing(db.Model):
-    __tablename__ = 'Phasing'
-
-    phasingId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    phasingAnalysisId = db.Column(db.ForeignKey('PhasingAnalysis.phasingAnalysisId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related phasing analysis item')
-    phasingProgramRunId = db.Column(db.ForeignKey('PhasingProgramRun.phasingProgramRunId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related program item')
-    spaceGroupId = db.Column(db.ForeignKey('SpaceGroup.spaceGroupId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='Related spaceGroup')
-    method = db.Column(db.ENUM('solvent flattening', 'solvent flipping'), info='phasing method')
-    solventContent = db.Column(db.Float(asdecimal=True))
-    enantiomorph = db.Column(db.Integer, info='0 or 1')
-    lowRes = db.Column(db.Float(asdecimal=True))
-    highRes = db.Column(db.Float(asdecimal=True))
-    recordTimeStamp = db.Column(db.DateTime, server_default=db.FetchedValue())
-
-    PhasingAnalysi = db.relationship('PhasingAnalysi', primaryjoin='Phasing.phasingAnalysisId == PhasingAnalysi.phasingAnalysisId')
-    PhasingProgramRun = db.relationship('PhasingProgramRun', primaryjoin='Phasing.phasingProgramRunId == PhasingProgramRun.phasingProgramRunId')
-    SpaceGroup = db.relationship('SpaceGroup', primaryjoin='Phasing.spaceGroupId == SpaceGroup.spaceGroupId')
-
-
-class PhasingAnalysi(db.Model):
-    __tablename__ = 'PhasingAnalysis'
-
-    phasingAnalysisId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time')
-
-
-class PhasingProgramAttachment(db.Model):
-    __tablename__ = 'PhasingProgramAttachment'
-
-    phasingProgramAttachmentId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    phasingProgramRunId = db.Column(db.ForeignKey('PhasingProgramRun.phasingProgramRunId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related program item')
-    fileType = db.Column(db.ENUM('Map', 'Logfile', 'PDB', 'CSV', 'INS', 'RES', 'TXT'), info='file type')
-    fileName = db.Column(db.String(45), info='file name')
-    filePath = db.Column(db.String(255), info='file path')
-    recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time')
-
-    PhasingProgramRun = db.relationship('PhasingProgramRun', primaryjoin='PhasingProgramAttachment.phasingProgramRunId == PhasingProgramRun.phasingProgramRunId')
-
-
-class PhasingProgramRun(db.Model):
-    __tablename__ = 'PhasingProgramRun'
-
-    phasingProgramRunId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    phasingCommandLine = db.Column(db.String(255), info='Command line for phasing')
-    phasingPrograms = db.Column(db.String(255), info='Phasing programs (comma separated)')
-    phasingStatus = db.Column(db.Integer, info='success (1) / fail (0)')
-    phasingMessage = db.Column(db.String(255), info='warning, error,...')
-    phasingStartTime = db.Column(db.DateTime, info='Processing start time')
-    phasingEndTime = db.Column(db.DateTime, info='Processing end time')
-    phasingEnvironment = db.Column(db.String(255), info='Cpus, Nodes,...')
-    recordTimeStamp = db.Column(db.DateTime, server_default=db.FetchedValue())
-
-
-class PhasingStatistic(db.Model):
-    __tablename__ = 'PhasingStatistics'
-
-    phasingStatisticsId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    phasingHasScalingId1 = db.Column(db.ForeignKey('Phasing_has_Scaling.phasingHasScalingId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='the dataset in question')
-    phasingHasScalingId2 = db.Column(db.ForeignKey('Phasing_has_Scaling.phasingHasScalingId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='if this is MIT or MAD, which scaling are being compared, null otherwise')
-    phasingStepId = db.Column(db.ForeignKey('PhasingStep.phasingStepId'), index=True)
-    numberOfBins = db.Column(db.Integer, info='the total number of bins')
-    binNumber = db.Column(db.Integer, info='binNumber, 999 for overall')
-    lowRes = db.Column(db.Float(asdecimal=True), info='low resolution cutoff of this binfloat')
-    highRes = db.Column(db.Float(asdecimal=True), info='high resolution cutoff of this binfloat')
-    metric = db.Column(db.ENUM('Rcullis', 'Average Fragment Length', 'Chain Count', 'Residues Count', 'CC', 'PhasingPower', 'FOM', '', 'Best CC', 'CC(1/2)', 'Weak CC', 'CFOM', 'Pseudo_free_CC', 'CC of partial model'), info='metric')
-    statisticsValue = db.Column(db.Float(asdecimal=True), info='the statistics value')
-    nReflections = db.Column(db.Integer)
-    recordTimeStamp = db.Column(db.DateTime, server_default=db.FetchedValue())
-
-    Phasing_has_Scaling = db.relationship('PhasingHasScaling', primaryjoin='PhasingStatistic.phasingHasScalingId1 == PhasingHasScaling.phasingHasScalingId')
-    Phasing_has_Scaling1 = db.relationship('PhasingHasScaling', primaryjoin='PhasingStatistic.phasingHasScalingId2 == PhasingHasScaling.phasingHasScalingId')
-    PhasingStep = db.relationship('PhasingStep', primaryjoin='PhasingStatistic.phasingStepId == PhasingStep.phasingStepId')
-
-
-class PhasingStep(db.Model):
-    __tablename__ = 'PhasingStep'
-
-    phasingStepId = db.Column(db.Integer, primary_key=True)
-    previousPhasingStepId = db.Column(db.Integer)
-    programRunId = db.Column(db.ForeignKey('PhasingProgramRun.phasingProgramRunId'), index=True)
-    spaceGroupId = db.Column(db.ForeignKey('SpaceGroup.spaceGroupId'), index=True)
-    autoProcScalingId = db.Column(db.ForeignKey('AutoProcScaling.autoProcScalingId'), index=True)
-    phasingAnalysisId = db.Column(db.Integer, index=True)
-    phasingStepType = db.Column(db.ENUM('PREPARE', 'SUBSTRUCTUREDETERMINATION', 'PHASING', 'MODELBUILDING'))
-    method = db.Column(db.String(45))
-    solventContent = db.Column(db.String(45))
-    enantiomorph = db.Column(db.String(45))
-    lowRes = db.Column(db.String(45))
-    highRes = db.Column(db.String(45))
-    recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
-
-    AutoProcScaling = db.relationship('AutoProcScaling', primaryjoin='PhasingStep.autoProcScalingId == AutoProcScaling.autoProcScalingId')
-    PhasingProgramRun = db.relationship('PhasingProgramRun', primaryjoin='PhasingStep.programRunId == PhasingProgramRun.phasingProgramRunId')
-    SpaceGroup = db.relationship('SpaceGroup', primaryjoin='PhasingStep.spaceGroupId == SpaceGroup.spaceGroupId')
-
-
-class PhasingHasScaling(db.Model):
-    __tablename__ = 'Phasing_has_Scaling'
-
-    phasingHasScalingId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    phasingAnalysisId = db.Column(db.ForeignKey('PhasingAnalysis.phasingAnalysisId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related phasing analysis item')
-    autoProcScalingId = db.Column(db.ForeignKey('AutoProcScaling.autoProcScalingId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related autoProcScaling item')
-    datasetNumber = db.Column(db.Integer, info='serial number of the dataset and always reserve 0 for the reference')
-    recordTimeStamp = db.Column(db.DateTime, server_default=db.FetchedValue())
-
-    AutoProcScaling = db.relationship('AutoProcScaling', primaryjoin='PhasingHasScaling.autoProcScalingId == AutoProcScaling.autoProcScalingId')
-    PhasingAnalysi = db.relationship('PhasingAnalysi', primaryjoin='PhasingHasScaling.phasingAnalysisId == PhasingAnalysi.phasingAnalysisId')
-
-
-class PlateGroup(db.Model):
-    __tablename__ = 'PlateGroup'
-
-    plateGroupId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(255))
-    storageTemperature = db.Column(db.String(45))
-
-
-class PlateType(db.Model):
-    __tablename__ = 'PlateType'
-
-    PlateTypeId = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(45))
-    description = db.Column(db.String(45))
-    shape = db.Column(db.String(45))
-    rowCount = db.Column(db.Integer)
-    columnCount = db.Column(db.Integer)
-    experimentId = db.Column(db.Integer, index=True)
-
-
-class Position(db.Model):
-    __tablename__ = 'Position'
-
-    positionId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    relativePositionId = db.Column(db.ForeignKey('Position.positionId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='relative position, null otherwise')
-    posX = db.Column(db.Float(asdecimal=True))
-    posY = db.Column(db.Float(asdecimal=True))
-    posZ = db.Column(db.Float(asdecimal=True))
-    scale = db.Column(db.Float(asdecimal=True))
-    recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time')
-    X = db.Column(db.Float(asdecimal=True), server_default=db.FetchedValue())
-    Y = db.Column(db.Float(asdecimal=True), server_default=db.FetchedValue())
-    Z = db.Column(db.Float(asdecimal=True), server_default=db.FetchedValue())
-
-    parent = db.relationship('Position', remote_side=[positionId], primaryjoin='Position.relativePositionId == Position.positionId')
-
-
-class PreparePhasingDatum(db.Model):
-    __tablename__ = 'PreparePhasingData'
-
-    preparePhasingDataId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)')
-    phasingAnalysisId = db.Column(db.ForeignKey('PhasingAnalysis.phasingAnalysisId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related phasing analysis item')
-    phasingProgramRunId = db.Column(db.ForeignKey('PhasingProgramRun.phasingProgramRunId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related program item')
-    spaceGroupId = db.Column(db.ForeignKey('SpaceGroup.spaceGroupId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='Related spaceGroup')
-    lowRes = db.Column(db.Float(asdecimal=True))
-    highRes = db.Column(db.Float(asdecimal=True))
-    recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time')
-
-    PhasingAnalysi = db.relationship('PhasingAnalysi', primaryjoin='PreparePhasingDatum.phasingAnalysisId == PhasingAnalysi.phasingAnalysisId')
-    PhasingProgramRun = db.relationship('PhasingProgramRun', primaryjoin='PreparePhasingDatum.phasingProgramRunId == PhasingProgramRun.phasingProgramRunId')
-    SpaceGroup = db.relationship('SpaceGroup', primaryjoin='PreparePhasingDatum.spaceGroupId == SpaceGroup.spaceGroupId')
-
-
-class ProcessingJob(db.Model):
-    __tablename__ = 'ProcessingJob'
-
-    processingJobId = db.Column(db.Integer, primary_key=True)
-    dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId'), index=True)
-    displayName = db.Column(db.String(80), info='xia2, fast_dp, dimple, etc')
-    comments = db.Column(db.String(255), info='For users to annotate the job and see the motivation for the
job') - recordTimestamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='When job was submitted') - recipe = db.Column(db.String(50), info='What we want to run (xia, dimple, etc).') - automatic = db.Column(db.Integer, info='Whether this processing job was triggered automatically or not') - - DataCollection = db.relationship('DataCollection', primaryjoin='ProcessingJob.dataCollectionId == DataCollection.dataCollectionId') - - - -class ProcessingJobImageSweep(db.Model): - __tablename__ = 'ProcessingJobImageSweep' - - processingJobImageSweepId = db.Column(db.Integer, primary_key=True) - processingJobId = db.Column(db.ForeignKey('ProcessingJob.processingJobId'), index=True) - dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId'), index=True) - startImage = db.Column(db.Integer) - endImage = db.Column(db.Integer) - - DataCollection = db.relationship('DataCollection', primaryjoin='ProcessingJobImageSweep.dataCollectionId == DataCollection.dataCollectionId') - ProcessingJob = db.relationship('ProcessingJob', primaryjoin='ProcessingJobImageSweep.processingJobId == ProcessingJob.processingJobId') - - - -class ProcessingJobParameter(db.Model): - __tablename__ = 'ProcessingJobParameter' - - processingJobParameterId = db.Column(db.Integer, primary_key=True) - processingJobId = db.Column(db.ForeignKey('ProcessingJob.processingJobId'), index=True) - parameterKey = db.Column(db.String(80), info='E.g. resolution, spacegroup, pipeline') - parameterValue = db.Column(db.String(1024)) - - ProcessingJob = db.relationship('ProcessingJob', primaryjoin='ProcessingJobParameter.processingJobId == ProcessingJob.processingJobId') - - - -class Project(db.Model): - __tablename__ = 'Project' - - projectId = db.Column(db.Integer, primary_key=True) - personId = db.Column(db.ForeignKey('Person.personId'), index=True) - title = db.Column(db.String(200)) - acronym = db.Column(db.String(100)) - owner = db.Column(db.String(50)) - - Person = db.relationship('Person', primaryjoin='Project.personId == Person.personId') - Protein = db.relationship('Protein', secondary='Project_has_Protein') - BLSession = db.relationship('BLSession', secondary='Project_has_Session') - Shipping = db.relationship('Shipping', secondary='Project_has_Shipping') - XFEFluorescenceSpectrum = db.relationship('XFEFluorescenceSpectrum', secondary='Project_has_XFEFSpectrum') - - - -t_Project_has_BLSample = db.Table( - 'Project_has_BLSample', - db.Column('projectId', db.ForeignKey('Project.projectId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False), - db.Column('blSampleId', db.ForeignKey('BLSample.blSampleId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -t_Project_has_DCGroup = db.Table( - 'Project_has_DCGroup', - db.Column('projectId', db.ForeignKey('Project.projectId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False), - db.Column('dataCollectionGroupId', db.ForeignKey('DataCollectionGroup.dataCollectionGroupId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -t_Project_has_EnergyScan = db.Table( - 'Project_has_EnergyScan', - db.Column('projectId', db.ForeignKey('Project.projectId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False), - db.Column('energyScanId', db.ForeignKey('EnergyScan.energyScanId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -t_Project_has_Person = db.Table( 
- 'Project_has_Person', - db.Column('projectId', db.ForeignKey('Project.projectId', ondelete='CASCADE'), primary_key=True, nullable=False), - db.Column('personId', db.ForeignKey('Person.personId', ondelete='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -t_Project_has_Protein = db.Table( - 'Project_has_Protein', - db.Column('projectId', db.ForeignKey('Project.projectId', ondelete='CASCADE'), primary_key=True, nullable=False), - db.Column('proteinId', db.ForeignKey('Protein.proteinId', ondelete='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -t_Project_has_Session = db.Table( - 'Project_has_Session', - db.Column('projectId', db.ForeignKey('Project.projectId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False), - db.Column('sessionId', db.ForeignKey('BLSession.sessionId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -t_Project_has_Shipping = db.Table( - 'Project_has_Shipping', - db.Column('projectId', db.ForeignKey('Project.projectId', ondelete='CASCADE'), primary_key=True, nullable=False), - db.Column('shippingId', db.ForeignKey('Shipping.shippingId', ondelete='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -class ProjectHasUser(db.Model): - __tablename__ = 'Project_has_User' - - projecthasuserid = db.Column(db.Integer, primary_key=True) - projectid = db.Column(db.ForeignKey('Project.projectId'), nullable=False, index=True) - username = db.Column(db.String(15)) - - Project = db.relationship('Project', primaryjoin='ProjectHasUser.projectid == Project.projectId') - - - -t_Project_has_XFEFSpectrum = db.Table( - 'Project_has_XFEFSpectrum', - db.Column('projectId', db.ForeignKey('Project.projectId', ondelete='CASCADE'), primary_key=True, nullable=False), - db.Column('xfeFluorescenceSpectrumId', db.ForeignKey('XFEFluorescenceSpectrum.xfeFluorescenceSpectrumId', ondelete='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -class Proposal(db.Model): - __tablename__ = 'Proposal' - __table_args__ = ( - db.Index('Proposal_FKIndexCodeNumber', 'proposalCode', 'proposalNumber'), - ) - - proposalId = db.Column(db.Integer, primary_key=True) - personId = db.Column(db.ForeignKey('Person.personId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - title = db.Column(db.String(200)) - proposalCode = db.Column(db.String(45)) - proposalNumber = db.Column(db.String(45)) - bltimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - proposalType = db.Column(db.String(2), info='Proposal type: MX, BX') - externalId = db.Column(db.BINARY(16)) - state = db.Column(db.ENUM('Open', 'Closed', 'Cancelled'), server_default=db.FetchedValue()) - - Person = db.relationship('Person', primaryjoin='Proposal.personId == Person.personId') - - - -class ProposalHasPerson(db.Model): - __tablename__ = 'ProposalHasPerson' - - proposalHasPersonId = db.Column(db.Integer, primary_key=True) - proposalId = db.Column(db.ForeignKey('Proposal.proposalId'), nullable=False, index=True) - personId = db.Column(db.ForeignKey('Person.personId'), nullable=False, index=True) - role = db.Column(db.ENUM('Co-Investigator', 'Principal Investigator', 'Alternate Contact')) - - Person = db.relationship('Person', primaryjoin='ProposalHasPerson.personId == Person.personId') - Proposal = db.relationship('Proposal', primaryjoin='ProposalHasPerson.proposalId == Proposal.proposalId') - - - -class Protein(db.Model): - __tablename__ = 
'Protein' - __table_args__ = ( - db.Index('ProteinAcronym_Index', 'proposalId', 'acronym'), - ) - - proteinId = db.Column(db.Integer, primary_key=True) - proposalId = db.Column(db.ForeignKey('Proposal.proposalId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - name = db.Column(db.String(255)) - acronym = db.Column(db.String(45), index=True) - molecularMass = db.Column(db.Float(asdecimal=True)) - proteinType = db.Column(db.String(45)) - personId = db.Column(db.Integer, index=True) - bltimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - isCreatedBySampleSheet = db.Column(db.Integer, server_default=db.FetchedValue()) - sequence = db.Column(db.Text) - MOD_ID = db.Column(db.String(20)) - componentTypeId = db.Column(db.ForeignKey('ComponentType.componentTypeId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - concentrationTypeId = db.Column(db.ForeignKey('ConcentrationType.concentrationTypeId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - _global = db.Column('global', db.Integer, server_default=db.FetchedValue()) - externalId = db.Column(db.BINARY(16)) - density = db.Column(db.Float) - abundance = db.Column(db.Float, info='Deprecated') - - ComponentType = db.relationship('ComponentType', primaryjoin='Protein.componentTypeId == ComponentType.componentTypeId') - ConcentrationType = db.relationship('ConcentrationType', primaryjoin='Protein.concentrationTypeId == ConcentrationType.concentrationTypeId') - Proposal = db.relationship('Proposal', primaryjoin='Protein.proposalId == Proposal.proposalId') - ComponentSubType = db.relationship('ComponentSubType', secondary='Component_has_SubType') - - - -class ProteinHasPDB(db.Model): - __tablename__ = 'Protein_has_PDB' - - proteinhaspdbid = db.Column(db.Integer, primary_key=True) - proteinid = db.Column(db.ForeignKey('Protein.proteinId'), nullable=False, index=True) - pdbid = db.Column(db.ForeignKey('PDB.pdbId'), nullable=False, index=True) - - PDB = db.relationship('PDB', primaryjoin='ProteinHasPDB.pdbid == PDB.pdbId') - Protein = db.relationship('Protein', primaryjoin='ProteinHasPDB.proteinid == Protein.proteinId') - - - -class Reprocessing(db.Model): - __tablename__ = 'Reprocessing' - - reprocessingId = db.Column(db.Integer, primary_key=True) - dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId'), index=True) - displayName = db.Column(db.String(80), info='xia2, fast_dp, dimple, etc') - comments = db.Column(db.String(255), info='For users to annotate the job and see the motivation for the job') - recordTimestamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='When job was submitted') - recipe = db.Column(db.String(50), info='What we want to run (xia, dimple, etc) ') - automatic = db.Column(db.Integer, info='Whether this processing was triggered automatically or not') - - DataCollection = db.relationship('DataCollection', primaryjoin='Reprocessing.dataCollectionId == DataCollection.dataCollectionId') - - - -class ReprocessingImageSweep(db.Model): - __tablename__ = 'ReprocessingImageSweep' - - reprocessingImageSweepId = db.Column(db.Integer, primary_key=True) - reprocessingId = db.Column(db.ForeignKey('Reprocessing.reprocessingId'), index=True) - dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId'), index=True) - startImage = db.Column(db.Integer) - endImage = db.Column(db.Integer) - - DataCollection = db.relationship('DataCollection', 
primaryjoin='ReprocessingImageSweep.dataCollectionId == DataCollection.dataCollectionId') - Reprocessing = db.relationship('Reprocessing', primaryjoin='ReprocessingImageSweep.reprocessingId == Reprocessing.reprocessingId') - - - -class ReprocessingParameter(db.Model): - __tablename__ = 'ReprocessingParameter' - - reprocessingParameterId = db.Column(db.Integer, primary_key=True) - reprocessingId = db.Column(db.ForeignKey('Reprocessing.reprocessingId'), index=True) - parameterKey = db.Column(db.String(80), info='E.g. resolution, spacegroup, pipeline') - parameterValue = db.Column(db.String(255)) - - Reprocessing = db.relationship('Reprocessing', primaryjoin='ReprocessingParameter.reprocessingId == Reprocessing.reprocessingId') - - - -class RobotAction(db.Model): - __tablename__ = 'RobotAction' - - robotActionId = db.Column(db.Integer, primary_key=True) - blsessionId = db.Column(db.ForeignKey('BLSession.sessionId'), nullable=False, index=True) - blsampleId = db.Column(db.ForeignKey('BLSample.blSampleId'), index=True) - actionType = db.Column(db.ENUM('LOAD', 'UNLOAD', 'DISPOSE', 'STORE', 'WASH', 'ANNEAL')) - startTimestamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - endTimestamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - status = db.Column(db.ENUM('SUCCESS', 'ERROR', 'CRITICAL', 'WARNING', 'EPICSFAIL', 'COMMANDNOTSENT')) - message = db.Column(db.String(255)) - containerLocation = db.Column(db.SmallInteger) - dewarLocation = db.Column(db.SmallInteger) - sampleBarcode = db.Column(db.String(45)) - xtalSnapshotBefore = db.Column(db.String(255)) - xtalSnapshotAfter = db.Column(db.String(255)) - - BLSample = db.relationship('BLSample', primaryjoin='RobotAction.blsampleId == BLSample.blSampleId') - BLSession = db.relationship('BLSession', primaryjoin='RobotAction.blsessionId == BLSession.sessionId') - - - -class Run(db.Model): - __tablename__ = 'Run' - - runId = db.Column(db.Integer, primary_key=True) - timePerFrame = db.Column(db.String(45)) - timeStart = db.Column(db.String(45)) - timeEnd = db.Column(db.String(45)) - storageTemperature = db.Column(db.String(45)) - exposureTemperature = db.Column(db.String(45)) - spectrophotometer = db.Column(db.String(45)) - energy = db.Column(db.String(45)) - creationDate = db.Column(db.DateTime) - frameAverage = db.Column(db.String(45)) - frameCount = db.Column(db.String(45)) - transmission = db.Column(db.String(45)) - beamCenterX = db.Column(db.String(45)) - beamCenterY = db.Column(db.String(45)) - pixelSizeX = db.Column(db.String(45)) - pixelSizeY = db.Column(db.String(45)) - radiationRelative = db.Column(db.String(45)) - radiationAbsolute = db.Column(db.String(45)) - normalization = db.Column(db.String(45)) - - - -t_SAFETYREQUEST = db.Table( - 'SAFETYREQUEST', - db.Column('SAFETYREQUESTID', db.Numeric(10, 0)), - db.Column('XMLDOCUMENTID', db.Numeric(10, 0)), - db.Column('PROTEINID', db.Numeric(10, 0)), - db.Column('PROJECTCODE', db.String(45)), - db.Column('SUBMISSIONDATE', db.DateTime), - db.Column('RESPONSE', db.Numeric(3, 0)), - db.Column('REPONSEDATE', db.DateTime), - db.Column('RESPONSEDETAILS', db.String(255)) -) - - - -class SAMPLECELL(db.Model): - __tablename__ = 'SAMPLECELL' - - SAMPLECELLID = db.Column(db.Integer, primary_key=True) - SAMPLEEXPOSUREUNITID = db.Column(db.Integer) - ID = db.Column(db.String(45)) - NAME = db.Column(db.String(45)) - DIAMETER = db.Column(db.String(45)) - MATERIAL = db.Column(db.String(45)) - - - -class SAMPLEEXPOSUREUNIT(db.Model): - __tablename__ = 
'SAMPLEEXPOSUREUNIT' - - SAMPLEEXPOSUREUNITID = db.Column(db.Integer, primary_key=True) - ID = db.Column(db.String(45)) - PATHLENGTH = db.Column(db.String(45)) - VOLUME = db.Column(db.String(45)) - - - -class SAXSDATACOLLECTIONGROUP(db.Model): - __tablename__ = 'SAXSDATACOLLECTIONGROUP' - - DATACOLLECTIONGROUPID = db.Column(db.Integer, primary_key=True) - DEFAULTDATAACQUISITIONID = db.Column(db.Integer) - SAXSDATACOLLECTIONARRAYID = db.Column(db.Integer) - - - -class SWOnceToken(db.Model): - __tablename__ = 'SW_onceToken' - - onceTokenId = db.Column(db.Integer, primary_key=True) - token = db.Column(db.String(128)) - personId = db.Column(db.ForeignKey('Person.personId'), index=True) - proposalId = db.Column(db.ForeignKey('Proposal.proposalId'), index=True) - validity = db.Column(db.String(200)) - recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - - Person = db.relationship('Person', primaryjoin='SWOnceToken.personId == Person.personId') - Proposal = db.relationship('Proposal', primaryjoin='SWOnceToken.proposalId == Proposal.proposalId') - - - -class SafetyLevel(db.Model): - __tablename__ = 'SafetyLevel' - - safetyLevelId = db.Column(db.Integer, primary_key=True) - code = db.Column(db.String(45)) - description = db.Column(db.String(45)) - - - -class SamplePlate(db.Model): - __tablename__ = 'SamplePlate' - - samplePlateId = db.Column(db.Integer, primary_key=True) - BLSESSIONID = db.Column(db.Integer) - plateGroupId = db.Column(db.ForeignKey('PlateGroup.plateGroupId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - plateTypeId = db.Column(db.ForeignKey('PlateType.PlateTypeId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - instructionSetId = db.Column(db.ForeignKey('InstructionSet.instructionSetId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - boxId = db.Column(db.Integer) - name = db.Column(db.String(45)) - slotPositionRow = db.Column(db.String(45)) - slotPositionColumn = db.Column(db.String(45)) - storageTemperature = db.Column(db.String(45)) - experimentId = db.Column(db.ForeignKey('Experiment.experimentId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - - Experiment = db.relationship('Experiment', primaryjoin='SamplePlate.experimentId == Experiment.experimentId') - InstructionSet = db.relationship('InstructionSet', primaryjoin='SamplePlate.instructionSetId == InstructionSet.instructionSetId') - PlateGroup = db.relationship('PlateGroup', primaryjoin='SamplePlate.plateGroupId == PlateGroup.plateGroupId') - PlateType = db.relationship('PlateType', primaryjoin='SamplePlate.plateTypeId == PlateType.PlateTypeId') - - - -class SamplePlatePosition(db.Model): - __tablename__ = 'SamplePlatePosition' - - samplePlatePositionId = db.Column(db.Integer, primary_key=True) - samplePlateId = db.Column(db.ForeignKey('SamplePlate.samplePlateId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - rowNumber = db.Column(db.Integer) - columnNumber = db.Column(db.Integer) - volume = db.Column(db.String(45)) - - SamplePlate = db.relationship('SamplePlate', primaryjoin='SamplePlatePosition.samplePlateId == SamplePlate.samplePlateId') - - - -class SaxsDataCollection(db.Model): - __tablename__ = 'SaxsDataCollection' - - dataCollectionId = db.Column(db.Integer, primary_key=True) - BLSESSIONID = db.Column(db.Integer) - experimentId = db.Column(db.ForeignKey('Experiment.experimentId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - comments = db.Column(db.String(5120)) - - Experiment = 
db.relationship('Experiment', primaryjoin='SaxsDataCollection.experimentId == Experiment.experimentId') - - - -class ScanParametersModel(db.Model): - __tablename__ = 'ScanParametersModel' - - scanParametersModelId = db.Column(db.Integer, primary_key=True) - scanParametersServiceId = db.Column(db.ForeignKey('ScanParametersService.scanParametersServiceId', onupdate='CASCADE'), index=True) - dataCollectionPlanId = db.Column(db.ForeignKey('DiffractionPlan.diffractionPlanId', onupdate='CASCADE'), index=True) - sequenceNumber = db.Column(db.Integer) - start = db.Column(db.Float(asdecimal=True)) - stop = db.Column(db.Float(asdecimal=True)) - step = db.Column(db.Float(asdecimal=True)) - array = db.Column(db.Text) - duration = db.Column(db.Integer, info='Duration for parameter change in seconds') - - DiffractionPlan = db.relationship('DiffractionPlan', primaryjoin='ScanParametersModel.dataCollectionPlanId == DiffractionPlan.diffractionPlanId') - ScanParametersService = db.relationship('ScanParametersService', primaryjoin='ScanParametersModel.scanParametersServiceId == ScanParametersService.scanParametersServiceId') - - - -class ScanParametersService(db.Model): - __tablename__ = 'ScanParametersService' - - scanParametersServiceId = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(45)) - description = db.Column(db.String(45)) - - - -class Schedule(db.Model): - __tablename__ = 'Schedule' - - scheduleId = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(45)) - - - -class ScheduleComponent(db.Model): - __tablename__ = 'ScheduleComponent' - - scheduleComponentId = db.Column(db.Integer, primary_key=True) - scheduleId = db.Column(db.ForeignKey('Schedule.scheduleId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - offset_hours = db.Column(db.Integer) - inspectionTypeId = db.Column(db.ForeignKey('InspectionType.inspectionTypeId', ondelete='CASCADE'), index=True) - - InspectionType = db.relationship('InspectionType', primaryjoin='ScheduleComponent.inspectionTypeId == InspectionType.inspectionTypeId') - Schedule = db.relationship('Schedule', primaryjoin='ScheduleComponent.scheduleId == Schedule.scheduleId') - - - -class SchemaStatu(db.Model): - __tablename__ = 'SchemaStatus' - - schemaStatusId = db.Column(db.Integer, primary_key=True) - scriptName = db.Column(db.String(100), nullable=False, unique=True) - schemaStatus = db.Column(db.String(10)) - recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - - - -class Screen(db.Model): - __tablename__ = 'Screen' - - screenId = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(45)) - proposalId = db.Column(db.ForeignKey('Proposal.proposalId'), index=True) - _global = db.Column('global', db.Integer) - - Proposal = db.relationship('Proposal', primaryjoin='Screen.proposalId == Proposal.proposalId') - - - -class ScreenComponent(db.Model): - __tablename__ = 'ScreenComponent' - - screenComponentId = db.Column(db.Integer, primary_key=True) - screenComponentGroupId = db.Column(db.ForeignKey('ScreenComponentGroup.screenComponentGroupId'), nullable=False, index=True) - componentId = db.Column(db.ForeignKey('Protein.proteinId'), index=True) - concentration = db.Column(db.Float) - pH = db.Column(db.Float) - - Protein = db.relationship('Protein', primaryjoin='ScreenComponent.componentId == Protein.proteinId') - ScreenComponentGroup = db.relationship('ScreenComponentGroup', primaryjoin='ScreenComponent.screenComponentGroupId == 
ScreenComponentGroup.screenComponentGroupId') - - - -class ScreenComponentGroup(db.Model): - __tablename__ = 'ScreenComponentGroup' - - screenComponentGroupId = db.Column(db.Integer, primary_key=True) - screenId = db.Column(db.ForeignKey('Screen.screenId'), nullable=False, index=True) - position = db.Column(db.SmallInteger) - - Screen = db.relationship('Screen', primaryjoin='ScreenComponentGroup.screenId == Screen.screenId') - - - -class Screening(db.Model): - __tablename__ = 'Screening' - - screeningId = db.Column(db.Integer, primary_key=True) - dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - bltimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - programVersion = db.Column(db.String(45)) - comments = db.Column(db.String(255)) - shortComments = db.Column(db.String(20)) - diffractionPlanId = db.Column(db.Integer, index=True, info='references DiffractionPlan') - dataCollectionGroupId = db.Column(db.ForeignKey('DataCollectionGroup.dataCollectionGroupId'), index=True) - xmlSampleInformation = db.Column(db.LONGBLOB) - - DataCollectionGroup = db.relationship('DataCollectionGroup', primaryjoin='Screening.dataCollectionGroupId == DataCollectionGroup.dataCollectionGroupId') - DataCollection = db.relationship('DataCollection', primaryjoin='Screening.dataCollectionId == DataCollection.dataCollectionId') - - - -class ScreeningInput(db.Model): - __tablename__ = 'ScreeningInput' - - screeningInputId = db.Column(db.Integer, primary_key=True) - screeningId = db.Column(db.ForeignKey('Screening.screeningId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - beamX = db.Column(db.Float) - beamY = db.Column(db.Float) - rmsErrorLimits = db.Column(db.Float) - minimumFractionIndexed = db.Column(db.Float) - maximumFractionRejected = db.Column(db.Float) - minimumSignalToNoise = db.Column(db.Float) - diffractionPlanId = db.Column(db.Integer, info='references DiffractionPlan table') - xmlSampleInformation = db.Column(db.LONGBLOB) - - Screening = db.relationship('Screening', primaryjoin='ScreeningInput.screeningId == Screening.screeningId') - - - -class ScreeningOutput(db.Model): - __tablename__ = 'ScreeningOutput' - - screeningOutputId = db.Column(db.Integer, primary_key=True) - screeningId = db.Column(db.ForeignKey('Screening.screeningId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - statusDescription = db.Column(db.String(1024)) - rejectedReflections = db.Column(db.Integer) - resolutionObtained = db.Column(db.Float) - spotDeviationR = db.Column(db.Float) - spotDeviationTheta = db.Column(db.Float) - beamShiftX = db.Column(db.Float) - beamShiftY = db.Column(db.Float) - numSpotsFound = db.Column(db.Integer) - numSpotsUsed = db.Column(db.Integer) - numSpotsRejected = db.Column(db.Integer) - mosaicity = db.Column(db.Float) - iOverSigma = db.Column(db.Float) - diffractionRings = db.Column(db.Integer) - SCREENINGSUCCESS = db.Column(db.Integer, server_default=db.FetchedValue(), info='Column to be deleted') - mosaicityEstimated = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue()) - rankingResolution = db.Column(db.Float(asdecimal=True)) - program = db.Column(db.String(45)) - doseTotal = db.Column(db.Float(asdecimal=True)) - totalExposureTime = db.Column(db.Float(asdecimal=True)) - totalRotationRange = db.Column(db.Float(asdecimal=True)) - totalNumberOfImages = 
db.Column(db.Integer) - rFriedel = db.Column(db.Float(asdecimal=True)) - indexingSuccess = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue()) - strategySuccess = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue()) - alignmentSuccess = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue()) - - Screening = db.relationship('Screening', primaryjoin='ScreeningOutput.screeningId == Screening.screeningId') - - - -class ScreeningOutputLattice(db.Model): - __tablename__ = 'ScreeningOutputLattice' - - screeningOutputLatticeId = db.Column(db.Integer, primary_key=True) - screeningOutputId = db.Column(db.ForeignKey('ScreeningOutput.screeningOutputId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - spaceGroup = db.Column(db.String(45)) - pointGroup = db.Column(db.String(45)) - bravaisLattice = db.Column(db.String(45)) - rawOrientationMatrix_a_x = db.Column(db.Float) - rawOrientationMatrix_a_y = db.Column(db.Float) - rawOrientationMatrix_a_z = db.Column(db.Float) - rawOrientationMatrix_b_x = db.Column(db.Float) - rawOrientationMatrix_b_y = db.Column(db.Float) - rawOrientationMatrix_b_z = db.Column(db.Float) - rawOrientationMatrix_c_x = db.Column(db.Float) - rawOrientationMatrix_c_y = db.Column(db.Float) - rawOrientationMatrix_c_z = db.Column(db.Float) - unitCell_a = db.Column(db.Float) - unitCell_b = db.Column(db.Float) - unitCell_c = db.Column(db.Float) - unitCell_alpha = db.Column(db.Float) - unitCell_beta = db.Column(db.Float) - unitCell_gamma = db.Column(db.Float) - bltimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - labelitIndexing = db.Column(db.Integer, server_default=db.FetchedValue()) - - ScreeningOutput = db.relationship('ScreeningOutput', primaryjoin='ScreeningOutputLattice.screeningOutputId == ScreeningOutput.screeningOutputId') - - - -class ScreeningRank(db.Model): - __tablename__ = 'ScreeningRank' - - screeningRankId = db.Column(db.Integer, primary_key=True) - screeningRankSetId = db.Column(db.ForeignKey('ScreeningRankSet.screeningRankSetId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - screeningId = db.Column(db.ForeignKey('Screening.screeningId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - rankValue = db.Column(db.Float) - rankInformation = db.Column(db.String(1024)) - - Screening = db.relationship('Screening', primaryjoin='ScreeningRank.screeningId == Screening.screeningId') - ScreeningRankSet = db.relationship('ScreeningRankSet', primaryjoin='ScreeningRank.screeningRankSetId == ScreeningRankSet.screeningRankSetId') - - - -class ScreeningRankSet(db.Model): - __tablename__ = 'ScreeningRankSet' - - screeningRankSetId = db.Column(db.Integer, primary_key=True) - rankEngine = db.Column(db.String(255)) - rankingProjectFileName = db.Column(db.String(255)) - rankingSummaryFileName = db.Column(db.String(255)) - - - -class ScreeningStrategy(db.Model): - __tablename__ = 'ScreeningStrategy' - - screeningStrategyId = db.Column(db.Integer, primary_key=True) - screeningOutputId = db.Column(db.ForeignKey('ScreeningOutput.screeningOutputId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - phiStart = db.Column(db.Float) - phiEnd = db.Column(db.Float) - rotation = db.Column(db.Float) - exposureTime = db.Column(db.Float) - resolution = db.Column(db.Float) - completeness = 
db.Column(db.Float) - multiplicity = db.Column(db.Float) - anomalous = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue()) - program = db.Column(db.String(45)) - rankingResolution = db.Column(db.Float) - transmission = db.Column(db.Float, info='Transmission for the strategy as given by the strategy program.') - - ScreeningOutput = db.relationship('ScreeningOutput', primaryjoin='ScreeningStrategy.screeningOutputId == ScreeningOutput.screeningOutputId') - - - -class ScreeningStrategySubWedge(db.Model): - __tablename__ = 'ScreeningStrategySubWedge' - - screeningStrategySubWedgeId = db.Column(db.Integer, primary_key=True, info='Primary key') - screeningStrategyWedgeId = db.Column(db.ForeignKey('ScreeningStrategyWedge.screeningStrategyWedgeId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='Foreign key to parent table') - subWedgeNumber = db.Column(db.Integer, info='The number of this subwedge within the wedge') - rotationAxis = db.Column(db.String(45), info='Angle where subwedge starts') - axisStart = db.Column(db.Float, info='Angle where subwedge ends') - axisEnd = db.Column(db.Float, info='Exposure time for subwedge') - exposureTime = db.Column(db.Float, info='Transmission for subwedge') - transmission = db.Column(db.Float) - oscillationRange = db.Column(db.Float) - completeness = db.Column(db.Float) - multiplicity = db.Column(db.Float) - RESOLUTION = db.Column(db.Float) - doseTotal = db.Column(db.Float, info='Total dose for this subwedge') - numberOfImages = db.Column(db.Integer, info='Number of images for this subwedge') - comments = db.Column(db.String(255)) - - ScreeningStrategyWedge = db.relationship('ScreeningStrategyWedge', primaryjoin='ScreeningStrategySubWedge.screeningStrategyWedgeId == ScreeningStrategyWedge.screeningStrategyWedgeId') - - - -class ScreeningStrategyWedge(db.Model): - __tablename__ = 'ScreeningStrategyWedge' - - screeningStrategyWedgeId = db.Column(db.Integer, primary_key=True, info='Primary key') - screeningStrategyId = db.Column(db.ForeignKey('ScreeningStrategy.screeningStrategyId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='Foreign key to parent table') - wedgeNumber = db.Column(db.Integer, info='The number of this wedge within the strategy') - resolution = db.Column(db.Float) - completeness = db.Column(db.Float) - multiplicity = db.Column(db.Float) - doseTotal = db.Column(db.Float, info='Total dose for this wedge') - numberOfImages = db.Column(db.Integer, info='Number of images for this wedge') - phi = db.Column(db.Float) - kappa = db.Column(db.Float) - chi = db.Column(db.Float) - comments = db.Column(db.String(255)) - wavelength = db.Column(db.Float(asdecimal=True)) - - ScreeningStrategy = db.relationship('ScreeningStrategy', primaryjoin='ScreeningStrategyWedge.screeningStrategyId == ScreeningStrategy.screeningStrategyId') - - - -class SessionType(db.Model): - __tablename__ = 'SessionType' - - sessionTypeId = db.Column(db.Integer, primary_key=True) - sessionId = db.Column(db.ForeignKey('BLSession.sessionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - typeName = db.Column(db.String(31), nullable=False) - - BLSession = db.relationship('BLSession', primaryjoin='SessionType.sessionId == BLSession.sessionId') - - - -class SessionHasPerson(db.Model): - __tablename__ = 'Session_has_Person' - - sessionId = db.Column(db.ForeignKey('BLSession.sessionId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True, server_default=db.FetchedValue()) - personId = 
db.Column(db.ForeignKey('Person.personId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True, server_default=db.FetchedValue()) - role = db.Column(db.ENUM('Local Contact', 'Local Contact 2', 'Staff', 'Team Leader', 'Co-Investigator', 'Principal Investigator', 'Alternate Contact', 'Data Access', 'Team Member')) - remote = db.Column(db.Integer, server_default=db.FetchedValue()) - - Person = db.relationship('Person', primaryjoin='SessionHasPerson.personId == Person.personId') - BLSession = db.relationship('BLSession', primaryjoin='SessionHasPerson.sessionId == BLSession.sessionId') - - - -class Shipping(db.Model): - __tablename__ = 'Shipping' - - shippingId = db.Column(db.Integer, primary_key=True) - proposalId = db.Column(db.ForeignKey('Proposal.proposalId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, server_default=db.FetchedValue()) - shippingName = db.Column(db.String(45), index=True) - deliveryAgent_agentName = db.Column(db.String(45)) - deliveryAgent_shippingDate = db.Column(db.Date) - deliveryAgent_deliveryDate = db.Column(db.Date) - deliveryAgent_agentCode = db.Column(db.String(45)) - deliveryAgent_flightCode = db.Column(db.String(45)) - shippingStatus = db.Column(db.String(45), index=True) - bltimeStamp = db.Column(db.DateTime) - laboratoryId = db.Column(db.Integer, index=True) - isStorageShipping = db.Column(db.Integer, server_default=db.FetchedValue()) - creationDate = db.Column(db.DateTime, index=True) - comments = db.Column(db.String(255)) - sendingLabContactId = db.Column(db.ForeignKey('LabContact.labContactId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - returnLabContactId = db.Column(db.ForeignKey('LabContact.labContactId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - returnCourier = db.Column(db.String(45)) - dateOfShippingToUser = db.Column(db.DateTime) - shippingType = db.Column(db.String(45)) - SAFETYLEVEL = db.Column(db.String(8)) - deliveryAgent_flightCodeTimestamp = db.Column(db.DateTime, info='Date flight code created, if automatic') - deliveryAgent_label = db.Column(db.Text, info='Base64 encoded pdf of airway label') - readyByTime = db.Column(db.Time, info='Time shipment will be ready') - closeTime = db.Column(db.Time, info='Time after which shipment cannot be picked up') - physicalLocation = db.Column(db.String(50), info='Where shipment can be picked up from: i.e. 
Stores') - deliveryAgent_pickupConfirmationTimestamp = db.Column(db.DateTime, info='Date picked confirmed') - deliveryAgent_pickupConfirmation = db.Column(db.String(10), info='Confirmation number of requested pickup') - deliveryAgent_readyByTime = db.Column(db.Time, info='Confirmed ready-by time') - deliveryAgent_callinTime = db.Column(db.Time, info='Confirmed courier call-in time') - deliveryAgent_productcode = db.Column(db.String(10), info='A code that identifies which shipment service was used') - deliveryAgent_flightCodePersonId = db.Column(db.ForeignKey('Person.personId'), index=True, info='The person who created the AWB (for auditing)') - - Person = db.relationship('Person', primaryjoin='Shipping.deliveryAgent_flightCodePersonId == Person.personId') - Proposal = db.relationship('Proposal', primaryjoin='Shipping.proposalId == Proposal.proposalId') - LabContact = db.relationship('LabContact', primaryjoin='Shipping.returnLabContactId == LabContact.labContactId') - LabContact1 = db.relationship('LabContact', primaryjoin='Shipping.sendingLabContactId == LabContact.labContactId') - - - -t_ShippingHasSession = db.Table( - 'ShippingHasSession', - db.Column('shippingId', db.ForeignKey('Shipping.shippingId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True), - db.Column('sessionId', db.ForeignKey('BLSession.sessionId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -class Sleeve(db.Model): - __tablename__ = 'Sleeve' - - sleeveId = db.Column(db.Integer, primary_key=True, info='The unique sleeve id 1...255 which also identifies its home location in the freezer') - location = db.Column(db.Integer, info='NULL == freezer, 1...255 for local storage locations') - lastMovedToFreezer = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue()) - lastMovedFromFreezer = db.Column(db.DateTime, server_default=db.FetchedValue()) - - - -class SpaceGroup(db.Model): - __tablename__ = 'SpaceGroup' - - spaceGroupId = db.Column(db.Integer, primary_key=True, info='Primary key') - spaceGroupNumber = db.Column(db.Integer, info='ccp4 number pr IUCR') - spaceGroupShortName = db.Column(db.String(45), index=True, info='short name without blank') - spaceGroupName = db.Column(db.String(45), info='verbose name') - bravaisLattice = db.Column(db.String(45), info='short name') - bravaisLatticeName = db.Column(db.String(45), info='verbose name') - pointGroup = db.Column(db.String(45), info='point group') - geometryClassnameId = db.Column(db.ForeignKey('GeometryClassname.geometryClassnameId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - MX_used = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue(), info='1 if used in the crystal form') - - GeometryClassname = db.relationship('GeometryClassname', primaryjoin='SpaceGroup.geometryClassnameId == GeometryClassname.geometryClassnameId') - - - -class Speciman(db.Model): - __tablename__ = 'Specimen' - - specimenId = db.Column(db.Integer, primary_key=True) - BLSESSIONID = db.Column(db.Integer) - bufferId = db.Column(db.ForeignKey('Buffer.bufferId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - macromoleculeId = db.Column(db.ForeignKey('Macromolecule.macromoleculeId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - samplePlatePositionId = db.Column(db.ForeignKey('SamplePlatePosition.samplePlatePositionId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - safetyLevelId = db.Column(db.ForeignKey('SafetyLevel.safetyLevelId', 
ondelete='CASCADE', onupdate='CASCADE'), index=True) - stockSolutionId = db.Column(db.ForeignKey('StockSolution.stockSolutionId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - code = db.Column(db.String(255)) - concentration = db.Column(db.String(45)) - volume = db.Column(db.String(45)) - experimentId = db.Column(db.ForeignKey('Experiment.experimentId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - comments = db.Column(db.String(5120)) - - Buffer = db.relationship('Buffer', primaryjoin='Speciman.bufferId == Buffer.bufferId') - Experiment = db.relationship('Experiment', primaryjoin='Speciman.experimentId == Experiment.experimentId') - Macromolecule = db.relationship('Macromolecule', primaryjoin='Speciman.macromoleculeId == Macromolecule.macromoleculeId') - SafetyLevel = db.relationship('SafetyLevel', primaryjoin='Speciman.safetyLevelId == SafetyLevel.safetyLevelId') - SamplePlatePosition = db.relationship('SamplePlatePosition', primaryjoin='Speciman.samplePlatePositionId == SamplePlatePosition.samplePlatePositionId') - StockSolution = db.relationship('StockSolution', primaryjoin='Speciman.stockSolutionId == StockSolution.stockSolutionId') - - - -class StockSolution(db.Model): - __tablename__ = 'StockSolution' - - stockSolutionId = db.Column(db.Integer, primary_key=True) - BLSESSIONID = db.Column(db.Integer) - bufferId = db.Column(db.ForeignKey('Buffer.bufferId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - macromoleculeId = db.Column(db.ForeignKey('Macromolecule.macromoleculeId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - instructionSetId = db.Column(db.ForeignKey('InstructionSet.instructionSetId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - boxId = db.Column(db.Integer) - name = db.Column(db.String(45)) - storageTemperature = db.Column(db.String(55)) - volume = db.Column(db.String(55)) - concentration = db.Column(db.String(55)) - comments = db.Column(db.String(255)) - proposalId = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue()) - - Buffer = db.relationship('Buffer', primaryjoin='StockSolution.bufferId == Buffer.bufferId') - InstructionSet = db.relationship('InstructionSet', primaryjoin='StockSolution.instructionSetId == InstructionSet.instructionSetId') - Macromolecule = db.relationship('Macromolecule', primaryjoin='StockSolution.macromoleculeId == Macromolecule.macromoleculeId') - - - -class Stoichiometry(db.Model): - __tablename__ = 'Stoichiometry' - - stoichiometryId = db.Column(db.Integer, primary_key=True) - hostMacromoleculeId = db.Column(db.ForeignKey('Macromolecule.macromoleculeId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - macromoleculeId = db.Column(db.ForeignKey('Macromolecule.macromoleculeId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - ratio = db.Column(db.String(45)) - - Macromolecule = db.relationship('Macromolecule', primaryjoin='Stoichiometry.hostMacromoleculeId == Macromolecule.macromoleculeId') - Macromolecule1 = db.relationship('Macromolecule', primaryjoin='Stoichiometry.macromoleculeId == Macromolecule.macromoleculeId') - - - -class Structure(db.Model): - __tablename__ = 'Structure' - - structureId = db.Column(db.Integer, primary_key=True) - macromoleculeId = db.Column(db.ForeignKey('Macromolecule.macromoleculeId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - PDB = db.Column(db.String(45)) - structureType = db.Column(db.String(45)) - fromResiduesBases = db.Column(db.String(45)) - 
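server_default=db.FetchedValue(), used throughout these models (e.g. StockSolution.proposalId just above), declares that the database itself populates the column, so the ORM omits it from the INSERT and reads the generated value back afterwards. A minimal sketch under that assumption; the table, column and DSN below are illustrative, not part of this schema:

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "mysql://user:pass@localhost/ispyb"  # placeholder DSN
db = SQLAlchemy(app)


class AuditedThing(db.Model):  # illustrative table, not part of this diff
    __tablename__ = "AuditedThing"
    auditedThingId = db.Column(db.Integer, primary_key=True)
    # FetchedValue emits no DDL of its own; it assumes the pre-existing table
    # already declares something like DEFAULT CURRENT_TIMESTAMP here.
    recordTimeStamp = db.Column(
        db.DateTime, nullable=False, server_default=db.FetchedValue()
    )


with app.app_context():
    thing = AuditedThing()
    db.session.add(thing)
    db.session.commit()
    # The INSERT omitted recordTimeStamp; SQLAlchemy marks it expired and
    # loads the server-generated value on first access.
    print(thing.recordTimeStamp)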
toResiduesBases = db.Column(db.String(45)) - sequence = db.Column(db.String(45)) - - Macromolecule = db.relationship('Macromolecule', primaryjoin='Structure.macromoleculeId == Macromolecule.macromoleculeId') - - - -class SubstructureDetermination(db.Model): - __tablename__ = 'SubstructureDetermination' - - substructureDeterminationId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - phasingAnalysisId = db.Column(db.ForeignKey('PhasingAnalysis.phasingAnalysisId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related phasing analysis item') - phasingProgramRunId = db.Column(db.ForeignKey('PhasingProgramRun.phasingProgramRunId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related program item') - spaceGroupId = db.Column(db.ForeignKey('SpaceGroup.spaceGroupId', ondelete='CASCADE', onupdate='CASCADE'), index=True, info='Related spaceGroup') - method = db.Column(db.ENUM('SAD', 'MAD', 'SIR', 'SIRAS', 'MR', 'MIR', 'MIRAS', 'RIP', 'RIPAS'), info='phasing method') - lowRes = db.Column(db.Float(asdecimal=True)) - highRes = db.Column(db.Float(asdecimal=True)) - recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time') - - PhasingAnalysi = db.relationship('PhasingAnalysi', primaryjoin='SubstructureDetermination.phasingAnalysisId == PhasingAnalysi.phasingAnalysisId') - PhasingProgramRun = db.relationship('PhasingProgramRun', primaryjoin='SubstructureDetermination.phasingProgramRunId == PhasingProgramRun.phasingProgramRunId') - SpaceGroup = db.relationship('SpaceGroup', primaryjoin='SubstructureDetermination.spaceGroupId == SpaceGroup.spaceGroupId') - - - -class Subtraction(db.Model): - __tablename__ = 'Subtraction' - - subtractionId = db.Column(db.Integer, primary_key=True) - dataCollectionId = db.Column(db.ForeignKey('SaxsDataCollection.dataCollectionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - rg = db.Column(db.String(45)) - rgStdev = db.Column(db.String(45)) - I0 = db.Column(db.String(45)) - I0Stdev = db.Column(db.String(45)) - firstPointUsed = db.Column(db.String(45)) - lastPointUsed = db.Column(db.String(45)) - quality = db.Column(db.String(45)) - isagregated = db.Column(db.String(45)) - concentration = db.Column(db.String(45)) - gnomFilePath = db.Column(db.String(255)) - rgGuinier = db.Column(db.String(45)) - rgGnom = db.Column(db.String(45)) - dmax = db.Column(db.String(45)) - total = db.Column(db.String(45)) - volume = db.Column(db.String(45)) - creationTime = db.Column(db.DateTime) - kratkyFilePath = db.Column(db.String(255)) - scatteringFilePath = db.Column(db.String(255)) - guinierFilePath = db.Column(db.String(255)) - SUBTRACTEDFILEPATH = db.Column(db.String(255)) - gnomFilePathOutput = db.Column(db.String(255)) - substractedFilePath = db.Column(db.String(255)) - - SaxsDataCollection = db.relationship('SaxsDataCollection', primaryjoin='Subtraction.dataCollectionId == SaxsDataCollection.dataCollectionId') - - - -class SubtractionToAbInitioModel(db.Model): - __tablename__ = 'SubtractionToAbInitioModel' - - subtractionToAbInitioModelId = db.Column(db.Integer, primary_key=True) - abInitioId = db.Column(db.ForeignKey('AbInitioModel.abInitioModelId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - subtractionId = db.Column(db.ForeignKey('Subtraction.subtractionId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - - AbInitioModel = db.relationship('AbInitioModel', primaryjoin='SubtractionToAbInitioModel.abInitioId == 
AbInitioModel.abInitioModelId') - Subtraction = db.relationship('Subtraction', primaryjoin='SubtractionToAbInitioModel.subtractionId == Subtraction.subtractionId') - - - -class UserGroup(db.Model): - __tablename__ = 'UserGroup' - - userGroupId = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(31), nullable=False, unique=True) - - - -t_UserGroup_has_Permission = db.Table( - 'UserGroup_has_Permission', - db.Column('userGroupId', db.ForeignKey('UserGroup.userGroupId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False), - db.Column('permissionId', db.ForeignKey('Permission.permissionId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -t_UserGroup_has_Person = db.Table( - 'UserGroup_has_Person', - db.Column('userGroupId', db.ForeignKey('UserGroup.userGroupId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False), - db.Column('personId', db.ForeignKey('Person.personId', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True, nullable=False, index=True) -) - - - -class Workflow(db.Model): - __tablename__ = 'Workflow' - - workflowId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - workflowTitle = db.Column(db.String(255)) - workflowType = db.Column(db.ENUM('Undefined', 'BioSAXS Post Processing', 'EnhancedCharacterisation', 'LineScan', 'MeshScan', 'Dehydration', 'KappaReorientation', 'BurnStrategy', 'XrayCentering', 'DiffractionTomography', 'TroubleShooting', 'VisualReorientation', 'HelicalCharacterisation', 'GroupedProcessing', 'MXPressE', 'MXPressO', 'MXPressL', 'MXScore', 'MXPressI', 'MXPressM', 'MXPressA')) - workflowTypeId = db.Column(db.Integer) - comments = db.Column(db.String(1024)) - status = db.Column(db.String(255)) - resultFilePath = db.Column(db.String(255)) - logFilePath = db.Column(db.String(255)) - recordTimeStamp = db.Column(db.DateTime, info='Creation or last update date/time') - workflowDescriptionFullPath = db.Column(db.String(255), info='Full file path to a json description of the workflow') - - - -class WorkflowMesh(db.Model): - __tablename__ = 'WorkflowMesh' - - workflowMeshId = db.Column(db.Integer, primary_key=True, info='Primary key (auto-incremented)') - workflowId = db.Column(db.ForeignKey('Workflow.workflowId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True, info='Related workflow') - bestPositionId = db.Column(db.ForeignKey('MotorPosition.motorPositionId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - bestImageId = db.Column(db.ForeignKey('Image.imageId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - value1 = db.Column(db.Float(asdecimal=True)) - value2 = db.Column(db.Float(asdecimal=True)) - value3 = db.Column(db.Float(asdecimal=True), info='N value') - value4 = db.Column(db.Float(asdecimal=True)) - cartographyPath = db.Column(db.String(255)) - recordTimeStamp = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue(), info='Creation or last update date/time') - - Image = db.relationship('Image', primaryjoin='WorkflowMesh.bestImageId == Image.imageId') - MotorPosition = db.relationship('MotorPosition', primaryjoin='WorkflowMesh.bestPositionId == MotorPosition.motorPositionId') - Workflow = db.relationship('Workflow', primaryjoin='WorkflowMesh.workflowId == Workflow.workflowId') - - - -class WorkflowStep(db.Model): - __tablename__ = 'WorkflowStep' - - workflowStepId = db.Column(db.Integer, primary_key=True) - workflowId = 
db.Column(db.ForeignKey('Workflow.workflowId'), nullable=False, index=True) - type = db.Column(db.String(45)) - status = db.Column(db.String(45)) - folderPath = db.Column(db.String(1024)) - imageResultFilePath = db.Column(db.String(1024)) - htmlResultFilePath = db.Column(db.String(1024)) - resultFilePath = db.Column(db.String(1024)) - comments = db.Column(db.String(2048)) - crystalSizeX = db.Column(db.String(45)) - crystalSizeY = db.Column(db.String(45)) - crystalSizeZ = db.Column(db.String(45)) - maxDozorScore = db.Column(db.String(45)) - recordTimeStamp = db.Column(db.DateTime) - - Workflow = db.relationship('Workflow', primaryjoin='WorkflowStep.workflowId == Workflow.workflowId') - - - -class WorkflowType(db.Model): - __tablename__ = 'WorkflowType' - - workflowTypeId = db.Column(db.Integer, primary_key=True) - workflowTypeName = db.Column(db.String(45)) - comments = db.Column(db.String(2048)) - recordTimeStamp = db.Column(db.DateTime) - - - -class XFEFluorescenceSpectrum(db.Model): - __tablename__ = 'XFEFluorescenceSpectrum' - - xfeFluorescenceSpectrumId = db.Column(db.Integer, primary_key=True) - sessionId = db.Column(db.ForeignKey('BLSession.sessionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - blSampleId = db.Column(db.ForeignKey('BLSample.blSampleId', ondelete='CASCADE', onupdate='CASCADE'), index=True) - jpegScanFileFullPath = db.Column(db.String(255)) - startTime = db.Column(db.DateTime) - endTime = db.Column(db.DateTime) - filename = db.Column(db.String(255)) - exposureTime = db.Column(db.Float) - axisPosition = db.Column(db.Float) - beamTransmission = db.Column(db.Float) - annotatedPymcaXfeSpectrum = db.Column(db.String(255)) - fittedDataFileFullPath = db.Column(db.String(255)) - scanFileFullPath = db.Column(db.String(255)) - energy = db.Column(db.Float) - beamSizeVertical = db.Column(db.Float) - beamSizeHorizontal = db.Column(db.Float) - crystalClass = db.Column(db.String(20)) - comments = db.Column(db.String(1024)) - blSubSampleId = db.Column(db.ForeignKey('BLSubSample.blSubSampleId'), index=True) - flux = db.Column(db.Float(asdecimal=True), info='flux measured before the xrfSpectra') - flux_end = db.Column(db.Float(asdecimal=True), info='flux measured after the xrfSpectra') - workingDirectory = db.Column(db.String(512)) - - BLSample = db.relationship('BLSample', primaryjoin='XFEFluorescenceSpectrum.blSampleId == BLSample.blSampleId') - BLSubSample = db.relationship('BLSubSample', primaryjoin='XFEFluorescenceSpectrum.blSubSampleId == BLSubSample.blSubSampleId') - BLSession = db.relationship('BLSession', primaryjoin='XFEFluorescenceSpectrum.sessionId == BLSession.sessionId') - - - -class XRFFluorescenceMapping(db.Model): - __tablename__ = 'XRFFluorescenceMapping' - - xrfFluorescenceMappingId = db.Column(db.Integer, primary_key=True) - xrfFluorescenceMappingROIId = db.Column(db.ForeignKey('XRFFluorescenceMappingROI.xrfFluorescenceMappingROIId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - dataCollectionId = db.Column(db.ForeignKey('DataCollection.dataCollectionId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - imageNumber = db.Column(db.Integer, nullable=False) - counts = db.Column(db.Integer, nullable=False) - - DataCollection = db.relationship('DataCollection', primaryjoin='XRFFluorescenceMapping.dataCollectionId == DataCollection.dataCollectionId') - XRFFluorescenceMappingROI = db.relationship('XRFFluorescenceMappingROI', primaryjoin='XRFFluorescenceMapping.xrfFluorescenceMappingROIId == 
XRFFluorescenceMappingROI.xrfFluorescenceMappingROIId') - - - -class XRFFluorescenceMappingROI(db.Model): - __tablename__ = 'XRFFluorescenceMappingROI' - - xrfFluorescenceMappingROIId = db.Column(db.Integer, primary_key=True) - startEnergy = db.Column(db.Float, nullable=False) - endEnergy = db.Column(db.Float, nullable=False) - element = db.Column(db.String(2)) - edge = db.Column(db.String(2), info='In future may be changed to enum(K, L)') - r = db.Column(db.Integer, info='R colour component') - g = db.Column(db.Integer, info='G colour component') - b = db.Column(db.Integer, info='B colour component') - - - -class XrayCentringResult(db.Model): - __tablename__ = 'XrayCentringResult' - - xrayCentringResultId = db.Column(db.Integer, primary_key=True) - gridInfoId = db.Column(db.ForeignKey('GridInfo.gridInfoId', ondelete='CASCADE', onupdate='CASCADE'), nullable=False, index=True) - method = db.Column(db.String(15), info='Type of X-ray centering calculation') - status = db.Column(db.ENUM('success', 'failure', 'pending'), nullable=False, server_default=db.FetchedValue()) - x = db.Column(db.Float, info='position in number of boxes in direction of the fast scan within GridInfo grid') - y = db.Column(db.Float, info='position in number of boxes in direction of the slow scan within GridInfo grid') - - GridInfo = db.relationship('GridInfo', primaryjoin='XrayCentringResult.gridInfoId == GridInfo.gridInfoId') - - - -class VRun(db.Model): - __tablename__ = 'v_run' - __table_args__ = ( - db.Index('v_run_idx1', 'startDate', 'endDate'), - ) - - runId = db.Column(db.Integer, primary_key=True) - run = db.Column(db.String(7), nullable=False, server_default=db.FetchedValue()) - startDate = db.Column(db.DateTime) - endDate = db.Column(db.DateTime) diff --git a/pyispyb/em/modules/api.py b/pyispyb/em/modules/api.py deleted file mode 100644 index 1e3bf6e7..00000000 --- a/pyispyb/em/modules/api.py +++ /dev/null @@ -1,34 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. -# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . - - -__license__ = "LGPLv3+" - - -from flask import Blueprint - -from pyispyb.app.extensions import api - - -def init_app(app, **kwargs): - # pylint: disable=unused-argument - api_v1_blueprint = Blueprint("api", __name__, url_prefix=app.config["API_ROOT"]) - api.api_v1.init_app(api_v1_blueprint) - app.register_blueprint(api_v1_blueprint, url_prefix=app.config["API_ROOT"]) diff --git a/pyispyb/em/modules/motion_correction.py b/pyispyb/em/modules/motion_correction.py deleted file mode 100644 index 90d2f50d..00000000 --- a/pyispyb/em/modules/motion_correction.py +++ /dev/null @@ -1,152 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. 
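The generated models above pair bare db.Table association tables (UserGroup_has_Permission, UserGroup_has_Person) with explicitly spelled-out primaryjoin relationships. A minimal, self-contained sketch of the same Flask-SQLAlchemy pattern, using hypothetical Group/Person names rather than the real schema:

from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()

# Bare association table: no model class, because the link carries no
# extra columns -- the same choice the generated tables above make.
t_Group_has_Person = db.Table(
    "Group_has_Person",
    db.Column("groupId", db.ForeignKey("Group.groupId"), primary_key=True),
    db.Column("personId", db.ForeignKey("Person.personId"), primary_key=True),
)


class Person(db.Model):
    __tablename__ = "Person"
    personId = db.Column(db.Integer, primary_key=True)


class Group(db.Model):
    __tablename__ = "Group"
    groupId = db.Column(db.Integer, primary_key=True)
    # secondary= points the ORM at the association table declared above
    persons = db.relationship("Person", secondary=t_Group_has_Person)

Declaring the join table with db.Table rather than a model class is the usual choice when the association carries no extra columns, which matches the tables being deleted here.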
- -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - -from pyispyb.app.extensions import db -from pyispyb.app.extensions.auth import auth_provider -from pyispyb.app.utils import create_response_item - -from pyispyb.em import models, schemas - - -def get_motion_corrections(request): - """ - Returns motion_correction based on query parameters. - - Args: - query_dict ([type]): [description] - - Returns: - [type]: [description] - """ - query_dict = request.args.to_dict() - - #is_admin, proposal_id_list = proposal.get_proposal_ids_by_username(request) - - run_query = True - #if is_admin: - # run_query = True - msg = "Unable to run query" - - if run_query: - return db.get_db_items( - models.MotionCorrection, - schemas.motion_correction.dict_schema, - schemas.motion_correction.ma_schema, - query_dict, - ) - else: - return create_response_item(msg=msg) - - -def add_motion_correction(data_dict): - """ - Adds new motion_correction. - - Args: - motion_correction_dict ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item(models.MotionCorrection, schemas.motion_correction.ma_schema, data_dict) - - -def get_motion_correction_by_id(motion_correction_id): - """ - Returns motion_correction info by its motion_correctionId. - - Args: - motion_correction_id (int): corresponds to motion_correctionId in db - - Returns: - dict: info about motion_correction as dict - """ - data_dict = {"motionCorrectionId": motion_correction_id} - return db.get_db_item( - models.MotionCorrection, schemas.motion_correction.ma_schema, data_dict - ) - - -def get_motion_correction_info_by_id(motion_correction_id): - """ - Returns motion_correction info by its motion_correctionId. - - Args: - motion_correction_id (int): corresponds to motion_correctionId in db - - Returns: - dict: info about motion_correction as dict - """ - motion_correction_json = get_motion_correction_by_id(motion_correction_id) - return motion_correction_json - - -def update_motion_correction(motion_correction_id, data_dict): - """ - Updates motion_correction. - - Args: - motion_correction_id ([type]): [description] - motion_correction_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"motionCorrectionId": motion_correction_id} - return db.update_db_item( - models.MotionCorrection, schemas.motion_correction.ma_schema, id_dict, data_dict - ) - - -def patch_motion_correction(motion_correction_id, data_dict): - """ - Patch a motion_correction. - - Args: - motion_correction_id ([type]): [description] - data_dict ([type]): [description] - - Returns: - [type]: [description] - """ - id_dict = {"motionCorrectionId": motion_correction_id} - return db.patch_db_item( - models.MotionCorrection, schemas.motion_correction.ma_schema, id_dict, data_dict - ) - - -def delete_motion_correction(motion_correction_id): - """ - Deletes motion_correction item from db. 
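Everything in the module above delegates to generic helpers on the db extension (get_db_items, add_db_item, patch_db_item, delete_db_item, ...), whose implementations are not part of this diff. A toy approximation of two of them, assuming plain SQLAlchemy sessions; names and signatures are illustrative only:

from sqlalchemy.orm import Session


def get_db_item(session: Session, model, filters: dict):
    # Look an item up by a dict of column=value pairs,
    # e.g. {"motionCorrectionId": motion_correction_id}
    return session.query(model).filter_by(**filters).first()


def delete_db_item(session: Session, model, filters: dict) -> bool:
    # Mirrors the True/False contract documented in delete_motion_correction
    item = session.query(model).filter_by(**filters).first()
    if item is None:
        return False
    session.delete(item)
    session.commit()
    return True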
- - Args: - motion_correction_id (int): motion_correctionId column in db - - Returns: - bool: True if the motion_correction exists and deleted successfully, - otherwise return False - """ - id_dict = {"motionCorrectionId": motion_correction_id} - return db.delete_db_item(models.MotionCorrection, id_dict) \ No newline at end of file diff --git a/pyispyb/em/routes/__init__.py b/pyispyb/em/routes/__init__.py deleted file mode 100644 index d9fadab4..00000000 --- a/pyispyb/em/routes/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - -import os -from importlib import import_module - - -__license__ = "LGPLv3+" - - -def init_app(app, **kwargs): - """Inits routes - - Args: - app ([type]): [description] - """ - for module_name in os.listdir(os.path.dirname(__file__)): - if not module_name.startswith("__") and module_name.endswith(".py"): - module = import_module(".%s" % module_name[:-3], package=__name__) - if hasattr(module, "init_app"): - module.init_app(app, **kwargs) diff --git a/pyispyb/em/routes/motion_correction.py b/pyispyb/em/routes/motion_correction.py deleted file mode 100644 index e8dc913b..00000000 --- a/pyispyb/em/routes/motion_correction.py +++ /dev/null @@ -1,66 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
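The deleted routes/__init__.py above wires up every module in its package that exposes an init_app callable. The same auto-discovery idiom, condensed into a standalone sketch (the package/path arguments are hypothetical generalisations of the original's use of __name__ and __file__):

import os
from importlib import import_module


def init_app(app, package_name: str, package_path: str, **kwargs):
    # Import every sibling module and hand the app to any that
    # expose an init_app() hook, exactly as the file above does.
    for module_name in os.listdir(package_path):
        if module_name.startswith("__") or not module_name.endswith(".py"):
            continue
        module = import_module(".%s" % module_name[:-3], package=package_name)
        if hasattr(module, "init_app"):
            module.init_app(app, **kwargs)

Called as init_app(app, __name__, os.path.dirname(__file__)) from a package __init__, this reproduces the behaviour being removed.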
-""" - -__license__ = "LGPLv3+" - -from flask import request, current_app -from flask_restx._http import HTTPStatus - -from pyispyb.flask_restx_patched import Resource - -from pyispyb.app.extensions.api import api_v1, Namespace -from pyispyb.app.extensions.auth import token_required, role_required -from pyispyb.em.schemas import motion_correction as motion_correction_schemas -from pyispyb.em.modules import motion_correction - - -api = Namespace( - "Motion correction", description="Motion correction namespace", path="/motion_correction" -) -api_v1.add_namespace(api) - - -@api.route("", endpoint="motion_correction") -@api.doc(security="apikey") -class MotionCorrections(Resource): - """Allows to get all motion_corrections""" - - @token_required - @role_required - def get(self): - """Returns motion_corrections based on query parameters""" - - api.logger.info("Get all motion_corrections") - return "Test" - #return motion_correction.get_motion_corrections(request) - - @api.expect(motion_correction_schemas.f_schema) - @api.marshal_with(motion_correction_schemas.f_schema, code=201) - # @api.errorhandler(FakeException) - # TODO add custom exception handling - @token_required - @role_required - def post(self): - """Adds a new motion_correction""" - - api.logger.info("Inserts a new motion_correction") - return "Test post" - #return motion_correction.add_motion_correction(api.payload) diff --git a/pyispyb/em/schemas/ctf.py b/pyispyb/em/schemas/ctf.py deleted file mode 100644 index f30c8de9..00000000 --- a/pyispyb/em/schemas/ctf.py +++ /dev/null @@ -1,78 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'ctfId': f_fields.Integer(required=True, description=''), - 'motionCorrectionId': f_fields.Integer(required=False, description=''), - 'autoProcProgramId': f_fields.Integer(required=False, description=''), - 'boxSizeX': f_fields.Float(required=False, description='Box size in x, Units: pixels'), - 'boxSizeY': f_fields.Float(required=False, description='Box size in y, Units: pixels'), - 'minResolution': f_fields.Float(required=False, description='Minimum resolution for CTF, Units: A'), - 'maxResolution': f_fields.Float(required=False, description='Units: A'), - 'minDefocus': f_fields.Float(required=False, description='Units: A'), - 'maxDefocus': f_fields.Float(required=False, description='Units: A'), - 'defocusStepSize': f_fields.Float(required=False, description='Units: A'), - 'astigmatism': f_fields.Float(required=False, description='Units: A'), - 'astigmatismAngle': f_fields.Float(required=False, description='Units: deg?'), - 'estimatedResolution': f_fields.Float(required=False, description='Units: A'), - 'estimatedDefocus': f_fields.Float(required=False, description='Units: A'), - 'amplitudeContrast': f_fields.Float(required=False, description='Units: %?'), - 'ccValue': f_fields.Float(required=False, description='Correlation value'), - 'fftTheoreticalFullPath': f_fields.String(required=False, description='Full path to the jpg image of the simulated FFT'), - 'comments': f_fields.String(required=False, description=''), - } - -class CTFSchema(Schema): - """Marshmallows schema class representing CTF table""" - - ctfId = ma_fields.Integer() - motionCorrectionId = ma_fields.Integer() - autoProcProgramId = ma_fields.Integer() - boxSizeX = ma_fields.Float() - boxSizeY = ma_fields.Float() - minResolution = ma_fields.Float() - maxResolution = ma_fields.Float() - minDefocus = ma_fields.Float() - maxDefocus = ma_fields.Float() - defocusStepSize = ma_fields.Float() - astigmatism = ma_fields.Float() - astigmatismAngle = ma_fields.Float() - estimatedResolution = ma_fields.Float() - estimatedDefocus = ma_fields.Float() - amplitudeContrast = ma_fields.Float() - ccValue = ma_fields.Float() - fftTheoreticalFullPath = ma_fields.String() - comments = ma_fields.String() - -f_schema = api.model('CTF', dict_schema) -ma_schema = CTFSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/em/schemas/motion_correction.py b/pyispyb/em/schemas/motion_correction.py deleted file mode 100644 index 847c19c2..00000000 --- a/pyispyb/em/schemas/motion_correction.py +++ /dev/null @@ -1,80 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'motionCorrectionId': f_fields.Integer(required=True, description=''), - 'dataCollectionId': f_fields.Integer(required=False, description=''), - 'autoProcProgramId': f_fields.Integer(required=False, description=''), - 'imageNumber': f_fields.Integer(required=False, description='Movie number, sequential in time 1-n'), - 'firstFrame': f_fields.Integer(required=False, description='First frame of movie used'), - 'lastFrame': f_fields.Integer(required=False, description='Last frame of movie used'), - 'dosePerFrame': f_fields.Float(required=False, description='Dose per frame, Units: e-/A^2'), - 'doseWeight': f_fields.Float(required=False, description='Dose weight, Units: dimensionless'), - 'totalMotion': f_fields.Float(required=False, description='Total motion, Units: A'), - 'averageMotionPerFrame': f_fields.Float(required=False, description='Average motion per frame, Units: A'), - 'driftPlotFullPath': f_fields.String(required=False, description='Full path to the drift plot'), - 'micrographFullPath': f_fields.String(required=False, description='Full path to the micrograph'), - 'micrographSnapshotFullPath': f_fields.String(required=False, description='Full path to a snapshot (jpg) of the micrograph'), - 'patchesUsedX': f_fields.Integer(required=False, description='Number of patches used in x (for motioncor2)'), - 'patchesUsedY': f_fields.Integer(required=False, description='Number of patches used in y (for motioncor2)'), - 'fftFullPath': f_fields.String(required=False, description='Full path to the jpg image of the raw micrograph FFT'), - 'fftCorrectedFullPath': f_fields.String(required=False, description='Full path to the jpg image of the drift corrected micrograph FFT'), - 'comments': f_fields.String(required=False, description=''), - 'movieId': f_fields.Integer(required=False, description=''), - } - -class MotionCorrectionSchema(Schema): - """Marshmallows schema class representing MotionCorrection table""" - - motionCorrectionId = ma_fields.Integer() - dataCollectionId = ma_fields.Integer() - autoProcProgramId = ma_fields.Integer() - imageNumber = ma_fields.Integer() - firstFrame = ma_fields.Integer() - lastFrame = ma_fields.Integer() - dosePerFrame = ma_fields.Float() - doseWeight = ma_fields.Float() - totalMotion = ma_fields.Float() - averageMotionPerFrame = ma_fields.Float() - driftPlotFullPath = ma_fields.String() - micrographFullPath = ma_fields.String() - micrographSnapshotFullPath = ma_fields.String() - patchesUsedX = ma_fields.Integer() - patchesUsedY = ma_fields.Integer() - fftFullPath = ma_fields.String() - fftCorrectedFullPath = ma_fields.String() - comments = ma_fields.String() - movieId = ma_fields.Integer() - -f_schema = api.model('MotionCorrection', dict_schema) -ma_schema = MotionCorrectionSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/em/schemas/particle.py b/pyispyb/em/schemas/particle.py deleted file mode 100644 index 85b43029..00000000 --- a/pyispyb/em/schemas/particle.py +++ /dev/null @@ -1,50 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. 
- -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'particleId': f_fields.Integer(required=True, description=''), - 'dataCollectionId': f_fields.Integer(required=True, description=''), - 'x': f_fields.Float(required=False, description=''), - 'y': f_fields.Float(required=False, description=''), - } - -class ParticleSchema(Schema): - """Marshmallows schema class representing Particle table""" - - particleId = ma_fields.Integer() - dataCollectionId = ma_fields.Integer() - x = ma_fields.Float() - y = ma_fields.Float() - -f_schema = api.model('Particle', dict_schema) -ma_schema = ParticleSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/filters.py b/pyispyb/filters.py new file mode 100644 index 00000000..3bd496d7 --- /dev/null +++ b/pyispyb/filters.py @@ -0,0 +1,124 @@ +from typing import Optional +from fastapi import Query + + +def session( + session: Optional[str] = Query( + None, description="Session name to filter by", regex=r"^(\w|-)+\d+-\d+$" + ) +) -> Optional[str]: + return session + + +def sessionId( + sessionId: Optional[int] = Query(None, description="Session id to filter by") +) -> Optional[int]: + return sessionId + + +def proposal( + proposal: Optional[str] = Query( + None, description="Proposal name to filter by", regex=r"^(\w|-)+\d+$" + ) +) -> Optional[str]: + return proposal + + +def proposalId( + proposalId: Optional[int] = Query(None, description="Proposal id to filter by") +) -> Optional[int]: + return proposalId + + +def beamLineName( + beamLineName: Optional[str] = Query(None, description="Beamline name to filter by") +) -> Optional[str]: + return beamLineName + + +def dataCollectionGroupId( + dataCollectionGroupId: Optional[int] = Query( + None, description="Data collection group id to filter by" + ) +) -> Optional[int]: + return dataCollectionGroupId + + +def dataCollectionId( + dataCollectionId: Optional[int] = Query( + None, description="Data collection id to filter by" + ) +) -> Optional[int]: + return dataCollectionId + + +def blSampleId( + blSampleId: Optional[int] = Query(None, description="Sample id to filter by") +) -> Optional[int]: + return blSampleId + + +def blSubSampleId( + blSubSampleId: Optional[int] = Query(None, description="Sub sample id to filter by") +) -> Optional[int]: + return blSubSampleId + + +def proteinId( + proteinId: Optional[int] = Query(None, description="Protein id to filter by") +) -> Optional[int]: + return proteinId + + +def search( + search: str = Query(None, description="Search string to filter by") +) -> Optional[str]: + return search + + +def containerId( + containerId: Optional[int] = Query(None, description="Container id to filter by") +) -> Optional[int]: + return containerId + + +def dewarId( + dewarId: 
Optional[int] = Query(None, description="Dewar id to filter by") +) -> Optional[int]: + return dewarId + + +def shippingId( + shippingId: Optional[int] = Query(None, description="Shipping id to filter by") +) -> Optional[int]: + return shippingId + + +def month( + month: Optional[str] = Query(None, description="Month to filter by", regex=r"^\d\d?$") +) -> Optional[str]: + return month + + +def year( + year: Optional[str] = Query(None, description="Year to filter by", regex=r"^\d\d\d\d$") +) -> Optional[str]: + return year + + +def runId( + runId: Optional[str] = Query(None, description="Run id to filter by") +) -> Optional[str]: + return runId + + +def userGroupId( + userGroupId: Optional[int] = Query(None, description="UserGroup id to filter by") +) -> Optional[int]: + return userGroupId + + +def permissionId( + permissionId: Optional[int] = Query(None, description="Permission id to filter by") +) -> Optional[int]: + return permissionId diff --git a/pyispyb/flask_restx_patched/__init__.py b/pyispyb/flask_restx_patched/__init__.py deleted file mode 100644 index 929da166..00000000 --- a/pyispyb/flask_restx_patched/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from flask_restx import * -from flask_restx._http import HTTPStatus - -from .api import Api -from .namespace import Namespace -from .parameters import Parameters, PostFormParameters, PatchJSONParameters -from .resource import Resource diff --git a/pyispyb/flask_restx_patched/api.py b/pyispyb/flask_restx_patched/api.py deleted file mode 100644 index dd4b3fed..00000000 --- a/pyispyb/flask_restx_patched/api.py +++ /dev/null @@ -1,35 +0,0 @@ -from flask import jsonify -from flask_restx import Api as OriginalApi -from flask_restx._http import HTTPStatus - -# from werkzeug import cached_property - -from .namespace import Namespace - - -class Api(OriginalApi): - def init_app(self, app, **kwargs): - # This solves the issue of late resources registration: - # https://github.com/frol/flask-restplus-server-example/issues/110 - # https://github.com/noirbizarre/flask-restplus/pull/483 - self.app = app - - super(Api, self).init_app(app, **kwargs) - app.errorhandler(HTTPStatus.UNPROCESSABLE_ENTITY.value)(handle_validation_error) - - def namespace(self, *args, **kwargs): - # The only purpose of this method is to pass a custom Namespace class - _namespace = Namespace(*args, **kwargs) - self.add_namespace(_namespace) - return _namespace - - -# Return validation errors as JSON -def handle_validation_error(err): - exc = err.data["exc"] - return ( - jsonify( - {"status": HTTPStatus.UNPROCESSABLE_ENTITY.value, "message": exc.messages} - ), - HTTPStatus.UNPROCESSABLE_ENTITY.value, - ) diff --git a/pyispyb/flask_restx_patched/namespace.py b/pyispyb/flask_restx_patched/namespace.py deleted file mode 100644 index b037a007..00000000 --- a/pyispyb/flask_restx_patched/namespace.py +++ /dev/null @@ -1,125 +0,0 @@ -from functools import wraps - -import flask -import flask_marshmallow -from flask_restx import Namespace as OriginalNamespace -from flask_restx.utils import merge, unpack -from flask_restx._http import HTTPStatus -from webargs.flaskparser import parser as webargs_parser -from werkzeug import exceptions as http_exceptions - -# from .model import Model, DefaultHTTPErrorSchema - -from flask_restx.model import Model - - -class Namespace(OriginalNamespace): - - WEBARGS_PARSER = webargs_parser - - def _handle_api_doc(self, cls, doc): - if doc is False: - cls.__apidoc__ = False - return - # unshortcut_params_description(doc) - # handle_deprecations(doc) - # for key in
'get', 'post', 'put', 'delete', 'options', 'head', 'patch': - # if key in doc: - # if doc[key] is False: - # continue - # unshortcut_params_description(doc[key]) - # handle_deprecations(doc[key]) - # if 'expect' in doc[key] and not isinstance(doc[key]['expect'], (list, tuple)): - ## doc[key]['expect'] = [doc[key]['expect']] - cls.__apidoc__ = merge(getattr(cls, "__apidoc__", {}), doc) - - def resolve_object(self, object_arg_name, resolver): - """ - A helper decorator to resolve object instance from arguments (e.g. identity). - - Example: - >>> @namespace.route('/') - ... class MyResource(Resource): - ... @namespace.resolve_object( - ... object_arg_name='user', - ... resolver=lambda kwargs: User.query.get_or_404(kwargs.pop('user_id')) - ... ) - ... def get(self, user): - ... # user is a User instance here - """ - - def decorator(func_or_class): - if isinstance(func_or_class, type): - # Handle Resource classes decoration - # pylint: disable=protected-access - func_or_class._apply_decorator_to_methods(decorator) - return func_or_class - - @wraps(func_or_class) - def wrapper(*args, **kwargs): - kwargs[object_arg_name] = resolver(kwargs) - return func_or_class(*args, **kwargs) - - return wrapper - - return decorator - - def model(self, name=None, model=None, mask=None, **kwargs): - """ - Model registration decorator. - """ - if isinstance( - model, (flask_marshmallow.Schema, flask_marshmallow.base_fields.FieldABC) - ): - if not name: - name = model.__class__.__name__ - api_model = Model(name, model, mask=mask) - api_model.__apidoc__ = kwargs - return self.add_model(name, api_model) - return super(Namespace, self).model(name=name, model=model, **kwargs) - - def parameters(self, parameters, locations=None): - """ - Endpoint parameters registration decorator. 
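For contrast with the flask-restx machinery being deleted here: the new pyispyb/filters.py added earlier in this diff replaces it with plain FastAPI dependencies, where each filter is a callable returning the parsed query value and is injected with Depends(). A self-contained sketch of how such a filter is consumed (the /sessions endpoint is hypothetical):

from typing import Optional
from fastapi import Depends, FastAPI, Query

app = FastAPI()


def proposalId(
    proposalId: Optional[int] = Query(None, description="Proposal id to filter by")
) -> Optional[int]:
    return proposalId


@app.get("/sessions")
def list_sessions(proposalId: Optional[int] = Depends(proposalId)):
    # The validation and OpenAPI documentation live in one place
    # and are reused by every endpoint that depends on the filter.
    return {"proposalId": proposalId}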
- """ - - def decorator(func): - if locations is None and parameters.many: - _locations = ("json",) - else: - _locations = locations - if _locations is not None: - parameters.context["in"] = _locations - - return self.doc(params=parameters)( - self.response(code=HTTPStatus.UNPROCESSABLE_ENTITY)( - self.WEBARGS_PARSER.use_args(parameters, locations=_locations)(func) - ) - ) - - return decorator - - def preflight_options_handler(self, func): - @wraps(func) - def wrapper(self, *args, **kwargs): - if "Access-Control-Request-Method" in flask.request.headers: - response = flask.Response(status=HTTPStatus.OK) - response.headers["Access-Control-Allow-Methods"] = ", ".join( - self.methods - ) - return response - return func(self, *args, **kwargs) - - return wrapper - - def route(self, *args, **kwargs): - base_wrapper = super(Namespace, self).route(*args, **kwargs) - - def wrapper(cls): - if "OPTIONS" in cls.methods: - cls.options = self.preflight_options_handler( - self.response(code=HTTPStatus.NO_CONTENT)(cls.options) - ) - return base_wrapper(cls) - - return wrapper diff --git a/pyispyb/flask_restx_patched/parameters.py b/pyispyb/flask_restx_patched/parameters.py deleted file mode 100644 index 02c07fbc..00000000 --- a/pyispyb/flask_restx_patched/parameters.py +++ /dev/null @@ -1,233 +0,0 @@ -# encoding: utf-8 -# pylint: disable=missing-docstring -import logging - -from six import itervalues - -from flask_marshmallow import Schema, base_fields -from marshmallow import validate, validates_schema, ValidationError - - -log = logging.getLogger(__name__) # pylint: disable=invalid-name - - -class Parameters(Schema): - class Meta: - ordered = True - - def __init__(self, **kwargs): - super(Parameters, self).__init__(strict=True, **kwargs) - # This is an add-hoc implementation of the feature which didn't make - # into Marshmallow upstream: - # https://github.com/marshmallow-code/marshmallow/issues/344 - for required_field_name in getattr(self.Meta, "required", []): - self.fields[required_field_name].required = True - - def __contains__(self, field): - return field in self.fields - - def make_instance(self, data): - # pylint: disable=unused-argument - """ - This is a no-op function which shadows ``ModelSchema.make_instance`` - method (when inherited classes inherit from ``ModelSchema``). Thus, we - avoid a new instance creation because it is undesirable behaviour for - parameters (they can be used not only for saving new instances). - """ - return - - -class PostFormParameters(Parameters): - def __init__(self, *args, **kwargs): - super(PostFormParameters, self).__init__(*args, **kwargs) - for field in itervalues(self.fields): - if field.dump_only: - continue - if not field.metadata.get("location"): - field.metadata["location"] = "form" - - -class PatchJSONParameters(Parameters): - """ - Base parameters class for handling PATCH arguments according to RFC 6902. 
- """ - - # All operations described in RFC 6902 - OP_ADD = "add" - OP_REMOVE = "remove" - OP_REPLACE = "replace" - OP_MOVE = "move" - OP_COPY = "copy" - OP_TEST = "test" - - # However, we use only those which make sense in RESTful API - OPERATION_CHOICES = ( - OP_TEST, - OP_ADD, - OP_REMOVE, - OP_REPLACE, - ) - op = base_fields.String(required=True) # pylint: disable=invalid-name - - PATH_CHOICES = None - - path = base_fields.String(required=True) - - NO_VALUE_OPERATIONS = (OP_REMOVE,) - - value = base_fields.Raw(required=False) - - def __init__(self, *args, **kwargs): - if "many" in kwargs: - assert kwargs["many"], "PATCH Parameters must be marked as 'many'" - kwargs["many"] = True - super(PatchJSONParameters, self).__init__(*args, **kwargs) - if not self.PATH_CHOICES: - raise ValueError("%s.PATH_CHOICES has to be set" % self.__class__.__name__) - # Make a copy of `validators` as otherwise we will modify the behaviour - # of all `marshmallow.Schema`-based classes - self.fields["op"].validators = self.fields["op"].validators + [ - validate.OneOf(self.OPERATION_CHOICES) - ] - self.fields["path"].validators = self.fields["path"].validators + [ - validate.OneOf(self.PATH_CHOICES) - ] - - @validates_schema - def validate_patch_structure(self, data): - """ - Common validation of PATCH structure - - Provide check that 'value' present in all operations expect it. - - Provide check if 'path' is present. 'path' can be absent if provided - without '/' at the start. Supposed that if 'path' is present than it - is prepended with '/'. - Removing '/' in the beginning to simplify usage in resource. - """ - if data["op"] not in self.NO_VALUE_OPERATIONS and "value" not in data: - raise ValidationError("value is required") - - if "path" not in data: - raise ValidationError("Path is required and must always begin with /") - else: - data["field_name"] = data["path"][1:] - - @classmethod - def perform_patch(cls, operations, obj, state=None): - """ - Performs all necessary operations by calling class methods with - corresponding names. - """ - if state is None: - state = {} - for operation in operations: - if not cls._process_patch_operation(operation, obj=obj, state=state): - log.info( - "%s patching has been stopped because of unknown operation %s", - obj.__class__.__name__, - operation, - ) - raise ValidationError( - "Failed to update %s details. Operation %s could not succeed." - % (obj.__class__.__name__, operation) - ) - return True - - @classmethod - def _process_patch_operation(cls, operation, obj, state): - """ - Args: - operation (dict): one patch operation in RFC 6902 format. - obj (object): an instance which is needed to be patched. - state (dict): inter-operations state storage - - Returns: - processing_status (bool): True if operation was handled, otherwise False. 
- """ - field_operaion = operation["op"] - - if field_operaion == cls.OP_REPLACE: - return cls.replace( - obj, operation["field_name"], operation["value"], state=state - ) - - elif field_operaion == cls.OP_TEST: - return cls.test( - obj, operation["field_name"], operation["value"], state=state - ) - - elif field_operaion == cls.OP_ADD: - return cls.add( - obj, operation["field_name"], operation["value"], state=state - ) - - elif field_operaion == cls.OP_MOVE: - return cls.move( - obj, operation["field_name"], operation["value"], state=state - ) - - elif field_operaion == cls.OP_COPY: - return cls.copy( - obj, operation["field_name"], operation["value"], state=state - ) - - elif field_operaion == cls.OP_REMOVE: - return cls.remove(obj, operation["field_name"], state=state) - - return False - - @classmethod - def replace(cls, obj, field, value, state): - """ - This is method for replace operation. It is separated to provide a - possibility to easily override it in your Parameters. - - Args: - obj (object): an instance to change. - field (str): field name - value (str): new value - state (dict): inter-operations state storage - - Returns: - processing_status (bool): True - """ - if not hasattr(obj, field): - raise ValidationError( - "Field '%s' does not exist, so it cannot be patched" % field - ) - setattr(obj, field, value) - return True - - @classmethod - def test(cls, obj, field, value, state): - """ - This is method for test operation. It is separated to provide a - possibility to easily override it in your Parameters. - - Args: - obj (object): an instance to change. - field (str): field name - value (str): new value - state (dict): inter-operations state storage - - Returns: - processing_status (bool): True - """ - return getattr(obj, field) == value - - @classmethod - def add(cls, obj, field, value, state): - raise NotImplementedError() - - @classmethod - def remove(cls, obj, field, state): - raise NotImplementedError() - - @classmethod - def move(cls, obj, field, value, state): - raise NotImplementedError() - - @classmethod - def copy(cls, obj, field, value, state): - raise NotImplementedError() diff --git a/pyispyb/flask_restx_patched/resource.py b/pyispyb/flask_restx_patched/resource.py deleted file mode 100644 index 57f51f44..00000000 --- a/pyispyb/flask_restx_patched/resource.py +++ /dev/null @@ -1,79 +0,0 @@ -# -*- coding: utf-8 -*- -# pylint: disable=protected-access -import flask -from flask_restx import Resource as OriginalResource -from flask_restx._http import HTTPStatus -from werkzeug.exceptions import HTTPException - - -class Resource(OriginalResource): - """ - Extended Flast-RESTPlus Resource to add options method - """ - - @classmethod - def _apply_decorator_to_methods(cls, decorator): - """ - This helper can apply a given decorator to all methods on the current - Resource. - - NOTE: In contrast to ``Resource.method_decorators``, which has a - similar use-case, this method applies decorators directly and override - methods in-place, while the decorators listed in - ``Resource.method_decorators`` are applied on every request which is - quite a waste of resources. - """ - for method in cls.methods: - method_name = method.lower() - decorated_method_func = decorator(getattr(cls, method_name)) - setattr(cls, method_name, decorated_method_func) - - def options(self, *args, **kwargs): - """ - Check which methods are allowed. - - Use this method if you need to know what operations are allowed to be - performed on this endpoint, e.g. 
to decide wether to display a button - in your UI. - - The list of allowed methods is provided in `Allow` response header. - """ - # This is a generic implementation of OPTIONS method for resources. - # This method checks every permissions provided as decorators for other - # methods to provide information about what methods `current_user` can - # use. - method_funcs = [getattr(self, m.lower()) for m in self.methods] - allowed_methods = [] - request_oauth_backup = getattr(flask.request, "oauth", None) - for method_func in method_funcs: - if getattr(method_func, "_access_restriction_decorators", None): - if not hasattr(method_func, "_cached_fake_method_func"): - fake_method_func = lambda *args, **kwargs: True - # `__name__` is used in `login_required` decorator, so it - # is required to fake this also - fake_method_func.__name__ = "options" - - # Decorate the fake method with the registered access - # restriction decorators - for decorator in method_func._access_restriction_decorators: - fake_method_func = decorator(fake_method_func) - - # Cache the `fake_method_func` to avoid redoing this over - # and over again - method_func.__dict__["_cached_fake_method_func"] = fake_method_func - else: - fake_method_func = method_func._cached_fake_method_func - - flask.request.oauth = None - try: - fake_method_func(self, *args, **kwargs) - except HTTPException: - # This method is not allowed, so skip it - continue - - allowed_methods.append(method_func.__name__.upper()) - flask.request.oauth = request_oauth_backup - - return flask.Response( - status=HTTPStatus.NO_CONTENT, headers={"Allow": ", ".join(allowed_methods)} - ) diff --git a/pyispyb/requirements.txt b/pyispyb/requirements.txt deleted file mode 100644 index a32215e3..00000000 --- a/pyispyb/requirements.txt +++ /dev/null @@ -1,22 +0,0 @@ -Flask>=1.1,<2 -flask-restx -Flask-Cors>=3.0.8,<4 - -SQLAlchemy>=1.3.0,<2 -Flask-SQLAlchemy>=2.4,<3 -flask-sqlacodegen - -marshmallow>=2.13.5,<3 -flask-marshmallow>=0.7,<0.8 -marshmallow-sqlalchemy>=0.12,<0.13 -marshmallow_jsonschema==0.10.0 -webargs>=1.4.0,<2 - -Alembic>=1.0,<2 - -pyjwt -mysqlclient -ruamel.yaml -pdfkit -python-barcode -qrcode diff --git a/pyispyb/resources/queries/dataCollection/groups.sql b/pyispyb/resources/queries/dataCollection/groups.sql new file mode 100644 index 00000000..beaf830f --- /dev/null +++ b/pyispyb/resources/queries/dataCollection/groups.sql @@ -0,0 +1,216 @@ +select + *, + ( + select + GROUP_CONCAT(workflowStepId) + from + WorkflowStep + where + WorkflowStep.workflowId = v_datacollection_summary.Workflow_workflowId + order by + WorkflowStep.workflowStepId DESC + ) as WorkflowStep_workflowStepId, + ( + select + GROUP_CONCAT(workflowStepType) + from + WorkflowStep + where + WorkflowStep.workflowId = v_datacollection_summary.Workflow_workflowId + ) as WorkflowStep_workflowStepType, + ( + select + GROUP_CONCAT(status) + from + WorkflowStep + where + WorkflowStep.workflowId = v_datacollection_summary.Workflow_workflowId + ) as WorkflowStep_status, + GROUP_CONCAT( + `AutoProcProgram_processingPrograms` SEPARATOR ', ' + ) AS `processingPrograms`, + GROUP_CONCAT( + `AutoProcProgram_processingStatus` SEPARATOR ', ' + ) AS `processingStatus`, + GROUP_CONCAT( + `AutoProcIntegration_autoProcIntegrationId` SEPARATOR ', ' + ) AS `autoProcIntegrationId`, + GROUP_CONCAT(`cell_a` SEPARATOR ', ') AS `Autoprocessing_cell_a`, + GROUP_CONCAT(`cell_b` SEPARATOR ', ') AS `Autoprocessing_cell_b`, + GROUP_CONCAT(`cell_c` SEPARATOR ', ') AS `Autoprocessing_cell_c`, + GROUP_CONCAT(`cell_alpha` SEPARATOR 
', ') AS `Autoprocessing_cell_alpha`, + GROUP_CONCAT(`cell_beta` SEPARATOR ', ') AS `Autoprocessing_cell_beta`, + GROUP_CONCAT(`cell_gamma` SEPARATOR ', ') AS `Autoprocessing_cell_gamma`, + GROUP_CONCAT(`anomalous` SEPARATOR ', ') AS `Autoprocessing_anomalous`, + GROUP_CONCAT(`autoProcId` SEPARATOR ', ') AS `autoProcIds`, + GROUP_CONCAT(`scalingStatisticsType` SEPARATOR ', ') AS `scalingStatisticsTypes`, + GROUP_CONCAT(`resolutionLimitHigh` SEPARATOR ', ') AS `resolutionsLimitHigh`, + GROUP_CONCAT(`resolutionLimitLow` SEPARATOR ', ') AS `resolutionsLimitLow`, + GROUP_CONCAT(`rMerge` SEPARATOR ', ') AS `rMerges`, + GROUP_CONCAT(`completeness` SEPARATOR ', ') AS `completenessList`, + GROUP_CONCAT(`AutoProc_spaceGroup` SEPARATOR ', ') AS `AutoProc_spaceGroups`, + ( + SELECT + count(*) + FROM + `PhasingStep` + LEFT JOIN `AutoProcScaling` ON `AutoProcScaling`.`autoProcScalingId` = `PhasingStep`.`autoProcScalingId` + LEFT JOIN `AutoProcScaling_has_Int` ON `AutoProcScaling_has_Int`.`autoProcScalingId` = `AutoProcScaling`.`autoProcScalingId` + LEFT JOIN `AutoProcIntegration` ON `AutoProcIntegration`.`autoProcIntegrationId` = `AutoProcScaling_has_Int`.`autoProcIntegrationId` + LEFT JOIN `DataCollection` ON `DataCollection`.`dataCollectionId` = `AutoProcIntegration`.`dataCollectionId` + LEFT JOIN `DataCollectionGroup` ON `DataCollectionGroup`.`dataCollectionGroupId` = `DataCollection`.`dataCollectionGroupId` + where + `DataCollectionGroup`.`dataCollectionGroupId` = v_datacollection_summary.DataCollection_dataCollectionGroupId + and PhasingStep.method = "SAD" + ) as hasPhasing, + ( + SELECT + count(*) + FROM + `PhasingStep` + LEFT JOIN `AutoProcScaling` ON `AutoProcScaling`.`autoProcScalingId` = `PhasingStep`.`autoProcScalingId` + LEFT JOIN `AutoProcScaling_has_Int` ON `AutoProcScaling_has_Int`.`autoProcScalingId` = `AutoProcScaling`.`autoProcScalingId` + LEFT JOIN `AutoProcIntegration` ON `AutoProcIntegration`.`autoProcIntegrationId` = `AutoProcScaling_has_Int`.`autoProcIntegrationId` + LEFT JOIN `DataCollection` ON `DataCollection`.`dataCollectionId` = `AutoProcIntegration`.`dataCollectionId` + LEFT JOIN `DataCollectionGroup` ON `DataCollectionGroup`.`dataCollectionGroupId` = `DataCollection`.`dataCollectionGroupId` + where + `DataCollectionGroup`.`dataCollectionGroupId` = v_datacollection_summary.DataCollection_dataCollectionGroupId + and PhasingStep.method = "MR" + ) as hasMR, + ( + SELECT + GROUP_CONCAT(DISTINCT(spaceGroupShortName)) + FROM + `PhasingStep` + LEFT JOIN `SpaceGroup` ON `PhasingStep`.`spaceGroupId` = `SpaceGroup`.`spaceGroupId` + LEFT JOIN `AutoProcScaling` ON `AutoProcScaling`.`autoProcScalingId` = `PhasingStep`.`autoProcScalingId` + LEFT JOIN `AutoProcScaling_has_Int` ON `AutoProcScaling_has_Int`.`autoProcScalingId` = `AutoProcScaling`.`autoProcScalingId` + LEFT JOIN `AutoProcIntegration` ON `AutoProcIntegration`.`autoProcIntegrationId` = `AutoProcScaling_has_Int`.`autoProcIntegrationId` + LEFT JOIN `DataCollection` ON `DataCollection`.`dataCollectionId` = `AutoProcIntegration`.`dataCollectionId` + LEFT JOIN `DataCollectionGroup` ON `DataCollectionGroup`.`dataCollectionGroupId` = `DataCollection`.`dataCollectionGroupId` + where + `DataCollectionGroup`.`dataCollectionGroupId` = v_datacollection_summary.DataCollection_dataCollectionGroupId + and PhasingStep.phasingStepType = 'MODELBUILDING' + ) as SpaceGroupModelResolvedByPhasing, + ( + SELECT + GROUP_CONCAT(DISTINCT(spaceGroupShortName)) + FROM + `PhasingStep` + LEFT JOIN `SpaceGroup` ON `PhasingStep`.`spaceGroupId` = 
`SpaceGroup`.`spaceGroupId` + LEFT JOIN `AutoProcScaling` ON `AutoProcScaling`.`autoProcScalingId` = `PhasingStep`.`autoProcScalingId` + LEFT JOIN `AutoProcScaling_has_Int` ON `AutoProcScaling_has_Int`.`autoProcScalingId` = `AutoProcScaling`.`autoProcScalingId` + LEFT JOIN `AutoProcIntegration` ON `AutoProcIntegration`.`autoProcIntegrationId` = `AutoProcScaling_has_Int`.`autoProcIntegrationId` + LEFT JOIN `DataCollection` ON `DataCollection`.`dataCollectionId` = `AutoProcIntegration`.`dataCollectionId` + LEFT JOIN `DataCollectionGroup` ON `DataCollectionGroup`.`dataCollectionGroupId` = `DataCollection`.`dataCollectionGroupId` + where + `DataCollectionGroup`.`dataCollectionGroupId` = v_datacollection_summary.DataCollection_dataCollectionGroupId + and PhasingStep.phasingStepType = 'REFINEMENT' + ) as SpaceGroupModelResolvedByMr, + ( + select + SUM(numberOfImages) + FROM + DataCollection + where + dataCollectionGroupId = v_datacollection_summary.DataCollectionGroup_dataCollectionGroupId + ) as totalNumberOfImages, + ( + select + count(*) + FROM + DataCollection + where + dataCollectionGroupId = v_datacollection_summary.DataCollectionGroup_dataCollectionGroupId + ) as totalNumberOfDataCollections, + ( + select + MAX(imageId) + FROM + Image + where + dataCollectionId = v_datacollection_summary.DataCollection_dataCollectionId + ) as lastImageId, + ( + select + MIN(imageId) + FROM + Image + where + dataCollectionId = v_datacollection_summary.DataCollection_dataCollectionId + ) as firstImageId, + ( + select + GROUP_CONCAT(synchrotronCurrent) + FROM + Image + where + dataCollectionId = v_datacollection_summary.DataCollection_dataCollectionId + ) as synchrotronCurrent, + ( + select + GROUP_CONCAT(workflowStepId) + from + WorkflowStep + where + WorkflowStep.workflowId = v_datacollection_summary.Workflow_workflowId + and WorkflowStep.WorkflowStepType = 'Characterisation' + ) as characterisationWorkflowStepIds, + ( + select + count(*) + from + DataCollection + where + DataCollection.dataCollectionGroupId = v_datacollection_summary.DataCollectionGroup_dataCollectionGroupId + ) as numberOfGridSquares, + ( + select + GROUP_CONCAT(dataCollectionId) + from + DataCollection + where + DataCollection.dataCollectionGroupId = v_datacollection_summary.DataCollectionGroup_dataCollectionGroupId + ) as dataCollectionIdList, + ( + select + GROUP_CONCAT(imageDirectory) + from + DataCollection + where + DataCollection.dataCollectionGroupId = v_datacollection_summary.DataCollectionGroup_dataCollectionGroupId + ) as imageDirectoryList, + ( + select + GROUP_CONCAT(startTime) + from + DataCollection + where + DataCollection.dataCollectionGroupId = v_datacollection_summary.DataCollectionGroup_dataCollectionGroupId + ) as startTimeList, + ( + select + GROUP_CONCAT(magnification) + from + DataCollection + where + DataCollection.dataCollectionGroupId = v_datacollection_summary.DataCollectionGroup_dataCollectionGroupId + ) as magnificationList, + ( + select + GROUP_CONCAT(voltage) + from + DataCollection + where + DataCollection.dataCollectionGroupId = v_datacollection_summary.DataCollectionGroup_dataCollectionGroupId + ) as voltageList, + ( + SELECT + GROUP_CONCAT(numberOfImages) numberOfImages + FROM + DataCollection + where + DataCollection.dataCollectionGroupId = v_datacollection_summary.DataCollectionGroup_dataCollectionGroupId + ) as imagesCount +from + v_datacollection_summary \ No newline at end of file diff --git a/pyispyb/resources/queries/em/dataCollectionsStats.sql 
b/pyispyb/resources/queries/em/dataCollectionsStats.sql new file mode 100644 index 00000000..8575bb0a --- /dev/null +++ b/pyispyb/resources/queries/em/dataCollectionsStats.sql @@ -0,0 +1,33 @@ +select + dataCollectionId, + ( + select + count(*) + from + Movie + where + Movie.dataCollectionId = DataCollection.dataCollectionId + ) as movieCount, + ( + select + count(*) + from + MotionCorrection + inner join Movie on Movie.movieId = MotionCorrection.movieId + where + Movie.dataCollectionId = DataCollection.dataCollectionId + ) as motionCorrectionCount, + ( + select + count(*) + from + CTF + inner join MotionCorrection on MotionCorrection.motionCorrectionId = CTF.motionCorrectionId + inner join Movie on Movie.movieId = MotionCorrection.movieId + where + Movie.dataCollectionId = DataCollection.dataCollectionId + ) as ctfCorrectionCount +from + DataCollection + INNER JOIN DataCollectionGroup on DataCollectionGroup.dataCollectionGroupId = DataCollection.dataCollectionGroupId + INNER JOIN BLSession on BLSession.sessionId = DataCollectionGroup.sessionId \ No newline at end of file diff --git a/pyispyb/resources/queries/em/movie.sql b/pyispyb/resources/queries/em/movie.sql new file mode 100644 index 00000000..71f081c5 --- /dev/null +++ b/pyispyb/resources/queries/em/movie.sql @@ -0,0 +1 @@ +select * from v_em_movie \ No newline at end of file diff --git a/pyispyb/resources/queries/em/movie_thumbnails.sql b/pyispyb/resources/queries/em/movie_thumbnails.sql new file mode 100644 index 00000000..59a57774 --- /dev/null +++ b/pyispyb/resources/queries/em/movie_thumbnails.sql @@ -0,0 +1,20 @@ +select + Movie.micrographSnapshotFullPath as movie_thumbnail, + MotionCorrection.micrographSnapshotFullPath as motion_correction_thumbnail, + MotionCorrection.driftPlotFullPath as motion_correction_drift, + CTF.spectraImageThumbnailFullPath as ctf_thumbnail +from + Movie, + MotionCorrection, + CTF, + DataCollection, + DataCollectionGroup, + BLSession +WHERE + Movie.movieId = :movieId + and Movie.dataCollectionId = DataCollection.dataCollectionId + and DataCollection.dataCollectionGroupId = DataCollectionGroup.dataCollectionGroupId + and DataCollectionGroup.sessionId = BLSession.sessionId + and BLSession.proposalId = :proposalId + and MotionCorrection.movieId = Movie.movieId + and CTF.motionCorrectionId = MotionCorrection.motionCorrectionId \ No newline at end of file diff --git a/pyispyb/resources/queries/em/sessionStats.sql b/pyispyb/resources/queries/em/sessionStats.sql new file mode 100644 index 00000000..616f2609 --- /dev/null +++ b/pyispyb/resources/queries/em/sessionStats.sql @@ -0,0 +1 @@ +select * from v_em_stats \ No newline at end of file diff --git a/pyispyb/resources/queries/proposal/findProposalId.sql b/pyispyb/resources/queries/proposal/findProposalId.sql new file mode 100644 index 00000000..439a7845 --- /dev/null +++ b/pyispyb/resources/queries/proposal/findProposalId.sql @@ -0,0 +1,7 @@ +select + proposalId +from + Proposal +where + proposalId = :name + or UPPER(concat(proposalCode, proposalNumber)) = UPPER(:name) \ No newline at end of file diff --git a/pyispyb/resources/queries/proposal/loginAuthorizedProposal.sql b/pyispyb/resources/queries/proposal/loginAuthorizedProposal.sql new file mode 100644 index 00000000..a6f17721 --- /dev/null +++ b/pyispyb/resources/queries/proposal/loginAuthorizedProposal.sql @@ -0,0 +1,24 @@ +select + ( + select + count(*) + from + Proposal, + ProposalHasPerson, + Person + where + Proposal.proposalId = :proposalId + and Proposal.proposalId = ProposalHasPerson.proposalId 
+ and Person.personId = ProposalHasPerson.personId + and Person.login = :login + ) + ( + select + count(*) + from + Proposal, + Person + where + Proposal.proposalId = :proposalId + and Person.personId = Proposal.personId + and Person.login = :login + ) as is_authorized \ No newline at end of file diff --git a/pyispyb/resources/queries/proposal/personProposalIds.sql b/pyispyb/resources/queries/proposal/personProposalIds.sql new file mode 100644 index 00000000..1f150ab5 --- /dev/null +++ b/pyispyb/resources/queries/proposal/personProposalIds.sql @@ -0,0 +1,8 @@ +select + proposalId +from + Proposal, + Person +where + Proposal.personId = Person.personId + and Person.login = :login; \ No newline at end of file diff --git a/pyispyb/resources/queries/proposal/proposalsInfosAll.sql b/pyispyb/resources/queries/proposal/proposalsInfosAll.sql new file mode 100644 index 00000000..eb9ad4e8 --- /dev/null +++ b/pyispyb/resources/queries/proposal/proposalsInfosAll.sql @@ -0,0 +1,9 @@ +select + distinct p.proposalId as Proposal_proposalId, + p.proposalType as Proposal_proposalType, + p.personId as Proposal_personId, + p.title as Proposal_title, + p.proposalCode as Proposal_proposalCode, + p.proposalNumber as Proposal_proposalNumber +from + Proposal as p; \ No newline at end of file diff --git a/pyispyb/resources/queries/proposal/proposalsInfosLogin.sql b/pyispyb/resources/queries/proposal/proposalsInfosLogin.sql new file mode 100644 index 00000000..7cc7e855 --- /dev/null +++ b/pyispyb/resources/queries/proposal/proposalsInfosLogin.sql @@ -0,0 +1,31 @@ +select + distinct p.proposalId as Proposal_proposalId, + p.proposalType as Proposal_proposalType, + p.personId as Proposal_personId, + p.title as Proposal_title, + p.proposalCode as Proposal_proposalCode, + p.proposalNumber as Proposal_proposalNumber +from + ( + select + Proposal.* + from + Proposal, + Person + where + Proposal.personId = Person.personId + and Person.login = :login + union + select + Proposal.* + from + Proposal, + Person, + BLSession, + Session_has_Person + where + Person.login = :login + and Session_has_Person.personId = Person.personId + and BLSession.sessionId = Session_has_Person.sessionId + and Proposal.proposalId = BLSession.proposalId + ) as p \ No newline at end of file diff --git a/pyispyb/resources/queries/session/loginAuthorizedSession.sql b/pyispyb/resources/queries/session/loginAuthorizedSession.sql new file mode 100644 index 00000000..f56befa3 --- /dev/null +++ b/pyispyb/resources/queries/session/loginAuthorizedSession.sql @@ -0,0 +1,26 @@ +select + ( + select + count(*) + from + BLSession, + Session_has_Person, + Person + where + BLSession.sessionId = :sessionId + and BLSession.sessionId = Session_has_Person.sessionId + and Person.personId = Session_has_Person.personId + and Person.login = :login + ) + ( + select + count(*) + from + BLSession, + Proposal, + Person + where + BLSession.sessionId = :sessionId + and Proposal.proposalId = BLSession.proposalId + and Person.personId = Proposal.personId + and Person.login = :login + ) as is_authorized \ No newline at end of file diff --git a/pyispyb/resources/queries/session/sessionsInfosAll.sql b/pyispyb/resources/queries/session/sessionsInfosAll.sql new file mode 100644 index 00000000..1acec358 --- /dev/null +++ b/pyispyb/resources/queries/session/sessionsInfosAll.sql @@ -0,0 +1,128 @@ +select + *, + ( + select + count(*) + from + EnergyScan + where + EnergyScan.sessionId = v_session.sessionId + ) as energyScanCount, + ( + select + count(distinct(blSampleId)) + from + 
DataCollectionGroup + where + DataCollectionGroup.sessionId = v_session.sessionId + ) as sampleCount, + ( + select + sum(DataCollection.numberOfImages) + from + DataCollectionGroup, + DataCollection + where + DataCollectionGroup.sessionId = v_session.sessionId + and DataCollection.dataCollectionGroupId = DataCollectionGroup.dataCollectionGroupId + ) as imagesCount, + ( + select + count(*) + from + DataCollectionGroup, + DataCollection + where + DataCollectionGroup.sessionId = v_session.sessionId + and DataCollection.dataCollectionGroupId = DataCollectionGroup.dataCollectionGroupId + and DataCollection.numberOfImages < 5 + ) as testDataCollectionGroupCount, + ( + select + count(*) + from + DataCollectionGroup, + DataCollection + where + DataCollectionGroup.sessionId = v_session.sessionId + and DataCollection.dataCollectionGroupId = DataCollectionGroup.dataCollectionGroupId + and DataCollection.numberOfImages > 4 + ) as dataCollectionGroupCount, + ( + select + count(*) + from + DataCollectionGroup, + DataCollection + where + DataCollectionGroup.sessionId = v_session.sessionId + and DataCollection.dataCollectionGroupId = DataCollectionGroup.dataCollectionGroupId + and DataCollectionGroup.experimentType = 'EM' + ) as EMdataCollectionGroupCount, + ( + select + count(*) + from + XFEFluorescenceSpectrum + where + XFEFluorescenceSpectrum.sessionId = v_session.sessionId + ) as xrfSpectrumCount, + ( + select + count(*) + from + Experiment exp1 + where + v_session.sessionId = exp1.sessionId + and exp1.experimentType = 'HPLC' + ) as hplcCount, + ( + select + count(*) + from + Experiment exp2 + where + v_session.sessionId = exp2.sessionId + and exp2.experimentType = 'STATIC' + ) as sampleChangerCount, + ( + select + count(*) + from + Experiment exp3 + where + v_session.sessionId = exp3.sessionId + and exp3.experimentType = 'CALIBRATION' + ) as calibrationCount, + ( + select + experimentType + from + DataCollectionGroup + where + DataCollectionGroup.dataCollectionGroupId = ( + select + max(dataCollectionGroupId) + from + DataCollectionGroup dg2 + where + dg2.sessionId = v_session.sessionId + ) + ) as lastExperimentDataCollectionGroup, + ( + select + endTime + from + DataCollectionGroup + where + DataCollectionGroup.dataCollectionGroupId = ( + select + max(dataCollectionGroupId) + from + DataCollectionGroup dg2 + where + dg2.sessionId = v_session.sessionId + ) + ) as lastEndTimeDataCollectionGroup +from + v_session \ No newline at end of file diff --git a/pyispyb/resources/queries/session/sessionsInfosLogin.sql b/pyispyb/resources/queries/session/sessionsInfosLogin.sql new file mode 100644 index 00000000..514049a0 --- /dev/null +++ b/pyispyb/resources/queries/session/sessionsInfosLogin.sql @@ -0,0 +1,157 @@ +select + *, + ( + select + count(*) + from + EnergyScan + where + EnergyScan.sessionId = v_session.sessionId + ) as energyScanCount, + ( + select + count(distinct(blSampleId)) + from + DataCollectionGroup + where + DataCollectionGroup.sessionId = v_session.sessionId + ) as sampleCount, + ( + select + sum(DataCollection.numberOfImages) + from + DataCollectionGroup, + DataCollection + where + DataCollectionGroup.sessionId = v_session.sessionId + and DataCollection.dataCollectionGroupId = DataCollectionGroup.dataCollectionGroupId + ) as imagesCount, + ( + select + count(*) + from + DataCollectionGroup, + DataCollection + where + DataCollectionGroup.sessionId = v_session.sessionId + and DataCollection.dataCollectionGroupId = DataCollectionGroup.dataCollectionGroupId + and DataCollection.numberOfImages < 
5
+    ) as testDataCollectionGroupCount,
+    (
+        select
+            count(*)
+        from
+            DataCollectionGroup,
+            DataCollection
+        where
+            DataCollectionGroup.sessionId = v_session.sessionId
+            and DataCollection.dataCollectionGroupId = DataCollectionGroup.dataCollectionGroupId
+            and DataCollection.numberOfImages > 4
+    ) as dataCollectionGroupCount,
+    (
+        select
+            count(*)
+        from
+            DataCollectionGroup,
+            DataCollection
+        where
+            DataCollectionGroup.sessionId = v_session.sessionId
+            and DataCollection.dataCollectionGroupId = DataCollectionGroup.dataCollectionGroupId
+            and DataCollectionGroup.experimentType = 'EM'
+    ) as EMdataCollectionGroupCount,
+    (
+        select
+            count(*)
+        from
+            XFEFluorescenceSpectrum
+        where
+            XFEFluorescenceSpectrum.sessionId = v_session.sessionId
+    ) as xrfSpectrumCount,
+    (
+        select
+            count(*)
+        from
+            Experiment exp1
+        where
+            v_session.sessionId = exp1.sessionId
+            and exp1.experimentType = 'HPLC'
+    ) as hplcCount,
+    (
+        select
+            count(*)
+        from
+            Experiment exp2
+        where
+            v_session.sessionId = exp2.sessionId
+            and exp2.experimentType = 'STATIC'
+    ) as sampleChangerCount,
+    (
+        select
+            count(*)
+        from
+            Experiment exp3
+        where
+            v_session.sessionId = exp3.sessionId
+            and exp3.experimentType = 'CALIBRATION'
+    ) as calibrationCount,
+    (
+        select
+            experimentType
+        from
+            DataCollectionGroup
+        where
+            DataCollectionGroup.dataCollectionGroupId = (
+                select
+                    max(dataCollectionGroupId)
+                from
+                    DataCollectionGroup dg2
+                where
+                    dg2.sessionId = v_session.sessionId
+            )
+    ) as lastExperimentDataCollectionGroup,
+    (
+        select
+            endTime
+        from
+            DataCollectionGroup
+        where
+            DataCollectionGroup.dataCollectionGroupId = (
+                select
+                    max(dataCollectionGroupId)
+                from
+                    DataCollectionGroup dg2
+                where
+                    dg2.sessionId = v_session.sessionId
+            )
+    ) as lastEndTimeDataCollectionGroup
+from
+    v_session
+where
+    sessionId in (
+        (
+            select
+                BLSession.sessionId
+            from
+                BLSession,
+                Session_has_Person,
+                Person
+            where
+                BLSession.sessionId = Session_has_Person.sessionId
+                and Person.personId = Session_has_Person.personId
+                and Person.login = :login
+        )
+        union
+        (
+            select
+                BLSession.sessionId
+            from
+                BLSession,
+                Proposal,
+                Person
+            where
+                Proposal.proposalId = BLSession.proposalId
+                and Person.personId = Proposal.personId
+                and Person.login = :login
+        )
+    )
\ No newline at end of file
diff --git a/pyispyb/simulation/__init__.py b/pyispyb/simulation/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pyispyb/simulation/base.py b/pyispyb/simulation/base.py
new file mode 100644
index 00000000..4d45722b
--- /dev/null
+++ b/pyispyb/simulation/base.py
@@ -0,0 +1,79 @@
+from abc import ABC, abstractmethod
+from contextlib import contextmanager
+import logging
+import os
+import pkg_resources
+from typing import Any
+
+import yaml
+
+from ..config import settings
+from ..app.extensions.database.session import _session
+
+logger = logging.getLogger(__name__)
+
+
+def load_config() -> dict[str, Any]:
+    if not settings.simulation_config:
+        raise RuntimeError("`SIMULATION_CONFIG` environment variable is not defined")
+
+    if not os.path.exists(settings.simulation_config):
+        raise AttributeError(f"Cannot find config file: `{settings.simulation_config}`")
+
+    config = {}
+    with open(settings.simulation_config, "r") as stream:
+        config = yaml.safe_load(stream)
+
+    return config
+
+
+class Simulation(ABC):
+    def __init__(self):
+        self._config = load_config()
+
+    @property
+    def config(self) -> dict[str, Any]:
+        return self._config
+
+    @contextmanager
+    def session(self):
+        
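# Yield a database session that commits on success, rolls back on
+        # any error and always closes the underlying connection.
+        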
db_session = _session()
+        try:
+            yield db_session
+            db_session.commit()
+        except Exception:
+            db_session.rollback()
+            raise
+        finally:
+            db_session.close()
+
+    @property
+    def beamlines(self) -> list[str]:
+        return list(self.config["sessions"].keys())
+
+    @property
+    def experiment_types(self) -> list[str]:
+        return list(self.config["experiments"].keys())
+
+    def before_start(self, dataCollectionId: int) -> None:
+        for entry in pkg_resources.iter_entry_points(
+            "ispyb.simulator.before_datacollection"
+        ):
+            fn = entry.load()
+            logger.info(f"Executing before start plugin `{entry.name}`")
+            fn(dataCollectionId)
+
+    def after_end(self, dataCollectionId: int) -> None:
+        for entry in pkg_resources.iter_entry_points(
+            "ispyb.simulator.after_datacollection"
+        ):
+            fn = entry.load()
+            logger.info(f"Executing after end plugin `{entry.name}`")
+            fn(dataCollectionId)
+
+    def do_run(self, *args, **kwargs) -> None:
+        self.run(*args, **kwargs)
+
+    @abstractmethod
+    def run(self, *args, **kwargs) -> None:
+        pass
diff --git a/pyispyb/simulation/cli.py b/pyispyb/simulation/cli.py
new file mode 100644
index 00000000..b4a6b7f0
--- /dev/null
+++ b/pyispyb/simulation/cli.py
@@ -0,0 +1,51 @@
+import argparse
+import logging
+
+from .datacollection import SimulateDataCollection
+
+
+logger = logging.getLogger(__name__)
+logging.basicConfig(level=logging.INFO)
+
+
+def run() -> None:
+    try:
+        sdc = SimulateDataCollection()
+    except (RuntimeError, AttributeError) as e:
+        exit(f"Simulation Error: {e}")
+
+    parser = argparse.ArgumentParser(description="ISPyB simulation tool")
+    parser.add_argument(
+        "beamline", help="Beamline to run simulation against", choices=sdc.beamlines
+    )
+
+    parser.add_argument(
+        "experiment", help="Experiment to simulate", choices=sdc.experiment_types
+    )
+
+    parser.add_argument(
+        "--delay",
+        default=5,
+        type=int,
+        dest="delay",
+        help="Delay between plugin start and end events",
+    )
+    parser.add_argument(
+        "--debug",
+        action="store_true",
+        help="Enable debug output",
+    )
+
+    args = parser.parse_args()
+
+    root = logging.getLogger()
+    root.setLevel(level=logging.DEBUG if args.debug else logging.INFO)
+
+    try:
+        sdc.do_run(args.beamline, args.experiment, delay=args.delay)
+    except Exception as e:
+        if args.debug:
+            logger.exception("Simulation Error")
+            print(e)
+        else:
+            print(f"Simulation Error: {e}")
diff --git a/pyispyb/simulation/datacollection.py b/pyispyb/simulation/datacollection.py
new file mode 100644
index 00000000..d1fca4e0
--- /dev/null
+++ b/pyispyb/simulation/datacollection.py
@@ -0,0 +1,390 @@
+from datetime import datetime, timedelta
+import logging
+import os
+import shutil
+import time
+
+from ispyb import models
+
+from ..app.extensions.database.definitions import _proposal, _session
+from .base import Simulation
+
+
+logger = logging.getLogger(__name__)
+
+
+class SimulateDataCollection(Simulation):
+    def _get_container_position(
+        self, ses, blsession: str, proposalId: str, beamline: str
+    ) -> tuple[int, int]:
+        shipment_name = "Simulation_Shipment"
+        shipment = (
+            ses.query(models.Shipping)
+            .filter(models.Shipping.proposalId == proposalId)
+            .filter(models.Shipping.shippingName == shipment_name)
+            .first()
+        )
+
+        if not shipment:
+            logger.debug("Creating shipment")
+            shipment = models.Shipping(
+                shippingName=shipment_name,
+                proposalId=proposalId,
+                creationDate=datetime.now(),
+            )
+
+            ses.add(shipment)
+            ses.commit()
+
+        dewar_name = "Simulation_Dewar"
+        dewar = (
+            ses.query(models.Dewar.dewarId)
+            .filter(models.Dewar.shippingId == shipment.shippingId)
+            
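# match the simulation dewar by its code within this shipment
+            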
.filter(models.Dewar.code == dewar_name) + .first() + ) + + if not dewar: + logger.debug("Creating dewar") + dewar = models.Dewar( + shippingId=shipment.shippingId, + code=dewar_name, + dewarStatus="processing", + ) + ses.add(dewar) + ses.commit() + + container_name = "Simulation_Container" + container = ( + ses.query(models.Container.containerId) + .filter(models.Container.dewarId == dewar.dewarId) + .filter(models.Container.code == container_name) + .first() + ) + + if not container: + logger.debug("Creating container") + container = models.Container( + dewarId=dewar.dewarId, + code=container_name, + containerType="Box", + capacity=25, + bltimeStamp=datetime.now(), + containerStatus="at facility", + # beamlineLocation=beamline, + # sampleChangerLocation=1, + ) + ses.add(container) + ses.commit() + + containerhistory = models.ContainerHistory( + containerId=container.containerId, + status="at facility", + location=1, + beamlineName=beamline, + ) + + ses.add(containerhistory) + ses.commit() + + samples = ( + ses.query(models.BLSample) + .filter(models.BLSample.containerId == container.containerId) + .all() + ) + max_loc = 0 + for s in samples: + if int(s.location) > max_loc: + max_loc = int(s.location) + + return container.containerId, max_loc + 1 + + def run(self, beamline: str, experiment: str, delay=0): + blses: str = self.config["sessions"][beamline] + + if experiment not in self.config["experiments"]: + raise KeyError(f"No such experiment {experiment}") + + exp = self.config["experiments"][experiment] + data = os.path.join(self.config["raw_data"], exp["data"]) + + if not exp.get("experimentType"): + raise KeyError( + f"Experiment `{experiment}` does not specify `experimentType`" + ) + + if not os.path.exists(data): + raise AttributeError(f"Raw data file: `{data}` does not exist") + + if not exp.get("sample"): + raise KeyError(f"No sample specified for experiment `{experiment}`") + + if exp["sample"] not in self.config["samples"]: + raise KeyError( + f"Experiment sample `{exp['sample']}` is not defined in `samples`" + ) + + sample = self.config["samples"][exp["sample"]] + + with self.session() as ses: + prop, blsession = ( + ses.query(_proposal, models.BLSession) + .join( + models.Proposal, + models.Proposal.proposalId == models.BLSession.proposalId, + ) + .filter(_session == blses) + .first() + ) + + blsample = ( + ses.query(models.BLSample) + .filter(models.BLSample.name == sample["name"]) + .first() + ) + + if not blsample: + for k in ["component", "name"]: + if not sample.get(k): + raise KeyError(f"No {k} specified for sample {exp['sample']}") + + if sample["component"] not in self.config["components"]: + raise KeyError( + f"Sample component {sample['component']} is not defined in `components`" + ) + + comp = self.config["components"][sample["component"]] + for k in ["acronym"]: + if not comp.get(k): + raise KeyError( + f"No {k} specified for component {sample['component']}" + ) + + component = ( + ses.query(models.Protein) + .filter(models.Protein.acronym == comp["acronym"]) + .first() + ) + + if not component: + logger.info(f"Creating component {comp['acronym']}") + component = models.Protein( + proposalId=blsession.proposalId, + acronym=comp.get("acronym"), + name=comp.get("name", comp.get("acronym")), + sequence=comp.get("sequence"), + density=comp.get("density"), + molecularMass=comp.get("molecularMass"), + description="Simulated component", + ) + ses.add(component) + ses.commit() + + crystal = models.Crystal(proteinId=component.proteinId) + ses.add(crystal) + ses.commit() + + 
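# No existing BLSample matched: register one in the next free container slot.
+                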
logger.info(f"Creating sample {sample['name']}") + containerid, position = self._get_container_position( + ses, blses, blsession.proposalId, beamline + ) + blsample = models.BLSample( + name=sample["name"], + crystalId=crystal.crystalId, + location=position, + containerId=containerid, + ) + ses.add(blsample) + ses.commit() + + subsampleid = None + if exp.get("subsample"): + logger.info("Creating subsample") + sub = exp["subsample"] + + pos1id = None + if sub.get("x") and sub.get("y"): + pos1 = models.Position( + posX=sub["x"], + posY=sub["y"], + ) + ses.add(pos1) + ses.commit() + + pos1id = pos1.positionId + + pos2id = None + if sub.get("x2") and sub.get("y2"): + pos2 = models.Position( + posX=sub["x2"], + posY=sub["y2"], + ) + ses.add(pos2) + ses.commit() + + pos2id = pos2.positionId + + subsample = models.BLSubSample( + positionId=pos1id, + position2Id=pos2id, + blSampleId=blsample.blSampleId, + comments="Simulated sample", + ) + + if hasattr(subsample, "type"): + subsample.type = sub.get("type") + + ses.add(subsample) + ses.commit() + + subsampleid = subsample.blSubSampleId + + startTime = datetime.now() + endTime = datetime.now() + timedelta(minutes=5) + + logger.debug("Creating datacollection group") + dcg = models.DataCollectionGroup( + sessionId=blsession.sessionId, + experimentType=exp["experimentType"], + blSampleId=blsample.blSampleId, + startTime=startTime, + endTime=endTime, + ) + ses.add(dcg) + ses.commit() + + logger.debug("Creating datacollection") + dc = models.DataCollection( + blSubSampleId=subsampleid, + dataCollectionGroupId=dcg.dataCollectionGroupId, + fileTemplate=os.path.basename(exp["data"]), + imageContainerSubPath=exp.get( + "imageContainerSubPath", "1.1/measurement" + ), + numberOfImages=exp.get("numberOfImages"), + wavelength=exp.get("wavelength"), + exposureTime=exp.get("exposureTime"), + runStatus="Successful", + comments="Simulated datacollection", + startTime=startTime, + endTime=endTime, + ) + + # Deprecated + if hasattr(dc, "BLSAMPLEID"): + dc.BLSAMPLEID = blsample.blSampleId + + ses.add(dc) + ses.commit() + + if exp.get("grid"): + logger.debug("Creating gridinfo") + grid = models.GridInfo( + steps_x=exp["grid"]["steps_x"], + steps_y=exp["grid"]["steps_y"], + dx_mm=exp["grid"]["dx_mm"], + dy_mm=exp["grid"]["dy_mm"], + ) + + if hasattr(grid, "snapshot_offsetXPixel"): + grid.snapshot_offsetXPixel = exp["grid"]["snapshot_offsetXPixel"] + grid.snapshot_offsetYPixel = exp["grid"]["snapshot_offsetYPixel"] + + if hasattr(grid, "pixelsPerMicronX"): + grid.pixelsPerMicronX = exp["grid"]["pixelsPerMicronX"] + grid.pixelsPerMicronY = exp["grid"]["pixelsPerMicronY"] + + if hasattr(grid, "dataCollectionId"): + grid.dataCollectionId = dc.dataCollectionId + # Deprecated but needed for pydb + else: + grid.dataCollectionGroupId = dcg.dataCollectionGroupId + + ses.add(grid) + ses.commit() + + logger.info(f"Created datacollection: `{dc.dataCollectionId}`") + logger.info( + f"{self.config['ispyb_url']}/visit/{blses}/id/{dc.dataCollectionId}" + ) + + logger.info("Triggering before start plugins") + self.before_start(dc.dataCollectionId) + + # Create the dataset dir + data_dir = os.path.join( + self.config["data_dir"].format(beamline=beamline), + prop, + exp["sample"], + f"{exp['sample']}_{dc.dataCollectionId}", + ) + + dc.imageDirectory = data_dir + ses.commit() + + if os.path.exists(data_dir): + logger.warning(f"Data directory already exists: {data_dir}") + + os.makedirs(data_dir) + if not os.path.exists(data_dir): + raise AttributeError( + f"Could not create output data 
directory: {data_dir}" + ) + + # Link data files / snapshots + link = self.config.get("copy_method", "copy") == "link" + if link: + logger.debug("Linking data") + os.link(data, os.path.join(data_dir, os.path.basename(data))) + else: + logger.debug("Copying data") + shutil.copy(data, os.path.join(data_dir, os.path.basename(data))) + + snapshot_path = os.path.join( + self.config["raw_data"], exp.get("xtalSnapshotFullPath1") + ) + if snapshot_path: + if os.path.exists(snapshot_path): + snapshot = os.path.join(data_dir, os.path.basename(snapshot_path)) + if link: + logger.debug("Linking snapshot") + os.link(snapshot_path, snapshot) + else: + logger.debug("Copying snapshot") + shutil.copy(snapshot_path, snapshot) + + snap, snap_extension = os.path.splitext(snapshot_path) + thumb = f"{snap}t{snap_extension}" + if os.path.exists(thumb): + if link: + logger.debug("Linking thumbnail") + os.link( + thumb, + os.path.join( + data_dir, + f"{os.path.basename(snap)}t{snap_extension}", + ), + ) + else: + logger.debug("Copying thumbnail") + shutil.copy( + thumb, + os.path.join( + data_dir, + f"{os.path.basename(snap)}t{snap_extension}", + ), + ) + else: + logger.warning(f"Snapshot thumbnail does not exist {thumb}") + + dc.xtalSnapshotFullPath1 = snapshot + else: + logger.warning(f"Snapshot file does not exist {snapshot_path}") + + logger.info(f"Finshed copying data to: {data_dir}") + + if delay: + time.sleep(delay) + + logger.info("Triggering after end plugins") + self.after_end(dc.dataCollectionId) diff --git a/pyispyb/ssx/__init__.py b/pyispyb/ssx/__init__.py deleted file mode 100644 index 99242dfd..00000000 --- a/pyispyb/ssx/__init__.py +++ /dev/null @@ -1,34 +0,0 @@ -# encoding: utf-8 -# pylint: disable=no-member -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. -# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . - - -__license__ = "LGPLv3+" - - -def init_app(app): - - from . import modules - - modules.init_app(app) - - from . 
import routes - - routes.init_app(app) diff --git a/pyispyb/ssx/models.py b/pyispyb/ssx/models.py deleted file mode 100644 index 86c07585..00000000 --- a/pyispyb/ssx/models.py +++ /dev/null @@ -1,213 +0,0 @@ -# coding: utf-8 -from sqlalchemy import Column, Float, ForeignKey, Integer, JSON, String -from sqlalchemy.orm import relationship -from sqlalchemy.dialects.mysql.enumerated import ENUM -from flask_sqlalchemy import SQLAlchemy - - -from pyispyb.app.extensions import db - - - -class CrystalSizeDistribution(db.Model): - __tablename__ = 'CrystalSizeDistribution' - - crystalSizeDistributionId = db.Column(db.Integer, primary_key=True, unique=True) - crystalHabit = db.Column(db.String(255)) - characteristicDimensions = db.Column(db.String(255)) - minDimension = db.Column(db.String(255), info='comma separated floats') - maxDimension = db.Column(db.String(255), info='comma separated floats') - - - -class CrystalSlurry(db.Model): - __tablename__ = 'CrystalSlurry' - - crystalSlurryId = db.Column(db.Integer, primary_key=True, unique=True) - name = db.Column(db.String(255)) - crystalSizeDistributionId = db.Column(db.ForeignKey('CrystalSizeDistribution.crystalSizeDistributionId'), index=True) - crystalDensity = db.Column(db.Float, info='1/mm3') - bufferId = db.Column(db.Float, info='reference to Buffer.bufferId') - - CrystalSizeDistribution = db.relationship('CrystalSizeDistribution', primaryjoin='CrystalSlurry.crystalSizeDistributionId == CrystalSizeDistribution.crystalSizeDistributionId') - - - -class CrystalSlurryHasCrystal(db.Model): - __tablename__ = 'CrystalSlurry_has_Crystal' - - CrystalSlurryHasCrystalId = db.Column(db.Integer, primary_key=True, unique=True) - crystalSlurryId = db.Column(db.ForeignKey('CrystalSlurry.crystalSlurryId'), nullable=False, index=True) - crystalId = db.Column(db.Integer, nullable=False) - - CrystalSlurry = db.relationship('CrystalSlurry', primaryjoin='CrystalSlurryHasCrystal.crystalSlurryId == CrystalSlurry.crystalSlurryId') - - - -class DataSet(db.Model): - __tablename__ = 'DataSet' - - dataSetId = db.Column(db.Integer, primary_key=True, unique=True) - name = db.Column(db.String(255), nullable=False) - mergedResultsFilename = db.Column(db.String(255)) - - - -class EventTrain(db.Model): - __tablename__ = 'EventTrain' - - eventTrainId = db.Column(db.Integer, primary_key=True, unique=True) - name = db.Column(db.String(255)) - timeOn = db.Column(db.Float, info='sec') - duration = db.Column(db.Float, info='sec') - period = db.Column(db.Float) - numberOfRepetitions = db.Column(db.Float) - nameInEventLog = db.Column(db.String(255)) - triggerDevice = db.Column(db.String(255)) - - - -class ExperimentalPlan(db.Model): - __tablename__ = 'ExperimentalPlan' - - experimentalPlanId = db.Column(db.Integer, primary_key=True, unique=True) - name = db.Column(db.String(255)) - numberOfRepetitions = db.Column(db.Integer, info='for micro-fluidic, jet, tape but not for chip') - period = db.Column(db.Float, info='seconds but unknown/self adjusting for chip') - masterTriggerId = db.Column(db.ForeignKey('MasterTrigger.masterTriggerId'), index=True) - repeatedSequenceId = db.Column(db.ForeignKey('RepeatedSequence.repeatedSequenceId'), nullable=False, index=True) - - MasterTrigger = db.relationship('MasterTrigger', primaryjoin='ExperimentalPlan.masterTriggerId == MasterTrigger.masterTriggerId') - RepeatedSequence = db.relationship('RepeatedSequence', primaryjoin='ExperimentalPlan.repeatedSequenceId == RepeatedSequence.repeatedSequenceId') - - - -class LoadedSample(db.Model): - __tablename__ 
= 'LoadedSample' - - loadedSampleId = db.Column(db.Integer, primary_key=True, unique=True) - name = db.Column(db.String(255), info='to be used as part of the image and processing file names') - sampleStockId = db.Column(db.ForeignKey('SampleStock.sampleStockId'), index=True) - sampleDeliveryDeviceId = db.Column(db.ForeignKey('SampleDeliveryDevice.sampleDeliveryDeviceId'), index=True) - loadingPattern = db.Column(db.Integer) - descriptionJson = db.Column(db.JSON) - - SampleDeliveryDevice = db.relationship('SampleDeliveryDevice', primaryjoin='LoadedSample.sampleDeliveryDeviceId == SampleDeliveryDevice.sampleDeliveryDeviceId') - SampleStock = db.relationship('SampleStock', primaryjoin='LoadedSample.sampleStockId == SampleStock.sampleStockId') - - - -class MasterTrigger(db.Model): - __tablename__ = 'MasterTrigger' - - masterTriggerId = db.Column(db.Integer, primary_key=True, unique=True) - nameInEventLog = db.Column(db.String(255)) - triggerDevice = db.Column(db.Integer) - descriptionJson = db.Column(db.JSON) - - - -class Micrograph(db.Model): - __tablename__ = 'Micrograph' - - micrographId = db.Column(db.Integer, primary_key=True, unique=True) - crystalSlurryId = db.Column(db.ForeignKey('CrystalSlurry.crystalSlurryId'), nullable=False, index=True) - url = db.Column(db.String(255)) - objectSidePixelSize = db.Column(db.String(255), info='comma separated two floats') - descriptionJson = db.Column(db.JSON) - - CrystalSlurry = db.relationship('CrystalSlurry', primaryjoin='Micrograph.crystalSlurryId == CrystalSlurry.crystalSlurryId') - - - -class RepeatedSequence(db.Model): - __tablename__ = 'RepeatedSequence' - - repeatedSequenceId = db.Column(db.Integer, primary_key=True, unique=True) - name = db.Column(db.String(255)) - - - -class SampleDeliveryDevice(db.Model): - __tablename__ = 'SampleDeliveryDevice' - - sampleDeliveryDeviceId = db.Column(db.Integer, primary_key=True, unique=True) - type = db.Column(db.ENUM('photoChip', 'microFluidics', 'viscoousJet', 'tapeDevice')) - descriptionJson = db.Column(db.JSON) - - - -class SampleStock(db.Model): - __tablename__ = 'SampleStock' - - sampleStockId = db.Column(db.Integer, primary_key=True, unique=True) - name = db.Column(db.String(255), nullable=False) - crystalSlurryId = db.Column(db.ForeignKey('CrystalSlurry.crystalSlurryId'), nullable=False, index=True) - concentrationFactor = db.Column(db.Float, nullable=False) - crystalDensity = db.Column(db.Float, nullable=False) - additiveId = db.Column(db.Integer, info='reference to Additive.additiveId') - note = db.Column(db.String(255)) - - CrystalSlurry = db.relationship('CrystalSlurry', primaryjoin='SampleStock.crystalSlurryId == CrystalSlurry.crystalSlurryId') - - - -class SsxDataAcquisition(db.Model): - __tablename__ = 'SsxDataAcquisition' - - ssxDataAcquisitionId = db.Column(db.Integer, primary_key=True, unique=True) - loadedSampleId = db.Column(db.ForeignKey('LoadedSample.loadedSampleId'), nullable=False, index=True) - dataCollectionId = db.Column(db.Integer, nullable=False, info='reference to DataCollection.dataCollectionId') - experimentalPlanId = db.Column(db.ForeignKey('ExperimentalPlan.experimentalPlanId'), nullable=False, index=True) - eventLogFilename = db.Column(db.String(255), nullable=False, info='url to shorlist file') - dataSetId = db.Column(db.ForeignKey('DataSet.dataSetId'), nullable=False, index=True) - autoprocessingProgrammId = db.Column(db.Integer, info='reference to AutoProcProgram.autoProcProgramId') - - DataSet = db.relationship('DataSet', primaryjoin='SsxDataAcquisition.dataSetId == 
DataSet.dataSetId') - ExperimentalPlan = db.relationship('ExperimentalPlan', primaryjoin='SsxDataAcquisition.experimentalPlanId == ExperimentalPlan.experimentalPlanId') - LoadedSample = db.relationship('LoadedSample', primaryjoin='SsxDataAcquisition.loadedSampleId == LoadedSample.loadedSampleId') - - - -class TimedExcitation(db.Model): - __tablename__ = 'TimedExcitation' - - timedExcitationId = db.Column(db.Integer, primary_key=True, unique=True) - name = db.Column(db.String(255)) - repeatedSequenceId = db.Column(db.ForeignKey('RepeatedSequence.repeatedSequenceId'), index=True) - eventTrainId = db.Column(db.ForeignKey('EventTrain.eventTrainId'), index=True) - ssxExcitation = db.Column(db.String(255)) - - EventTrain = db.relationship('EventTrain', primaryjoin='TimedExcitation.eventTrainId == EventTrain.eventTrainId') - RepeatedSequence = db.relationship('RepeatedSequence', primaryjoin='TimedExcitation.repeatedSequenceId == RepeatedSequence.repeatedSequenceId') - - - -class TimedXrayDetection(db.Model): - __tablename__ = 'TimedXrayDetection' - - timedXrayDetectionId = db.Column(db.Integer, primary_key=True, unique=True) - name = db.Column(db.String(255)) - repeatedSequenceId = db.Column(db.ForeignKey('RepeatedSequence.repeatedSequenceId'), index=True) - eventTrainId = db.Column(db.ForeignKey('EventTrain.eventTrainId'), index=True) - numberOfInternalTriggers = db.Column(db.Integer) - internalTriggerPeriod = db.Column(db.Integer) - internalGateDuration = db.Column(db.Integer) - - EventTrain = db.relationship('EventTrain', primaryjoin='TimedXrayDetection.eventTrainId == EventTrain.eventTrainId') - RepeatedSequence = db.relationship('RepeatedSequence', primaryjoin='TimedXrayDetection.repeatedSequenceId == RepeatedSequence.repeatedSequenceId') - - - -class TimedXrayExposure(db.Model): - __tablename__ = 'TimedXrayExposure' - - timedXrayExposureId = db.Column(db.Integer, primary_key=True, unique=True) - name = db.Column(db.String(255)) - repeatedSequenceId = db.Column(db.ForeignKey('RepeatedSequence.repeatedSequenceId'), index=True) - eventTrainId = db.Column(db.ForeignKey('EventTrain.eventTrainId'), index=True) - timedBunches = db.Column(db.String(255)) - shutter = db.Column(db.String(255)) - - EventTrain = db.relationship('EventTrain', primaryjoin='TimedXrayExposure.eventTrainId == EventTrain.eventTrainId') - RepeatedSequence = db.relationship('RepeatedSequence', primaryjoin='TimedXrayExposure.repeatedSequenceId == RepeatedSequence.repeatedSequenceId') diff --git a/pyispyb/ssx/modules/__init__.py b/pyispyb/ssx/modules/__init__.py deleted file mode 100644 index 3e1b08b0..00000000 --- a/pyispyb/ssx/modules/__init__.py +++ /dev/null @@ -1,43 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. -# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . 
- - -import os -import logging -from importlib import import_module - -__license__ = "LGPLv3+" - - -log = logging.getLogger(__name__) - - -def init_app(app, **kwargs): - """Inits extensions. - - Args: - app (Flask app): [description] - """ - - for module_name in os.listdir(os.path.dirname(__file__)): - if not module_name.startswith("__") and module_name.endswith(".py"): - module = import_module(".%s" % module_name[:-3], package=__name__) - if hasattr(module, "init_app"): - module.init_app(app, **kwargs) diff --git a/pyispyb/ssx/modules/api.py b/pyispyb/ssx/modules/api.py deleted file mode 100644 index 1e3bf6e7..00000000 --- a/pyispyb/ssx/modules/api.py +++ /dev/null @@ -1,34 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. -# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . - - -__license__ = "LGPLv3+" - - -from flask import Blueprint - -from pyispyb.app.extensions import api - - -def init_app(app, **kwargs): - # pylint: disable=unused-argument - api_v1_blueprint = Blueprint("api", __name__, url_prefix=app.config["API_ROOT"]) - api.api_v1.init_app(api_v1_blueprint) - app.register_blueprint(api_v1_blueprint, url_prefix=app.config["API_ROOT"]) diff --git a/pyispyb/ssx/modules/loaded_sample.py b/pyispyb/ssx/modules/loaded_sample.py deleted file mode 100644 index 78a4a2e3..00000000 --- a/pyispyb/ssx/modules/loaded_sample.py +++ /dev/null @@ -1,261 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. -# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . - - -__license__ = "LGPLv3+" - - -import logging - -from flask_restx import abort -from flask_restx._http import HTTPStatus - - -from pyispyb.app.extensions import db, auth_provider -from pyispyb.ssx import models, schemas -from pyispyb import connector as ispyb_service_connector - -log = logging.getLogger(__name__) - - -def get_loaded_samples(request): - """Returns loaded_samples by query parameters""" - - query_dict = request.args.to_dict() - - return ( - db.get_db_items( - models.LoadedSample, - schemas.loaded_sample.dict_schema, - schemas.loaded_sample.ma_schema, - query_dict, - ), - HTTPStatus.OK, - ) - - -def get_loaded_sample_by_id(loaded_sample_id): - """Returns loaded_sample by its loaded_sampleId. 
- - Args: - loaded_sample_id (int): corresponds to loaded_sampleId in db - - Returns: - dict: info about loaded_sample as dict - """ - id_dict = {"loadedSampleId": loaded_sample_id} - - return db.get_db_item( - models.LoadedSample, schemas.loaded_sample.ma_schema, id_dict - ) - - -def add_loaded_sample(data_dict): - """Adds a new ssx loaded sample. - - Args: - data_dict ([type]): [description] - - Returns: - int, dict: HTTP status code and response dict - """ - return db.add_db_item( - models.LoadedSample, schemas.loaded_sample.ma_schema, data_dict - ) - - -def get_loaded_sample_info_by_id(loaded_sample_id): - """ - Returns loaded_sample by its loaded_sampleId. - - Args: - loaded_sample_id (int): corresponds to loaded_sampleId in db - - Returns: - dict: info about loaded_sample as dict - """ - loaded_sample_json = get_loaded_sample_by_id(loaded_sample_id) - - sample_stock_json = get_sample_stock_info_by_id(loaded_sample_json["sampleStockId"]) - loaded_sample_json["sample_stock"] = sample_stock_json - - sample_delivery_device = get_sample_delivery_device_by_id( - loaded_sample_json["sampleDeliveryDeviceId"] - ) - loaded_sample_json["sample_delivery_device"] = sample_delivery_device - - return loaded_sample_json - - -def get_all_crystal_slurry(): - """Returns all crystal slurry db items. - - Returns: - [type]: [description] - """ - crystal_slurry_list = models.CrystalSlurry.query.all() - return schemas.crystal_slurry.ma_schema.dump(crystal_slurry_list, many=True) - - -def get_crystal_slurry_by_id(crystal_slurry_id): - id_dict = {"crystalSlurryId": crystal_slurry_id} - return db.get_db_item( - models.CrystalSlurry, schemas.crystal_slurry.ma_schema, id_dict - ) - - -def add_crystal_slurry(data_dict): - """Adds a new crystal slurry item. - - Args: - crystal_slurry_dict ([type]): [description] - - Returns: - [type]: [description] - """ - status_code, result = ispyb_service_connector.get_ispyb_resource( - "core", "/samples/crystals/%d" % data_dict["crystalId"] - ) - if status_code == 200: - crystal_id = data_dict.get("crystalId") - if crystal_id is None: - abort(HTTPStatus.NO_CONTENT, "No crystalId in crystalSlurry dict") - else: - data_dict.pop("crystalId") - return db.add_db_item( - models.CrystalSlurry, schemas.crystal_slurry.ma_schema, data_dict - ) - else: - result = ( - "Unable to add new crystal slurry Crystal with id %s do not exist" - % data_dict["crystalId"] - ) - abort(status_code, result) - - -def get_crystal_size_distributions(): - """Returns all crystal size distribution db items. - - Returns: - [type]: [description] - """ - crystal_size_distribution_list = models.CrystalSizeDistribution.query.all() - return schemas.crystal_size_distribution.ma_schema.dump( - crystal_size_distribution_list, many=True - ) - - -def add_crystal_size_distribution(data_dict): - """Adds a new crystal size distribution. - - Args: - data_dict ([type]): [description] - - Returns: - int, dict: HTTP status code and response dict - """ - return db.add_db_item( - models.CrystalSizeDistribution, - schemas.crystal_size_distribution.ma_schema, - data_dict, - ) - - -def get_sample_delivery_devices(request): - """Returns all sample delivery devices. 
- - Args: - query_dict ([type]): [description] - - Returns: - [type]: [description] - """ - query_dict = request.args.to_dict() - - return ( - db.get_db_items( - models.SampleDeliveryDevice, - schemas.sample_delivery_device.f_schema, - schemas.sample_delivery_device.ma_schema, - query_dict, - ), - HTTPStatus.OK, - ) - - -def add_sample_delivery_device(data_dict): - """Adds a new sample delivery device. - - Args: - sample_delivery_device ([type]): [description] - - Returns: - [type]: [description] - """ - return db.add_db_item( - models.SampleDeliveryDevice, schemas.sample_delivery_device.ma_schema, data_dict - ) - - -def get_sample_delivery_device_by_id(sample_delivery_device_id): - id_dict = {"sampleDeliveryDeviceId": sample_delivery_device_id} - return db.get_db_item( - models.SampleDeliveryDevice, schemas.sample_delivery_device.ma_schema, id_dict - ) - - -def get_sample_stocks(): - """Returns all sample stock db items. - - Returns: - [type]: [description] - """ - sample_stock_list = models.SampleStock.query.all() - return schemas.sample_stock.ma_schema.dump(sample_stock_list, many=True) - - -def add_sample_stock(data_dict): - """Adds a new crystal slurry item. - - Args: - data_dict ([type]): [description] - - Returns: - [type]: [description] - - """ - return db.add_db_item(models.SampleStock, schemas.sample_stock.ma_schema, data_dict) - - -def get_sample_stock_by_id(sample_stock_id): - id_dict = {"sampleStockId": sample_stock_id} - return db.get_db_item( - models.SampleStock, schemas.sample_stock.ma_schema, id_dict - ) - - -def get_sample_stock_info_by_id(sample_stock_id): - sample_stock_json = get_sample_stock_by_id(sample_stock_id) - - crystal_slurry_json = get_crystal_slurry_by_id(sample_stock_json["crystalSlurryId"]) - - sample_stock_json["crystal_slurry"] = crystal_slurry_json - - return sample_stock_json diff --git a/pyispyb/ssx/routes/__init__.py b/pyispyb/ssx/routes/__init__.py deleted file mode 100644 index bf981998..00000000 --- a/pyispyb/ssx/routes/__init__.py +++ /dev/null @@ -1,34 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. -# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . - -import os -from importlib import import_module - - -__license__ = "LGPLv3+" - - -def init_app(app, **kwargs): - - for module_name in os.listdir(os.path.dirname(__file__)): - if not module_name.startswith("__") and module_name.endswith(".py"): - module = import_module(".%s" % module_name[:-3], package=__name__) - if hasattr(module, "init_app"): - module.init_app(app, **kwargs) diff --git a/pyispyb/ssx/routes/sample.py b/pyispyb/ssx/routes/sample.py deleted file mode 100644 index 2861059a..00000000 --- a/pyispyb/ssx/routes/sample.py +++ /dev/null @@ -1,185 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. 
-# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . - - -import logging -from flask import request, current_app -from flask_restx._http import HTTPStatus - -from pyispyb.flask_restx_patched import Resource - -from pyispyb.app.extensions.api import api_v1, Namespace -from pyispyb.app.extensions.auth import token_required, role_required - -from pyispyb.ssx.schemas import loaded_sample as loaded_sample_schemas -from pyispyb.ssx.schemas import crystal_slurry as crystal_slurry_schemas -from pyispyb.ssx.schemas import ( - crystal_size_distribution as crystal_size_distribution_schemas, -) -from pyispyb.ssx.schemas import sample_stock as sample_stock_schemas -from pyispyb.ssx.schemas import sample_delivery_device as sample_delivery_device_schemas -from pyispyb.ssx.modules import loaded_sample - - -__license__ = "LGPLv3+" - - -log = logging.getLogger(__name__) -api = Namespace("Samples", description="Samples related namespace", path="/samples") -api_v1.add_namespace(api) - - -@api.route("", endpoint="loaded_samples") -@api.doc(security="apikey") -class LoadedSample(Resource): - """Loaded sample resource""" - - # @token_required - def get(self): - """Returns all loaded samples""" - # app.logger.info("Return all data collections") - return loaded_sample.get_loaded_samples(request) - - # @token_required - @api.expect(loaded_sample_schemas.f_schema) - # @api.marshal_with(loaded_sample_schemas.f_schema, code=201) - # @role_required - def post(self): - """Adds a new loaded sample""" - return loaded_sample.add_loaded_sample(api.payload) - - -@api.route("/", endpoint="loaded_sample_by_id") -@api.param("loaded_sample_id", "loaded_sample id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="loaded_sample not found.") -class LoadedSampleById(Resource): - """Allows to get/set/delete a loaded_sample""" - - @token_required - @role_required - @api.doc(description="loaded_sample_id should be an integer ") - @api.marshal_with( - loaded_sample_schemas.f_schema, skip_none=False, code=HTTPStatus.OK - ) - def get(self, loaded_sample_id): - """Returns a loaded_sample by loaded_sampleId""" - return loaded_sample.get_loaded_sample_by_id(loaded_sample_id) - - -@api.route("//info", endpoint="loaded_sample_info_by_id") -@api.param("loaded_sample_id", "loaded_sample id (integer)") -@api.doc(security="apikey") -@api.response(code=HTTPStatus.NOT_FOUND, description="loaded_sample not found.") -class LoadedSampleInfoById(Resource): - """Returns full information of a loaded_sample""" - - @token_required - @role_required - @api.doc(description="loaded_sample_id should be an integer ") - # @api.marshal_with(loaded_sample_desc_f_schema) - def get(self, loaded_sample_id): - """Returns a full description of a loaded_sample by loaded_sampleId""" - return loaded_sample.get_loaded_sample_info_by_id(loaded_sample_id) - - -@api.route("/crystal_slurry", endpoint="crystal_slurry") -@api.doc(security="apikey") -class 
CrystalSlurry(Resource): - """Crystal slurry resource""" - - @token_required - @role_required - def get(self): - """Returns all crystal slurry""" - # app.logger.info("Return all data collections") - return loaded_sample.get_all_crystal_slurry() - - #@token_required - #@role_required - #@api.expect(crystal_slurry_schemas.f_schema) - # @api.marshal_with(crystal_slurry_schemas.f_schema, code=201) - def post(self): - """Adds a new crystal slury""" - return loaded_sample.add_crystal_slurry(api.payload) - - -@api.route("/crystal_size_distribution", endpoint="crystal_size_distribution") -@api.doc(security="apikey") -class CrystalSizeDistribution(Resource): - """Crystal size distribution resource""" - - @token_required - @role_required - def get(self): - """Returns all crystal size distributions""" - # app.logger.info("Return all data collections") - return loaded_sample.get_crystal_size_distributions() - - @token_required - @role_required - @api.expect(crystal_size_distribution_schemas.f_schema) - # @api.marshal_with(crystal_slurry_schemas.crystal_slurry_f_schema, code=201) - def post(self): - """Adds a new crystal slury""" - return loaded_sample.add_crystal_size_distribution(api.payload) - - -@api.route("/sample_stocks", endpoint="sample_stocks") -@api.doc(security="apikey") -class SampleStocks(Resource): - """Sample stocks resource""" - - @token_required - @role_required - def get(self): - """Returns all sample stocks""" - # app.logger.info("Return all data collections") - return loaded_sample.get_sample_stocks() - - @token_required - @role_required - @api.expect(sample_stock_schemas.f_schema) - # @api.marshal_with(crystal_slurry_schemas.crystal_slurry_f_schema, code=201) - def post(self): - """Adds a new sample stock""" - return loaded_sample.add_sample_stock(api.payload) - - -@api.route("/delivery_devices", endpoint="sample_delivery_devices") -@api.doc(security="apikey") -class SamplDeliveryDevices(Resource): - """SampleDeliveryDevice resource""" - - @token_required - @role_required - def get(self): - """Returns sample delivery devices""" - return loaded_sample.get_sample_delivery_devices(request) - - @token_required - @role_required - @api.expect(sample_delivery_device_schemas.f_schema) - # @api.marshal_with(sample_delivery_device_schemas.f_schema, code=201) - def post(self): - """Adds a new sample delivery device""" - - current_app.logger.info("Inserts a new sample delivery device") - return loaded_sample.add_sample_delivery_device(api.payload) diff --git a/pyispyb/ssx/schemas/crystal_size_distribution.py b/pyispyb/ssx/schemas/crystal_size_distribution.py deleted file mode 100644 index 925d87ee..00000000 --- a/pyispyb/ssx/schemas/crystal_size_distribution.py +++ /dev/null @@ -1,52 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'crystalSizeDistributionId': f_fields.Integer(required=True, description=''), - 'crystalHabit': f_fields.String(required=False, description=''), - 'characteristicDimensions': f_fields.String(required=False, description=''), - 'minDimension': f_fields.String(required=False, description='comma separated floats'), - 'maxDimension': f_fields.String(required=False, description='comma separated floats'), - } - -class CrystalSizeDistributionSchema(Schema): - """Marshmallows schema class representing CrystalSizeDistribution table""" - - crystalSizeDistributionId = ma_fields.Integer() - crystalHabit = ma_fields.String() - characteristicDimensions = ma_fields.String() - minDimension = ma_fields.String() - maxDimension = ma_fields.String() - -f_schema = api.model('CrystalSizeDistribution', dict_schema) -ma_schema = CrystalSizeDistributionSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/ssx/schemas/crystal_slurry.py b/pyispyb/ssx/schemas/crystal_slurry.py deleted file mode 100644 index cdff89f0..00000000 --- a/pyispyb/ssx/schemas/crystal_slurry.py +++ /dev/null @@ -1,52 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'crystalSlurryId': f_fields.Integer(required=True, description=''), - 'name': f_fields.String(required=False, description=''), - 'crystalSizeDistributionId': f_fields.Integer(required=False, description=''), - 'crystalDensity': f_fields.Float(required=False, description='1/mm3'), - 'bufferId': f_fields.Float(required=False, description='reference to Buffer.bufferId'), - } - -class CrystalSlurrySchema(Schema): - """Marshmallows schema class representing CrystalSlurry table""" - - crystalSlurryId = ma_fields.Integer() - name = ma_fields.String() - crystalSizeDistributionId = ma_fields.Integer() - crystalDensity = ma_fields.Float() - bufferId = ma_fields.Float() - -f_schema = api.model('CrystalSlurry', dict_schema) -ma_schema = CrystalSlurrySchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/ssx/schemas/data_acquisition.py b/pyispyb/ssx/schemas/data_acquisition.py deleted file mode 100644 index 4116fee3..00000000 --- a/pyispyb/ssx/schemas/data_acquisition.py +++ /dev/null @@ -1,58 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. 
-# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . - - -__license__ = "LGPLv3+" - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -data_acquisition_dict_schema = { - "dataAcquisitionId": f_fields.Integer(required=True, description=""), - "loadedSampleId": f_fields.Integer(required=True, description=""), - "dataCollectionId": f_fields.Integer( - required=True, description="reference to DataCollection.dataCollectionId" - ), - "experimentalPlanId": f_fields.Integer(required=True, description=""), - "shortList": f_fields.String(required=True, description="url to shorlist file"), - "autoprocessingProgrammId": f_fields.Integer( - required=False, description="reference to AutoProcProgram.autoProcProgramId" - ), -} - - -class DataAcquisitionSchema(Schema): - """Marshmallows schema class representing DataAcquisition table""" - - dataAcquisitionId = ma_fields.Integer() - loadedSampleId = ma_fields.Integer() - dataCollectionId = ma_fields.Integer() - experimentalPlanId = ma_fields.Integer() - shortList = ma_fields.String() - autoprocessingProgrammId = ma_fields.Integer() - - -data_acquisition_f_schema = api.model("DataAcquisition", data_acquisition_dict_schema) -data_acquisition_ma_schema = DataAcquisitionSchema() -data_acquisition_json_schema = JSONSchema().dump(data_acquisition_ma_schema) diff --git a/pyispyb/ssx/schemas/data_set.py b/pyispyb/ssx/schemas/data_set.py deleted file mode 100644 index 7b50a66e..00000000 --- a/pyispyb/ssx/schemas/data_set.py +++ /dev/null @@ -1,48 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'dataSetId': f_fields.Integer(required=True, description=''), - 'name': f_fields.String(required=True, description=''), - 'mergedResultsFilename': f_fields.String(required=False, description=''), - } - -class DataSetSchema(Schema): - """Marshmallows schema class representing DataSet table""" - - dataSetId = ma_fields.Integer() - name = ma_fields.String() - mergedResultsFilename = ma_fields.String() - -f_schema = api.model('DataSet', dict_schema) -ma_schema = DataSetSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/ssx/schemas/event_train.py b/pyispyb/ssx/schemas/event_train.py deleted file mode 100644 index e3e34e4b..00000000 --- a/pyispyb/ssx/schemas/event_train.py +++ /dev/null @@ -1,58 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'eventTrainId': f_fields.Integer(required=True, description=''), - 'name': f_fields.String(required=False, description=''), - 'timeOn': f_fields.Float(required=False, description='sec'), - 'duration': f_fields.Float(required=False, description='sec'), - 'period': f_fields.Float(required=False, description=''), - 'numberOfRepetitions': f_fields.Float(required=False, description=''), - 'nameInEventLog': f_fields.String(required=False, description=''), - 'triggerDevice': f_fields.String(required=False, description=''), - } - -class EventTrainSchema(Schema): - """Marshmallows schema class representing EventTrain table""" - - eventTrainId = ma_fields.Integer() - name = ma_fields.String() - timeOn = ma_fields.Float() - duration = ma_fields.Float() - period = ma_fields.Float() - numberOfRepetitions = ma_fields.Float() - nameInEventLog = ma_fields.String() - triggerDevice = ma_fields.String() - -f_schema = api.model('EventTrain', dict_schema) -ma_schema = EventTrainSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/ssx/schemas/experimental_plan.py b/pyispyb/ssx/schemas/experimental_plan.py deleted file mode 100644 index cdb58721..00000000 --- a/pyispyb/ssx/schemas/experimental_plan.py +++ /dev/null @@ -1,54 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. 
- -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'experimentalPlanId': f_fields.Integer(required=True, description=''), - 'name': f_fields.String(required=False, description=''), - 'numberOfRepetitions': f_fields.Integer(required=False, description='for micro-fluidic, jet, tape but not for chip'), - 'period': f_fields.Float(required=False, description='seconds but unknown/self adjusting for chip'), - 'masterTriggerId': f_fields.Integer(required=False, description=''), - 'repeatedSequenceId': f_fields.Integer(required=True, description=''), - } - -class ExperimentalPlanSchema(Schema): - """Marshmallows schema class representing ExperimentalPlan table""" - - experimentalPlanId = ma_fields.Integer() - name = ma_fields.String() - numberOfRepetitions = ma_fields.Integer() - period = ma_fields.Float() - masterTriggerId = ma_fields.Integer() - repeatedSequenceId = ma_fields.Integer() - -f_schema = api.model('ExperimentalPlan', dict_schema) -ma_schema = ExperimentalPlanSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/ssx/schemas/loaded_sample.py b/pyispyb/ssx/schemas/loaded_sample.py deleted file mode 100644 index 217289b3..00000000 --- a/pyispyb/ssx/schemas/loaded_sample.py +++ /dev/null @@ -1,54 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'loadedSampleId': f_fields.Integer(required=True, description=''), - 'name': f_fields.String(required=False, description='to be used as part of the image and processing file names'), - 'sampleStockId': f_fields.Integer(required=False, description=''), - 'sampleDeliveryDeviceId': f_fields.Integer(required=False, description=''), - 'loadingPattern': f_fields.Integer(required=False, description=''), - 'descriptionJson': f_fields.String(required=False, description=''), - } - -class LoadedSampleSchema(Schema): - """Marshmallows schema class representing LoadedSample table""" - - loadedSampleId = ma_fields.Integer() - name = ma_fields.String() - sampleStockId = ma_fields.Integer() - sampleDeliveryDeviceId = ma_fields.Integer() - loadingPattern = ma_fields.Integer() - descriptionJson = ma_fields.String() - -f_schema = api.model('LoadedSample', dict_schema) -ma_schema = LoadedSampleSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/ssx/schemas/master_trigger.py b/pyispyb/ssx/schemas/master_trigger.py deleted file mode 100644 index 33494eeb..00000000 --- a/pyispyb/ssx/schemas/master_trigger.py +++ /dev/null @@ -1,50 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'masterTriggerId': f_fields.Integer(required=True, description=''), - 'nameInEventLog': f_fields.String(required=False, description=''), - 'triggerDevice': f_fields.Integer(required=False, description=''), - 'descriptionJson': f_fields.String(required=False, description=''), - } - -class MasterTriggerSchema(Schema): - """Marshmallows schema class representing MasterTrigger table""" - - masterTriggerId = ma_fields.Integer() - nameInEventLog = ma_fields.String() - triggerDevice = ma_fields.Integer() - descriptionJson = ma_fields.String() - -f_schema = api.model('MasterTrigger', dict_schema) -ma_schema = MasterTriggerSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/ssx/schemas/micrograph.py b/pyispyb/ssx/schemas/micrograph.py deleted file mode 100644 index 67baebf1..00000000 --- a/pyispyb/ssx/schemas/micrograph.py +++ /dev/null @@ -1,52 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. 
- -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'micrographId': f_fields.Integer(required=True, description=''), - 'crystalSlurryId': f_fields.Integer(required=True, description=''), - 'url': f_fields.String(required=False, description=''), - 'objectSidePixelSize': f_fields.String(required=False, description='comma separated two floats'), - 'descriptionJson': f_fields.String(required=False, description=''), - } - -class MicrographSchema(Schema): - """Marshmallows schema class representing Micrograph table""" - - micrographId = ma_fields.Integer() - crystalSlurryId = ma_fields.Integer() - url = ma_fields.String() - objectSidePixelSize = ma_fields.String() - descriptionJson = ma_fields.String() - -f_schema = api.model('Micrograph', dict_schema) -ma_schema = MicrographSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/ssx/schemas/repeated_sequence.py b/pyispyb/ssx/schemas/repeated_sequence.py deleted file mode 100644 index 0eabd712..00000000 --- a/pyispyb/ssx/schemas/repeated_sequence.py +++ /dev/null @@ -1,46 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
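Micrograph.objectSidePixelSize above is stored as a string of "comma separated two floats", so consumers have to parse it themselves. A small sketch of such a parser (the helper name and sample value are invented):

    def parse_pixel_size(value: str) -> tuple[float, float]:
        """Split a 'comma separated two floats' column into a float pair."""
        x, y = (float(part) for part in value.split(","))
        return x, y

    assert parse_pixel_size("0.65,0.65") == (0.65, 0.65)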
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'repeatedSequenceId': f_fields.Integer(required=True, description=''), - 'name': f_fields.String(required=False, description=''), - } - -class RepeatedSequenceSchema(Schema): - """Marshmallows schema class representing RepeatedSequence table""" - - repeatedSequenceId = ma_fields.Integer() - name = ma_fields.String() - -f_schema = api.model('RepeatedSequence', dict_schema) -ma_schema = RepeatedSequenceSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/ssx/schemas/repeated_sequence_has_action.py b/pyispyb/ssx/schemas/repeated_sequence_has_action.py deleted file mode 100644 index f9c84953..00000000 --- a/pyispyb/ssx/schemas/repeated_sequence_has_action.py +++ /dev/null @@ -1,56 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. -# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . - - -__license__ = "LGPLv3+" - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -repeated_sequence_has_action_dict_schema = { - "repeatedSequenceHasActionId": f_fields.Integer(required=True, description=""), - "repeatedSequenceId": f_fields.Integer(required=False, description=""), - "timedExcitationId": f_fields.Integer(required=False, description=""), - "timedXrayExposureId": f_fields.Integer(required=False, description=""), - "timedXrayDetectionId": f_fields.Integer(required=False, description=""), -} - - -class RepeatedSequenceHasActionSchema(Schema): - """Marshmallows schema class representing RepeatedSequenceHasAction table""" - - repeatedSequenceHasActionId = ma_fields.Integer() - repeatedSequenceId = ma_fields.Integer() - timedExcitationId = ma_fields.Integer() - timedXrayExposureId = ma_fields.Integer() - timedXrayDetectionId = ma_fields.Integer() - - -repeated_sequence_has_action_f_schema = api.model( - "RepeatedSequenceHasAction", repeated_sequence_has_action_dict_schema -) -repeated_sequence_has_action_ma_schema = RepeatedSequenceHasActionSchema() -repeated_sequence_has_action_json_schema = JSONSchema().dump( - repeated_sequence_has_action_ma_schema -) diff --git a/pyispyb/ssx/schemas/sample_delivery_device.py b/pyispyb/ssx/schemas/sample_delivery_device.py deleted file mode 100644 index f6097888..00000000 --- a/pyispyb/ssx/schemas/sample_delivery_device.py +++ /dev/null @@ -1,48 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. 
- -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'sampleDeliveryDeviceId': f_fields.Integer(required=True, description=''), - 'type': f_fields.String(required=False, description='enum(photoChip,microFluidics,viscoousJet,tapeDevice)'), - 'descriptionJson': f_fields.String(required=False, description=''), - } - -class SampleDeliveryDeviceSchema(Schema): - """Marshmallows schema class representing SampleDeliveryDevice table""" - - sampleDeliveryDeviceId = ma_fields.Integer() - type = ma_fields.String() - descriptionJson = ma_fields.String() - -f_schema = api.model('SampleDeliveryDevice', dict_schema) -ma_schema = SampleDeliveryDeviceSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/ssx/schemas/sample_stock.py b/pyispyb/ssx/schemas/sample_stock.py deleted file mode 100644 index 38274f85..00000000 --- a/pyispyb/ssx/schemas/sample_stock.py +++ /dev/null @@ -1,56 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
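SampleDeliveryDevice.type is an enum column, but the generator records the allowed values only inside the description string, so the generated marshmallow schema accepts any string. If enforcement were wanted, marshmallow's OneOf validator is the natural fit; a sketch with the values copied verbatim from the column comment above (including its 'viscoousJet' spelling):

    from marshmallow import Schema, fields, validate, ValidationError

    class SampleDeliveryDeviceSchema(Schema):
        sampleDeliveryDeviceId = fields.Integer(required=True)
        type = fields.String(
            validate=validate.OneOf(
                ["photoChip", "microFluidics", "viscoousJet", "tapeDevice"]
            )
        )

    try:
        SampleDeliveryDeviceSchema().load({"sampleDeliveryDeviceId": 1, "type": "jet"})
    except ValidationError as err:
        print(err.messages)  # {'type': ['Must be one of: photoChip, ...']}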
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'sampleStockId': f_fields.Integer(required=True, description=''), - 'name': f_fields.String(required=True, description=''), - 'crystalSlurryId': f_fields.Integer(required=True, description=''), - 'concentrationFactor': f_fields.Float(required=True, description=''), - 'crystalDensity': f_fields.Float(required=True, description=''), - 'additiveId': f_fields.Integer(required=False, description='reference to Additive.additiveId'), - 'note': f_fields.String(required=False, description=''), - } - -class SampleStockSchema(Schema): - """Marshmallows schema class representing SampleStock table""" - - sampleStockId = ma_fields.Integer() - name = ma_fields.String() - crystalSlurryId = ma_fields.Integer() - concentrationFactor = ma_fields.Float() - crystalDensity = ma_fields.Float() - additiveId = ma_fields.Integer() - note = ma_fields.String() - -f_schema = api.model('SampleStock', dict_schema) -ma_schema = SampleStockSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/ssx/schemas/ssx_data_acquisition.py b/pyispyb/ssx/schemas/ssx_data_acquisition.py deleted file mode 100644 index c354f9de..00000000 --- a/pyispyb/ssx/schemas/ssx_data_acquisition.py +++ /dev/null @@ -1,56 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'ssxDataAcquisitionId': f_fields.Integer(required=True, description=''), - 'loadedSampleId': f_fields.Integer(required=True, description=''), - 'dataCollectionId': f_fields.Integer(required=True, description='reference to DataCollection.dataCollectionId'), - 'experimentalPlanId': f_fields.Integer(required=True, description=''), - 'eventLogFilename': f_fields.String(required=True, description='url to shorlist file'), - 'dataSetId': f_fields.Integer(required=True, description=''), - 'autoprocessingProgrammId': f_fields.Integer(required=False, description='reference to AutoProcProgram.autoProcProgramId'), - } - -class SsxDataAcquisitionSchema(Schema): - """Marshmallows schema class representing SsxDataAcquisition table""" - - ssxDataAcquisitionId = ma_fields.Integer() - loadedSampleId = ma_fields.Integer() - dataCollectionId = ma_fields.Integer() - experimentalPlanId = ma_fields.Integer() - eventLogFilename = ma_fields.String() - dataSetId = ma_fields.Integer() - autoprocessingProgrammId = ma_fields.Integer() - -f_schema = api.model('SsxDataAcquisition', dict_schema) -ma_schema = SsxDataAcquisitionSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/ssx/schemas/timed_excitation.py b/pyispyb/ssx/schemas/timed_excitation.py deleted file mode 100644 index c457dbda..00000000 --- a/pyispyb/ssx/schemas/timed_excitation.py +++ /dev/null @@ -1,52 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'timedExcitationId': f_fields.Integer(required=True, description=''), - 'name': f_fields.String(required=False, description=''), - 'repeatedSequenceId': f_fields.Integer(required=False, description=''), - 'eventTrainId': f_fields.Integer(required=False, description=''), - 'ssxExcitation': f_fields.String(required=False, description=''), - } - -class TimedExcitationSchema(Schema): - """Marshmallows schema class representing TimedExcitation table""" - - timedExcitationId = ma_fields.Integer() - name = ma_fields.String() - repeatedSequenceId = ma_fields.Integer() - eventTrainId = ma_fields.Integer() - ssxExcitation = ma_fields.String() - -f_schema = api.model('TimedExcitation', dict_schema) -ma_schema = TimedExcitationSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/ssx/schemas/timed_sequence.py b/pyispyb/ssx/schemas/timed_sequence.py deleted file mode 100644 index 86b68efe..00000000 --- a/pyispyb/ssx/schemas/timed_sequence.py +++ /dev/null @@ -1,54 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. -# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . - - -__license__ = "LGPLv3+" - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -timed_sequence_dict_schema = { - "timedSequenceId": f_fields.Integer(required=True, description=""), - "name": f_fields.String(required=False, description=""), - "timeOn": f_fields.Float(required=False, description="sec"), - "timeOff": f_fields.Float(required=False, description="sec"), - "nameInShortlist": f_fields.String(required=False, description=""), - "triggerDevice": f_fields.String(required=False, description=""), -} - - -class TimedSequenceSchema(Schema): - """Marshmallows schema class representing TimedSequence table""" - - timedSequenceId = ma_fields.Integer() - name = ma_fields.String() - timeOn = ma_fields.Float() - timeOff = ma_fields.Float() - nameInShortlist = ma_fields.String() - triggerDevice = ma_fields.String() - - -timed_sequence_f_schema = api.model("TimedSequence", timed_sequence_dict_schema) -timed_sequence_ma_schema = TimedSequenceSchema() -timed_sequence_json_schema = JSONSchema().dump(timed_sequence_ma_schema) diff --git a/pyispyb/ssx/schemas/timed_xray_detection.py b/pyispyb/ssx/schemas/timed_xray_detection.py deleted file mode 100644 index 65309adf..00000000 --- a/pyispyb/ssx/schemas/timed_xray_detection.py +++ /dev/null @@ -1,56 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. 
- -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . -""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'timedXrayDetectionId': f_fields.Integer(required=True, description=''), - 'name': f_fields.String(required=False, description=''), - 'repeatedSequenceId': f_fields.Integer(required=False, description=''), - 'eventTrainId': f_fields.Integer(required=False, description=''), - 'numberOfInternalTriggers': f_fields.Integer(required=False, description=''), - 'internalTriggerPeriod': f_fields.Integer(required=False, description=''), - 'internalGateDuration': f_fields.Integer(required=False, description=''), - } - -class TimedXrayDetectionSchema(Schema): - """Marshmallows schema class representing TimedXrayDetection table""" - - timedXrayDetectionId = ma_fields.Integer() - name = ma_fields.String() - repeatedSequenceId = ma_fields.Integer() - eventTrainId = ma_fields.Integer() - numberOfInternalTriggers = ma_fields.Integer() - internalTriggerPeriod = ma_fields.Integer() - internalGateDuration = ma_fields.Integer() - -f_schema = api.model('TimedXrayDetection', dict_schema) -ma_schema = TimedXrayDetectionSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/ssx/schemas/timed_xray_exposure.py b/pyispyb/ssx/schemas/timed_xray_exposure.py deleted file mode 100644 index bb887183..00000000 --- a/pyispyb/ssx/schemas/timed_xray_exposure.py +++ /dev/null @@ -1,54 +0,0 @@ -""" -Project: py-ispyb. - -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with py-ispyb. If not, see . 
-""" - - -__license__ = "LGPLv3+" - - - -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -dict_schema = { - 'timedXrayExposureId': f_fields.Integer(required=True, description=''), - 'name': f_fields.String(required=False, description=''), - 'repeatedSequenceId': f_fields.Integer(required=False, description=''), - 'eventTrainId': f_fields.Integer(required=False, description=''), - 'timedBunches': f_fields.String(required=False, description=''), - 'shutter': f_fields.String(required=False, description=''), - } - -class TimedXrayExposureSchema(Schema): - """Marshmallows schema class representing TimedXrayExposure table""" - - timedXrayExposureId = ma_fields.Integer() - name = ma_fields.String() - repeatedSequenceId = ma_fields.Integer() - eventTrainId = ma_fields.Integer() - timedBunches = ma_fields.String() - shutter = ma_fields.String() - -f_schema = api.model('TimedXrayExposure', dict_schema) -ma_schema = TimedXrayExposureSchema() -json_schema = JSONSchema().dump(ma_schema) diff --git a/pyispyb/static/dewar_label_template.html b/pyispyb/static/dewar_label_template.html deleted file mode 100644 index 814175c9..00000000 --- a/pyispyb/static/dewar_label_template.html +++ /dev/null @@ -1,54 +0,0 @@ -

[pyispyb/static/dewar_label_template.html (deleted; the file's HTML markup was lost in extraction, only its rendered text survives). Recoverable content: a page titled "ISPyB Dewar Tracking" carrying LABEL INSTRUCTIONS, "Please print the following three labels and use as follows:" 1) Dewar Label: affix this label to your dewar so it can be identified at all times at the {site_name}; 2) Outward bound address label: to be attached to the outside of your transport container for shipment to the {site_name}; 3) Return bound address label: the return address for your shipment (to be included in the shipment, e.g. behind the outward bound address or in the transport container). The label tables ("1) DEWAR LABEL: affix this to your DEWAR") carry the placeholders {parcel_label}, {shipping_label}, {num_parcels}, {proposal_number}, {laboratory_name} and {local_contact}; the recovered text breaks off at the heading for label 2.]

\ No newline at end of file diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 00000000..e45ffe23 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,7 @@ +black +flake8 +pytest +pytest-cov +requests +jsondiff +httpx>=0.23.0 #required for FastAPI tests \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index a5985a32..750fe688 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,34 @@ --r tasks/requirements.txt --r pyispyb/requirements.txt +ispyb-models==1.0.6 + +fastapi +pydantic[dotenv] +pydantic-sqlalchemy + +SQLAlchemy + +pyjwt +pdfkit +python-barcode +qrcode + +python-keycloak>=2.6.0 + +pint +python_ldap==3.4.0 + +sqlparse + +mysql-connector-python==8.0.29 + +fabio +h5py +hdf5plugin +h5grove +numpy +matplotlib + +mkdocs +mkdocs-material + +uvicorn +gunicorn \ No newline at end of file diff --git a/scripts/coverage.sh b/scripts/coverage.sh new file mode 100644 index 00000000..a4a64cbe --- /dev/null +++ b/scripts/coverage.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +export ISPYB_ENVIRONMENT="test" + +pytest --cov=pyispyb --cov-report html tests/ \ No newline at end of file diff --git a/scripts/create_core_db.sh b/scripts/create_core_db.sh deleted file mode 100644 index ddc296e5..00000000 --- a/scripts/create_core_db.sh +++ /dev/null @@ -1,9 +0,0 @@ -wget https://github.com/DiamondLightSource/ispyb-database/releases/download/v1.10.4/ispyb-database-1.10.4.tar.gz -tar xvfz ispyb-database-1.10.4.tar.gz -mysql -u root -e "CREATE DATABASE pydb_test; SET GLOBAL log_bin_trust_function_creators=ON;" -mysql -u root -D pydb_test < schema/tables.sql -mysql -u root -D pydb_test < schema/lookups.sql -mysql -u root -D pydb_test < schema/data.sql -mysql -u root -D pydb_test < schema/routines.sql -mysql -u root -e "CREATE USER mxuser@'localhost' IDENTIFIED BY 'mxpass';" -mysql -u root -e "GRANT ALL ON pydb_test.* TO 'mxuser'@'localhost';" diff --git a/scripts/create_ssx_db.sh b/scripts/create_ssx_db.sh deleted file mode 100644 index 62372031..00000000 --- a/scripts/create_ssx_db.sh +++ /dev/null @@ -1,3 +0,0 @@ -mysql -u root -e "CREATE DATABASE ispyb_ssx; SET GLOBAL log_bin_trust_function_creators=ON;" -mysql -u root -D ispyb_ssx < ../examples/ispyb_ssx_db.sql -mysql -u root -e "GRANT ALL ON ispyb_ssx.* TO 'mxuser'@'localhost';" diff --git a/scripts/format_code.sh b/scripts/format_code.sh old mode 100755 new mode 100644 index 15ece15d..de1937d5 --- a/scripts/format_code.sh +++ b/scripts/format_code.sh @@ -1,2 +1 @@ -autopep8 -a -r -j 0 -i --max-line-length 88 $1 -black --safe $1 +black . 
\ No newline at end of file diff --git a/scripts/generate_apispec.py b/scripts/generate_apispec.py new file mode 100644 index 00000000..1f7e4d46 --- /dev/null +++ b/scripts/generate_apispec.py @@ -0,0 +1,20 @@ +# from https://github.com/tiangolo/fastapi/issues/1173 +from fastapi.openapi.utils import get_openapi +import json + +from pyispyb.app.main import app, custom_openapi + +app.openapi = custom_openapi + +with open("openapi.json", "w") as f: + json.dump( + get_openapi( + title=app.title, + version=app.version, + openapi_version=app.openapi_version, + description=app.description, + routes=app.routes, + # openapi_prefix=app.openapi_prefix, + ), + f, + ) diff --git a/scripts/generate_core_models.sh b/scripts/generate_core_models.sh deleted file mode 100755 index 39e8c5fc..00000000 --- a/scripts/generate_core_models.sh +++ /dev/null @@ -1,12 +0,0 @@ -URI="$(grep SQLALCHEMY_DATABASE_URI $1)" -URI="$(echo "$URI" | tr -d '"' | tr -d ' ' | sed -e "s/SQLALCHEMY_DATABASE_URI://")" -echo "Generating SqlAlchemy models in pyispyb/core/models.py ..." -echo ""$URI -flask-sqlacodegen --flask --nobackrefs --noviews --outfile ../pyispyb/core/models.py $URI - - -sed -i -e 's/db = SQLAlchemy()/from pyispyb.app.extensions import db/g' ../pyispyb/core/models.py -sed -i -e 's/class AutoProcStatu(db.Model)/class AutoProcStatus(db.Model)/g' ../pyispyb/core/models.py -sed -i -e 's/AutoProcStatu.autoProcIntegrationId/AutoProcStatus.autoProcIntegrationId/g' ../pyispyb/core/models.py - -echo "Done!" diff --git a/scripts/generate_core_schemas.py b/scripts/generate_core_schemas.py deleted file mode 100644 index 47d8b70e..00000000 --- a/scripts/generate_core_schemas.py +++ /dev/null @@ -1,146 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. -# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . 
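scripts/generate_apispec.py above writes the application's OpenAPI document to openapi.json with fastapi.openapi.utils.get_openapi; the same helper works against any FastAPI app. A self-contained sketch (the demo app and route are invented):

    import json

    from fastapi import FastAPI
    from fastapi.openapi.utils import get_openapi

    app = FastAPI(title="demo", version="0.1.0")

    @app.get("/ping")
    def ping() -> dict:
        return {"ok": True}

    spec = get_openapi(title=app.title, version=app.version, routes=app.routes)
    with open("openapi.json", "w") as f:
        json.dump(spec, f, indent=2)
    print(sorted(spec["paths"]))  # ['/ping']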
- - -import os -import sys -import csv -import MySQLdb - -ispyb_root = os.path.dirname(os.path.abspath(__file__)).split(os.sep) -ispyb_root = "/" + os.path.join(*ispyb_root[1:-1]) -sys.path.insert(0, ispyb_root) - - -from pyispyb.config import BaseConfig -config = BaseConfig(os.path.join(ispyb_root, "ispyb_core_config.yml")) - -uri = config.SQLALCHEMY_DATABASE_URI -# mysql://ispyb_api:password_1234@localhost/ispyb_test -user = uri.split("//")[1].split(":")[0] -passwd = uri.split("//")[1].split(":")[1].split("@")[0] -host = uri.split("@")[1].split("/")[0] -db_name = uri.split("/")[-1] - -gen_tables = [] -gen_modules = [] - -with open("%s/examples/core_db_mapping.csv" % ispyb_root) as csvfile: - reader = csv.reader(csvfile) - for row in reader: - gen_modules.append(row[0]) - gen_tables.append(row[1]) - - -connection = MySQLdb.connect(host=host, user=user, passwd=passwd) -cursor = connection.cursor() -cursor.execute("USE %s" % db_name) -cursor.execute("SHOW TABLES") -tables = cursor.fetchall() - -schema_file_header = "" - -licence_header_file = open(ispyb_root + "/py_file_header.txt", "r") -schema_file_header = licence_header_file.read() -licence_header_file.close() - - -schema_file_header += """ -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -""" - -print("Generating ispyb core schemas") - -for table in tables: - table_name = table[0] - if table_name in gen_tables: - schema_name = gen_modules[gen_tables.index(table_name)] - print( - "Generting flask and marshmallow models for table %s in %s.py" - % (table_name, schema_name) - ) - cursor.execute("SHOW FULL COLUMNS FROM %s" % table) - columns = cursor.fetchall() - table_name = table_name.replace("BF_", "").replace("BL", "") - dict_text = "dict_schema = {\n" - ma_text = "class %sSchema(Schema):\n" % table_name - ma_text += ( - ' """Marshmallows schema class representing %s table"""\n\n' % table_name - ) - - for column in columns: - name = column[0] - if name == "global": - name = "Global" - data_type = "String" - data_size = "()" - required = "required=False" - if column[3] == "NO": - required = "required=True" - # default = None - # if column[5] != "NULL": - # default = - description = "description='%s'" % column[8].replace("'", "") - if "int" in column[1] or column[1].startswith("binary"): - data_type = "Integer" - elif column[1].startswith("float"): - data_type = "Float" - elif column[1].startswith("varchar") or column[1].startswith("text"): - data_type = "String" - data_size = column[1].replace("varchar", "") - elif column[1].startswith("timestamp") or column[1].startswith("datetime"): - data_type = "DateTime" - elif column[1].startswith("enum"): - data_type = "String" - description = "description='%s%s'" % ( - column[8].replace("'", ""), - column[1].replace("'", ""), - ) - dict_text += " '%s': f_fields.%s(%s, %s),\n" % ( - name, - data_type, - required, - description, - ) - ma_text += " %s = ma_fields.%s()\n" % (name, data_type) - dict_text += " }\n\n" - - class_text = "f_schema = api.model('%s', dict_schema)\n" % (table_name,) - class_text += "ma_schema = %sSchema()\n" % (table_name) - json_text = "json_schema = JSONSchema().dump(ma_schema)\n" - - schema_file_path = "%s/pyispyb/core/schemas/%s.py" % (ispyb_root, schema_name) - if not os.path.exists(os.path.dirname(schema_file_path)): - os.makedirs(os.path.dirname(schema_file_path)) - schema_file = open(schema_file_path, "w") - 
schema_file.write(schema_file_header) - schema_file.write(dict_text) - schema_file.write(ma_text) - schema_file.write("\n") - schema_file.write(class_text) - schema_file.write(json_text) - schema_file.close() - -print("done") diff --git a/scripts/generate_em_models.sh b/scripts/generate_em_models.sh deleted file mode 100755 index a84d3db7..00000000 --- a/scripts/generate_em_models.sh +++ /dev/null @@ -1,12 +0,0 @@ -URI="$(grep SQLALCHEMY_DATABASE_URI $1)" -URI="$(echo "$URI" | tr -d '"' | tr -d ' ' | sed -e "s/SQLALCHEMY_DATABASE_URI://")" -echo "Generating SqlAlchemy models in pyispyb/em/models.py ..." - -flask-sqlacodegen --flask --nobackrefs --noviews --outfile ../pyispyb/em/models.py $URI - - -sed -i -e 's/db = SQLAlchemy()/from pyispyb.app.extensions import db/g' ../pyispyb/em/models.py -sed -i -e 's/class AutoProcStatu(db.Model)/class AutoProcStatus(db.Model)/g' ../pyispyb/em/models.py -sed -i -e 's/AutoProcStatu.autoProcIntegrationId/AutoProcStatus.autoProcIntegrationId/g' ../pyispyb/em/models.py - -echo "Done!" diff --git a/scripts/generate_em_schemas.py b/scripts/generate_em_schemas.py deleted file mode 100644 index 9d3874a5..00000000 --- a/scripts/generate_em_schemas.py +++ /dev/null @@ -1,146 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. -# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . 
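The three near-identical generate_*_schemas.py scripts deleted in this diff (core above, em and ssx below) share one rule for turning SHOW FULL COLUMNS output into field declarations. That mapping, distilled into a pure function for reference (a sketch of the logic above, not code from the repository):

    def map_column(col_type: str, nullable: str) -> tuple[str, bool]:
        """Map a MySQL column type to a field class name and a required flag."""
        required = nullable == "NO"
        if "int" in col_type or col_type.startswith("binary"):
            return "Integer", required
        if col_type.startswith("float"):
            return "Float", required
        if col_type.startswith(("timestamp", "datetime")):
            return "DateTime", required
        return "String", required  # varchar, text and enum all become String

    assert map_column("int(10) unsigned", "NO") == ("Integer", True)
    assert map_column("enum('a','b')", "YES") == ("String", False)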
- - -import os -import sys -import csv -import MySQLdb - -ispyb_root = os.path.dirname(os.path.abspath(__file__)).split(os.sep) -ispyb_root = "/" + os.path.join(*ispyb_root[1:-1]) -sys.path.insert(0, ispyb_root) - - -from pyispyb.config import BaseConfig -config = BaseConfig(os.path.join(ispyb_root, "ispyb_em_config.yml")) - -uri = config.SQLALCHEMY_DATABASE_URI -# mysql://ispyb_api:password_1234@localhost/ispyb_test -user = uri.split("//")[1].split(":")[0] -passwd = uri.split("//")[1].split(":")[1].split("@")[0] -host = uri.split("@")[1].split("/")[0] -db_name = uri.split("/")[-1] - -gen_tables = [] -gen_modules = [] - -with open("%s/examples/em_db_mapping.csv" % ispyb_root) as csvfile: - reader = csv.reader(csvfile) - for row in reader: - gen_modules.append(row[0]) - gen_tables.append(row[1]) - - -connection = MySQLdb.connect(host=host, user=user, passwd=passwd) -cursor = connection.cursor() -cursor.execute("USE %s" % db_name) -cursor.execute("SHOW TABLES") -tables = cursor.fetchall() - -schema_file_header = "" - -licence_header_file = open(ispyb_root + "/py_file_header.txt", "r") -schema_file_header = licence_header_file.read() -licence_header_file.close() - - -schema_file_header += """ -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -""" - -print("Generating ispyb em schemas") - -for table in tables: - table_name = table[0] - if table_name in gen_tables: - schema_name = gen_modules[gen_tables.index(table_name)] - print( - "Generting flask and marshmallow models for table %s in %s.py" - % (table_name, schema_name) - ) - cursor.execute("SHOW FULL COLUMNS FROM %s" % table) - columns = cursor.fetchall() - table_name = table_name.replace("BF_", "").replace("BL", "") - dict_text = "dict_schema = {\n" - ma_text = "class %sSchema(Schema):\n" % table_name - ma_text += ( - ' """Marshmallows schema class representing %s table"""\n\n' % table_name - ) - - for column in columns: - name = column[0] - if name == "global": - name = "Global" - data_type = "String" - data_size = "()" - required = "required=False" - if column[3] == "NO": - required = "required=True" - # default = None - # if column[5] != "NULL": - # default = - description = "description='%s'" % column[8].replace("'", "") - if "int" in column[1] or column[1].startswith("binary"): - data_type = "Integer" - elif column[1].startswith("float"): - data_type = "Float" - elif column[1].startswith("varchar") or column[1].startswith("text"): - data_type = "String" - data_size = column[1].replace("varchar", "") - elif column[1].startswith("timestamp") or column[1].startswith("datetime"): - data_type = "DateTime" - elif column[1].startswith("enum"): - data_type = "String" - description = "description='%s%s'" % ( - column[8].replace("'", ""), - column[1].replace("'", ""), - ) - dict_text += " '%s': f_fields.%s(%s, %s),\n" % ( - name, - data_type, - required, - description, - ) - ma_text += " %s = ma_fields.%s()\n" % (name, data_type) - dict_text += " }\n\n" - - class_text = "f_schema = api.model('%s', dict_schema)\n" % (table_name,) - class_text += "ma_schema = %sSchema()\n" % (table_name) - json_text = "json_schema = JSONSchema().dump(ma_schema)\n" - - schema_file_path = "%s/pyispyb/em/schemas/%s.py" % (ispyb_root, schema_name) - if not os.path.exists(os.path.dirname(schema_file_path)): - os.makedirs(os.path.dirname(schema_file_path)) - schema_file = open(schema_file_path, "w") - 
schema_file.write(schema_file_header) - schema_file.write(dict_text) - schema_file.write(ma_text) - schema_file.write("\n") - schema_file.write(class_text) - schema_file.write(json_text) - schema_file.close() - -print("done") diff --git a/scripts/generate_ssx_models.sh b/scripts/generate_ssx_models.sh deleted file mode 100755 index 2f894b88..00000000 --- a/scripts/generate_ssx_models.sh +++ /dev/null @@ -1,8 +0,0 @@ -URI="$(grep SQLALCHEMY_DATABASE_URI $1)" -URI="$(echo "$URI" | tr -d '"' | tr -d ' ' | sed -e "s/SQLALCHEMY_DATABASE_URI://")" -echo "Generating SqlAlchemy models in pyispyb/ssx/models.py ..." -flask-sqlacodegen --flask --nobackrefs --noviews --outfile ../pyispyb/ssx/models.py $URI - -sed -i -e 's/db = SQLAlchemy()/from pyispyb.app.extensions import db/g' ../pyispyb/ssx/models.py - -echo "Done!" diff --git a/scripts/generate_ssx_schemas.py b/scripts/generate_ssx_schemas.py deleted file mode 100644 index f967e626..00000000 --- a/scripts/generate_ssx_schemas.py +++ /dev/null @@ -1,144 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. -# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . - - -import os -import re -import sys -import csv -import MySQLdb - -ispyb_root = os.path.dirname(os.path.abspath(__file__)).split(os.sep) -ispyb_root = "/" + os.path.join(*ispyb_root[1:-1]) -sys.path.insert(0, ispyb_root) - -from pyispyb.config import BaseConfig -config = BaseConfig(os.path.join(ispyb_root, "ispyb_ssx_config.yml")) - -uri = config.SQLALCHEMY_DATABASE_URI -# mysql://ispyb_api:password_1234@localhost/ispyb_test -user = uri.split("//")[1].split(":")[0] -passwd = uri.split("//")[1].split(":")[1].split("@")[0] -host = uri.split("@")[1].split("/")[0] -db_name = uri.split("/")[-1] - -gen_tables = [] -gen_modules = [] - -with open("%s/examples/ssx_db_mapping.csv" % ispyb_root) as csvfile: - reader = csv.reader(csvfile) - for row in reader: - gen_modules.append(row[0]) - gen_tables.append(row[1]) -connection = MySQLdb.connect(host=host, user=user, passwd=passwd) -cursor = connection.cursor() -cursor.execute("USE %s" % db_name) -cursor.execute("SHOW TABLES") -tables = cursor.fetchall() - -schema_file_header = "" - -licence_header_file = open(ispyb_root + "/py_file_header.txt", "r") -schema_file_header = licence_header_file.read() -licence_header_file.close() - - -schema_file_header += """ -from marshmallow import Schema, fields as ma_fields -from flask_restx import fields as f_fields -from marshmallow_jsonschema import JSONSchema - -from pyispyb.app.extensions.api import api_v1 as api - -""" - -print("Generating ssx schemas..") - -for table in tables: - table_name = table[0] - if table_name in gen_tables: - cursor.execute("SHOW FULL COLUMNS FROM %s" % table) - columns = cursor.fetchall() - table_name = table_name.replace("BF_", "").replace("BL", "") - schema_name = "_".join(re.findall("[A-Z][^A-Z]*", table_name)).lower() - print( - "Generting 
flask and marshmallow models for table %s in %s" - % (table_name, schema_name) - ) - dict_text = "dict_schema = {\n" - ma_text = "class %sSchema(Schema):\n" % table_name - ma_text += ( - ' """Marshmallows schema class representing %s table"""\n\n' % table_name - ) - - for column in columns: - name = column[0] - if name == "global": - name = "Global" - data_type = "String" - data_size = "()" - required = "required=False" - if column[3] == "NO": - required = "required=True" - # default = None - # if column[5] != "NULL": - # default = - description = "description='%s'" % column[8].replace("'", "") - if "int" in column[1] or column[1].startswith("binary"): - data_type = "Integer" - elif column[1].startswith("float"): - data_type = "Float" - elif column[1].startswith("varchar") or column[1].startswith("text"): - data_type = "String" - data_size = column[1].replace("varchar", "") - elif column[1].startswith("timestamp") or column[1].startswith("datetime"): - data_type = "DateTime" - elif column[1].startswith("enum"): - data_type = "String" - description = "description='%s%s'" % ( - column[8].replace("'", ""), - column[1].replace("'", ""), - ) - dict_text += " '%s': f_fields.%s(%s, %s),\n" % ( - name, - data_type, - required, - description, - ) - ma_text += " %s = ma_fields.%s()\n" % (name, data_type) - dict_text += " }\n\n" - - class_text = "f_schema = api.model('%s', dict_schema)\n" % (table_name,) - class_text += "ma_schema = %sSchema()\n" % (table_name) - json_text = "json_schema = JSONSchema().dump(ma_schema)\n" - - schema_file_path = "%s/pyispyb/ssx/schemas/%s.py" % (ispyb_root, schema_name) - if not os.path.exists(os.path.dirname(schema_file_path)): - os.makedirs(os.path.dirname(schema_file_path)) - schema_file = open(schema_file_path, "w") - schema_file.write(schema_file_header) - schema_file.write(dict_text) - schema_file.write(ma_text) - schema_file.write("\n") - schema_file.write(class_text) - schema_file.write(json_text) - schema_file.close() - -print("done") diff --git a/scripts/kong.conf b/scripts/kong.conf deleted file mode 100644 index 683ea442..00000000 --- a/scripts/kong.conf +++ /dev/null @@ -1,1252 +0,0 @@ -# ----------------------- -# Kong configuration file -# ----------------------- -# -# The commented-out settings shown in this file represent the default values. -# -# This file is read when `kong start` or `kong prepare` are used. Kong -# generates the Nginx configuration with the settings specified in this file. -# -# All environment variables prefixed with `KONG_` and capitalized will override -# the settings specified in this file. -# Example: -# `log_level` setting -> `KONG_LOG_LEVEL` env variable -# -# Boolean values can be specified as `on`/`off` or `true`/`false`. -# Lists must be specified as comma-separated strings. -# -# All comments in this file can be removed safely, including the -# commented-out properties. -# You can verify the integrity of your settings with `kong check `. - -#------------------------------------------------------------------------------ -# GENERAL -#------------------------------------------------------------------------------ - -prefix = /home/mxuser/software/kong # Working directory. Equivalent to Nginx's - # prefix path, containing temporary files - # and logs. - # Each Kong process must have a separate - # working directory. - -#log_level = notice # Log level of the Nginx server. Logs are - # found at `/logs/error.log`. - -# See http://nginx.org/en/docs/ngx_core_module.html#error_log for a list -# of accepted values. 
- -#proxy_access_log = logs/access.log # Path for proxy port request access - # logs. Set this value to `off` to - # disable logging proxy requests. - # If this value is a relative path, - # it will be placed under the - # `prefix` location. - -#proxy_error_log = logs/error.log # Path for proxy port request error - # logs. The granularity of these logs - # is adjusted by the `log_level` - # property. - -#admin_access_log = logs/admin_access.log # Path for Admin API request access - # logs. Set this value to `off` to - # disable logging Admin API requests. - # If this value is a relative path, - # it will be placed under the - # `prefix` location. - -#admin_error_log = logs/error.log # Path for Admin API request error - # logs. The granularity of these logs - # is adjusted by the `log_level` - # property. - -#status_access_log = off # Path for Status API request access - # logs. The default value of `off` - # implies that loggin for this API - # is disabled by default. - # If this value is a relative path, - # it will be placed under the - # `prefix` location. - -#status_error_log = logs/status_error.log # Path for Status API request error - # logs. The granularity of these logs - # is adjusted by the `log_level` - # property. - -#plugins = bundled # Comma-separated list of plugins this node - # should load. By default, only plugins - # bundled in official distributions are - # loaded via the `bundled` keyword. - # - # Loading a plugin does not enable it by - # default, but only instructs Kong to load its - # source code, and allows to configure the - # plugin via the various related Admin API - # endpoints. - # - # The specified name(s) will be substituted as - # such in the Lua namespace: - # `kong.plugins.{name}.*`. - # - # When the `off` keyword is specified as the - # only value, no plugins will be loaded. - # - # `bundled` and plugin names can be mixed - # together, as the following examples suggest: - # - # - `plugins = bundled,custom-auth,custom-log` - # will include the bundled plugins plus two - # custom ones - # - `plugins = custom-auth,custom-log` will - # *only* include the `custom-auth` and - # `custom-log` plugins. - # - `plugins = off` will not include any - # plugins - # - # **Note:** Kong will not start if some - # plugins were previously configured (i.e. - # have rows in the database) and are not - # specified in this list. Before disabling a - # plugin, ensure all instances of it are - # removed before restarting Kong. - # - # **Note:** Limiting the amount of available - # plugins can improve P99 latency when - # experiencing LRU churning in the database - # cache (i.e. when the configured - # `mem_cache_size`) is full. - -#go_pluginserver_exe = /usr/local/bin/go-pluginserver - # Path for the go-pluginserver executable, - # used for running Kong plugins written in Go. - -#go_plugins_dir = off # Directory for installing Kong plugins - # written in Go. - # - # This value can be set to `off`, thus disabling - # the plugin server and Go plugin loading. - -#port_maps = # With this configuration parameter, you can - # let the Kong to know about the port from - # which the packets are forwarded to it. This - # is fairly common when running Kong in a - # containerized or virtualized environment. - # For example, `port_maps=80:8000, 443:8443` - # instructs Kong that the port 80 is mapped - # to 8000 (and the port 443 to 8443), where - # 8000 and 8443 are the ports that Kong is - # listening to. 
- # - # This parameter helps Kong set a proper - # forwarded upstream HTTP request header or to - # get the proper forwarded port with the Kong PDK - # (in case other means determining it has - # failed). It changes routing by a destination - # port to route by a port from which packets - # are forwarded to Kong, and similarly it - # changes the default plugin log serializer to - # use the port according to this mapping - # instead of reporting the port Kong is - # listening to. - -#anonymous_reports = on # Send anonymous usage data such as error - # stack traces to help improve Kong. - -#------------------------------------------------------------------------------ -# HYBRID MODE -#------------------------------------------------------------------------------ - -#role = traditional # Use this setting to enable Hybrid Mode, - # This allows running some Kong nodes in a - # control plane role with a database and - # have them deliver configuration updates - # to other nodes running to DB-less running in - # a Data Plane role. - # - # Valid values to this setting are: - # - # - `traditional`: do not use Hybrid Mode. - # - `control_plane`: this node runs in a - # control plane role. It can use a database - # and will deliver configuration updates - # to data plane nodes. - # - `data_plane`: this is a data plane node. - # It runs DB-less and receives configuration - # updates from a control plane node. - -#cluster_mtls = shared # Sets the verification between nodes of the - # cluster. - # - # Valid values to this setting are: - # - # - `shared`: use a shared certificate/key - # pair specified with the `cluster_cert` - # and `cluster_cert_key` settings. - # Note that CP and DP nodes have to present - # the same certificate to establish mTLS - # connections. - # - `pki`: use `cluster_ca_cert`, - # `cluster_server_name` and `cluster_cert` - # for verification. - # These are different certificates for each - # DP node, but issued by a cluster-wide - # common CA certificate: `cluster_ca_cert`. - -#cluster_cert = # Filename of the cluster certificate to use - # when establishing secure communication - # between control and data plane nodes. - # You can use the `kong hybrid` command to - # generate the certificate/key pair. - # Under `shared` mode, it must be the same - # for all nodes. Under `pki` mode it - # should be a different certificate for each - # DP node. - -#cluster_cert_key = # Filename of the cluster certificate key to - # use when establishing secure communication - # between control and data plane nodes. - # You can use the `kong hybrid` command to - # generate the certificate/key pair. - # Under `shared` mode, it must be the same - # for all nodes. Under `pki` mode it - # should be a different certificate for each - # DP node. - -#cluster_ca_cert = # The trusted CA certificate file in PEM - # format used to verify the `cluster_cert`. - # Required if `cluster_mtls` is set to `pki`, - # ignored otherwise. - -#cluster_server_name = # The server name used in the SNI of the TLS - # connection from a DP node to a CP node. - # Must match the Common Name (CN) or Subject - # Alternative Name (SAN) found in the CP - # certificate. - # If `cluster_mtls` is set to - # `shared`, this setting is ignored and - # `kong_clustering` is used. - -#cluster_control_plane = # To be used by data plane nodes only: - # address of the control plane node from - # which configuration updates will be fetched, - # in `host:port` format. 
- -#cluster_listen = 0.0.0.0:8005 - # Comma-separated list of addresses and ports on - # which the cluster control plane server should listen - # for data plane connections. - # The cluster communication port of the control plane - # must be accessible by all the data planes - # within the same cluster. This port is mTLS protected - # to ensure end-to-end security and integrity. - # - # This setting has no effect if `role` is not set to - # `control_plane`. - -#------------------------------------------------------------------------------ -# NGINX -#------------------------------------------------------------------------------ - -#proxy_listen = 0.0.0.0:8000 reuseport backlog=16384, 0.0.0.0:8443 http2 ssl reuseport backlog=16384 - # Comma-separated list of addresses and ports on - # which the proxy server should listen for - # HTTP/HTTPS traffic. - # The proxy server is the public entry point of Kong, - # which proxies traffic from your consumers to your - # backend services. This value accepts IPv4, IPv6, and - # hostnames. - # - # Some suffixes can be specified for each pair: - # - # - `ssl` will require that all connections made - # through a particular address/port be made with TLS - # enabled. - # - `http2` will allow for clients to open HTTP/2 - # connections to Kong's proxy server. - # - `proxy_protocol` will enable usage of the - # PROXY protocol for a given address/port. - # - `deferred` instructs to use a deferred accept on - # Linux (the TCP_DEFER_ACCEPT socket option). - # - `bind` instructs to make a separate bind() call - # for a given address:port pair. - # - `reuseport` instructs to create an individual - # listening socket for each worker process - # allowing the Kernel to better distribute incoming - # connections between worker processes - # - `backlog=N` sets the maximum length for the queue - # of pending TCP connections. This number should - # not be too small in order to prevent clients - # seeing "Connection refused" error connecting to - # a busy Kong instance. - # **Note:** on Linux, this value is limited by the - # setting of `net.core.somaxconn` Kernel parameter. - # In order for the larger `backlog` set here to take - # effect it is necessary to raise - # `net.core.somaxconn` at the same time to match or - # exceed the `backlog` number set. - # - # This value can be set to `off`, thus disabling - # the HTTP/HTTPS proxy port for this node. - # If stream_listen is also set to `off`, this enables - # 'control-plane' mode for this node - # (in which all traffic proxying capabilities are - # disabled). This node can then be used only to - # configure a cluster of Kong - # nodes connected to the same datastore. - # - # Example: - # `proxy_listen = 0.0.0.0:443 ssl, 0.0.0.0:444 http2 ssl` - # - # See http://nginx.org/en/docs/http/ngx_http_core_module.html#listen - # for a description of the accepted formats for this - # and other `*_listen` values. - # - # See https://www.nginx.com/resources/admin-guide/proxy-protocol/ - # for more details about the `proxy_protocol` - # parameter. - # - # Not all `*_listen` values accept all formats - # specified in nginx's documentation. - -#stream_listen = off - # Comma-separated list of addresses and ports on - # which the stream mode should listen. - # - # This value accepts IPv4, IPv6, and hostnames. - # Some suffixes can be specified for each pair: - # - `ssl` will require that all connections made - # through a particular address/port be made with TLS - # enabled. 
- # - `proxy_protocol` will enable usage of the - # PROXY protocol for a given address/port. - # - `bind` instructs to make a separate bind() call - # for a given address:port pair. - # - `reuseport` instructs to create an individual - # listening socket for each worker process - # allowing the Kernel to better distribute incoming - # connections between worker processes - # - `backlog=N` sets the maximum length for the queue - # of pending TCP connections. This number should - # not be too small in order to prevent clients - # seeing "Connection refused" error connecting to - # a busy Kong instance. - # **Note:** on Linux, this value is limited by the - # setting of `net.core.somaxconn` Kernel parameter. - # In order for the larger `backlog` set here to take - # effect it is necessary to raise - # `net.core.somaxconn` at the same time to match or - # exceed the `backlog` number set. - # - # **Note:** The `ssl` suffix is not supported, - # and each address/port will accept TCP with or - # without TLS enabled. - # - # Examples: - # - # ``` - # stream_listen = 127.0.0.1:7000 reuseport backlog=16384 - # stream_listen = 0.0.0.0:989 reuseport backlog=65536, 0.0.0.0:20 - # stream_listen = [::1]:1234 backlog=16384 - # ``` - # - # By default this value is set to `off`, thus - # disabling the stream proxy port for this node. - -# See http://nginx.org/en/docs/stream/ngx_stream_core_module.html#listen -# for a description of the formats that Kong might accept in stream_listen. - -#admin_listen = 127.0.0.1:8001 reuseport backlog=16384, 127.0.0.1:8444 http2 ssl reuseport backlog=16384 - # Comma-separated list of addresses and ports on - # which the Admin interface should listen. - # The Admin interface is the API allowing you to - # configure and manage Kong. - # Access to this interface should be *restricted* - # to Kong administrators *only*. This value accepts - # IPv4, IPv6, and hostnames. - # - # Some suffixes can be specified for each pair: - # - # - `ssl` will require that all connections made - # through a particular address/port be made with TLS - # enabled. - # - `http2` will allow for clients to open HTTP/2 - # connections to Kong's proxy server. - # - `proxy_protocol` will enable usage of the - # PROXY protocol for a given address/port. - # - `deferred` instructs to use a deferred accept on - # Linux (the TCP_DEFER_ACCEPT socket option). - # - `bind` instructs to make a separate bind() call - # for a given address:port pair. - # - `reuseport` instructs to create an individual - # listening socket for each worker process - # allowing the Kernel to better distribute incoming - # connections between worker processes - # - `backlog=N` sets the maximum length for the queue - # of pending TCP connections. This number should - # not be too small in order to prevent clients - # seeing "Connection refused" error connecting to - # a busy Kong instance. - # **Note:** on Linux, this value is limited by the - # setting of `net.core.somaxconn` Kernel parameter. - # In order for the larger `backlog` set here to take - # effect it is necessary to raise - # `net.core.somaxconn` at the same time to match or - # exceed the `backlog` number set. - # - # This value can be set to `off`, thus disabling - # the Admin interface for this node, enabling a - # 'data-plane' mode (without configuration - # capabilities) pulling its configuration changes - # from the database. 
- # - # Example: `admin_listen = 127.0.0.1:8444 http2 ssl` - - #status_listen = off # Comma-separated list of addresses and ports on - # which the Status API should listen. - # The Status API is a read-only endpoint - # allowing monitoring tools to retrieve metrics, - # healthiness, and other non-sensitive information - # of the current Kong node. - # - # The following suffix can be specified for each pair: - # - # - `ssl` will require that all connections made - # through a particular address/port be made with TLS - # enabled. - # - # This value can be set to `off`, disabling - # the Status API for this node. - # - # Example: `status_listen = 0.0.0.0:8100` - - -#nginx_user = nobody nobody # Defines user and group credentials used by - # worker processes. If group is omitted, a - # group whose name equals that of user is - # used. - # - # Example: `nginx_user = nginx www` - -#nginx_worker_processes = auto # Determines the number of worker processes - # spawned by Nginx. - # - # See http://nginx.org/en/docs/ngx_core_module.html#worker_processes - # for detailed usage of the equivalent Nginx - # directive and a description of accepted - # values. - -#nginx_daemon = on # Determines whether Nginx will run as a daemon - # or as a foreground process. Mainly useful - # for development or when running Kong inside - # a Docker environment. - # - # See http://nginx.org/en/docs/ngx_core_module.html#daemon. - -#mem_cache_size = 128m # Size of each of the two in-memory caches - # for database entities. The accepted units are - # `k` and `m`, with a minimum recommended value of - # a few MBs. - # - # **Note**: As this option controls the size of two - # different cache entries, the total memory Kong - # uses to cache entities might be double this value. - -#ssl_cipher_suite = intermediate # Defines the TLS ciphers served by Nginx. - # Accepted values are `modern`, - # `intermediate`, `old`, or `custom`. - # - # See https://wiki.mozilla.org/Security/Server_Side_TLS - # for detailed descriptions of each cipher - # suite. - -#ssl_ciphers = # Defines a custom list of TLS ciphers to be - # served by Nginx. This list must conform to - # the pattern defined by `openssl ciphers`. - # This value is ignored if `ssl_cipher_suite` - # is not `custom`. - -#ssl_protocols = TLSv1.1 TLSv1.2 TLSv1.3 - # Enables the specified protocols for - # client-side connections. The set of - # supported protocol versions also depends - # on the version of OpenSSL Kong was built - # with. This value is ignored if - # `ssl_cipher_suite` is not `custom`. - # - # See http://nginx.org/en/docs/http/ngx_http_ssl_module.html#ssl_protocols - -#ssl_prefer_server_ciphers = on # Specifies that server ciphers should be - # preferred over client ciphers when using - # the SSLv3 and TLS protocols. This value is - # ignored if `ssl_cipher_suite` is not `custom`. - # - # See http://nginx.org/en/docs/http/ngx_http_ssl_module.html#ssl_prefer_server_ciphers - -#ssl_session_tickets = on # Enables or disables session resumption through - # TLS session tickets. This has no impact when - # used with TLSv1.3. - # - # Kong enables this by default for performance - # reasons, but it has security implications: - # https://github.com/mozilla/server-side-tls/issues/135 - # - # See http://nginx.org/en/docs/http/ngx_http_ssl_module.html#ssl_session_tickets - -#ssl_session_timeout = 1d # Specifies a time during which a client may - # reuse the session parameters. 
See the rationale: - # https://github.com/mozilla/server-side-tls/issues/198 - # - # See http://nginx.org/en/docs/http/ngx_http_ssl_module.html#ssl_session_timeout - -#ssl_cert = # The absolute path to the SSL certificate for - # `proxy_listen` values with SSL enabled. - -#ssl_cert_key = # The absolute path to the SSL key for - # `proxy_listen` values with SSL enabled. - -#client_ssl = off # Determines if Nginx should send client-side - # SSL certificates when proxying requests. - -#client_ssl_cert = # If `client_ssl` is enabled, the absolute - # path to the client SSL certificate for the - # `proxy_ssl_certificate` directive. Note that - # this value is statically defined on the - # node, and currently cannot be configured on - # a per-API basis. - -#client_ssl_cert_key = # If `client_ssl` is enabled, the absolute - # path to the client SSL key for the - # `proxy_ssl_certificate_key` directive. Note - # this value is statically defined on the - # node, and currently cannot be configured on - # a per-API basis. - -#admin_ssl_cert = # The absolute path to the SSL certificate for - # `admin_listen` values with SSL enabled. - -#admin_ssl_cert_key = # The absolute path to the SSL key for - # `admin_listen` values with SSL enabled. - -#status_ssl_cert = # The absolute path to the SSL certificate for - # `status_listen` values with SSL enabled. - -#status_ssl_cert_key = # The absolute path to the SSL key for - # `status_listen` values with SSL enabled. - -#headers = server_tokens, latency_tokens - # Comma-separated list of headers Kong should - # inject in client responses. - # - # Accepted values are: - # - `Server`: Injects `Server: kong/x.y.z` - # on Kong-produced response (e.g. Admin - # API, rejected requests from auth plugin). - # - `Via`: Injects `Via: kong/x.y.z` for - # successfully proxied requests. - # - `X-Kong-Proxy-Latency`: Time taken - # (in milliseconds) by Kong to process - # a request and run all plugins before - # proxying the request upstream. - # - `X-Kong-Response-Latency`: Time taken - # (in milliseconds) by Kong to produce - # a response in case of e.g. plugin - # short-circuiting the request, or - # in case of an error. - # - `X-Kong-Upstream-Latency`: Time taken - # (in milliseconds) by the upstream - # service to send response headers. - # - `X-Kong-Admin-Latency`: Time taken - # (in milliseconds) by Kong to process - # an Admin API request. - # - `X-Kong-Upstream-Status`: The HTTP status - # code returned by the upstream service. - # This is particularly useful for clients to - # distinguish upstream statuses if the - # response is rewritten by a plugin. - # - `server_tokens`: Same as specifying both - # `Server` and `Via`. - # - `latency_tokens`: Same as specifying - # `X-Kong-Proxy-Latency`, - # `X-Kong-Response-Latency`, - # `X-Kong-Admin-Latency` and - # `X-Kong-Upstream-Latency`. - # - # In addition to those, this value can be set - # to `off`, which prevents Kong from injecting - # any of the above headers. Note that this - # does not prevent plugins from injecting - # headers of their own. - # - # Example: `headers = via, latency_tokens` - -#trusted_ips = # Defines trusted IP address blocks that are - # known to send correct `X-Forwarded-*` - # headers. - # Requests from trusted IPs make Kong forward - # their `X-Forwarded-*` headers upstream. - # Non-trusted requests make Kong insert its - # own `X-Forwarded-*` headers. - # - # This property also sets the - # `set_real_ip_from` directive(s) in the Nginx - # configuration. 
It accepts the same type of - # values (CIDR blocks) but as a - # comma-separated list. - # - # To trust *all* /!\ IPs, set this value to - # `0.0.0.0/0,::/0`. - # - # If the special value `unix:` is specified, - # all UNIX-domain sockets will be trusted. - # - # See http://nginx.org/en/docs/http/ngx_http_realip_module.html#set_real_ip_from - # for examples of accepted values. - -#real_ip_header = X-Real-IP # Defines the request header field whose value - # will be used to replace the client address. - # This value sets the `ngx_http_realip_module` - # directive of the same name in the Nginx - # configuration. - # - # If this value receives `proxy_protocol`: - # - # - at least one of the `proxy_listen` entries - # must have the `proxy_protocol` flag - # enabled. - # - the `proxy_protocol` parameter will be - # appended to the `listen` directive of the - # Nginx template. - # - # See http://nginx.org/en/docs/http/ngx_http_realip_module.html#real_ip_header - # for a description of this directive. - -#real_ip_recursive = off # This value sets the `ngx_http_realip_module` - # directive of the same name in the Nginx - # configuration. - # - # See http://nginx.org/en/docs/http/ngx_http_realip_module.html#real_ip_recursive - # for a description of this directive. - -#error_default_type = text/plain # Default MIME type to use when the request - # `Accept` header is missing and Nginx - # is returning an error for the request. - # Accepted values are `text/plain`, - # `text/html`, `application/json`, and - # `application/xml`. - -#upstream_keepalive_pool_size = 60 # Sets the default size of the upstream - # keepalive connection pools. - # Upstream keepalive connection pools - # are segmented by the `dst ip/dst - # port/SNI` attributes of a connection. - # A value of `0` will disable upstream - # keepalive connections by default, forcing - # each upstream request to open a new - # connection. - -#upstream_keepalive_max_requests = 100 # Sets the default maximum number of - # requests that can be proxied upstream - # through one keepalive connection. - # After the maximum number of requests - # is reached, the connection will be - # closed. - # A value of `0` will disable this - # behavior, and a keepalive connection - # can be used to proxy an indefinite - # number of requests. - -#upstream_keepalive_idle_timeout = 60 # Sets the default timeout (in seconds) - # for which an upstream keepalive - # connection should be kept open. When - # the timeout is reached while the - # connection has not been reused, it - # will be closed. - # A value of `0` will disable this - # behavior, and an idle keepalive - # connection may be kept open - # indefinitely. - -#------------------------------------------------------------------------------ -# NGINX injected directives -#------------------------------------------------------------------------------ - -# Nginx directives can be dynamically injected in the runtime nginx.conf file -# without requiring a custom Nginx configuration template. -# -# All configuration properties respecting the naming scheme -# `nginx_<namespace>_<directive>` will result in `<directive>` being injected in -# the Nginx configuration block corresponding to the property's `<namespace>`. -# Example: -# `nginx_proxy_large_client_header_buffers = 8 24k` -# -# Will inject the following directive in Kong's proxy `server {}` block: -# -# `large_client_header_buffers 8 24k;` -# -# The following namespaces are supported: -# -# - `nginx_main_<directive>`: Injects `<directive>` in Kong's configuration -# `main` context. 
-# - `nginx_events_<directive>`: Injects `<directive>` in Kong's `events {}` -# block. -# - `nginx_http_<directive>`: Injects `<directive>` in Kong's `http {}` block. -# - `nginx_proxy_<directive>`: Injects `<directive>` in Kong's proxy -# `server {}` block. -# - `nginx_upstream_<directive>`: Injects `<directive>` in Kong's proxy -# `upstream {}` block. -# - `nginx_admin_<directive>`: Injects `<directive>` in Kong's Admin API -# `server {}` block. -# - `nginx_status_<directive>`: Injects `<directive>` in Kong's Status API -# `server {}` block (only effective if `status_listen` is enabled). -# - `nginx_stream_<directive>`: Injects `<directive>` in Kong's stream module -# `stream {}` block (only effective if `stream_listen` is enabled). -# - `nginx_sproxy_<directive>`: Injects `<directive>` in Kong's stream module -# `server {}` block (only effective if `stream_listen` is enabled). -# - `nginx_supstream_<directive>`: Injects `<directive>` in Kong's stream -# module `upstream {}` block. -# -# As with other configuration properties, Nginx directives can be injected via -# environment variables when capitalized and prefixed with `KONG_`. -# Example: -# `KONG_NGINX_HTTP_SSL_PROTOCOLS` -> `nginx_http_ssl_protocols` -# -# Will inject the following directive in Kong's `http {}` block: -# -# `ssl_protocols <value>;` -# -# If different sets of protocols are desired between the proxy and Admin API -# server, you may specify `nginx_proxy_ssl_protocols` and/or -# `nginx_admin_ssl_protocols`, both of which take precedence over the -# `http {}` block. - -#nginx_main_worker_rlimit_nofile = auto - # Changes the limit on the maximum number of open files - # for worker processes. - # - # The special and default value of `auto` sets this - # value to `ulimit -n` with the upper bound limited to - # 16384 as a measure to protect against excess memory use. - # - # See http://nginx.org/en/docs/ngx_core_module.html#worker_rlimit_nofile - -#nginx_events_worker_connections = auto - # Sets the maximum number of simultaneous - # connections that can be opened by a worker process. - # - # The special and default value of `auto` sets this - # value to `ulimit -n` with the upper bound limited to - # 16384 as a measure to protect against excess memory use. - # - # See http://nginx.org/en/docs/ngx_core_module.html#worker_connections - -#nginx_http_client_header_buffer_size = 1k # Sets buffer size for reading the - # client request headers. - # See http://nginx.org/en/docs/http/ngx_http_core_module.html#client_header_buffer_size - -#nginx_http_large_client_header_buffers = 4 8k # Sets the maximum number and - # size of buffers used for - # reading large client - # request headers. - # See http://nginx.org/en/docs/http/ngx_http_core_module.html#large_client_header_buffers - -#nginx_http_client_max_body_size = 0 # Defines the maximum request body size - # allowed by requests proxied by Kong, - # specified in the Content-Length request - # header. If a request exceeds this - # limit, Kong will respond with a 413 - # (Request Entity Too Large). Setting - # this value to 0 disables checking the - # request body size. - # See http://nginx.org/en/docs/http/ngx_http_core_module.html#client_max_body_size - -#nginx_http_client_body_buffer_size = 8k # Defines the buffer size for reading - # the request body. If the client - # request body is larger than this - # value, the body will be buffered to - # disk. Note that when the body is - # buffered to disk, Kong plugins that - # access or manipulate the request - # body may not work, so it is - # advisable to set this value as high - # as possible (e.g., set it as high - # as `client_max_body_size` to force - # request bodies to be kept in - # memory). 
Do note that - # high-concurrency environments will - # require significant memory - # allocations to process many - # concurrent large request bodies. - # See http://nginx.org/en/docs/http/ngx_http_core_module.html#client_body_buffer_size - -#------------------------------------------------------------------------------ -# DATASTORE -#------------------------------------------------------------------------------ - -# Kong can run with a database to store coordinated data between Kong nodes in -# a cluster, or without a database, where each node stores its information -# independently in memory. -# -# When using a database, Kong will store data for all its entities (such as -# Routes, Services, Consumers, and Plugins) in either Cassandra or PostgreSQL, -# and all Kong nodes belonging to the same cluster must connect -# to the same database. -# -# Kong supports the following database versions: -# - **PostgreSQL**: 9.5 and above. -# - **Cassandra**: 2.2 and above. -# -# When not using a database, Kong is said to be in "DB-less mode": it will keep -# its entities in memory, and each node needs to have this data entered via a -# declarative configuration file, which can be specified through the -# `declarative_config` property, or via the Admin API using the `/config` -# endpoint. -# -# When using Postgres as the backend storage, you can optionally enable Kong -# to serve read queries from a separate database instance. -# When the number of proxies is large, this can greatly reduce the load -# on the main Postgres instance and achieve better scalability. It may also -# reduce the latency jitter if the Kong proxy node's latency to the main -# Postgres instance is high. -# -# The read-only Postgres instance only serves read queries, and write -# queries still go to the main connection. The read-only Postgres instance -# can be eventually consistent while replicating changes from the main -# instance. -# -# At least the `pg_ro_host` config is needed to enable this feature. -# By default, all other database settings for the read-only connection are -# inherited from the corresponding main connection settings described above, but -# they may be explicitly overridden using the `pg_ro_*` settings below. - -database = postgres # Determines which of PostgreSQL or Cassandra - # this node will use as its datastore. - # Accepted values are `postgres`, - # `cassandra`, and `off`. - -#pg_host = 127.0.0.1 # Host of the Postgres server. -#pg_port = 5432 # Port of the Postgres server. -#pg_timeout = 5000 # Defines the timeout (in ms) for connecting, - # reading and writing. - -pg_user = kong # Postgres user. -pg_password = mxpass # Postgres user's password. -pg_database = kong # The database name to connect to. - -#pg_schema = # The database schema to use. If unspecified, - # Kong will respect the `search_path` value of - # your PostgreSQL instance. - -#pg_ssl = off # Toggles client-server TLS connections - # between Kong and PostgreSQL. - -#pg_ssl_verify = off # Toggles server certificate verification if - # `pg_ssl` is enabled. - # See the `lua_ssl_trusted_certificate` - # setting to specify a certificate authority. - -#pg_max_concurrent_queries = 0 # Sets the maximum number of concurrent queries - # that can be executing at any given time. This - # limit is enforced per worker process; the - # total number of concurrent queries for this - # node will be: - # `pg_max_concurrent_queries * nginx_worker_processes`. - # - # The default value of 0 removes this - # concurrency limitation. 
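The DB-less mode described above can also be exercised programmatically: a node started with `database = off` accepts a declarative configuration through the Admin API `/config` endpoint. Below is a minimal sketch, assuming the default `admin_listen` port documented above and the `config` field of the `/config` endpoint (check the docs of the Kong version in use); the service and route values mirror the ones registered in `scripts/kong.sh` later in this changeset:

```python
"""Minimal sketch: push a declarative config to a DB-less Kong node."""
import requests  # assumed available; any HTTP client would do

# Mirrors the pyispyb.core service/route created in scripts/kong.sh.
DECLARATIVE_CONFIG = """
_format_version: "2.1"
services:
  - name: pyispyb.core
    url: http://127.0.0.1:5000/ispyb/api/v1
    routes:
      - hosts: [pyispyb.core]
"""

# 8001 is the default admin_listen port shown earlier in this file.
resp = requests.post(
    "http://127.0.0.1:8001/config",
    json={"config": DECLARATIVE_CONFIG},
    timeout=10,
)
resp.raise_for_status()
```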
- -#pg_semaphore_timeout = 60000 # Defines the timeout (in ms) after which - # PostgreSQL query semaphore resource - # acquisition attempts will fail. Such - # failures will generally result in the - # associated proxy or Admin API request - # failing with an HTTP 500 status code. - # Detailed discussion of this behavior is - # available in the online documentation. - -#pg_ro_host = NONE # Same as `pg_host`, but for the - # read-only connection. - # A value of `NONE` disables the - # read-only connection. - # **Note:** Refer to the documentation - # section above for detailed usage. - -#pg_ro_port = # Same as `pg_port`, but for the - # read-only connection. - -#pg_ro_timeout = # Same as `pg_timeout`, but for the - # read-only connection. - -#pg_ro_user = # Same as `pg_user`, but for the - # read-only connection. - -#pg_ro_password = # Same as `pg_password`, but for the - # read-only connection. - -#pg_ro_database = # Same as `pg_database`, but for the - # read-only connection. - -#pg_ro_schema = # Same as `pg_schema`, but for the - # read-only connection. - -#pg_ro_ssl = # Same as `pg_ssl`, but for the - # read-only connection. - -#pg_ro_ssl_verify = - # Same as `pg_ssl_verify`, but for the - # read-only connection. - -#pg_ro_max_concurrent_queries = - # Same as `pg_max_concurrent_queries`, but for - # the read-only connection. - # Note: read-only concurrency is not shared - # with the main (read-write) connection. - -#pg_ro_semaphore_timeout = - # Same as `pg_semaphore_timeout`, but for the - # read-only connection. - -#cassandra_contact_points = 127.0.0.1 # A comma-separated list of contact - # points to your cluster. - # You may specify IP addresses or - # hostnames. Note that the port - # component of SRV records will be - # ignored in favor of `cassandra_port`. - # When connecting to a multi-DC cluster, - # ensure that contact points from the - # local datacenter are specified first - # in this list. - -#cassandra_port = 9042 # The port on which your nodes are listening. - # All your nodes and contact points must - # listen on the same port. - -#cassandra_keyspace = kong # The keyspace to use in your cluster. Will - # be created if it doesn't exist. - -#cassandra_write_consistency = ONE # Consistency setting to use when - # writing to the Cassandra cluster. - -#cassandra_read_consistency = ONE # Consistency setting to use when - # reading from the Cassandra cluster. - -#cassandra_timeout = 5000 # Defines the timeout (in ms) for reading - # and writing. - -#cassandra_ssl = off # Toggles client-to-node TLS connections - # between Kong and Cassandra. - -#cassandra_ssl_verify = off # Toggles server certificate verification if - # `cassandra_ssl` is enabled. - # See the `lua_ssl_trusted_certificate` - # setting to specify a certificate authority. - -#cassandra_username = kong # Username when using the - # `PasswordAuthenticator` scheme. - -#cassandra_password = # Password when using the - # `PasswordAuthenticator` scheme. - -#cassandra_lb_policy = RequestRoundRobin # Load balancing policy to use when - # distributing queries across your - # Cassandra cluster. - # Accepted values are: - # `RoundRobin`, `RequestRoundRobin`, - # `DCAwareRoundRobin`, and - # `RequestDCAwareRoundRobin`. - # Policies prefixed with "Request" - # make efficient use of established - # connections throughout the same - # request. - # Prefer "DCAware" policies if and - # only if you are using a - # multi-datacenter cluster. 
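The `pg_ro_*` entries above all follow the same fallback rule: any read-only setting left unset inherits the corresponding main `pg_*` value. The following toy sketch illustrates that documented fallback; the function and key names are hypothetical, and this is not Kong's actual implementation:

```python
"""Toy illustration of the documented pg_ro_* fallback behaviour."""

PG_KEYS = (
    "host", "port", "timeout", "user", "password", "database",
    "schema", "ssl", "ssl_verify", "max_concurrent_queries",
    "semaphore_timeout",
)


def effective_ro_config(conf: dict) -> dict:
    """Resolve the read-only connection settings from a flat config dict."""
    resolved = {}
    for key in PG_KEYS:
        # An explicit pg_ro_* value wins; otherwise inherit the main pg_* one.
        override = conf.get(f"pg_ro_{key}")
        resolved[key] = override if override is not None else conf.get(f"pg_{key}")
    return resolved


# pg_ro_host is taken as given; everything else inherits the main values.
print(effective_ro_config({
    "pg_host": "127.0.0.1",
    "pg_user": "kong",
    "pg_ro_host": "replica.example.org",
}))
```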
- -#cassandra_local_datacenter = # When using the `DCAwareRoundRobin` - # or `RequestDCAwareRoundRobin` load - # balancing policy, you must specify the name - # of the local (closest) datacenter for this - # Kong node. - -#cassandra_refresh_frequency = 60 # Frequency (in seconds) at which - # the cluster topology will be - # checked for new or decommissioned - # nodes. - # A value of `0` will disable this - # check, and the cluster topology - # will never be refreshed. - -#cassandra_repl_strategy = SimpleStrategy # When migrating for the first time, - # Kong will use this setting to - # create your keyspace. - # Accepted values are - # `SimpleStrategy` and - # `NetworkTopologyStrategy`. - -#cassandra_repl_factor = 1 # When migrating for the first time, Kong - # will create the keyspace with this - # replication factor when using the - # `SimpleStrategy`. - -#cassandra_data_centers = dc1:2,dc2:3 # When migrating for the first time, Kong - # will use this setting when using the - # `NetworkTopologyStrategy`. - # The format is a comma-separated list - # made of `<dc_name>:<repl_factor>`. - -#cassandra_schema_consensus_timeout = 10000 # Defines the timeout (in ms) for - # the waiting period to reach a - # schema consensus between your - # Cassandra nodes. - # This value is only used during - # migrations. - -declarative_config = /home/mxuser/software/kong/conf/kong.yml - # The path to the declarative configuration - # file which holds the specification of all - # entities (Routes, Services, Consumers, etc.) - # to be used when the `database` is set to - # `off`. - # - # Entities are stored in Kong's in-memory cache, - # so you must ensure that enough memory is - # allocated to it via the `mem_cache_size` - # property. You must also ensure that items - # in the cache never expire, which means that - # `db_cache_ttl` should preserve its default - # value of 0. - -#------------------------------------------------------------------------------ -# DATASTORE CACHE -#------------------------------------------------------------------------------ - -# In order to avoid unnecessary communication with the datastore, Kong caches -# entities (such as APIs, Consumers, Credentials...) for a configurable period -# of time. It also handles invalidations if such an entity is updated. -# -# This section allows for configuring the behavior of Kong regarding the -# caching of such configuration entities. - -#db_update_frequency = 5 # Frequency (in seconds) at which to check for - # updated entities with the datastore. - # - # When a node creates, updates, or deletes an - # entity via the Admin API, other nodes need - # to wait for the next poll (configured by - # this value) to eventually purge the old - # cached entity and start using the new one. - -#db_update_propagation = 0 # Time (in seconds) taken for an entity in the - # datastore to be propagated to replica nodes - # of another datacenter. - # - # When in a distributed environment such as - # a multi-datacenter Cassandra cluster, this - # value should be the maximum number of - # seconds taken by Cassandra to propagate a - # row to other datacenters. - # - # When set, this property will increase the - # time taken by Kong to propagate the change - # of an entity. - # - # Single-datacenter setups or PostgreSQL - # servers should suffer no such delays, and - # this value can be safely set to 0. - -#db_cache_ttl = 0 # Time-to-live (in seconds) of an entity from - # the datastore when cached by this node. 
- # - # Database misses (no entity) are also cached - # according to this setting if you do not - # configure `db_cache_neg_ttl`. - # - # If set to 0 (default), such cached entities - # or misses never expire. - -#db_cache_neg_ttl = # Time-to-live (in seconds) of a datastore - # miss (no entity). - # - # If not specified (default), `db_cache_ttl` - # value will be used instead. - # - # If set to 0, misses will never expire. - -#db_resurrect_ttl = 30 # Time (in seconds) for which stale entities - # from the datastore should be resurrected for - # when they cannot be refreshed (e.g., the - # datastore is unreachable). When this TTL - # expires, a new attempt to refresh the stale - # entities will be made. - -#db_cache_warmup_entities = services, plugins - # Entities to be pre-loaded from the datastore - # into the in-memory cache at Kong start-up. - # This speeds up the first access of endpoints - # that use the given entities. - # - # When the `services` entity is configured - # for warmup, the DNS entries for values in - # its `host` attribute are pre-resolved - # asynchronously as well. - # - # Cache size set in `mem_cache_size` should - # be set to a value large enough to hold all - # instances of the specified entities. - # If the size is insufficient, Kong will log - # a warning. - -#------------------------------------------------------------------------------ -# DNS RESOLVER -#------------------------------------------------------------------------------ - -# By default, the DNS resolver will use the standard configuration files -# `/etc/hosts` and `/etc/resolv.conf`. The settings in the latter file will be -# overridden by the environment variables `LOCALDOMAIN` and `RES_OPTIONS` if -# they have been set. -# -# Kong will resolve hostnames as either `SRV` or `A` records (in that order, and -# `CNAME` records will be dereferenced in the process). -# In case a name was resolved as an `SRV` record it will also override any given -# port number by the `port` field contents received from the DNS server. -# -# The DNS options `SEARCH` and `NDOTS` (from the `/etc/resolv.conf` file) will -# be used to expand short names to fully qualified ones. So it will first try -# the entire `SEARCH` list for the `SRV` type, if that fails it will try the -# `SEARCH` list for `A`, etc. -# -# For the duration of the `ttl`, the internal DNS resolver will loadbalance each -# request it gets over the entries in the DNS record. For `SRV` records the -# `weight` fields will be honored, but it will only use the lowest `priority` -# field entries in the record. - -#dns_resolver = # Comma separated list of nameservers, each - # entry in `ip[:port]` format to be used by - # Kong. If not specified the nameservers in - # the local `resolv.conf` file will be used. - # Port defaults to 53 if omitted. Accepts - # both IPv4 and IPv6 addresses. - -#dns_hostsfile = /etc/hosts # The hosts file to use. This file is read - # once and its content is static in memory. - # To read the file again after modifying it, - # Kong must be reloaded. - -#dns_order = LAST,SRV,A,CNAME # The order in which to resolve different - # record types. The `LAST` type means the - # type of the last successful lookup (for the - # specified name). The format is a (case - # insensitive) comma separated list. - -#dns_valid_ttl = # By default, DNS records are cached using - # the TTL value of a response. If this - # property receives a value (in seconds), it - # will override the TTL for all records. 
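The `dns_order` semantics described above are easy to misread: `LAST` is not a fixed record type, but a pointer to whichever type last succeeded for that name. A small sketch of that lookup loop follows; `resolve` and `query` are hypothetical stubs, not Kong's internal resolver API:

```python
"""Sketch of the documented dns_order lookup loop, including LAST."""

last_success: dict = {}  # name -> record type of the last successful lookup


def resolve(name, query, order=("LAST", "SRV", "A", "CNAME")):
    """Try each record type in `order`; `query(name, rtype)` is a
    caller-supplied lookup returning a record or None."""
    for rtype in order:
        if rtype == "LAST":
            rtype = last_success.get(name)
            if rtype is None:
                continue  # no previous success for this name yet
        record = query(name, rtype)
        if record is not None:
            last_success[name] = rtype  # remember for the next LAST lookup
            return rtype, record
    return None
```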
- -#dns_stale_ttl = 4 # Defines, in seconds, how long a record will - # remain in cache past its TTL. This value - # will be used while the new DNS record is - # fetched in the background. - # Stale data will be used from expiry of a - # record until either the refresh query - # completes, or the `dns_stale_ttl` number of - # seconds have passed. - -#dns_not_found_ttl = 30 # TTL in seconds for empty DNS responses and - # "(3) name error" responses. - -#dns_error_ttl = 1 # TTL in seconds for error responses. - -#dns_no_sync = off # If enabled, then upon a cache-miss every - # request will trigger its own dns query. - # When disabled, multiple requests for the - # same name/type will be synchronised to a - # single query. - -#------------------------------------------------------------------------------ -# TUNING & BEHAVIOR -#------------------------------------------------------------------------------ - -#worker_consistency = strict - # Defines whether this node should rebuild its - # state synchronously or asynchronously (the - # balancers and the router are rebuilt on - # updates that affect them, e.g., updates to - # Routes, Services or Upstreams, via the Admin - # API or loading a declarative configuration - # file). - # - # Accepted values are: - # - # - `strict`: the router will be rebuilt - # synchronously, causing incoming requests to - # be delayed until the rebuild is finished. - # - `eventual`: the router will be rebuilt - # asynchronously via a recurring background - # job running every second inside of each - # worker. - # - # Note that `strict` ensures that all workers - # of a given node will always proxy requests - # with an identical router, but that increased - # long tail latency can be observed if - # frequent Routes and Services updates are - # expected. - # Using `eventual` will help prevent long - # tail latency issues in such cases, but may - # cause workers to route requests differently - # for a short period of time after Routes and - # Services updates. - -#worker_state_update_frequency = 5 - # Defines how often worker state changes are - # checked by a background job. When a change - # is detected, a new router or balancer will be - # built, as needed. Raising this value will - # decrease the load on database servers and - # result in less jitter in proxy latency, but - # it might take more time to propagate changes - # to each individual worker. - -#------------------------------------------------------------------------------ -# DEVELOPMENT & MISCELLANEOUS -#------------------------------------------------------------------------------ - -# Additional settings inherited from lua-nginx-module allowing for more -# flexibility and advanced usage. -# -# See the lua-nginx-module documentation for more information: -# https://github.com/openresty/lua-nginx-module - -#lua_ssl_trusted_certificate = # Absolute path to the certificate - # authority file for Lua cosockets in PEM - # format. This certificate will be the one - # used for verifying Kong's database - # connections, when `pg_ssl_verify` or - # `cassandra_ssl_verify` are enabled. - # - # See https://github.com/openresty/lua-nginx-module#lua_ssl_trusted_certificate - -#lua_ssl_verify_depth = 1 # Sets the verification depth in the server - # certificates chain used by Lua cosockets, - # set by `lua_ssl_trusted_certificate`. - # This includes the certificates configured - # for Kong's database connections. 
- # - # See https://github.com/openresty/lua-nginx-module#lua_ssl_verify_depth - -#lua_package_path = ./?.lua;./?/init.lua; # Sets the Lua module search path - # (LUA_PATH). Useful when developing - # or using custom plugins not stored - # in the default search path. - # - # See https://github.com/openresty/lua-nginx-module#lua_package_path - -#lua_package_cpath = # Sets the Lua C module search path - # (LUA_CPATH). - # - # See https://github.com/openresty/lua-nginx-module#lua_package_cpath - -#lua_socket_pool_size = 30 # Specifies the size limit for every cosocket - # connection pool associated with every remote - # server. - # - # See https://github.com/openresty/lua-nginx-module#lua_socket_pool_size diff --git a/scripts/kong.sh b/scripts/kong.sh deleted file mode 100755 index 8cad2b82..00000000 --- a/scripts/kong.sh +++ /dev/null @@ -1,50 +0,0 @@ -KONG_DIR=/home/mxuser/software/kong -CONFIG_FILE=$KONG_DIR/conf/kong.conf - -HOST=http://localhost -PYISPYB_CORE_URL=http://127.0.0.1:5000/ispyb/api/v1 -ISPYB_SSX_URL=http://127.0.0.1:5010/ispyb/api/v1/ssx - -case "$1" in - start) - echo "Starting kong..." - sudo kong start -c $CONFIG_FILE - ;; - stop) - echo "Stopping kong..." - sudo kong stop -p $KONG_DIR - ;; - status) - echo "Kong status..." - sudo kong health -p $KONG_DIR - ;; - restart) - echo "Restarting kong..." - sudo kong restart -c $CONFIG_FILE - ;; - reset) - echo "Resetting kong..." - sudo kong migrations reset -c $CONFIG_FILE --v - sudo kong migrations bootstrap -c $CONFIG_FILE - ;; - init) - echo "Initializing kong services..." - curl -i -X POST --url $HOST:8001/services/ --data 'name=pyispyb.core' --data 'url=http://127.0.0.1:5000/ispyb/api/v1' - curl -i -X POST --url $HOST:8001/services/pyispyb.core/routes --data 'hosts[]=pyispyb.core' - - - curl -i -X POST --url $HOST:8001/services/ --data 'name=ispyb_ssx' --data 'url=http://127.0.0.1:5010/ispyb/api/v1/ssx' - curl -i -X POST --url $HOST:8001/services/ispyb_ssx/routes --data 'hosts[]=ispyb_ssx' - ;; - test) - curl -i -X GET --url $HOST:8000/proposals --header 'Host: pyispyb.core' --header "Authorization: Bearer MasterToken" - curl -i -X GET --url $HOST:8000/samples --header 'Host: ispyb_ssx' --header "Authorization: Bearer MasterToken" - ;; - - *) - echo "Usage: $0 {start | stop | status | restart | reset | init | test}" - exit 2 - ;; -esac - -exit 0 diff --git a/scripts/run_all.sh b/scripts/run_all.sh deleted file mode 100755 index ffaea114..00000000 --- a/scripts/run_all.sh +++ /dev/null @@ -1,9 +0,0 @@ -# Generates sqlalchemy db models -bash generate_core_models.sh ../ispyb_core_config.yml -bash generate_ssx_models.sh ../ispyb_ssx_config.yml -bash generate_em_models.sh ../ispyb_em_config.yml - -# Generates marshmallow and flask schemas -python3 generate_core_schemas.py -python3 generate_ssx_schemas.py -python3 generate_em_schemas.py diff --git a/scripts/test.sh b/scripts/test.sh new file mode 100644 index 00000000..af257c0d --- /dev/null +++ b/scripts/test.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +export ISPYB_ENVIRONMENT="test" + +pytest +flake8 \ No newline at end of file diff --git a/scripts/token.sh b/scripts/token.sh new file mode 100755 index 00000000..b05410c3 --- /dev/null +++ b/scripts/token.sh @@ -0,0 +1,69 @@ +#!/bin/bash +if [ $# -ne 5 ]; then + echo 1>&2 "Usage: . 
$0 hostname realm username clientid verify_ssl" + echo 1>&2 " options:" + echo 1>&2 " hostname: localhost:8081" + echo 1>&2 " realm: keycloak-demo" + echo 1>&2 " username: the Keycloak user to authenticate" + echo 1>&2 " clientid: demo" + echo 1>&2 " verify_ssl: use 'y' to verify SSL (otherwise curl is called with --insecure)" + +fi + +HOSTNAME=$1 +REALM_NAME=$2 +USERNAME=$3 +CLIENT_ID=$4 +SECURE=$5 + + + +KEYCLOAK_URL=https://$HOSTNAME/auth/realms/$REALM_NAME/protocol/openid-connect/token + + + +echo "Using Keycloak: $KEYCLOAK_URL" +echo "realm: $REALM_NAME" +echo "client-id: $CLIENT_ID" +echo "username: $USERNAME" +echo "secure: $SECURE" + + +if [[ $SECURE = 'y' ]]; then + INSECURE= +else + INSECURE=--insecure +fi + + +echo -n Password: +read -s PASSWORD + +# $INSECURE is deliberately left unquoted so an empty value expands to nothing. +curl -X POST "$KEYCLOAK_URL" $INSECURE \ + -H "Content-Type: application/x-www-form-urlencoded" \ + -d "username=$USERNAME" \ + -d "password=$PASSWORD" \ + -d 'grant_type=password' \ + -d 'client_secret=c340cd2d-28d5-4301-9977-3688a8baf2ea' \ + -d "client_id=$CLIENT_ID" + + + + + + +export TOKEN=$(curl -X POST "$KEYCLOAK_URL" $INSECURE \ + -H "Content-Type: application/x-www-form-urlencoded" \ + -d "username=$USERNAME" \ + -d "password=$PASSWORD" \ + -d 'grant_type=password' \ + -d 'client_secret=c340cd2d-28d5-4301-9977-3688a8baf2ea' \ + -d "client_id=$CLIENT_ID" | jq -r '.access_token') + + +echo $TOKEN + +if [[ $TOKEN != 'null' ]]; then + export KEYCLOAK_TOKEN=$TOKEN +fi diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 00000000..a34c00e2 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,69 @@ +[metadata] +name = pyispyb +author = ISPyB collaboration +author_email = ispyb-dev@esrf.fr +description = ISPyB FastAPI server +license_file = LICENSE +keywords = synchrotron, data acquisition, real-time monitoring, LIMS +long_description = file: README.md +long_description_content_type = text/markdown +classifiers = + Development Status :: 3 - Alpha + Intended Audience :: Developers + License :: OSI Approved :: BSD License + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.10 + Operating System :: OS Independent + Topic :: Software Development :: Libraries :: Python Modules +project-urls = + download = https://github.com/ispyb/py-ispyb + documentation = https://ispyb.gitlab-pages.esrf.fr/py-ispyb/ + + +[options] +python_requires = >=3.10 +zip_safe = true +include_package_data = true +packages = find: +package_dir = + pyispyb = pyispyb +install_requires = + ispyb-models == 1.0.0 + fastapi + pydantic[dotenv] + uvicorn + SQLAlchemy + pyjwt + ; mysqlclient + ; pdfkit + ; python-barcode + ; qrcode + ; python-keycloak == 0.26.1 + ; keycloak == 3.1.3 + ; keycloak-client + ; python_ldap == 3.4.0 + sqlparse + mysql-connector-python == 8.0.29 + +[options.packages.find] +exclude = + tests + +[options.entry_points] +console_scripts = + ispyb.simulate = pyispyb.simulation.cli:run + ispyb.rest = pyispyb.cli.rest:run + +[bdist_wheel] +universal = true + +[aliases] +test = pytest + +[flake8] +ignore = E501,W503,E203,W504,E251,E262,E265,E266,W291,W293 +# E501,W503,E203,W504,E251 -> let black handle formatting +# E262,E265,E266 -> be less opinionated about the way to write comments +# W291,W293 -> trailing whitespace in docstrings +exclude = + venv diff --git a/setup.py b/setup.py index 5ba991c9..60684932 100644 --- a/setup.py +++ b/setup.py @@ -1,22 +1,3 @@ -import setuptools +from setuptools import setup -with open("README.md", "r") as fh: - long_description = fh.read() - -setuptools.setup( - name="pyispyb", - version="1.0.0", - author="ISPyB collaboration", - 
author_email="ispyb-dev@esrf.fr", - description="ISPyB backend server", - long_description=long_description, - long_description_content_type="text/markdown", - url="https://github.com/ispyb/py-ispyb", - packages=setuptools.find_packages(), - license="LGPL-3.0", - classifiers=[ - "Development Status :: 1 - Planning", - "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)", - ], - python_requires='>=3.6', -) +setup() diff --git a/tasks/__init__.py b/tasks/__init__.py deleted file mode 100644 index 7bad3c13..00000000 --- a/tasks/__init__.py +++ /dev/null @@ -1,81 +0,0 @@ -# encoding: utf-8 -# pylint: disable=invalid-name,wrong-import-position -""" -The starting point of Invoke tasks for Example RESTful API Server project. -""" - -import logging -import os -import platform - -logging.basicConfig() -logger = logging.getLogger() -logger.setLevel(logging.INFO) -#logging.getLogger('app').setLevel(logging.DEBUG) - -try: - import colorlog -except ImportError: - pass -else: - formatter = colorlog.ColoredFormatter( - ( - '%(asctime)s ' - '[%(log_color)s%(levelname)s%(reset)s] ' - '[%(cyan)s%(name)s%(reset)s] ' - '%(message_log_color)s%(message)s' - ), - reset=True, - log_colors={ - 'DEBUG': 'bold_cyan', - 'INFO': 'bold_green', - 'WARNING': 'bold_yellow', - 'ERROR': 'bold_red', - 'CRITICAL': 'bold_red,bg_white', - }, - secondary_log_colors={ - 'message': { - 'DEBUG': 'white', - 'INFO': 'bold_white', - 'WARNING': 'bold_yellow', - 'ERROR': 'bold_red', - 'CRITICAL': 'bold_red', - }, - }, - style='%' - ) - - for handler in logger.handlers: - if isinstance(handler, logging.StreamHandler): - break - else: - handler = logging.StreamHandler() - logger.addHandler(handler) - handler.setFormatter(formatter) - - -from invoke import Collection -from invoke.executor import Executor - -from . import app - -# NOTE: `namespace` or `ns` name is required! -namespace = Collection( - app, -) - -def invoke_execute(context, command_name, **kwargs): - """ - Helper function to make invoke-tasks execution easier. - """ - results = Executor(namespace, config=context.config).execute((command_name, kwargs)) - target_task = context.root_namespace[command_name] - return results[target_task] - -namespace.configure({ - 'run': { - 'shell': '/bin/sh' if platform.system() != 'Windows' else os.environ.get('COMSPEC'), - }, - 'root_namespace': namespace, - 'invoke_execute': invoke_execute, -}) diff --git a/tasks/app/__init__.py b/tasks/app/__init__.py deleted file mode 100644 index 92a0851a..00000000 --- a/tasks/app/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# encoding: utf-8 -""" -Application related tasks for Invoke. -""" - -from invoke import Collection - -from . import dependencies, env, run, swagger - -from config import BaseConfig - -namespace = Collection( - dependencies, - env, - run, - swagger, -) - -namespace.configure({ - 'app': { - 'static_root': BaseConfig.STATIC_ROOT, - } -}) diff --git a/tasks/app/_utils.py b/tasks/app/_utils.py deleted file mode 100644 index 4a064bb5..00000000 --- a/tasks/app/_utils.py +++ /dev/null @@ -1,69 +0,0 @@ -# encoding: utf-8 -""" -Invoke tasks utilities for apps. -""" -import functools - -from invoke import Task as BaseTask - - -class Task(BaseTask): - """ - A patched Invoke Task adding support for decorated functions. - """ - def __init__(self, *args, **kwargs): - super(Task, self).__init__(*args, **kwargs) - # Make these tasks always contextualized (this is the only option in - # Invoke >=0.13), so we just backport this default on Invoke 0.12. 
- self.contextualized = True - - def argspec(self, body): - """ - See details in https://github.com/pyinvoke/invoke/pull/399. - """ - if hasattr(body, '__wrapped__'): - return self.argspec(body.__wrapped__) - return super(Task, self).argspec(body) - - -def app_context_task(*args, **kwargs): - """ - A helper Invoke Task decorator with auto app context activation. - - Examples: - - >>> @app_context_task - ... def my_task(context, some_arg, some_option='default'): - ... print("Done") - - >>> @app_context_task( - ... help={'some_arg': "This is something useful"} - ... ) - ... def my_task(context, some_arg, some_option='default'): - ... print("Done") - """ - if len(args) == 1: - func = args[0] - - @functools.wraps(func) - def wrapper(*args, **kwargs): - """ - A wrapper which tries to get ``app`` from ``kwargs`` or creates a - new ``app`` otherwise, and activates the application context, so the - decorated function is run inside the application context. - """ - app = kwargs.pop('app', None) - if app is None: - from app import create_app - app = create_app() - - with app.app_context(): - return func(*args, **kwargs) - - # This is the default in Python 3, so we just make it backwards - # compatible with Python 2 - if not hasattr(wrapper, '__wrapped__'): - wrapper.__wrapped__ = func - return Task(wrapper, **kwargs) - - return lambda func: app_context_task(func, **kwargs) diff --git a/tasks/app/dependencies.py b/tasks/app/dependencies.py deleted file mode 100644 index babfcc4f..00000000 --- a/tasks/app/dependencies.py +++ /dev/null @@ -1,103 +0,0 @@ -# encoding: utf-8 -""" -Application dependencies related tasks for Invoke. -""" -import logging -import os -import shutil -import zipfile - -try: - from invoke import ctask as task -except ImportError: # Invoke 0.13 renamed ctask to task - from invoke import task - -from tasks.utils import download_file - - -log = logging.getLogger(__name__) # pylint: disable=invalid-name - - -@task -def install_python_dependencies(context, force=False): - """ - Install Python dependencies listed in requirements.txt. - """ - log.info("Installing project dependencies...") - #context.run("pip3 install -r requirements.txt %s" % ('--upgrade' if force else '')) - log.info("Project dependencies are installed.") - -@task -def install_swagger_ui(context, force=False): - # pylint: disable=unused-argument - """ - Install Swagger UI HTML/JS/CSS assets. 
- """ - log.info("Installing Swagger UI assets...") - - try: - _FileExistsError = FileExistsError - except NameError: - _FileExistsError = OSError - try: - os.makedirs(os.path.join(context.app.static_root, 'bower')) - except _FileExistsError: - pass - - swagger_ui_zip_filepath = os.path.join(context.app.static_root, 'bower', 'swagger-ui.zip') - swagger_ui_root = os.path.join(context.app.static_root, 'bower', 'swagger-ui') - - if force: - try: - os.remove(swagger_ui_zip_filepath) - except FileNotFoundError: - pass - try: - shutil.rmtree(swagger_ui_root) - except FileNotFoundError: - pass - - # We are going to install Swagger UI from a fork which includes useful patches - log.info("Downloading Swagger UI assets...") - download_file( - url="https://github.com/swagger-api/swagger-ui/archive/v2.2.10.zip", - local_filepath=swagger_ui_zip_filepath - ) - - # Unzip swagger-ui.zip/dist into swagger-ui folder - log.info("Unpacking Swagger UI assets...") - with zipfile.ZipFile(swagger_ui_zip_filepath) as swagger_ui_zip_file: - for zipped_member in swagger_ui_zip_file.infolist(): - zipped_member_path = os.path.relpath(zipped_member.filename, 'swagger-ui-2.2.10') - - # We only need the 'dist' folder - try: - commonpath = os.path.commonpath - except AttributeError: # Python 2.x fallback - commonpath = os.path.commonprefix - if not commonpath([zipped_member_path, 'dist']): - continue - - extract_path = os.path.join(swagger_ui_root, zipped_member_path) - if not os.path.split(zipped_member.filename)[1]: - # If the path is folder, just create a folder - try: - os.makedirs(extract_path) - except _FileExistsError: - pass - else: - # Otherwise, read zipped file contents and write them to a file - with swagger_ui_zip_file.open(zipped_member) as zipped_file: - with open(extract_path, mode='wb') as unzipped_file: - unzipped_file.write(zipped_file.read()) - - log.info("Swagger UI is installed.") - -@task -def install(context): - # pylint: disable=unused-argument - """ - Install project dependencies. - """ - install_python_dependencies(context) - #install_swagger_ui(context) diff --git a/tasks/app/env.py b/tasks/app/env.py deleted file mode 100644 index 12b59b51..00000000 --- a/tasks/app/env.py +++ /dev/null @@ -1,44 +0,0 @@ -# encoding: utf-8 -""" -Application environment related tasks for Invoke. -""" - -try: - from invoke import ctask as task -except ImportError: # Invoke 0.13 renamed ctask to task - from invoke import task - - -@task -def enter(context, install_dependencies=True, upgrade_db=True): - """ - Enter into IPython notebook shell with an initialized app. - """ - if install_dependencies: - context.invoke_execute(context, 'app.dependencies.install') - if upgrade_db: - context.invoke_execute(context, 'app.db.upgrade') - context.invoke_execute( - context, - 'app.db.init_development_data', - upgrade_db=False, - skip_on_failure=True - ) - - - import pprint - - from werkzeug import script - import flask - - import app - flask_app = app.create_app() - - def shell_context(): - context = dict(pprint=pprint.pprint) - context.update(vars(flask)) - context.update(vars(app)) - return context - - with flask_app.app_context(): - script.make_shell(shell_context, use_ipython=True)() diff --git a/tasks/app/run.py b/tasks/app/run.py deleted file mode 100644 index fbb4dbbb..00000000 --- a/tasks/app/run.py +++ /dev/null @@ -1,65 +0,0 @@ -# encoding: utf-8 -# pylint: disable=too-many-arguments -""" -Application execution related tasks for Invoke. 
-""" - -try: - from importlib import reload -except ImportError: - pass # Python 2 has built-in reload() function -import os -import platform -import warnings - -try: - from invoke import ctask as task -except ImportError: # Invoke 0.13 renamed ctask to task - from invoke import task - - -@task(default=True) -def run( - context, - host='127.0.0.1', - port=5000, - flask_config=None, - install_dependencies=False, - with_gevent=False, - uwsgi=False, - uwsgi_mode='http', - uwsgi_extra_options='', - ): - """ - Run py-ispyb Server. - """ - if flask_config is not None: - os.environ['FLASK_CONFIG'] = flask_config - - if install_dependencies: - context.invoke_execute(context, 'app.dependencies.install') - - from app import create_app - app = create_app() - - use_reloader = app.debug - if uwsgi: - uwsgi_args = [ - "uwsgi", - "--need-app", - "--manage-script-name", - "--mount", "/=app:create_app()", - "--%s-socket" % uwsgi_mode, "%s:%d" % (host, port), - ] - if use_reloader: - uwsgi_args += ["--python-auto-reload", "2"] - if uwsgi_extra_options: - uwsgi_args += uwsgi_extra_options.split(' ') - os.execvpe('uwsgi', uwsgi_args, os.environ) - elif with_gevent: - from gevent.pywsgi import WSGIServer - http_server = WSGIServer((host, port), app) - http_server.serve_forever() - - else: - return app.run(host=host, port=port, use_reloader=use_reloader) diff --git a/tasks/app/swagger.py b/tasks/app/swagger.py deleted file mode 100644 index 09c909a4..00000000 --- a/tasks/app/swagger.py +++ /dev/null @@ -1,79 +0,0 @@ -""" -Swagger related invoke tasks -""" -from __future__ import print_function - -import logging -import os - -try: - from invoke import ctask as task -except ImportError: # Invoke 0.13 renamed ctask to task - from invoke import task - - -@task(default=True) -def export(context, output_format='json', quiet=False): - """ - Export swagger.json content - """ - # set logging level to ERROR to avoid [INFO] messages in result - logging.getLogger().setLevel(logging.ERROR) - - from app import create_app - app = create_app(flask_config_name='testing') - swagger_content = app.test_client().get('/api/v1/swagger.%s' % output_format).data - if not quiet: - print(swagger_content.decode('utf-8')) - return swagger_content - - -@task -def codegen(context, language, version, dry_run=False, offline=False): - if dry_run: - run = print - else: - run = context.run - - swagger_json_content = export(context, output_format='json', quiet=True) - if dry_run: - run( - "cat >./clients/%(language)s/swagger.json <<'EOF'\n%(swagger_json_content)s\nEOF" - % { - 'language': language, - 'swagger_json_content': swagger_json_content.decode('utf-8'), - } - ) - else: - with open(os.path.join('.', 'clients', language, 'swagger.json'), 'wb') as swagger_json: - swagger_json.write(swagger_json_content) - - if not offline: - run( - "docker pull 'khorolets/swagger-codegen'" - ) - - run( - "cd './clients/%(language)s' ;" - # Tar the config files to pass them into swagger-codegen docker-container. - "tar -c swagger.json swagger_codegen_config.json" - " | docker run --interactive --rm --entrypoint /bin/sh 'khorolets/swagger-codegen' -c \"" - # Unpack them, generate library code with these files. 
- " tar -x ;" - " java -jar '/opt/swagger-codegen/modules/swagger-codegen-cli/target/swagger-codegen-cli.jar'" - " generate" - " --input-spec './swagger.json'" - " --lang '%(language)s'" - " --output './dist'" - " --config './swagger_codegen_config.json'" - " --additional-properties 'packageVersion=%(version)s,projectVersion=%(version)s'" - " >&2 ;" - # tar the generated code and return it. - " tar -c dist\"" - # Finally, untar library source into current directory. - " | tar -x" - % { - 'language': language, - 'version': version, - } - ) diff --git a/tasks/requirements.txt b/tasks/requirements.txt deleted file mode 100644 index 6e0d0bce..00000000 --- a/tasks/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -invoke -colorlog -lockfile -requests -ruamel.yaml diff --git a/tasks/utils.py b/tasks/utils.py deleted file mode 100644 index 29553dab..00000000 --- a/tasks/utils.py +++ /dev/null @@ -1,57 +0,0 @@ -""" -Invoke tasks helper functions -============================= -""" -import logging -import os - - -log = logging.getLogger(__name__) # pylint: disable=invalid-name - - -def download_file( - url, - local_filepath, - chunk_size=1024*512, - lock_timeout=10, - http_timeout=None, - session=None -): - # pylint: disable=too-many-arguments - """ - A helper function which can download a file from a specified ``url`` to a - local file ``local_filepath`` in chunks and using a file lock to prevent - a concurrent download of the same file. - """ - # Avoid unnecessary dependencies when the function is not used. - import lockfile - import requests - - log.debug("Checking file existance in '%s'", local_filepath) - lock = lockfile.LockFile(local_filepath) - try: - lock.acquire(timeout=lock_timeout) - except lockfile.LockTimeout: - log.info( - "File '%s' is locked. Probably another instance is still downloading it.", - local_filepath - ) - raise - try: - if not os.path.exists(local_filepath): - log.info("Downloading a file from '%s' to '%s'", url, local_filepath) - if session is None: - session = requests - response = session.get(url, stream=True, timeout=http_timeout) - if response.status_code != 200: - log.error("Download '%s' is failed: %s", url, response) - response.raise_for_status() - with open(local_filepath, 'wb') as local_file: - for chunk in response.iter_content(chunk_size=chunk_size): - # filter out keep-alive new chunks - if chunk: - local_file.write(chunk) - log.debug("File '%s' has been downloaded", local_filepath) - return local_filepath - finally: - lock.release() diff --git a/tests/authclient.py b/tests/authclient.py new file mode 100644 index 00000000..cef82a19 --- /dev/null +++ b/tests/authclient.py @@ -0,0 +1,53 @@ +from fastapi import Response +from fastapi.testclient import TestClient + + +class AuthClient: + def __init__(self, client: TestClient, base_url: str): + self._client = client + self._base_url = base_url + + def login(self, login: str, permissions: str): + res = self._client.post( + f"{self._base_url}/auth/login", + json={"login": login, "password": permissions, "plugin": "dummy"}, + ) + + assert res.status_code == 201 + + self._token: str = res.json()["token"] + return res + + def client( + self, method: str, url: str, *args, use_base_url=True, **kwargs + ) -> Response: + headers = {"Authorization": f"Bearer {self._token}"} + full_url = url + if use_base_url: + full_url = self._base_url + url + payload = kwargs.pop("payload", None) + if payload is not None: + return getattr(self._client, method)( + full_url, json=payload, headers=headers + ) + else: + return 
getattr(self._client, method)(full_url, headers=headers) + + @property + def token(self): + return self._token + + def get(self, *args, **kwargs): + return self.client("get", *args, **kwargs) + + def post(self, *args, **kwargs): + return self.client("post", *args, **kwargs) + + def put(self, *args, **kwargs): + return self.client("put", *args, **kwargs) + + def patch(self, *args, **kwargs): + return self.client("patch", *args, **kwargs) + + def delete(self, *args, **kwargs): + return self.client("delete", *args, **kwargs) diff --git a/tests/config/auth.yml b/tests/config/auth.yml new file mode 100644 index 00000000..5867b07b --- /dev/null +++ b/tests/config/auth.yml @@ -0,0 +1,5 @@ +AUTH: + - dummy: + ENABLED: true + AUTH_MODULE: "pyispyb.app.extensions.auth.DummyAuthentication" + AUTH_CLASS: "DummyAuthentication" diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..f0891e31 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,81 @@ +from fastapi.testclient import TestClient +import pytest + +from starlette.types import ASGIApp +from pyispyb.app.extensions.database.middleware import get_session +from pyispyb.config import settings +from pyispyb.app.main import app as _app +from tests.authclient import AuthClient +from tests.core.api.utils.permissions import mock_permissions + + +@pytest.fixture() +def client(): + return TestClient(_app) + + +@pytest.fixture +def app() -> ASGIApp: + yield _app + + +@pytest.fixture +def with_db_session(): + with get_session() as db_session: + yield db_session + + +@pytest.fixture +def auth_client_abcd(client: TestClient): + auth = AuthClient(client, settings.api_root) + auth.login(login="abcd", permissions="abcd") + yield auth + + +@pytest.fixture +def auth_client_efgh(client: TestClient): + auth = AuthClient(client, settings.api_root) + auth.login(login="efgh", permissions="efgh") + yield auth + + +@pytest.fixture +def auth_client(client: TestClient): + auth = AuthClient(client, settings.api_root) + yield auth + + +@pytest.fixture +def short_session(): + old_token_exp_time = settings.token_exp_time + + new_token_exp_time = 0 + settings.token_exp_time = new_token_exp_time + + yield new_token_exp_time + settings.token_exp_time = old_token_exp_time + + +@pytest.fixture +def with_beamline_groups(auth_client_efgh: AuthClient, app: ASGIApp): + with mock_permissions(["manage_options"], app): + resp = auth_client_efgh.patch( + "/options", + payload={ + "beamLineGroups": [ + { + "groupName": "BL0x", + "uiGroup": "mx", + "permission": "bl_admin", + "beamLines": [ + {"beamLineName": "BL01"}, + {"beamLineName": "BL02"}, + ], + }, + ] + }, + ) + + assert resp.status_code == 200 + + yield diff --git a/tests/core/__init__.py b/tests/core/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/core/api/admin/test_activity.py b/tests/core/api/admin/test_activity.py new file mode 100644 index 00000000..b38c481c --- /dev/null +++ b/tests/core/api/admin/test_activity.py @@ -0,0 +1,13 @@ +import pytest + +from starlette.types import ASGIApp + +from tests.conftest import AuthClient +from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem + +from tests.core.api.data.admin.activity import test_data_admin_activity + + +@pytest.mark.parametrize("test_elem", test_data_admin_activity, ids=get_elem_name) +def test_activity_list(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) diff --git a/tests/core/api/admin/test_options.py 
b/tests/core/api/admin/test_options.py new file mode 100644 index 00000000..2789a411 --- /dev/null +++ b/tests/core/api/admin/test_options.py @@ -0,0 +1,19 @@ +import pytest + +from starlette.types import ASGIApp + +from tests.conftest import AuthClient +from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem + +from tests.core.api.data.admin.options import test_data_options + + +@pytest.mark.parametrize("test_elem", test_data_options, ids=get_elem_name) +def test_get_options(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) + + +def test_patch_option(auth_client_abcd: AuthClient): + response = auth_client_abcd.patch("/options") + + assert response.status_code == 403 diff --git a/tests/core/api/data/admin/activity.py b/tests/core/api/data/admin/activity.py new file mode 100644 index 00000000..71e37882 --- /dev/null +++ b/tests/core/api/data/admin/activity.py @@ -0,0 +1,27 @@ +from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput + + +test_data_admin_activity = [ + ApiTestElem( + name="list admin activity", + input=ApiTestInput( + permissions=[], + login="abcd", + route="/admin/activity", + ), + expected=ApiTestExpected( + code=403, + ), + ), + ApiTestElem( + name="list admin activity", + input=ApiTestInput( + permissions=["view_activity"], + login="abcd", + route="/admin/activity", + ), + expected=ApiTestExpected( + code=200, + ), + ), +] diff --git a/tests/core/api/data/admin/options.py b/tests/core/api/data/admin/options.py new file mode 100644 index 00000000..bf2ae449 --- /dev/null +++ b/tests/core/api/data/admin/options.py @@ -0,0 +1,38 @@ +from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput + + +test_data_options = [ + ApiTestElem( + name="list ui options", + input=ApiTestInput( + permissions=[], + login="abcd", + route="/options/ui", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="list all options", + input=ApiTestInput( + permissions=[], + login="abcd", + route="/options", + ), + expected=ApiTestExpected( + code=403, + ), + ), + ApiTestElem( + name="list all options", + input=ApiTestInput( + permissions=["manage_options"], + login="abcd", + route="/options", + ), + expected=ApiTestExpected( + code=200, + ), + ), +] diff --git a/tests/core/api/data/datacollections.py b/tests/core/api/data/datacollections.py new file mode 100644 index 00000000..49f43e5e --- /dev/null +++ b/tests/core/api/data/datacollections.py @@ -0,0 +1,107 @@ +from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput + + +test_data_dc_attachments = [ + ApiTestElem( + name="List dc attachments", + input=ApiTestInput( + login="abcd", + route="/datacollections/attachments", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="List dc attachments (admin)", + input=ApiTestInput( + permissions=[ + "bl_admin", + ], + login="efgh", + route="/datacollections/attachments", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="Get dc attachments", + input=ApiTestInput( + login="abcd", + route="/datacollections/attachments/1", + ), + expected=ApiTestExpected( + code=404, + ), + ), +] + +test_dc_images = [ + ApiTestElem( + name="Get datacollection image", + input=ApiTestInput( + login="abcd", + route="/datacollections/images/1", + ), + expected=ApiTestExpected( + code=404, + ), + ), + ApiTestElem( + name="Get datacollection image (admin)", + input=ApiTestInput( + permissions=[ + "bl_admin",
+ ], + login="efgh", + route="/datacollections/images/1", + ), + expected=ApiTestExpected( + code=404, + ), + ), + ApiTestElem( + name="Get datacollection diffraction image", + input=ApiTestInput( + login="abcd", + route="/datacollections/images/diffraction/1", + ), + expected=ApiTestExpected( + code=404, + ), + ), + ApiTestElem( + name="Get datacollection image quality image", + input=ApiTestInput( + login="abcd", + route="/datacollections/images/quality/1", + ), + expected=ApiTestExpected( + code=404, + ), + ), +] + +test_workflows = [ + ApiTestElem( + name="Get workflow steps", + input=ApiTestInput( + login="abcd", + route="/datacollections/workflows/steps?workflowStepId=1", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="Get workflow step attachment", + input=ApiTestInput( + login="abcd", + route="/datacollections/workflows/steps/1?attachmentType=imageResultFilePath", + ), + expected=ApiTestExpected( + code=404, + ), + ), +] diff --git a/tests/core/api/data/events.py b/tests/core/api/data/events.py new file mode 100644 index 00000000..bd671dcf --- /dev/null +++ b/tests/core/api/data/events.py @@ -0,0 +1,27 @@ +from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput + + +test_data_events = [ + ApiTestElem( + name="list events", + input=ApiTestInput( + permissions=[], + login="abcd", + route="/events", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="get event types", + input=ApiTestInput( + permissions=[], + login="abcd", + route="/events/types", + ), + expected=ApiTestExpected( + code=200, + ), + ), +] diff --git a/tests/core/api/data/legacy/authorization.py b/tests/core/api/data/legacy/authorization.py new file mode 100644 index 00000000..bf64d620 --- /dev/null +++ b/tests/core/api/data/legacy/authorization.py @@ -0,0 +1,129 @@ +from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput + +test_data_session = [ + ApiTestElem( + name="all_sessions permission OK", + input=ApiTestInput( + permissions=[ + "all_sessions", + ], + login="efgh", + route="/legacy/em/session/70566/stats", + ), + expected=ApiTestExpected(code=200), + ), + ApiTestElem( + name="no all_sessions permission DENIED", + input=ApiTestInput( + permissions=[], + login="efgh", + route="/legacy/em/session/70566/stats", + ), + expected=ApiTestExpected(code=403), + ), + ApiTestElem( + name="own_sessions permission OK (proposal.personId)", + input=ApiTestInput( + permissions=[ + "own_sessions", + ], + login="pasteur", + route="/legacy/em/session/70566/stats", + ), + expected=ApiTestExpected(code=200), + ), + ApiTestElem( + name="no own_sessions permission DENIED (proposal.personId)", + input=ApiTestInput( + permissions=[], + login="abcd", + route="/legacy/em/session/70566/stats", + ), + expected=ApiTestExpected(code=403), + ), + ApiTestElem( + name="own_sessions permission OK (Session_has_Person.personId)", + input=ApiTestInput( + permissions=[ + "own_sessions", + ], + login="darwin", + route="/legacy/em/session/70565/stats", + ), + expected=ApiTestExpected(code=200), + ), + ApiTestElem( + name="no own_sessions permission DENIED (Session_has_Person.personId)", + input=ApiTestInput( + permissions=[], + login="darwin", + route="/legacy/em/session/70565/stats", + ), + expected=ApiTestExpected(code=403), + ), + ApiTestElem( + name="own_sessions permission DENIED", + input=ApiTestInput( + permissions=[ + "own_sessions", + ], + login="abcd", + route="/legacy/em/session/70566/stats", + ), + 
expected=ApiTestExpected(code=403), + ), +] + +test_data_proposal = [ + ApiTestElem( + name="all_proposals permission OK", + input=ApiTestInput( + permissions=[ + "all_proposals", + ], + login="efgh", + route="/legacy/proposals/MX1", + ), + expected=ApiTestExpected(code=200), + ), + ApiTestElem( + name="no all_proposals permission DENIED", + input=ApiTestInput( + permissions=[], + login="efgh", + route="/legacy/proposals/MX1", + ), + expected=ApiTestExpected(code=403), + ), + ApiTestElem( + name="own_proposals permission OK (proposal.personId)", + input=ApiTestInput( + permissions=[ + "own_proposals", + ], + login="pasteur", + route="/legacy/proposals/MX1", + ), + expected=ApiTestExpected(code=200), + ), + ApiTestElem( + name="no own_proposals permission DENIED (proposal.personId)", + input=ApiTestInput( + permissions=[], + login="pasteur", + route="/legacy/proposals/MX1", + ), + expected=ApiTestExpected(code=403), + ), + ApiTestElem( + name="own_proposals permission DENIED", + input=ApiTestInput( + permissions=[ + "own_proposals", + ], + login="abcd", + route="/legacy/proposals/MX1", + ), + expected=ApiTestExpected(code=403), + ), +] diff --git a/tests/core/api/data/legacy/proposals.py b/tests/core/api/data/legacy/proposals.py new file mode 100644 index 00000000..76d59320 --- /dev/null +++ b/tests/core/api/data/legacy/proposals.py @@ -0,0 +1,134 @@ +from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput + +test_data_proposal_list = [ + ApiTestElem( + name="list own_proposals", + input=ApiTestInput( + permissions=[ + "own_proposals", + ], + login="pasteur", + route="/legacy/proposals", + ), + expected=ApiTestExpected( + code=200, + res=[ + { + "Proposal_proposalId": 9096, + "Proposal_proposalType": "MX", + "Proposal_personId": 404290, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_proposalNumber": "1", + } + ], + ), + ), + ApiTestElem( + name="empty list own_proposals", + input=ApiTestInput( + permissions=[ + "own_proposals", + ], + login="efgh", + route="/legacy/proposals", + ), + expected=ApiTestExpected(code=200, res=[]), + ), + ApiTestElem( + name="list all_proposals", + input=ApiTestInput( + permissions=[ + "all_proposals", + ], + login="pasteur", + route="/legacy/proposals", + ), + expected=ApiTestExpected(code=200), + ), +] + +test_data_proposal_info = [ + ApiTestElem( + name="own_proposals OK proposal name", + input=ApiTestInput( + permissions=[ + "own_proposals", + ], + login="pasteur", + route="/legacy/proposals/MX1", + ), + expected=ApiTestExpected( + code=200, + res={ + "proposal": { + "proposalId": 9096, + "proposalCode": "MX", + "proposalType": "MX", + "externalId": None, + "personId": 404290, + "title": "TEST", + "proposalNumber": "1", + "bltimeStamp": "2022-05-10T07:59:31", + "state": "Open", + } + }, + ), + ), + ApiTestElem( + name="own_proposals OK proposal id", + input=ApiTestInput( + permissions=[ + "own_proposals", + ], + login="pasteur", + route="/legacy/proposals/9096", + ), + expected=ApiTestExpected( + code=200, + res={ + "proposal": { + "proposalId": 9096, + "proposalCode": "MX", + "proposalType": "MX", + "externalId": None, + "personId": 404290, + "title": "TEST", + "proposalNumber": "1", + "bltimeStamp": "2022-05-10T07:59:31", + "state": "Open", + } + }, + ), + ), + ApiTestElem( + name="own_proposals NOK", + input=ApiTestInput( + permissions=[ + "own_proposals", + ], + login="efgh", + route="/legacy/proposals/9096", + ), + expected=ApiTestExpected( + code=403, + res={ + "detail": "User efgh (permissions 
assigned: ['own_proposals']) is not authorized to access proposal 9096." + }, + ), + ), + ApiTestElem( + name="invalid proposal", + input=ApiTestInput( + permissions=[ + "all_proposals", + ], + login="pasteur", + route="/legacy/proposals/NOT_A_VALID_PROPOSAL", + ), + expected=ApiTestExpected( + code=200, + res={"proposal": None}, + ), + ), +] diff --git a/tests/core/api/data/legacy/sessions.py b/tests/core/api/data/legacy/sessions.py new file mode 100644 index 00000000..6af2ccad --- /dev/null +++ b/tests/core/api/data/legacy/sessions.py @@ -0,0 +1,796 @@ +from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput + + +test_data_session_list = [ + ApiTestElem( + name="list own_sessions", + input=ApiTestInput( + permissions=[ + "own_sessions", + ], + login="pasteur", + route="/legacy/sessions", + ), + expected=ApiTestExpected( + code=200, + res=[ + { + "sessionId": 70565, + "expSessionPk": 78889, + "beamLineSetupId": 1761425, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2016-11-22T09:30:00", + "BLSession_endDate": "2016-11-23T17:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 1, + "comments": None, + "beamLineOperator": "DARWIN C", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "91481", + "BLSession_lastUpdate": "2016-11-23T17:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + { + "sessionId": 70566, + "expSessionPk": 78888, + "beamLineSetupId": 1761426, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2016-11-23T09:30:00", + "BLSession_endDate": "2016-11-23T17:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 1, + "comments": None, + "beamLineOperator": "PASTEUR L", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "17074", + "BLSession_lastUpdate": "2016-11-23T17:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + { + "sessionId": 70567, + "expSessionPk": 56630, + 
"beamLineSetupId": 1761427, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2016-07-22T09:30:00", + "BLSession_endDate": "2016-07-23T08:00:00", + "beamLineName": "BL02", + "scheduled": 1, + "nbShifts": 3, + "comments": None, + "beamLineOperator": "DARWIN C", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "91481", + "BLSession_lastUpdate": "2016-07-23T08:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + { + "sessionId": 70568, + "expSessionPk": 79910, + "beamLineSetupId": 1761428, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2017-05-12T09:30:00", + "BLSession_endDate": "2017-05-13T08:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 3, + "comments": None, + "beamLineOperator": "PASTEUR L", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "17074", + "BLSession_lastUpdate": "2017-05-13T08:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + ], + ), + ), + ApiTestElem( + name="empty list own_sessions", + input=ApiTestInput( + permissions=[ + "own_sessions", + ], + login="efgh", + route="/legacy/sessions", + ), + expected=ApiTestExpected(code=200, res=[]), + ), + ApiTestElem( + name="list all_sessions", + input=ApiTestInput( + permissions=[ + "all_sessions", + ], + login="pasteur", + route="/legacy/sessions", + ), + expected=ApiTestExpected(code=200), + ), +] + + +test_data_session_dates_list = [ + ApiTestElem( + name="list own_sessions", + input=ApiTestInput( + permissions=[ + "own_sessions", + ], + login="pasteur", + route="/legacy/sessions/date/20170512/20170513", + ), + expected=ApiTestExpected( + code=200, + res=[ + { + "sessionId": 70568, + "expSessionPk": 79910, + "beamLineSetupId": 1761428, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2017-05-12T09:30:00", + "BLSession_endDate": "2017-05-13T08:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 3, + "comments": None, + 
"beamLineOperator": "PASTEUR L", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "17074", + "BLSession_lastUpdate": "2017-05-13T08:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + ], + ), + ), + ApiTestElem( + name="empty list own_sessions", + input=ApiTestInput( + permissions=[ + "own_sessions", + ], + login="pasteur", + route="/legacy/sessions/date/20000101/20000102", + ), + expected=ApiTestExpected(code=200, res=[]), + ), + ApiTestElem( + name="list all_sessions", + input=ApiTestInput( + permissions=[ + "all_sessions", + ], + login="efgh", + route="/legacy/sessions/date/20170512/20170513", + ), + expected=ApiTestExpected( + code=200, + res=[ + { + "sessionId": 70568, + "expSessionPk": 79910, + "beamLineSetupId": 1761428, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2017-05-12T09:30:00", + "BLSession_endDate": "2017-05-13T08:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 3, + "comments": None, + "beamLineOperator": "PASTEUR L", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "17074", + "BLSession_lastUpdate": "2017-05-13T08:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + ], + ), + ), +] + + +test_data_session_proposal_list = [ + ApiTestElem( + name="list own_sessions name", + input=ApiTestInput( + permissions=[ + "own_sessions", + ], + login="pasteur", + route="/legacy/sessions/proposal/MX1", + ), + expected=ApiTestExpected( + code=200, + res=[ + { + "sessionId": 70565, + "expSessionPk": 78889, + "beamLineSetupId": 1761425, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2016-11-22T09:30:00", + "BLSession_endDate": "2016-11-23T17:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 1, + "comments": None, + "beamLineOperator": "DARWIN C", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": 
None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "91481", + "BLSession_lastUpdate": "2016-11-23T17:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + { + "sessionId": 70566, + "expSessionPk": 78888, + "beamLineSetupId": 1761426, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2016-11-23T09:30:00", + "BLSession_endDate": "2016-11-23T17:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 1, + "comments": None, + "beamLineOperator": "PASTEUR L", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "17074", + "BLSession_lastUpdate": "2016-11-23T17:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + { + "sessionId": 70567, + "expSessionPk": 56630, + "beamLineSetupId": 1761427, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2016-07-22T09:30:00", + "BLSession_endDate": "2016-07-23T08:00:00", + "beamLineName": "BL02", + "scheduled": 1, + "nbShifts": 3, + "comments": None, + "beamLineOperator": "DARWIN C", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "91481", + "BLSession_lastUpdate": "2016-07-23T08:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + { + "sessionId": 70568, + "expSessionPk": 79910, + "beamLineSetupId": 1761428, + "proposalId": 9096, + "projectCode": None, + 
"BLSession_startDate": "2017-05-12T09:30:00", + "BLSession_endDate": "2017-05-13T08:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 3, + "comments": None, + "beamLineOperator": "PASTEUR L", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "17074", + "BLSession_lastUpdate": "2017-05-13T08:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + ], + ), + ), + ApiTestElem( + name="list own_sessions id", + input=ApiTestInput( + permissions=[ + "own_sessions", + ], + login="pasteur", + route="/legacy/sessions/proposal/9096", + ), + expected=ApiTestExpected( + code=200, + res=[ + { + "sessionId": 70565, + "expSessionPk": 78889, + "beamLineSetupId": 1761425, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2016-11-22T09:30:00", + "BLSession_endDate": "2016-11-23T17:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 1, + "comments": None, + "beamLineOperator": "DARWIN C", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "91481", + "BLSession_lastUpdate": "2016-11-23T17:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + { + "sessionId": 70566, + "expSessionPk": 78888, + "beamLineSetupId": 1761426, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2016-11-23T09:30:00", + "BLSession_endDate": "2016-11-23T17:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 1, + "comments": None, + "beamLineOperator": "PASTEUR L", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "17074", + "BLSession_lastUpdate": "2016-11-23T17:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + 
"Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + { + "sessionId": 70567, + "expSessionPk": 56630, + "beamLineSetupId": 1761427, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2016-07-22T09:30:00", + "BLSession_endDate": "2016-07-23T08:00:00", + "beamLineName": "BL02", + "scheduled": 1, + "nbShifts": 3, + "comments": None, + "beamLineOperator": "DARWIN C", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "91481", + "BLSession_lastUpdate": "2016-07-23T08:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + { + "sessionId": 70568, + "expSessionPk": 79910, + "beamLineSetupId": 1761428, + "proposalId": 9096, + "projectCode": None, + "BLSession_startDate": "2017-05-12T09:30:00", + "BLSession_endDate": "2017-05-13T08:00:00", + "beamLineName": "BL01", + "scheduled": 1, + "nbShifts": 3, + "comments": None, + "beamLineOperator": "PASTEUR L", + "visit_number": None, + "bltimeStamp": "2022-05-10T07:59:32", + "usedFlag": None, + "sessionTitle": None, + "structureDeterminations": None, + "dewarTransport": None, + "databackupFrance": None, + "databackupEurope": None, + "operatorSiteNumber": "17074", + "BLSession_lastUpdate": "2017-05-13T08:00:00", + "BLSession_protectedData": None, + "Proposal_title": "TEST", + "Proposal_proposalCode": "MX", + "Proposal_ProposalNumber": "1", + "Proposal_ProposalType": "MX", + "Person_personId": 404290, + "Person_familyName": "PASTEUR", + "Person_givenName": "Louis", + "Person_emailAddress": "test@test.test", + "energyScanCount": 0, + "sampleCount": 0, + "imagesCount": None, + "testDataCollectionGroupCount": 0, + "dataCollectionGroupCount": 0, + "EMdataCollectionGroupCount": 0, + "xrfSpectrumCount": 0, + "hplcCount": 0, + "sampleChangerCount": 0, + "calibrationCount": 0, + "lastExperimentDataCollectionGroup": None, + "lastEndTimeDataCollectionGroup": None, + }, + ], + ), + ), + ApiTestElem( + name="list own_sessions empty", + input=ApiTestInput( + permissions=[ + "own_sessions", + ], + login="efgh", + route="/legacy/sessions/proposal/MX1", + ), + expected=ApiTestExpected(code=200, res=[]), + ), + ApiTestElem( + name="no rights", + input=ApiTestInput( + permissions=[], + login="pasteur", + route="/legacy/sessions/proposal/MX1", + ), + expected=ApiTestExpected( + code=403, + res={"detail": "Not Authorized"}, + ), + ), + ApiTestElem( + 
name="list proposal does not exist", + input=ApiTestInput( + permissions=[ + "own_sessions", + ], + login="pasteur", + route="/legacy/sessions/proposal/UNKN", + ), + expected=ApiTestExpected(code=200, res=[]), + ), + ApiTestElem( + name="list all_sessions", + input=ApiTestInput( + permissions=[ + "all_sessions", + ], + login="pasteur", + route="/legacy/sessions/proposal/MX1", + ), + expected=ApiTestExpected(code=200), + ), +] diff --git a/tests/core/api/data/proposals.py b/tests/core/api/data/proposals.py new file mode 100644 index 00000000..eee46969 --- /dev/null +++ b/tests/core/api/data/proposals.py @@ -0,0 +1,64 @@ +from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput + + +test_data_proposal_list = [ + ApiTestElem( + name="List proposals", + input=ApiTestInput( + login="abcd", + route="/proposals", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="abcd / MX1 / 404", + input=ApiTestInput( + login="abcd", + route="/proposals/MX1", + ), + expected=ApiTestExpected( + code=404, + ), + ), + ApiTestElem( + name="List proposals (admin)", + input=ApiTestInput( + permissions=[ + "bl_admin", + ], + login="efgh", + route="/proposals", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="efgh / MX1 / 200", + input=ApiTestInput( + login="efgh", + permissions=[ + "bl_admin", + ], + route="/proposals/MX1", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="efgh / blc00001 / 404", + input=ApiTestInput( + login="efgh", + permissions=[ + "bl_admin", + ], + route="/proposals/blc00001", + ), + expected=ApiTestExpected( + code=404, + ), + ), +] diff --git a/tests/core/api/data/proteins.py b/tests/core/api/data/proteins.py new file mode 100644 index 00000000..de4df576 --- /dev/null +++ b/tests/core/api/data/proteins.py @@ -0,0 +1,38 @@ +from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput + + +test_data_proteins_list = [ + ApiTestElem( + name="List proteins", + input=ApiTestInput( + login="abcd", + route="/proteins", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="Get a protein", + input=ApiTestInput( + login="abcd", + route="/proteins/1", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="List proteins (admin)", + input=ApiTestInput( + permissions=[ + "bl_admin", + ], + login="efgh", + route="/proteins", + ), + expected=ApiTestExpected( + code=200, + ), + ), +] diff --git a/tests/core/api/data/samples.py b/tests/core/api/data/samples.py new file mode 100644 index 00000000..fa6168f7 --- /dev/null +++ b/tests/core/api/data/samples.py @@ -0,0 +1,111 @@ +from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput + + +test_data_samples_list = [ + ApiTestElem( + name="List samples", + input=ApiTestInput( + login="abcd", + route="/samples", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="Get a samples", + input=ApiTestInput( + login="abcd", + route="/samples/1", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="List samples (admin)", + input=ApiTestInput( + permissions=[ + "bl_admin", + ], + login="efgh", + route="/samples", + ), + expected=ApiTestExpected( + code=200, + ), + ), +] + +test_data_subsamples_list = [ + ApiTestElem( + name="List sub samples", + input=ApiTestInput( + login="abcd", + route="/samples/sub", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="Get a sub sample", + input=ApiTestInput( + 
login="abcd", + route="/samples/sub/2", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="List sub samples (admin)", + input=ApiTestInput( + permissions=[ + "bl_admin", + ], + login="efgh", + route="/samples/sub", + ), + expected=ApiTestExpected( + code=200, + ), + ), +] + + +test_data_sampleimages_list = [ + ApiTestElem( + name="List sample images", + input=ApiTestInput( + login="abcd", + route="/samples/images", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="Get sample image", + input=ApiTestInput( + login="abcd", + route="/samples/images/1", + ), + expected=ApiTestExpected( + code=404, + ), + ), + ApiTestElem( + name="List sample images (admin)", + input=ApiTestInput( + permissions=[ + "bl_admin", + ], + login="efgh", + route="/samples/images", + ), + expected=ApiTestExpected( + code=200, + ), + ), +] diff --git a/tests/core/api/data/sessions.py b/tests/core/api/data/sessions.py new file mode 100644 index 00000000..90bd56e0 --- /dev/null +++ b/tests/core/api/data/sessions.py @@ -0,0 +1,51 @@ +from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput + + +test_data_sessions_list = [ + ApiTestElem( + name="List sessions", + input=ApiTestInput( + login="abcd", + route="/sessions", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="abcd / blc00001-1 / 200", + input=ApiTestInput( + login="abcd", + route="/sessions/1", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="List sessions (admin)", + input=ApiTestInput( + permissions=[ + "bl_admin", + ], + login="efgh", + route="/sessions", + ), + expected=ApiTestExpected( + code=200, + ), + ), + ApiTestElem( + name="efgh / blc00001-1 / 404", + input=ApiTestInput( + login="efgh", + permissions=[ + "bl_admin", + ], + route="/sessions/1", + ), + expected=ApiTestExpected( + code=404, + ), + ), +] diff --git a/tests/core/api/data/userportalsync_create.py b/tests/core/api/data/userportalsync_create.py new file mode 100644 index 00000000..ac2c141c --- /dev/null +++ b/tests/core/api/data/userportalsync_create.py @@ -0,0 +1,237 @@ +from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput + +test_data_proposal_userportalsync_create = { + "proposal": { + "title": "Proposal title", + "proposalCode": "I", + "proposalNumber": "20140076", + "proposalType": "MX", + "externalId": 20140076, + "persons": [ + { + "givenName": "Amity", + "familyName": "Weaver", + "title": "Dr.", + "externalId": 157, + "login": "aewaver", + "emailAddress": "test@test.test", + "laboratory": { + "name": "DESY", + "city": "HAMBURG", + "country": "DE", + "address": "Notkestr. 85\n22706", + "laboratoryExtPk": 1, + }, + }, + { + "givenName": "Kirk", + "familyName": "Chambers", + "externalId": 158, + "login": "kchambers", + "emailAddress": "test@test.test", + "laboratory": { + "name": "Lab0", + "city": "GRENOBLE", + "country": "FR", + "address": "71 avenue des Martyrs\nCS 40220\n38043\n", + }, + }, + { + "givenName": "Sherri", + "familyName": "Quinlan", + "externalId": 159, + "login": "squinlan", + "emailAddress": "test@test.test", + }, + ], + "labcontacts": [ + { + "cardName": "DESY - Amity Weaver", + "person": { + "givenName": "Amity", + "familyName": "Weaver", + "title": "Dr.", + "externalId": 157, + "login": "aewaver", + "emailAddress": "test@test.test", + "laboratory": { + "name": "DESY", + "city": "HAMBURG", + "country": "DE", + "address": "Notkestr. 
85\n22706", + "laboratoryExtPk": 1, + }, + }, + }, + { + "cardName": "MAXIV - Michael Lingle", + "person": { + "givenName": "Michael", + "familyName": "Lingle", + "externalId": 181, + "login": "mlingle", + "emailAddress": "test@test.test", + "laboratory": { + "name": "MAXIV", + "city": "LUND", + "country": "SE", + "address": "Fotongatan 2\n224 84", + "laboratoryExtPk": 5, + }, + }, + }, + ], + }, + "sessions": [ + { + "externalId": 23458, + "startDate": "2022-05-20T14:09:20.340Z", + "endDate": "2022-05-21T14:09:20.340Z", + "beamLineName": "P11", + "scheduled": 1, + "nbShifts": 2, + "comments": "Testing a session import", + "beamLineOperator": "Kaye Wiley", + "visit_number": 0, + "usedFlag": 0, + "sessionTitle": "Session 1 user portal sync", + "structureDeterminations": 0, + "dewarTransport": 0, + "databackupFrance": 0, + "databackupEurope": 0, + "operatorSiteNumber": "234", + "nbReimbDewars": 0, + "persons": [ + { + "givenName": "Saim", + "familyName": "Gross", + "externalId": 160, + "login": "sgross", + "emailAddress": "test@test.test", + "laboratory": { + "name": "DESY", + "city": "HAMBURG", + "country": "DE", + "address": "Notkestr. 85\n22706", + "laboratoryExtPk": 1, + }, + "session_options": {"role": "Local Contact", "remote": 0}, + }, + { + "givenName": "Jaspal", + "familyName": "Bernal", + "externalId": 161, + "login": "jbernal", + "title": "Dr.", + "phoneNumber": "+123456789", + "emailAddress": "test@test.test", + "session_options": {"role": "Principal Investigator", "remote": 1}, + }, + ], + }, + { + "expSessionPk": 23459, + "startDate": "2022-06-20T16:09:20.340Z", + "endDate": "2022-06-21T16:09:20.340Z", + "beamLineName": "P11", + "scheduled": 1, + "nbShifts": 2, + "comments": "Testing a second session import", + "beamLineOperator": "Kaye Wiley", + "visit_number": 0, + "usedFlag": 0, + "sessionTitle": "Session 2 user portal sync", + "structureDeterminations": 0, + "dewarTransport": 0, + "databackupFrance": 0, + "databackupEurope": 0, + "operatorSiteNumber": "235", + "nbReimbDewars": 0, + "persons": [ + { + "givenName": "Saim", + "familyName": "Gross", + "externalId": 160, + "login": "sgross", + "emailAddress": "test@test.test", + "laboratory": { + "name": "DESY", + "city": "HAMBURG", + "country": "DE", + "address": "Notkestr. 85\n22706", + "laboratoryExtPk": 1, + }, + "session_options": {"role": "Local Contact", "remote": 0}, + }, + { + "givenName": "Jaspal", + "familyName": "Bernal", + "externalId": 161, + "login": "jbernal", + "title": "Dr.", + "phoneNumber": "+123456789", + "emailAddress": "test@test.test", + "session_options": {"role": "Principal Investigator", "remote": 1}, + }, + ], + }, + ], + "proteins": [ + { + "name": "Peralosyde Ratrei", + "acronym": "P4R2", + "hazardGroup": 1, + "containmentLevel": 1, + "externalId": 3789, + "person": { + "givenName": "Amity", + "familyName": "Weaver", + "externalId": 157, + "login": "aewaver", + "emailAddress": "test@test.test", + "laboratory": { + "name": "DESY", + "city": "HAMBURG", + "country": "DE", + "address": "Notkestr. 85\n22706", + "laboratoryExtPk": 1, + }, + }, + }, + { + "name": "Dithiothreitol", + "acronym": "DTT5", + "hazardGroup": 1, + "containmentLevel": 1, + "person": { + "givenName": "Amity", + "familyName": "Weaver", + "login": "aewaver", + "emailAddress": "test@test.test", + "laboratory": { + "name": "DESY", + "city": "HAMBURG", + "country": "DE", + "address": "Notkestr. 
85\n22706", + "laboratoryExtPk": 1, + }, + }, + }, + ], +} + +test_route_uportal_sync_create = [ + ApiTestElem( + name="Run User Portal Sync - Create", + input=ApiTestInput( + permissions=["uportal_sync"], + login="efgh", + route="/webservices/userportalsync/sync_proposal", + method="post", + payload=test_data_proposal_userportalsync_create, + ), + expected=ApiTestExpected( + code=200, + ), + ), +] diff --git a/tests/core/api/data/userportalsync_update.py b/tests/core/api/data/userportalsync_update.py new file mode 100644 index 00000000..b9f85c85 --- /dev/null +++ b/tests/core/api/data/userportalsync_update.py @@ -0,0 +1,258 @@ +from tests.core.api.utils.apitest import ApiTestElem, ApiTestExpected, ApiTestInput + +test_data_proposal_userportalsync_update = { + "proposal": { + "title": "Proposal title updated", + "proposalCode": "I", + "proposalNumber": "20140076", + "proposalType": "MX", + "externalId": 20140076, + "persons": [ + { + "givenName": "Amity", + "familyName": "Weaver", + "title": "Dr.", + "externalId": 157, + "login": "aewaver", + "emailAddress": "aewaver@test.test", + "laboratory": { + "name": "DESY", + "city": "HAMBURG", + "country": "DE", + "address": "Notkestr. 85\n22706", + "laboratoryExtPk": 1, + }, + }, + { + "givenName": "Kirk", + "familyName": "Chambers", + "externalId": 158, + "login": "kchambers", + "emailAddress": "test@test.test", + "laboratory": { + "name": "ESRF", + "city": "GRENOBLE", + "country": "FR", + "address": "71 avenue des Martyrs\nCS 40220\n38043\n", + }, + }, + { + "givenName": "Sherri", + "familyName": "Quinlan", + "externalId": 159, + "login": "squinlan", + "emailAddress": "test@test.test", + "laboratory": { + "name": "ALBA", + "city": "BARCELONA", + "country": "ES", + "address": "Carrer de la Llum 2-26.\n 08290\n Cerdanyola del Valles", + "laboratoryExtPk": 2, + }, + }, + ], + "labcontacts": [ + { + "cardName": "DESY - Amity Weaver", + "person": { + "givenName": "Amity", + "familyName": "Weaver", + "title": "Dr.", + "externalId": 157, + "login": "aewaver", + "emailAddress": "aewaver@test.test", + "laboratory": { + "name": "DESY", + "city": "HAMBURG", + "country": "DE", + "address": "Notkestr. 85\n22706", + "laboratoryExtPk": 1, + }, + }, + }, + { + "cardName": "MAXIV - Michael Lingle", + "person": { + "givenName": "Michael", + "familyName": "Lingle", + "externalId": 181, + "login": "mlingle", + "emailAddress": "test@test.test", + "laboratory": { + "name": "MAXIV", + "city": "LUND", + "country": "SE", + "address": "Fotongatan 2\n224 84", + "laboratoryExtPk": 5, + }, + }, + }, + ], + }, + "sessions": [ + { + "externalId": 23458, + "startDate": "2022-05-20T14:09:20.340Z", + "endDate": "2022-05-21T14:09:20.340Z", + "beamLineName": "ID23-1", + "scheduled": 1, + "nbShifts": 2, + "comments": "Testing a session import", + "beamLineOperator": "Kaye Wiley", + "visit_number": 0, + "usedFlag": 0, + "sessionTitle": "Session 1 user portal sync", + "structureDeterminations": 0, + "dewarTransport": 0, + "databackupFrance": 0, + "databackupEurope": 0, + "operatorSiteNumber": "234", + "nbReimbDewars": 0, + "persons": [ + { + "givenName": "Saim", + "familyName": "Gross", + "externalId": 160, + "login": "sgross", + "emailAddress": "test@test.test", + "laboratory": { + "name": "DESY", + "city": "HAMBURG", + "country": "DE", + "address": "Notkestr. 
85\n22706", + "laboratoryExtPk": 1, + }, + "session_options": {"role": "Local Contact", "remote": 0}, + }, + { + "givenName": "Jaspal", + "familyName": "Bernal", + "externalId": 161, + "login": "jbernal", + "title": "Dr.", + "phoneNumber": "+123456789", + "emailAddress": "test@test.test", + "laboratory": { + "name": "DESY", + "city": "HAMBURG", + "country": "DE", + "address": "Notkestr. 85\n22706", + "laboratoryExtPk": 1, + }, + "session_options": {"role": "Principal Investigator", "remote": 1}, + }, + ], + }, + { + "expSessionPk": 23459, + "startDate": "2022-06-20T16:09:20.340Z", + "endDate": "2022-06-21T16:09:20.340Z", + "beamLineName": "P11", + "scheduled": 1, + "nbShifts": 2, + "comments": "Testing a second session import", + "beamLineOperator": "Kaye Wiley", + "visit_number": 0, + "usedFlag": 0, + "sessionTitle": "Session 2 user portal sync", + "structureDeterminations": 0, + "dewarTransport": 0, + "databackupFrance": 0, + "databackupEurope": 0, + "operatorSiteNumber": "235", + "nbReimbDewars": 0, + "persons": [ + { + "givenName": "Saim", + "familyName": "Gross", + "externalId": 160, + "login": "sgross", + "emailAddress": "test@test.test", + "laboratory": { + "name": "DESY", + "city": "HAMBURG", + "country": "DE", + "address": "Notkestr. 85\n22706", + "laboratoryExtPk": 1, + }, + "session_options": {"role": "Local Contact", "remote": 0}, + }, + { + "givenName": "Jaspal", + "familyName": "Bernal", + "externalId": 161, + "login": "jbernal", + "title": "Dr.", + "phoneNumber": "+123456789", + "emailAddress": "test@test.test", + "laboratory": { + "name": "DESY", + "city": "HAMBURG", + "country": "DE", + "address": "Notkestr. 85\n22706", + "laboratoryExtPk": 1, + }, + "session_options": {"role": "Principal Investigator", "remote": 1}, + }, + ], + }, + ], + "proteins": [ + { + "name": "Peralosyde Ratrei", + "acronym": "P4R2", + "hazardGroup": 1, + "containmentLevel": 1, + "externalId": 3789, + "person": { + "givenName": "Amity", + "familyName": "Weaver", + "externalId": 157, + "login": "aewaver", + "emailAddress": "aewaver@test.test", + "laboratory": { + "name": "DESY", + "city": "HAMBURG", + "country": "DE", + "address": "Notkestr. 85\n22706", + "laboratoryExtPk": 1, + }, + }, + }, + { + "name": "Dithiothreitol", + "acronym": "DTT5", + "hazardGroup": 1, + "containmentLevel": 1, + "person": { + "givenName": "Amity", + "familyName": "Weaver", + "login": "aewaver", + "emailAddress": "aewaver@test.test", + "laboratory": { + "name": "DESY", + "city": "HAMBURG", + "country": "DE", + "address": "Notkestr. 
85\n22706", + "laboratoryExtPk": 1, + }, + }, + }, + ], +} + +test_route_uportal_sync_update = [ + ApiTestElem( + name="Run User Portal Sync - Update", + input=ApiTestInput( + permissions=["uportal_sync"], + login="efgh", + route="/webservices/userportalsync/sync_proposal", + method="post", + payload=test_data_proposal_userportalsync_update, + ), + expected=ApiTestExpected( + code=200, + ), + ), +] diff --git a/tests/core/api/legacy/test_authorization.py b/tests/core/api/legacy/test_authorization.py new file mode 100644 index 00000000..4943f6b3 --- /dev/null +++ b/tests/core/api/legacy/test_authorization.py @@ -0,0 +1,25 @@ +import pytest + +from starlette.types import ASGIApp + +from tests.conftest import AuthClient +from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem + +from tests.core.api.data.legacy.authorization import ( + test_data_session, + test_data_proposal, +) + + +@pytest.mark.parametrize("test_elem", test_data_session, ids=get_elem_name) +def test_authorization_session( + auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp +): + run_test(auth_client, test_elem, app) + + +@pytest.mark.parametrize("test_elem", test_data_proposal, ids=get_elem_name) +def test_authorization_proposal( + auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp +): + run_test(auth_client, test_elem, app) diff --git a/tests/core/api/legacy/test_proposals_legacy.py b/tests/core/api/legacy/test_proposals_legacy.py new file mode 100644 index 00000000..266f1c07 --- /dev/null +++ b/tests/core/api/legacy/test_proposals_legacy.py @@ -0,0 +1,21 @@ +import pytest + +from starlette.types import ASGIApp + +from tests.conftest import AuthClient +from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem + +from tests.core.api.data.legacy.proposals import ( + test_data_proposal_list, + test_data_proposal_info, +) + + +@pytest.mark.parametrize("test_elem", test_data_proposal_list, ids=get_elem_name) +def test_proposal_list(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) + + +@pytest.mark.parametrize("test_elem", test_data_proposal_info, ids=get_elem_name) +def test_proposal_info(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) diff --git a/tests/core/api/legacy/test_sessions_legacy.py b/tests/core/api/legacy/test_sessions_legacy.py new file mode 100644 index 00000000..26ba4838 --- /dev/null +++ b/tests/core/api/legacy/test_sessions_legacy.py @@ -0,0 +1,33 @@ +import pytest + +from starlette.types import ASGIApp + +from tests.conftest import AuthClient +from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem + +from tests.core.api.data.legacy.sessions import ( + test_data_session_proposal_list, + test_data_session_list, + test_data_session_dates_list, +) + + +@pytest.mark.parametrize("test_elem", test_data_session_list, ids=get_elem_name) +def test_session_list(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) + + +@pytest.mark.parametrize("test_elem", test_data_session_dates_list, ids=get_elem_name) +def test_session_dates_list( + auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp +): + run_test(auth_client, test_elem, app) + + +@pytest.mark.parametrize( + "test_elem", test_data_session_proposal_list, ids=get_elem_name +) +def test_session_proposal_list( + auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp +): + run_test(auth_client, test_elem, app) 
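All of the parametrized test modules in this diff funnel through the same data-driven harness: each `ApiTestElem` from a data module is handed to `run_test`, which logs in as the requested user, mocks the permission set, issues the request, and checks the result against `ApiTestExpected`. The harness itself (`tests/core/api/utils/apitest.py`) is not included in this diff, so the following is only a minimal sketch of how such a helper could be wired up, reusing the `AuthClient` and `mock_permissions` utilities introduced above; the dataclass fields simply mirror the keyword arguments used by the data modules:

```python
# Minimal sketch only -- the real run_test lives in tests/core/api/utils/apitest.py,
# which is not part of this diff. Field names mirror the keyword arguments used in
# the data modules above (name, permissions, login, route, method, payload, res).
from dataclasses import dataclass, field
from typing import Any, Optional

from starlette.types import ASGIApp

from tests.authclient import AuthClient
from tests.core.api.utils.permissions import mock_permissions


@dataclass
class ApiTestInput:
    login: str
    route: str
    permissions: list = field(default_factory=list)
    method: str = "get"
    payload: Optional[dict] = None


@dataclass
class ApiTestExpected:
    code: int
    res: Optional[Any] = None  # omitted when only the status code matters


@dataclass
class ApiTestElem:
    name: str
    input: ApiTestInput
    expected: ApiTestExpected


def get_elem_name(elem: ApiTestElem) -> str:
    # Used as the pytest id in @pytest.mark.parametrize(..., ids=get_elem_name).
    return elem.name


def run_test(auth_client: AuthClient, elem: ApiTestElem, app: ASGIApp) -> None:
    # The dummy auth plugin accepts the login as password, so logging in as the
    # requested user is enough; the permission set is mocked for the request.
    auth_client.login(login=elem.input.login, permissions=elem.input.login)
    with mock_permissions(elem.input.permissions, app):
        response = getattr(auth_client, elem.input.method)(
            elem.input.route, payload=elem.input.payload
        )
    assert response.status_code == elem.expected.code
    if elem.expected.res is not None:
        assert response.json() == elem.expected.res
```

Keeping the expectations in plain data modules makes the route and permission coverage easy to scan, and lets one harness exercise both the legacy and the new endpoints.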
diff --git a/tests/core/api/test_authentication.py b/tests/core/api/test_authentication.py new file mode 100644 index 00000000..137e7bba --- /dev/null +++ b/tests/core/api/test_authentication.py @@ -0,0 +1,66 @@ +import time +from fastapi.testclient import TestClient +from pyispyb.config import settings + + +def test_token_create_decode(client: TestClient): + res = client.post( + f"{settings.api_root}/auth/login", + json={"login": "abcd", "password": "abcd", "plugin": "dummy"}, + ) + assert res.status_code == 201 + + headers = {"Authorization": f"Bearer {res.json()['token']}"} + res2 = client.get(f"{settings.api_root}/events", headers=headers) + assert res2.status_code == 200 + + +def test_token_expired(client: TestClient, short_session: float): + res = client.post( + f"{settings.api_root}/auth/login", + json={"login": "abcd", "password": "abcd", "plugin": "dummy"}, + ) + assert res.status_code == 201 + + time.sleep(short_session * 60 + 1) + + headers = {"Authorization": f"Bearer {res.json()['token']}"} + res2 = client.get(f"{settings.api_root}/events", headers=headers) + assert res2.status_code == 401 + assert "expired" in res2.json()["detail"].lower() + + +def test_no_token(client: TestClient): + res = client.get(f"{settings.api_root}/events") + assert res.status_code == 401 + assert "no token" in res.json()["detail"].lower() + + +def test_token_invalid(client: TestClient): + headers = {"Authorization": "Bearer asda.asda.asda"} + res = client.get(f"{settings.api_root}/events", headers=headers) + assert res.status_code == 401 + assert "invalid" in res.json()["detail"].lower() + + +def test_onetime_invalid(client: TestClient): + res = client.get(f"{settings.api_root}/events?onetime=one") + assert res.status_code == 401 + assert "invalid" in res.json()["detail"].lower() + + +def test_onetime(client: TestClient): + res = client.post( + f"{settings.api_root}/auth/login", + json={"login": "abcd", "password": "abcd", "plugin": "dummy"}, + ) + assert res.status_code == 201 + + headers = {"Authorization": f"Bearer {res.json()['token']}"} + res2 = client.post( + f"{settings.api_root}/user/sign", headers=headers, json={"validity": "/events"} + ) + assert res2.status_code == 200 + + res = client.get(f"{settings.api_root}/events?onetime={res2.json()['token']}") + assert res.status_code == 200 diff --git a/tests/core/api/test_beamline_groups.py b/tests/core/api/test_beamline_groups.py new file mode 100644 index 00000000..4613d40b --- /dev/null +++ b/tests/core/api/test_beamline_groups.py @@ -0,0 +1,43 @@ +from starlette.types import ASGIApp + +from tests.core.api.utils.permissions import mock_permissions +from tests.authclient import AuthClient + + +def test_all_proposals(auth_client_efgh: AuthClient, app: ASGIApp): + """Browse all proposals""" + with mock_permissions("all_proposals", app): + resp = auth_client_efgh.get("/proposals") + assert resp.status_code == 200 + json = resp.json() + + assert len(json["results"]) >= 2 + + +def test_bl_admin(auth_client_efgh: AuthClient, app: ASGIApp, with_beamline_groups): + """Should be able to browse proposals on beamline BL01 and BL02""" + with mock_permissions("bl_admin", app): + resp = auth_client_efgh.get("/proposals") + assert resp.status_code == 200 + json = resp.json() + + assert len(json["results"]) == 1 + assert set(json["results"][0]["_metadata"]["beamLines"]) == set( + ["BL01", "BL02"] + ) + + +def test_no_permission(auth_client_abcd: AuthClient): + """Browse only proposals with SessionHasPerson links""" + resp = auth_client_abcd.get("/proposals") + 
assert resp.status_code == 200 + json = resp.json() + + assert len(json["results"]) == 1 + assert json["results"][0]["_metadata"]["beamLines"] == ["bl"] + + +def test_sessions_for_group(auth_client_abcd: AuthClient, with_beamline_groups): + """Browse sessions for beamline group""" + resp = auth_client_abcd.get("/sessions/group?beamLineGroup=BL0x") + assert resp.status_code == 200 diff --git a/tests/core/api/test_datacollections.py b/tests/core/api/test_datacollections.py new file mode 100644 index 00000000..4cdd05d4 --- /dev/null +++ b/tests/core/api/test_datacollections.py @@ -0,0 +1,27 @@ +import pytest + +from starlette.types import ASGIApp + +from tests.conftest import AuthClient +from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem + +from tests.core.api.data.datacollections import ( + test_data_dc_attachments, + test_dc_images, + test_workflows, +) + + +@pytest.mark.parametrize("test_elem", test_data_dc_attachments, ids=get_elem_name) +def test_dc_attachments(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) + + +@pytest.mark.parametrize("test_elem", test_dc_images, ids=get_elem_name) +def test_dc_images(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) + + +@pytest.mark.parametrize("test_elem", test_workflows, ids=get_elem_name) +def test_workflows(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) diff --git a/tests/core/api/test_events.py b/tests/core/api/test_events.py new file mode 100644 index 00000000..ccfa0809 --- /dev/null +++ b/tests/core/api/test_events.py @@ -0,0 +1,14 @@ +import pytest + +from starlette.types import ASGIApp + +from tests.conftest import AuthClient +from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem +from tests.core.api.data.events import ( + test_data_events, +) + + +@pytest.mark.parametrize("test_elem", test_data_events, ids=get_elem_name) +def test_proposal_list(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) diff --git a/tests/core/api/test_labcontacts.py b/tests/core/api/test_labcontacts.py new file mode 100644 index 00000000..bd544b6d --- /dev/null +++ b/tests/core/api/test_labcontacts.py @@ -0,0 +1,63 @@ +import time + +from starlette.types import ASGIApp + +from tests.authclient import AuthClient + +LABCONTACT = { + "proposalId": 1, + "cardName": f"test card {time.time()}", + "Person": { + "givenName": "test person", + "familyName": "last name", + "Laboratory": { + "name": "lab name", + "address": "address", + "city": "city", + "country": "country", + }, + }, +} + +UPDATED_CONTACT = {"cardName": f"updated card {time.time()}"} + + +def test_labcontacts(auth_client_abcd: AuthClient, app: ASGIApp): + """Browse lab contacts""" + resp = auth_client_abcd.get("/labcontacts") + assert resp.status_code == 200 + + +def test_create_labcontact(auth_client_abcd: AuthClient, app: ASGIApp): + """Create a lab contact""" + + resp = auth_client_abcd.post("/labcontacts", payload=LABCONTACT) + assert resp.status_code == 201 + json = resp.json() + + resp = auth_client_abcd.get(f"/labcontacts/{json['labContactId']}") + assert resp.status_code == 200 + + +def test_create_labcontact_invalid_proposal(auth_client_efgh: AuthClient, app: ASGIApp): + """Create a lab contact without valid proposal""" + + resp = auth_client_efgh.post("/labcontacts", payload=LABCONTACT) + assert resp.status_code == 404 + + +def 
+ """Update a lab contact""" + resp = auth_client_abcd.get("/labcontacts") + assert resp.status_code == 200 + + json = resp.json() + results = json["results"] + latest = results[-1] + + resp = auth_client_abcd.patch( + f"/labcontacts/{latest['labContactId']}", payload=UPDATED_CONTACT + ) + assert resp.status_code == 200 + updated_json = resp.json() + assert updated_json["cardName"] == UPDATED_CONTACT["cardName"] diff --git a/tests/core/api/test_proposals.py b/tests/core/api/test_proposals.py new file mode 100644 index 00000000..baab7613 --- /dev/null +++ b/tests/core/api/test_proposals.py @@ -0,0 +1,13 @@ +import pytest + +from starlette.types import ASGIApp + +from tests.conftest import AuthClient +from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem + +from tests.core.api.data.proposals import test_data_proposal_list + + +@pytest.mark.parametrize("test_elem", test_data_proposal_list, ids=get_elem_name) +def test_proposal_list(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) diff --git a/tests/core/api/test_proteins.py b/tests/core/api/test_proteins.py new file mode 100644 index 00000000..4478678d --- /dev/null +++ b/tests/core/api/test_proteins.py @@ -0,0 +1,15 @@ +import pytest + +from starlette.types import ASGIApp + +from tests.conftest import AuthClient +from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem + +from tests.core.api.data.proteins import ( + test_data_proteins_list, +) + + +@pytest.mark.parametrize("test_elem", test_data_proteins_list, ids=get_elem_name) +def test_proteins_list(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) diff --git a/tests/core/api/test_samples.py b/tests/core/api/test_samples.py new file mode 100644 index 00000000..6bf4099d --- /dev/null +++ b/tests/core/api/test_samples.py @@ -0,0 +1,27 @@ +import pytest + +from starlette.types import ASGIApp + +from tests.conftest import AuthClient +from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem + +from tests.core.api.data.samples import ( + test_data_samples_list, + test_data_sampleimages_list, + test_data_subsamples_list, +) + + +@pytest.mark.parametrize("test_elem", test_data_samples_list, ids=get_elem_name) +def test_samples_list(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) + + +@pytest.mark.parametrize("test_elem", test_data_sampleimages_list, ids=get_elem_name) +def test_sample_images(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) + + +@pytest.mark.parametrize("test_elem", test_data_subsamples_list, ids=get_elem_name) +def test_subsamples_list(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + run_test(auth_client, test_elem, app) diff --git a/tests/core/api/test_sessions.py b/tests/core/api/test_sessions.py new file mode 100644 index 00000000..6b301daa --- /dev/null +++ b/tests/core/api/test_sessions.py @@ -0,0 +1,13 @@ +import pytest + +from starlette.types import ASGIApp + +from tests.conftest import AuthClient +from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem + +from tests.core.api.data.sessions import test_data_sessions_list + + +@pytest.mark.parametrize("test_elem", test_data_sessions_list, ids=get_elem_name) +def test_session_list(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp):
+ run_test(auth_client, test_elem, app) diff --git a/tests/core/api/test_user.py b/tests/core/api/test_user.py new file mode 100644 index 00000000..633792fc --- /dev/null +++ b/tests/core/api/test_user.py @@ -0,0 +1,9 @@ +from starlette.types import ASGIApp + +from tests.authclient import AuthClient + + +def test_user(auth_client_efgh: AuthClient, app: ASGIApp): + """Get current user""" + resp = auth_client_efgh.get("/user/current") + assert resp.status_code == 200 diff --git a/tests/core/api/test_userportalsync_create.py b/tests/core/api/test_userportalsync_create.py new file mode 100644 index 00000000..f7a8edf0 --- /dev/null +++ b/tests/core/api/test_userportalsync_create.py @@ -0,0 +1,227 @@ +import pytest +from tests.conftest import AuthClient +from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem +from starlette.types import ASGIApp +from pyispyb.core.modules.proposals import get_proposals +from pyispyb.core.modules.persons import get_persons +from pyispyb.core.modules.laboratories import get_laboratories +from pyispyb.core.modules.proteins import get_proteins +from pyispyb.core.modules.sessions import get_sessions +from pyispyb.core.modules.labcontacts import get_labcontacts +from tests.core.api.data.userportalsync_create import ( + test_data_proposal_userportalsync_create, + test_route_uportal_sync_create, +) + + +@pytest.mark.parametrize("test_elem", test_route_uportal_sync_create, ids=get_elem_name) +def test_call_sync_proposal_create( + auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp +): + run_test(auth_client, test_elem, app) + + +def test_proposal_persons_sync(with_db_session): + # Only one proposal with this proposalCode and proposalNumber should have been created in the DB + proposals = get_proposals( + withAuthorization=False, + skip=0, + limit=10, + proposalCode=test_data_proposal_userportalsync_create["proposal"][ + "proposalCode" + ], + proposalNumber=test_data_proposal_userportalsync_create["proposal"][ + "proposalNumber" + ], + ) + + assert proposals.total == 1 + + # Check the persons related to the proposal were created + total_proposal_persons = 0 + for i, json_person in enumerate( + test_data_proposal_userportalsync_create["proposal"]["persons"] + ): + # Look up the persons in the DB according to the input JSON data + persons = get_persons( + skip=0, + limit=10, + login=json_person["login"], + givenName=json_person["givenName"], + familyName=json_person["familyName"], + withLaboratory=False, + ) + if persons.total == 1: + total_proposal_persons += 1 + if i == 0: + # Save the personId of the first person in the JSON list + first_person_id = persons.results[0].personId + + assert total_proposal_persons == len( + test_data_proposal_userportalsync_create["proposal"]["persons"] + ) + + # The first person related to the proposal (PI or leader) + # should be the one having the relation with the Proposal table in the DB (foreign key constraint) + assert first_person_id == proposals.results[0].personId + + # Check the number of persons within the ProposalHasPerson table + assert ( + len(test_data_proposal_userportalsync_create["proposal"]["persons"]) + == proposals.results[0]._metadata["persons"] + ) + + +# We could also check whether the right persons were added to the ProposalHasPerson table + + +def test_proposal_persons_laboratories_sync(with_db_session): + # Get a list of unique laboratories from proposal persons + unique_laboratories = [] + for i, person in enumerate( + test_data_proposal_userportalsync_create["proposal"]["persons"] + ):
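+ # A person entry may not include a "laboratory" key, so tolerate its absence below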
+ try: + laboratory = person["laboratory"] + except KeyError: + laboratory = None + + if laboratory and laboratory not in unique_laboratories: + # Make a list of unique laboratories + unique_laboratories.append(person["laboratory"]) + + labs_in_db = 0 + for laboratory in unique_laboratories: + # Look up the laboratories in the DB according to the input JSON data + laboratories = get_laboratories( + skip=0, + limit=10, + name=laboratory["name"], + city=laboratory["city"], + country=laboratory["country"], + ) + if laboratories.total == 1: + labs_in_db += 1 + + # Check the number of unique laboratories matches the entries in the DB + assert len(unique_laboratories) == labs_in_db + + +def test_session_persons_sync(with_db_session): + # Iterate over the sessions + sessions_in_db = 0 + for json_session in test_data_proposal_userportalsync_create["sessions"]: + + sessions = None + try: + if json_session["externalId"] is not None: + sessions = get_sessions( + withAuthorization=False, + skip=0, + limit=10, + externalId=json_session["externalId"], + ) + except KeyError: + pass + + try: + # Keeping expSessionPk for now for backward compatibility with the ISPyB Java API + # It might be deprecated later + if json_session["expSessionPk"] is not None: + sessions = get_sessions( + withAuthorization=False, + skip=0, + limit=10, + expSessionPk=json_session["expSessionPk"], + ) + except KeyError: + pass + + if sessions is not None and sessions.total == 1: + sessions_in_db += 1 + + # Check the number of persons within the Session_has_Person table + assert ( + len(json_session["persons"]) == sessions.results[0]._metadata["persons"] + ) + + # Check the number of sessions matches the entries in the DB + assert len(test_data_proposal_userportalsync_create["sessions"]) == sessions_in_db + + +def test_lab_contacts_sync(with_db_session): + # Get the proposal from the DB + proposals = get_proposals( + withAuthorization=False, + skip=0, + limit=10, + proposalCode=test_data_proposal_userportalsync_create["proposal"][ + "proposalCode" + ], + proposalNumber=test_data_proposal_userportalsync_create["proposal"][ + "proposalNumber" + ], + ) + # Get the lab contacts for the proposal in the DB + labcontacts = get_labcontacts( + withAuthorization=False, + skip=0, + limit=10, + proposalId=proposals.results[0].proposalId, + ) + + # Check the number of LabContacts for the proposal matches the entries in the DB + assert ( + len(test_data_proposal_userportalsync_create["proposal"]["labcontacts"]) + == labcontacts.total + ) + + # Later we can add more automated tests to check that the right persons were added, etc. + + +def test_proteins_sync(with_db_session): + # Get the proposal from the DB + proposals = get_proposals( + withAuthorization=False, + skip=0, + limit=10, + proposalCode=test_data_proposal_userportalsync_create["proposal"][ + "proposalCode" + ], + proposalNumber=test_data_proposal_userportalsync_create["proposal"][ + "proposalNumber" + ], + ) + + proteins_in_db = 0 + for i, protein in enumerate(test_data_proposal_userportalsync_create["proteins"]): + # Check all proteins in the DB related to the proposalId + proteins = None + try: + if protein["externalId"] is not None: + proteins = get_proteins( + withAuthorization=False, + skip=0, + limit=10, + externalId=protein["externalId"], + proposalId=proposals.results[0].proposalId, + ) + except KeyError: + pass + + try: + proteins = get_proteins( + withAuthorization=False, + skip=0, + limit=10, + acronym=protein["acronym"], + proposalId=proposals.results[0].proposalId, + ) + except KeyError: + pass + + if proteins is not None and proteins.total 
== 1: + proteins_in_db += 1 + # Check the number of proteins in the JSON matches the entries in the DB + assert len(test_data_proposal_userportalsync_create["proteins"]) == proteins_in_db diff --git a/tests/core/api/test_userportalsync_update.py b/tests/core/api/test_userportalsync_update.py new file mode 100644 index 00000000..309de735 --- /dev/null +++ b/tests/core/api/test_userportalsync_update.py @@ -0,0 +1,95 @@ +import pytest +from tests.conftest import AuthClient +from tests.core.api.utils.apitest import get_elem_name, run_test, ApiTestElem +from starlette.types import ASGIApp +from pyispyb.core.modules.proposals import get_proposals +from pyispyb.core.modules.persons import get_persons +from pyispyb.core.modules.sessions import get_sessions +from tests.core.api.data.userportalsync_update import ( + test_data_proposal_userportalsync_update, + test_route_uportal_sync_update, +) + + +@pytest.mark.parametrize("test_elem", test_route_uportal_sync_update, ids=get_elem_name) +def test_call_sync_proposal_update( + auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp +): + run_test(auth_client, test_elem, app) + + +def test_proposal_title_update(with_db_session): + # Get the proposal from the DB + proposals = get_proposals( + withAuthorization=False, + skip=0, + limit=10, + proposalCode=test_data_proposal_userportalsync_update["proposal"][ + "proposalCode" + ], + proposalNumber=test_data_proposal_userportalsync_update["proposal"][ + "proposalNumber" + ], + ) + # Check the proposal title was updated as expected + assert ( + proposals.results[0].title + == test_data_proposal_userportalsync_update["proposal"]["title"] + ) + + +def test_person_email_update(with_db_session): + # Get the person from the DB + persons = get_persons( + skip=0, + limit=10, + login=test_data_proposal_userportalsync_update["proposal"]["persons"][0][ + "login" + ], + ) + # Check the person email was updated as expected + assert ( + persons.results[0].emailAddress + == test_data_proposal_userportalsync_update["proposal"]["persons"][0][ + "emailAddress" + ] + ) + + +def test_person_laboratory_name_update(with_db_session): + # Get the person from the DB + persons = get_persons( + skip=0, + limit=10, + login=test_data_proposal_userportalsync_update["proposal"]["persons"][1][ + "login" + ], + withLaboratory=True, + ) + # Check the laboratory name was updated as expected + assert ( + persons.results[0].Laboratory.name + == test_data_proposal_userportalsync_update["proposal"]["persons"][1][ + "laboratory" + ]["name"] + ) + + +def test_session_beamline_name_update(with_db_session): + # Get the session from the DB + sessions = get_sessions( + withAuthorization=False, + skip=0, + limit=10, + externalId=test_data_proposal_userportalsync_update["sessions"][0][ + "externalId" + ], + ) + # Check the beamline name was updated as expected + assert ( + sessions.results[0].beamLineName + == test_data_proposal_userportalsync_update["sessions"][0]["beamLineName"] + ) + + +# More tests can be added later diff --git a/tests/core/api/utils/apitest.py b/tests/core/api/utils/apitest.py new file mode 100644 index 00000000..49c049c5 --- /dev/null +++ b/tests/core/api/utils/apitest.py @@ -0,0 +1,90 @@ +from typing import Any + +from starlette.types import ASGIApp + +from tests.authclient import AuthClient +from jsondiff import diff + +from tests.core.api.utils.permissions import mock_permissions + + +class ApiTestInput: + def __init__( + self, + *, + login: str, + route: str, + permissions: list[str] = [], + method: str = "get",
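+ # Forwarded by run_test as the request payload when method == "post"; unused for "get"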
+ payload: str | None = None, + ) -> None: + self.login = login + self.permissions = permissions + self.route = route + self.method = method + self.payload = payload + + +class ApiTestExpected: + def __init__( + self, code: int | None = None, res: dict[str, Any] | None = None + ) -> None: + self.code = code + self.res = res + + +class ApiTestElem: + def __init__( + self, name: str, input: ApiTestInput, expected: ApiTestExpected + ) -> None: + self.name = name + self.input = input + self.expected = expected + + +def get_elem_name(test_elem: ApiTestElem): + return test_elem.name + + +def run_test(auth_client: AuthClient, test_elem: ApiTestElem, app: ASGIApp): + with mock_permissions(test_elem.input.permissions, app): + auth_client.login(test_elem.input.login, "password") + + if test_elem.input.method == "get": + response = auth_client.get(test_elem.input.route) + elif test_elem.input.method == "post": + response = auth_client.post( + test_elem.input.route, payload=test_elem.input.payload + ) + + if test_elem.expected.code is not None: + assert ( + response.status_code == test_elem.expected.code + ), f""" + TEST { test_elem.name } + EXPECTED code { test_elem.expected.code } + GOT code { response.status_code } + """ + + if test_elem.expected.res is not None: + assert ( + response.json() == test_elem.expected.res + ), f""" + TEST { test_elem.name } + + EXPECTED json + ============================= + { test_elem.expected.res } + ============================= + + GOT json + ============================= + { response.json() } + ============================= + + DIFF + ============================= + { diff(test_elem.expected.res, response.json()) } + ============================= + """ diff --git a/tests/core/api/utils/permissions.py b/tests/core/api/utils/permissions.py new file mode 100644 index 00000000..4d2fc625 --- /dev/null +++ b/tests/core/api/utils/permissions.py @@ -0,0 +1,45 @@ +from contextlib import contextmanager + +from fastapi import HTTPException, Depends +from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials +from starlette.types import ASGIApp + +from pyispyb.app.extensions.auth.token import set_token_data +from pyispyb.app.extensions.auth.bearer import JWTBearer, verify_jwt + +security = HTTPBearer() + + +@contextmanager +def mock_permissions(permissions: list[str], app: ASGIApp): + """Allows overriding the permissions the current user has by replacing the JWTBearer dependency""" + + async def JWTBearerMockPermissions( + credentials: HTTPAuthorizationCredentials = Depends(security), + ): + if credentials: + if not credentials.scheme == "Bearer": + raise HTTPException( + status_code=401, detail="Invalid authentication scheme." + ) + decoded = verify_jwt(credentials.credentials) + if not decoded: + raise HTTPException( + status_code=401, detail="Invalid token or expired token." + ) + + decoded["permissions"] = permissions + print() + print(f" - Set permissions for `{decoded['login']}` to {permissions}") + set_token_data(decoded) + + return credentials.credentials + else: + raise HTTPException(status_code=401, detail="No token provided.") + + app.dependency_overrides[JWTBearer] = JWTBearerMockPermissions + + yield + + print(" - Resetting permissions") + app.dependency_overrides = {} diff --git a/tests/core/conftest.py b/tests/core/conftest.py deleted file mode 100644 index 1500286f..00000000 --- a/tests/core/conftest.py +++ /dev/null @@ -1,53 +0,0 @@ -""" -Project: py-ispyb -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. 
- -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along -""" - - - - -__license__ = "LGPLv3+" - - -import os -import sys -import pytest - - -#TESTS_DIR = os.path.abspath(os.path.dirname(__file__)) -ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) -sys.path.insert(0, ROOT_DIR) - -from pyispyb import create_app - -@pytest.fixture(scope="session") -def ispyb_core_app(): - app = create_app("ispyb_core_config.yml", "test") - with app.app_context(): - yield app - - -@pytest.fixture() -def ispyb_core_token(ispyb_core_app): - client = ispyb_core_app.test_client() - api_root = ispyb_core_app.config["API_ROOT"] - - response = client.get( - api_root + "/auth/login", headers={"username": "manager", "password": "pass"} - ) - return response.json["token"] diff --git a/tests/core/data.py b/tests/core/data.py deleted file mode 100644 index f23600e5..00000000 --- a/tests/core/data.py +++ /dev/null @@ -1,369 +0,0 @@ -import time - -from datetime import datetime -from random import randint -import uuid - -test_proposal = { - "proposalCode": "MX", - "title": "Test proposal", - "proposalType": "MX", - "personId": 1, - "proposalNumber": "111", - # "bltimeStamp": datetime.strptime("2015-12-21 16:20:43", "%Y-%m-%d %H:%M:%S"), - "state": "Open", -} - - -def get_test_proposal(): - proposal = test_proposal - proposal["proposalNumber"] = randint(1, 1e5) - return proposal - - -test_beam_calendar = { - "run": "1", - "beamStatus": "Open", - "endDate": datetime.now().strftime("%Y-%m-%d %H:%M:%S"), - "startDate": datetime.now().strftime("%Y-%m-%d %H:%M:%S"), -} - -test_session = { - "bltimeStamp": "2015-12-21 16:20:44", - "proposalId": 37027, - "beamLineName": "i03", - "visit_number": 2, - "archived": 0, - "beamLineSetupId": 1, - "endDate": "2015-12-21 16:20:44", - "startDate": "2015-12-21 16:20:44", -} - - -test_data_collection = { - "binning": 1, - "FOCALSPOTSIZEATSAMPLEY": 20, - "detectorDistance": 193.087, - "xtalSnapshotFullPath3": "/path/to_snapshot3", - "printableForReport": 1, - "dataCollectionGroupId": 988855, - "slitGapHorizontal": 0.099937, - "axisStart": 45, - "xtalSnapshotFullPath1": "/path/to_snapshot1", - "imageDirectory": "/path/o/image/dir", - "BLSAMPLEID": 374695, - "xBeam": 215.62, - "SESSIONID": 55167, - "comments": "Test_comm", - "fileTemplate": "tlys_jan_4_1_####.cbf", - "imageSuffix": "cbf", - "flux": 833107367454.3083, - "overlap": 0, - "wavelength": 1.28255, - "axisEnd": 0.1, - "slitGapVertical": 0.059918, - "beamSizeAtSampleX": 0.05, - "omegaStart": 45, - "endTime": datetime.strptime("2016-01-14 12:41:54", "%Y-%m-%d %H:%M:%S"), - "dataCollectionNumber": 1, - "imagePrefix": "tlys_jan_4", - "xtalSnapshotFullPath4": "/path/to_snapshot4", - "yBeam": 208.978, - "synchrotronMode": "User", - "numberOfPasses": 1, - "resolution": 1.6, - "undulatorGap1": 5.685, - "xtalSnapshotFullPath2": "/path/to_snapshot2", - "FOCALSPOTSIZEATSAMPLEX": 80, - "runStatus": "DataCollection Successful", - "dataCollectionId": 993677, - "axisRange": 0.1, - "POSITIONID": 595236, - 
"numberOfImages": 3600, - "beamSizeAtSampleY": 0.02, - "transmission": 40.1936, - "startImageNumber": 1, - "rotationAxis": "Omega", - "exposureTime": 0.02, - "startTime": datetime.strptime("2016-01-14 12:40:34", "%Y-%m-%d %H:%M:%S"), -} - -test_lab_contact = { - "personId": 1, - "cardName": "Card", - "proposalId": 37027, - "defaultCourrierCompany": "DHL", - "courierAccount": "01", - "billingReference": "02", - "dewarAvgCustomsValue": 0, - "dewarAvgTransportValue": 0, -} - -test_shippment = { - "proposalId": 37027, - "shippingName": "Test shipment", - "deliveryAgent_agentName": "DHL", - "deliveryAgent_agentCode": "Code", - "deliveryAgent_flightCode": "Code", - "shippingStatus": "Open", - "laboratoryId": 1, - "isStorageShipping": 0, - "comments": "Comment", - "sendingLabContactId": 1, - "returnLabContactId": 1, - "returnCourier": "DHL", - "deliveryAgent_label": "Label", - "physicalLocation": "Store", -} - -test_laboratory = { - "laboratoryUUID": "UUID", - "name": "Test lab", - "address": "Test address", - "city": "City", - "country": "Country", - "url": "url", - "organization": "Test org", - "laboratoryPk": 0, - "postcode": "Test code", -} - -test_person = { - "laboratoryId": 1, - "personUUID": "Person uuid", - "familyName": "Family", - "givenName": "Name", - "title": "Dr", - "emailAddress": "email", - "phoneNumber": "2233", - "login": "login", - "faxNumber": "222", - "cache": "string", -} - - -def get_test_person(): - test_person_dict = test_person - test_person_dict["login"] = uuid.uuid4().hex.upper()[0:6] - return test_person_dict - - -test_detector = { - "detectorType": "PixelCounting", - "detectorManufacturer": "TestManufacturer", - "detectorModel": "T1", - "detectorPixelSizeHorizontal": 0.75, - "detectorPixelSizeVertical": 0.75, - "DETECTORMAXRESOLUTION": 0.6, - "DETECTORMINRESOLUTION": 6, - "detectorSerialNumber": "00AA11", - "detectorDistanceMin": "100", - "detectorDistanceMax": "1000", - "trustedPixelValueRangeLower": "1", - "trustedPixelValueRangeUpper": "2", - "sensorThickness": 1, - "overload": 1, - "XGeoCorr": "100", - "YGeoCorr": "200", - "detectorMode": "testMode", - "density": 1, - "composition": "comp", - "numberOfPixelsX": 8000, - "numberOfPixelsY": 8000, - "detectorRollMin": "1", - "detectorRollMax": "2", - "localName": "TestDetector", -} - - -def get_test_detector(): - test_detector_dict = test_detector - test_detector_dict["detectorSerialNumber"] = uuid.uuid4().hex.upper()[0:6] - return test_detector_dict - - -test_beamline_setup = { - "detectorId": 0, - "synchrotronMode": "Test mode", - "undulatorType1": "Si111", - "focalSpotSizeAtSample": 10, - "focusingOptic": "CRL", - "beamDivergenceHorizontal": 0.1, - "beamDivergenceVertical": 0.1, - "polarisation": 0, - "monochromatorType": "T", - "setupDate": datetime.now().strftime("%Y-%m-%d %H:%M:%S"), - "synchrotronName": "TestSynch", - "maxExpTimePerDataCollection": "0.04", - "maxExposureTimePerImage": 10, - "minExposureTimePerImage": "0", - "goniostatMaxOscillationSpeed": "1", - "goniostatMaxOscillationWidth": "1", - "goniostatMinOscillationWidth": "1", - "maxTransmission": "100", - "minTransmission": "0", - "beamlineName": "testBeamline", - "beamSizeXMin": 10, - "beamSizeXMax": 200, - "beamSizeYMin": 10, - "beamSizeYMax": 200, - "energyMin": 5, - "energyMax": 15, - "omegaMin": 0, - "omegaMax": 360, - "kappaMin": 0, - "kappaMax": 360, - "phiMin": 0, - "phiMax": 180, - "active": 0, - "numberOfImagesMax": 1000000, - "numberOfImagesMin": 1, -} - -test_protein = { - "proposalId": 37027, - "name": "Protein name", - "acronym": 
"ancr", - "molecularMass": "2", - "proteinType": "2", - "personId": 1, -} - -test_diffraction_plan = { - "name": "Test", - "experimentKind": "OSC", - "observedResolution": 2, - "minimalResolution": 2, - "exposureTime": 0.04, - "oscillationRange": 360, - "maximalResolution": 1, - "screeningResolution": 2, - "radiationSensitivity": 0, - "preferredBeamSizeX": 20, - "preferredBeamSizeY": 20, - "preferredBeamDiameter": 20, - "comments": "Test comment", - "DIFFRACTIONPLANUUID": "uuid", - "aimedCompleteness": "99", - "aimedIOverSigmaAtHighestRes": "1", - "aimedMultiplicity": "1", - "aimedResolution": "1", - "anomalousData": 0, - "complexity": "1", - "estimateRadiationDamage": 0, - "forcedSpaceGroup": "P4", - "requiredCompleteness": "99", - "requiredMultiplicity": "1", - "requiredResolution": "1", - "numberOfPositions": 1, - "minOscWidth": 0.1, - "energy": 12.70, - "transmission": 100, - "kappaStart": 0, - "axisStart": 0, - "axisRange": 0.1, - "numberOfImages": 3600, - "beamLineName": "test beamline", - "distance": "200", -} - -test_crystal = { - "diffractionPlanId": 0, - "proteinId": 0, - "crystalUUID": "crUUID", - "name": "Test crystal", - "spaceGroup": "P4", - "morphology": "No", - "color": "Green", - "size_X": "10", - "size_Y": "10", - "size_Z": "100", - "cell_a": "1", - "cell_b": "1", - "cell_c": "1", - "cell_alpha": "2", - "cell_beta": "2", - "cell_gamma": "2", - "comments": "Comment", - "pdbFileName": "pdf_filename", - "pdbFilePath": "pdf_filen_path", - "abundance": 0, - "theoreticalDensity": 0, -} - -test_sample = { - "blSampleId": 0, - "diffractionPlanId": 0, - "crystalId": 0, - "containerId": 0, - "name": "Test sample", - "code": "code", - "location": "1", - "holderLength": "22", - "loopLength": "22", - "loopType": "N", - "wireWidth": "1", - "comments": "Test comment", - "isInSampleChanger": 0, - "POSITIONID": 0, - "SMILES": "string", - "blSubSampleId": 0, - "lastImageURL": "string", - "screenComponentGroupId": 0, - "volume": 0, - "packingFraction": 0, - "preparationTemeprature": 0, - "preparationHumidity": 0, - "blottingTime": 0, - "blottingForce": 0, - "blottingDrainTime": 0, - "support": "string", - "subLocation": 0, -} - -test_container = { - "dewarId": 0, - "code": "code", - "containerType": "cane", - "capacity": 10, - "sampleChangerLocation": "1", - "containerStatus": "ready", - "beamlineLocation": "no", - "screenId": 0, - "scheduleId": 0, - "barcode": "barcode", - "imagerId": 0, - "sessionId": 0, - "ownerId": 0, - "requestedImagerId": 0, - "requestedReturn": 0, - "comments": "string", - "experimentType": "MX", - "storageTemperature": 8, - "containerRegistryId": 0, -} - -test_dewar = { - "shippingId": 0, - "code": "00001", - "comments": "comments", - "storageLocation": "string", - "dewarStatus": "open", - "isStorageDewar": 1, - "barCode": "code", - "firstExperimentId": 0, - "customsValue": 10, - "transportValue": 100, - "trackingNumberToSynchrotron": "00001", - "trackingNumberFromSynchrotron": "00002", - "type": "Dewar", - "FACILITYCODE": "fac", - "weight": 30, - "deliveryAgent_barcode": "test", -} - - -def get_test_dewar(): - dewar_dict = test_dewar - dewar_dict["barCode"] = uuid.uuid4().hex.upper()[0:6] - return dewar_dict diff --git a/tests/core/functional/test_a_post.py b/tests/core/functional/test_a_post.py deleted file mode 100644 index 07c0a36f..00000000 --- a/tests/core/functional/test_a_post.py +++ /dev/null @@ -1,186 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. 
-# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . - - -from tests.core import data - - -def test_post(ispyb_core_app, ispyb_core_token): - client = ispyb_core_app.test_client() - headers = {"Authorization": "bearer " + ispyb_core_token} - - - route = ispyb_core_app.config["API_ROOT"] + "/contacts/labs" - response = client.post(route, json=data.test_laboratory, headers=headers) - - assert response.status_code == 200, "[POST] %s failed" % route - laboratory_id = response.json["laboratoryId"] - assert laboratory_id - - print("Laboratory id: %d" % laboratory_id) - route = ispyb_core_app.config["API_ROOT"] + "/contacts/persons" - person_dict = data.get_test_person() - person_dict["laboratoryId"] = laboratory_id - response = client.post(route, json=person_dict, headers=headers) - - assert response.status_code == 200, "[POST] %s failed" % route - person_id = response.json["personId"] - assert person_id - print("Person id: %d" % person_id) - - route = ispyb_core_app.config["API_ROOT"] + "/proposals" - proposal_dict = data.get_test_proposal() - proposal_dict["personId"] = person_id - response = client.post(route, json=proposal_dict, headers=headers) - - assert response.status_code == 200, "[POST] %s failed" % route - proposal_id = response.json["proposalId"] - assert proposal_id - print("Proposal id: %d" % proposal_id) - - route = ispyb_core_app.config["API_ROOT"] + "/contacts/lab_contacts" - lab_contact_dict = data.test_lab_contact - lab_contact_dict["personId"] = person_id - lab_contact_dict["proposalId"] = proposal_id - response = client.post(route, json=lab_contact_dict, headers=headers) - - assert response.status_code == 200, "[POST] %s failed" % route - lab_contact_id = response.json["labContactId"] - print("LabContact id: %d" % lab_contact_id) - assert lab_contact_id - - route = ispyb_core_app.config["API_ROOT"] + "/beamline/detectors" - detector_dict = data.get_test_detector() - response = client.post(route, json=detector_dict, headers=headers) - - assert response.status_code == 200, "[POST] %s failed" % route - detector_id = response.json["detectorId"] - - print("Detector id: %d" % detector_id) - assert detector_id - - route = ispyb_core_app.config["API_ROOT"] + "/beamline/setups" - beamline_setup_dict = data.test_beamline_setup - beamline_setup_dict["detectorId"] = detector_id - response = client.post(route, json=beamline_setup_dict, headers=headers) - - assert response.status_code == 200, "[POST] %s failed" % route - beamline_setup_id = response.json["beamLineSetupId"] - - print("BeamlineSetup id: %d" % beamline_setup_id) - assert beamline_setup_id - - route = ispyb_core_app.config["API_ROOT"] + "/samples/proteins" - protein_dict = data.test_protein - proposal_dict["proposalId"] = proposal_id - proposal_dict["personId"] = person_id - response = client.post(route, json=protein_dict, headers=headers) - - assert response.status_code == 200, "[POST] %s failed" % route - protein_id = response.json["proteinId"] - - 
print("Protein id: %d" % protein_id) - assert protein_id - - route = ispyb_core_app.config["API_ROOT"] + "/samples/diffraction_plans" - diffraction_plan_dict = data.test_diffraction_plan - diffraction_plan_dict["presetForProposalId"] = proposal_id - diffraction_plan_dict["detectorId"] = detector_id - response = client.post(route, json=diffraction_plan_dict, headers=headers) - - assert response.status_code == 200, "[POST] %s failed" % route - diffraction_plan_id = response.json["diffractionPlanId"] - - print("Diffraction plan id: %d" % diffraction_plan_id) - assert diffraction_plan_id - - route = ispyb_core_app.config["API_ROOT"] + "/samples/crystals" - crystal_dict = data.test_crystal - crystal_dict["proteinId"] = protein_id - crystal_dict["diffractionPlanId"] = diffraction_plan_id - response = client.post(route, json=crystal_dict, headers=headers) - - assert response.status_code == 200, "[POST] %s failed" % route - crystal_id = response.json["crystalId"] - - print("Crystal id: %d" % crystal_id) - assert crystal_id - - route = ispyb_core_app.config["API_ROOT"] + "/shippings" - shipping_dict = data.test_shippment - shipping_dict["proposalId"] = proposal_id - shipping_dict["sendingLabContactId"] = lab_contact_id - shipping_dict["returnLabContactId"] = lab_contact_id - shipping_dict["deliveryAgent_flightCodePersonId"] = person_id - response = client.post(route, json=shipping_dict, headers=headers) - - assert response.status_code == 200, "[POST] %s failed" % route - shipping_id = response.json["shippingId"] - - print("Shipping id: %d" % shipping_id) - assert shipping_id - - route = ispyb_core_app.config["API_ROOT"] + "/sessions/beam_calendars" - beam_calendar_dict = data.test_beam_calendar - response = client.post(route, json=beam_calendar_dict, headers=headers) - - assert response.status_code == 200, "[POST] %s failed" % route - beam_calendar_id = response.json["beamCalendarId"] - - print("Beam calendar id: %d" % beam_calendar_id) - assert beam_calendar_id - - route = ispyb_core_app.config["API_ROOT"] + "/sessions" - session_dict = data.test_session - session_dict["proposalId"] = proposal_id - session_dict["beamLineSetupId"] = beamline_setup_id - session_dict["beamCalendarId"] = beam_calendar_id - response = client.post(route, json=session_dict, headers=headers) - - assert response.status_code == 200, "[POST] %s failed" % route - session_id = response.json["sessionId"] - - print("Session id: %d" % session_id) - assert session_id - - route = ispyb_core_app.config["API_ROOT"] + "/shippings/dewars" - dewar_dict = data.get_test_dewar() - dewar_dict["firstExperimentId"] = session_id - dewar_dict["shippingId"] = shipping_id - response = client.post(route, json=dewar_dict, headers=headers) - - assert response.status_code == 200, "[POST] %s failed" % route - dewar_id = response.json["dewarId"] - - print("Dewar id: %d" % dewar_id) - assert dewar_id - - """ - route = ispyb_core_app.config["API_ROOT"] + "/samples" - sample_dict = data.test_sample - sample_dict["crystalId"] = crystal_id - response = client.post(route, json=sample_dict, headers=headers) - - assert response.status_code == 200, "[POST] %s failed" % route - sample_id = response.json["blSampleId"] - - print("Sample id: %d" % sample_id) - assert sample_id - """ diff --git a/tests/core/functional/test_b_get.py b/tests/core/functional/test_b_get.py deleted file mode 100644 index fda71ab3..00000000 --- a/tests/core/functional/test_b_get.py +++ /dev/null @@ -1,67 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# 
https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. -# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . - - -def test_get(ispyb_core_app, ispyb_core_token): - client = ispyb_core_app.test_client() - - endpoint_list = [ - "/proposals", - "/proposals?offset=1&limit=1", - "/proposals?proposalCode=cm", - "/contacts/labs", - "/contacts/labs?offset=1&limit=1", - "/contacts/labs?city=City", - "/contacts/lab_contacts", - "/contacts/lab_contacts?offset=1&limit=1", - "/contacts/lab_contacts?defaultCourrierCompany=DHL", - "/contacts/persons", - "/contacts/persons?offset=1&limit=1", - "/contacts/persons?login=boaty", - "/data_collections", - "/data_collections?offset=1&limit=1", - "/beamline/detectors", - "/beamline/detectors?offset=1&limit=1", - "/beamline/detectors?detectorModel=T1", - "/beamline/setups", - "/beamline/setups?offset=1&limit=1", - "/beamline/setups?beamlineName=testBeamline", - "/samples/proteins", - "/samples/proteins?offset=1&limit=1", - "/samples/proteins?acronym=ancr", - "/samples/diffraction_plans", - "/samples/diffraction_plans?offset=1&limit=1", - "/samples/diffraction_plans?experimentKind=OSC", - "/samples/crystals", - "/samples/crystals?offset=1&limit=1", - "/samples/crystals?spaceGroup=P4" "/samples", - "/samples?offset=1&limit=1", - "/samples?holderLength=22", - ] - - headers = {"Authorization": "Bearer " + ispyb_core_token} - - for endpoint in endpoint_list: - route = ispyb_core_app.config["API_ROOT"] + endpoint - response = client.get(route, headers=headers) - data = response.json - - assert response.status_code == 200, "[GET] %s " % (route) - assert data, "[GET] %s No data returned" % route diff --git a/tests/core/functional/test_c_patch.py b/tests/core/functional/test_c_patch.py deleted file mode 100644 index 4bde7db0..00000000 --- a/tests/core/functional/test_c_patch.py +++ /dev/null @@ -1,117 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. -# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . 
- - -def test_patch(ispyb_core_app, ispyb_core_token): - client = ispyb_core_app.test_client() - headers = {"Authorization": "Bearer " + ispyb_core_token} - - route = ispyb_core_app.config["API_ROOT"] + "/contacts/labs" - response = client.get(route, headers=headers) - lab_id = response.json["data"]["rows"][0]["laboratoryId"] - route = ispyb_core_app.config["API_ROOT"] + "/contacts/labs/" + str(lab_id) - mod_laboratory = {"name": "Modified name"} - - response = client.patch(route, json=mod_laboratory, headers=headers) - assert response.status_code == 200, "[PATCH] %s failed" % (route) - - route = ispyb_core_app.config["API_ROOT"] + "/contacts/persons" - response = client.get(route, headers=headers) - person_id = response.json["data"]["rows"][0]["personId"] - route = ispyb_core_app.config["API_ROOT"] + "/contacts/persons/" + str(person_id) - mod_person = {"familyName": "Modified name", "phoneNumber": "0172-12233"} - - response = client.patch(route, json=mod_person, headers=headers) - assert response.status_code == 200, "[PATCH] %s failed" % (route) - - route = ispyb_core_app.config["API_ROOT"] + "/contacts/lab_contacts" - response = client.get(route, headers=headers) - lab_contact_id = response.json["data"]["rows"][0]["labContactId"] - route = ( - ispyb_core_app.config["API_ROOT"] - + "/contacts/lab_contacts/" - + str(lab_contact_id) - ) - mod_lab_contact = {"defaultCourrierCompany": "FedEX"} - - response = client.patch(route, json=mod_lab_contact, headers=headers) - assert response.status_code == 200, "[PATCH] %s failed" % (route) - - route = ispyb_core_app.config["API_ROOT"] + "/beamline/detectors" - response = client.get(route, headers=headers) - detector_id = response.json["data"]["rows"][0]["detectorId"] - route = ( - ispyb_core_app.config["API_ROOT"] + "/beamline/detectors/" + str(detector_id) - ) - mod_detector = {"detectorModel": "T1_0001"} - - response = client.patch(route, json=mod_detector, headers=headers) - assert response.status_code == 200, "[PATCH] %s failed" % (route) - - route = ispyb_core_app.config["API_ROOT"] + "/beamline/setups" - response = client.get(route, headers=headers) - beamline_setup_id = response.json["data"]["rows"][0]["beamLineSetupId"] - route = ( - ispyb_core_app.config["API_ROOT"] + "/beamline/setups/" + str(beamline_setup_id) - ) - mode_beamline_setup = {"synchrotronName": "Error"} - - response = client.patch(route, json=mode_beamline_setup, headers=headers) - assert response.status_code == 200, "[PATCH] %s failed" % (route) - - route = ispyb_core_app.config["API_ROOT"] + "/samples/proteins" - response = client.get(route, headers=headers) - protein_id = response.json["data"]["rows"][0]["proteinId"] - route = ispyb_core_app.config["API_ROOT"] + "/samples/proteins/" + str(protein_id) - mod_protein = {"molecularMass": "200"} - - response = client.patch(route, json=mod_protein, headers=headers) - assert response.status_code == 200, "[PATCH] %s failed" % (route) - - route = ispyb_core_app.config["API_ROOT"] + "/samples/diffraction_plans" - response = client.get(route, headers=headers) - diffraction_plan_id = response.json["data"]["rows"][0]["diffractionPlanId"] - route = ( - ispyb_core_app.config["API_ROOT"] - + "/samples/diffraction_plans/" - + str(diffraction_plan_id) - ) - mod_diffraction_plan = {"observedResolution": "3"} - - response = client.patch(route, json=mod_diffraction_plan, headers=headers) - assert response.status_code == 200, "[PATCH] %s failed" % (route) - - route = ispyb_core_app.config["API_ROOT"] + "/samples/crystals" - response = 
client.get(route, headers=headers) - crystal_id = response.json["data"]["rows"][0]["crystalId"] - route = ispyb_core_app.config["API_ROOT"] + "/samples/crystals/" + str(crystal_id) - mod_crystal = {"spaceGroup": "P2"} - - response = client.patch(route, json=mod_crystal, headers=headers) - assert response.status_code == 200, "[PATCH] %s failed" % (route) - - route = ispyb_core_app.config["API_ROOT"] + "/samples" - response = client.get(route, headers=headers) - sample_id = response.json["data"]["rows"][0]["blSampleId"] - route = ispyb_core_app.config["API_ROOT"] + "/samples/" + str(sample_id) - mod_sample = {"location": "2"} - - response = client.patch(route, json=mod_sample, headers=headers) - assert response.status_code == 200, "[PATCH] %s failed" % (route) diff --git a/tests/core/functional/test_d_delete.py b/tests/core/functional/test_d_delete.py deleted file mode 100644 index a665d42d..00000000 --- a/tests/core/functional/test_d_delete.py +++ /dev/null @@ -1,76 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. -# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . - - -def test_delete(ispyb_core_app, ispyb_core_token): - client = ispyb_core_app.test_client() - headers = {"Authorization": "Bearer " + ispyb_core_token} - - - route = ispyb_core_app.config["API_ROOT"] + "/samples/diffraction_plans" - response = client.get(route, headers=headers) - diffraction_plan_id = response.json["data"]["rows"][-1]["diffractionPlanId"] - route = ( - ispyb_core_app.config["API_ROOT"] - + "/samples/diffraction_plans/" - + str(diffraction_plan_id) - ) - response = client.delete(route, headers=headers) - assert response.status_code == 200, "[DELETE] %s " % (route) - - route = ispyb_core_app.config["API_ROOT"] + "/contacts/persons" - response = client.get(route, headers=headers) - person_id = response.json["data"]["rows"][-1]["personId"] - route = ispyb_core_app.config["API_ROOT"] + "/contacts/persons/" + str(person_id) - response = client.delete(route, headers=headers) - assert response.status_code == 200, "[DELETE] %s " % (route) - - route = ispyb_core_app.config["API_ROOT"] + "/contacts/labs" - response = client.get(route, headers=headers) - lab_id = response.json["data"]["rows"][-1]["laboratoryId"] - route = ispyb_core_app.config["API_ROOT"] + "/contacts/labs/" + str(lab_id) - response = client.delete(route, headers=headers) - assert response.status_code == 200, "[DELETE] %s " % (route) - - route = ispyb_core_app.config["API_ROOT"] + "/beamline/setups" - response = client.get(route, headers=headers) - beamline_setup_id = response.json["data"]["rows"][-1]["beamLineSetupId"] - route = ( - ispyb_core_app.config["API_ROOT"] + "/beamline/setups/" + str(beamline_setup_id) - ) - response = client.delete(route, headers=headers) - assert response.status_code == 200, "[DELETE] %s " % (route) - - - route = ispyb_core_app.config["API_ROOT"] + "/samples" - response = 
client.get(route, headers=headers) - sample_id = response.json["data"]["rows"][-1]["blSampleId"] - route = ispyb_core_app.config["API_ROOT"] + "/samples/" + str(sample_id) - response = client.delete(route, headers=headers) - assert response.status_code == 200, "[DELETE] %s " % (route) - - route = ispyb_core_app.config["API_ROOT"] + "/proposals" - response = client.get(route, headers=headers) - proposal_id = response.json["data"]["rows"][-1]["proposalId"] - - return - route = ispyb_core_app.config["API_ROOT"] + "/proposals/" + str(proposal_id) - response = client.delete(route, headers=headers) - assert response.status_code == 200, "[DELTE] %s " % (route) diff --git a/tests/core/unit/test_models.py b/tests/core/unit/test_models.py deleted file mode 100644 index 2fd7364e..00000000 --- a/tests/core/unit/test_models.py +++ /dev/null @@ -1,48 +0,0 @@ -from tests.core import data - -from pyispyb.core import schemas - - -def test_data_collection_model(): - data_collection = schemas.data_collection.DataCollectionSchema().dump( - data.test_data_collection - ) - - assert data_collection.errors == {} - - -def test_proposal_model(): - proposal = schemas.proposal.ProposalSchema().dump(data.test_proposal) - - assert proposal.errors == {} - - -def test_session_model(): - pass - # session = schemas.session.SessionSchema().dump(data.test_session) - - # assert session.errors == {} - - -def test_lab_contact_model(): - lab_contact = schemas.lab_contact.LabContactSchema().dump(data.test_lab_contact) - - assert lab_contact.errors == {} - - -def test_shipping_model(): - shipmenmt = schemas.shipping.ShippingSchema().dump(data.test_shippment) - - assert shipmenmt.errors == {} - - -def test_laboratory_model(): - laboratory = schemas.laboratory.LaboratorySchema().dump(data.test_laboratory) - - assert laboratory.errors == {} - - -def test_person_model(): - person = schemas.person.PersonSchema().dump(data.test_person) - - assert person.errors == {} diff --git a/tests/requirements.txt b/tests/requirements.txt deleted file mode 100644 index 203d1c91..00000000 --- a/tests/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -pylint - -pytest>=4.6 -codecov -pytest-cov<2.6.0 diff --git a/tests/ssx/conftest.py b/tests/ssx/conftest.py deleted file mode 100644 index 7f264943..00000000 --- a/tests/ssx/conftest.py +++ /dev/null @@ -1,53 +0,0 @@ -""" -Project: py-ispyb -https://github.com/ispyb/py-ispyb - -This file is part of py-ispyb software. - -py-ispyb is free software: you can redistribute it and/or modify -it under the terms of the GNU Lesser General Public License as published by -the Free Software Foundation, either version 3 of the License, or -(at your option) any later version. - -py-ispyb is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. 
- -You should have received a copy of the GNU Lesser General Public License -along -""" - - - - -__license__ = "LGPLv3+" - - -import os -import sys -import pytest - - -TESTS_DIR = os.path.abspath(os.path.dirname(__file__)) -ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) -sys.path.insert(0, ROOT_DIR) - -from pyispyb import create_app - -@pytest.fixture(scope="session") -def ispyb_ssx_app(): - ssx_app = create_app("ispyb_ssx_config.yml", "test") - with ssx_app.app_context(): - yield ssx_app - - -@pytest.fixture() -def ispyb_ssx_token(ispyb_ssx_app): - client = ispyb_ssx_app.test_client() - api_root = ispyb_ssx_app.config["API_ROOT"] - - resp = client.get( - api_root + "/auth/login", headers={"username": "admin", "password": "pass"} - ) - return resp.json["token"] diff --git a/tests/ssx/data.py b/tests/ssx/data.py deleted file mode 100644 index d8de0938..00000000 --- a/tests/ssx/data.py +++ /dev/null @@ -1,19 +0,0 @@ -sample_delivery_device_list = [ - {"type": "photoChip", "descriptionJson": {"name": "Test Photo chip", "speed": 10}} -] - -crystal_surry = { - "name": "Test crystal slurry", - "crystalSizeDistributionId": 10, - "crystalDensity": 20, - "bufferId": 1, - "crystalId": 3918, -} - -loaded_sample = { - "name": "testLoadedSample", - "sampleStockId": 0, - "sampleDeliveryDeviceId": 1, - "loadingPattern": 0, - "descriptionJson": {"temp": 20}, -} diff --git a/tests/ssx/functional/test_ssx_loaded_sample.py b/tests/ssx/functional/test_ssx_loaded_sample.py deleted file mode 100644 index 58a394dd..00000000 --- a/tests/ssx/functional/test_ssx_loaded_sample.py +++ /dev/null @@ -1,45 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. -# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . 
- - -from tests.ssx.data import sample_delivery_device_list - - -def test_ssx_loaded_samples(ispyb_ssx_app, ispyb_ssx_token): - client = ispyb_ssx_app.test_client() - api_root = ispyb_ssx_app.config["API_ROOT"] - headers = {"Authorization": "Bearer " + ispyb_ssx_token} - response = client.get(api_root + "/samples", headers=headers) - assert response.status_code == 200, "Wrong status code" - assert response.json - - -def test_sample_delivery_devices(ispyb_ssx_app, ispyb_ssx_token): - client = ispyb_ssx_app.test_client() - route = ispyb_ssx_app.config["API_ROOT"] + "/samples/delivery_devices" - mimetype = "application/json" - headers = {"Authorization": "Bearer " + ispyb_ssx_token, "Content-Type": mimetype} - - for sample_deliver_device in sample_delivery_device_list: - response = client.post(route, json=sample_deliver_device, headers=headers) - assert response.status_code == 200, "Wrong status code" - assert response.json - assert response.json["type"] == sample_deliver_device["type"] - assert response.content_type == mimetype diff --git a/uvicorn.sh b/uvicorn.sh new file mode 100755 index 00000000..bf9a1932 --- /dev/null +++ b/uvicorn.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +export ISPYB_ENVIRONMENT="dev" + +uvicorn pyispyb.app.main:app --reload diff --git a/wsgi.py b/wsgi.py deleted file mode 100644 index c5a21ef3..00000000 --- a/wsgi.py +++ /dev/null @@ -1,41 +0,0 @@ -# encoding: utf-8 -# -# Project: py-ispyb -# https://github.com/ispyb/py-ispyb -# -# This file is part of py-ispyb software. -# -# py-ispyb is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# py-ispyb is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with py-ispyb. If not, see . - -import sys - -from pyispyb import create_app - - -__license__ = "LGPLv3+" - - -if len(sys.argv) > 3: - config_filename = sys.argv[1] - run_mode = sys.argv[2] - port = sys.argv[3] -else: - config_filename = "ispyb_core_config.yml" - run_mode = "dev" - port = 5000 - -debug = run_mode == "dev" - -app = create_app(config_filename, run_mode) -app.run(host='0.0.0.0', port=port, debug=debug)