Commit 310bf70

Merge pull request #471 from bioimage-io/dev
Stability improvements
2 parents f292030 + 2cdfcf6 commit 310bf70

18 files changed: +226 -190 lines

.github/workflows/build.yaml

Lines changed: 52 additions & 64 deletions
@@ -2,9 +2,9 @@ name: Test and Deploy bioimageio.core
 
 on:
   push:
-    branches: [ main ]
+    branches: [main]
   pull_request:
-    branches: [ "**" ]
+    branches: ['**']
 
 defaults:
   run:
@@ -58,81 +58,69 @@ jobs:
       matrix:
         include:
           - python-version: '3.9'
-            conda-env: dev
-            spec: conda
             numpy-version: 1
           - python-version: '3.9'
-            conda-env: dev
-            spec: main
             numpy-version: 2
           - python-version: '3.10'
-            conda-env: full
             run-expensive-tests: true
             report-coverage: true
             save-cache: true
-            spec: conda
             numpy-version: 1
           - python-version: '3.11'
-            conda-env: dev
-            spec: main
             numpy-version: 2
           - python-version: '3.12'
-            conda-env: dev
-            spec: conda
             numpy-version: 1
           # - python-version: '3.13'
-          #   conda-env: '313'
-          #   spec: main
           #   numpy-version: 2
 
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v6
-        with:
-          python-version: ${{matrix.python-version}}
-          cache: 'pip'
-      - name: Install dependencies
-        run: |
-          pip install --upgrade pip
-          pip install -e .[dev] numpy==${{matrix.numpy-version}}.*
-      - name: Pyright
-        if: matrix.run-expensive-tests
-        run: |
-          pyright --version
-          pyright -p pyproject.toml --pythonversion ${{ matrix.python-version }}
-      - name: Restore bioimageio cache ${{needs.populate-cache.outputs.cache-key}}
-        uses: actions/cache/restore@v4
-        with:
-          path: bioimageio_cache
-          key: ${{needs.populate-cache.outputs.cache-key}}
-      - name: pytest
-        run: pytest --cov bioimageio --cov-report xml --cov-append --capture no --disable-pytest-warnings
-        env:
-          BIOIMAGEIO_CACHE_PATH: bioimageio_cache
-          RUN_EXPENSIVE_TESTS: ${{ matrix.run-expensive-tests && 'true' || 'false' }}
-      - name: Save bioimageio cache ${{needs.populate-cache.outputs.cache-key}}
-        if: matrix.save-cache
-        uses: actions/cache/save@v4
-        with:
-          path: bioimageio_cache
-          key: ${{needs.populate-cache.outputs.cache-key}}
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v6
+        with:
+          python-version: ${{matrix.python-version}}
+          cache: 'pip'
+      - name: Install dependencies
+        run: |
+          pip install --upgrade pip
+          pip install -e .[dev] numpy==${{matrix.numpy-version}}.*
+      - name: Pyright
+        if: matrix.run-expensive-tests # pyright is not expensive, but we only want to run it once due to otherwise inconsistent typing
+        run: |
+          pyright --version
+          pyright -p pyproject.toml --pythonversion ${{ matrix.python-version }}
+      - name: Restore bioimageio cache ${{needs.populate-cache.outputs.cache-key}}
+        uses: actions/cache/restore@v4
+        with:
+          path: bioimageio_cache
+          key: ${{needs.populate-cache.outputs.cache-key}}
+      - name: pytest
+        run: pytest --cov bioimageio --cov-report xml --cov-append --capture no --disable-pytest-warnings
+        env:
+          BIOIMAGEIO_CACHE_PATH: bioimageio_cache
+          RUN_EXPENSIVE_TESTS: ${{ matrix.run-expensive-tests && 'true' || 'false' }}
+      - name: Save bioimageio cache ${{needs.populate-cache.outputs.cache-key}}
+        if: matrix.save-cache
+        uses: actions/cache/save@v4
+        with:
+          path: bioimageio_cache
+          key: ${{needs.populate-cache.outputs.cache-key}}
 
-      - if: matrix.report-coverage && github.event_name == 'pull_request'
-        uses: orgoro/[email protected]
-        with:
-          coverageFile: coverage.xml
-          token: ${{secrets.GITHUB_TOKEN}}
-      - if: matrix.report-coverage && github.ref == 'refs/heads/main'
-        run: |
-          pip install genbadge[coverage]
-          genbadge coverage --input-file coverage.xml --output-file ./dist/coverage/coverage-badge.svg
-          coverage html -d dist/coverage
-      - if: matrix.report-coverage && github.ref == 'refs/heads/main'
-        uses: actions/upload-artifact@v4
-        with:
-          name: coverage
-          retention-days: 1
-          path: dist
+      - if: matrix.report-coverage && github.event_name == 'pull_request'
+        uses: orgoro/[email protected]
+        with:
+          coverageFile: coverage.xml
+          token: ${{secrets.GITHUB_TOKEN}}
+      - if: matrix.report-coverage && github.ref == 'refs/heads/main'
+        run: |
+          pip install genbadge[coverage]
+          genbadge coverage --input-file coverage.xml --output-file ./dist/coverage/coverage-badge.svg
+          coverage html -d dist/coverage
+      - if: matrix.report-coverage && github.ref == 'refs/heads/main'
+        uses: actions/upload-artifact@v4
+        with:
+          name: coverage
+          retention-days: 1
+          path: dist
 
     conda-build:
       needs: test
@@ -145,7 +133,7 @@ jobs:
         with:
           auto-update-conda: true
           auto-activate-base: true
-          activate-environment: ""
+          activate-environment: ''
           channel-priority: strict
           miniforge-version: latest
           conda-solver: libmamba
@@ -242,14 +230,14 @@ jobs:
         uses: pypa/gh-action-pypi-publish@release/v1.12
         with:
           user: __token__
-          password: "${{ secrets.PYPI_TOKEN }}"
+          password: '${{ secrets.PYPI_TOKEN }}'
           packages-dir: dist/
           verbose: true
       - name: Publish the release notes
         if: github.ref == 'refs/heads/main'
         uses: release-drafter/[email protected]
         with:
           publish: "${{ steps.tag-version.outputs.new_tag != '' }}"
-          tag: "${{ steps.tag-version.outputs.new_tag }}"
+          tag: '${{ steps.tag-version.outputs.new_tag }}'
         env:
-          GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
+          GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
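
Note on the workflow above: the pytest step in the test job exports RUN_EXPENSIVE_TESTS so the test suite can decide whether to run slow tests. This diff does not show how bioimageio.core's tests consume that variable; the snippet below is only a hedged sketch of how a pytest suite could gate expensive tests on such a flag (the conftest module, marker name, and test are hypothetical):

# conftest.py: illustrative sketch only, not taken from bioimageio.core
import os

import pytest

# The workflow sets RUN_EXPENSIVE_TESTS via `env:`; default to skipping.
RUN_EXPENSIVE_TESTS = os.getenv("RUN_EXPENSIVE_TESTS", "false").lower() == "true"

# Reusable marker: tests decorated with it only run when the flag is true.
expensive = pytest.mark.skipif(
    not RUN_EXPENSIVE_TESTS,
    reason="set RUN_EXPENSIVE_TESTS=true to run expensive tests",
)


@expensive
def test_full_model_inference():
    ...  # placeholder for a long-running test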

.pre-commit-config.yaml

Lines changed: 0 additions & 4 deletions
@@ -1,8 +1,4 @@
 repos:
-  - repo: https://github.com/ambv/black
-    rev: 25.1.0
-    hooks:
-      - id: black-jupyter
   - repo: https://github.com/astral-sh/ruff-pre-commit
     rev: v0.12.8
     hooks:

README.md

Lines changed: 8 additions & 1 deletion
@@ -364,9 +364,16 @@ may be controlled with the `LOGURU_LEVEL` environment variable.
 
 ## Changelog
 
+### 0.9.3
+
+- bump bioimageio.spec library version to 0.5.5.5
+- more robust test model reporting
+- improved user input axis interpretation
+- fixed conda subprocess calls
+
 ### 0.9.2
 
-fix model inference tolerance reporting
+- fix model inference tolerance reporting
 
 ### 0.9.1
 
example/dataset_statistics_demo.ipynb

Lines changed: 8 additions & 6 deletions
@@ -329,12 +329,14 @@
    "source": [
     "# compute dataset statistics on all samples\n",
     "# (in this case we should really use the non-overlapping tiles as samples in dataset_for_initial_statistics)\n",
-    "with create_prediction_pipeline(\n",
-    "    bioimageio_model=model_resource,\n",
-    "    dataset_for_initial_statistics=dataset,\n",
-    "    update_dataset_stats_for_n_samples=0,  # if you call the prediction pipeline more than len(dataset)\n",
-    "    # times you might want to set this to zero to avoid further updates to the dataset statistics\n",
-    ") as pp:\n",
+    "with (\n",
+    "    create_prediction_pipeline(\n",
+    "        bioimageio_model=model_resource,\n",
+    "        dataset_for_initial_statistics=dataset,\n",
+    "        update_dataset_stats_for_n_samples=0,  # if you call the prediction pipeline more than len(dataset)\n",
+    "        # times you might want to set this to zero to avoid further updates to the dataset statistics\n",
+    "    ) as pp\n",
+    "):\n",
     "    only_init_dataset_stats = process_dataset(pp, dataset)"
    ]
   },

example/model_usage.ipynb

Lines changed: 0 additions & 1 deletion
@@ -111,7 +111,6 @@
     "        Mapping[str, NDArray[Any]], Mapping[TensorId, Union[Tensor, NDArray[Any]]]\n",
     "    ],\n",
     ") -> None:\n",
-    "\n",
     "    for title, image in images.items():\n",
     "        if isinstance(image, Tensor):\n",
     "            input_array = image.data.data\n",

pyproject.toml

Lines changed: 2 additions & 9 deletions
@@ -6,7 +6,7 @@ requires-python = ">=3.9"
 readme = "README.md"
 dynamic = ["version"]
 dependencies = [
-    "bioimageio.spec ==0.5.5.4",
+    "bioimageio.spec ==0.5.5.5",
     "h5py",
     "imagecodecs",
     "imageio>=2.10",
@@ -44,15 +44,14 @@ onnx = ["onnxruntime"]
 pytorch = ["torch>=1.6,<3", "torchvision>=0.21", "keras>=3.0,<4"]
 tensorflow = ["tensorflow", "keras>=2.15,<4"]
 dev = [
-    "black",
     "cellpose",  # for model testing
     "crick",
     "httpx",
-    "jupyter-black",
     "jupyter",
     "keras>=3.0,<4",
     "matplotlib",
     "monai",  # for model testing
+    "numpy",
     "onnx",
     "onnxruntime",
     "packaging>=17.0",
@@ -79,12 +78,6 @@ where = ["src/"]
 [tool.setuptools.dynamic]
 version = { attr = "bioimageio.core.__version__" }
 
-[tool.black]
-line-length = 88
-extend-exclude = "/presentations/"
-target-version = ["py39", "py310", "py311", "py312"]
-preview = true
-
 [tool.pyright]
 exclude = [
     "**/__pycache__",

src/bioimageio/core/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@
 """
 # ruff: noqa: E402
 
-__version__ = "0.9.2"
+__version__ = "0.9.3"
 from loguru import logger
 
 logger.disable("bioimageio.core")
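
As context for the hunk above: bioimageio.core disables its own loguru logger on import, and the README hunk earlier notes that verbosity may be controlled with the `LOGURU_LEVEL` environment variable. A minimal usage sketch (standard loguru API, not part of this diff) for re-enabling the library's log output in a downstream script:

import os

# loguru reads LOGURU_LEVEL when it is first imported, so set it beforehand.
os.environ.setdefault("LOGURU_LEVEL", "INFO")

from loguru import logger

import bioimageio.core  # noqa: F401  (disables its own logger on import)

# Re-enable log records emitted from the bioimageio.core namespace.
logger.enable("bioimageio.core")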

src/bioimageio/core/_magic_tensor_ops.py

Lines changed: 16 additions & 6 deletions
@@ -71,12 +71,14 @@ def __ge__(self, other: _Compatible) -> Self:
 
     def __eq__(self, other: _Compatible) -> Self:  # type: ignore[override]
         return self._binary_op(
-            other, nputils.array_eq  # pyright: ignore[reportUnknownArgumentType]
+            other,
+            nputils.array_eq,  # pyright: ignore[reportUnknownArgumentType]
         )
 
     def __ne__(self, other: _Compatible) -> Self:  # type: ignore[override]
         return self._binary_op(
-            other, nputils.array_ne  # pyright: ignore[reportUnknownArgumentType]
+            other,
+            nputils.array_ne,  # pyright: ignore[reportUnknownArgumentType]
         )
 
     # When __eq__ is defined but __hash__ is not, then an object is unhashable,
@@ -171,22 +173,30 @@ def __invert__(self) -> Self:
 
     def round(self, *args: Any, **kwargs: Any) -> Self:
         return self._unary_op(
-            ops.round_, *args, **kwargs  # pyright: ignore[reportUnknownArgumentType]
+            ops.round_,  # pyright: ignore[reportUnknownArgumentType]
+            *args,
+            **kwargs,
         )
 
     def argsort(self, *args: Any, **kwargs: Any) -> Self:
         return self._unary_op(
-            ops.argsort, *args, **kwargs  # pyright: ignore[reportUnknownArgumentType]
+            ops.argsort,  # pyright: ignore[reportUnknownArgumentType]
+            *args,
+            **kwargs,
         )
 
     def conj(self, *args: Any, **kwargs: Any) -> Self:
         return self._unary_op(
-            ops.conj, *args, **kwargs  # pyright: ignore[reportUnknownArgumentType]
+            ops.conj,  # pyright: ignore[reportUnknownArgumentType]
+            *args,
+            **kwargs,
        )
 
     def conjugate(self, *args: Any, **kwargs: Any) -> Self:
         return self._unary_op(
-            ops.conjugate, *args, **kwargs  # pyright: ignore[reportUnknownArgumentType]
+            ops.conjugate,  # pyright: ignore[reportUnknownArgumentType]
+            *args,
+            **kwargs,
         )
 
     __add__.__doc__ = operator.add.__doc__
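
For orientation, the methods in this file all funnel through two hooks, `_binary_op` and `_unary_op`, in the style of xarray's arithmetic mixins: the magic methods only choose an operator function and delegate. A minimal, self-contained sketch of that dispatch pattern (toy classes, not the actual bioimageio.core implementation):

import operator
from typing import Any, Callable


class MagicOpsMixin:
    """Route magic methods through two hooks that a subclass implements."""

    def _binary_op(self, other: Any, f: Callable[[Any, Any], Any]) -> "MagicOpsMixin":
        raise NotImplementedError

    def _unary_op(self, f: Callable[..., Any], *args: Any, **kwargs: Any) -> "MagicOpsMixin":
        raise NotImplementedError

    def __add__(self, other: Any) -> "MagicOpsMixin":
        return self._binary_op(other, operator.add)

    def __neg__(self) -> "MagicOpsMixin":
        return self._unary_op(operator.neg)


class Scalar(MagicOpsMixin):
    """Toy subclass: wraps a float and returns new wrapped results."""

    def __init__(self, value: float) -> None:
        self.value = value

    def _binary_op(self, other: Any, f: Callable[[Any, Any], Any]) -> "Scalar":
        other_value = other.value if isinstance(other, Scalar) else other
        return Scalar(f(self.value, other_value))

    def _unary_op(self, f: Callable[..., Any], *args: Any, **kwargs: Any) -> "Scalar":
        return Scalar(f(self.value, *args, **kwargs))


print((Scalar(2) + Scalar(3)).value)  # 5
print((-Scalar(4)).value)  # -4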
