Commits
30 commits
44f51d3
Model fitting works lazily
kephale Apr 6, 2024
c2ba1f3
Prediction has worked with this
kephale Apr 7, 2024
71c4b71
Fix count aggregation
kephale Apr 7, 2024
ad429ef
Launch cellcanvas from copick explorer, legend matches
kephale Apr 8, 2024
857ee01
Update painting and prediction colormaps
kephale Apr 8, 2024
5e12086
Move hard coded scaling to point data, resolves painting issue
kephale Apr 8, 2024
0076c51
Kludge to get colormaps working
kephale Apr 9, 2024
6f0a018
Add support for toggling layers
kephale Apr 12, 2024
d143192
Train and predict on all tomograms in a copick project
kephale Apr 12, 2024
723c53a
Activate label when clicked on in the legend
kephale Apr 15, 2024
6fab1ce
Update prediction to account for chunkwise predictions
kephale Apr 15, 2024
e063f0f
Add support for labels with no annotations
kephale Apr 15, 2024
59bda09
Add support for importing models
kephale Apr 15, 2024
c9dbd23
Better multiscale support, better UI for fit/predict on all
kephale Apr 17, 2024
e5c106d
Fix default painting/prediction path, remove settings from UI
kephale Apr 17, 2024
ec75334
Clean up path handling and support configs per run
kephale Apr 17, 2024
b999512
Make embedding computation button triggered, improved config support
kephale Apr 17, 2024
942cae3
Support for training on all pairs of images (denoised, wbp, etc.)
kephale Apr 17, 2024
2c6e7dc
Fix for removed model dropdown
kephale Apr 22, 2024
faf4f13
Refactor into if main clause
kephale Apr 22, 2024
24e2f28
Move copick widget into source tree
kephale Apr 22, 2024
7cdc73e
Add __init__ for _copick
kephale Apr 22, 2024
e8f0d55
Update attribute reference for copick project
kephale Apr 22, 2024
f3f625a
Remove hack for adding background label, now it comes from copick
kephale Apr 22, 2024
54e90ed
Update default paths, use more copick config, lighter model
kephale Apr 22, 2024
51ca7bb
Fix: remove hard coded color for label 9
kephale Apr 23, 2024
ae0d475
Better lazy loading of UI, support for opening zarr stores
kephale Apr 24, 2024
7c41d25
More updates for loading from zarr stores
kephale Apr 24, 2024
d48c4e2
Features as a dict
kephale Apr 24, 2024
2a5d767
Add LRU cache to stores to get some speedup
kephale Apr 25, 2024
670 changes: 670 additions & 0 deletions examples/run_app_copick.py

Large diffs are not rendered by default.

5 changes: 5 additions & 0 deletions src/cellcanvas/_app/main_app.py
@@ -30,6 +30,11 @@ def __init__(
             extra_logging=self.verbose,
         )
 
+
+    def update_data_manager(self, data: DataManager):
+        self.data = data
+        self.semantic_segmentor.update_data_manager(data)
+
     @property
     def mode(self) -> AppMode:
         return self._mode
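The new update_data_manager hook lets the copick widget swap datasets into a running app. A minimal usage sketch, assuming an app object exposing this method; the switch_run helper and surrounding names are illustrative, not taken from this PR:

# Hypothetical glue code: rebuild the DataManager when the user selects a
# different run, then push it into the running app, which forwards it to the
# semantic segmentor.
from cellcanvas.data.data_manager import DataManager
from cellcanvas.data.data_set import DataSet

def switch_run(app, dataset: DataSet) -> None:
    new_manager = DataManager(datasets=[dataset])
    app.update_data_manager(new_manager)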
Empty file.
584 changes: 584 additions & 0 deletions src/cellcanvas/_copick/widget.py

Large diffs are not rendered by default.

39 changes: 22 additions & 17 deletions src/cellcanvas/data/data_manager.py
@@ -3,6 +3,7 @@
 import numpy as np
 from napari.utils.events.containers import SelectableEventedList
 from zarr import Array
+import dask.array as da
 
 from cellcanvas.data.data_set import DataSet
 
@@ -15,6 +16,7 @@ def __init__(self, datasets: Optional[List[DataSet]] = None):
             datasets = [datasets]
         self.datasets = SelectableEventedList(datasets)
 
+    # Normal version
     def get_training_data(self) -> Tuple[Array, Array]:
         """Get the pixel-wise semantic segmentation training data for datasets.
 
@@ -30,23 +32,26 @@ def get_training_data(self) -> Tuple[Array, Array]:
         features = []
         labels = []
         for dataset in self.datasets:
-            # get the features and labels
-            # todo make lazier
-            dataset_features = np.asarray(dataset.concatenated_features)
-            dataset_labels = np.asarray(dataset.labels)
-
-            # reshape the data
-            dataset_labels = dataset_labels.flatten()
-            reshaped_features = dataset_features.reshape(
-                -1, dataset_features.shape[-1]
-            )
-
-            # Filter features where labels are greater than 0
-            valid_labels = dataset_labels > 0
-            filtered_features = reshaped_features[valid_labels, :]
-            filtered_labels = dataset_labels[valid_labels] - 1  # Adjust labels
+            dataset_features = da.asarray(dataset.concatenated_features)
+            dataset_labels = da.asarray(dataset.labels)
+            # Flatten labels for boolean indexing
+            flattened_labels = dataset_labels.flatten()
+
+            # Compute valid_indices based on labels > 0
+            valid_indices = da.nonzero(flattened_labels > 0)[0].compute()
+
+            # Flatten only the spatial dimensions of the dataset_features while preserving the feature dimension
+            c, h, w, d = dataset_features.shape
+            reshaped_features = dataset_features.reshape(c, h * w * d)
+
+            # We need to apply valid_indices for each feature dimension separately
+            filtered_features_list = [da.take(reshaped_features[i, :], valid_indices, axis=0) for i in range(c)]
+            filtered_features = da.stack(filtered_features_list, axis=1)
+
+            # Adjust labels
+            filtered_labels = flattened_labels[valid_indices] - 1
 
             features.append(filtered_features)
             labels.append(filtered_labels)
 
-        return np.concatenate(features), np.concatenate(labels)
+        return da.concatenate(features), da.concatenate(labels)
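With get_training_data now returning dask arrays, no voxel data is read until a model is actually fit. A minimal consumption sketch, assuming a scikit-learn classifier downstream; the fit_segmenter helper and classifier choice are illustrative, not part of this PR:

# Sketch only: materialize the lazily assembled training data and fit a model.
from sklearn.ensemble import RandomForestClassifier

def fit_segmenter(data_manager):
    features, labels = data_manager.get_training_data()  # dask arrays, nothing read yet
    X = features.compute()  # shape (n_labeled_voxels, n_feature_channels)
    y = labels.compute()    # class ids, shifted so background (0) is excluded
    clf = RandomForestClassifier(n_estimators=50, n_jobs=-1)
    clf.fit(X, y)
    return clf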
62 changes: 61 additions & 1 deletion src/cellcanvas/data/data_set.py
@@ -6,6 +6,8 @@
 import zarr
 from zarr import Array
 
+from ome_zarr.io import ZarrLocation
+from ome_zarr.reader import Multiscales
 
 @dataclass
 class DataSet:
@@ -62,7 +64,11 @@ def from_paths(
                 dimension_separator=".",
             )
         else:
-            labels = zarr.open(labels_path, "a")
+            if Multiscales.matches(ZarrLocation(labels_path)):
+                labels = zarr.open(os.path.join(labels_path, "0"),
+                                   "a")
+            else:
+                labels = zarr.open(labels_path, "a")
 
         # get the segmentation
         if (not os.path.isdir(segmentation_path)) and make_missing_datasets:
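The labels branch above is the only multiscale-aware path in from_paths: if the labels location is an OME-Zarr multiscale group, the highest-resolution scale ("0") is opened instead of the group itself. A standalone sketch of the same check, with the path name as a placeholder:

# Sketch: open scale "0" when a labels path is an OME-Zarr multiscale group,
# otherwise open it directly as a plain zarr array.
import os
import zarr
from ome_zarr.io import ZarrLocation
from ome_zarr.reader import Multiscales

def open_labels(labels_path: str):
    if Multiscales.matches(ZarrLocation(labels_path)):
        return zarr.open(os.path.join(labels_path, "0"), "a")
    return zarr.open(labels_path, "a")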
@@ -83,3 +89,57 @@ labels=labels,
             labels=labels,
             segmentation=segmentation,
         )
+
+    @classmethod
+    def from_stores(
+        cls,
+        image_store,
+        features_store,
+        labels_store,
+        segmentation_store,
+    ):
+        """Create a DataSet from a set of zarr stores.
+
+        todo: add ability to create missing labels/segmentations
+        """
+
+        # TODO rewrite this to copy everything to be local
+
+        # get the image
+        # TODO fix hardcoded scale for pickathon
+        image = zarr.open(zarr.storage.LRUStoreCache(image_store, None), "r")["0"]
+
+        # get the features
+        features = {"features": zarr.open(zarr.storage.LRUStoreCache(features_store, None), "r")}
+
+        group_name = "labels"
+
+        # get the labels
+        labels = zarr.open_group(zarr.storage.LRUStoreCache(labels_store, None),
+                                 mode="a")
+        if group_name in labels:
+            labels = labels[group_name]
+        else:
+            labels = labels.create_dataset(group_name,
+                                           shape=image.shape,
+                                           dtype="i4")
+
+        # get the segmentation
+        segmentation = zarr.open_group(zarr.storage.LRUStoreCache(segmentation_store, None),
+                                       mode="a")
+        if group_name in segmentation:
+            segmentation = segmentation[group_name]
+        else:
+            segmentation = segmentation.create_dataset(group_name,
+                                                       shape=image.shape,
+                                                       dtype="i4")
+
+        # TODO start a background thread that triggers downloads of the zarrs
+
+        return cls(
+            image=image,
+            features=features,
+            labels=labels,
+            segmentation=segmentation,
+        )
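A minimal sketch of calling from_stores; the local directory layout and DirectoryStore usage are assumptions for illustration (in this PR the stores would normally come from a copick project), not part of the change itself:

# Sketch only: build a DataSet from four zarr stores. from_stores wraps each
# store in an LRUStoreCache, so repeated chunk reads are served from memory.
import zarr
from cellcanvas.data.data_set import DataSet

dataset = DataSet.from_stores(
    image_store=zarr.DirectoryStore("run_001/image.zarr"),
    features_store=zarr.DirectoryStore("run_001/features.zarr"),
    labels_store=zarr.DirectoryStore("run_001/labels.zarr"),
    segmentation_store=zarr.DirectoryStore("run_001/segmentation.zarr"),
)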
