
Commit f96218c

Merge branch 'SpikeInterface:main' into drift_docs_to_sphinx_gallery
2 parents: ec08b8d + 1711188


230 files changed: +3609, -2335 lines changed


.github/import_test.py

Lines changed: 9 additions & 2 deletions
@@ -18,7 +18,7 @@
 
 n_samples = 10
 # Note that the symbols at the end are for centering the table
-markdown_output = f"## \n\n| Imported Module ({n_samples=}) | Importing Time (seconds) | Standard Deviation (seconds) |\n| :--: | :--------------: | :------------------: |\n"
+markdown_output = f"## \n\n| Imported Module ({n_samples=}) | Importing Time (seconds) | Standard Deviation (seconds) | Times List (seconds) |\n| :--: | :--------------: | :------------------: | :-------------: |\n"
 
 exceptions = []
 
@@ -45,10 +45,17 @@
         time_taken = float(result.stdout.strip())
         time_taken_list.append(time_taken)
 
+    # for time in time_taken_list:
+    # Uncomment once exporting import is fixed
+    #     if time > 2.5:
+    #         exceptions.append(f"Importing {import_statement} took too long: {time:.2f} seconds")
+    #         break
+
     if time_taken_list:
         avg_time_taken = sum(time_taken_list) / len(time_taken_list)
         std_dev_time_taken = math.sqrt(sum((x - avg_time_taken) ** 2 for x in time_taken_list) / len(time_taken_list))
-        markdown_output += f"| `{import_statement}` | {avg_time_taken:.2f} | {std_dev_time_taken:.2f} |\n"
+        times_list_str = ", ".join(f"{time:.2f}" for time in time_taken_list)
+        markdown_output += f"| `{import_statement}` | {avg_time_taken:.2f} | {std_dev_time_taken:.2f} | {times_list_str} |\n"
 
 if exceptions:
     raise Exception("\n".join(exceptions))
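The change above adds a per-run "Times List" column so the summary table reports every sample, not just the mean and standard deviation. Below is a rough, hedged sketch of the underlying pattern (timing an import in a fresh interpreter and keeping each sample); it is not the repository script, and the helper name time_import is chosen only for illustration:

    # Rough sketch (not the repository script): time a module import in a fresh
    # subprocess, repeat it a few times, and keep every sample so the individual
    # runs can be reported next to the mean, as the new "Times List" column does.
    import subprocess
    import sys


    def time_import(import_statement: str, n_samples: int = 10) -> list[float]:
        """Return one wall-clock import time per fresh interpreter run."""
        script = (
            "import time; start = time.perf_counter(); "
            f"{import_statement}; "
            "print(time.perf_counter() - start)"
        )
        times = []
        for _ in range(n_samples):
            result = subprocess.run(
                [sys.executable, "-c", script], capture_output=True, text=True, check=True
            )
            times.append(float(result.stdout.strip()))
        return times


    if __name__ == "__main__":
        # Any importable statement works here; spikeinterface.core matches the workflow's use case.
        runs = time_import("import spikeinterface.core", n_samples=3)
        print(", ".join(f"{t:.2f}" for t in runs))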

.github/workflows/core-test.yml

Lines changed: 2 additions & 2 deletions
@@ -22,7 +22,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-python@v5
         with:
-          python-version: '3.10'
+          python-version: '3.11'
       - name: Install dependencies
         run: |
           git config --global user.email "[email protected]"
@@ -31,7 +31,7 @@ jobs:
           pip install -e .[test_core]
       - name: Test core with pytest
         run: |
-          pytest -vv -ra --durations=0 --durations-min=0.001 src/spikeinterface/core | tee report.txt; test ${PIPESTATUS[0]} -eq 0 || exit 1
+          pytest -m "core" -vv -ra --durations=0 --durations-min=0.001 | tee report.txt; test $? -eq 0 || exit 1
         shell: bash # Necessary for pipeline to work on windows
       - name: Build test summary
         run: |

.github/workflows/test_imports.yml

Lines changed: 3 additions & 4 deletions
@@ -22,13 +22,13 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-python@v5
         with:
-          python-version: "3.10"
+          python-version: "3.11"
       - name: Install Spikeinterface with only core dependencies
         run: |
           git config --global user.email "[email protected]"
           git config --global user.name "CI Almighty"
           python -m pip install -U pip # Official recommended way
-          pip install -e . # This should install core only
+          pip install . # This should install core only
       - name: Profile Imports
         run: |
           echo "## OS: ${{ matrix.os }}" >> $GITHUB_STEP_SUMMARY
@@ -38,8 +38,7 @@
         shell: bash # Necessary for pipeline to work on windows
       - name: Install in full mode
         run: |
-          python -m pip install -U pip # Official recommended way
-          pip install -e .[full]
+          pip install .[full]
       - name: Profile Imports with full
         run: |
           # Add a header to separate the two profiles

conftest.py

Lines changed: 8 additions & 12 deletions
@@ -25,31 +25,27 @@ def pytest_sessionstart(session):
     for mark_name in mark_names:
         (pytest.global_test_folder / mark_name).mkdir()
 
-
 def pytest_collection_modifyitems(config, items):
     """
     This function marks (in the pytest sense) the tests according to their name and file_path location
     Marking them in turn allows the tests to be run by using the pytest -m marker_name option.
     """
 
-
-    # python 3.4/3.5 compat: rootdir = pathlib.Path(str(config.rootdir))
     rootdir = Path(config.rootdir)
-
+    modules_location = rootdir / "src" / "spikeinterface"
     for item in items:
-        rel_path = Path(item.fspath).relative_to(rootdir)
-        if "sorters" in str(rel_path):
-            if "/internal/" in str(rel_path):
+        rel_path = Path(item.fspath).relative_to(modules_location)
+        module = rel_path.parts[0]
+        if module == "sorters":
+            if "internal" in rel_path.parts:
                 item.add_marker("sorters_internal")
-            elif "/external/" in str(rel_path):
+            elif "external" in rel_path.parts:
                 item.add_marker("sorters_external")
             else:
                 item.add_marker("sorters")
         else:
-            for mark_name in mark_names:
-                if f"/{mark_name}/" in str(rel_path):
-                    mark = getattr(pytest.mark, mark_name)
-                    item.add_marker(mark)
+            item.add_marker(module)
+
 
 
 def pytest_sessionfinish(session, exitstatus):
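The rewritten hook derives a pytest marker from the first directory under src/spikeinterface instead of substring-matching the whole path. Below is a self-contained sketch of just that mapping, outside pytest; the example paths are invented, and the surrounding pytest plumbing stays as in the diff above:

    # Illustrative sketch of the path-to-marker mapping used above; only the
    # parts-based logic mirrors the new conftest.py, the paths are made up.
    from pathlib import Path

    modules_location = Path("src") / "spikeinterface"


    def marker_for(test_file: Path) -> str:
        rel_path = test_file.relative_to(modules_location)
        module = rel_path.parts[0]  # e.g. "core", "sorters", "extractors"
        if module == "sorters":
            if "internal" in rel_path.parts:
                return "sorters_internal"
            elif "external" in rel_path.parts:
                return "sorters_external"
            return "sorters"
        return module  # every other module becomes its own marker


    print(marker_for(Path("src/spikeinterface/core/tests/test_recording.py")))        # core
    print(marker_for(Path("src/spikeinterface/sorters/internal/tests/test_sc2.py")))  # sorters_internal

With markers assigned this way, running pytest with -m "core" (the invocation now used in the core-test workflow above) selects exactly the tests that live under src/spikeinterface/core.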

doc/conf.py

Lines changed: 1 addition & 0 deletions
@@ -67,6 +67,7 @@
     'numpydoc',
     'sphinx.ext.autosectionlabel',
     'sphinx_design',
+    'sphinxcontrib.jquery',
     "sphinx.ext.intersphinx",
     "sphinx.ext.extlinks",
     "IPython.sphinxext.ipython_directive",

doc/development/development.rst

Lines changed: 7 additions & 6 deletions
@@ -152,7 +152,7 @@ for providing parameters, however is a little different. The project prefers the
 
 .. code-block:: bash
 
-    parameter_name: type, default: default_value
+    parameter_name : type, default: default_value
 
 
 This allows users to quickly understand the type of data that should be input into a function as well as whether a default is supplied. A full example would be:
@@ -165,21 +165,22 @@ This allows users to quickly understand the type of data that should be input in
 
         Parameters
         ----------
-        param_a: dict
+        param_a : dict
            A dictionary containing the data
-        param_b: int, default: 5
+        param_b : int, default: 5
            A scaling factor to be applied to the data
-        param_c: "mean" | "median", default: "mean"
+        param_c : "mean" | "median", default: "mean"
            What to calculate on the data
 
        Returns
        -------
-       great_data: dict
+       great_data : dict
           A dictionary of the processed data
       """
 
 
-Note that in this example we demonstrate two other docstring conventions followed by SpikeInterface. First, that all string arguments should be presented
+There should be a space between each parameter and the colon following it. This is neccessary for using the `numpydoc validator <https://numpydoc.readthedocs.io/en/latest/validation.html>`_.
+In the above example we demonstrate two other docstring conventions followed by SpikeInterface. First, that all string arguments should be presented
 with double quotes. This is the same stylistic convention followed by Black and enforced by the pre-commit for the repo. Second, when a parameter is a
 string with a limited number of values (e.g. :code:`mean` and :code:`median`), rather than give the type a value of :code:`str`, please list the possible strings
 so that the user knows what the options are.
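The new paragraph ties the space-before-colon rule to the numpydoc validator. As a hedged sketch of how such a docstring can be checked programmatically: the dotted path my_package.scale_data below is invented for illustration; numpydoc's validate function accepts the dotted name of any importable object.

    # Sketch: validate a numpydoc-style docstring programmatically.
    # "my_package.scale_data" is an invented import path used only for illustration.
    from numpydoc.validate import validate

    report = validate("my_package.scale_data")
    for error_code, message in report["errors"]:
        print(error_code, message)  # e.g. PR01 if a parameter is missing from the docstring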

doc/index.rst

Lines changed: 1 addition & 0 deletions
@@ -58,6 +58,7 @@ SpikeInterface is made of several modules to deal with different aspects of the
    development/development
    whatisnew
    authors
+   references
 
 
 Other resources

doc/references.rst

Lines changed: 129 additions & 0 deletions
@@ -0,0 +1,129 @@
+How to Cite
+===========
+
+If you like SpikeInterface, please star us on `Github <https://github.com/SpikeInterface/spikeinterface>`_!
+*giving us a star gives a measure of the level of use and interest, which goes a long way to getting funding*
+
+Please cite SpikeInterface in your papers with our eLife paper: [Buccino]_
+
+SpikeInterface stands on the shoulders of giants!
+Each method in SpikeInterface draws on (or directly runs) independently-created methods.
+Please try to reference the individual works that are important for your analysis pipeline.
+If you notice a missing reference, please let us know by `submitting an issue <https://github.com/SpikeInterface/spikeinterface/issues/new>`_ on Github.
+
+Preprocessing Module
+--------------------
+If you use one of the following preprocessing methods, please cite the appropriate source:
+
+- :code:`phase_shift` or :code:`highpass_spatial_filter` [IBL]_
+- :code:`detect_bad_channels(method='coherence+psd')` [IBL]_
+- :code:`common_reference` [Rolston]_
+
+Motion Correction
+^^^^^^^^^^^^^^^^^
+If you use the :code:`correct_motion` method in the preprocessing module, please cite [Garcia]_
+as well as the references that correspond to the :code:`preset` you used:
+
+- :code:`nonrigid_accurate` [Windolf]_ [Varol]_
+- :code:`nonrigid_fast_and_accurate` [Windolf]_ [Varol]_ [Pachitariu]_
+- :code:`rigid_fast` *no additional citation needed*
+- :code:`kilosort_like` [Pachitariu]_
+
+Sorters Module
+--------------
+If you use one of the following spike sorting algorithms (i.e. you use the :code:`run_sorter()` method,
+please include the appropriate citation for the :code:`sorter_name` parameter you use:
+*Note: unless otherwise stated, the reference given is to be used for all versions of the sorter*
+
+- :code:`combinato` [Niediek]_
+- :code:`hdsort` [Diggelmann]_
+- :code:`herdingspikes` [Muthmann]_ [Hilgen]_
+- :code:`kilosort` [Pachitariu]_
+- :code:`mountainsort` [Chung]_
+- :code:`spykingcircus` [Yger]_
+- :code:`wavclus` [Chaure]_
+- :code:`yass` [Lee]_
+
+Qualitymetrics Module
+---------------------
+If you use the :code:`qualitymetrics` module, i.e. you use the :code:`analyzer.compute()`
+or :code:`compute_quality_metrics()` methods, please include the citations for the :code:`metric_names` that were particularly
+important for your research:
+
+- :code:`amplitude_cutoff` or :code:`isi_violation` [Hill]_
+- :code:`amplitude_median` or :code:`sliding_rp_violation` [IBL]_
+- :code:`drift` [Siegle]_
+- :code:`rp_violation` [Llobet]_
+- :code:`sd_ratio` [Pouzat]_
+- :code:`snr` [Lemon]_ [Jackson]_
+- :code:`synchrony` [Grun]_
+
+If you use the :code:`qualitymetrics.pca_metrics` module, i.e. you use the
+:code:`compute_pc_metrics()` method, please include the citations for the :code:`metric_names` that were particularly
+important for your research:
+
+- :code:`d_prime` [Hill]_
+- :code:`isolation_distance` or :code:`l_ratio` [Schmitzer-Torbert]_
+- :code:`nearest_neighbor` or :code:`nn_isolation` or :code:`nn_noise_overlap` [Chung]_ [Siegle]_
+- :code:`silhouette` [Rousseeuw]_ [Hruschka]_
+
+Curation Module
+---------------
+If you use the :code:`get_potential_auto_merge` method from the curation module, please cite [Llobet]_
+
+References
+----------
+
+.. [Buccino] `SpikeInterface, a unified framework for spike sorting. 2020. <https://pubmed.ncbi.nlm.nih.gov/33170122/>`_
+
+.. [Buzsaki] `The Log-Dynamic Brain: How Skewed Distributions Affect Network Operations. 2014. <https://pubmed.ncbi.nlm.nih.gov/24569488/>`_
+
+.. [Chaure] `A novel and fully automatic spike-sorting implementation with variable number of features. 2018. <https://pubmed.ncbi.nlm.nih.gov/29995603/>`_
+
+.. [Chung] `A Fully Automated Approach to Spike Sorting. 2017. <https://pubmed.ncbi.nlm.nih.gov/28910621/>`_
+
+.. [Diggelmann] `Automatic spike sorting for high-density microelectrode arrays. 2018. <https://pubmed.ncbi.nlm.nih.gov/30207864/>`_
+
+.. [Garcia] `A Modular Implementation to Handle and Benchmark Drift Correction for High-Density Extracellular Recordings. 2024. <https://pubmed.ncbi.nlm.nih.gov/38238082/>`_
+
+.. [Grun] `Impact of higher-order correlations on coincidence distributions of massively parallel data. 2007. <https://www.researchgate.net/publication/225145104_Impact_of_Higher-Order_Correlations_on_Coincidence_Distributions_of_Massively_Parallel_Data>`_
+
+.. [Harris] `Temporal interaction between single spikes and complex spike bursts in hippocampal pyramidal cells. 2001. <https://pubmed.ncbi.nlm.nih.gov/11604145/>`_
+
+.. [Hilgen] `Unsupervised Spike Sorting for Large-Scale, High-Density Multielectrode Arrays. 2017. <https://pubmed.ncbi.nlm.nih.gov/28273464/>`_
+
+.. [Hill] `Quality Metrics to Accompany Spike Sorting of Extracellular Signals. 2011. <https://pubmed.ncbi.nlm.nih.gov/21677152/>`_
+
+.. [Hruschka] `Evolutionary algorithms for clustering gene-expression data. 2004. <https://www.researchgate.net/publication/220765683_Evolutionary_Algorithms_for_Clustering_Gene-Expression_Data>`_
+
+.. [IBL] `Spike sorting pipeline for the International Brain Laboratory. 2022. <https://figshare.com/articles/online_resource/Spike_sorting_pipeline_for_the_International_Brain_Laboratory/19705522/3>`_
+
+.. [Jackson] Quantitative assessment of extracellular multichannel recording quality using measures of cluster separation. Society of Neuroscience Abstract. 2005.
+
+.. [Lee] `YASS: Yet another spike sorter. 2017. <https://www.biorxiv.org/content/10.1101/151928v1>`_
+
+.. [Lemon] Methods for neuronal recording in conscious animals. IBRO Handbook Series. 1984.
+
+.. [Llobet] `Automatic post-processing and merging of multiple spike-sorting analyses with Lussac. 2022. <https://www.biorxiv.org/content/10.1101/2022.02.08.479192v1>`_
+
+.. [Muthmann] `Spike Detection for Large Neural Populations Using High Density Multielectrode Arrays. 2015. <https://pubmed.ncbi.nlm.nih.gov/26733859/>`_
+
+.. [Niediek] `Reliable Analysis of Single-Unit Recordings from the Human Brain under Noisy Conditions: Tracking Neurons over Hours. 2016. <https://pubmed.ncbi.nlm.nih.gov/27930664/>`_
+
+.. [Pachitariu] `Spike sorting with Kilosort4. 2024. <https://pubmed.ncbi.nlm.nih.gov/38589517/>`_
+
+.. [Pouzat] `Using noise signature to optimize spike-sorting and to assess neuronal classification quality. 2002. <https://pubmed.ncbi.nlm.nih.gov/12535763/>`_
+
+.. [Rolston] `Common median referencing for improved action potential detection with multielectrode arrays. 2009. <https://pubmed.ncbi.nlm.nih.gov/19964004/>`_
+
+.. [Rousseeuw] `Silhouettes: A graphical aid to the interpretation and validation of cluster analysis. 1987. <https://www.sciencedirect.com/science/article/pii/0377042787901257>`_
+
+.. [Schmitzer-Torbert] `Neuronal Activity in the Rodent Dorsal Striatum in Sequential Navigation: Separation of Spatial and Reward Responses on the Multiple T Task. 2004. <https://pubmed.ncbi.nlm.nih.gov/14736863/>`_
+
+.. [Siegle] `Survey of Spiking in the Mouse Visual System Reveals Functional Hierarchy. 2021. <https://pubmed.ncbi.nlm.nih.gov/33473216/>`_
+
+.. [Varol] `Decentralized Motion Inference and Registration of Neuropixel Data. 2021. <https://ieeexplore.ieee.org/document/9414145>`_
+
+.. [Windolf] `Robust Online Multiband Drift Estimation in Electrophysiology Data. 2022. <https://www.biorxiv.org/content/10.1101/2022.12.04.519043v2>`_
+
+.. [Yger] `A spike sorting toolbox for up to thousands of electrodes validated with ground truth recordings in vitro and in vivo. 2018. <https://pubmed.ncbi.nlm.nih.gov/29557782/>`_
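Most entries in the new references page key the citation off a specific call or argument. As a small, hedged illustration of one such call, using a tiny generated recording so the snippet runs on its own (the choice of common_reference is arbitrary; per the list above it maps to [Rolston]_):

    # Hedged illustration: a tiny generated recording stands in for real data, and
    # the call shown is only one of the citation-relevant calls listed above.
    from spikeinterface.core import generate_recording
    from spikeinterface.preprocessing import common_reference

    recording = generate_recording(num_channels=4, durations=[2.0], seed=0)

    # Common median referencing -> cite [Rolston]_ per the Preprocessing Module list.
    referenced = common_reference(recording, operator="median")
    print(referenced)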

pyproject.toml

Lines changed: 5 additions & 4 deletions
@@ -91,11 +91,11 @@ full = [
     "h5py",
     "pandas",
     "xarray",
-    "scipy<1.13",
+    "scipy",
     "scikit-learn",
     "networkx",
     "distinctipy",
-    "matplotlib",
+    "matplotlib>=3.6", # matplotlib.colormaps
     "cuda-python; platform_system != 'Darwin'",
     "numba",
 ]
@@ -159,8 +159,8 @@ test = [
 ]
 
 docs = [
-    "Sphinx==5.1.1",
-    "sphinx_rtd_theme==1.0.0",
+    "Sphinx",
+    "sphinx_rtd_theme",
     "sphinx-gallery",
     "sphinx-design",
     "numpydoc",
@@ -173,6 +173,7 @@ docs = [
     "hdbscan>=0.8.33", # For sorters spykingcircus2 + tridesclous
     "numba", # For many postprocessing functions
     "xarray", # For use of SortingAnalyzer zarr format
+    "networkx",
     # for release we need pypi, so this needs to be commented
     "probeinterface @ git+https://github.com/SpikeInterface/probeinterface.git", # We always build from the latest version
     "neo @ git+https://github.com/NeuralEnsemble/python-neo.git", # We always build from the latest version

src/spikeinterface/comparison/collision.py

Lines changed: 1 addition & 1 deletion
@@ -14,7 +14,7 @@ class CollisionGTComparison(GroundTruthComparison):
     This class needs maintenance and need a bit of refactoring.
 
 
-    collision_lag: float
+    collision_lag : float
         Collision lag in ms.
 
     """
