diff --git a/docs/source/_static/publishing_extensions.png b/docs/source/_static/publishing_extensions.png
new file mode 100644
index 000000000..08d1a629a
Binary files /dev/null and b/docs/source/_static/publishing_extensions.png differ
diff --git a/docs/source/_static/publishing_extensions_clone_fork_repo.png b/docs/source/_static/publishing_extensions_clone_fork_repo.png
new file mode 100644
index 000000000..1442d5426
Binary files /dev/null and b/docs/source/_static/publishing_extensions_clone_fork_repo.png differ
diff --git a/docs/source/conf.py b/docs/source/conf.py
index d0b919cbf..ab71c400b 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -164,6 +164,13 @@ def __call__(self, filename):
     'dandi': ('https://dandiarchive.org/%s', '%s'),
     "nwbinspector": ("https://nwbinspector.readthedocs.io/en/dev/%s", "%s"),
     'hdmf-zarr': ('https://hdmf-zarr.readthedocs.io/en/stable/%s', '%s'),
+    'nwb_extension_git': ('https://github.com/nwb-extensions/%s', '%s'),
+    'nwb-schema-language-docs': ('https://schema-language.readthedocs.io/en/latest/%s', '%s'),
+    'ndx-template-docs': ('https://github.com/nwb-extensions/ndx-template/%s', '%s'),
+    'nwb-schema-docs': ('https://nwb-schema.readthedocs.io/en/latest/%s', '%s'),
+    'hdmf-docutils-docs': ('https://github.com/hdmf-dev/hdmf-docutils/%s', '%s'),
+    'ndx-catalog': ('https://nwb-extensions.github.io/%s', '%s'),
 }

 nitpicky = True
diff --git a/docs/source/extensions.rst b/docs/source/extensions.rst
new file mode 100644
index 000000000..cd6581512
--- /dev/null
+++ b/docs/source/extensions.rst
@@ -0,0 +1,21 @@
+.. _extending-nwb:
+
+Extending NWB
+=============
+
+Neurophysiology is always changing as new technologies are developed. While the core NWB schema supports many of the
+most common data types in neurophysiology, we need a way to accommodate new technologies and unique metadata needs.
+Neurodata extensions (NDX) allow us to define new data types. These data types can extend core types, contain core
+types, or be entirely new. These extensions are formally defined with a collection of YAML files following
+the :nwb-schema-language-docs:`NWB Specification Language <>`.
+
+.. toctree::
+   :maxdepth: 2
+
+   extensions/create_extension
+   extensions/spec_api
+   extensions/auto_api
+   extensions/custom_api
+   extensions/documenting
+   extensions/publishing
+   extensions/examples
diff --git a/docs/source/extensions/auto_api.rst b/docs/source/extensions/auto_api.rst
new file mode 100644
index 000000000..102f66479
--- /dev/null
+++ b/docs/source/extensions/auto_api.rst
@@ -0,0 +1,85 @@
+.. _extension-auto-api:
+
+Generating an API for an extension
+----------------------------------
+
+.. _extension-auto-matlabnapi:
+
+Generating a MatNWB API
+~~~~~~~~~~~~~~~~~~~~~~~
+
+In MatNWB, simply call ``generateExtension("path/to/extension/namespace.yaml");``. The class files will be
+generated under the ``+types/+<namespace>`` module and can be accessed via standard MATLAB class semantics:
+
+.. code-block:: MATLAB
+
+    ts = types.ndx_example.TetrodeSeries();
+
+.. note::
+    As seen above, MatNWB will convert namespace names if they are not valid identifiers in MATLAB. See
+    `Variable Names <https://www.mathworks.com/help/matlab/matlab_prog/variable-names.html>`_ for more
+    information. In most cases, the conversion conforms with MATLAB's approach in
+    `matlab.lang.makeValidName() <https://www.mathworks.com/help/matlab/ref/matlab.lang.makevalidname.html>`_.
+
+.. _extension-auto-pythonapi:
+
+Generating a PyNWB API
+~~~~~~~~~~~~~~~~~~~~~~
+
+Now that we have created the extension specification, we need to create the Python interface. These classes will be
+used just like the PyNWB API to read and write NWB files using Python.
+There are two ways to do this: you can automatically generate the API classes based on the schema, or you can
+manually create the API classes. Here, we will show you how to automatically generate the API. In the next
+section, we will discuss why and how to create custom API classes.
+
+Open up ``ndx-example/src/pynwb/ndx_example/__init__.py``, and notice the last line:
+
+.. code-block:: python
+
+    TetrodeSeries = get_class('TetrodeSeries', 'ndx-example')
+
+:py:func:`~pynwb.get_class` is a function that automatically creates a Python API class by parsing the extension
+YAML. If you create more neurodata types, simply call it once for each type. This is the same class that is
+created when you use the ``load_namespaces`` flag on :py:func:`~pynwb.NWBHDF5IO.__init__`.
+
+Customizing automatically generated APIs
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Once these classes are generated, you can customize them by dynamically adding or replacing attributes/methods
+(a.k.a. monkey patching).
+
+A typical example is adding methods. Let's say you wanted a method that could
+return data from only the first channel. You could add that method like this:
+
+.. code-block:: python
+
+    def data_from_first_chan(self):
+        return self.data[:, 0]
+
+    TetrodeSeries.data_from_first_chan = data_from_first_chan
+
+You can also alter existing methods by overwriting them. Let's suppose you wanted to ensure that the
+``trode_id`` field is never less than 0 for the ``TetrodeSeries`` constructor. You can do this by creating a new
+``__init__`` function and assigning it to the class.
+
+.. code-block:: python
+
+    from hdmf.utils import docval, get_docval
+
+    @docval(*get_docval(TetrodeSeries.__init__))
+    def new_init(self, **kwargs):
+        assert kwargs['trode_id'] >= 0, "`trode_id` must be greater than or equal to 0."
+        TetrodeSeries.__init__(self, **kwargs)
+
+    TetrodeSeries.__init__ = new_init
+
+The above code creates a ``new_init`` method that runs a validation step and then calls the original ``__init__``.
+Then the class ``__init__`` is overwritten by the new method. Here we also use ``docval``, which is described in the
+next section.
+
+.. tip::
+    This approach is easy, but note that your API will be locked to your specification. If you make changes to your
+    specification, there will be corresponding changes to the API, and this is likely to break existing code.
+    Also, monkey patches can be very confusing to someone who is not aware of them. Differences
+    between the installed module and the actual behavior of the source code can lead to frustrated
+    developers. As such, this approach should be used with great care. In the next section,
+    :ref:`extension-custom-api`, we will show you how to build a fully customized API that is more robust.
diff --git a/docs/source/extensions/create_extension.rst b/docs/source/extensions/create_extension.rst
new file mode 100644
index 000000000..dc8cfa809
--- /dev/null
+++ b/docs/source/extensions/create_extension.rst
@@ -0,0 +1,143 @@
+.. _extension-create:
+
+Creating an extension
+=====================
+
+Using ndx-template
+~~~~~~~~~~~~~~~~~~
+Extensions should be created in their own repository, not alongside data conversion code. This facilitates sharing
+and editing of the extension separately from the code that uses it.
+When starting a new extension, we highly
+recommend using the :nwb_extension_git:`ndx-template` repository, which automatically generates a repository with
+the appropriate directory structure.
+
+After you finish the instructions in the :nwb_extension_git:`ndx-template <ndx-template>` README,
+you should have a directory structure that looks like this:
+
+.. code-block:: bash
+
+    ├── LICENSE.txt
+    ├── MANIFEST.in
+    ├── NEXTSTEPS.md
+    ├── README.md
+    ├── docs
+    │   ├── Makefile
+    │   ├── README.md
+    │   ├── make.bat
+    │   └── source
+    │       ├── _static
+    │       │   └── theme_overrides.css
+    │       ├── conf.py
+    │       ├── conf_doc_autogen.py
+    │       ├── credits.rst
+    │       ├── description.rst
+    │       ├── format.rst
+    │       ├── index.rst
+    │       └── release_notes.rst
+    ├── requirements.txt
+    ├── setup.cfg
+    ├── setup.py
+    ├── spec
+    │   ├── ndx-example.extensions.yaml
+    │   └── ndx-example.namespace.yaml
+    └── src
+        ├── matnwb
+        │   └── README.md
+        ├── pynwb
+        │   ├── README.md
+        │   ├── ndx_example
+        │   │   └── __init__.py
+        │   └── tests
+        │       ├── __init__.py
+        │       └── test_tetrodeseries.py
+        └── spec
+            └── create_extension_spec.py
+
+At its core, an NWB extension consists of YAML text files, such as those generated in the ``spec``
+folder. While you can write these YAML extension files by hand, PyNWB provides a convenient API
+via the :py:mod:`~pynwb.spec` module for creating extensions.
+
+Open ``src/spec/create_extension_spec.py``. You will be
+modifying this script to create your own NWB extension. Let's first walk through each piece.
+
+Creating a namespace
+~~~~~~~~~~~~~~~~~~~~
+NWB organizes types into namespaces. You must define a new namespace before creating any new types. After following
+the instructions from the :nwb_extension_git:`ndx-template`, you should have a file
+``ndx-my-ext/src/spec/create_extension_spec.py``. The beginning of this file should look like this:
+
+.. code-block:: python
+
+    from pynwb.spec import NWBNamespaceBuilder, export_spec, NWBGroupSpec, NWBAttributeSpec
+    # TODO: import the following spec classes as needed
+    # from pynwb.spec import NWBDatasetSpec, NWBLinkSpec, NWBDtypeSpec, NWBRefSpec
+
+
+    def main():
+        # these arguments were auto-generated from your cookiecutter inputs
+        ns_builder = NWBNamespaceBuilder(
+            doc="my description",
+            name="ndx-my-ext",
+            version="0.1.0",
+            author="John Doe",
+            contact="contact@gmail.com"
+        )
+
+Here, after the initial imports, we define the metadata of the extension.
+Pay particular attention to ``version``. If you make changes to your extension
+after the initial release, you should increment the version number so that you
+can keep track of exactly which version of the extension was used for each file.
+We recommend using a semantic versioning approach.
+
+Including types
+~~~~~~~~~~~~~~~
+
+Next, we need to include types from the core schemas. This is analogous to
+importing classes in Python. The generated file includes some example imports.
+
+.. code-block:: python
+
+    ns_builder.include_type('ElectricalSeries', namespace='core')
+    ns_builder.include_type('TimeSeries', namespace='core')
+    ns_builder.include_type('NWBDataInterface', namespace='core')
+    ns_builder.include_type('NWBContainer', namespace='core')
+    ns_builder.include_type('DynamicTableRegion', namespace='hdmf-common')
+    ns_builder.include_type('VectorData', namespace='hdmf-common')
+    ns_builder.include_type('Data', namespace='hdmf-common')
+
+Neuroscience-specific data types are defined in the namespace ``'core'``
+(which means core NWB).
+More general organizational data types that are not
+specific to neuroscience and are relevant across scientific fields are defined
+in ``'hdmf-common'``. You can see which types are defined in which namespace by
+exploring the :nwb-schema-docs:`NWB schema documentation <>`
+and the `hdmf-common schema documentation <https://hdmf-common-schema.readthedocs.io/en/stable/>`_.
+
+Defining new neurodata types
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Next, the ``create_extension_spec.py`` file declares an example extension
+for a new neurodata type called ``TetrodeSeries``, which extends the :py:class:`~pynwb.ecephys.ElectricalSeries`
+type. Then it creates a list of all new data types.
+
+.. code-block:: python
+
+    tetrode_series = NWBGroupSpec(
+        neurodata_type_def='TetrodeSeries',
+        neurodata_type_inc='ElectricalSeries',
+        doc=('An extension of ElectricalSeries to include the tetrode ID for '
+             'each time series.'),
+        attributes=[
+            NWBAttributeSpec(
+                name='trode_id',
+                doc='The tetrode ID.',
+                dtype='int32'
+            )
+        ],
+    )
+
+    # TODO: add all of your new data types to this list
+    new_data_types = [tetrode_series]
+
+The remainder of the file generates the YAML files from the spec definition and should not be changed.
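+For reference, that boilerplate looks roughly like the sketch below (based on the
+:nwb_extension_git:`ndx-template`; the exact code in your generated file may differ slightly):
+
+.. code-block:: python
+
+    import os.path
+
+    def main():
+        # ... the namespace, included types, and new_data_types defined above ...
+
+        # export the spec to YAML files in the spec folder
+        output_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'spec'))
+        export_spec(ns_builder, new_data_types, output_dir)
+
+
+    if __name__ == '__main__':
+        # usage: python create_extension_spec.py
+        main()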
+After you make changes to this file, you should run it to re-generate the ``ndx-[name].extensions.yaml`` and
+``ndx-[name].namespace.yaml`` files. In the next section, :ref:`extension-spec-api`, we will go into more detail
+about how to create neurodata types.
diff --git a/docs/source/extensions/custom_api.rst b/docs/source/extensions/custom_api.rst
new file mode 100644
index 000000000..dfdb4b163
--- /dev/null
+++ b/docs/source/extensions/custom_api.rst
@@ -0,0 +1,305 @@
+.. _extension-custom-api:
+
+Building a custom Python API for an extension
+=============================================
+
+Creating custom API classes is recommended if you want a stable API that can remain the same even as you make
+changes to the internal data organization. The :py:mod:`pynwb.core` module has various tools to make it easier to
+write classes that behave like the rest of the PyNWB API.
+
+The :py:mod:`pynwb.core` module defines two base classes that represent the primitive structures supported by
+the schema. :py:class:`~pynwb.core.NWBData` represents datasets and :py:class:`~pynwb.core.NWBContainer`
+represents groups. Additionally, :py:mod:`pynwb.core` offers subclasses of these two classes for
+writing classes that come with more functionality.
+
+Docval
+------
+``docval`` is a decorator defined in HDMF and used throughout PyNWB that performs input validation and automatic
+documentation generation. Using the ``docval`` decorator is recommended for methods of custom API classes.
+
+This decorator takes a list of dictionaries that specify the method parameters. These
+dictionaries are used for enforcing types and building a Sphinx docstring.
+The first arguments are dictionaries that specify the positional
+arguments and keyword arguments of the decorated function. These dictionaries
+must contain the following keys: ``'name'``, ``'type'``, and ``'doc'``. This will define a
+positional argument. To define a keyword argument, specify a default value
+using the key ``'default'``. To validate the dimensions of an input array,
+add the optional ``'shape'`` parameter.
+
+The decorated method must take ``self`` and ``**kwargs`` as arguments.
+
+When using this decorator, the functions :py:func:`~hdmf.utils.getargs` and
+:py:func:`~hdmf.utils.popargs` can be used for easily extracting arguments from
+kwargs.
+
+The following code example demonstrates the use of this decorator:
+
+.. code-block:: python
+
+    @docval({'name': 'arg1', 'type': str, 'doc': 'this is the first positional argument'},
+            {'name': 'arg2', 'type': int, 'doc': 'this is the second positional argument'},
+            {'name': 'kwarg1', 'type': (list, tuple), 'doc': 'this is a keyword argument', 'default': list()},
+            returns='foo object', rtype='Foo')
+    def foo(self, **kwargs):
+        arg1, arg2, kwarg1 = getargs('arg1', 'arg2', 'kwarg1', kwargs)
+        ...
+
+The ``'shape'`` parameter is a tuple that follows the same logic as the shape parameter in the
+:nwb-schema-language-docs:`specification language <>`. It can take the form of a tuple
+with integers or ``None`` in each dimension. ``None`` indicates that this dimension can take any value. For
+instance, ``(3, None)`` means the data must be a 2D matrix with a length of 3 and any width. ``'shape'`` can also
+take a value that is a tuple of tuples, in which case any one of those tuples can match the spec. For instance,
+``"shape": ((3, 3), (4, 4, 4))`` would indicate that the shape of this data could either be 3x3 or 4x4x4.
+
+The ``'type'`` argument can take a class or a tuple of classes. We also define special strings that are macros which
+encompass a number of similar types, and can be used in place of a class, on its own, or within a tuple. ``'array_data'``
+allows the data to be of type ``np.ndarray``, ``list``, ``tuple``, or ``h5py.Dataset``; and ``'scalar_data'`` allows
+the data to be ``str``, ``int``, ``float``, ``bytes``, or ``bool``.
+
+Registering classes
+-------------------
+
+When defining a class that represents a *neurodata_type* (i.e. anything that has a *neurodata_type_def*)
+from your extension, you can tell PyNWB which *neurodata_type* it represents using the function
+:py:func:`~pynwb.register_class`. This function can be called on its own, or used as a class decorator. The
+first argument should be the *neurodata_type* and the second argument should be the *namespace* name.
+
+The following example demonstrates how to register a class as the Python class representation of the
+*neurodata_type* "MyContainer" from the *namespace* "my_ns".
+
+.. code-block:: python
+
+    from pynwb import register_class
+    from pynwb.core import NWBContainer
+
+    class MyContainer(NWBContainer):
+        ...
+
+    register_class('MyContainer', 'my_ns', MyContainer)
+
+Alternatively, you can use :py:func:`~pynwb.register_class` as a decorator.
+
+.. code-block:: python
+
+    from pynwb import register_class
+    from pynwb.core import NWBContainer
+
+    @register_class('MyContainer', 'my_ns')
+    class MyContainer(NWBContainer):
+        ...
+
+:py:func:`~pynwb.register_class` is used with :py:class:`~pynwb.core.NWBData` the same way it is used with
+:py:class:`~pynwb.core.NWBContainer`.
+
+Nwbfields
+---------
+
+When creating a new neurodata type, you need to define the new properties on your class, which is done by defining
+them in the ``__nwbfields__`` class property. This class property should be a tuple of strings that name the new
+properties. Adding a property using this functionality will create a property that can be set *only once*. Any
+new properties of the class should be defined here.
+
+For example, the following class definition will create the ``MyContainer`` class that has the properties ``foo``
+and ``bar``.
+
+.. code-block:: python
+
+    from pynwb import register_class
+    from pynwb.core import NWBContainer
+
+
+    class MyContainer(NWBContainer):
+
+        __nwbfields__ = ('foo', 'bar')
+
+        ...
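+Once set, such a field cannot be reassigned. A minimal sketch of that behavior (assuming the ``MyContainer``
+class defined above has been registered):
+
+.. code-block:: python
+
+    obj = MyContainer(name='my_container')
+    obj.foo = 1      # each nwbfield can be set once
+    print(obj.foo)   # prints: 1
+    try:
+        obj.foo = 2  # a second assignment is rejected
+    except AttributeError as err:
+        print(err)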
+
+NWBContainer
+------------
+
+:py:class:`~pynwb.core.NWBContainer` should be used to represent groups with a *neurodata_type_def*. This section
+will discuss the available :py:class:`~pynwb.core.NWBContainer` subclasses for representing common group
+specifications.
+
+NWBDataInterface
+^^^^^^^^^^^^^^^^
+
+The NWB schema uses the neurodata type ``NWBDataInterface`` for specifying containers that contain data that is
+not considered metadata. For example, ``NWBDataInterface`` is a parent neurodata type to ``ElectricalSeries`` data,
+but not a parent to ``ElectrodeGroup``.
+
+There are no requirements for using :py:class:`~pynwb.core.NWBDataInterface` in addition to those inherited from
+:py:class:`~pynwb.core.NWBContainer`.
+
+MultiContainerInterface
+^^^^^^^^^^^^^^^^^^^^^^^
+
+Throughout the NWB schema, there are multiple :py:class:`~pynwb.core.NWBDataInterface` specifications that include
+zero or more or one or more instances of a certain neurodata type. For example, the :py:class:`~pynwb.ecephys.LFP`
+neurodata type contains one or more :py:class:`~pynwb.ecephys.ElectricalSeries`.
+If your extension follows this pattern, you can use :py:class:`~pynwb.core.MultiContainerInterface` for defining
+the representative class.
+
+:py:class:`~pynwb.core.MultiContainerInterface` provides a way of automatically generating setters, getters, and
+properties for your class. These methods are autogenerated based on a configuration provided using the class property
+``__clsconf__``. ``__clsconf__`` should be a dict or a list of dicts. A single dict should be used if your
+specification contains a single neurodata type. A list of dicts should be used if your specification contains
+multiple neurodata types that will exist as zero or more or one or more. The contents of the dict are described
+in the following table.
+
+=========== =========================================================== ================
+ Key         Attribute                                                   Required?
+=========== =========================================================== ================
+``type``    the type of the Container                                   Yes
+``attr``    the property name that holds the Containers                 Yes
+``add``     the name of the method for adding a Container               Yes
+``create``  the name of the method for creating a Container             No
+``get``     the name of the method for getting a Container by name      Yes
+=========== =========================================================== ================
+
+The ``type`` key provides a way for the setters to check for type. The property under the name given by the
+``attr`` key will be a :py:class:`~pynwb.core.LabelledDict`. If your class uses a single dict,
+a ``__getitem__`` method will be autogenerated for indexing into this :py:class:`~pynwb.core.LabelledDict`.
+Finally, a constructor will also be autogenerated if you do not provide one in the class definition.
+
+The following code block demonstrates using :py:class:`~pynwb.core.MultiContainerInterface` to build a class
+that represents the neurodata type "MyDataInterface" from the namespace "my_ns". It contains one or more containers
+with neurodata type "MyContainer".
+
+.. code-block:: python
+
+    from pynwb import register_class
+    from pynwb.core import MultiContainerInterface
+
+
+    @register_class("MyDataInterface", "my_ns")
+    class MyDataInterface(MultiContainerInterface):
+
+        __clsconf__ = {
+            'type': MyContainer,
+            'attr': 'containers',
+            'add': 'add_container',
+            'create': 'create_container',
+            'get': 'get_container',
+        }
+        ...
+
+This class will have the methods ``add_container``, ``create_container``, and ``get_container``. It will also have
+the property ``containers``. The ``add_container`` method will check to make sure that either an object of type
+``MyContainer`` or a list/dict/tuple of objects of type ``MyContainer`` is passed in. ``create_container`` will
+accept the exact same arguments that the ``MyContainer`` class constructor accepts.
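+For illustration, a minimal sketch of how the autogenerated methods might be used (hypothetical usage, assuming
+the ``MyContainer`` and ``MyDataInterface`` classes defined above):
+
+.. code-block:: python
+
+    mdi = MyDataInterface(name='my_data_interface')
+    mdi.add_container(MyContainer(name='c1'))   # add an existing container
+    c2 = mdi.create_container(name='c2')        # forwards arguments to the MyContainer constructor
+    c1 = mdi.get_container('c1')                # retrieve a container by name
+    print(mdi['c1'])                            # single-dict __clsconf__ also generates __getitem__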
+
+NWBData
+-------
+
+:py:class:`~pynwb.core.NWBData` should be used to represent datasets with a *neurodata_type_def*. This section
+will discuss the available :py:class:`~pynwb.core.NWBData` subclasses for representing common dataset
+specifications.
+
+NWBTable
+^^^^^^^^
+
+If your specification extension contains a table definition, i.e., a dataset with a compound data type, you should
+use the :py:class:`~pynwb.core.NWBTable` class to represent this specification. Since
+:py:class:`~pynwb.core.NWBTable` subclasses :py:class:`~pynwb.core.NWBData`, you can still use ``__nwbfields__``.
+In addition, you can use the ``__columns__`` class property to specify the columns of the table. ``__columns__``
+should be a list or a tuple of :py:func:`~hdmf.utils.docval`-like dictionaries.
+
+The following example demonstrates how to define a table with the columns ``foo`` and ``bar`` that are of type
+str and int, respectively. We also register the class as the representation of the *neurodata_type* "MyTable"
+from the *namespace* "my_ns".
+
+.. code-block:: python
+
+    from pynwb import register_class
+    from pynwb.core import NWBTable
+
+
+    @register_class('MyTable', 'my_ns')
+    class MyTable(NWBTable):
+
+        __columns__ = [
+            {'name': 'foo', 'type': str, 'doc': 'the foo column'},
+            {'name': 'bar', 'type': int, 'doc': 'the bar column'},
+        ]
+
+        ...
+
+NWBTableRegion
+^^^^^^^^^^^^^^
+
+:py:class:`~pynwb.core.NWBTableRegion` should be used to represent datasets that store a region reference.
+When subclassing this class, make sure you provide a way to pass in the required
+arguments for the :py:class:`~pynwb.core.NWBTableRegion` constructor--the *name* of the dataset, the *table* that
+the region applies to, and the *region* itself.
+
+Custom data checks on ``__init__``
+----------------------------------
+
+When creating new instances of an API class, we commonly need to check that input parameters are valid.
+As a common practice, the individual checks are typically implemented as separate methods
+named ``_check_...`` on the class and then called in ``__init__``.
+
+To support access to older file versions (which may not have followed some new requirements)
+while at the same time preventing the creation of new data that is invalid, PyNWB allows
+us to detect in ``__init__`` whether the object is being constructed by
+the :py:class:`~hdmf.build.objectmapper.ObjectMapper` on read or directly by the user,
+simply by checking whether ``self._in_construct_mode`` is ``True`` or ``False``. For convenience,
+PyNWB provides the :py:meth:`~pynwb.core.NWBMixin._error_on_new_warn_on_construct` method,
+which makes it easy to raise warnings on read and errors when creating new data.
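+A minimal sketch of this pattern (the class, namespace, and ``_check_trode_id`` helper below are hypothetical,
+and the exact signature of ``_error_on_new_warn_on_construct`` may differ between PyNWB versions):
+
+.. code-block:: python
+
+    from pynwb import register_class
+    from pynwb.core import NWBContainer
+    from hdmf.utils import docval, popargs
+
+
+    @register_class('MyContainer', 'my_ns')
+    class MyContainer(NWBContainer):
+
+        __nwbfields__ = ('trode_id',)
+
+        @docval({'name': 'name', 'type': str, 'doc': 'the name of this container'},
+                {'name': 'trode_id', 'type': int, 'doc': 'the tetrode ID'})
+        def __init__(self, **kwargs):
+            trode_id = popargs('trode_id', kwargs)
+            super().__init__(**kwargs)
+            self._check_trode_id(trode_id)
+            self.trode_id = trode_id
+
+        def _check_trode_id(self, trode_id):
+            if trode_id < 0:
+                # raises an error for newly created objects, but only warns
+                # when the object is constructed by the ObjectMapper on read
+                self._error_on_new_warn_on_construct(
+                    error_msg='trode_id must be greater than or equal to 0.')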
+
+ObjectMapper
+------------
+
+Customizing the mapping between NWBContainer and the Spec
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If your :py:class:`~pynwb.core.NWBContainer` extension requires custom mapping of the
+:py:class:`~pynwb.core.NWBContainer`
+class for reading and writing, you will need to implement and register a custom
+:py:class:`~hdmf.build.objectmapper.ObjectMapper`.
+
+:py:class:`~hdmf.build.objectmapper.ObjectMapper` extensions are registered with the decorator
+:py:func:`~pynwb.register_map`.
+
+.. code-block:: python
+
+    from pynwb import register_map
+    from hdmf.build import ObjectMapper
+
+    @register_map(MyExtensionContainer)
+    class MyExtensionMapper(ObjectMapper):
+        ...
+
+:py:func:`~pynwb.register_map` can also be used as a function.
+
+.. code-block:: python
+
+    from pynwb import register_map
+    from hdmf.build import ObjectMapper
+
+    class MyExtensionMapper(ObjectMapper):
+        ...
+
+    register_map(MyExtensionContainer, MyExtensionMapper)
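+Inside the mapper, you can then customize how builders are mapped to constructor arguments. A hedged sketch
+(the ``unit`` argument and the ``data`` sub-dataset are hypothetical, following the ``TimeSeries`` example
+described in the tip below):
+
+.. code-block:: python
+
+    from pynwb import register_map
+    from hdmf.build import ObjectMapper
+
+    @register_map(MyExtensionContainer)
+    class MyExtensionMapper(ObjectMapper):
+
+        @ObjectMapper.constructor_arg('unit')
+        def unit_carg(self, builder, manager):
+            # fill the constructor argument "unit" from the "unit" attribute
+            # stored on the "data" dataset within the group
+            return builder['data'].attributes.get('unit')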
+
+.. tip::
+
+    ObjectMappers allow you to customize how objects in the spec are mapped to attributes of your NWBContainer in
+    Python. This is useful, e.g., in cases where you want to customize the default mapping. For example, in
+    ``TimeSeries``, the attribute ``unit``, which is defined on the dataset ``data`` (i.e., ``data.unit``), would
+    by default be mapped to the attribute ``data__unit`` on :py:class:`~pynwb.base.TimeSeries`. The ObjectMapper
+    :py:class:`~pynwb.io.base.TimeSeriesMap` then changes this mapping to map ``data.unit`` to the attribute ``unit``
+    on :py:class:`~pynwb.base.TimeSeries`. ObjectMappers also allow you to customize how constructor arguments
+    for your ``NWBContainer`` are constructed. For example, in ``TimeSeries``, instead of explicit ``timestamps``,
+    we may only have a ``starting_time`` and ``rate``. In the ObjectMapper, we could then construct ``timestamps``
+    from this data on data load to always have ``timestamps`` available for the user.
+    For an overview of the concepts of containers, spec, builders, and object mappers in PyNWB, see also
+    :ref:`software-architecture`.
+
+After you have created your custom API, you can publish your extension following the instructions in
+:ref:`extension-documentation` and :ref:`extension-publishing`.
diff --git a/docs/source/extensions/documenting.rst b/docs/source/extensions/documenting.rst
new file mode 100644
index 000000000..6ec5b70bd
--- /dev/null
+++ b/docs/source/extensions/documenting.rst
@@ -0,0 +1,43 @@
+.. _extension-documentation:
+
+Documenting Extensions
+----------------------
+
+Using the same tools used to generate the documentation for the :nwb-schema-docs:`NWB core format <>`,
+one can easily generate documentation in HTML, PDF, ePub, and many other formats for extensions.
+
+If you used the :ndx-template-docs:`ndx-template <>`, then your repository is already pre-configured to
+automatically generate documentation for your extension using the :hdmf-docutils-docs:`HDMF DocUtils <>`
+and `Sphinx <https://www.sphinx-doc.org>`_. The ``docs`` directory structure should look like this:
+
+.. code-block:: text
+
+    ndx-my-extension/
+        docs/
+            source/
+                credits.rst
+                description.rst
+                release_notes.rst
+                ...
+
+To generate the HTML documentation files from the YAML (or JSON) sources of the extension, simply run:
+
+.. code-block:: text
+
+    cd docs
+    make html
+
+The generated documentation will be available in ``docs/build/html``. To view it, open
+``docs/build/html/index.html`` in your browser.
+These pages contain diagrams of your extension. Note that there are several places where information needs to be
+added. For instance, the Overview section says:
+
+.. note::
+
+    Add the description of your extension here
+
+Within ``docs/source``, edit ``credits.rst``, ``description.rst``, and ``release_notes.rst``, then rerun
+``make html``.
+
+Now that you have created documentation for your extension, it is time to learn how to publish it in the NDX
+catalog. See :ref:`extension-publishing` for detailed instructions on how to publish your extension, and browse
+published extensions on the :ndx-catalog:`NDX Catalog website <>`.
diff --git a/docs/source/extensions/examples.rst b/docs/source/extensions/examples.rst
new file mode 100644
index 000000000..bbc9bad42
--- /dev/null
+++ b/docs/source/extensions/examples.rst
@@ -0,0 +1,13 @@
+.. _extension-nwb-examples:
+
+Examples
+--------
+
+This section provides examples of real-world NWB extensions that demonstrate
+different use cases and implementation approaches.
+
+.. toctree::
+   :maxdepth: 1
+   :titlesonly:
+
+   extension_examples/labmetadata_extension
diff --git a/docs/source/extensions/extension_examples/labmetadata_extension.rst b/docs/source/extensions/extension_examples/labmetadata_extension.rst
new file mode 100644
index 000000000..fbfeb1717
--- /dev/null
+++ b/docs/source/extensions/extension_examples/labmetadata_extension.rst
@@ -0,0 +1,348 @@
+.. _extension-example-labmetadata:
+
+Extensions for lab-specific metadata: Extending ``LabMetaData``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Use case
+""""""""
+
+.. short_description_start
+
+Here we address the use case of adding lab-specific metadata to a file, e.g.,
+lab-specific information about experimental protocols, lab-specific identifiers, and so on.
+This approach is intended for metadata that is usually small.
+
+`Extension source `_
+
+.. short_description_end
+
+Approach
+""""""""
+
+To include lab-specific metadata, NWB provides :py:class:`pynwb.file.LabMetaData` as a
+convenient base type, which makes it easy to add your data to an :py:class:`pynwb.file.NWBFile`
+without having to modify the :py:class:`pynwb.file.NWBFile` type itself
+(since the ability to add :py:class:`pynwb.file.LabMetaData` objects is already implemented).
+
+.. note::
+
+    NWB uses dynamically extensible table structures based on :py:class:`~hdmf.common.table.DynamicTable`
+    to describe metadata and derived results, e.g., :py:class:`~pynwb.epoch.TimeIntervals` for epochs or trials,
+    or :py:class:`~pynwb.file.ElectrodeTable` to describe extracellular electrodes. Depending on the
+    type of metadata, use of these existing dynamic table structures can help avoid the need for
+    custom extensions by including the data as additional, custom columns in the appropriate existing tables.
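+For illustration, a minimal sketch of this table-based alternative (``stimulus_notes`` is a hypothetical column
+name; :py:func:`~pynwb.testing.mock.file.mock_NWBFile` is used here only to create an example file):
+
+.. code-block:: python
+
+    from pynwb.testing.mock.file import mock_NWBFile
+
+    nwbfile = mock_NWBFile()
+    # store per-trial lab-specific metadata as a custom column on the
+    # existing trials table instead of defining a new extension type
+    nwbfile.add_trial_column(name="stimulus_notes", description="Lab-specific notes about each trial.")
+    nwbfile.add_trial(start_time=0.0, stop_time=1.0, stimulus_notes="low-contrast stimulus")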
+
+Creating the extension
+""""""""""""""""""""""
+
+**1.** Create a new repository for the extension using the :nwb_extension_git:`ndx-template`:
+
+.. code-block:: bash
+
+    cookiecutter gh:nwb-extensions/ndx-template
+
+**2.** Answer the questions of the cookiecutter template. We can respond to many questions
+with ``Enter`` to accept the default response (e.g., to start with ``version=0.1.0``):
+
+.. code-block:: none
+
+    namespace [ndx-my-namespace]: ndx-labmetadata-example
+    description [My NWB extension]: Example extension to illustrate how to extend LabMetaData for adding lab-specific metadata
+    author [My Name]: Oliver Ruebel
+    email [my_email@example.com]: oruebel@lbl.gov
+    github_username [myname]: oruebel
+    copyright [2021, Oliver Ruebel]:
+    version [0.1.0]:
+    release [alpha]:
+    Select license:
+    1 - BSD-3
+    2 - MIT
+    3 - Apache Software License 2.0
+    4 - Other
+    Choose from 1, 2, 3, 4 [1]: 1
+    py_pkg_name [ndx_labmetadata_example]:
+
+**3.** Edit ``ndx-labmetadata-example/src/spec/create_extension_spec.py``, which was generated for you, to define
+the schema of your extension. See the :ref:`extension-spec-api` section for details on how to use the
+specification API.
+
+* Add ``LabMetaData`` as an included type:
+
+.. code-block:: python
+
+    ns_builder.include_type('LabMetaData', namespace='core')
+
+* Define your new ``LabMetaData`` type for your lab:
+
+.. code-block:: python
+
+    labmetadata_ext = NWBGroupSpec(
+        name='custom_lab_metadata',
+        doc='Example extension type for storing lab metadata',
+        neurodata_type_def='LabMetaDataExtensionExample',
+        neurodata_type_inc='LabMetaData',
+    )
+
+* Add the groups, datasets, and attributes with the metadata specific to our lab to
+  our ``LabMetaData`` schema:
+
+.. code-block:: python
+
+    labmetadata_ext.add_dataset(
+        name="tissue_preparation",
+        doc="Lab-specific description of the preparation of the tissue",
+        dtype='text',
+        quantity='?'
+    )
+
+* Add our new type definitions to the extension:
+
+.. code-block:: python
+
+    new_data_types = [labmetadata_ext]
+
+**4.** Generate the schema for the extension by running the ``create_extension_spec.py`` script:
+
+.. code-block:: bash
+
+    cd ndx-labmetadata-example
+    python src/spec/create_extension_spec.py
+
+**5.** Edit ``src/pynwb/ndx_labmetadata_example/__init__.py`` to define Python API classes for our new extension
+data types via :py:func:`pynwb.get_class`:
+
+.. code-block:: python
+
+    LabMetaDataExtensionExample = get_class('LabMetaDataExtensionExample', 'ndx-labmetadata-example')
+
+**6.** Define unit tests for the extension. The :nwb_extension_git:`ndx-template` created an example test
+module ``src/pynwb/tests/test_tetrodeseries.py`` to illustrate how to implement tests. Here we simply remove
+this file and replace it with our own tests in ``test_labmetadata_example.py``. More details below in
+:ref:`extension-example-labmetadata-unittest`.
+
+**7.** To make sure our extension schema and source code files are version controlled, we now add all the files
+we just created to the Git repo:
+
+.. code-block:: bash
+
+    git add .
+    git commit -m "Added API classes, tests, and schema files"
+
+**8.** Install your extension (Python only, optional):
+
+.. code-block:: bash
+
+    pip install .
+
+Now our extension is ready to use!
+
+Creating custom Python API classes
+""""""""""""""""""""""""""""""""""
+
+We skip this step here, since this extension of :py:class:`~pynwb.file.LabMetaData` is simple enough that the
+autogenerated class is sufficient. If the autogenerated class from :py:func:`pynwb.get_class` for an extension
+data type is not sufficient, then we can either customize the autogenerated class as described in
+:ref:`extension-auto-pythonapi` (recommended only for basic changes) or define our own custom API class as
+described in :ref:`extension-custom-api` (recommended for full customization).
+
+.. _extension-example-labmetadata-unittest:
+
+Creating unit tests
+"""""""""""""""""""
+
+.. tabs::
+
+    .. tab:: Python
+
+        .. tabs::
+
+            .. code-tab:: py Unit test
+
+                from pynwb.testing.mock.file import mock_NWBFile
+                from pynwb.testing import TestCase
+                from ndx_labmetadata_example import LabMetaDataExtensionExample
+
+
+                class TestLabMetaDataExtensionExample(TestCase):
+                    """Test basic functionality of LabMetaDataExtensionExample without read/write"""
+
+                    def setUp(self):
+                        """Set up an NWB file."""
+                        self.nwbfile = mock_NWBFile()
+
+                    def test_constructor(self):
+                        """Test that the constructor for LabMetaDataExtensionExample sets values as expected."""
+                        tissue_preparation = "Example tissue preparation"
+                        lmdee_object = LabMetaDataExtensionExample(tissue_preparation=tissue_preparation)
+                        self.assertEqual(lmdee_object.tissue_preparation, tissue_preparation)
+
+            .. code-tab:: py Roundtrip test (read/write)
+
+                from pynwb.testing import TestCase
+                from pynwb.testing.testh5io import NWBH5IOMixin
+                from ndx_labmetadata_example import LabMetaDataExtensionExample
+
+                class TestLabMetaDataExtensionExampleRoundtrip(NWBH5IOMixin, TestCase):
+                    """
+                    Roundtrip test for LabMetaDataExtensionExample to test read/write
+
+                    This test class writes the LabMetaDataExtensionExample to an NWBFile, then
+                    reads the data back from the file, and compares that the data read from file
+                    is consistent with the original data. Using the pynwb.testing infrastructure
+                    simplifies this complex test greatly by allowing us to simply define how to
+                    create the container, add it to a file, and retrieve it from a file. The
+                    task of writing, reading, and comparing the data is then taken care of
+                    automatically by the NWBH5IOMixin.
+                    """
+
+                    def setUpContainer(self):
+                        """Set up the example LabMetaDataExtensionExample object."""
+                        self.lab_meta_data = LabMetaDataExtensionExample(tissue_preparation="Example tissue preparation")
+                        return self.lab_meta_data
+
+                    def addContainer(self, nwbfile):
+                        """Add the test LabMetaDataExtensionExample to the given NWBFile."""
+                        nwbfile.add_lab_meta_data(lab_meta_data=self.lab_meta_data)
+
+                    def getContainer(self, nwbfile):
+                        """Get the LabMetaDataExtensionExample object from the given NWBFile."""
+                        return nwbfile.get_lab_meta_data(self.lab_meta_data.name)
+
+            .. code-tab:: bash Running Python unit tests
+
+                cd ndx-labmetadata-example
+                pytest
+
+    .. tab:: MATLAB
+
+        .. tabs::
+
+            .. code-tab:: c Unit test
+
+                Coming soon ...
+
+            .. code-tab:: c Roundtrip test (read/write)
+
+                Coming soon ...
+
+            .. code-tab:: bash Running MATLAB unit tests
+
+                Coming soon ...
+
+Documenting the extension
+"""""""""""""""""""""""""
+
+* **README.md:** Add instructions to the ``README.md`` file. This typically includes information on how to
+  install the extension and an example of how to use it.
+* **Schema and user documentation:**
+
+  * Install the latest release of hdmf-docutils: ``python -m pip install hdmf-docutils``
+  * Generate the documentation for your extension based on the YAML schema files via:
+
+    .. code-block:: bash
+
+        cd docs
+        make html
+
+  * To view the docs, simply open ``docs/build/html/index.html`` in your browser.
+  * See ``docs/README.md`` for instructions on how to customize the documentation for your extension.
+
+See :ref:`extension-documentation` for more details.
+
+Writing data using the extension
+""""""""""""""""""""""""""""""""
+
+.. tabs::
+
+    .. code-tab:: py Python
+
+        from pynwb.file import NWBFile, Subject
+        from ndx_labmetadata_example import LabMetaDataExtensionExample
+        from pynwb import NWBHDF5IO
+        from uuid import uuid4
+        from datetime import datetime
+
+        # create an example NWBFile
+        nwbfile = NWBFile(
+            session_description="test session description",
+            identifier=str(uuid4()),
+            session_start_time=datetime(1970, 1, 1),
+            subject=Subject(
+                age="P50D",
+                description="example mouse",
+                sex="F",
+                subject_id="test_id")
+        )
+
+        # create our custom lab metadata
+        lab_meta_data = LabMetaDataExtensionExample(tissue_preparation="Example tissue preparation")
+
+        # add the LabMetaDataExtensionExample to the NWBFile
+        nwbfile.add_lab_meta_data(lab_meta_data=lab_meta_data)
+
+        # write the new file to disk
+        filename = "testfile.nwb"
+        with NWBHDF5IO(path=filename, mode="w") as io:
+            io.write(nwbfile)
+
+    .. code-tab:: c MATLAB
+
+        Coming soon ...
+
+Reading an NWB file that uses the extension
+"""""""""""""""""""""""""""""""""""""""""""
+
+.. tabs::
+
+    .. code-tab:: py Python
+
+        from pynwb import NWBHDF5IO
+        from ndx_labmetadata_example import LabMetaDataExtensionExample
+
+        # read the file from disk
+        io = NWBHDF5IO(path=filename, mode="r")
+        nwbfile = io.read()
+        # get the custom lab metadata object
+        lab_meta_data = nwbfile.get_lab_meta_data(name="custom_lab_metadata")
+
+    .. code-tab:: py Python (without extension installed)
+
+        from pynwb import NWBHDF5IO
+
+        # read the file from disk; load the namespaces from the file to
+        # autogenerate classes from the schema
+        io = NWBHDF5IO(path=filename, mode="r", load_namespaces=True)
+        nwbfile = io.read()
+        # get the custom lab metadata object
+        lab_meta_data = nwbfile.get_lab_meta_data(name="custom_lab_metadata")
+
+    .. code-tab:: c MATLAB
+
+        Coming soon ...
+
+Publishing the extension
+""""""""""""""""""""""""
+
+The steps to publish an extension are the same for all extensions. We therefore only briefly describe
+the main steps for publishing our extension here. For a more in-depth guide, see the page :ref:`extension-publishing`.
+
+* **GitHub (Open Source):** To make the sources of your extension openly accessible, publish the extension
+  on GitHub by following the instructions in :ref:`extension-publishing-github`.
+
+* **PyPI (Open Access):** Publish your extension on `PyPI <https://pypi.org/>`_ to make it easy for users to
+  install it and to create a persistent release of the extension, following the :ref:`extension-publishing-pypi`
+  guide.
+
+* **NDX Catalog (Open Publication):** The :ndx-catalog:`NDX Catalog <>` serves as a central, community-led catalog
+  for extensions to the NWB data standard. The NDX Catalog manages basic metadata about extensions, while ownership
+  of the source repositories for the extensions remains with the developers. For a step-by-step guide, see
+  :ref:`extension-publishing-ndxcatalog`.
diff --git a/docs/source/extensions/publishing.rst b/docs/source/extensions/publishing.rst
new file mode 100644
index 000000000..bd59a4ba2
--- /dev/null
+++ b/docs/source/extensions/publishing.rst
@@ -0,0 +1,132 @@
+.. _extension-publishing:
+
+Publishing extensions
+---------------------
+
+.. image:: ../_static/publishing_extensions.png
+   :width: 315px
+   :class: align-right
+
+Neurodata extensions can be shared with the community using the :ndx-catalog:`NDX Catalog <>`.
+As illustrated in the figure, the publication process is divided into three main steps:
+1) open release of the sources to the community using GitHub, 2) open access to versioned
+releases via PyPI, and 3) open publication of the extension to the community via the
+:ndx-catalog:`NDX Catalog <>`.
+
+.. _extension-publishing-github:
+
+Open Source: Releasing your extension Git repository
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+1. Before publishing your extension online, you should add a license file. Permissive licenses should be used
+   if possible; a BSD license is recommended.
+2. Modify ``README.md`` at the root directory of your extension repo to describe the extension for interested
+   developers and users.
+3. Make your Git repository accessible online via GitHub, or any other public Git hosting service of your choice.
+   To publish your extension on GitHub, you will need a `GitHub account <https://github.com/>`_; then
+   follow GitHub's instructions for adding an existing project to GitHub.
+4. Make a release for the extension on GitHub with the version number specified, e.g., if the version
+   is ``0.1.0``, then this page should exist:
+   ``https://github.com/<username>/<extension-name>/releases/tag/0.1.0``.
+   See GitHub's guide on creating a release for instructions on how to make a release, and see the
+   :nwb-main:`NWB Versioning Guidelines <>` for details on how to version extensions.
+
+.. note::
+
+    We focus on GitHub here because it is the service that is currently most commonly used for
+    extension repositories. However, users may choose to use other services (e.g., GitLab or Bitbucket)
+    to share their sources.
+
+.. _extension-publishing-pypi:
+
+Open Access: Releasing your extension on PyPI
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+To make your extension installable for users via pip and to manage public releases, NWB uses the
+`Python Package Index (PyPI) <https://pypi.org/>`_.
+
+1. Follow the `Python packaging directions <https://packaging.python.org/en/latest/tutorials/packaging-projects/>`_
+   to package your project. You may need to modify ``setup.py``. If your extension version is ``0.1.0``, then this
+   page should exist: ``https://pypi.org/project/<extension-name>/0.1.0``
+2. Once your GitHub release and ``setup.py`` are ready, publish on PyPI:
+
+.. code-block:: bash
+
+    python setup.py sdist bdist_wheel
+    twine upload dist/*
+
+.. _extension-publishing-ndxcatalog:
+
+Open Publication: Publishing your extension on the NDX Catalog
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The :ndx-catalog:`NDX Catalog <>` serves as a central, community-led catalog for extensions to
+the NWB data standard. The NDX Catalog manages basic metadata about extensions, while ownership of the source
+repositories for the extensions remains with the developers. To publish your extension on the catalog:
+
+1. Fork the :nwb_extension_git:`staged-extensions <staged-extensions>` repository,
+   which is used to submit new extensions to the catalog via pull requests.
+2. Clone your fork of *staged-extensions* onto your computer, e.g., via ``git clone <url-of-your-fork>``.
+
+.. image:: ../_static/publishing_extensions_clone_fork_repo.png
+   :width: 800px
+   :class: align-center
+
+3. Copy the directory ``staged-extensions/example`` to a new directory with the name of your extension, e.g., via
+   ``cp -r staged-extensions/example staged-extensions/<extension-name>``.
+
+4. Edit ``staged-extensions/<extension-name>/ndx-meta.yaml`` with information on where to find your
+   NWB extension.
+   The ``NEXTSTEPS.md`` file in the :nwb_extension_git:`ndx-template` includes an autogenerated
+   template ``ndx-meta.yaml`` file that you may copy and modify. The YAML file MUST contain a dict
+   with the following keys:
+
+   * ``name``: extension namespace name
+   * ``version``: extension version
+   * ``src``: URL for the main page of the public repository (e.g., on GitHub, BitBucket, GitLab)
+     that contains the sources of the extension
+   * ``pip``: URL for the main page of the extension on PyPI
+   * ``license``: name of the license of the extension
+   * ``maintainers``: list of GitHub usernames of those who will reliably maintain the extension
+
+5. Edit ``staged-extensions/<extension-name>/README.md`` to add information about your extension.
+   Usually, you can just copy the ``README.md`` from your extension repo:
+   ``cp <extension-name>/README.md staged-extensions/<extension-name>/README.md``
+
+6. Add and commit your changes with Git and push your changes to GitHub:
+
+.. code-block:: bash
+
+    cd staged-extensions
+    git add <extension-name>
+    git commit -m "Add new catalog entry for <extension-name>"
+    git push
+
+7. Open a pull request. See GitHub's guide on creating a pull request from a fork for step-by-step
+   instructions on how to create a pull request on GitHub.
+
+8. Once the PR has been created, the build of your extension will be tested on Windows, Mac, and Linux.
+   The technical team will review your extension shortly after and provide feedback and
+   request changes, if any. Once the technical team has approved and merged your pull request,
+   a new repository, called ``<extension-name>-record``, will be created
+   in the :nwb_extension_git:`nwb-extensions GitHub organization <>` and you will be added as a
+   maintainer for that repository.
+
+Updating your published extension
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Once you have published your extension, you can update it and publish a new version as follows:
+
+1. Update your ``<extension-name>`` GitHub repository.
+2. Publish your updated extension on PyPI.
+3. Fork the ``<extension-name>-record`` repository from the
+   :nwb_extension_git:`nwb-extensions GitHub organization <>`
+   and update your ``ndx-meta.yaml``, ``README.md``, and other relevant record data.
+4. Open a pull request to test the changes automatically.
+5. The technical team will review your changes shortly after and provide feedback and request changes, if any.
+6. Your updated extension is ready once your PR has been approved and merged.
+
+Policies: Neurodata Extension (NDX) rules and guidelines
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+See the NDX Catalog Policies page for information about how to version, publish, and share your extensions.
\ No newline at end of file
diff --git a/docs/source/extensions/spec_api.rst b/docs/source/extensions/spec_api.rst
new file mode 100644
index 000000000..3be708f79
--- /dev/null
+++ b/docs/source/extensions/spec_api.rst
@@ -0,0 +1,188 @@
+.. _extension-spec-api:
+
+The Spec API
+------------
+
+PyNWB defines a spec API: a set of classes that help you generate a valid NWB extension specification.
+The :nwb-schema-language-docs:`NWB Specification Language <>` defines a structure for
+data and metadata using Groups, Datasets, Attributes, and Links. These structures are mapped onto
+:py:class:`~pynwb.spec.NWBGroupSpec`, :py:class:`~pynwb.spec.NWBDatasetSpec`,
+:py:class:`~pynwb.spec.NWBAttributeSpec`, and :py:class:`~pynwb.spec.NWBLinkSpec`, respectively. Here, we
+describe each of these classes in detail and demonstrate how to use them to create custom neurodata types.
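+For orientation, the sketch below combines all four classes in one (hypothetical) type definition; each class
+is described in detail in the following sections:
+
+.. code-block:: python
+
+    from pynwb.spec import NWBGroupSpec, NWBDatasetSpec, NWBAttributeSpec, NWBLinkSpec
+
+    # a hypothetical group type containing a dataset, an attribute, and a link
+    spec = NWBGroupSpec(
+        neurodata_type_def='MyNewType',
+        neurodata_type_inc='NWBDataInterface',
+        doc='A hypothetical type used to illustrate the spec classes.',
+        datasets=[
+            NWBDatasetSpec(name='values', doc='The stored values.', dtype='float32', shape=(None,)),
+        ],
+        attributes=[
+            NWBAttributeSpec(name='unit', doc='Unit of measurement of the values.', dtype='text'),
+        ],
+        links=[
+            NWBLinkSpec(doc='The source of the values.', target_type='TimeSeries', quantity='?'),
+        ],
+    )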
+
+Group Specifications
+^^^^^^^^^^^^^^^^^^^^
+
+Most neurodata types are Groups, which act like a directory or folder within the NWB file. A Group can have
+within it Datasets, Attributes, Links, and/or other Groups. Groups are specified with the
+:py:class:`~pynwb.spec.NWBGroupSpec` class, which provides a Python API for specifying the structure of an
+:nwb-schema-language-docs:`NWB Group <>`.
+
+.. code-block:: python
+
+    from pynwb.spec import NWBGroupSpec
+
+    spec = NWBGroupSpec(
+        neurodata_type_def='MyType',
+        neurodata_type_inc='NWBDataInterface',
+        doc='A custom NWB type',
+        name='quux',
+        attributes=[...],
+        datasets=[...],
+        groups=[...],
+        links=[...]
+    )
+
+``neurodata_type_def`` and ``neurodata_type_inc`` define the neurodata type with the following rules:
+
+- ``neurodata_type_def`` declares the name of the neurodata type.
+- ``neurodata_type_inc`` indicates what data type you are extending (Groups must extend Groups, and Datasets
+  must extend Datasets).
+- To define a new neurodata type that does not extend an existing type, use
+  ``neurodata_type_inc='NWBContainer'`` for a group or ``neurodata_type_inc='NWBData'`` for a dataset.
+  ``NWBContainer`` and ``NWBData`` are base types for NWB.
+- To use a type that has already been defined, use ``neurodata_type_inc`` and not ``neurodata_type_def``.
+- You can define a group that is not a neurodata type by omitting both ``neurodata_type_def`` and
+  ``neurodata_type_inc``.
+
+.. tip::
+    Although you have the option not to, there are several advantages to defining new groups as neurodata types.
+    Neurodata types can be reused in multiple places in the schema and can be linked to, while groups that are not
+    neurodata types cannot. You can also have multiple neurodata type groups of the same type in the same group,
+    whereas groups that are not neurodata types are limited to 0 or 1. Most of the time, we would recommend making a
+    group a neurodata type. It is also generally better to extend your neurodata type from an existing type. Look
+    through the :nwb-schema-docs:`NWB schema <>` to see if a core neurodata type would
+    work as a base for your new type. If no existing type works, consider extending
+    :py:class:`~pynwb.base.NWBDataInterface`, which allows you to add the object to a processing module.
+
+.. tip::
+    New neurodata types should always be declared at the top level of the schema rather than nesting type
+    definitions. That is, when creating a new neurodata type, it should be placed at the top level of your schema
+    and then included at the appropriate location via ``neurodata_type_inc``. This approach greatly simplifies
+    the management of types.
+
+For more information about the options available when specifying a Group, see the API documentation for
+:py:class:`~pynwb.spec.NWBGroupSpec`.
+
+Dataset Specifications
+^^^^^^^^^^^^^^^^^^^^^^
+
+All larger blocks of numeric or text data should be stored in Datasets. Specifying datasets is done with
+:py:class:`~pynwb.spec.NWBDatasetSpec`.
+
+.. code-block:: python
+
+    from pynwb.spec import NWBDatasetSpec
+
+    spec = NWBDatasetSpec(
+        doc='A custom NWB type',
+        name='qux',
+        shape=(None, None),
+        attributes=[...]
+    )
+
+``neurodata_type_def``, ``neurodata_type_inc``, ``doc``, ``name``, ``default_name``, ``linkable``, ``quantity``, and
+``attributes`` all work the same as they do in :py:class:`~pynwb.spec.NWBGroupSpec`, described in the previous
+section.
+
+``dtype`` defines the type of the data, which can be a basic type, compound type, or reference type.
+See a list of :nwb-schema-language-docs:`dtype options <>`
+as part of the specification language docs.
+Basic types can be defined as string objects, and more complex
+types via :py:class:`~pynwb.spec.NWBDtypeSpec` and :py:class:`~hdmf.spec.spec.RefSpec`.
+
+``shape`` is a specification defining the allowable shapes for the dataset. See the
+:nwb-schema-language-docs:`shape specification <>`
+as part of the specification language docs. ``None`` is mapped to ``null``. If no shape is provided, it is
+assumed that the dataset is only a single element.
+
+If the dataset is a single element (scalar) that represents metadata, consider using an Attribute (see
+below) to store the data more efficiently instead. However, note that a Dataset can have Attributes,
+whereas an Attribute cannot have Attributes of its own.
+
+``dims`` provides labels for each dimension of ``shape``.
+
+Using datasets to specify tables
+++++++++++++++++++++++++++++++++
+
+Row-based tables can be specified using :py:class:`~pynwb.spec.NWBDtypeSpec`. To specify a table, provide a
+list of :py:class:`~pynwb.spec.NWBDtypeSpec` objects to the ``dtype`` argument.
+
+.. code-block:: python
+
+    from pynwb.spec import NWBDatasetSpec, NWBDtypeSpec, NWBAttributeSpec
+
+    spec = NWBDatasetSpec(
+        doc='A custom NWB type',
+        name='qux',
+        attributes=[
+            NWBAttributeSpec('baz', 'a value for baz', 'text'),
+        ],
+        dtype=[
+            NWBDtypeSpec('foo', 'column for foo', 'int'),
+            NWBDtypeSpec('bar', 'a column for bar', 'float'),
+        ],
+    )
+
+.. tip::
+    Column-based tables are also possible and more flexible. See the documentation for
+    :hdmf-docs:`DynamicTable <>`.
+
+Attribute Specifications
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+Attributes are small metadata objects describing the nature and/or intended usage of a Group or Dataset. Attributes
+are defined in the ``attributes`` field of a :py:class:`~pynwb.spec.NWBGroupSpec` or
+:py:class:`~pynwb.spec.NWBDatasetSpec`. ``attributes`` takes a list of :py:class:`~pynwb.spec.NWBAttributeSpec`
+objects.
+
+.. code-block:: python
+
+    from pynwb.spec import NWBAttributeSpec
+
+    spec = NWBAttributeSpec(
+        name='bar',
+        doc='a value for bar',
+        dtype='float'
+    )
+
+:py:class:`~pynwb.spec.NWBAttributeSpec` has arguments very similar to :py:class:`~pynwb.spec.NWBDatasetSpec`.
+A key difference is that an attribute cannot be a neurodata type, i.e., the ``neurodata_type_def`` and
+``neurodata_type_inc`` keys are not allowed. The only way to match an object with a spec is through the name of the
+attribute, so ``name`` is required. You cannot have multiple attributes on a single group/dataset that correspond
+to the same :py:class:`~pynwb.spec.NWBAttributeSpec`, since these would have to have the same name. Therefore,
+instead of specifying a ``quantity``, attributes have a ``required`` field, which takes a boolean value. Another
+key difference between datasets and attributes is that attributes cannot have attributes of their own.
+
+.. tip::
+    Dataset or Attribute? It is often possible to store data as either a Dataset or an Attribute. Our best advice is
+    to keep Attributes small. In HDF5, the typical size limit for attributes is 64 KB. If an attribute is going to
+    store more than 64 KB, then make it a Dataset. Attributes are also more efficient for storing very
+    small data, such as scalars. However, attributes cannot have attributes of their own, and in HDF5,
+    I/O filters, such as compression and chunking, cannot be applied to attributes.
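+For example, an optional attribute can be declared by setting ``required`` to ``False`` (``'unit'`` here is
+just an illustrative name):
+
+.. code-block:: python
+
+    from pynwb.spec import NWBAttributeSpec
+
+    spec = NWBAttributeSpec(
+        name='unit',
+        doc='An optional unit of measurement.',
+        dtype='text',
+        required=False
+    )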
+
+Link Specifications
+^^^^^^^^^^^^^^^^^^^
+
+You can store an object in one place and reference that object in another without copying the object using
+:nwb-schema-language-docs:`Links <>`, which
+can be defined using :py:class:`~pynwb.spec.NWBLinkSpec` objects.
+
+.. code-block:: python
+
+    from pynwb.spec import NWBLinkSpec
+
+    spec = NWBLinkSpec(
+        doc='my link',
+        target_type='ElectricalSeries',
+        quantity='?'
+    )
+
+``doc``, ``quantity``, and ``name`` work similarly to :py:class:`~pynwb.spec.NWBDatasetSpec`.
+
+``target_type`` indicates the neurodata type that can be referenced.
+
+.. tip::
+    In case you need to store large collections of links, it can be more efficient to create a dataset for storing
+    the links via object references. In NWB, this is used, e.g., in :py:class:`~pynwb.epoch.TimeIntervals` to store
+    collections of references to TimeSeries objects.
+
+Using these functions in ``create_extension_spec.py`` and then running that file will generate the YAML files that
+define your extension. If you are a MATLAB user, you are now ready to switch over to MATLAB: just run
+``generateExtension('path/to/ndx_name.namespace.yaml')`` and the extension classes will be automatically generated
+for you. If you are a Python user, you need to do a little more work to make a Python API that allows you to read
+and write data according to this extension. The next two sections, :ref:`extension-auto-api` and
+:ref:`extension-custom-api`, will teach you how to create this Python API.
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 1f6883d1c..aab2eeda1 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -35,6 +35,7 @@ breaking down the barriers to data sharing in neuroscience.
    validation
    export
+   extensions
    api_docs

 .. toctree::
diff --git a/docs/source/sg_execution_times.rst b/docs/source/sg_execution_times.rst
new file mode 100644
index 000000000..5e04276fb
--- /dev/null
+++ b/docs/source/sg_execution_times.rst
@@ -0,0 +1,100 @@
+
+:orphan:
+
+.. _sphx_glr_sg_execution_times:
+
+
+Computation times
+=================
+**00:16.349** total execution time for 22 files **from all galleries**:
+
+.. container::
+
+   .. list-table::
+      :header-rows: 1
+      :class: table table-striped sg-datatable
+
+      * - Example
+        - Time
+        - Mem (MB)
+      * - :ref:`sphx_glr_tutorials_general_plot_read_basics.py` (``../gallery/general/plot_read_basics.py``)
+        - 00:10.878
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_domain_plot_icephys_pandas.py` (``../gallery/domain/plot_icephys_pandas.py``)
+        - 00:01.832
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_advanced_io_plot_iterative_write.py` (``../gallery/advanced_io/plot_iterative_write.py``)
+        - 00:01.015
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_domain_plot_icephys.py` (``../gallery/domain/plot_icephys.py``)
+        - 00:00.602
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_domain_plot_behavior.py` (``../gallery/domain/plot_behavior.py``)
+        - 00:00.601
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_general_plot_file.py` (``../gallery/general/plot_file.py``)
+        - 00:00.446
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_advanced_io_plot_linking_data.py` (``../gallery/advanced_io/plot_linking_data.py``)
+        - 00:00.269
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_advanced_io_plot_editing.py` (``../gallery/advanced_io/plot_editing.py``)
+        - 00:00.236
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_general_plot_configurator.py` (``../gallery/general/plot_configurator.py``)
+        - 00:00.178
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_general_plot_timeintervals.py` (``../gallery/general/plot_timeintervals.py``)
+        - 00:00.159
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_advanced_io_plot_zarr_io.py` (``../gallery/advanced_io/plot_zarr_io.py``)
+        - 00:00.132
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_advanced_io_h5dataio.py` (``../gallery/advanced_io/h5dataio.py``)
+        - 00:00.000
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_advanced_io_parallelio.py` (``../gallery/advanced_io/parallelio.py``)
+        - 00:00.000
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_advanced_io_streaming.py` (``../gallery/advanced_io/streaming.py``)
+        - 00:00.000
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_domain_ecephys.py` (``../gallery/domain/ecephys.py``)
+        - 00:00.000
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_domain_images.py` (``../gallery/domain/images.py``)
+        - 00:00.000
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_domain_ogen.py` (``../gallery/domain/ogen.py``)
+        - 00:00.000
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_domain_ophys.py` (``../gallery/domain/ophys.py``)
+        - 00:00.000
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_general_add_remove_containers.py` (``../gallery/general/add_remove_containers.py``)
+        - 00:00.000
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_general_extensions.py` (``../gallery/general/extensions.py``)
+        - 00:00.000
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_general_object_id.py` (``../gallery/general/object_id.py``)
+        - 00:00.000
+        - 0.0
+      * - :ref:`sphx_glr_tutorials_general_scratch.py` (``../gallery/general/scratch.py``)
+        - 00:00.000
+        - 0.0