from pathlib import Path
import os
import re
+ from warnings import warn

import numpy as np

@@ -76,7 +77,9 @@ class SpikeGLXRawIO(BaseRawWithBufferApiIO):
    dirname: str, default: ''
        The spikeglx folder containing meta/bin files
    load_sync_channel: bool, default: False
-         The last channel (SY0) of each stream is a fake channel used for synchronisation
+         Can be used to load the sync stream as the last channel of the neural data.
+         This option is deprecated and will be removed in version 0.15.
+         From versions higher than 0.14.1, the sync channel is always loaded as a separate stream.
    load_channel_location: bool, default: False
        If True probeinterface is used to load the channel locations from the directory
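A minimal usage sketch of the behaviour described above; the folder name is a placeholder and the exact stream names depend on the recording. With the default `load_sync_channel=False`, the sync trace shows up as its own stream, while passing `load_sync_channel=True` triggers the deprecation warning added in `__init__` below:

```python
# Sketch only: "my_spikeglx_folder" and the stream names are assumptions for illustration.
import warnings
from neo.rawio import SpikeGLXRawIO

reader = SpikeGLXRawIO(dirname="my_spikeglx_folder")  # load_sync_channel defaults to False
reader.parse_header()
print(reader.header["signal_streams"]["name"])
# e.g. ['imec0.ap', 'imec0.ap-SYNC', 'imec0.lf', 'imec0.lf-SYNC', ...]

# The deprecated option still works but now emits a DeprecationWarning.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    SpikeGLXRawIO(dirname="my_spikeglx_folder", load_sync_channel=True)
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)
```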
@@ -109,6 +112,12 @@ def __init__(self, dirname="", load_sync_channel=False, load_channel_location=Fa
        BaseRawWithBufferApiIO.__init__(self)
        self.dirname = dirname
        self.load_sync_channel = load_sync_channel
+         if load_sync_channel:
+             warn(
+                 "The load_sync_channel=True option is deprecated and will be removed in version 0.15\n"
+                 "The sync channel is now loaded as a separate stream by default and should be accessed as such.",
+                 DeprecationWarning, stacklevel=2
+             )
        self.load_channel_location = load_channel_location

    def _source_name(self):
@@ -152,6 +161,8 @@ def _parse_header(self):
        signal_buffers = []
        signal_streams = []
        signal_channels = []
+         sync_stream_id_to_buffer_id = {}
+ 
        for stream_name in stream_names:
            # take first segment
            info = self.signals_info_dict[0, stream_name]
@@ -168,6 +179,16 @@ def _parse_header(self):
            for local_chan in range(info["num_chan"]):
                chan_name = info["channel_names"][local_chan]
                chan_id = f"{stream_name}#{chan_name}"
+ 
+                 # Sync channel
+                 if "nidq" not in stream_name and "SY0" in chan_name and not self.load_sync_channel and local_chan == info["num_chan"] - 1:
+                     # This is a sync channel and should be added as its own stream
+                     sync_stream_id = f"{stream_name}-SYNC"
+                     sync_stream_id_to_buffer_id[sync_stream_id] = buffer_id
+                     stream_id_for_chan = sync_stream_id
+                 else:
+                     stream_id_for_chan = stream_id
+ 
                signal_channels.append(
                    (
                        chan_name,
@@ -177,25 +198,33 @@ def _parse_header(self):
                        info["units"],
                        info["channel_gains"][local_chan],
                        info["channel_offsets"][local_chan],
-                         stream_id,
+                         stream_id_for_chan,
                        buffer_id,
                    )
                )

-             # all channel by dafult unless load_sync_channel=False
+             # all channel by default unless load_sync_channel=False
            self._stream_buffer_slice[stream_id] = None
+ 
            # check sync channel validity
            if "nidq" not in stream_name:
                if not self.load_sync_channel and info["has_sync_trace"]:
-                     # the last channel is remove from the stream but not from the buffer
-                     last_chan = signal_channels[-1]
-                     last_chan = last_chan[:-2] + ("", buffer_id)
-                     signal_channels = signal_channels[:-1] + [last_chan]
+                     # the last channel is removed from the stream but not from the buffer
                    self._stream_buffer_slice[stream_id] = slice(0, -1)
+ 
+                     # Add a buffer slice for the sync channel
+                     sync_stream_id = f"{stream_name}-SYNC"
+                     self._stream_buffer_slice[sync_stream_id] = slice(-1, None)
+ 
                if self.load_sync_channel and not info["has_sync_trace"]:
                    raise ValueError("SYNC channel is not present in the recording. " "Set load_sync_channel to False")

        signal_buffers = np.array(signal_buffers, dtype=_signal_buffer_dtype)
+ 
+         # Add sync channels as their own streams
+         for sync_stream_id, buffer_id in sync_stream_id_to_buffer_id.items():
+             signal_streams.append((sync_stream_id, sync_stream_id, buffer_id))
+ 
        signal_streams = np.array(signal_streams, dtype=_signal_stream_dtype)
        signal_channels = np.array(signal_channels, dtype=_signal_channel_dtype)

@@ -237,6 +266,14 @@ def _parse_header(self):
                t_start = frame_start / sampling_frequency

                self._t_starts[stream_name][seg_index] = t_start
+ 
+                 # This needs special logic because the sync stream is not present in stream_names
+                 if f"{stream_name}-SYNC" in signal_streams["name"]:
+                     sync_stream_name = f"{stream_name}-SYNC"
+                     if sync_stream_name not in self._t_starts:
+                         self._t_starts[sync_stream_name] = {}
+                     self._t_starts[sync_stream_name][seg_index] = t_start
+ 
                t_stop = info["sample_length"] / info["sampling_rate"]
                self._t_stops[seg_index] = max(self._t_stops[seg_index], t_stop)

@@ -265,7 +302,11 @@ def _parse_header(self):
            if self.load_channel_location:
                # need probeinterface to be installed
                import probeinterface
- 
+ 
+                 # Skip for sync streams
+                 if "SYNC" in stream_name:
+                     continue
+ 
                info = self.signals_info_dict[seg_index, stream_name]
                if "imroTbl" in info["meta"] and info["stream_kind"] == "ap":
                    # only for ap channel
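Taken together, the changes above mean downstream code can read the separated sync trace through the standard RawIO chunk API. A minimal sketch, assuming an "imec0.ap" stream; the folder and stream names are placeholders:

```python
# Sketch only: assumes a recording with an "imec0.ap" stream and its "imec0.ap-SYNC" counterpart.
from neo.rawio import SpikeGLXRawIO

reader = SpikeGLXRawIO(dirname="my_spikeglx_folder")
reader.parse_header()

stream_names = list(reader.header["signal_streams"]["name"])
sync_index = stream_names.index("imec0.ap-SYNC")

# The sync stream holds a single int16 channel sliced from the end of the underlying buffer.
sync_raw = reader.get_analogsignal_chunk(block_index=0, seg_index=0, stream_index=sync_index)
print(sync_raw.shape)  # (n_samples, 1)
```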