From 2e69961a6a1494273f8815336bf8c18ff18bfc5d Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Wed, 7 May 2025 11:45:56 -0600 Subject: [PATCH 01/12] draft --- neo/rawio/blackrockrawio.py | 147 ++++++++++++++++++++---------------- 1 file changed, 83 insertions(+), 64 deletions(-) diff --git a/neo/rawio/blackrockrawio.py b/neo/rawio/blackrockrawio.py index b10bc796e..26094936f 100644 --- a/neo/rawio/blackrockrawio.py +++ b/neo/rawio/blackrockrawio.py @@ -323,19 +323,21 @@ def _parse_header(self): self.__nsx_data_header = {} for nsx_nb in self._avail_nsx: - spec = self.__nsx_spec[nsx_nb] = self.__extract_nsx_file_spec(nsx_nb) + spec_version = self.__nsx_spec[nsx_nb] = self.__extract_nsx_file_spec(nsx_nb) # read nsx headers - self.__nsx_basic_header[nsx_nb], self.__nsx_ext_header[nsx_nb] = self.__nsx_header_reader[spec](nsx_nb) + nsx_header_reader = self.__nsx_header_reader[spec_version] + self.__nsx_basic_header[nsx_nb], self.__nsx_ext_header[nsx_nb] = nsx_header_reader(nsx_nb) - # The only way to know if it is the PTP-variant of file spec 3.0 + # The only way to know if it is the peak-to-peak-variant of file spec 3.0 # is to check for nanosecond timestamp resolution. 
- if ( + is_ptp_variant = ( "timestamp_resolution" in self.__nsx_basic_header[nsx_nb].dtype.names and self.__nsx_basic_header[nsx_nb]["timestamp_resolution"] == 1_000_000_000 - ): + ) + if is_ptp_variant: nsx_dataheader_reader = self.__nsx_dataheader_reader["3.0-ptp"] else: - nsx_dataheader_reader = self.__nsx_dataheader_reader[spec] + nsx_dataheader_reader = self.__nsx_dataheader_reader[spec_version] # for nsxdef get_analogsignal_shape(self, block_index, seg_index): self.__nsx_data_header[nsx_nb] = nsx_dataheader_reader(nsx_nb) @@ -355,8 +357,12 @@ def _parse_header(self): else: raise (ValueError("nsx_to_load is wrong")) - if not all(nsx_nb in self._avail_nsx for nsx_nb in self.nsx_to_load): - raise FileNotFoundError(f"nsx_to_load does not match available nsx list") + missing_nsx_files = [nsx_nb for nsx_nb in self.nsx_to_load if nsx_nb not in self._avail_nsx] + if missing_nsx_files: + missing_list = ", ".join(f"ns{nsx_nb}" for nsx_nb in missing_nsx_files) + raise FileNotFoundError( + f"Requested NSX file(s) not found: {missing_list}. Available NSX files: {self._avail_nsx}" + ) # check that all files come from the same specification all_spec = [self.__nsx_spec[nsx_nb] for nsx_nb in self.nsx_to_load] @@ -377,31 +383,33 @@ def _parse_header(self): for nsx_nb in self.nsx_to_load: self.__match_nsx_and_nev_segment_ids(nsx_nb) - self.nsx_datas = {} + self.nsx_data = {} self.sig_sampling_rates = {} if len(self.nsx_to_load) > 0: for nsx_nb in self.nsx_to_load: - spec = self.__nsx_spec[nsx_nb] - # The only way to know if it is the PTP-variant of file spec 3.0 + basic_header = self.__nsx_basic_header[nsx_nb] + spec_version = self.__nsx_spec[nsx_nb] + # The only way to know if it is the peak-to-peak-variant of file spec 3.0 # is to check for nanosecond timestamp resolution. 
- if ( - "timestamp_resolution" in self.__nsx_basic_header[nsx_nb].dtype.names - and self.__nsx_basic_header[nsx_nb]["timestamp_resolution"] == 1_000_000_000 - ): + is_ptp_variant = ( + "timestamp_resolution" in basic_header.dtype.names + and basic_header["timestamp_resolution"] == 1_000_000_000 + ) + if is_ptp_variant: _data_reader_fun = self.__nsx_data_reader["3.0-ptp"] else: - _data_reader_fun = self.__nsx_data_reader[spec] - self.nsx_datas[nsx_nb] = _data_reader_fun(nsx_nb) + _data_reader_fun = self.__nsx_data_reader[spec_version] + self.nsx_data[nsx_nb] = _data_reader_fun(nsx_nb) - sr = float(self.main_sampling_rate / self.__nsx_basic_header[nsx_nb]["period"]) + sr = float(self.main_sampling_rate / basic_header["period"]) self.sig_sampling_rates[nsx_nb] = sr - if spec in ["2.2", "2.3", "3.0"]: + if spec_version in ["2.2", "2.3", "3.0"]: ext_header = self.__nsx_ext_header[nsx_nb] - elif spec == "2.1": + elif spec_version == "2.1": ext_header = [] keys = ["labels", "units", "min_analog_val", "max_analog_val", "min_digital_val", "max_digital_val"] - params = self.__nsx_params[spec](nsx_nb) + params = self.__nsx_params[spec_version](nsx_nb) for i in range(len(params["labels"])): d = {} for key in keys: @@ -438,7 +446,7 @@ def _parse_header(self): signal_channels.append((ch_name, ch_id, sr, sig_dtype, units, gain, offset, stream_id, buffer_id)) # check nb segment per nsx - nb_segments_for_nsx = [len(self.nsx_datas[nsx_nb]) for nsx_nb in self.nsx_to_load] + nb_segments_for_nsx = [len(self.nsx_data[nsx_nb]) for nsx_nb in self.nsx_to_load] if not all(nb == nb_segments_for_nsx[0] for nb in nb_segments_for_nsx): raise NeoReadWriteError("Segment nb not consistent across nsX files") self._nb_segment = nb_segments_for_nsx[0] @@ -460,7 +468,7 @@ def _parse_header(self): ts_res = 30_000 period = self.__nsx_basic_header[nsx_nb]["period"] sec_per_samp = period / 30_000 # Maybe 30_000 should be ['sample_resolution'] - length = self.nsx_datas[nsx_nb][data_bl].shape[0] + length = 
self.nsx_data[nsx_nb][data_bl].shape[0] if self.__nsx_data_header[nsx_nb] is None: t_start = 0.0 t_stop = max(t_stop, length / self.sig_sampling_rates[nsx_nb]) @@ -631,7 +639,7 @@ def _segment_t_stop(self, block_index, seg_index): def _get_signal_size(self, block_index, seg_index, stream_index): stream_id = self.header["signal_streams"][stream_index]["id"] nsx_nb = int(stream_id) - memmap_data = self.nsx_datas[nsx_nb][seg_index] + memmap_data = self.nsx_data[nsx_nb][seg_index] return memmap_data.shape[0] def _get_signal_t_start(self, block_index, seg_index, stream_index): @@ -642,7 +650,7 @@ def _get_signal_t_start(self, block_index, seg_index, stream_index): def _get_analogsignal_chunk(self, block_index, seg_index, i_start, i_stop, stream_index, channel_indexes): stream_id = self.header["signal_streams"][stream_index]["id"] nsx_nb = int(stream_id) - memmap_data = self.nsx_datas[nsx_nb][seg_index] + memmap_data = self.nsx_data[nsx_nb][seg_index] if channel_indexes is None: channel_indexes = slice(None) sig_chunk = memmap_data[i_start:i_stop, channel_indexes] @@ -802,7 +810,7 @@ def __extract_nsx_file_spec(self, nsx_nb): """ Extract file specification from an .nsx file. """ - filename = ".".join([self._filenames["nsx"], f"ns{nsx_nb}"]) + filename = f"{self._filenames['nsx']}.ns{nsx_nb}" # Header structure of files specification 2.2 and higher. For files 2.1 # and lower, the entries ver_major and ver_minor are not supported. @@ -822,7 +830,7 @@ def __extract_nev_file_spec(self): """ Extract file specification from an .nev file """ - filename = ".".join([self._filenames["nev"], "nev"]) + filename = f"{self._filenames['nev']}.nev" # Header structure of files specification 2.2 and higher. For files 2.1 # and lower, the entries ver_major and ver_minor are not supported. 
dt0 = [("file_id", "S8"), ("ver_major", "uint8"), ("ver_minor", "uint8")] @@ -872,7 +880,7 @@ def __read_nsx_header_variant_b(self, nsx_nb): """ Extract nsx header information from a 2.2 or 2.3 .nsx file """ - filename = ".".join([self._filenames["nsx"], f"ns{nsx_nb}"]) + filename = f"{self._filenames['nsx']}.ns{nsx_nb}" # basic header (file_id: NEURALCD) dt0 = [ @@ -885,6 +893,8 @@ def __read_nsx_header_variant_b(self, nsx_nb): # label of the sampling group (e.g., "1 kS/s" or "LFP low") ("label", "S16"), ("comment", "S256"), + # ("application_to_create_file", "S52"), # A 52-character string labeling the program which created the file. Trellis will also include its revision number in this label. + # ("processor_timestamp", "uint32"), # The processor timestamp (in 30 kHz clock cycles) at which the data in the file were collected. ("period", "uint32"), ("timestamp_resolution", "uint32"), # time origin: 2byte uint16 values for ... @@ -904,7 +914,7 @@ def __read_nsx_header_variant_b(self, nsx_nb): # extended header (type: CC) offset_dt0 = np.dtype(dt0).itemsize - shape = nsx_basic_header["channel_count"] + shape = int(nsx_basic_header["channel_count"]) dt1 = [ ("type", "S2"), ("electrode_id", "uint16"), @@ -923,11 +933,11 @@ def __read_nsx_header_variant_b(self, nsx_nb): # filter settings used to create nsx from source signal ("hi_freq_corner", "uint32"), ("hi_freq_order", "uint32"), - ("hi_freq_type", "uint16"), # 0=None, 1=Butterworth + ("hi_freq_type", "uint16"), # 0=None, 1=Butterworth, 2=Chebyshev ("lo_freq_corner", "uint32"), ("lo_freq_order", "uint32"), ("lo_freq_type", "uint16"), - ] # 0=None, 1=Butterworth + ] # 0=None, 1=Butterworth, -2-Chebyshev nsx_ext_header = np.memmap(filename, shape=shape, offset=offset_dt0, dtype=dt1, mode="r") @@ -937,14 +947,17 @@ def __read_nsx_dataheader(self, nsx_nb, offset): """ Reads data header following the given offset of an nsx file. 
""" - filename = ".".join([self._filenames["nsx"], f"ns{nsx_nb}"]) + filename = f"{self._filenames['nsx']}.ns{nsx_nb}" - ts_size = "uint64" if self.__nsx_basic_header[nsx_nb]["ver_major"] >= 3 else "uint32" + major_version = self.__nsx_basic_header[nsx_nb]["ver_major"] + ts_size = "uint64" if major_version >= 3 else "uint32" + #ts_size = "uint64" + # dtypes data header, the header flag is always set to 1 + dt2 = [("header_flag", "uint8"), ("timestamp", ts_size), ("nb_data_points", "uint32")] - # dtypes data header - dt2 = [("header", "uint8"), ("timestamp", ts_size), ("nb_data_points", "uint32")] + packet_header = np.memmap(filename, dtype=dt2, shape=1, offset=offset, mode="r")[0] - return np.memmap(filename, dtype=dt2, shape=1, offset=offset, mode="r")[0] + return packet_header def __read_nsx_dataheader_variant_a(self, nsx_nb, filesize=None, offset=None): """ @@ -964,32 +977,37 @@ def __read_nsx_dataheader_variant_b( Reads the nsx data header for each data block following the offset of file spec 2.2, 2.3, and 3.0. 
""" - filename = ".".join([self._filenames["nsx"], f"ns{nsx_nb}"]) + filename = f"{self._filenames['nsx']}.ns{nsx_nb}" filesize = self.__get_file_size(filename) data_header = {} - index = 0 - if offset is None: - offset = self.__nsx_basic_header[nsx_nb]["bytes_in_headers"] + offset_to_first_data_block = offset or int(self.__nsx_basic_header[nsx_nb]["bytes_in_headers"]) + channel_count = int(self.__nsx_basic_header[nsx_nb]["channel_count"]) + offset = offset_to_first_data_block + data_block_index = 0 while offset < filesize: - dh = self.__read_nsx_dataheader(nsx_nb, offset) - data_header[index] = { - "header": dh["header"], - "timestamp": dh["timestamp"], - "nb_data_points": dh["nb_data_points"], - "offset_to_data_block": offset + dh.dtype.itemsize, + packet_header = self.__read_nsx_dataheader(nsx_nb, offset) + header_flag = packet_header["header_flag"] + assert header_flag == 1, f"Invalid header flag: {header_flag}" + timestamp = packet_header["timestamp"] + offset_to_data_block_start = offset + packet_header.dtype.itemsize + num_data_points = int(packet_header["nb_data_points"]) + + data_header[data_block_index] = { + "header": header_flag, + "timestamp": timestamp, + "nb_data_points": num_data_points, + "offset_to_data_block": offset_to_data_block_start, } - # data size = number of data points * (2bytes * number of channels) - # use of `int` avoids overflow problem - data_size = int(dh["nb_data_points"]) * int(self.__nsx_basic_header[nsx_nb]["channel_count"]) * 2 - # define new offset (to possible next data block) - offset = int(data_header[index]["offset_to_data_block"]) + data_size + data_array_size = num_data_points * channel_count * np.dtype("int16").itemsize + # Jump to the next data block + offset = offset_to_data_block_start + data_array_size - index += 1 + data_block_index += 1 return data_header @@ -1075,19 +1093,20 @@ def __read_nsx_data_variant_b(self, nsx_nb): Extract nsx data (blocks) from a 2.2, 2.3, or 3.0 .nsx file. 
Blocks can arise if the recording was paused by the user. """ - filename = ".".join([self._filenames["nsx"], f"ns{nsx_nb}"]) + filename = f"{self._filenames['nsx']}.ns{nsx_nb}" data = {} - for data_bl in self.__nsx_data_header[nsx_nb].keys(): + data_header = self.__nsx_data_header[nsx_nb] + number_of_channels = int(self.__nsx_basic_header[nsx_nb]["channel_count"]) + + for data_block in data_header.keys(): # get shape and offset of data - shape = ( - int(self.__nsx_data_header[nsx_nb][data_bl]["nb_data_points"]), - int(self.__nsx_basic_header[nsx_nb]["channel_count"]), - ) - offset = int(self.__nsx_data_header[nsx_nb][data_bl]["offset_to_data_block"]) + number_of_samples = int(data_header[data_block]["nb_data_points"]) + shape = (number_of_samples, number_of_channels) + offset = int(data_header[data_block]["offset_to_data_block"]) # read data - data[data_bl] = np.memmap(filename, dtype="int16", shape=shape, offset=offset, mode="r") + data[data_block] = np.memmap(filename, dtype="int16", shape=shape, offset=offset, mode="r") return data @@ -2117,13 +2136,13 @@ def __delete_empty_segments(self): for data_bl in range(self._nb_segment): keep_seg = True for nsx_nb in self.nsx_to_load: - length = self.nsx_datas[nsx_nb][data_bl].shape[0] + length = self.nsx_data[nsx_nb][data_bl].shape[0] keep_seg = keep_seg and (length >= 2) if not keep_seg: removed_seg.append(data_bl) for nsx_nb in self.nsx_to_load: - self.nsx_datas[nsx_nb].pop(data_bl) + self.nsx_data[nsx_nb].pop(data_bl) self.__nsx_data_header[nsx_nb].pop(data_bl) # Keys need to be increasing from 0 to maximum in steps of 1 @@ -2132,8 +2151,8 @@ def __delete_empty_segments(self): for j in range(i + 1, self._nb_segment): # remap nsx seg index for nsx_nb in self.nsx_to_load: - data = self.nsx_datas[nsx_nb].pop(j) - self.nsx_datas[nsx_nb][j - 1] = data + data = self.nsx_data[nsx_nb].pop(j) + self.nsx_data[nsx_nb][j - 1] = data data_header = self.__nsx_data_header[nsx_nb].pop(j) self.__nsx_data_header[nsx_nb][j - 1] = 
data_header From 61a1f13c4217d9588b25e1fb823f95bce8403cd4 Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Tue, 13 May 2025 10:27:24 -0600 Subject: [PATCH 02/12] draft --- neo/rawio/blackrockrawio.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neo/rawio/blackrockrawio.py b/neo/rawio/blackrockrawio.py index 26094936f..ae9844504 100644 --- a/neo/rawio/blackrockrawio.py +++ b/neo/rawio/blackrockrawio.py @@ -991,7 +991,7 @@ def __read_nsx_dataheader_variant_b( while offset < filesize: packet_header = self.__read_nsx_dataheader(nsx_nb, offset) header_flag = packet_header["header_flag"] - assert header_flag == 1, f"Invalid header flag: {header_flag}" + # assert header_flag == 1, f"Invalid header flag: {header_flag}" timestamp = packet_header["timestamp"] offset_to_data_block_start = offset + packet_header.dtype.itemsize num_data_points = int(packet_header["nb_data_points"]) From 0317d3b06435f402bdbbc8b8f9d4a6717902a495 Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Wed, 2 Jul 2025 11:33:15 -0600 Subject: [PATCH 03/12] revert data to datas --- neo/rawio/blackrockrawio.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/neo/rawio/blackrockrawio.py b/neo/rawio/blackrockrawio.py index 4ef6abed5..bb1492b98 100644 --- a/neo/rawio/blackrockrawio.py +++ b/neo/rawio/blackrockrawio.py @@ -383,7 +383,7 @@ def _parse_header(self): for nsx_nb in self.nsx_to_load: self.__match_nsx_and_nev_segment_ids(nsx_nb) - self.nsx_data = {} + self.nsx_datas = {} self.sig_sampling_rates = {} if len(self.nsx_to_load) > 0: for nsx_nb in self.nsx_to_load: @@ -399,7 +399,7 @@ def _parse_header(self): _data_reader_fun = self.__nsx_data_reader["3.0-ptp"] else: _data_reader_fun = self.__nsx_data_reader[spec_version] - self.nsx_data[nsx_nb] = _data_reader_fun(nsx_nb) + self.nsx_datas[nsx_nb] = _data_reader_fun(nsx_nb) sr = float(self.main_sampling_rate / basic_header["period"]) self.sig_sampling_rates[nsx_nb] = sr @@ -446,7 
@@ def _parse_header(self): signal_channels.append((ch_name, ch_id, sr, sig_dtype, units, gain, offset, stream_id, buffer_id)) # check nb segment per nsx - nb_segments_for_nsx = [len(self.nsx_data[nsx_nb]) for nsx_nb in self.nsx_to_load] + nb_segments_for_nsx = [len(self.nsx_datas[nsx_nb]) for nsx_nb in self.nsx_to_load] if not all(nb == nb_segments_for_nsx[0] for nb in nb_segments_for_nsx): raise NeoReadWriteError("Segment nb not consistent across nsX files") self._nb_segment = nb_segments_for_nsx[0] @@ -468,7 +468,7 @@ def _parse_header(self): ts_res = 30_000 period = self.__nsx_basic_header[nsx_nb]["period"] sec_per_samp = period / 30_000 # Maybe 30_000 should be ['sample_resolution'] - length = self.nsx_data[nsx_nb][data_bl].shape[0] + length = self.nsx_datas[nsx_nb][data_bl].shape[0] if self.__nsx_data_header[nsx_nb] is None: t_start = 0.0 t_stop = max(t_stop, length / self.sig_sampling_rates[nsx_nb]) @@ -646,7 +646,7 @@ def _segment_t_stop(self, block_index, seg_index): def _get_signal_size(self, block_index, seg_index, stream_index): stream_id = self.header["signal_streams"][stream_index]["id"] nsx_nb = int(stream_id) - memmap_data = self.nsx_data[nsx_nb][seg_index] + memmap_data = self.nsx_datas[nsx_nb][seg_index] return memmap_data.shape[0] def _get_signal_t_start(self, block_index, seg_index, stream_index): @@ -657,7 +657,7 @@ def _get_signal_t_start(self, block_index, seg_index, stream_index): def _get_analogsignal_chunk(self, block_index, seg_index, i_start, i_stop, stream_index, channel_indexes): stream_id = self.header["signal_streams"][stream_index]["id"] nsx_nb = int(stream_id) - memmap_data = self.nsx_data[nsx_nb][seg_index] + memmap_data = self.nsx_datas[nsx_nb][seg_index] if channel_indexes is None: channel_indexes = slice(None) sig_chunk = memmap_data[i_start:i_stop, channel_indexes] @@ -2155,13 +2155,13 @@ def __delete_empty_segments(self): for data_bl in range(self._nb_segment): keep_seg = True for nsx_nb in self.nsx_to_load: - length = 
self.nsx_data[nsx_nb][data_bl].shape[0] + length = self.nsx_datas[nsx_nb][data_bl].shape[0] keep_seg = keep_seg and (length >= 2) if not keep_seg: removed_seg.append(data_bl) for nsx_nb in self.nsx_to_load: - self.nsx_data[nsx_nb].pop(data_bl) + self.nsx_datas[nsx_nb].pop(data_bl) self.__nsx_data_header[nsx_nb].pop(data_bl) # Keys need to be increasing from 0 to maximum in steps of 1 @@ -2170,8 +2170,8 @@ def __delete_empty_segments(self): for j in range(i + 1, self._nb_segment): # remap nsx seg index for nsx_nb in self.nsx_to_load: - data = self.nsx_data[nsx_nb].pop(j) - self.nsx_data[nsx_nb][j - 1] = data + data = self.nsx_datas[nsx_nb].pop(j) + self.nsx_datas[nsx_nb][j - 1] = data data_header = self.__nsx_data_header[nsx_nb].pop(j) self.__nsx_data_header[nsx_nb][j - 1] = data_header From 0804d97d348066b3fbcc9df4388cf22448f3fd27 Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Wed, 2 Jul 2025 11:56:58 -0600 Subject: [PATCH 04/12] blackrock --- neo/rawio/blackrockrawio.py | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/neo/rawio/blackrockrawio.py b/neo/rawio/blackrockrawio.py index bb1492b98..83a8bcdb2 100644 --- a/neo/rawio/blackrockrawio.py +++ b/neo/rawio/blackrockrawio.py @@ -900,8 +900,6 @@ def __read_nsx_header_variant_b(self, nsx_nb): # label of the sampling group (e.g., "1 kS/s" or "LFP low") ("label", "S16"), ("comment", "S256"), - # ("application_to_create_file", "S52"), # A 52-character string labeling the program which created the file. Trellis will also include its revision number in this label. - # ("processor_timestamp", "uint32"), # The processor timestamp (in 30 kHz clock cycles) at which the data in the file were collected. ("period", "uint32"), ("timestamp_resolution", "uint32"), # time origin: 2byte uint16 values for ... 
@@ -921,7 +919,6 @@ def __read_nsx_header_variant_b(self, nsx_nb): # extended header (type: CC) offset_dt0 = np.dtype(dt0).itemsize - shape = int(nsx_basic_header["channel_count"]) dt1 = [ ("type", "S2"), ("electrode_id", "uint16"), @@ -946,7 +943,8 @@ def __read_nsx_header_variant_b(self, nsx_nb): ("lo_freq_type", "uint16"), ] # 0=None, 1=Butterworth, -2-Chebyshev - nsx_ext_header = np.memmap(filename, shape=shape, offset=offset_dt0, dtype=dt1, mode="r") + channel_count = int(nsx_basic_header["channel_count"]) + nsx_ext_header = np.memmap(filename, shape=channel_count, offset=offset_dt0, dtype=dt1, mode="r") return nsx_basic_header, nsx_ext_header @@ -958,7 +956,7 @@ def __read_nsx_dataheader(self, nsx_nb, offset): major_version = self.__nsx_basic_header[nsx_nb]["ver_major"] ts_size = "uint64" if major_version >= 3 else "uint32" - #ts_size = "uint64" + # dtypes data header, the header flag is always set to 1 dt2 = [("header_flag", "uint8"), ("timestamp", ts_size), ("nb_data_points", "uint32")] @@ -998,11 +996,18 @@ def __read_nsx_dataheader_variant_b( while offset < filesize: packet_header = self.__read_nsx_dataheader(nsx_nb, offset) header_flag = packet_header["header_flag"] - # assert header_flag == 1, f"Invalid header flag: {header_flag}" + # NSX data blocks must have header_flag = 1, other values indicate file corruption + if header_flag != 1: + raise ValueError( + f"Invalid NSX data block header at offset {offset:#x} in ns{nsx_nb} file. " + f"Expected header_flag=1, got {header_flag}. " + f"This may indicate file corruption or unsupported NSX format variant. 
" + f"Block index: {data_block_index}, File size: {filesize} bytes" + ) timestamp = packet_header["timestamp"] offset_to_data_block_start = offset + packet_header.dtype.itemsize num_data_points = int(packet_header["nb_data_points"]) - + data_header[data_block_index] = { "header": header_flag, "timestamp": timestamp, @@ -1010,9 +1015,9 @@ def __read_nsx_dataheader_variant_b( "offset_to_data_block": offset_to_data_block_start, } - data_array_size = num_data_points * channel_count * np.dtype("int16").itemsize # Jump to the next data block - offset = offset_to_data_block_start + data_array_size + data_block_size = num_data_points * channel_count * np.dtype("int16").itemsize + offset = offset_to_data_block_start + data_block_size data_block_index += 1 From 91f0d6af813c2c04b6dcd8e36d4d5992931b218c Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Wed, 2 Jul 2025 12:11:17 -0600 Subject: [PATCH 05/12] fix bug --- neo/rawio/blackrockrawio.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/neo/rawio/blackrockrawio.py b/neo/rawio/blackrockrawio.py index 83a8bcdb2..fff4d0349 100644 --- a/neo/rawio/blackrockrawio.py +++ b/neo/rawio/blackrockrawio.py @@ -423,11 +423,11 @@ def _parse_header(self): signal_buffers.append((stream_name, buffer_id)) signal_streams.append((stream_name, stream_id, buffer_id)) for i, chan in enumerate(ext_header): - if spec in ["2.2", "2.3", "3.0"]: + if spec_version in ["2.2", "2.3", "3.0"]: ch_name = chan["electrode_label"].decode() ch_id = str(chan["electrode_id"]) units = chan["units"].decode() - elif spec == "2.1": + elif spec_version == "2.1": ch_name = chan["labels"] ch_id = str(self.__nsx_ext_header[nsx_nb][i]["electrode_id"]) units = chan["units"] From d839a329b4710c35d9f8d97b2974618d36424f36 Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Wed, 2 Jul 2025 12:59:24 -0600 Subject: [PATCH 06/12] Update neo/rawio/blackrockrawio.py Co-authored-by: Zach McKenzie <92116279+zm711@users.noreply.github.com> --- 
neo/rawio/blackrockrawio.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neo/rawio/blackrockrawio.py b/neo/rawio/blackrockrawio.py index fff4d0349..9f857e55f 100644 --- a/neo/rawio/blackrockrawio.py +++ b/neo/rawio/blackrockrawio.py @@ -941,7 +941,7 @@ def __read_nsx_header_variant_b(self, nsx_nb): ("lo_freq_corner", "uint32"), ("lo_freq_order", "uint32"), ("lo_freq_type", "uint16"), - ] # 0=None, 1=Butterworth, -2-Chebyshev + ] # 0=None, 1=Butterworth, 2=Chebyshev channel_count = int(nsx_basic_header["channel_count"]) nsx_ext_header = np.memmap(filename, shape=channel_count, offset=offset_dt0, dtype=dt1, mode="r") return nsx_basic_header, nsx_ext_header From 47d8c97191f0a6c26b9e93a8548471dcfe5593a0 Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Thu, 3 Jul 2025 10:39:44 -0600 Subject: [PATCH 07/12] precision time protocol --- neo/rawio/blackrockrawio.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/neo/rawio/blackrockrawio.py b/neo/rawio/blackrockrawio.py index 9f857e55f..f43e2d23c 100644 --- a/neo/rawio/blackrockrawio.py +++ b/neo/rawio/blackrockrawio.py @@ -328,7 +328,7 @@ def _parse_header(self): nsx_header_reader = self.__nsx_header_reader[spec_version] self.__nsx_basic_header[nsx_nb], self.__nsx_ext_header[nsx_nb] = nsx_header_reader(nsx_nb) - # The only way to know if it is the peak-to-peak-variant of file spec 3.0 + # The only way to know if it is the Precision Time Protocol of file spec 3.0 # is to check for nanosecond timestamp resolution. is_ptp_variant = ( "timestamp_resolution" in self.__nsx_basic_header[nsx_nb].dtype.names and self.__nsx_basic_header[nsx_nb]["timestamp_resolution"] == 1_000_000_000 @@ -389,7 +389,7 @@ def _parse_header(self): for nsx_nb in self.nsx_to_load: basic_header = self.__nsx_basic_header[nsx_nb] spec_version = self.__nsx_spec[nsx_nb] - # The only way to know if it is the peak-to-peak-variant of file spec 3.0 + # The only way to know if it is the Precision Time Protocol of file spec 3.0 # is to check for nanosecond timestamp resolution. 
is_ptp_variant = ( "timestamp_resolution" in basic_header.dtype.names From 9796814d2190fc5c4eceda46f45c1420bd21902d Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Thu, 3 Jul 2025 10:48:48 -0600 Subject: [PATCH 08/12] add int casting to input offset --- neo/rawio/blackrockrawio.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/neo/rawio/blackrockrawio.py b/neo/rawio/blackrockrawio.py index f43e2d23c..c400a84b0 100644 --- a/neo/rawio/blackrockrawio.py +++ b/neo/rawio/blackrockrawio.py @@ -988,24 +988,24 @@ def __read_nsx_dataheader_variant_b( data_header = {} - offset_to_first_data_block = offset or int(self.__nsx_basic_header[nsx_nb]["bytes_in_headers"]) + offset_to_first_data_block = int(offset or self.__nsx_basic_header[nsx_nb]["bytes_in_headers"]) channel_count = int(self.__nsx_basic_header[nsx_nb]["channel_count"]) - offset = offset_to_first_data_block + current_offset = offset_to_first_data_block data_block_index = 0 - while offset < filesize: - packet_header = self.__read_nsx_dataheader(nsx_nb, offset) + while current_offset < filesize: + packet_header = self.__read_nsx_dataheader(nsx_nb, current_offset) header_flag = packet_header["header_flag"] # NSX data blocks must have header_flag = 1, other values indicate file corruption if header_flag != 1: raise ValueError( - f"Invalid NSX data block header at offset {offset:#x} in ns{nsx_nb} file. " + f"Invalid NSX data block header at offset {current_offset:#x} in ns{nsx_nb} file. " f"Expected header_flag=1, got {header_flag}. " f"This may indicate file corruption or unsupported NSX format variant. 
" f"Block index: {data_block_index}, File size: {filesize} bytes" ) timestamp = packet_header["timestamp"] - offset_to_data_block_start = offset + packet_header.dtype.itemsize + offset_to_data_block_start = current_offset + packet_header.dtype.itemsize num_data_points = int(packet_header["nb_data_points"]) data_header[data_block_index] = { @@ -1017,7 +1017,7 @@ def __read_nsx_dataheader_variant_b( # Jump to the next data block data_block_size = num_data_points * channel_count * np.dtype("int16").itemsize - offset = offset_to_data_block_start + data_block_size + current_offset = offset_to_data_block_start + data_block_size data_block_index += 1 From 5968cdfe67ca68412d288370e6d5f2a1c2db781b Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Thu, 3 Jul 2025 10:54:41 -0600 Subject: [PATCH 09/12] specific about bytes --- neo/rawio/blackrockrawio.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/neo/rawio/blackrockrawio.py b/neo/rawio/blackrockrawio.py index c400a84b0..e592b9e7e 100644 --- a/neo/rawio/blackrockrawio.py +++ b/neo/rawio/blackrockrawio.py @@ -984,28 +984,28 @@ def __read_nsx_dataheader_variant_b( """ filename = f"{self._filenames['nsx']}.ns{nsx_nb}" - filesize = self.__get_file_size(filename) + filesize_bytes = self.__get_file_size(filename) data_header = {} offset_to_first_data_block = int(offset or self.__nsx_basic_header[nsx_nb]["bytes_in_headers"]) channel_count = int(self.__nsx_basic_header[nsx_nb]["channel_count"]) - current_offset = offset_to_first_data_block + current_offset_bytes = offset_to_first_data_block data_block_index = 0 - while current_offset < filesize: - packet_header = self.__read_nsx_dataheader(nsx_nb, current_offset) + while current_offset_bytes < filesize_bytes: + packet_header = self.__read_nsx_dataheader(nsx_nb, current_offset_bytes) header_flag = packet_header["header_flag"] # NSX data blocks must have header_flag = 1, other values indicate file corruption if header_flag != 1: raise 
ValueError( - f"Invalid NSX data block header at offset {current_offset:#x} in ns{nsx_nb} file. " + f"Invalid NSX data block header at offset {current_offset_bytes:#x} in ns{nsx_nb} file. " f"Expected header_flag=1, got {header_flag}. " f"This may indicate file corruption or unsupported NSX format variant. " f"Block index: {data_block_index}, File size: {filesize} bytes" ) timestamp = packet_header["timestamp"] - offset_to_data_block_start = current_offset + packet_header.dtype.itemsize + offset_to_data_block_start = current_offset_bytes + packet_header.dtype.itemsize num_data_points = int(packet_header["nb_data_points"]) data_header[data_block_index] = { @@ -1016,8 +1016,8 @@ def __read_nsx_dataheader_variant_b( } # Jump to the next data block - data_block_size = num_data_points * channel_count * np.dtype("int16").itemsize - current_offset = offset_to_data_block_start + data_block_size + data_block_size_bytes = num_data_points * channel_count * np.dtype("int16").itemsize + current_offset_bytes = offset_to_data_block_start + data_block_size_bytes data_block_index += 1 From 9bb771873c15fb8943b6fc90b76ad74f7b887fd2 Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Thu, 3 Jul 2025 10:56:00 -0600 Subject: [PATCH 10/12] name --- neo/rawio/blackrockrawio.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neo/rawio/blackrockrawio.py b/neo/rawio/blackrockrawio.py index e592b9e7e..2d082ea93 100644 --- a/neo/rawio/blackrockrawio.py +++ b/neo/rawio/blackrockrawio.py @@ -1015,7 +1015,7 @@ def __read_nsx_dataheader_variant_b( "offset_to_data_block": offset_to_data_block_start, } - # Jump to the next data block + # Jump to the next data block, the data is encoded as int16 data_block_size_bytes = num_data_points * channel_count * np.dtype("int16").itemsize current_offset_bytes = offset_to_data_block_start + data_block_size_bytes From d4a0ebdd17280a7923662d7b5a9220a3fe89450e Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Thu, 3 Jul 2025 10:58:16 
-0600 Subject: [PATCH 11/12] offset --- neo/rawio/blackrockrawio.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/neo/rawio/blackrockrawio.py b/neo/rawio/blackrockrawio.py index 2d082ea93..751c6bd0c 100644 --- a/neo/rawio/blackrockrawio.py +++ b/neo/rawio/blackrockrawio.py @@ -1002,11 +1002,11 @@ def __read_nsx_dataheader_variant_b( f"Invalid NSX data block header at offset {current_offset_bytes:#x} in ns{nsx_nb} file. " f"Expected header_flag=1, got {header_flag}. " f"This may indicate file corruption or unsupported NSX format variant. " - f"Block index: {data_block_index}, File size: {filesize} bytes" + f"Block index: {data_block_index}, File size: {filesize_bytes} bytes" ) timestamp = packet_header["timestamp"] - offset_to_data_block_start = current_offset_bytes + packet_header.dtype.itemsize num_data_points = int(packet_header["nb_data_points"]) + offset_to_data_block_start = current_offset_bytes + packet_header.dtype.itemsize data_header[data_block_index] = { "header": header_flag, From 2240a29c4366fb3ed5dacadb54837004651ee35e Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Thu, 3 Jul 2025 11:09:38 -0600 Subject: [PATCH 12/12] fixed --- neo/rawio/blackrockrawio.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/neo/rawio/blackrockrawio.py b/neo/rawio/blackrockrawio.py index 751c6bd0c..fb6bd9c7b 100644 --- a/neo/rawio/blackrockrawio.py +++ b/neo/rawio/blackrockrawio.py @@ -987,8 +987,10 @@ def __read_nsx_dataheader_variant_b( filesize_bytes = self.__get_file_size(filename) data_header = {} - - offset_to_first_data_block = int(offset or self.__nsx_basic_header[nsx_nb]["bytes_in_headers"]) + if offset is None: + offset_to_first_data_block = int(self.__nsx_basic_header[nsx_nb]["bytes_in_headers"]) + else: + offset_to_first_data_block = int(offset) channel_count = int(self.__nsx_basic_header[nsx_nb]["channel_count"]) current_offset_bytes = offset_to_first_data_block