Merge pull request #1608 from h-mayorquin/use_device_index_in_neo
Open folders with user altered filenames in SpikeGLX
zm711 authored Jan 17, 2025
2 parents 17a3e8b + 8731e88 commit 6f571f0
Showing 2 changed files with 68 additions and 30 deletions.
96 changes: 66 additions & 30 deletions neo/rawio/spikeglxrawio.py
@@ -82,14 +82,11 @@ class SpikeGLXRawIO(BaseRawWithBufferApiIO):
Notes
-----
* Contrary to other implementations this IO reads the entire folder and subfolders and:
deals with several segments based on the `_gt0`, `_gt1`, `_gt2`, etc postfixes
deals with all signals "imec0", "imec1" for neuropixel probes and also
external signal like"nidq". This is the "device"
* For imec device both "ap" and "lf" are extracted so one device have several "streams"
* There are several versions depending the neuropixel probe generation (`1.x`/`2.x`/`3.x`)
* Here, we assume that the `meta` file has the same structure across all generations.
* This IO is developed based on neuropixel generation 2.0, single shank recordings.
* This IO reads the entire folder and subfolders locating the `.bin` and `.meta` files
* Handles gates and triggers as segments (based on the `_gt0`, `_gt1`, `_t0`, `_t1` in filenames)
* Handles all signals coming from different acquisition cards ("imec0", "imec1", etc.) in a typical
  PXIe chassis setup and also external signals like "nidq".
* For imec devices both "ap" and "lf" are extracted, so even a one-device setup will have several "streams"
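For orientation, a hedged sketch of what these notes mean in practice; the folder name, file layout and printed output below are invented, and the stream order in a real run depends on the sampling rates:

    from neo.rawio import SpikeGLXRawIO

    # Hypothetical folder "myrun_g0" containing one gate, two triggers, one probe and an NI card:
    #   myrun_g0_t0.imec0.ap.bin/.meta, myrun_g0_t0.imec0.lf.bin/.meta, myrun_g0_t0.nidq.bin/.meta
    #   myrun_g0_t1.imec0.ap.bin/.meta, myrun_g0_t1.imec0.lf.bin/.meta, myrun_g0_t1.nidq.bin/.meta
    reader = SpikeGLXRawIO(dirname="path/to/myrun_g0")
    reader.parse_header()
    print(reader.header["signal_streams"]["name"])  # something like ['imec0.ap', 'imec0.lf', 'nidq']
    print(reader.segment_count(block_index=0))      # 2, one segment per (gate, trigger) pair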
Examples
--------
@@ -125,7 +122,6 @@ def _parse_header(self):
stream_names = sorted(list(srates.keys()), key=lambda e: srates[e])[::-1]
nb_segment = np.unique([info["seg_index"] for info in self.signals_info_list]).size

# self._memmaps = {}
self.signals_info_dict = {}
# one unique block
self._buffer_descriptions = {0: {}}
@@ -166,7 +162,6 @@ def _parse_header(self):

stream_id = stream_name

stream_index = stream_names.index(info["stream_name"])
signal_streams.append((stream_name, stream_id, buffer_id))

# add channels to global list
@@ -250,7 +245,6 @@ def _parse_header(self):
# insert some annotation at some place
self._generate_minimal_annotations()
self._generate_minimal_annotations()
block_ann = self.raw_annotations["blocks"][0]

for seg_index in range(nb_segment):
seg_ann = self.raw_annotations["blocks"][0]["segments"][seg_index]
@@ -354,23 +348,56 @@ def scan_files(dirname):
if len(info_list) == 0:
raise FileNotFoundError(f"No appropriate combination of .meta and .bin files were detected in {dirname}")

# the segment index will depend on both 'gate_num' and 'trigger_num'
# so we order by 'gate_num' then 'trigger_num'
# None is before any int
def make_key(info):
k0 = info["gate_num"]
if k0 is None:
k0 = -1
k1 = info["trigger_num"]
if k1 is None:
k1 = -1
return (k0, k1)

order_key = list({make_key(info) for info in info_list})
order_key = sorted(order_key)
# This sets non-integer values before integers
normalize = lambda x: x if isinstance(x, int) else -1

# Segment index is determined by the gate_num and trigger_num in that order
def get_segment_tuple(info):
# Create a key from the normalized gate_num and trigger_num
gate_num = normalize(info.get("gate_num"))
trigger_num = normalize(info.get("trigger_num"))
return (gate_num, trigger_num)

unique_segment_tuples = {get_segment_tuple(info) for info in info_list}
sorted_keys = sorted(unique_segment_tuples)

# Map each unique key to a corresponding index
segment_tuple_to_segment_index = {key: idx for idx, key in enumerate(sorted_keys)}

for info in info_list:
info["seg_index"] = order_key.index(make_key(info))
info["seg_index"] = segment_tuple_to_segment_index[get_segment_tuple(info)]


# Probe index calculation
# The calculation is ordered by slot, port, dock in that order; this is the number that appears in the filename
# after imec when using native names (e.g. imec0, imec1, etc.)
def get_probe_tuple(info):
slot = normalize(info.get("probe_slot"))
port = normalize(info.get("probe_port"))
dock = normalize(info.get("probe_dock"))
return (slot, port, dock)

# TODO: handle one box case
info_list_imec = [info for info in info_list if info.get("device") != "nidq"]
unique_probe_tuples = {get_probe_tuple(info) for info in info_list_imec}
sorted_probe_keys = sorted(unique_probe_tuples)
probe_tuple_to_probe_index = {key: idx for idx, key in enumerate(sorted_probe_keys)}

for info in info_list:
if info.get("device") == "nidq":
info["device_index"] = "" # TODO: Handle multi nidq case, maybe use meta["typeNiEnabled"]
else:
info["device_index"] = probe_tuple_to_probe_index[get_probe_tuple(info)]

# Define the stream name based on device_kind [imec|nidq], device_index and stream_kind [ap|lf] for imec
for info in info_list:
device_kind = info["device_kind"]
device_index = info["device_index"]
stream_kind = f".{info['stream_kind']}" if info["stream_kind"] else ""
stream_name = f"{device_kind}{device_index}{stream_kind}"

info["stream_name"] = stream_name

return info_list
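To make the resulting names concrete, a short hedged sketch of the composition above with invented values; "nidq" gets an empty device_index and no stream_kind, so its stream name is just the device kind:

    examples = [("imec", 0, "ap"), ("imec", 0, "lf"), ("imec", 1, "ap"), ("nidq", "", "")]
    for device_kind, device_index, stream_kind in examples:
        suffix = f".{stream_kind}" if stream_kind else ""
        print(f"{device_kind}{device_index}{suffix}")  # imec0.ap, imec0.lf, imec1.ap, nidq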


@@ -488,13 +515,15 @@ def extract_stream_info(meta_file, meta):
else:
# NIDQ case
has_sync_trace = False
fname = Path(meta_file).stem

bin_file_path = meta["fileName"]
fname = Path(bin_file_path).stem

run_name, gate_num, trigger_num, device, stream_kind = parse_spikeglx_fname(fname)
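A hedged sketch of why the name is taken from the meta contents rather than from the file on disk (paths invented): the "fileName" entry in the .meta records the path SpikeGLX originally wrote, so gate, trigger and device can still be parsed even if the user renamed the .bin/.meta files:

    from pathlib import Path

    # The files on disk may have been renamed, e.g. to "my_renamed_copy.meta",
    # but the meta contents still carry the original SpikeGLX path:
    meta = {"fileName": "D:/data/myrun_g0_t0.imec0.ap.bin"}
    fname = Path(meta["fileName"]).stem
    print(fname)  # myrun_g0_t0.imec0.ap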

if "imec" in fname.split(".")[-2]:
device = fname.split(".")[-2]
stream_kind = fname.split(".")[-1]
stream_name = device + "." + stream_kind
units = "uV"
# please note the 1e6 in gain for this uV

@@ -534,7 +563,6 @@ def extract_stream_info(meta_file, meta):
else:
device = fname.split(".")[-1]
stream_kind = ""
stream_name = device
units = "V"
channel_gains = np.ones(num_chan)

@@ -550,6 +578,10 @@ def extract_stream_info(meta_file, meta):
gain_factor = float(meta["niAiRangeMax"]) / 32768
channel_gains = per_channel_gain * gain_factor
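A hedged numeric example of the NIDQ scaling above (values invented): with niAiRangeMax of 5 V and a per-channel factor of 1, one ADC count is roughly 1.5e-4 V:

    per_channel_gain = 1.0
    gain_factor = 5.0 / 32768              # float(meta["niAiRangeMax"]) / 32768 with a 5 V range
    channel_gain = per_channel_gain * gain_factor
    print(channel_gain)                    # ~0.0001526 V per count
    print(16384 * channel_gain)            # a half-scale sample -> 2.5 V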

probe_slot = meta.get("imDatPrb_slot", None)
probe_port = meta.get("imDatPrb_port", None)
probe_dock = meta.get("imDatPrb_dock", None)

info = {}
info["fname"] = fname
info["meta"] = meta
@@ -563,12 +595,16 @@ def extract_stream_info(meta_file, meta):
info["trigger_num"] = trigger_num
info["device"] = device
info["stream_kind"] = stream_kind
info["stream_name"] = stream_name
# All production probes (phase 3B onwards) have "typeThis", otherwise revert to file parsing
info["device_kind"] = meta.get("typeThis", device.split(".")[0])
info["units"] = units
info["channel_names"] = [txt.split(";")[0] for txt in meta["snsChanMap"]]
info["channel_gains"] = channel_gains
info["channel_offsets"] = np.zeros(info["num_chan"])
info["has_sync_trace"] = has_sync_trace
info["probe_slot"] = int(probe_slot) if probe_slot else None
info["probe_port"] = int(probe_port) if probe_port else None
info["probe_dock"] = int(probe_dock) if probe_dock else None

if "nidq" in device:
info["digital_channels"] = []
2 changes: 2 additions & 0 deletions neo/test/rawiotest/test_spikeglxrawio.py
@@ -32,6 +32,8 @@ class TestSpikeGLXRawIO(BaseTestRawIO, unittest.TestCase):
"spikeglx/NP2_subset_with_sync",
# NP-ultra
"spikeglx/np_ultra_stub",
# Filename changed by the user, multi-dock
"spikeglx/multi_probe_multi_dock_multi_shank_filename_without_info",
# CatGT
"spikeglx/multi_trigger_multi_gate/CatGT/CatGT-A",
"spikeglx/multi_trigger_multi_gate/CatGT/CatGT-B",
