Skip to content

Commit

Permalink
Set sparse=False during waveform extraction (#1039)
Browse files Browse the repository at this point in the history
* Save LFP as pynwb.ecephys.LFP

* Fix formatting

* Fix formatting

* Set sparse=False during wf extraction

* Black

* Update Changelog

* Black

---------

Co-authored-by: Eric Denovellis <[email protected]>
Co-authored-by: Eric Denovellis <[email protected]>
  • Loading branch information
3 people authored Aug 5, 2024
1 parent 24793f5 commit 907098a
Show file tree
Hide file tree
Showing 8 changed files with 3 additions and 8 deletions.
3 changes: 2 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,8 @@ PositionGroup.alter()
`spikesorting.v1.SpikeSorting` #1009
- Remove deprecated dependencies on kachery_client #1014
- Add `UnitAnnotation` table and naming convention for units #1027, #1052

- Set `sparse` parameter (default `False`) in the waveform extraction step of `spikesorting.v1`
#1039
## [0.5.2] (April 22, 2024)

### Infrastructure
Expand Down
1 change: 0 additions & 1 deletion src/spyglass/decoding/v1/clusterless.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,6 @@ def make(self, key):
classifier = ClusterlessDetector(**decoding_params)

if key["estimate_decoding_params"]:

# if estimating parameters, then we need to treat times outside
# decoding interval as missing this means that times outside the
# decoding interval will not use the spiking data a better approach
Expand Down
1 change: 0 additions & 1 deletion src/spyglass/decoding/v1/sorted_spikes.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,6 @@ def make(self, key):
classifier = SortedSpikesDetector(**decoding_params)

if key["estimate_decoding_params"]:

# if estimating parameters, then we need to treat times outside
# decoding interval as missing this means that times outside the
# decoding interval will not use the spiking data a better approach
Expand Down
2 changes: 0 additions & 2 deletions src/spyglass/position/v1/position_dlc_pose_estimation.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,6 @@ def insert_estimation_task(
def _insert_est_with_log(
self, key, task_mode, params, check_crop, skip_duplicates, output_dir
):

v_path, v_fname, _, _ = get_video_info(key)
if not v_path:
raise FileNotFoundError(f"Video file not found for {key}")
Expand Down Expand Up @@ -215,7 +214,6 @@ def make(self, key):

@file_log(logger, console=True)
def _logged_make(self, key):

METERS_PER_CM = 0.01

logger.info("----------------------")
Expand Down
1 change: 0 additions & 1 deletion src/spyglass/position/v1/position_dlc_position.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,7 +169,6 @@ def make(self, key):

@file_log(logger, console=False)
def _logged_make(self, key):

METERS_PER_CM = 0.01

logger.info("-----------------------")
Expand Down
1 change: 1 addition & 0 deletions src/spyglass/spikesorting/v1/metric_curation.py
Original file line number Diff line number Diff line change
Expand Up @@ -242,6 +242,7 @@ def make(self, key):
waveforms = si.extract_waveforms(
recording=recording,
sorting=sorting,
sparse=waveform_params.get("sparse", False),
folder=waveforms_dir,
overwrite=True,
**waveform_params,
Expand Down
1 change: 0 additions & 1 deletion tests/common/test_position.py
Original file line number Diff line number Diff line change
Expand Up @@ -183,7 +183,6 @@ def rename_default_cols(common_position):
],
)
def test_rename_columns(rename_default_cols, col_type, cols):

_fix_col_names, defaults = rename_default_cols
df = pd.DataFrame([range(len(cols) + 1)], columns=["junk"] + cols)
result = _fix_col_names(df).columns.tolist()
Expand Down
1 change: 0 additions & 1 deletion tests/utils/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -233,7 +233,6 @@ class MergeChild(SpyglassMixin, dj.Manual):

@pytest.fixture(scope="module")
def graph_tables(dj_conn, graph_schema):

schema = dj.Schema(context=graph_schema)

for table in graph_schema.values():
Expand Down

0 comments on commit 907098a

Please sign in to comment.