logger.warn -> warning
CBroz1 committed Nov 11, 2024
1 parent 024b726 commit 942e279
Showing 21 changed files with 40 additions and 37 deletions.
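
The rename follows the Python standard library: logging.Logger.warn is a deprecated alias of Logger.warning (deprecated since Python 3.3) that emits a DeprecationWarning on each call. Below is a minimal sketch with a plain stdlib logger illustrating the difference; Spyglass's shared logger object is assumed to behave the same way, since it is built on logging.

    import logging
    import warnings

    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger("spyglass")

    # Surface DeprecationWarning; CPython filters it by default outside __main__.
    warnings.simplefilter("always", DeprecationWarning)

    logger.warn("old spelling")     # deprecated alias: warns, then delegates to warning()
    logger.warning("new spelling")  # canonical method name

Running the test suite with python -W error::DeprecationWarning would turn any remaining warn call into a hard error, which is one way to check that a sweep like this caught every call site.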
6 changes: 4 additions & 2 deletions src/spyglass/common/common_behav.py
@@ -197,7 +197,7 @@ def fetch1_dataframe(self):
         id_rp = [(n["id"], n["raw_position"]) for n in self.fetch_nwb()]
 
         if len(set(rp.interval for _, rp in id_rp)) > 1:
-            logger.warn("Loading DataFrame with multiple intervals.")
+            logger.warning("Loading DataFrame with multiple intervals.")
 
         df_list = [
             pd.DataFrame(
@@ -395,7 +395,9 @@ def _no_transaction_make(self, key, verbose=True, skip_duplicates=False):
         )
 
         if videos is None:
-            logger.warn(f"No video data interface found in {nwb_file_name}\n")
+            logger.warning(
+                f"No video data interface found in {nwb_file_name}\n"
+            )
             return
         else:
             videos = videos.time_series

12 changes: 7 additions & 5 deletions src/spyglass/common/common_device.py
@@ -88,7 +88,9 @@ def insert_from_nwbfile(cls, nwbf, config=None):
                 + f"{ndx_devices.keys()}"
             )
         else:
-            logger.warn("No conforming data acquisition device metadata found.")
+            logger.warning(
+                "No conforming data acquisition device metadata found."
+            )
 
     @classmethod
     def get_all_device_names(cls, nwbf, config) -> tuple:
@@ -305,7 +307,7 @@ def insert_from_nwbfile(cls, nwbf, config=None):
         if device_name_list:
             logger.info(f"Inserted camera devices {device_name_list}")
         else:
-            logger.warn("No conforming camera device metadata found.")
+            logger.warning("No conforming camera device metadata found.")
         return device_name_list
 
 
@@ -462,7 +464,7 @@ def insert_from_nwbfile(cls, nwbf, config=None):
         if all_probes_types:
             logger.info(f"Inserted probes {all_probes_types}")
         else:
-            logger.warn("No conforming probe metadata found.")
+            logger.warning("No conforming probe metadata found.")
 
         return all_probes_types
 
@@ -709,7 +711,7 @@ def create_from_nwbfile(
 
         query = ProbeType & {"probe_type": probe_type}
         if len(query) == 0:
-            logger.warn(
+            logger.warning(
                 f"No ProbeType found with probe_type '{probe_type}'. Aborting."
             )
             return
@@ -769,7 +771,7 @@ def create_from_nwbfile(
         ]
 
         if not device_found:
-            logger.warn(
+            logger.warning(
                 "No electrodes in the NWB file were associated with a device "
                 + f"named '{nwb_device_name}'."
             )

2 changes: 1 addition & 1 deletion src/spyglass/common/common_dio.py
@@ -38,7 +38,7 @@ def make(self, key):
             nwbf, "behavioral_events", pynwb.behavior.BehavioralEvents
         )
         if behav_events is None:
-            logger.warn(
+            logger.warning(
                 "No conforming behavioral events data interface found in "
                 + f"{nwb_file_name}\n"
             )

4 changes: 2 additions & 2 deletions src/spyglass/common/common_session.py
@@ -152,7 +152,7 @@ def _add_data_acquisition_device_part(self, nwb_file_name, nwbf, config={}):
             "data_acquisition_device_name": device_name
         }
         if len(query) == 0:
-            logger.warn(
+            logger.warning(
                 "Cannot link Session with DataAcquisitionDevice.\n"
                 + f"DataAcquisitionDevice does not exist: {device_name}"
             )
@@ -180,7 +180,7 @@ def _add_experimenter_part(
         # ensure that the foreign key exists and do nothing if not
         query = LabMember & {"lab_member_name": name}
         if len(query) == 0:
-            logger.warn(
+            logger.warning(
                 "Cannot link Session with LabMember. "
                 + f"LabMember does not exist: {name}"
             )

2 changes: 1 addition & 1 deletion src/spyglass/common/common_subject.py
@@ -37,7 +37,7 @@ def insert_from_nwbfile(cls, nwbf: NWBFile, config: dict = None):
         """
         config = config or dict()
         if "Subject" not in config and nwbf.subject is None:
-            logger.warn("No subject metadata found.\n")
+            logger.warning("No subject metadata found.\n")
             return None
 
         conf = config["Subject"][0] if "Subject" in config else dict()

10 changes: 5 additions & 5 deletions src/spyglass/common/common_task.py
@@ -33,7 +33,7 @@ def insert_from_nwbfile(cls, nwbf: pynwb.NWBFile):
         """
         tasks_mod = nwbf.processing.get("tasks")
         if tasks_mod is None:
-            logger.warn(f"No tasks processing module found in {nwbf}\n")
+            logger.warning(f"No tasks processing module found in {nwbf}\n")
             return
         for task in tasks_mod.data_interfaces.values():
             if cls.check_task_table(task):
@@ -136,7 +136,7 @@ def make(self, key):
         tasks_mod = nwbf.processing.get("tasks")
         config_tasks = config.get("Tasks", [])
         if tasks_mod is None and (not config_tasks):
-            logger.warn(
+            logger.warning(
                 f"No tasks processing module found in {nwbf} or config\n"
             )
             return
@@ -163,7 +163,7 @@ def make(self, key):
                     for camera_id in valid_camera_ids
                 ]
             else:
-                logger.warn(
+                logger.warning(
                     f"No camera device found with ID {camera_ids} in NWB "
                     + f"file {nwbf}\n"
                 )
@@ -186,7 +186,7 @@ def make(self, key):
                 epoch, session_intervals
             )
             if target_interval is None:
-                logger.warn("Skipping epoch.")
+                logger.warning("Skipping epoch.")
                 continue
             key["interval_list_name"] = target_interval
             task_inserts.append(key.copy())
@@ -219,7 +219,7 @@ def make(self, key):
                 epoch, session_intervals
             )
             if target_interval is None:
-                logger.warn("Skipping epoch.")
+                logger.warning("Skipping epoch.")
                 continue
             new_key["interval_list_name"] = target_interval
             task_inserts.append(key.copy())

2 changes: 1 addition & 1 deletion src/spyglass/common/prepopulate/prepopulate.py
@@ -57,7 +57,7 @@ def populate_from_yaml(yaml_path: str):
             if k in table_cls.primary_key
         }
         if not primary_key_values:
-            logger.warn(
+            logger.warning(
                 f"Populate: No primary key provided in data {entry_dict} "
                 + f"for table {table_cls.__name__}"
             )

4 changes: 2 additions & 2 deletions src/spyglass/spikesorting/utils.py
@@ -108,7 +108,7 @@ def get_group_by_shank(
                 if omit_ref_electrode_group and (
                     str(e_group) == str(ref_elec_group)
                 ):
-                    logger.warn(
+                    logger.warning(
                         f"Omitting electrode group {e_group} from sort groups "
                         + "because contains reference."
                     )
@@ -117,7 +117,7 @@ def get_group_by_shank(
 
             # omit unitrodes if indicated
             if omit_unitrode and len(shank_elect) == 1:
-                logger.warn(
+                logger.warning(
                     f"Omitting electrode group {e_group}, shank {shank} "
                     + "from sort groups because unitrode."
                 )

2 changes: 1 addition & 1 deletion src/spyglass/spikesorting/v0/sortingview.py
@@ -72,7 +72,7 @@ def make(self, key: dict):
                 LabMember.LabMemberInfo & {"lab_member_name": team_member}
             ).fetch("google_user_name")
             if len(google_user_id) != 1:
-                logger.warn(
+                logger.warning(
                     f"Google user ID for {team_member} does not exist or more than one ID detected;\
                     permission not given to {team_member}, skipping..."
                 )

2 changes: 1 addition & 1 deletion src/spyglass/spikesorting/v0/sortingview_helper_fn.py
@@ -136,7 +136,7 @@ def _generate_url(
         )
 
     if initial_curation is not None:
-        logger.warn("found initial curation")
+        logger.warning("found initial curation")
         sorting_curation_uri = kcl.store_json(initial_curation)
     else:
         sorting_curation_uri = None

2 changes: 1 addition & 1 deletion src/spyglass/spikesorting/v0/spikesorting_artifact.py
@@ -263,7 +263,7 @@ def _get_artifact_times(
             [[valid_timestamps[0], valid_timestamps[-1]]]
         )
         artifact_times_empty = np.asarray([])
-        logger.warn("No artifacts detected.")
+        logger.warning("No artifacts detected.")
         return recording_interval, artifact_times_empty
 
     # convert indices to intervals

2 changes: 1 addition & 1 deletion src/spyglass/spikesorting/v0/spikesorting_curation.py
@@ -266,7 +266,7 @@ def save_sorting_nwb(
     AnalysisNwbfile().add(key["nwb_file_name"], analysis_file_name)
 
     if object_ids == "":
-        logger.warn(
+        logger.warning(
             "Sorting contains no units."
             "Created an empty analysis nwb file anyway."
         )

4 changes: 2 additions & 2 deletions src/spyglass/spikesorting/v1/artifact.py
@@ -98,7 +98,7 @@ def insert_selection(cls, key: dict):
         """
         query = cls & key
         if query:
-            logger.warn("Similar row(s) already inserted.")
+            logger.warning("Similar row(s) already inserted.")
             return query.fetch(as_dict=True)
         key["artifact_id"] = uuid.uuid4()
         cls.insert1(key, skip_duplicates=True)
@@ -290,7 +290,7 @@ def _get_artifact_times(
             [[valid_timestamps[0], valid_timestamps[-1]]]
         )
         artifact_times_empty = np.asarray([])
-        logger.warn("No artifacts detected.")
+        logger.warning("No artifacts detected.")
         return recording_interval, artifact_times_empty
 
     # convert indices to intervals

2 changes: 1 addition & 1 deletion src/spyglass/spikesorting/v1/figurl_curation.py
@@ -51,7 +51,7 @@ def insert_selection(cls, key: dict):
         if "figurl_curation_id" in key:
             query = cls & {"figurl_curation_id": key["figurl_curation_id"]}
             if query:
-                logger.warn("Similar row(s) already inserted.")
+                logger.warning("Similar row(s) already inserted.")
                 return query.fetch(as_dict=True)
         key["figurl_curation_id"] = uuid.uuid4()
         cls.insert1(key, skip_duplicates=True)

2 changes: 1 addition & 1 deletion src/spyglass/spikesorting/v1/metric_curation.py
@@ -190,7 +190,7 @@ def insert_selection(cls, key: dict):
         key for the inserted row
         """
         if cls & key:
-            logger.warn("This row has already been inserted.")
+            logger.warning("This row has already been inserted.")
             return (cls & key).fetch1()
         key["metric_curation_id"] = uuid.uuid4()
         cls.insert1(key, skip_duplicates=True)

2 changes: 1 addition & 1 deletion src/spyglass/spikesorting/v1/recording.py
@@ -154,7 +154,7 @@ def insert_selection(cls, key: dict):
         """
         query = cls & key
         if query:
-            logger.warn("Similar row(s) already inserted.")
+            logger.warning("Similar row(s) already inserted.")
             return query.fetch(as_dict=True)
         key["recording_id"] = uuid.uuid4()
         cls.insert1(key, skip_duplicates=True)

2 changes: 1 addition & 1 deletion src/spyglass/utils/dj_helper_fn.py
@@ -124,7 +124,7 @@ def _subclass_factory(
 
     # Define the __call__ method for the new class
     def init_override(self, *args, **kwargs):
-        logger.warn(
+        logger.warning(
             "Deprecation: this class has been moved out of "
             + f"{old_module}\n"
             + f"\t{old_name} -> {new_module}.{new_class.__name__}"

8 changes: 4 additions & 4 deletions src/spyglass/utils/dj_merge_tables.py
@@ -66,14 +66,14 @@ def __init__(self):
         self._reserved_sk = RESERVED_SECONDARY_KEY
         if not self.is_declared:
             if not is_merge_table(self):  # Check definition
-                logger.warn(
+                logger.warning(
                     "Merge table with non-default definition\n"
                     + f"Expected:\n{MERGE_DEFINITION.strip()}\n"
                     + f"Actual :\n{self.definition.strip()}"
                 )
             for part in self.parts(as_objects=True):
                 if part.primary_key != self.primary_key:
-                    logger.warn(  # PK is only 'merge_id' in parts, no others
+                    logger.warning(  # PK is only 'merge_id' in parts, no others
                         f"Unexpected primary key in {part.table_name}"
                         + f"\n\tExpected: {self.primary_key}"
                         + f"\n\tActual : {part.primary_key}"
@@ -721,7 +721,7 @@ def _normalize_source(
             raise ValueError(f"Unable to find source for {source}")
         source = fetched_source[0]
         if len(fetched_source) > 1:
-            logger.warn(f"Multiple sources. Selecting first: {source}.")
+            logger.warning(f"Multiple sources. Selecting first: {source}.")
         if isinstance(source, dj.Table):
             source = self._part_name(source)
         if isinstance(source, dict):
@@ -814,7 +814,7 @@ def merge_fetch(
         try:
             results.extend(part.fetch(*attrs, **kwargs))
         except DataJointError as e:
-            logger.warn(
+            logger.warning(
                 f"{e.args[0]} Skipping "
                 + to_camel_case(part.table_name.split("__")[-1])
             )

4 changes: 2 additions & 2 deletions src/spyglass/utils/dj_mixin.py
@@ -368,15 +368,15 @@ def _check_delete_permission(self) -> None:
             not self._session_connection  # Table has no session
             or self._member_pk in self.heading.names  # Table has experimenter
         ):
-            logger.warn(  # Permit delete if no session connection
+            logger.warning(  # Permit delete if no session connection
                 "Could not find lab team associated with "
                 + f"{self.__class__.__name__}."
                 + "\nBe careful not to delete others' data."
             )
             return
 
         if not (sess_summary := self._get_exp_summary()):
-            logger.warn(
+            logger.warning(
                 f"Could not find a connection from {self.camel_name} "
                 + "to Session.\n Be careful not to delete others' data."
             )

2 changes: 1 addition & 1 deletion src/spyglass/utils/nwb_helper_fn.py
@@ -364,7 +364,7 @@ def get_valid_intervals(
 
     if total_time < min_valid_len:
         half_total_time = total_time / 2
-        logger.warn(f"Setting minimum valid interval to {half_total_time}")
+        logger.warning(f"Setting minimum valid interval to {half_total_time}")
         min_valid_len = half_total_time
 
     # get rid of NaN elements

1 change: 0 additions & 1 deletion tests/conftest.py
@@ -419,7 +419,6 @@ def video_keys(common, base_dir):
     for file in DOWNLOADS.file_downloads:
         if file.endswith(".h264"):
             DOWNLOADS.wait_for(file)
-    DOWNLOADS.rename_files()
 
     return common.VideoFile().fetch(as_dict=True)

