Merge pull request #830 from AllenInstitute/fix_829
Fixing path and circular imports
alexpiet authored Aug 9, 2022
2 parents 0721142 + dba2df0 commit a0b0ce5
Showing 4 changed files with 11 additions and 7 deletions.
8 changes: 6 additions & 2 deletions visual_behavior/data_access/loading.py
@@ -97,8 +97,8 @@ def get_platform_analysis_cache_dir():
     This is the cache directory to use for all platform paper analysis
     This cache contains NWB files downloaded directly from AWS
     """
-    # return r'//allen/programs/braintv/workgroups/nc-ophys/visual_behavior/platform_paper_cache'
-    return r'\\allen\programs\braintv\workgroups\nc-ophys\visual_behavior\platform_paper_cache'
+    return r'//allen/programs/braintv/workgroups/nc-ophys/visual_behavior/platform_paper_cache'
+    # return r'\\allen\programs\braintv\workgroups\nc-ophys\visual_behavior\platform_paper_cache'
 
 
 def get_production_cache_dir():
@@ -3388,6 +3388,10 @@ def get_multi_session_df_for_conditions(data_type, event_type, conditions, inclu
         experiments_table = get_platform_paper_experiment_table(limit_to_closest_active=True)
         multi_session_df = multi_session_df[multi_session_df.ophys_experiment_id.isin(experiments_table.index.values)]
         print('there are', len(multi_session_df.ophys_experiment_id.unique()), 'experiments in the multi_session_df after limiting to platform experiments')
+    elif 'strategy_paper' in inclusion_criteria:
+        experiments_table = get_platform_paper_experiment_table(limit_to_closest_active=False)
+        multi_session_df = multi_session_df[multi_session_df.ophys_experiment_id.isin(experiments_table.index.values)]
+        print('there are', len(multi_session_df.ophys_experiment_id.unique()), 'experiments in the multi_session_df after limiting to strategy paper')
     else:
         experiments_table = get_filtered_ophys_experiment_table()
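The loading.py change swaps the hard-coded cache path from its Windows UNC spelling (backslashes) to the POSIX spelling (forward slashes), and adds a 'strategy_paper' branch that reuses the platform-paper experiment table with limit_to_closest_active=False. As a purely illustrative sketch (not code from this repository), the same directory could instead be chosen per platform at runtime, assuming the Allen share is reachable as \\allen on Windows and mounted at /allen on other hosts:

import os

def get_platform_analysis_cache_dir():
    # Illustrative sketch only: pick the path spelling that matches the host OS.
    # Assumes the network share is \\allen on Windows and /allen on POSIX hosts.
    if os.name == 'nt':
        return r'\\allen\programs\braintv\workgroups\nc-ophys\visual_behavior\platform_paper_cache'
    return r'//allen/programs/braintv/workgroups/nc-ophys/visual_behavior/platform_paper_cache'

cache_dir = get_platform_analysis_cache_dir()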
4 changes: 2 additions & 2 deletions visual_behavior/database.py
@@ -244,7 +244,7 @@ def populate_id_dict(input_id_dict):
         'ophys_experiment_id': None,
     }
 
-    assert(len(input_id_dict) == 1), "use only one ID type to identify others"
+    assert (len(input_id_dict) == 1), "use only one ID type to identify others"
     for key in input_id_dict:
         assert key in ids.keys(), "input key must be one of {}".format(list(ids.keys()))
         ids[key] = input_id_dict[key]
@@ -559,7 +559,7 @@ def simplify_type(x):
     elif is_uuid(x):
         return str(x)
     elif is_array(x):
-        return[simplify_type(e) for e in x]
+        return [simplify_type(e) for e in x]
     else:
         return x
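Both database.py edits are whitespace-only (a space after the assert and return keywords); behavior is unchanged. For context on the surrounding simplify_type helper, the is_array branch applies the same simplification element-wise, which the following standalone sketch mimics with generic type checks in place of the repository's is_uuid/is_array helpers (illustrative only):

import uuid

def simplify(value):
    # Recursively coerce values into plain, serializable Python types.
    if isinstance(value, uuid.UUID):
        return str(value)
    elif isinstance(value, (list, tuple, set)):
        # Same pattern as the diff above: simplify each element in turn.
        return [simplify(element) for element in value]
    else:
        return value

# A UUID nested inside a list comes back as a plain string.
print(simplify([uuid.uuid4(), 3, 'text']))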
2 changes: 1 addition & 1 deletion visual_behavior/utilities.py
@@ -16,7 +16,7 @@
 
 from visual_behavior.ophys.sync.sync_dataset import Dataset
 from visual_behavior.data_access import loading
-import visual_behavior.visualization.behavior as behavior
+# import visual_behavior.visualization.behavior as behavior
 
 
 def flatten_list(in_list):
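The utilities.py change comments out the module-level import of visual_behavior.visualization.behavior, which is the "circular imports" part of the commit message. A common alternative to removing such an import is to defer it into the function that needs it, so the cycle only has to be resolved after both modules have loaded. The self-contained sketch below demonstrates that pattern with hypothetical module names (not this repository's modules):

import os
import sys
import tempfile
import textwrap

# Two toy modules that depend on each other: mod_a imports mod_b at the top of
# the file, while mod_b only imports mod_a inside a function (deferred import).
demo_dir = tempfile.mkdtemp()

with open(os.path.join(demo_dir, 'mod_a.py'), 'w') as f:
    f.write(textwrap.dedent('''
        import mod_b  # ordinary top-level import

        def greet():
            return 'a -> ' + mod_b.describe()
    '''))

with open(os.path.join(demo_dir, 'mod_b.py'), 'w') as f:
    f.write(textwrap.dedent('''
        def describe():
            # Deferred import: mod_a is only looked up when describe() runs,
            # after both modules have finished loading, so the a <-> b cycle
            # never has to be resolved mid-import.
            import mod_a
            return 'b (can see ' + mod_a.__name__ + ')'
    '''))

sys.path.insert(0, demo_dir)
import mod_a  # loads cleanly despite the mutual dependency
print(mod_a.greet())  # a -> b (can see mod_a)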
4 changes: 2 additions & 2 deletions visual_behavior/validation/extended_trials.py
@@ -820,7 +820,7 @@ def merge_in_omitted_flashes(visual_stimuli, omitted_stimuli):
     elif six.PY3:
         visual_stimuli = pd.concat((visual_stimuli, omitted_stimuli), sort=True).sort_values(by='frame').reset_index()
     else:
-        raise(RuntimeError)
+        raise (RuntimeError)
 
     # was previous flash omitted?
     visual_stimuli['previous_omitted'] = visual_stimuli['omitted'].shift()
@@ -936,7 +936,7 @@ def validate_initial_blank(trials, visual_stimuli, omitted_stimuli, initial_blan
     elif six.PY3:
         visual_stimuli = pd.concat((visual_stimuli, omitted_stimuli), sort=True).sort_values(by='frame').reset_index()
     else:
-        raise(RuntimeError)
+        raise (RuntimeError)
 
     # preallocate array
     initial_blank_in_tolerance = np.empty(len(trials))
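The two extended_trials.py edits are likewise whitespace-only (a space after raise); the logic that merges omitted flashes into the visual stimulus table is untouched. To illustrate what the pd.concat(...).sort_values(by='frame').reset_index() chain in the context above produces, here is a toy example with made-up frame numbers (illustrative only, not repository data):

import pandas as pd

visual_stimuli = pd.DataFrame({'frame': [10, 30], 'omitted': [False, False]})
omitted_stimuli = pd.DataFrame({'frame': [20], 'omitted': [True]})

# Stack the two tables, align their columns (sort=True), order by frame, and
# move the old index into a column, as in the validation code above.
merged = (pd.concat((visual_stimuli, omitted_stimuli), sort=True)
          .sort_values(by='frame')
          .reset_index())

# Same shift used in merge_in_omitted_flashes: was the previous flash omitted?
merged['previous_omitted'] = merged['omitted'].shift()
print(merged)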
