
Commit

Merge pull request #252 from DESm1th/patches
[FIX] Python3 conversion bug fixes (Round 2)
DESm1th authored Dec 31, 2019
2 parents 76c0bc0 + 2262e12 commit 8bc2c5c
Showing 33 changed files with 77 additions and 95 deletions.
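
Most of the edits below apply a handful of recurring Python 2-to-3 fixes: writing str output to text-mode file handles instead of 'wb', formatting exception objects directly now that the .message attribute is gone, replacing type(x) is str with isinstance(), comparing integers with != rather than 'is not', and narrowing bare except: clauses to except Exception:. A minimal, illustrative sketch of these idioms (assumed function names, not code copied from the diff):

def write_timings(path, onsets):
    # Python 2 opened output files with 'wb'; in Python 3, str must be
    # written to a text-mode handle, so 'wb' becomes 'w'.
    with open(path, 'w') as out:
        for onset in onsets:
            out.write('{o:.2f} '.format(o=onset))

def log_failure(logger, archive, e):
    # Exception.message was removed in Python 3; format the exception itself.
    logger.error("Please check {}: {}".format(archive, e))

def normalize_tags(site_tags):
    # isinstance() replaces 'type(site_tags) is str' and also matches subclasses.
    if isinstance(site_tags, str):
        site_tags = [site_tags]
    return site_tags

def server_unreachable(response):
    # 'is not 200' tests object identity, not value; use != for integers.
    return response.status_code != 200
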
22 changes: 13 additions & 9 deletions assets/imob-parse.py
@@ -5,6 +5,7 @@

import numpy as np


def return_timings(data, trial_type):
"""
Finds all trials matching the string 'trial_type',
@@ -18,16 +19,18 @@ def return_timings(data, trial_type):

return onsets


def write_afni_timings(task, offset):
"""
This takes the CSV files supplied with the imitate/observe
data and returns something that can be fed into AFNI's
3dDeconvolve.
"""
# import the original data as a list of lists of strings :D
data = np.genfromtxt(task + '-timing.csv', skip_header=1,
delimiter=',',
dtype=(str))
data = np.genfromtxt(task + '-timing.csv',
skip_header=1,
delimiter=',',
dtype=(str))
# find all trial types
trials = []

@@ -41,20 +44,21 @@ def write_afni_timings(task, offset):
onsets = return_timings(data, trial)

# write an output file for this trial_type
f = open(task + '_event-times_' + trial + '.1D', 'wb')
f = open(task + '_event-times_' + trial + '.1D', 'w')

# this is for AFNI -- blank first line for first run if OB
if task == 'OB':
f.write('\n')
for i in range(len(onsets)):
on = float(onsets[i]) - offset
f.write('{o:.2f} '.format(o=on))

# this is for AFNI -- blank second line for second run if IM
if task == 'IM':
f.write('\n')

print('Finished ' + task + ':' + trial)

print('Finished ' + task + ':' + trial)


def main():
"""
@@ -67,7 +71,7 @@ def main():

print('Timing files generated. Please have a wonderful day.')


if __name__ == '__main__':
print('Should be run in the folder with the *-timing.csv files.')
main()

2 changes: 1 addition & 1 deletion bin/__init__.py
@@ -1 +1 @@
## Needed for tests to work
# Needed for tests to work
7 changes: 0 additions & 7 deletions bin/archive_manifest.py
@@ -14,14 +14,7 @@
--showheaders Just list all of the headers for each archive
"""

import io
import sys
import os.path
import tarfile
import zipfile

from docopt import docopt
import pydicom
import pandas as pd

import datman
2 changes: 1 addition & 1 deletion bin/dm_get_session_info.py
@@ -107,7 +107,7 @@ def main():
results.append(result)

if output_csv:
with open(output_csv, 'wb') as csvfile:
with open(output_csv, 'w') as csvfile:
csv_writer = csv.writer(csvfile)
csv_writer.writerow(headers)
for row in results:
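
The 'wb' to 'w' change above is needed because Python 3's csv.writer produces str and therefore expects a text-mode file object. A small sketch of the pattern (filename and rows are hypothetical; newline='' is the csv module's usual recommendation and is not part of this diff):

import csv

headers = ['subject', 'session', 'date']                # hypothetical headers
results = [['STUDY01_SITE_0001', '01', '2019-12-31']]   # hypothetical rows

# Python 2 used open(output_csv, 'wb'); csv.writer needs text mode in Python 3.
with open('session_info.csv', 'w', newline='') as csvfile:
    csv_writer = csv.writer(csvfile)
    csv_writer.writerow(headers)
    for row in results:
        csv_writer.writerow(row)
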
4 changes: 2 additions & 2 deletions bin/dm_header_checks.py
@@ -39,10 +39,10 @@ def main():
dti = args['--dti']

if ignore_file:
ignored_fields.extend(parse_file(ignore_file))
ignored_fields.extend(header_checks.parse_file(ignore_file))

if tolerances:
tolerances = read_json(tolerances)
tolerances = header_checks.read_json(tolerances)

diffs = header_checks.construct_diffs(series_json, standard_json,
ignored_fields, tolerances, dti)
1 change: 0 additions & 1 deletion bin/dm_link_project_scans.py
@@ -40,7 +40,6 @@
import csv
import re

import yaml
from docopt import docopt

import datman as dm
13 changes: 8 additions & 5 deletions bin/dm_link_shared_ids.py
@@ -17,10 +17,9 @@
that may have multiple IDs for some of its subjects.
Options:
--redcap FILE The path to a text file containing a redcap token to
--redcap FILE A path to a text file containing a redcap token to
access 'Scan completed' surveys. If not set the
'redcap-token' file in the project metadata folder
will be used.
environment variable 'REDCAP_TOKEN' will be used
--site-config FILE The path to a site configuration file. If not set,
the default defined for datman.config.config() is used.
@@ -44,6 +43,7 @@

import dm_link_project_scans as link_scans
import datman.dashboard as dashboard
from datman.exceptions import InputException

DRYRUN = False

@@ -134,8 +134,11 @@ def get_project_redcap_records(config, redcap_cred):

def get_redcap_token(config, redcap_cred):
if not redcap_cred:
# Read in credentials as string from config file
redcap_cred = os.path.join(config.get_path('meta'), 'redcap-token')
token = os.getenv('REDCAP_TOKEN')
if not token:
raise InputException("Redcap token not provided. Set the shell "
"variable 'REDCAP_TOKEN' or provide a file")
return token

try:
token = datman.utils.read_credentials(redcap_cred)[0]
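
With this hunk, get_redcap_token falls back to the REDCAP_TOKEN environment variable when no credentials file is supplied and raises the new InputException if the variable is unset. A standalone approximation of the resulting lookup order (simplified sketch; the real code reads the file through datman.utils.read_credentials):

import os

class InputException(Exception):
    pass

def get_redcap_token(redcap_cred):
    # No credentials file given: fall back to the shell environment.
    if not redcap_cred:
        token = os.getenv('REDCAP_TOKEN')
        if not token:
            raise InputException("Redcap token not provided. Set the shell "
                                 "variable 'REDCAP_TOKEN' or provide a file")
        return token
    # Credentials file given: the first line holds the token
    # (datman.utils.read_credentials(redcap_cred)[0] in the real module).
    with open(redcap_cred) as cred_file:
        return cred_file.readline().strip()
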
9 changes: 5 additions & 4 deletions bin/dm_link_sprl.py
@@ -25,16 +25,17 @@
key contains SPRL
"""

from docopt import docopt
import sys
import os
import re
import logging
import errno

from docopt import docopt

import datman.config
import datman.utils
import datman.dashboard
import logging
import errno
from datman.exceptions import DashboardException

logger = logging.getLogger(__name__)

9 changes: 5 additions & 4 deletions bin/dm_proc_outliers.py
@@ -51,9 +51,10 @@
"--write-stats <filename>" option is chosen, the summary statistics calculated
from this csv are written out to the specified filename.
"""
from docopt import docopt
import numpy as np
import os
import sys

from docopt import docopt
import pandas as pd

arguments = docopt(__doc__)
@@ -66,7 +67,7 @@
DRYRUN = arguments['--dry-run']

if not os.path.isfile(inputfile):
sys.exit("Input file {} doesn't exist.".format(FAmap))
sys.exit("Input file {} doesn't exist.".format(inputfile))

# read the inputdata into a pandas dataframe
inputdata = pd.read_csv(inputfile, sep=',', dtype=str, comment='#')
@@ -90,7 +91,7 @@
if a file is specified, check that it exists and load it
'''
if not os.path.isfile(summaryin):
sys.exit("Summary Statistics file {} doesn't exist.".format(FAmap))
sys.exit("Summary Statistics file {} doesn't exist.".format(summaryin))
SummaryStats = pd.read_csv(summaryin, sep=',', index_col=0)

if 'AnyOutliers' not in inputdata.columns:
4 changes: 2 additions & 2 deletions bin/dm_qc_report.py
@@ -504,8 +504,8 @@ def get_series_to_add(series, subject):
t2 = get_split_image(subject, series.series_num, 'T2')
split_series = [t2]
except RuntimeError as e:
logger.error("Can't add PDT2 {} to QC page. Reason: {}".format(
series.path, e.message))
logger.error("Can't add PDT2 {} to QC page. Reason: {}"
"".format(series.path, e))
return []

try:
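
Several scripts in this commit stop using e.message, which Python 3 exceptions no longer carry; formatting the exception object itself yields the same text. A quick illustration with a made-up failure:

try:
    raise RuntimeError("could not split PDT2 series")  # hypothetical error
except RuntimeError as e:
    # Python 2 allowed e.message; in Python 3 it raises AttributeError.
    # Formatting (or calling str() on) the exception is portable.
    print("Can't add PDT2 to QC page. Reason: {}".format(e))
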
2 changes: 1 addition & 1 deletion bin/dm_sftp.py
@@ -122,7 +122,7 @@ def get_server_config(cfg):
try:
server_config[site_server] = read_config(cfg, site=site)
except datman.config.UndefinedSetting as e:
logger.debug(e.message)
logger.debug(e)
return server_config


1 change: 0 additions & 1 deletion bin/dm_symlink_scans.py
@@ -23,7 +23,6 @@

import os
import sys
from glob import glob
import fnmatch
import logging

3 changes: 2 additions & 1 deletion bin/dm_update_study_status.py
@@ -3,14 +3,15 @@
import datman.dashboard as dashboard
import datman.config as cfg


def main():
config = cfg.config()
studies = config.get_key('Projects').keys()

for study in studies:
try:
config.set_study(study)
except:
except Exception:
pass
is_open = config.get_key('IsOpen')

4 changes: 2 additions & 2 deletions bin/dm_xnat_upload.py
@@ -134,7 +134,7 @@ def process_archive(archivefile):
try:
data_exists, resource_exists = check_files_exist(archivefile,
xnat_session)
except Exception as e:
except Exception:
logger.error('Failed checking xnat for session: {}'.format(scanid))
return

@@ -272,7 +272,7 @@ def check_files_exist(archive, xnat_session):
try:
scans_exist = scan_data_exists(xnat_session, local_headers)
except ValueError as e:
logger.error("Please check {}: {}".format(archive, e.message))
logger.error("Please check {}: {}".format(archive, e))
# Return true for both to prevent XNAT being modified
return True, True

6 changes: 3 additions & 3 deletions bin/redcap_demographics.py
@@ -10,8 +10,8 @@
<study> Name of the datman managed study
Options:
--URL PATH set the REDCap URL [default:
https://redcap.smh.ca/redcap/api/]
--URL PATH set the REDCap URL
[default: https://redcap.smh.ca/redcap/api/]
--output PATH set the location to save the output csv file
[default: clinical/demographics.csv]
@@ -81,7 +81,7 @@ def get_payload(token):

def make_rest(url, payload, REDCap_variables):
response = post(url, data=payload)
if response.status_code is not 200:
if response.status_code != 200:
print('Cannot talk to server, response code is {}.'
''.format(response.status_code))
sys.exit(1)
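
The change from 'is not 200' to '!= 200' matters because 'is' compares object identity rather than value; it only appears to work for small integers because CPython caches them, and Python 3.8+ emits a SyntaxWarning for 'is' with a literal. For example:

status_code = 200

print(status_code != 200)      # False: value comparison, always correct

# Identity comparison depends on CPython's int caching and warns on 3.8+;
# it should not be used to compare numbers.
print(status_code is not 200)  # happens to be False here, but not guaranteed
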
1 change: 0 additions & 1 deletion bin/track_scan_dates.py
@@ -1,4 +1,3 @@
import json
import operator
import os
from datetime import datetime
12 changes: 6 additions & 6 deletions bin/xnat_fetch_sessions.py
@@ -101,7 +101,7 @@ def main():
credentials_file, server, project, destination = get_xnat_config(
config, site)
except KeyError as e:
logger.error("{}".format(e.message))
logger.error("{}".format(e))
continue
username, password = get_credentials(credentials_file)
with datman.xnat.xnat(server, username, password) as xnat:
@@ -118,7 +118,7 @@ def get_sessions(xnat, xnat_project, destination):
session = xnat.get_session(xnat_project, session_name)
except Exception as e:
logger.error("Failed to get session {} from xnat. "
"Reason: {}".format(session_name, e.message))
"Reason: {}".format(session_name, e))
continue

zip_name = session_name.upper() + ".zip"
@@ -139,8 +139,8 @@ def get_sessions(xnat, xnat_project, destination):
try:
temp_zip = session.download(xnat, temp, zip_name=zip_name)
except Exception as e:
logger.error("Cant download session {}. Reason: {}".format(
session_name, e.message))
logger.error("Cant download session {}. Reason: {}"
"".format(session_name, e))
continue
restructure_zip(temp_zip, zip_path)

@@ -316,13 +316,13 @@ def remove_empty_dirs(base_dir):
try:
shutil.rmtree(empty_dir)
except OSError as e:
logger.info("Cant delete {}. Reason: {}".format(empty_dir, e.message))
logger.info("Cant delete {}. Reason: {}".format(empty_dir, e))


def move(source, dest):
try:
shutil.move(source, dest)
except Exception as e:
except Exception:
logger.error("Couldnt move {} to destination {}".format(source, dest))


2 changes: 1 addition & 1 deletion datman/__init__.py
@@ -6,4 +6,4 @@
# datman/dashboard.py you will get circular import errors. I am sooo sorry.
# We'd have to store hardcoded file paths in the dashboard database to
# otherwise fix this.
import datman.config
import datman.config # noqa: F401
4 changes: 2 additions & 2 deletions datman/config.py
@@ -470,7 +470,7 @@ def get_study_tags(self, study=None):
except KeyError:
continue

if type(site_tags) is str:
if isinstance(site_tags, str):
site_tags = [site_tags]

for tag_name in site_tags:
@@ -517,7 +517,7 @@ def series_map(self):
except KeyError:
raise KeyError("Cant retrieve 'Pattern' from config. Did you "
"specify a site?")
if type(pattern) is list:
if isinstance(pattern, list):
pattern = "|".join(pattern)
series_map[tag] = pattern
return series_map
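
isinstance(site_tags, str) is preferred over type(site_tags) is str: it is the idiomatic check and also matches subclasses of str. A small sketch of the behaviour:

def as_tag_list(site_tags):
    # Wrap a single tag string in a list; leave lists of tags alone.
    if isinstance(site_tags, str):
        site_tags = [site_tags]
    return site_tags

print(as_tag_list("RST"))              # ['RST']
print(as_tag_list(["RST", "FACES"]))   # ['RST', 'FACES']
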
2 changes: 1 addition & 1 deletion datman/dashboard.py
@@ -256,7 +256,7 @@ def get_bids_scan(name):
scan = queries.get_scan(name, bids=True)
if len(scan) > 1:
raise DashboardException("Couldnt identify scan {}. {} matches "
"found".format(scan_name, len(scan)))
"found".format(name, len(scan)))
if len(scan) == 1:
return scan[0]
return None
4 changes: 4 additions & 0 deletions datman/exceptions.py
@@ -23,3 +23,7 @@ class MetadataException(Exception):

class ExportException(Exception):
pass


class InputException(Exception):
pass
