diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index bd9bd70..1b9a7b1 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -5,17 +5,17 @@ on: push jobs: tester: - name: Test the code - runs-on: [ubuntu-latest, macos-latest, windows-latest] + name: 'Test the code' strategy: matrix: - python-version: [3.8, '3.10'] - + python-version: ['3.8', '3.10'] + os: ['ubuntu-latest', 'windows-latest'] + runs-on: ${{ matrix.os }} steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Python setup - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} architecture: 'x64' @@ -28,12 +28,13 @@ jobs: pip install pytest-cov pip install coveralls pip install . + - name: Pylint + if: ${{ ( matrix.os != 'windows-latest' ) }} + run: pylint -E --disable=E1101 py/minimint/*py - name: Test - run: | - pytest --cov=minimint -s - pylint -E --disable=E1101 py/minimint/*py + run: pytest --cov=minimint -s - name: Coveralls - if: ${{ success() }} + if: ${{ success() && (matrix.os != 'windows-latest') }} run: coveralls --service=github env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/py/minimint/bolom.py b/py/minimint/bolom.py index 13fa265..d72fdcd 100644 --- a/py/minimint/bolom.py +++ b/py/minimint/bolom.py @@ -84,8 +84,8 @@ def __call__(self, p): bad = np.zeros(p.shape[0], dtype=bool) for i in range(self.ndim): pos1[:, i] = np.searchsorted(self.uvecs[i], p[:, i], 'right') - 1 - bad = bad | (pos1[:, i] < 0) | (pos1[:, i] >= - (len(self.uvecs[i]) - 1)) + bad = bad | (pos1[:, i] < 0) | (pos1[:, i] + >= (len(self.uvecs[i]) - 1)) pos1[:, i][bad] = 0 xs[:, i] = (p[:, i] - self.uvecs[i][pos1[:, i]]) / ( self.uvecs[i][pos1[:, i] + 1] - self.uvecs[i][pos1[:, i]] @@ -119,10 +119,12 @@ def list_filters(path=None): if path is None: path = get_data_path() - fs = glob.glob(path + '/' + FILT_NPY % '*') + fs = glob.glob(os.path.join(path, FILT_NPY % '*')) filts = [] 
for f in fs: - filts.append(re.match(FILT_NPY % '(.*)', f.split('/')[-1]).group(1)) + filts.append( + re.match(FILT_NPY % '(.*)', + f.split(os.path.sep)[-1]).group(1)) return filts @@ -140,7 +142,7 @@ def prepare(iprefix, raise Exception("shouldn't happen") last_vec = vec.copy() if i == 0: - np.save(oprefix + '/' + POINTS_NPY, vec) + np.save(os.path.join(oprefix, POINTS_NPY), vec) for k in tabs.columns: if k not in cols_ex: - np.save(oprefix + '/' + FILT_NPY % (k), tabs[k]) + np.save(os.path.join(oprefix, FILT_NPY % k), tabs[k]) diff --git a/py/minimint/mist_interpolator.py b/py/minimint/mist_interpolator.py index 301157c..1b1b690 100644 --- a/py/minimint/mist_interpolator.py +++ b/py/minimint/mist_interpolator.py @@ -3,10 +3,12 @@ import glob import os import gc +import subprocess import pickle import urllib.request import astropy.table as atpy import scipy.interpolate + import numpy as np from minimint import bolom, utils """ @@ -61,8 +63,10 @@ def getheader(f): def read_grid(eep_prefix, outp_prefix): - fs = glob.glob('%s/*EEPS/*eep' % (eep_prefix, )) - assert (len(fs) > 0) + mask = os.path.join(eep_prefix, '*EEPS', '*eep') + fs = glob.glob(mask) + if len(fs) == 0: + raise RuntimeError(f'Failed to find eep files {mask}') tmpfile = utils.tail_head(fs[0], 11, 10) tab0 = atpy.Table().read(tmpfile, format='ascii.fast_commented_header') os.unlink(tmpfile) @@ -71,7 +75,7 @@ def read_grid(eep_prefix, outp_prefix): for i, f in enumerate(fs): if i % (N // 100) == 0: print('%d/%d' % (i, N)) - curt = atpy.Table().read(f, format='ascii') + curt = atpy.Table().read(f, format='ascii.fast_no_header') for i, k in enumerate(list(curt.columns)): curt.rename_column(k, list(tab0.columns)[i]) D = getheader(f) @@ -93,7 +97,7 @@ def read_grid(eep_prefix, outp_prefix): tabs.remove_column(k) os.makedirs(outp_prefix, exist_ok=True) - tabs.write(outp_prefix + '/' + TRACKS_FILE, overwrite=True) + tabs.write(os.path.join(outp_prefix, TRACKS_FILE), overwrite=True) def grid3d_filler(ima): @@ 
-159,12 +163,22 @@ def writer(url, pref): print('Downloading', url) fd = urllib.request.urlopen(url) fname = url.split('/')[-1] - fdout = open(pref + '/' + fname, 'wb') + fname_out = os.path.join(pref, fname) + fdout = open(fname_out, 'wb') fdout.write(fd.read()) fdout.close() fd.close() - cmd = 'cd %s; tar xfJ %s' % (pref, fname) - os.system(cmd) + if os.name == 'nt': + fname_out1 = fname_out.replace('.txz', '.tar') + cmd = (f'cd /d {pref} && ' + f'7z x {fname_out} && ' + f'7z x {fname_out1}') + else: + cmd = f'cd {pref}; tar xfJ {fname_out}' + ret = subprocess.run(cmd, shell=True, timeout=60, capture_output=True) + if ret.returncode != 0: + raise RuntimeError('Failed to untar the files ' + + ret.stdout.decode() + ret.stderr.decode()) with tempfile.TemporaryDirectory(dir=tmp_prefix) as T: for curfilt in filters: @@ -198,8 +212,8 @@ def prepare(eep_prefix, and bolometric corrections') read_grid(eep_prefix, outp_prefix) print('Processing EEPs') - tab = atpy.Table().read(outp_prefix + '/' + TRACKS_FILE) + tab = atpy.Table().read(os.path.join(outp_prefix, TRACKS_FILE)) - os.unlink(outp_prefix + '/' + TRACKS_FILE) # remove after reading + os.unlink(os.path.join(outp_prefix, TRACKS_FILE)) # remove after reading umass, mass_id = np.unique(np.array(tab['initial_mass']), return_inverse=True) @@ -233,9 +247,9 @@ def prepare(eep_prefix, if k == 'logage': grid[:, :, :] = np.cumsum(grid, axis=2) - np.save(outp_prefix + '/' + get_file(k), grid) + np.save(os.path.join(outp_prefix, get_file(k)), grid) - with open(outp_prefix + '/' + INTERP_PKL, 'wb') as fp: + with open(os.path.join(outp_prefix, INTERP_PKL), 'wb') as fp: pickle.dump(dict(umass=umass, ufeh=ufeh, neep=neep), fp) print('Reading/processing bolometric corrections') bolom.prepare(bolom_prefix, outp_prefix, filters) @@ -353,13 +367,13 @@ def __init__(self, prefix=None): """ if prefix is None: prefix = utils.get_data_path() - self.logg_grid = np.load(prefix + '/' + get_file('logg')) - self.logl_grid = np.load(prefix + '/' + get_file('logl')) -
self.logteff_grid = np.load(prefix + '/' + get_file('logteff')) - self.logage_grid = np.load(prefix + '/' + get_file('logage')) - self.phase_grid = np.load(prefix + '/' + get_file('phase')) + (self.logg_grid, self.logl_grid, self.logteff_grid, self.logage_grid, + self.phase_grid) = [ + np.load(os.path.join(prefix, get_file(curt))) + for curt in ['logg', 'logl', 'logteff', 'logage', 'phase'] + ] - with open(prefix + '/' + INTERP_PKL, 'rb') as fp: + with open(os.path.join(prefix, INTERP_PKL), 'rb') as fp: D = pickle.load(fp) self.umass = np.array(D['umass']) self.ufeh = np.array(D['ufeh']) diff --git a/py/minimint/utils.py b/py/minimint/utils.py index 1334f4e..a8a662e 100644 --- a/py/minimint/utils.py +++ b/py/minimint/utils.py @@ -7,7 +7,7 @@ def get_data_path(): path = os.environ.get('MINIMINT_DATA_PATH') if path is not None: return path - path = str(pathlib.Path(__file__).parent.absolute()) + '/data/' + path = os.path.join(str(pathlib.Path(__file__).parent.absolute()), 'data') os.makedirs(path, exist_ok=True) return path diff --git a/setup.py b/setup.py index fcee637..cf475df 100644 --- a/setup.py +++ b/setup.py @@ -41,7 +41,7 @@ def read(fname): VERSIONPIP = read('version.txt').rstrip() VERSION = VERSIONPIP + get_revision() -with open('py/minimint/_version.py', 'w') as fp: +with open(os.path.join('py', 'minimint', '_version.py'), 'w') as fp: print('version="%s"' % (VERSION), file=fp) setup( @@ -55,8 +55,8 @@ def read(fname): url="http://github.com/segasai/minimint", packages=['minimint'], scripts=[fname for fname in glob.glob(os.path.join('bin', '*'))], - package_dir={'': 'py/'}, - package_data={'minimint': ['tests/']}, + package_dir={'': 'py'}, + package_data={'minimint': ['tests']}, long_description=read('README.md'), long_description_content_type='text/markdown', classifiers=[