diff --git a/src/sorcha/ephemeris/__init__.py b/src/sorcha/ephemeris/__init__.py
index 95b04531..47428b31 100644
--- a/src/sorcha/ephemeris/__init__.py
+++ b/src/sorcha/ephemeris/__init__.py
@@ -7,10 +7,6 @@
     create_ecl_to_eq_rotation_matrix,
 )
 from .simulation_data_files import (
-    DATA_FILE_LIST,
-    JPL_PLANETS,
-    JPL_SMALL_BODIES,
-    DE440S,
     make_retriever,
 )
 from .simulation_geometry import (
diff --git a/src/sorcha/ephemeris/simulation_data_files.py b/src/sorcha/ephemeris/simulation_data_files.py
index dddd7697..6184731c 100644
--- a/src/sorcha/ephemeris/simulation_data_files.py
+++ b/src/sorcha/ephemeris/simulation_data_files.py
@@ -1,75 +1,7 @@
 import pooch
 
-# Define variables for the file names
-DE440S = "de440s.bsp"
-EARTH_PREDICT = "earth_200101_990827_predict.bpc"
-EARTH_HISTORICAL = "earth_620120_240827.bpc"
-EARTH_HIGH_PRECISION = "earth_latest_high_prec.bpc"
-JPL_PLANETS = "linux_p1550p2650.440"
-JPL_SMALL_BODIES = "sb441-n16.bsp"
-LEAP_SECONDS = "naif0012.tls"
-META_KERNEL = "meta_kernel.txt"
-OBSERVATORY_CODES = "ObsCodes.json"
-OBSERVATORY_CODES_COMPRESSED = "ObsCodes.json.gz"
-ORIENTATION_CONSTANTS = "pck00010.pck"
-
-# Dictionary of filename: url
-URLS = {
-    DE440S: "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/spk/planets/de440s.bsp",
-    EARTH_PREDICT: "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/earth_200101_990827_predict.bpc",
-    EARTH_HISTORICAL: "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/earth_620120_240827.bpc",
-    EARTH_HIGH_PRECISION: "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/earth_latest_high_prec.bpc",
-    JPL_PLANETS: "https://ssd.jpl.nasa.gov/ftp/eph/planets/Linux/de440/linux_p1550p2650.440",
-    JPL_SMALL_BODIES: "https://ssd.jpl.nasa.gov/ftp/eph/small_bodies/asteroids_de441/sb441-n16.bsp",
-    LEAP_SECONDS: "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/lsk/naif0012.tls",
-    OBSERVATORY_CODES_COMPRESSED: "https://minorplanetcenter.net/Extended_Files/obscodes_extended.json.gz",
-    ORIENTATION_CONSTANTS: "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/pck00010.tpc",
-}
-
-# Convenience list of all the file names
-DATA_FILE_LIST = [
-    DE440S,
-    EARTH_PREDICT,
-    EARTH_HISTORICAL,
-    EARTH_HIGH_PRECISION,
-    JPL_PLANETS,
-    JPL_SMALL_BODIES,
-    LEAP_SECONDS,
-    META_KERNEL,
-    OBSERVATORY_CODES,
-    OBSERVATORY_CODES_COMPRESSED,
-    ORIENTATION_CONSTANTS,
-]
-
-# List of files that need to be downloaded
-DATA_FILES_TO_DOWNLOAD = [
-    DE440S,
-    EARTH_PREDICT,
-    EARTH_HISTORICAL,
-    EARTH_HIGH_PRECISION,
-    JPL_PLANETS,
-    JPL_SMALL_BODIES,
-    LEAP_SECONDS,
-    OBSERVATORY_CODES_COMPRESSED,
-    ORIENTATION_CONSTANTS,
-]
-
-# List of kernels ordered from least to most precise - used to assemble META_KERNEL file
-ORDERED_KERNEL_FILES = [
-    LEAP_SECONDS,
-    EARTH_HISTORICAL,
-    EARTH_PREDICT,
-    ORIENTATION_CONSTANTS,
-    DE440S,
-    EARTH_HIGH_PRECISION,
-]
-
-# Default Pooch registry to define which files will be tracked and retrievable
-REGISTRY = {data_file: None for data_file in DATA_FILE_LIST}
-
-
-def make_retriever(directory_path: str = None, registry: dict = REGISTRY) -> pooch.Pooch:
+def make_retriever(auxconfigs, directory_path: str = None) -> pooch.Pooch:
     """Helper function that will create a Pooch object to track and retrieve files.
 
     Parameters
@@ -79,7 +11,8 @@ def make_retriever(directory_path: str = None, registry: dict = REGISTRY) -> poo
     registry : dictionary, optional
         A dictionary of file names to SHA hashes. Generally we'll not use SHA=None because the files
         we're tracking change frequently.
        Default = REGISTRY
-
+    auxconfigs: dataclass
+        Dataclass of auxiliary configuration file arguments.
     Returns
     -------
     : pooch
@@ -92,7 +25,7 @@ def make_retriever(directory_path: str = None, registry: dict = REGISTRY) -> poo
     return pooch.create(
         path=dir_path,
         base_url="",
-        urls=URLS,
-        registry=registry,
+        urls=auxconfigs.urls,
+        registry=auxconfigs.registry,
         retry_if_failed=25,
     )
diff --git a/src/sorcha/ephemeris/simulation_driver.py b/src/sorcha/ephemeris/simulation_driver.py
index a2a6c984..cb742c92 100644
--- a/src/sorcha/ephemeris/simulation_driver.py
+++ b/src/sorcha/ephemeris/simulation_driver.py
@@ -116,13 +116,13 @@ def create_ephemeris(orbits_df, pointings_df, args, sconfigs):
     ephemeris_csv_filename = os.path.join(args.outpath, args.output_ephemeris_file)
 
     verboselog("Building ASSIST ephemeris object.")
-    ephem, gm_sun, gm_total = create_assist_ephemeris(args)
+    ephem, gm_sun, gm_total = create_assist_ephemeris(args, sconfigs.auxiliary)
     verboselog("Furnishing SPICE kernels.")
-    furnish_spiceypy(args)
+    furnish_spiceypy(args, sconfigs.auxiliary)
     verboselog("Generating ASSIST+REBOUND simulations.")
     sim_dict = generate_simulations(ephem, gm_sun, gm_total, orbits_df, args)
     pixel_dict = defaultdict(list)
-    observatories = Observatory(args)
+    observatories = Observatory(args, sconfigs.auxiliary)
     output = StringIO()
     in_memory_csv = writer(output)
diff --git a/src/sorcha/ephemeris/simulation_parsing.py b/src/sorcha/ephemeris/simulation_parsing.py
index ef458e16..d09890f8 100644
--- a/src/sorcha/ephemeris/simulation_parsing.py
+++ b/src/sorcha/ephemeris/simulation_parsing.py
@@ -3,14 +3,9 @@
 import numpy as np
 import spiceypy as spice
 from pooch import Decompress
-
 from sorcha.ephemeris.simulation_constants import RADIUS_EARTH_KM
 from sorcha.ephemeris.simulation_geometry import ecliptic_to_equatorial
-from sorcha.ephemeris.simulation_data_files import (
-    OBSERVATORY_CODES,
-    OBSERVATORY_CODES_COMPRESSED,
-    make_retriever,
-)
+from sorcha.ephemeris.simulation_data_files import make_retriever
 from sorcha.ephemeris.orbit_conversion_utilities import universal_cartesian
 
@@ -134,7 +129,7 @@ class Observatory:
     Class containing various utility tools related to the calculation of the observatory position
     """
 
-    def __init__(self, args, oc_file=OBSERVATORY_CODES):
+    def __init__(self, args, auxconfigs, oc_file=None):
         """
         Initialization method
 
@@ -142,22 +137,25 @@ def __init__(self, args, oc_file=OBSERVATORY_CODES):
         ----------
         args : dictionary or `sorchaArguments` object
             dictionary of command-line arguments.
+        auxconfigs: dataclass
+            Dataclass of auxiliary configuration file arguments.
         oc_file : str
             Path for the file with observatory codes
         """
 
         self.observatoryPositionCache = {}  # previously calculated positions to speed up the process
 
-        if oc_file == OBSERVATORY_CODES:
-            retriever = make_retriever(args.ar_data_file_path)
+        if oc_file == None:
+            retriever = make_retriever(auxconfigs, args.ar_data_file_path)
 
             # is the file available locally, if so, return the full path
-            if os.path.isfile(os.path.join(retriever.abspath, OBSERVATORY_CODES)):
-                obs_file_path = retriever.fetch(OBSERVATORY_CODES)
+            if os.path.isfile(os.path.join(retriever.abspath, auxconfigs.observatory_codes)):
+                obs_file_path = retriever.fetch(auxconfigs.observatory_codes)
 
             # if the file is not local, download, and decompress it, then return the path.
            else:
                 obs_file_path = retriever.fetch(
-                    OBSERVATORY_CODES_COMPRESSED, processor=Decompress(name=OBSERVATORY_CODES)
+                    auxconfigs.observatory_codes_compressed,
+                    processor=Decompress(name=auxconfigs.observatory_codes),
                 )
 
         else:
diff --git a/src/sorcha/ephemeris/simulation_setup.py b/src/sorcha/ephemeris/simulation_setup.py
index 3a21782c..6a75d396 100644
--- a/src/sorcha/ephemeris/simulation_setup.py
+++ b/src/sorcha/ephemeris/simulation_setup.py
@@ -11,13 +11,7 @@
 import numpy as np
 
 from sorcha.ephemeris.simulation_constants import *
-from sorcha.ephemeris.simulation_data_files import (
-    make_retriever,
-    JPL_PLANETS,
-    JPL_SMALL_BODIES,
-    META_KERNEL,
-    ORDERED_KERNEL_FILES,
-)
+from sorcha.ephemeris.simulation_data_files import make_retriever
 
 from sorcha.ephemeris.simulation_geometry import (
     barycentricObservatoryRates,
@@ -32,9 +26,12 @@
 from sorcha.utilities.generate_meta_kernel import build_meta_kernel_file
 
 
-def create_assist_ephemeris(args) -> tuple:
+def create_assist_ephemeris(args, auxconfigs) -> tuple:
     """Build the ASSIST ephemeris object
-
+    Parameters
+    ----------
+    auxconfigs: dataclass
+        Dataclass of auxiliary configuration file arguments.
     Returns
     ---------
     Ephem : ASSIST ephemeris obejct
         The ASSIST ephemeris object
     gm_sun : float
         value for the GM_SUN value
     gm_total : float
         value for gm_total
     """
     pplogger = logging.getLogger(__name__)
 
-    retriever = make_retriever(args.ar_data_file_path)
-    planets_file_path = retriever.fetch(JPL_PLANETS)
-    small_bodies_file_path = retriever.fetch(JPL_SMALL_BODIES)
+    retriever = make_retriever(auxconfigs, args.ar_data_file_path)
+    planets_file_path = retriever.fetch(auxconfigs.jpl_planets)
+    small_bodies_file_path = retriever.fetch(auxconfigs.jpl_small_bodies)
     ephem = Ephem(planets_path=planets_file_path, asteroids_path=small_bodies_file_path)
     gm_sun = ephem.get_particle("Sun", 0).m
     gm_total = sum(sorted([ephem.get_particle(i, 0).m for i in range(27)]))
@@ -59,27 +56,31 @@ def create_assist_ephemeris(args) -> tuple:
     return ephem, gm_sun, gm_total
 
 
-def furnish_spiceypy(args):
+def furnish_spiceypy(args, auxconfigs):
     """
     Builds the SPICE kernel, downloading the required files if needed
+    Parameters
+    ----------
+    auxconfigs: dataclass
+        Dataclass of auxiliary configuration file arguments.
     """
     # The goal here would be to download the spice kernel files (if needed)
     # Then call spice.furnish() on each of those files.
 
     pplogger = logging.getLogger(__name__)
 
-    retriever = make_retriever(args.ar_data_file_path)
+    retriever = make_retriever(auxconfigs, args.ar_data_file_path)
 
-    for kernel_file in ORDERED_KERNEL_FILES:
+    for kernel_file in auxconfigs.ordered_kernel_files:
         retriever.fetch(kernel_file)
 
     # check if the META_KERNEL file exists. If it doesn't exist, create it.
-    if not os.path.exists(os.path.join(retriever.abspath, META_KERNEL)):
-        build_meta_kernel_file(retriever)
+    if not os.path.exists(os.path.join(retriever.abspath, auxconfigs.meta_kernel)):
+        build_meta_kernel_file(auxconfigs, retriever)
 
     # try to get the META_KERNEL file. If it's not there, error out.
     try:
-        meta_kernel = retriever.fetch(META_KERNEL)
+        meta_kernel = retriever.fetch(auxconfigs.meta_kernel)
     except ValueError:
         pplogger.error(
             "ERROR: furnish_spiceypy: Must create meta_kernel.txt by running `bootstrap_sorcha_data_files` on the command line."
         )
@@ -181,11 +182,11 @@ def precompute_pointing_information(pointings_df, args, sconfigs):
     pointings_df : pandas dataframe
         The original dataframe with several additional columns of precomputed values.
     """
-    ephem, _, _ = create_assist_ephemeris(args)
+    ephem, _, _ = create_assist_ephemeris(args, sconfigs.auxiliary)
 
-    furnish_spiceypy(args)
+    furnish_spiceypy(args, sconfigs.auxiliary)
     obsCode = sconfigs.simulation.ar_obs_code
-    observatories = Observatory(args)
+    observatories = Observatory(args, sconfigs.auxiliary)
 
     # vectorize the calculation to get x,y,z vector from ra/dec
     vectors = ra_dec2vec(
diff --git a/src/sorcha/utilities/generate_meta_kernel.py b/src/sorcha/utilities/generate_meta_kernel.py
index 45a5d34a..c38a92c4 100644
--- a/src/sorcha/utilities/generate_meta_kernel.py
+++ b/src/sorcha/utilities/generate_meta_kernel.py
@@ -1,10 +1,6 @@
 import os
 import pooch
 
-from sorcha.ephemeris.simulation_data_files import (
-    META_KERNEL,
-    ORDERED_KERNEL_FILES,
-)
 
 """
 An example output from running `build_meta_kernel_file` might look like
@@ -29,7 +25,7 @@
 """
 
 
-def build_meta_kernel_file(retriever: pooch.Pooch) -> None:
+def build_meta_kernel_file(auxconfigs, retriever: pooch.Pooch) -> None:
     """Builds a specific text file that will be fed into `spiceypy` that defines
     the list of spice kernel to load, as well as the order to load them.
 
@@ -37,13 +33,14 @@ def build_meta_kernel_file(retriever: pooch.Pooch) -> None:
     ----------
     retriever : pooch
         Pooch object that maintains the registry of files to download
-
+    auxconfigs: dataclass
+        Dataclass of auxiliary configuration file arguments.
     Returns
     ---------
     None
     """
     # build meta_kernel file path
-    meta_kernel_file_path = os.path.join(retriever.abspath, META_KERNEL)
+    meta_kernel_file_path = os.path.join(retriever.abspath, auxconfigs.meta_kernel)
 
     # build a meta_kernel.txt file
     with open(meta_kernel_file_path, "w") as meta_file:
@@ -51,7 +48,7 @@ def build_meta_kernel_file(retriever: pooch.Pooch) -> None:
         meta_file.write(f"PATH_VALUES = ('{retriever.abspath}')\n\n")
         meta_file.write("PATH_SYMBOLS = ('A')\n\n")
         meta_file.write("KERNELS_TO_LOAD=(\n")
-        for file_name in ORDERED_KERNEL_FILES:
+        for file_name in auxconfigs.ordered_kernel_files:
             shortened_file_name = _build_file_name(retriever.abspath, retriever.fetch(file_name))
             meta_file.write(f" '{shortened_file_name}',\n")
         meta_file.write(")\n\n")
diff --git a/src/sorcha/utilities/retrieve_ephemeris_data_files.py b/src/sorcha/utilities/retrieve_ephemeris_data_files.py
index 85d06a3a..9696c0d5 100644
--- a/src/sorcha/utilities/retrieve_ephemeris_data_files.py
+++ b/src/sorcha/utilities/retrieve_ephemeris_data_files.py
@@ -4,11 +4,7 @@
 import pooch
 from functools import partial
 
-from sorcha.ephemeris.simulation_data_files import (
-    make_retriever,
-    DATA_FILES_TO_DOWNLOAD,
-    DATA_FILE_LIST,
-)
+from sorcha.ephemeris.simulation_data_files import make_retriever
 
 from sorcha.utilities.generate_meta_kernel import build_meta_kernel_file
 
@@ -35,7 +31,7 @@ def _decompress(fname, action, pup):  # pragma: no cover
     pooch.Decompress(method="auto", name=os.path.splitext(fname)[0]).__call__(fname, action, pup)
 
 
-def _remove_files(retriever: pooch.Pooch) -> None:  # pragma: no cover
+def _remove_files(auxconfigs, retriever: pooch.Pooch) -> None:  # pragma: no cover
     """Utility to remove all the files tracked by the pooch retriever. This includes
     the decompressed ObservatoryCodes.json file as well as the META_KERNEL file
     that are created after downloading the files in the DATA_FILES_TO_DOWNLOAD
@@ -45,9 +41,11 @@ def _remove_files(retriever: pooch.Pooch) -> None:  # pragma: no cover
     ------------
     retriever : pooch
         Pooch object that maintains the registry of files to download.
+    auxconfigs: dataclass
+        Dataclass of auxiliary configuration file arguments.
     """
 
-    for file_name in DATA_FILE_LIST:
+    for file_name in auxconfigs.data_file_list:
         file_path = retriever.fetch(file_name)
         print(f"Deleting file: {file_path}")
         os.remove(file_path)
diff --git a/src/sorcha/utilities/sorchaConfigs.py b/src/sorcha/utilities/sorchaConfigs.py
index 40ee694f..5730a71f 100644
--- a/src/sorcha/utilities/sorchaConfigs.py
+++ b/src/sorcha/utilities/sorchaConfigs.py
@@ -780,6 +780,201 @@ def _validate_expert_configs(self):
         self.vignetting_on = cast_as_bool_or_set_default(self.vignetting_on, "vignetting_on", True)
 
 
+@dataclass
+class auxiliaryConfigs:
+    de440s: str = "de440s.bsp"
+    """filename of de440s"""
+    de440s_url: str = "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/spk/planets/de440s.bsp"
+    """url for de440s"""
+
+    earth_predict: str = "earth_200101_990827_predict.bpc"
+    """filename of earth_predict"""
+    earth_predict_url: str = (
+        "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/earth_200101_990827_predict.bpc"
+    )
+    """url for earth_predict"""
+
+    earth_historical: str = "earth_620120_240827.bpc"
+    """filename of earth_historical"""
+    earth_historical_url: str = (
+        "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/earth_620120_240827.bpc"
+    )
+    """url for earth_historical"""
+
+    earth_high_precision: str = "earth_latest_high_prec.bpc"
+    """filename of earth_high_precision"""
+    earth_high_precision_url: str = (
+        "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/earth_latest_high_prec.bpc"
+    )
+    """url of earth_high_precision"""
+
+    jpl_planets: str = "linux_p1550p2650.440"
+    """filename of jpl_planets"""
+    jpl_planets_url: str = "https://ssd.jpl.nasa.gov/ftp/eph/planets/Linux/de440/linux_p1550p2650.440"
+    """url of jpl_planets"""
+
+    jpl_small_bodies: str = "sb441-n16.bsp"
+    """filename of jpl_small_bodies"""
+    jpl_small_bodies_url: str = "https://ssd.jpl.nasa.gov/ftp/eph/small_bodies/asteroids_de441/sb441-n16.bsp"
+    """url of jpl_small_bodies"""
+
+    leap_seconds: str = "naif0012.tls"
+    """filename of leap_seconds"""
+    leap_seconds_url: str = "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/lsk/naif0012.tls"
+    """url of leap_seconds"""
+
+    meta_kernel: str = "meta_kernel.txt"
+    """filename of meta_kernel"""
+
+    observatory_codes: str = "ObsCodes.json"
+    """filename of observatory_codes"""
+
+    observatory_codes_compressed: str = "ObsCodes.json.gz"
+    """filename of observatory_codes_compressed"""
+    observatory_codes_compressed_url: str = (
+        "https://minorplanetcenter.net/Extended_Files/obscodes_extended.json.gz"
+    )
+    """url of observatory_codes_compressed"""
+
+    orientation_constants: str = "pck00010.pck"
+    """filename of orientation_constants"""
+    orientation_constants_url: str = "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/pck00010.tpc"
+    """url of orientation_constants"""
+
+    data_file_list: list = None
+    """convenience list of all the file names"""
+
+    urls: dict = None
+    """dictionary of filename: url"""
+
+    data_files_to_download: list = None
+    """list of files that need to be downloaded"""
+
+    ordered_kernel_files: list = None
+    """list of kernels ordered from least to most precise - used to assemble meta_kernel file"""
+
+    registry: dict = None
+    """Default Pooch registry to define which files will be tracked and retrievable"""
+
+    @property
+    def default_url(self):
+        """returns a dictionary of the default urls used in this version of sorcha"""
+        return {
+            "de440s": self.__class__.de440s_url,
+            "earth_predict": self.__class__.earth_predict_url,
+            "earth_historical": self.__class__.earth_historical_url,
+            "earth_high_precision": self.__class__.earth_high_precision_url,
+            "jpl_planets": self.__class__.jpl_planets_url,
+            "jpl_small_bodies": self.__class__.jpl_small_bodies_url,
+            "leap_seconds": self.__class__.leap_seconds_url,
+            "observatory_codes_compressed": self.__class__.observatory_codes_compressed_url,
+            "orientation_constants": self.__class__.orientation_constants_url,
+        }
+
+    @property
+    def default_filenames(self):
+        """returns a dictionary of the default filenames used in this version"""
+        return {
+            "de440s": self.__class__.de440s,
+            "earth_predict": self.__class__.earth_predict,
+            "earth_historical": self.__class__.earth_historical,
+            "earth_high_precision": self.__class__.earth_high_precision,
+            "jpl_planets": self.__class__.jpl_planets,
+            "jpl_small_bodies": self.__class__.jpl_small_bodies,
+            "leap_seconds": self.__class__.leap_seconds,
+            "meta_kernel": self.__class__.meta_kernel,
+            "observatory_codes": self.__class__.observatory_codes,
+            "observatory_codes_compressed": self.__class__.observatory_codes_compressed,
+            "orientation_constants": self.__class__.orientation_constants,
+        }
+
+    def __post_init__(self):
+        """Automagically validates the auxiliary configs after initialisation."""
+        self._create_lists_auxiliary_configs()
+        self._validate_auxiliary_configs()
+
+    def _validate_auxiliary_configs(self):
+        """
+        validates the auxiliary config attributes after initialisation.
+        """
+        for file in self.default_filenames:
+            if file != "meta_kernel" and file != "observatory_codes":
+                if (
+                    self.default_filenames[file] == getattr(self, file)
+                    and getattr(self, file + "_url") != self.default_url[file]
+                ):
+                    logging.error(f"ERROR: url for {file} given but filename for {file} not given")
+                    sys.exit(f"ERROR: url for {file} given but filename for {file} not given")
+
+                elif (
+                    self.default_filenames[file] != getattr(self, file)
+                    and getattr(self, file + "_url") == self.default_url[file]
+                ):
+                    setattr(self, file + "_url", None)
+
+    def _create_lists_auxiliary_configs(self):
+        """
+        creates lists of the auxiliary config attributes after initialisation.
+
+        Parameters
+        -----------
+        None.
+
+        Returns
+        ----------
+        None
+        """
+
+        self.urls = {
+            self.de440s: self.de440s_url,
+            self.earth_predict: self.earth_predict_url,
+            self.earth_historical: self.earth_historical_url,
+            self.earth_high_precision: self.earth_high_precision_url,
+            self.jpl_planets: self.jpl_planets_url,
+            self.jpl_small_bodies: self.jpl_small_bodies_url,
+            self.leap_seconds: self.leap_seconds_url,
+            self.observatory_codes_compressed: self.observatory_codes_compressed_url,
+            self.orientation_constants: self.orientation_constants_url,
+        }
+
+        self.data_file_list = [
+            self.de440s,
+            self.earth_predict,
+            self.earth_historical,
+            self.earth_high_precision,
+            self.jpl_planets,
+            self.jpl_small_bodies,
+            self.leap_seconds,
+            self.meta_kernel,
+            self.observatory_codes,
+            self.observatory_codes_compressed,
+            self.orientation_constants,
+        ]
+
+        self.data_files_to_download = [
+            self.de440s,
+            self.earth_predict,
+            self.earth_historical,
+            self.earth_high_precision,
+            self.jpl_planets,
+            self.jpl_small_bodies,
+            self.leap_seconds,
+            self.observatory_codes_compressed,
+            self.orientation_constants,
+        ]
+
+        self.ordered_kernel_files = [
+            self.leap_seconds,
+            self.earth_historical,
+            self.earth_predict,
+            self.orientation_constants,
+            self.de440s,
+            self.earth_high_precision,
+        ]
+
+        self.registry = {data_file: None for data_file in self.data_file_list}
+
+
 @dataclass
 class sorchaConfigs:
     """Dataclass which stores configuration file keywords in dataclasses."""
@@ -820,6 +1015,9 @@ class sorchaConfigs:
     expert: expertConfigs = None
     """expertConfigs dataclass which stores the keywords from the EXPERT section of the config file."""
 
+    auxiliary: auxiliaryConfigs = None
+    """auxiliaryConfigs dataclass which stores the keywords from the AUXILIARY section of the config file."""
+
     pplogger: None = None
     """The Python logger instance"""
 
@@ -873,6 +1071,7 @@ def _read_configs_from_object(self, config_object):
             "LIGHTCURVE": lightcurveConfigs,
             "ACTIVITY": activityConfigs,
             "EXPERT": expertConfigs,
+            "AUXILIARY": auxiliaryConfigs,
         }
         # when adding new sections in config file this general function needs the name of the section in uppercase
         # to be the same as the attributes defined above in lowercase e.g. section INPUT has attribute input
@@ -1160,7 +1359,7 @@ def PrintConfigsToLog(sconfigs, cmd_args):
     if sconfigs.activity.comet_activity == "comet":
         pplogger.info("Cometary activity set to: " + str(sconfigs.activity.comet_activity))
-    elif sconfigs.activity.comet_activity == "none":
+    elif sconfigs.activity.comet_activity == None:
         pplogger.info("No cometary activity selected.")
 
     pplogger.info("Format of ephemerides file is: " + sconfigs.input.eph_format)
diff --git a/src/sorcha_cmdline/bootstrap.py b/src/sorcha_cmdline/bootstrap.py
index 43c34cf5..0a566140 100644
--- a/src/sorcha_cmdline/bootstrap.py
+++ b/src/sorcha_cmdline/bootstrap.py
@@ -41,8 +41,6 @@ def execute(args):
     #
     from sorcha.utilities.retrieve_ephemeris_data_files import (
         make_retriever,
-        DATA_FILE_LIST,
-        DATA_FILES_TO_DOWNLOAD,
         _check_for_existing_files,
         _decompress,
         _remove_files,
     )
     from functools import partial
     import concurrent.futures
+    from sorcha.utilities.sorchaConfigs import auxiliaryConfigs
+    # Bootstrap will always take the default filenames and urls (stored in auxiliaryConfigs) for the current version of sorcha.
+    # A user can download new files by running sorcha and specifying in the config file under the section [AUXILIARY] a new filename and url.
+
+    default_files = auxiliaryConfigs()
     # create the Pooch retriever that tracks and retrieves the requested files
-    retriever = make_retriever(args.cache)
+    retriever = make_retriever(default_files, args.cache)
 
     # determine if we should attempt to download or create any files.
     found_all_files = False
     if args.force:
-        _remove_files(retriever)
+        _remove_files(default_files, retriever)
     else:
         print("Checking cache for existing files.")
-        found_all_files = _check_for_existing_files(retriever, DATA_FILE_LIST)
+        found_all_files = _check_for_existing_files(retriever, default_files.data_file_list)
 
     if not found_all_files:
         # create a partial function of `Pooch.fetch` including the `_decompress` method
 
         # download the data files in parallel
         with concurrent.futures.ThreadPoolExecutor() as executor:
-            executor.map(fetch_partial, DATA_FILES_TO_DOWNLOAD)
+            executor.map(fetch_partial, default_files.data_files_to_download)
 
         # build the meta_kernel.txt file
-        build_meta_kernel_file(retriever)
+        build_meta_kernel_file(default_files, retriever)
 
         print("Checking cache after attempting to download and create files.")
-        _check_for_existing_files(retriever, DATA_FILE_LIST)
+        _check_for_existing_files(retriever, default_files.data_file_list)
 
 
 if __name__ == "__main__":
diff --git a/tests/ephemeris/test_pixdict.py b/tests/ephemeris/test_pixdict.py
index af8f717b..ffe036f1 100644
--- a/tests/ephemeris/test_pixdict.py
+++ b/tests/ephemeris/test_pixdict.py
@@ -80,11 +80,11 @@ def test_pixeldict(tmp_path):
     filterpointing = precompute_pointing_information(filterpointing, args, configs)
 
     args = sorchaArguments(cmd_args_dict)
-    ephem, gm_sun, gm_total = create_assist_ephemeris(args)
-    furnish_spiceypy(args)
+    ephem, gm_sun, gm_total = create_assist_ephemeris(args, configs.auxiliary)
+    furnish_spiceypy(args, configs.auxiliary)
     sim_dict = generate_simulations(ephem, gm_sun, gm_total, orbits_df, args)
 
-    observatory = Observatory(args=None, oc_file=get_test_filepath("ObsCodes_test.json"))
+    observatory = Observatory(auxconfigs=configs.auxiliary, args=None, oc_file=get_test_filepath("ObsCodes_test.json"))
 
     pixdict = PixelDict(54800.0 + 2400000.5, sim_dict, ephem, "Z20", observatory, picket_interval=1, nside=32)
diff --git a/tests/ephemeris/test_simulation_parsing.py b/tests/ephemeris/test_simulation_parsing.py
index 73807347..a136e784 100644
--- a/tests/ephemeris/test_simulation_parsing.py
+++ b/tests/ephemeris/test_simulation_parsing.py
@@ -2,10 +2,11 @@
 import numpy as np
 import sorcha.ephemeris.simulation_parsing as sp
 from sorcha.utilities.dataUtilitiesForTests import get_test_filepath
-
+from sorcha.utilities.sorchaConfigs import sorchaConfigs, auxiliaryConfigs
 
 def test_observatory_compared_to_original():
-    observatory = sp.Observatory(args=None, oc_file=get_test_filepath("ObsCodes_test.json"))
+    auxconfigs = auxiliaryConfigs()
+    observatory = sp.Observatory(auxconfigs=auxconfigs, args=None, oc_file=get_test_filepath("ObsCodes_test.json"))
     obs = observatory.ObservatoryXYZ
 
     # Reference tuples were taken from Matt Holman's original notebook
@@ -17,7 +18,8 @@ def test_observatory_compared_to_original():
 
 
 def test_observatory_for_moving_observatories():
-    observatory = sp.Observatory(args=None, oc_file=get_test_filepath("ObsCodes_test.json"))
+    auxconfigs = auxiliaryConfigs()
+    observatory = sp.Observatory(auxconfigs=auxconfigs, args=None, oc_file=get_test_filepath("ObsCodes_test.json"))
     obs = observatory.ObservatoryXYZ
 
     assert obs["250"] == (None, None, None)
diff --git a/tests/sorcha/test_sorchaConfigs.py b/tests/sorcha/test_sorchaConfigs.py
index a6a2f254..7007582d 100644
--- a/tests/sorcha/test_sorchaConfigs.py
+++ b/tests/sorcha/test_sorchaConfigs.py
@@ -17,6 +17,7 @@
     lightcurveConfigs,
     activityConfigs,
     expertConfigs,
+    auxiliaryConfigs,
 )
 
 # these are the results we expect from sorcha_config_demo.ini
@@ -103,6 +104,31 @@
     "randomization_on": True,
     "vignetting_on": True,
 }
+
+correct_auxiliary_URLs = {
+    "de440s.bsp": "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/spk/planets/de440s.bsp",
+    "earth_200101_990827_predict.bpc": "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/earth_200101_990827_predict.bpc",
+    "earth_620120_240827.bpc": "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/earth_620120_240827.bpc",
+    "earth_latest_high_prec.bpc": "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/earth_latest_high_prec.bpc",
+    "linux_p1550p2650.440": "https://ssd.jpl.nasa.gov/ftp/eph/planets/Linux/de440/linux_p1550p2650.440",
+    "sb441-n16.bsp": "https://ssd.jpl.nasa.gov/ftp/eph/small_bodies/asteroids_de441/sb441-n16.bsp",
+    "naif0012.tls": "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/lsk/naif0012.tls",
+    "ObsCodes.json.gz": "https://minorplanetcenter.net/Extended_Files/obscodes_extended.json.gz",
+    "pck00010.pck": "https://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/pck00010.tpc",
+}
+correct_auxiliary_filenames = [
+    "de440s.bsp",
+    "earth_200101_990827_predict.bpc",
+    "earth_620120_240827.bpc",
+    "earth_latest_high_prec.bpc",
+    "linux_p1550p2650.440",
+    "sb441-n16.bsp",
+    "naif0012.tls",
+    "meta_kernel.txt",
+    "ObsCodes.json",
+    "ObsCodes.json.gz",
+    "pck00010.pck",
+]
 
 ##################################################################################################################################
 
 # SORCHA Configs test
@@ -119,8 +145,6 @@ def test_sorchaConfigs():
     # check each section to make sure you get what you expect
     assert correct_inputs == test_configs.input.__dict__
     assert correct_simulation == test_configs.simulation.__dict__
-    print(correct_filters)
-    print(test_configs.filters.__dict__)
     assert correct_filters == test_configs.filters.__dict__
     assert correct_saturation == test_configs.saturation.__dict__
     assert correct_phasecurve == test_configs.phasecurves.__dict__
@@ -131,6 +155,8 @@
     assert correct_lc_model == test_configs.lightcurve.__dict__
     assert correct_activity == test_configs.activity.__dict__
     assert correct_expert == test_configs.expert.__dict__
+    assert correct_auxiliary_URLs == test_configs.auxiliary.__dict__["urls"]
+    assert correct_auxiliary_filenames == test_configs.auxiliary.__dict__["data_file_list"]
 
 
 ##################################################################################################################################
@@ -986,3 +1012,47 @@ def test_expertConfig_bool(key_name):
         error_text.value.code
         == f"ERROR: expected a bool for config parameter {key_name}. Check value in config file."
    )
+
+
+##################################################################################################################################
+
+# auxiliary config test
+
+
+@pytest.mark.parametrize(
+    "file",
+    [
+        "de440s",
+        "earth_predict",
+        "earth_historical",
+        "jpl_planets",
+        "leap_seconds",
+        "observatory_codes_compressed",
+        "orientation_constants",
+    ],
+)
+def test_auxiliary_config_url_given_filename_not(file):
+
+    aux_configs = {file + "_url": "new_url"}
+    with pytest.raises(SystemExit) as error_text:
+        test_configs = auxiliaryConfigs(**aux_configs)
+    assert error_text.value.code == f"ERROR: url for {file} given but filename for {file} not given"
+
+
+@pytest.mark.parametrize(
+    "file",
+    [
+        "de440s",
+        "earth_predict",
+        "earth_historical",
+        "jpl_planets",
+        "leap_seconds",
+        "observatory_codes_compressed",
+        "orientation_constants",
+    ],
+)
+def test_auxiliary_config_making_url_none(file):
+    aux_configs = {file: "new_filename"}
+
+    test_configs = auxiliaryConfigs(**aux_configs)
+    assert getattr(test_configs, file + "_url") == None
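
Reviewer note (not part of the patch): a minimal sketch of how the pieces introduced above fit together. The default auxiliary filenames and URLs now live in the auxiliaryConfigs dataclass (populated from the optional [AUXILIARY] config section), and make_retriever, Observatory, create_assist_ephemeris, furnish_spiceypy and build_meta_kernel_file all receive that dataclass instead of importing module-level constants. The cache directory and the observatory-codes path below are hypothetical placeholders; everything else is taken from the patch.

    from sorcha.ephemeris.simulation_data_files import make_retriever
    from sorcha.ephemeris.simulation_parsing import Observatory
    from sorcha.utilities.sorchaConfigs import auxiliaryConfigs

    # Default filenames/URLs for this version of sorcha. A file can be swapped out by
    # overriding both the filename and its URL, e.g.
    # auxiliaryConfigs(de440s="other.bsp", de440s_url="https://example.org/other.bsp");
    # overriding only the URL raises SystemExit (see _validate_auxiliary_configs above).
    auxconfigs = auxiliaryConfigs()

    # The Pooch retriever builds its registry and URL map from the dataclass.
    retriever = make_retriever(auxconfigs, "./ar_cache")  # "./ar_cache" is a hypothetical cache path
    planets_file_path = retriever.fetch(auxconfigs.jpl_planets)  # downloads if not already cached

    # Downstream consumers take the same dataclass as an explicit argument.
    observatory = Observatory(args=None, auxconfigs=auxconfigs, oc_file="ObsCodes_test.json")  # hypothetical local file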