Skip to content

Commit

Permalink
Merge branch 'main' into remove_temp_file
Browse files Browse the repository at this point in the history
  • Loading branch information
mschwamb authored Nov 27, 2024
2 parents 243daf4 + 6570322 commit c8476b9
Show file tree
Hide file tree
Showing 23 changed files with 2,593 additions and 213 deletions.
9 changes: 6 additions & 3 deletions docs/notebooks/demo_UncertaintiesAndRandomization.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,8 @@
"import numpy as np\n",
"import pandas as pd\n",
"import matplotlib.pyplot as plt\n",
"from astropy.coordinates import SkyCoord"
"from astropy.coordinates import SkyCoord\n",
"from sorcha.utilities.sorchaConfigs import expertConfigs"
]
},
{
Expand Down Expand Up @@ -99,6 +100,8 @@
"outputs": [],
"source": [
"configs = {'trailing_losses_on':True, 'default_SNR_cut': False}\n",
"configs = expertConfigs(**configs)\n",
"setattr(configs, \"expert\", configs)\n",
"rng = PerModuleRNG(2012)"
]
},
Expand Down Expand Up @@ -317,7 +320,7 @@
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"display_name": "sorcha",
"language": "python",
"name": "python3"
},
Expand All @@ -331,7 +334,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.13"
"version": "3.10.15"
}
},
"nbformat": 4,
Expand Down
29 changes: 15 additions & 14 deletions src/sorcha/ephemeris/simulation_driver.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ def get_vec(row, vecname):
return np.asarray([row[f"{vecname}_x"], row[f"{vecname}_y"], row[f"{vecname}_z"]])


def create_ephemeris(orbits_df, pointings_df, args, configs):
def create_ephemeris(orbits_df, pointings_df, args, sconfigs):
"""Generate a set of observations given a collection of orbits
and set of pointings.
Expand All @@ -59,7 +59,8 @@ def create_ephemeris(orbits_df, pointings_df, args, configs):
The dataframe containing the collection of telescope/camera pointings.
args :
Various arguments necessary for the calculation
configs : dictionary
sconfigs:
Dataclass of configuration file arguments.
Various configuration parameters necessary for the calculation
ang_fov : float
The angular size (deg) of the field of view
Expand Down Expand Up @@ -103,11 +104,11 @@ def create_ephemeris(orbits_df, pointings_df, args, configs):
"""
verboselog = args.pplogger.info if args.verbose else lambda *a, **k: None

ang_fov = configs["ar_ang_fov"]
buffer = configs["ar_fov_buffer"]
picket_interval = configs["ar_picket"]
obsCode = configs["ar_obs_code"]
nside = 2 ** configs["ar_healpix_order"]
ang_fov = sconfigs.simulation.ar_ang_fov
buffer = sconfigs.simulation.ar_fov_buffer
picket_interval = sconfigs.simulation.ar_picket
obsCode = sconfigs.simulation.ar_obs_code
nside = 2**sconfigs.simulation.ar_healpix_order
n_sub_intervals = 101 # configs["n_sub_intervals"]

ephemeris_csv_filename = None
Expand Down Expand Up @@ -221,7 +222,7 @@ def create_ephemeris(orbits_df, pointings_df, args, configs):
# if the user has defined an output file name for the ephemeris results, write out to that file
if ephemeris_csv_filename:
verboselog("Writing out ephemeris results to file.")
write_out_ephemeris_file(ephemeris_df, ephemeris_csv_filename, args, configs)
write_out_ephemeris_file(ephemeris_df, ephemeris_csv_filename, args, sconfigs)

# join the ephemeris and input orbits dataframe, take special care to make
# sure the 'ObjID' column types match.
Expand Down Expand Up @@ -328,7 +329,7 @@ def calculate_rates_and_geometry(pointing: pd.DataFrame, ephem_geom_params: Ephe
)


def write_out_ephemeris_file(ephemeris_df, ephemeris_csv_filename, args, configs):
def write_out_ephemeris_file(ephemeris_df, ephemeris_csv_filename, args, sconfigs):
"""Writes the ephemeris out to an external file.
Parameters
Expand All @@ -342,8 +343,8 @@ def write_out_ephemeris_file(ephemeris_df, ephemeris_csv_filename, args, configs
args: sorchaArguments object or similar
Command-line arguments from Sorcha.
configs: dict
Dictionary of configuration file arguments.
sconfigs: dataclass
Dataclass of configuration file arguments.
Returns
-------
Expand All @@ -352,12 +353,12 @@ def write_out_ephemeris_file(ephemeris_df, ephemeris_csv_filename, args, configs

verboselog = args.pplogger.info if args.verbose else lambda *a, **k: None

if configs["eph_format"] == "csv":
if sconfigs.input.eph_format == "csv":
verboselog("Outputting ephemeris to CSV file...")
PPOutWriteCSV(ephemeris_df, ephemeris_csv_filename + ".csv")
elif configs["eph_format"] == "whitespace":
elif sconfigs.input.eph_format == "whitespace":
verboselog("Outputting ephemeris to whitespaced CSV file...")
PPOutWriteCSV(ephemeris_df, ephemeris_csv_filename + ".csv", separator=" ")
elif configs["eph_format"] == "hdf5" or configs["output_format"] == "h5":
elif sconfigs.input.eph_format == "hdf5" or sconfigs.output.output_format == "h5":
verboselog("Outputting ephemeris to HDF5 binary file...")
PPOutWriteHDF5(ephemeris_df, ephemeris_csv_filename + ".h5", "sorcha_ephemeris")
12 changes: 6 additions & 6 deletions src/sorcha/ephemeris/simulation_setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,7 +163,7 @@ def generate_simulations(ephem, gm_sun, gm_total, orbits_df, args):
return sim_dict


def precompute_pointing_information(pointings_df, args, configs):
def precompute_pointing_information(pointings_df, args, sconfigs):
"""This function is meant to be run once to prime the pointings dataframe
with additional information that Assist & Rebound needs for its work.
Expand All @@ -173,8 +173,8 @@ def precompute_pointing_information(pointings_df, args, configs):
Contains the telescope pointing database.
args : dictionary
Command line arguments needed for initialization.
configs : dictionary
Configuration settings.
sconfigs: dataclass
Dataclass of configuration file arguments.
Returns
--------
Expand All @@ -184,7 +184,7 @@ def precompute_pointing_information(pointings_df, args, configs):
ephem, _, _ = create_assist_ephemeris(args)

furnish_spiceypy(args)
obsCode = configs["ar_obs_code"]
obsCode = sconfigs.simulation.ar_obs_code
observatories = Observatory(args)

# vectorize the calculation to get x,y,z vector from ra/dec
Expand All @@ -204,8 +204,8 @@ def precompute_pointing_information(pointings_df, args, configs):
# create a partial function since most params don't change, and it makes the lambda easier to read
partial_get_hp_neighbors = partial(
get_hp_neighbors,
search_radius=configs["ar_ang_fov"] + configs["ar_fov_buffer"],
nside=2 ** configs["ar_healpix_order"],
search_radius=sconfigs.simulation.ar_ang_fov + sconfigs.simulation.ar_fov_buffer,
nside=2**sconfigs.simulation.ar_healpix_order,
nested=True,
)

Expand Down
20 changes: 10 additions & 10 deletions src/sorcha/modules/PPAddUncertainties.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ def degSin(x):
return np.sin(x * np.pi / 180.0)


def addUncertainties(detDF, configs, module_rngs, verbose=True):
def addUncertainties(detDF, sconfigs, module_rngs, verbose=True):
"""
Generates astrometric and photometric uncertainties, and SNR. Uses uncertainties
to randomize the photometry. Accounts for trailing losses.
Expand All @@ -80,8 +80,8 @@ def addUncertainties(detDF, configs, module_rngs, verbose=True):
detDF : Pandas dataframe
Dataframe of observations.
configs : dictionary
dictionary of configurations from config file.
sconfigs: dataclass
Dataclass of configuration file arguments.
module_rngs : PerModuleRNG
A collection of random number generators (per module).
Expand All @@ -100,11 +100,11 @@ def addUncertainties(detDF, configs, module_rngs, verbose=True):
verboselog = pplogger.info if verbose else lambda *a, **k: None

detDF["astrometricSigma_deg"], detDF["trailedSourceMagSigma"], detDF["SNR"] = uncertainties(
detDF, configs, filterMagName="trailedSourceMagTrue"
detDF, sconfigs, filterMagName="trailedSourceMagTrue"
)

if configs.get("trailing_losses_on", False):
_, detDF["PSFMagSigma"], detDF["SNR"] = uncertainties(detDF, configs, filterMagName="PSFMagTrue")
if sconfigs.expert.trailing_losses_on:
_, detDF["PSFMagSigma"], detDF["SNR"] = uncertainties(detDF, sconfigs, filterMagName="PSFMagTrue")
else:
detDF["PSFMagSigma"] = detDF["trailedSourceMagSigma"]

Expand All @@ -113,7 +113,7 @@ def addUncertainties(detDF, configs, module_rngs, verbose=True):

def uncertainties(
detDF,
configs,
sconfigs,
limMagName="fiveSigmaDepth_mag",
seeingName="seeingFwhmGeom_arcsec",
filterMagName="trailedSourceMagTrue",
Expand All @@ -130,8 +130,8 @@ def uncertainties(
detDF : Pandas dataframe
dataframe containing observations.
configs : dictionary
dictionary of configurations from config file.
sconfigs: dataclass
Dataclass of configuration file arguments.
limMagName : string, optional
pandas dataframe column name of the limiting magnitude.
Expand Down Expand Up @@ -173,7 +173,7 @@ def uncertainties(
signal-to-noise ratio.
"""

if configs.get("trailing_losses_on", False):
if sconfigs.expert.trailing_losses_on:
dMag = PPTrailingLoss.calcTrailingLoss(
detDF[dra_name],
detDF[ddec_name],
Expand Down
20 changes: 10 additions & 10 deletions src/sorcha/modules/PPApplyFOVFilter.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from sorcha.modules.PPModuleRNG import PerModuleRNG


def PPApplyFOVFilter(observations, configs, module_rngs, footprint=None, verbose=False):
def PPApplyFOVFilter(observations, sconfigs, module_rngs, footprint=None, verbose=False):
"""
Wrapper function for PPFootprintFilter and PPFilterDetectionEfficiency that checks to see
whether a camera footprint filter should be applied or if a simple fraction of the
Expand All @@ -22,8 +22,8 @@ def PPApplyFOVFilter(observations, configs, module_rngs, footprint=None, verbose
observations: Pandas dataframe
dataframe of observations.
configs : dictionary
dictionary of variables from config file.
sconfigs: dataclass
Dataclass of configuration file arguments.
module_rngs : PerModuleRNG
A collection of random number generators (per module).
Expand All @@ -45,25 +45,25 @@ def PPApplyFOVFilter(observations, configs, module_rngs, footprint=None, verbose
pplogger = logging.getLogger(__name__)
verboselog = pplogger.info if verbose else lambda *a, **k: None

if configs["camera_model"] == "footprint":
if sconfigs.fov.camera_model == "footprint":
verboselog("Applying sensor footprint filter...")
onSensor, detectorIDs = footprint.applyFootprint(
observations, edge_thresh=configs["footprint_edge_threshold"]
observations, edge_thresh=sconfigs.fov.footprint_edge_threshold
)

observations = observations.iloc[onSensor].copy()
observations["detectorID"] = detectorIDs

observations = observations.sort_index()

if configs["camera_model"] == "circle":
if sconfigs.fov.camera_model == "circle":
verboselog("FOV is circular...")
if configs["circle_radius"]:
if sconfigs.fov.circle_radius:
verboselog("Circle radius is set. Applying circular footprint filter...")
observations = PPCircleFootprint(observations, configs["circle_radius"])
if configs["fill_factor"]:
observations = PPCircleFootprint(observations, sconfigs.fov.circle_radius)
if sconfigs.fov.fill_factor:
verboselog("Fill factor is set. Removing random observations to mimic chip gaps.")
observations = PPSimpleSensorArea(observations, module_rngs, configs["fill_factor"])
observations = PPSimpleSensorArea(observations, module_rngs, sconfigs.fov.fill_factor)

return observations

Expand Down
30 changes: 15 additions & 15 deletions src/sorcha/modules/PPOutput.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ def PPIndexSQLDatabase(outf, tablename="sorcha_results"):
cnx.commit()


def PPWriteOutput(cmd_args, configs, observations_in, verbose=False):
def PPWriteOutput(cmd_args, sconfigs, observations_in, verbose=False):
"""
Writes the output in the format specified in the config file to a location
specified by the user.
Expand All @@ -142,8 +142,8 @@ def PPWriteOutput(cmd_args, configs, observations_in, verbose=False):
cmd_args : dictionary
Dictionary of command line arguments.
configs : Dictionary
Dictionary of config file arguments.
sconfigs: dataclass
Dataclass of configuration file arguments.
observations_in : Pandas dataframe
Dataframe of output.
Expand Down Expand Up @@ -171,7 +171,7 @@ def PPWriteOutput(cmd_args, configs, observations_in, verbose=False):
+ observations_in["Obj_Sun_z_LTC_km"].values ** 2
)

if configs["output_columns"] == "basic":
if sconfigs.output.output_columns == "basic":
observations = observations_in.copy()[
[
"ObjID",
Expand All @@ -193,14 +193,14 @@ def PPWriteOutput(cmd_args, configs, observations_in, verbose=False):
]

# if linking is on and unlinked objects are NOT dropped, add the object_linked column to the output
if configs["SSP_linking_on"] and not configs["drop_unlinked"]:
if sconfigs.linkingfilter.ssp_linking_on and not sconfigs.linkingfilter.drop_unlinked:
observations["object_linked"] = observations_in["object_linked"].copy()

elif configs["output_columns"] == "all":
elif sconfigs.output.output_columns == "all":
observations = observations_in.copy()
elif len(configs["output_columns"]) > 1: # assume a list of column names...
elif len(sconfigs.output.output_columns) > 1: # assume a list of column names...
try:
observations = observations_in.copy()[configs["output_columns"]]
observations = observations_in.copy()[sconfigs.output.output_columns]
except KeyError:
pplogger.error(
"ERROR: at least one of the columns provided in output_columns does not seem to exist. Check docs and try again."
Expand All @@ -209,7 +209,7 @@ def PPWriteOutput(cmd_args, configs, observations_in, verbose=False):
"ERROR: at least one of the columns provided in output_columns does not seem to exist. Check docs and try again."
)

if configs["position_decimals"]:
if sconfigs.output.position_decimals:
for position_col in [
"fieldRA_deg",
"fieldDec_deg",
Expand All @@ -221,12 +221,12 @@ def PPWriteOutput(cmd_args, configs, observations_in, verbose=False):
]:
try: # depending on type of output selected, some of these columns may not exist.
observations[position_col] = observations[position_col].round(
decimals=configs["position_decimals"]
decimals=sconfigs.output.position_decimals
)
except KeyError:
continue

if configs["magnitude_decimals"]:
if sconfigs.output.magnitude_decimals:
for magnitude_col in [
"PSFMag",
"trailedSourceMag",
Expand All @@ -239,26 +239,26 @@ def PPWriteOutput(cmd_args, configs, observations_in, verbose=False):
]:
try: # depending on type of output selected, some of these columns may not exist.
observations[magnitude_col] = observations[magnitude_col].round(
decimals=configs["magnitude_decimals"]
decimals=sconfigs.output.magnitude_decimals
)
except KeyError:
continue

verboselog("Constructing output path...")

if configs["output_format"] == "csv":
if sconfigs.output.output_format == "csv":
outputsuffix = ".csv"
out = os.path.join(cmd_args.outpath, cmd_args.outfilestem + outputsuffix)
verboselog("Output to CSV file...")
observations = PPOutWriteCSV(observations, out)

elif configs["output_format"] == "sqlite3":
elif sconfigs.output.output_format == "sqlite3":
outputsuffix = ".db"
out = os.path.join(cmd_args.outpath, cmd_args.outfilestem + outputsuffix)
verboselog("Output to sqlite3 database...")
observations = PPOutWriteSqlite3(observations, out)

elif configs["output_format"] == "hdf5" or configs["output_format"] == "h5":
elif sconfigs.output.output_format == "hdf5" or sconfigs.output.output_format == "h5":
outputsuffix = ".h5"
out = os.path.join(cmd_args.outpath, cmd_args.outfilestem + outputsuffix)
verboselog("Output to HDF5 binary file...")
Expand Down
Loading

0 comments on commit c8476b9

Please sign in to comment.