From 73664ae2177d76b4c8e8f7563c55937f9847e4be Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Tue, 10 Oct 2023 22:35:53 +0200 Subject: [PATCH 001/168] refactor duplicated code --- iop4admin/modeladmins/fitfile.py | 16 ++++++++++++++++ iop4admin/modeladmins/rawfit.py | 11 ----------- iop4admin/modeladmins/reducedfit.py | 18 ------------------ 3 files changed, 16 insertions(+), 29 deletions(-) diff --git a/iop4admin/modeladmins/fitfile.py b/iop4admin/modeladmins/fitfile.py index f583b6f4..fed84834 100644 --- a/iop4admin/modeladmins/fitfile.py +++ b/iop4admin/modeladmins/fitfile.py @@ -24,6 +24,22 @@ class AdminFitFile(admin.ModelAdmin): + @admin.display(description='TELESCOPE', ordering='epoch__telescope') + def telescope(self, obj): + return obj.epoch.telescope + + @admin.display(description='NIGHT', ordering='epoch__night') + def night(self, obj): + return obj.epoch.night + + @admin.display(description='FILENAME', ordering='filename') + def filename(self, obj): + return obj.filename + + @admin.display(description='STATUS') + def status(self, obj): + return ", ".join(obj.flag_labels) + def get_urls(self): urls = super().get_urls() my_urls = [ diff --git a/iop4admin/modeladmins/rawfit.py b/iop4admin/modeladmins/rawfit.py index 2beeca5d..1e02f78b 100644 --- a/iop4admin/modeladmins/rawfit.py +++ b/iop4admin/modeladmins/rawfit.py @@ -31,17 +31,6 @@ class AdminRawFit(AdminFitFile): "imgsize", ) - - - def telescope(self, obj): - return obj.epoch.telescope - - def night(self, obj): - return obj.epoch.night - - @admin.display(description='STATUS') - def status(self, obj): - return ", ".join(obj.flag_labels) @admin.display(description='OPTIONS') def options(self, obj): diff --git a/iop4admin/modeladmins/reducedfit.py b/iop4admin/modeladmins/reducedfit.py index e8998e8c..220fef7f 100644 --- a/iop4admin/modeladmins/reducedfit.py +++ b/iop4admin/modeladmins/reducedfit.py @@ -33,8 +33,6 @@ class AdminReducedFit(AdminFitFile): "band", "imgsize", ) - - @admin.display(description='OPTIONS') def options(self, obj): @@ -43,18 +41,6 @@ def options(self, obj): url_viewer= reverse('iop4admin:iop4api_reducedfit_viewer', args=[obj.id]) return format_html(rf'raw / details / advanced viewer') - @admin.display(description='TELESCOPE') - def telescope(self, obj): - return obj.epoch.telescope - - @admin.display(description='NIGHT') - def night(self, obj): - return obj.epoch.night - - @admin.display(description='FILENAME') - def filename(self, obj): - return obj.filename - @admin.display(description='IMGSIZE') def imgsize(self, obj): return obj.imgsize @@ -74,10 +60,6 @@ def rotangle(self, obj): @admin.display(description='EXPTIME') def exptime(self, obj): return obj.exptime - - @admin.display(description='STATUS') - def status(self, obj): - return ", ".join(obj.flag_labels) @admin.display(description='SRCS IN FIELD') def get_targets_in_field(self, obj): From 04889bd43625db69d7fa00d8e8ac22b5dd8fde33 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Tue, 10 Oct 2023 22:36:30 +0200 Subject: [PATCH 002/168] iop4.py: change print to logging info --- iop4lib/iop4.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/iop4lib/iop4.py b/iop4lib/iop4.py index e80f1028..0f86f21a 100644 --- a/iop4lib/iop4.py +++ b/iop4lib/iop4.py @@ -71,22 +71,22 @@ def discover_new_epochs(add_local_epochs_to_list=False): new_epochnames_all = set() for tel_cls in Telescope.get_known(): - print(f"Listing remote epochs for {tel_cls.name}...") + logger.info(f"Listing remote epochs for 
{tel_cls.name}...") remote_epochnames = tel_cls.list_remote_epochnames() - print(f"Found {len(remote_epochnames)} remote epochs for {tel_cls.name}.") + logger.info(f"Found {len(remote_epochnames)} remote epochs for {tel_cls.name}.") if os.path.isdir(f"{iop4conf.datadir}/raw/{tel_cls.name}/"): local_epochnames = [f"{tel_cls.name}/{night}" for night in os.listdir(f"{iop4conf.datadir}/raw/{tel_cls.name}/")] else: local_epochnames = list() - print(f"Found {len(local_epochnames)} epochs for {tel_cls.name} in local raw archive.") + logger.info(f"Found {len(local_epochnames)} epochs for {tel_cls.name} in local raw archive.") if not add_local_epochs_to_list: new_epochnames = set(remote_epochnames).difference(local_epochnames) - print(f"New epochs discovered in {tel_cls.name} (n={len(new_epochnames)}): {new_epochnames}") + logger.info(f"New epochs discovered in {tel_cls.name} (n={len(new_epochnames)}): {new_epochnames}") new_epochnames_all = new_epochnames_all.union(new_epochnames) @@ -107,10 +107,10 @@ def discover_local_epochs(): def retry_failed_files(): qs = ReducedFit.objects.filter(flags__has=ReducedFit.FLAGS.ERROR_ASTROMETRY).all() - print(f"Retrying {qs.count()} failed reduced fits.") + logger.info(f"Retrying {qs.count()} failed reduced fits.") Epoch.reduce_reducedfits(qs) qs2 = ReducedFit.objects.filter(flags__has=ReducedFit.FLAGS.ERROR_ASTROMETRY).all() - print(f"Fixed {qs.count()-qs2.count()} out of {qs.count()} failed reduced fits.") + logger.info(f"Fixed {qs.count()-qs2.count()} out of {qs.count()} failed reduced fits.") @@ -196,8 +196,8 @@ def main(): if len(epochs_to_process) > 0 and not args.list_only: process_epochs(epochs_to_process, args.force_rebuild, check_remote_list=~args.skip_remote_file_list) else: - print("Invoked with --list-only:") - print(f"{epochs_to_process=}") + logger.info("Invoked with --list-only:") + logger.info(f"{epochs_to_process=}") # Retry failed files if indicated @@ -207,7 +207,7 @@ def main(): # Start interactive shell if indicated if args.interactive: - print("Jumping to IPython shell.") + logger.info("Jumping to IPython shell.") import IPython IPython.embed(header="Start IOP4ing!", module=sys.modules['__main__']) From 0967da4f928a65bea101f9ff6d36862dcc858006 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Tue, 10 Oct 2023 22:36:59 +0200 Subject: [PATCH 003/168] enums.py: add DIPOL-1 instrument --- iop4lib/enums.py | 1 + 1 file changed, 1 insertion(+) diff --git a/iop4lib/enums.py b/iop4lib/enums.py index b6653e03..ed105fdb 100644 --- a/iop4lib/enums.py +++ b/iop4lib/enums.py @@ -43,6 +43,7 @@ class INSTRUMENTS(models.TextChoices): CAFOS = 'CAFOS2.2', "CAFOS2.2" AndorT90 = 'AndorT90', "AndorT90" AndorT150 = 'AndorT150', "AndorT150" + DIPOL1 = 'DIPOL-1', "DIPOL-1" class TELESCOPES(models.TextChoices): """ From 46048e95edbbb03b9e8419412ded4e07775fad45 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Tue, 10 Oct 2023 22:37:25 +0200 Subject: [PATCH 004/168] telescope.py: classification of DIPOL fits --- iop4lib/telescopes/telescope.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/iop4lib/telescopes/telescope.py b/iop4lib/telescopes/telescope.py index 87522775..90e541c5 100644 --- a/iop4lib/telescopes/telescope.py +++ b/iop4lib/telescopes/telescope.py @@ -209,6 +209,8 @@ def classify_instrument_kw(cls, rawfit): rawfit.instrument = INSTRUMENTS.AndorT150 elif instrume_header == "CAFOS 2.2": rawfit.instrument = INSTRUMENTS.CAFOS + elif instrume_header == "ASI Camera (1)": + rawfit.instrument = INSTRUMENTS.DIPOL1 else: raise 
ValueError(f"INSTRUME in fits header ({instrume_header}) not known.") From 0182fc30ab18e549731cc7eb4b0472ba08c8c983 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Tue, 10 Oct 2023 23:46:31 +0200 Subject: [PATCH 005/168] iop4admin: site url now points to /iop4/ necessary for standalone use of iop4api as a django app --- iop4admin/sites.py | 1 + 1 file changed, 1 insertion(+) diff --git a/iop4admin/sites.py b/iop4admin/sites.py index 57f03439..fd6a199b 100644 --- a/iop4admin/sites.py +++ b/iop4admin/sites.py @@ -7,6 +7,7 @@ class IOP4AdminSite(admin.AdminSite): site_title = "IOP4 admin" site_header = 'IOP4 admin site' + site_url = "/iop4/" index_title = 'Welcome to IOP4 admin site' iop4admin_site = IOP4AdminSite(name='iop4admin') From eddf88934bd35705b14110ce01b0ad93b7982ae9 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Wed, 11 Oct 2023 12:47:15 +0200 Subject: [PATCH 006/168] implement telescope kw check for DIPOL files --- iop4lib/telescopes/osnt090.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/iop4lib/telescopes/osnt090.py b/iop4lib/telescopes/osnt090.py index 1b20ecf1..fea1e532 100644 --- a/iop4lib/telescopes/osnt090.py +++ b/iop4lib/telescopes/osnt090.py @@ -126,6 +126,21 @@ def download_rawfits(cls, rawfits): except Exception as e: raise Exception(f"Error downloading file {rawfit.filename}: {e}.") + @classmethod + def check_telescop_kw(cls, rawfit): + r""" Subclassed to account for DIPOL files, that have empty TELESCOPE keyword as of 2023-10-11 + + TODO: this kw should not be empty. + + If it is empty, check first the instrument, and if it is DIPOL, then continue. + """ + if rawfit.header["TELESCOPE"] == "": + cls.classify_instrument_kw(rawfit) + if rawfit.instrument == INSTRUMENTS.DIPOL1: + return + + super().check_telescop_kw(rawfit) + @classmethod def classify_rawfit(cls, rawfit): From ed648a7b5b8f88676f6ac00e55834d170fcf6655 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Wed, 11 Oct 2023 12:48:06 +0200 Subject: [PATCH 007/168] remove unused code --- iop4lib/telescopes/osnt090.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/iop4lib/telescopes/osnt090.py b/iop4lib/telescopes/osnt090.py index fea1e532..6a1b5fca 100644 --- a/iop4lib/telescopes/osnt090.py +++ b/iop4lib/telescopes/osnt090.py @@ -141,20 +141,6 @@ def check_telescop_kw(cls, rawfit): super().check_telescop_kw(rawfit) - @classmethod - def classify_rawfit(cls, rawfit): - - # if iop4conf.osn_download_all_then_check_owner: - # import astropy.io.fits as fits - # with fits.open(rawfit.filepath) as hdul: - # if iop4conf.osn_download_all_then_check_owner not in hdul[0].header['OBSERVER'] and 'BIAS' not in rawfit.filepath.upper() and 'FLAT' not in rawfit.filepath.upper(): - # logger.debug(f"File {rawfit.fileloc} is not ours, deleting.") - # os.unlink(rawfit.filepath) - # rawfit.delete() - - super().classify_rawfit(rawfit) - - @classmethod def classify_juliandate_rawfit(cls, rawfit): """ From f11fc635e7e23d57a0aea2c637b18c90eef9004a Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Fri, 13 Oct 2023 17:13:47 +0200 Subject: [PATCH 008/168] instrument-dependent gain, improve comment in source --- iop4lib/telescopes/cahat220.py | 14 +++++++--- iop4lib/telescopes/osnt090.py | 49 +++++++++++++++++++++++++-------- iop4lib/telescopes/osnt150.py | 23 +++++++++++++--- iop4lib/telescopes/telescope.py | 32 +++++++++++++-------- 4 files changed, 86 insertions(+), 32 deletions(-) diff --git a/iop4lib/telescopes/cahat220.py b/iop4lib/telescopes/cahat220.py 
index 124b1664..21661acc 100644 --- a/iop4lib/telescopes/cahat220.py +++ b/iop4lib/telescopes/cahat220.py @@ -33,6 +33,8 @@ class CAHAT220(Telescope, metaclass=ABCMeta): {yymmdd}_CAFOS/ where CAFOS refers to the polarimeter and inside each folder there are the files for that day. + + Currently only one instrument, CAFOS. """ # telescope identification @@ -41,10 +43,10 @@ class CAHAT220(Telescope, metaclass=ABCMeta): abbrv = "T220" telescop_kw = "CA-2.2" - # telescope specific properties + # telescope / instrument specific properties - arcsec_per_pix = 0.530 - gain_e_adu = 1.45 + cafos_arcsec_per_pix = 0.530 + cafos_gain_e_adu = 1.45 # telescope specific methods @@ -244,7 +246,11 @@ def get_astrometry_size_hint(cls, rawfit): indicates it is a cut """ - return astrometry.SizeHint(lower_arcsec_per_pixel=0.95*cls.arcsec_per_pix, upper_arcsec_per_pixel=1.05*cls.arcsec_per_pix) + return astrometry.SizeHint(lower_arcsec_per_pixel=0.95*cls.cafos_arcsec_per_pix, upper_arcsec_per_pixel=1.05*cls.cafos_arcsec_per_pix) + + @classmethod + def get_gain_e_adu(cls, rawfit): + return cls.cafos_gain_e_adu @classmethod def compute_relative_polarimetry(cls, polarimetry_group): diff --git a/iop4lib/telescopes/osnt090.py b/iop4lib/telescopes/osnt090.py index 6a1b5fca..ed53033a 100644 --- a/iop4lib/telescopes/osnt090.py +++ b/iop4lib/telescopes/osnt090.py @@ -33,11 +33,11 @@ class OSNT090(Telescope, metaclass=ABCMeta): abbrv = "T090" telescop_kw = "T90-OSN" - # telescope specific properties + # telescope / instrument specific properties - field_width_arcmin = 13.2 - arcsec_per_pix = 0.387 - gain_e_adu = 4.5 + andort90_field_width_arcmin = 13.2 + andort90_arcsec_per_pix = 0.387 + andort90_gain_e_adu = 4.5 ftp_address = iop4conf.osn_t090_address ftp_user = iop4conf.osn_t090_user @@ -128,13 +128,13 @@ def download_rawfits(cls, rawfits): @classmethod def check_telescop_kw(cls, rawfit): - r""" Subclassed to account for DIPOL files, that have empty TELESCOPE keyword as of 2023-10-11 + r""" Subclassed to account for DIPOL files, that have empty TELESCOP keyword as of 2023-10-11 TODO: this kw should not be empty. If it is empty, check first the instrument, and if it is DIPOL, then continue. """ - if rawfit.header["TELESCOPE"] == "": + if rawfit.header["TELESCOP"] == "": cls.classify_instrument_kw(rawfit) if rawfit.instrument == INSTRUMENTS.DIPOL1: return @@ -268,7 +268,7 @@ def get_astrometry_position_hint(cls, rawfit, allsky=False, n_field_width=1.5): if allsky: hintsep = 180.0 else: - hintsep = (n_field_width * cls.field_width_arcmin*u.Unit("arcmin")).to_value(u.deg) + hintsep = (n_field_width * cls.andort90_field_width_arcmin*u.Unit("arcmin")).to_value(u.deg) return astrometry.PositionHint(ra_deg=hintcoord.ra.deg, dec_deg=hintcoord.dec.deg, radius_deg=hintsep) @@ -280,17 +280,42 @@ def get_astrometry_size_hint(cls, rawfit): the camera pixels are 0.387as/px and it has a field of view of 13,20' x 13,20'. So we provide close values for the hint. If the files are 1x1 it will be 0.387as/px, if 2x2 it will be twice. - For OSN T150 camera pixels are 0.232as/px and it has a field of view of 7.92' x 7.92'. 
+        For DIPOL-1 in OSN-T090, according to a preliminary investigation by the OSN crew:
+            The positions I have taken and the rotation angle in each case are these:
+            Dec= -10º HA=+3h rotation=-177.3º
+            Zenith rotation=-177.3º
+            Dec=+60º HA=-6h rotation=-177.7º
+            Dec=+70º HA=+5h rotation=-177.2º
+
+            The field is 9.22 x 6.28 arcmin and the pixel size is 0.134"/pix
+
+            The image angle changes very little between widely separated telescope positions, and it is 177.5º ± 0.3º
+            So at most an error of ± 0.3º is introduced in the images, and the zero point is 2.5º.
         """
 
-        if rawfit.header['NAXIS1'] == 2048:
-            return astrometry.SizeHint(lower_arcsec_per_pixel=0.95*cls.arcsec_per_pix, upper_arcsec_per_pixel=1.05*cls.arcsec_per_pix)
-        elif rawfit.header['NAXIS1'] == 1024:
-            return astrometry.SizeHint(lower_arcsec_per_pixel=2*0.95*cls.arcsec_per_pix, upper_arcsec_per_pixel=2*1.05*cls.arcsec_per_pix)
+        if rawfit.instrument == INSTRUMENTS.DIPOL1:
+
+            return astrometry.SizeHint(lower_arcsec_per_pixel=0.95*0.134, upper_arcsec_per_pixel=1.05*0.134)
+        elif rawfit.instrument == INSTRUMENTS.AndorT90:
+            if rawfit.header['NAXIS1'] == 2048:
+                return astrometry.SizeHint(lower_arcsec_per_pixel=0.95*cls.andort90_arcsec_per_pix, upper_arcsec_per_pixel=1.05*cls.andort90_arcsec_per_pix)
+            elif rawfit.header['NAXIS1'] == 1024:
+                return astrometry.SizeHint(lower_arcsec_per_pixel=2*0.95*cls.andort90_arcsec_per_pix, upper_arcsec_per_pixel=2*1.05*cls.andort90_arcsec_per_pix)
+
+        else:
+            raise ValueError("Unexpected or unknown instrument for OSN-T090")
+
+    @classmethod
+    def get_gain_e_adu(cls, rawfit):
+        if rawfit.instrument == INSTRUMENTS.AndorT90:
+            return cls.andort90_gain_e_adu
+        elif rawfit.instrument == INSTRUMENTS.DIPOL1:
+            logger.error("DIPOL-1 gain not implemented yet... returning 1.0")
+            return 1.0
 
     @classmethod
     def compute_relative_polarimetry(cls, polarimetry_group):
diff --git a/iop4lib/telescopes/osnt150.py b/iop4lib/telescopes/osnt150.py
index 81323799..1f6e51f4 100644
--- a/iop4lib/telescopes/osnt150.py
+++ b/iop4lib/telescopes/osnt150.py
@@ -35,11 +35,11 @@ class OSNT150(OSNT090, Telescope, metaclass=ABCMeta):
     abbrv = "T150"
     telescop_kw = "T150-OSN"
 
-    # telescope specific properties
+    # telescope / instrument specific properties
 
-    arcsec_per_pix = 0.232
-    gain_e_adu = 4.5
-    field_width_arcmin = 7.92
+    andort150_arcsec_per_pix = 0.232
+    andort150_gain_e_adu = 4.5
+    andort150_field_width_arcmin = 7.92
 
     ftp_address = iop4conf.osn_t150_address
     ftp_user = iop4conf.osn_t150_user
@@ -47,6 +47,21 @@ class OSNT150(OSNT090, Telescope, metaclass=ABCMeta):
 
     # telescope specific methods
 
+    @classmethod
+    def get_astrometry_size_hint(cls, rawfit):
+        r""" Get the size hint for this telescope / rawfit.
+
+        According to OSN T150 camera information (https://www.osn.iaa.csic.es/page/camaras-ccdt150-y-ccdt90)
+        camera pixels are 0.232as/px and it has a field of view of 7.92' x 7.92'.
+        If the files are 1x1 it will be that, if they are 2x2 it will be twice.
+ """ + + if rawfit.header['NAXIS1'] == 2048: + return astrometry.SizeHint(lower_arcsec_per_pixel=0.95*cls.andort150_arcsec_per_pix, upper_arcsec_per_pixel=1.05*cls.andort150_arcsec_per_pix) + elif rawfit.header['NAXIS1'] == 1024: + return astrometry.SizeHint(lower_arcsec_per_pixel=2*0.95*cls.andort150_arcsec_per_pix, upper_arcsec_per_pixel=2*1.05*cls.andort150_arcsec_per_pix) + + @classmethod def compute_relative_photometry(cls, rawfit): logger.warning(f"OSNT150.compute_relative_photometry not implemented yet, using OSNT090.compute_relative_photometry {super(cls)=}") diff --git a/iop4lib/telescopes/telescope.py b/iop4lib/telescopes/telescope.py index 90e541c5..d73ac93d 100644 --- a/iop4lib/telescopes/telescope.py +++ b/iop4lib/telescopes/telescope.py @@ -51,6 +51,8 @@ class Telescope(metaclass=ABCMeta): # Abstract attributes + # these attributes must be implemented in the subclass + # telescope identification @property @@ -63,20 +65,18 @@ def name(self): def abbrv(self): pass + # telescope / instrument specific properties + @property @abstractmethod def telescop_kw(self): pass - # telescope properties - - @property - @abstractmethod - def gain_e_adu(self): - pass # Abstract methods + # This methods must be implemented in the subclass + @classmethod @abstractmethod def list_remote_raw_fnames(cls, epoch): @@ -127,6 +127,11 @@ def get_astrometry_position_hint(cls, rawfit, *args, **kwargs): def get_astrometry_size_hint(cls, rawfit): pass + @classmethod + @abstractmethod + def get_gain_e_adu(cls, rawfit): + pass + # Not Implemented Methods (skeleton) # @classmethod @@ -141,7 +146,7 @@ def compute_absolute_photometry(cls): def compute_relative_polarimetry(cls): raise NotImplementedError - # Class methods (usable) + # Class methods (you should be using these only from this Telescope class, not from subclasses) @classmethod def get_known(cls): @@ -170,6 +175,9 @@ def is_known(self, name): """ return (name in [tel.name for tel in Telescope.get_known()]) or (name in [tel.abbrv for tel in Telescope.get_known()]) + # telescope independent functionality + # you should be using these from the subclasses already + # these don't need to be overriden in subclasses, but they can be (e.g. 
OSN-T090 overrides check_telescop_kw) @classmethod def classify_rawfit(cls, rawfit: 'RawFit'): @@ -182,9 +190,6 @@ def classify_rawfit(cls, rawfit: 'RawFit'): cls.classify_imgsize(rawfit) cls.classify_exptime(rawfit) - # telescope independent functionality (they don't need to be overriden in subclasses) - - @classmethod def check_telescop_kw(cls, rawfit): """ @@ -261,7 +266,10 @@ def get_header_objecthint(self, rawfit): - # should not depend on the telescope + # these implemente more complex functionality related to data reduction + # the ones implemented should not depend on the telescope + # but again can be overriden to customize them + # other reduction procedure like must necessarily be implemented in the subclass (like polarimetry) @classmethod def compute_aperture_photometry(cls, redf, aperpix, r_in, r_out): @@ -306,7 +314,7 @@ def compute_aperture_photometry(cls, redf, aperpix, r_in, r_out): logger.debug(f"{redf}: {np.sum(img_bkg_sub <= 0.0)} px < 0 ({math.sqrt(np.sum(img_bkg_sub <= 0.0)):.0f} px2) in BKG-SUBSTRACTED IMG, after masking.") - error = calc_total_error(img_bkg_sub, bkg.background_rms, cls.gain_e_adu) + error = calc_total_error(img_bkg_sub, bkg.background_rms, cls.get_gain_e_adu(redf)) for astrosource in redf.sources_in_field.all(): for pairs, wcs in (('O', redf.wcs1), ('E', redf.wcs2)) if redf.with_pairs else (('O',redf.wcs),): From b8c099b4f72fd0e9c1a3a7b9048ca0edc03ea088 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Sun, 15 Oct 2023 17:14:27 +0200 Subject: [PATCH 009/168] make_polarimetry_groups: account for instrument kw --- iop4lib/db/epoch.py | 1 + 1 file changed, 1 insertion(+) diff --git a/iop4lib/db/epoch.py b/iop4lib/db/epoch.py index 375a0990..f18a03e4 100644 --- a/iop4lib/db/epoch.py +++ b/iop4lib/db/epoch.py @@ -606,6 +606,7 @@ def make_polarimetry_groups(self): keys = ( ('kwobj', redf.rawfit.header['OBJECT'].split(" ")[0]), + ('instument', redf.instrument), ('band', redf.band), ('exptime', redf.exptime) ) From 7bbc6808580b1363e175b7445a995ac7a5ac5568 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Sun, 15 Oct 2023 17:15:15 +0200 Subject: [PATCH 010/168] include tests for polarimetry groups (skipped atm) --- tests/test_osnt090.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/tests/test_osnt090.py b/tests/test_osnt090.py index 2c562a23..a452de5f 100644 --- a/tests/test_osnt090.py +++ b/tests/test_osnt090.py @@ -85,6 +85,21 @@ def test_epoch_masterbias_masterflats(load_test_catalog): assert (epoch.masterflats.count() == 5) +@pytest.mark.skip(reason="Not implemented yet") +@pytest.mark.django_db(transaction=True) +def test_polarimetry_groups(load_test_catalog): + r""" Tests the splitting of polarimetry observations into groups. + + Organizing observations into groups is essential to derive polarimetry results. + + For OSN-T090 POLARIMETRY observations with AndorT090 instrument, four observations + are needed to derive a single polarimetry result, for the same source, same band and same exptime, + but different polarization angle. 
+ + """ + + assert False + @pytest.mark.django_db(transaction=True) def test_build_single_proc(load_test_catalog): """ Test the whole building process of reduced fits in a single process """ @@ -106,7 +121,6 @@ def test_build_single_proc(load_test_catalog): assert not (redf.has_flag(ReducedFit.FLAGS.ERROR_ASTROMETRY)) - @pytest.mark.django_db(transaction=True) def test_build_multi_proc(load_test_catalog): """ Test the whole building process of reduced fits through multiprocessing """ From e21832367efec907f14c785f9c6a0e35e701c107 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Sun, 15 Oct 2023 18:05:04 +0200 Subject: [PATCH 011/168] clean code, remove unused parts --- iop4lib/db/epoch.py | 55 --------------------------------- iop4lib/telescopes/telescope.py | 24 +++----------- 2 files changed, 4 insertions(+), 75 deletions(-) diff --git a/iop4lib/db/epoch.py b/iop4lib/db/epoch.py index f18a03e4..33a66ba0 100644 --- a/iop4lib/db/epoch.py +++ b/iop4lib/db/epoch.py @@ -35,49 +35,6 @@ logger = logging.getLogger(__name__) -class HybridProperty(): - def __init__(self, fget=None, fset=None, fexp=None): - self.fget = fget - self.fset = fset - self.fexp = fexp - - def getter(self, fget): - return type(self)(fget=fget, fset=self.fset, fexp=self.fexp) - - def setter(self, fset): - return type(self)(fget=self.fget, fset=fset, fexp=self.fexp) - - def expression(self, fexp): - return type(self)(fget=self.fget, fset=self.fset, fexp=fexp) - - def __get__(self, instance, owner): - if instance is None: - return self - return self.fget(instance) - - def __set__(self, instance, value): - if self.fset is not None: - self.fset(instance, value) - else: - raise AttributeError("Can't set attribute") - -def hybrid_property(fget): - res = HybridProperty(fget=fget) - if fget.__name__ is not None: - res.__name__ = fget.__name__ - return res - - -class HybridManager(models.Manager): - def get_queryset(self): - qs = super().get_queryset() - print(f"get_queryset: {qs=}, {qs.model=}") - for name, value in vars(qs.model).items(): - if isinstance(value, HybridProperty) and value.expression is not None: - print(f"Getting qs of {name} with {value=}") - qs = qs.annotate(**{name: value.fexp(qs.model)}) - print("Got qs") - return qs class Epoch(models.Model): """A class representing an epoch. @@ -88,18 +45,6 @@ class Epoch(models.Model): objects = models.Manager() - custom = HybridManager() - @hybrid_property - def epochname_(self): - print(self) - return f"{self.telescope}/{self.night}" - - @epochname_.expression - def cush(self): - return Concat('telescope', models.Value('/'), 'night', output_field=models.CharField()) - - - # Database fields and information # identifiers diff --git a/iop4lib/telescopes/telescope.py b/iop4lib/telescopes/telescope.py index d73ac93d..a0e28679 100644 --- a/iop4lib/telescopes/telescope.py +++ b/iop4lib/telescopes/telescope.py @@ -293,28 +293,15 @@ def compute_aperture_photometry(cls, redf, aperpix, r_in, r_out): bkg_box_size = redf.mdata.shape[0]//10 bkg = get_bkg(redf.mdata, filter_size=1, box_size=bkg_box_size) - # img_bkg_sub = redf.mdata - bkg.background - img_bkg_sub = redf.mdata + img = redf.mdata if np.sum(redf.mdata <= 0.0) >= 1: logger.debug(f"{redf}: {np.sum(redf.mdata <= 0.0):.0f} px < 0 ({math.sqrt(np.sum(redf.mdata <= 0.0)):.0f} px2) in IMAGE.") - - # if np.sum(img_bkg_sub <= 0.0) >= 1: - # try: - # logger.debug(f"{redf}: {np.sum(img_bkg_sub <= 0.0)} px < 0 ({math.sqrt(np.sum(img_bkg_sub <= 0.0)):.0f} px2) in BKG-SUBSTRACTED IMG. 
Check the bkg-substraction method, I'm going to try to mask sources...") - # seg_threshold = 3.0 * bkg.background_rms # safer to ensure they are sources - # segment_map, convolved_data = get_segmentation(img_bkg_sub, threshold=seg_threshold, fwhm=1, kernel_size=None, npixels=16, deblend=True) - # mask = segment_map.make_source_mask(footprint=circular_footprint(radius=6)) - # bkg = get_bkg(redf.mdata, filter_size=1, box_size=bkg_box_size, mask=mask) - # img_bkg_sub = redf.mdata - bkg.background - # except Exception as e: - # logger.debug(f"{redf}: can not mask sources here... {e}") - if np.sum(img_bkg_sub <= 0.0) >= 1: - logger.debug(f"{redf}: {np.sum(img_bkg_sub <= 0.0)} px < 0 ({math.sqrt(np.sum(img_bkg_sub <= 0.0)):.0f} px2) in BKG-SUBSTRACTED IMG, after masking.") - + if np.sum(img <= 0.0) >= 1: + logger.debug(f"{redf}: {np.sum(img <= 0.0)} px < 0 ({math.sqrt(np.sum(img <= 0.0)):.0f} px2) in BKG-SUBSTRACTED IMG, after masking.") - error = calc_total_error(img_bkg_sub, bkg.background_rms, cls.get_gain_e_adu(redf)) + error = calc_total_error(img, bkg.background_rms, cls.get_gain_e_adu(redf)) for astrosource in redf.sources_in_field.all(): for pairs, wcs in (('O', redf.wcs1), ('E', redf.wcs2)) if redf.with_pairs else (('O',redf.wcs),): @@ -323,11 +310,8 @@ def compute_aperture_photometry(cls, redf, aperpix, r_in, r_out): annulus = CircularAnnulus(astrosource.coord.to_pixel(wcs), r_in=r_in, r_out=r_out) annulus_stats = ApertureStats(redf.mdata, annulus, error=error, sigma_clip=SigmaClip(sigma=5.0, maxiters=10)) - # bkg_stats = ApertureStats(bkg.background, ap, error=error) ap_stats = ApertureStats(redf.mdata, ap, error=error) - # bkg_flux_counts = bkg_stats.sum - # bkg_flux_counts_err = bkg_stats.sum_err bkg_flux_counts = annulus_stats.median*ap_stats.sum_aper_area.value bkg_flux_counts_err = annulus_stats.sum_err / annulus_stats.sum_aper_area.value * ap_stats.sum_aper_area.value From 4a14907da6f9e3755b021c15d9c2415e02dd1df3 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Sun, 15 Oct 2023 20:31:55 +0200 Subject: [PATCH 012/168] add caha-t220 cafos tests --- .github/workflows/ci.yml | 4 +- iop4api/static/iop4api/gui.js | 2 +- tests/fixtures.py | 27 +++++++++++++ tests/test_cahat220.py | 73 +++++++++++++++++++++++++++++++++++ tests/test_generic.py | 48 +++++++++++++++++++++++ tests/test_osnt090.py | 41 +------------------- 6 files changed, 153 insertions(+), 42 deletions(-) create mode 100644 tests/fixtures.py create mode 100644 tests/test_cahat220.py create mode 100644 tests/test_generic.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fcd5e5ef..28bb7993 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -78,16 +78,16 @@ jobs: run: wget --post-data "pass=$TEST_DATA_PASSWORD" 'https://vhega.iaa.es/iop4/iop4testdata.tar.gz' -O $HOME/iop4testdata.tar.gz - name: Run tests (with -o log_cli=true -o log_cli_level=DEBUG to debug CI actions) - run: pytest -o log_cli=true -o log_cli_level=DEBUG -vxs tests + run: pytest -o log_cli=true -o log_cli_level=DEBUG -vxs tests/ - name: Output some info for debugging run: | + df -h du -sh $HOME/.cache/httpdirfs/ - docs: runs-on: ubuntu-latest steps: diff --git a/iop4api/static/iop4api/gui.js b/iop4api/static/iop4api/gui.js index 5817182a..dd867c20 100644 --- a/iop4api/static/iop4api/gui.js +++ b/iop4api/static/iop4api/gui.js @@ -203,7 +203,7 @@ function plot_hide_instrument(e) { label = e.getAttribute('data-instrument'); - console.log('Hiding instument: ' + label) + console.log('Hiding instrument: ' + 
label) if ( !('activeFilters' in plotData)) { plotData.activeFilters = []; diff --git a/tests/fixtures.py b/tests/fixtures.py new file mode 100644 index 00000000..b21d4a22 --- /dev/null +++ b/tests/fixtures.py @@ -0,0 +1,27 @@ +import pytest +from pathlib import Path + +from .conftest import TEST_CONFIG + +# iop4lib config +import iop4lib.config +iop4conf = iop4lib.Config(config_path=TEST_CONFIG) + +# django imports + +# other imports +import os +from pytest import approx + +# logging +import logging +logger = logging.getLogger(__name__) + +@pytest.fixture +def load_test_catalog(testdata, django_db_setup, django_db_blocker): + with django_db_blocker.unblock(): + from iop4lib.db import AstroSource + + # load test catalog in test db + from django.core.management import call_command + call_command('loaddata', str(Path(iop4conf.datadir) / 'testcatalog.yaml'), verbosity=0) diff --git a/tests/test_cahat220.py b/tests/test_cahat220.py new file mode 100644 index 00000000..664694c9 --- /dev/null +++ b/tests/test_cahat220.py @@ -0,0 +1,73 @@ +import pytest +from pathlib import Path + +from .conftest import TEST_CONFIG + +# iop4lib config +import iop4lib.config +iop4conf = iop4lib.Config(config_path=TEST_CONFIG) + +# other imports +import os +from pytest import approx + +# logging +import logging +logger = logging.getLogger(__name__) + +# fixtures +from .fixtures import load_test_catalog + + +@pytest.mark.django_db(transaction=True) +def test_build_multi_proc(load_test_catalog): + """ Test the whole building process of reduced fits through multiprocessing """ + + from iop4lib.db import Epoch, RawFit, ReducedFit + from iop4lib.enums import IMGTYPES, SRCTYPES + + epochname_L = ["CAHA-T220/2022-09-18", "CAHA-T220/2022-08-27"] + + epoch_L = [Epoch.create(epochname=epochname, check_remote_list=False) for epochname in epochname_L] + + for epoch in epoch_L: + epoch.build_master_biases() + epoch.build_master_flats() + + # workaround for CI + # otherwise the attempt to access the httpdsdir-mounted files directly through multiprocessing will fail + if os.getenv("CI") == 'true': + iop4conf.max_concurrent_threads = 1 + Epoch.reduce_rawfits([RawFit.objects.filter(epoch__in=epoch_L, imgtype=IMGTYPES.LIGHT).first()]) + + iop4conf.max_concurrent_threads = 4 + + rawfits = RawFit.objects.filter(epoch__in=epoch_L, imgtype=IMGTYPES.LIGHT).all() + + Epoch.reduce_rawfits(rawfits) + + assert (ReducedFit.objects.filter(epoch__in=epoch_L).count() == 4) + + for redf in ReducedFit.objects.filter(epoch__in=epoch_L).all(): + assert (redf.has_flag(ReducedFit.FLAGS.BUILT_REDUCED)) + assert not (redf.has_flag(ReducedFit.FLAGS.ERROR_ASTROMETRY)) + + from iop4lib.db import PhotoPolResult, AstroSource + + epoch = Epoch.by_epochname("CAHA-T220/2022-09-18") + + epoch.compute_relative_photometry() + epoch.compute_relative_polarimetry() + + qs_res = PhotoPolResult.objects.filter(epoch=epoch, astrosource__name="2200+420").all() + + # we expect only one photometry result target in this test dataset for this epoch and source + assert qs_res.exclude(astrosource__srctype=SRCTYPES.CALIBRATOR).count() == 1 + + res = qs_res[0] + + # check that the result is correct to 1.5 sigma compared to IOP3 + assert res.mag == approx(13.38, abs=1.5*res.mag_err) + + # check that uncertainty of the result is less than 0.08 mag + assert res.mag_err < 0.08 \ No newline at end of file diff --git a/tests/test_generic.py b/tests/test_generic.py new file mode 100644 index 00000000..5c82e4ee --- /dev/null +++ b/tests/test_generic.py @@ -0,0 +1,48 @@ +import pytest 
+from pathlib import Path + +from .conftest import TEST_CONFIG + +# iop4lib config +import iop4lib.config +iop4conf = iop4lib.Config(config_path=TEST_CONFIG) + +# other imports +import os + +# logging +import logging +logger = logging.getLogger(__name__) + +# fixtures +from .fixtures import load_test_catalog + + +@pytest.mark.django_db(transaction=True) +def test_testdata(testdata): + """Test that the test data is available""" + assert (os.path.exists(Path(iop4conf.datadir) / "raw" / "OSN-T090")) + + +@pytest.mark.django_db(transaction=True) +def test_testconfig_testdb(load_test_catalog): + """ Check that the DB is clean (it should be the test database), if it is not, all test will fail """ + from iop4lib.db import Epoch, RawFit, ReducedFit, MasterBias, MasterFlat, AperPhotResult, PhotoPolResult, AstroSource + assert (hasattr(iop4conf, "basedir")) + assert (hasattr(iop4conf, "datadir")) + assert (hasattr(iop4conf, "db_path")) + assert (hasattr(iop4conf, "config_path")) + assert (Path(iop4conf.datadir).name == "iop4testdata") + assert (Path(iop4conf.config_path).name == "config.tests.yaml") + assert ("test_" in Path(iop4conf.db_path).name) + assert (Epoch.objects.count() == 0) + assert (RawFit.objects.count() == 0) + assert (ReducedFit.objects.count() == 0) + assert (MasterBias.objects.count() == 0) + assert (MasterFlat.objects.count() == 0) + assert (AperPhotResult.objects.count() == 0) + assert (PhotoPolResult.objects.count() == 0) + + # there should be some test sources in the DB, and their calibrators + assert (0 < AstroSource.objects.count() < 20) + assert AstroSource.objects.filter(name="2200+420").exists() \ No newline at end of file diff --git a/tests/test_osnt090.py b/tests/test_osnt090.py index a452de5f..f11ddcbf 100644 --- a/tests/test_osnt090.py +++ b/tests/test_osnt090.py @@ -7,8 +7,6 @@ import iop4lib.config iop4conf = iop4lib.Config(config_path=TEST_CONFIG) -# django imports - # other imports import os from pytest import approx @@ -17,43 +15,8 @@ import logging logger = logging.getLogger(__name__) - -@pytest.mark.django_db(transaction=True) -def test_testdata(testdata): - """Test that the test data is available""" - assert (os.path.exists(Path(iop4conf.datadir) / "raw" / "OSN-T090")) - -@pytest.fixture -def load_test_catalog(testdata, django_db_setup, django_db_blocker): - with django_db_blocker.unblock(): - from iop4lib.db import AstroSource - - # load test catalog in test db - from django.core.management import call_command - call_command('loaddata', str(Path(iop4conf.datadir) / 'testcatalog.yaml'), verbosity=0) - -@pytest.mark.django_db(transaction=True) -def test_testconfig_testdb(load_test_catalog): - """ Check that the DB is clean (it should be the test database), if it is not, all test will fail """ - from iop4lib.db import Epoch, RawFit, ReducedFit, MasterBias, MasterFlat, AperPhotResult, PhotoPolResult, AstroSource - assert (hasattr(iop4conf, "basedir")) - assert (hasattr(iop4conf, "datadir")) - assert (hasattr(iop4conf, "db_path")) - assert (hasattr(iop4conf, "config_path")) - assert (Path(iop4conf.datadir).name == "iop4testdata") - assert (Path(iop4conf.config_path).name == "config.tests.yaml") - assert ("test_" in Path(iop4conf.db_path).name) - assert (Epoch.objects.count() == 0) - assert (RawFit.objects.count() == 0) - assert (ReducedFit.objects.count() == 0) - assert (MasterBias.objects.count() == 0) - assert (MasterFlat.objects.count() == 0) - assert (AperPhotResult.objects.count() == 0) - assert (PhotoPolResult.objects.count() == 0) - - # there should be some 
test sources in the DB, and their calibrators - assert (0 < AstroSource.objects.count() < 20) - assert AstroSource.objects.filter(name="2200+420").exists() +# fixtures +from .fixtures import load_test_catalog @pytest.mark.django_db(transaction=True) def test_epoch_creation(load_test_catalog): From c15dbf58c45bf8c4c16fe8ed0809f2c4eb0dda98 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Mon, 16 Oct 2023 00:04:04 +0200 Subject: [PATCH 013/168] tests_cahat220.py: ci workaround with extra test --- tests/test_cahat220.py | 36 ++++++++++++++++++++++++++++++------ 1 file changed, 30 insertions(+), 6 deletions(-) diff --git a/tests/test_cahat220.py b/tests/test_cahat220.py index 664694c9..5b8e5928 100644 --- a/tests/test_cahat220.py +++ b/tests/test_cahat220.py @@ -19,8 +19,9 @@ from .fixtures import load_test_catalog +@pytest.mark.skipif(os.getenv("CI") != "true", reason="only neccesary for actions CI as a workaround for httpdirfs") @pytest.mark.django_db(transaction=True) -def test_build_multi_proc(load_test_catalog): +def test_build_single_proc(load_test_catalog): """ Test the whole building process of reduced fits through multiprocessing """ from iop4lib.db import Epoch, RawFit, ReducedFit @@ -34,11 +35,34 @@ def test_build_multi_proc(load_test_catalog): epoch.build_master_biases() epoch.build_master_flats() - # workaround for CI - # otherwise the attempt to access the httpdsdir-mounted files directly through multiprocessing will fail - if os.getenv("CI") == 'true': - iop4conf.max_concurrent_threads = 1 - Epoch.reduce_rawfits([RawFit.objects.filter(epoch__in=epoch_L, imgtype=IMGTYPES.LIGHT).first()]) + iop4conf.max_concurrent_threads = 1 + + rawfits = RawFit.objects.filter(epoch__in=epoch_L, imgtype=IMGTYPES.LIGHT).all() + + Epoch.reduce_rawfits(rawfits) + + assert (ReducedFit.objects.filter(epoch__in=epoch_L).count() == 4) + + for redf in ReducedFit.objects.filter(epoch__in=epoch_L).all(): + assert (redf.has_flag(ReducedFit.FLAGS.BUILT_REDUCED)) + assert not (redf.has_flag(ReducedFit.FLAGS.ERROR_ASTROMETRY)) + + + +@pytest.mark.django_db(transaction=True) +def test_build_multi_proc(load_test_catalog): + """ Test the whole building process of reduced fits through multiprocessing """ + + from iop4lib.db import Epoch, RawFit, ReducedFit + from iop4lib.enums import IMGTYPES, SRCTYPES + + epochname_L = ["CAHA-T220/2022-09-18", "CAHA-T220/2022-08-27"] + + epoch_L = [Epoch.create(epochname=epochname, check_remote_list=False) for epochname in epochname_L] + + for epoch in epoch_L: + epoch.build_master_biases() + epoch.build_master_flats() iop4conf.max_concurrent_threads = 4 From 6f5d7172d324758a8ffc1bd3270f7b4ca356bdab Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Mon, 16 Oct 2023 00:56:15 +0200 Subject: [PATCH 014/168] test osnt090: add polarimetry check (not complete) --- tests/test_osnt090.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/tests/test_osnt090.py b/tests/test_osnt090.py index f11ddcbf..27f41d2a 100644 --- a/tests/test_osnt090.py +++ b/tests/test_osnt090.py @@ -112,6 +112,8 @@ def test_build_multi_proc(load_test_catalog): from iop4lib.db import PhotoPolResult, AstroSource + # 1. 
test relative photometry + epoch = Epoch.by_epochname("OSN-T090/2022-09-18") epoch.compute_relative_photometry() @@ -127,4 +129,20 @@ def test_build_multi_proc(load_test_catalog): assert res.mag == approx(13.35, abs=1.5*res.mag_err) # check that uncertainty of the result is less than 0.08 mag - assert res.mag_err < 0.08 \ No newline at end of file + assert res.mag_err < 0.08 + + # 2. test relative polarimetry + + epoch = epoch.by_epochname("OSN-T090/2023-06-11") + + epoch.compute_relative_polarimetry() + + qs_res = PhotoPolResult.objects.filter(epoch=epoch).all() + + # we expect only one polarimetry result target in this test dataset for this epoch + assert qs_res.exclude(astrosource__srctype=SRCTYPES.CALIBRATOR).count() == 1 + + res = qs_res[0] + + # check that the result is correct to 1.5 sigma compared to IOP3 + # TODO From 05d76ed0d930beda2e2db2a44a71d1ea2361303c Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Mon, 16 Oct 2023 01:10:33 +0200 Subject: [PATCH 015/168] iop4admin: allow filtering by instrument --- iop4admin/modeladmins/rawfit.py | 1 + iop4admin/modeladmins/reducedfit.py | 1 + iop4api/filters.py | 20 +++++++++++++++++--- 3 files changed, 19 insertions(+), 3 deletions(-) diff --git a/iop4admin/modeladmins/rawfit.py b/iop4admin/modeladmins/rawfit.py index 1e02f78b..c3e80243 100644 --- a/iop4admin/modeladmins/rawfit.py +++ b/iop4admin/modeladmins/rawfit.py @@ -24,6 +24,7 @@ class AdminRawFit(AdminFitFile): RawFitTelescopeFilter, RawFitNightFilter, RawFitFilenameFilter, + RawFitInstrumentFilter, RawFitFlagFilter, "imgtype", "obsmode", diff --git a/iop4admin/modeladmins/reducedfit.py b/iop4admin/modeladmins/reducedfit.py index 220fef7f..8149b7d3 100644 --- a/iop4admin/modeladmins/reducedfit.py +++ b/iop4admin/modeladmins/reducedfit.py @@ -28,6 +28,7 @@ class AdminReducedFit(AdminFitFile): RawFitTelescopeFilter, RawFitNightFilter, RawFitFilenameFilter, + RawFitInstrumentFilter, RawFitFlagFilter, "obsmode", "band", diff --git a/iop4api/filters.py b/iop4api/filters.py index 5946b8f4..7535278f 100644 --- a/iop4api/filters.py +++ b/iop4api/filters.py @@ -34,7 +34,7 @@ def queryset(self, request, queryset): return queryset.filter(id=self.value()) class RawFitNightFilter(TextInputFilter): - title = 'Night (contains)' + title = 'Night' name = 'night' parameter_name = 'night' @@ -61,15 +61,29 @@ class RawFitTelescopeFilter(admin.SimpleListFilter): def lookups(self, request, model_admin): from iop4lib.telescopes import Telescope - tels = Telescope.get_known() - return ((t.name, t.name) for t in tels) + return ((t.name, t.name) for t in Telescope.get_known()) def queryset(self, request, queryset): from iop4lib.telescopes import Telescope if (val := self.value()) is not None: return queryset.filter(epoch__telescope=Telescope.by_name(val).name) +class RawFitInstrumentFilter(admin.SimpleListFilter): + """ + Filter list for instrument in RawFit. 
+ """ + title = 'instrument' + name = 'instrument' + parameter_name = 'instrument' + def lookups(self, request, model_admin): + from iop4lib.instruments import Instrument + return ((i.name, i.name) for i in Instrument.get_known()) + + def queryset(self, request, queryset): + from iop4lib.instruments import Instrument + if (val := self.value()) is not None: + return queryset.filter(instrument=Instrument.by_name(val).name) class RawFitFlagFilter(admin.SimpleListFilter): From 13f00edaf5178659094d0f96f64feacb12250272 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Mon, 16 Oct 2023 01:20:04 +0200 Subject: [PATCH 016/168] refactor instrument functionality in classes --- iop4lib/db/epoch.py | 16 +- iop4lib/db/rawfit.py | 26 +- iop4lib/db/reducedfit.py | 14 +- iop4lib/instruments/__init__.py | 3 + iop4lib/instruments/andor_cameras.py | 418 +++++++++++++++++++++++++++ iop4lib/instruments/cafos.py | 379 ++++++++++++++++++++++++ iop4lib/instruments/dipol.py | 42 +++ iop4lib/instruments/instrument.py | 322 +++++++++++++++++++++ iop4lib/telescopes/cahat220.py | 357 +---------------------- iop4lib/telescopes/osnt090.py | 405 +------------------------- iop4lib/telescopes/osnt150.py | 6 +- iop4lib/telescopes/telescope.py | 306 +------------------- iop4lib/utils/astrometry.py | 3 +- 13 files changed, 1217 insertions(+), 1080 deletions(-) create mode 100644 iop4lib/instruments/__init__.py create mode 100644 iop4lib/instruments/andor_cameras.py create mode 100644 iop4lib/instruments/cafos.py create mode 100644 iop4lib/instruments/dipol.py create mode 100644 iop4lib/instruments/instrument.py diff --git a/iop4lib/db/epoch.py b/iop4lib/db/epoch.py index 33a66ba0..b71f3b38 100644 --- a/iop4lib/db/epoch.py +++ b/iop4lib/db/epoch.py @@ -24,7 +24,7 @@ from iop4lib.enums import * from iop4lib.telescopes import Telescope -from .astrosource import AstroSource +from iop4lib.instruments import Instrument from .fields import FlagChoices, FlagBitField from iop4lib.utils import get_mem_parent_from_child, get_total_mem_from_child, get_mem_current, get_mem_children from iop4lib.utils.parallel import epoch_bulkreduce_multiprocesing, epoch_bulkreduce_ray @@ -43,8 +43,6 @@ class Epoch(models.Model): from the telescope archives and reducing the data. 
""" - objects = models.Manager() - # Database fields and information # identifiers @@ -551,7 +549,7 @@ def make_polarimetry_groups(self): keys = ( ('kwobj', redf.rawfit.header['OBJECT'].split(" ")[0]), - ('instument', redf.instrument), + ('instrument', redf.instrument), ('band', redf.band), ('exptime', redf.exptime) ) @@ -597,10 +595,10 @@ def make_polarimetry_groups(self): t1 = Time(min([redf.juliandate for redf in redf_L]), format="jd").datetime.strftime("%H:%M:%S") t2 = Time(max([redf.juliandate for redf in redf_L]), format="jd").datetime.strftime("%H:%M:%S") - print(f"{len(redf_L)=}; {key_D.values()}, {set([redf.rotangle for redf in redf_L])} ({t1}, {t2})") + logging.debug(f"{len(redf_L)=}; {key_D.values()}, {set([redf.rotangle for redf in redf_L])} ({t1}, {t2})") for redf in redf_L: - print(f" -> {redf.rotangle}: {Time(redf.juliandate, format='jd').datetime.strftime('%H:%M:%S')}") + logging.debug(f" -> {redf.rotangle}: {Time(redf.juliandate, format='jd').datetime.strftime('%H:%M:%S')}") # return the groups and their keys: @@ -615,9 +613,9 @@ def compute_relative_polarimetry(self, *args, **kwargs): logger.info(f"{self}: computing relative polarimetry over {len(groupkeys_L)} polarimetry groups.") logger.debug(f"{self}: {groupkeys_L=}") - f = lambda x: Telescope.by_name(self.telescope).compute_relative_polarimetry(x, *args, **kwargs) - - return list(map(f, clusters_L)) + f = lambda x: Instrument.by_name(x[1]['instrument']).compute_relative_polarimetry(x[0], *args, **kwargs) + + return list(map(f, zip(clusters_L, groupkeys_L))) diff --git a/iop4lib/db/rawfit.py b/iop4lib/db/rawfit.py index 06885f1c..2e608db3 100644 --- a/iop4lib/db/rawfit.py +++ b/iop4lib/db/rawfit.py @@ -1,27 +1,30 @@ +# iop4lib config import iop4lib.config iop4conf = iop4lib.Config(config_db=False) +# django imports from django.db import models -# other imports +# iop4lib imports +from ..enums import * +from iop4lib.telescopes import Telescope +from iop4lib.instruments import Instrument +from .fitfilemodel import FitFileModel +from .fields import FlagChoices, FlagBitField +# other imports import re import os import stat import datetime - import numpy as np -from ..enums import * -from iop4lib.telescopes import Telescope -from .fitfilemodel import FitFileModel -from .fields import FlagChoices, FlagBitField - # logging - import logging logger = logging.getLogger(__name__) + + class RawFit(FitFileModel): # Database fields and information @@ -120,6 +123,8 @@ def _repr_pretty_(self, p, cycle): p.breakable() p.text(f"filename: {self.filename},") p.breakable() + p.text(f"instrument: {self.instrument},") + p.breakable() p.text(f"imgtype: {self.imgtype},") p.breakable() p.text(f"size: {self.imgsize},") @@ -139,6 +144,7 @@ def _repr_html_(self): f" - telescope: {self.epoch.telescope}
\n" f" - night: {self.epoch.night}
\n" f" - filename: {self.filename}
\n" + f" - instrument: {self.instrument}
\n" f" - imgtype: {self.imgtype}
\n" f" - size: {self.imgsize}
\n" f" - obsmode: {self.obsmode}
\n" @@ -383,12 +389,12 @@ def request_masterflat(self, other_epochs=False): @property def header_hintcoord(self): """ Returns a SkyCoord according to the headers of the FITS file.""" - return Telescope.by_name(self.epoch.telescope).get_header_hintcoord(self) + return Instrument.by_name(self.instrument).get_header_hintcoord(self) @property def header_objecthint(self): """ Returns the AstroSource according to the OBJECT keyword in the header of the FITS file. """ - return Telescope.by_name(self.epoch.telescope).get_header_objecthint(self) + return Instrument.by_name(self.instrument).get_header_objecthint(self) # Class methods diff --git a/iop4lib/db/reducedfit.py b/iop4lib/db/reducedfit.py index e179873a..4772e7ec 100644 --- a/iop4lib/db/reducedfit.py +++ b/iop4lib/db/reducedfit.py @@ -16,6 +16,7 @@ # iop4lib imports from iop4lib.telescopes import Telescope +from iop4lib.instruments import Instrument from iop4lib.utils.filedproperty import FiledProperty from iop4lib.enums import * from .rawfit import RawFit @@ -362,10 +363,10 @@ def header_objecthint(self): return self.rawfit.header_objecthint def get_astrometry_position_hint(self, allsky=False, n_field_width=1.5): - return Telescope.by_name(self.telescope).get_astrometry_position_hint(self.rawfit, allsky=allsky, n_field_width=n_field_width) + return Instrument.by_name(self.instrument).get_astrometry_position_hint(self.rawfit, allsky=allsky, n_field_width=n_field_width) def get_astrometry_size_hint(self): - return Telescope.by_name(self.telescope).get_astrometry_size_hint(self.rawfit) + return Instrument.by_name(self.instrument).get_astrometry_size_hint(self.rawfit) # REDUCTION METHODS @@ -373,10 +374,10 @@ def get_astrometry_size_hint(self): ## Delegated to telescopes def compute_aperture_photometry(self, *args, **kwargs): - return Telescope.by_name(self.telescope).compute_aperture_photometry(self, *args, **kwargs) + return Instrument.by_name(self.instrument).compute_aperture_photometry(self, *args, **kwargs) def compute_relative_photometry(self, *args, **kwargs): - return Telescope.by_name(self.telescope).compute_relative_photometry(self, *args, **kwargs) + return Instrument.by_name(self.instrument).compute_relative_photometry(self, *args, **kwargs) @classmethod def compute_relative_polarimetry(cls, polarimetry_group, *args, **kwargs): @@ -384,4 +385,7 @@ def compute_relative_polarimetry(cls, polarimetry_group, *args, **kwargs): if not all([redf.telescope == polarimetry_group[0].telescope for redf in polarimetry_group]): raise Exception("All reduced fits in a polarimetry group must be from the same telescope") - return Telescope.by_name(polarimetry_group[0].telescope).compute_relative_polarimetry(polarimetry_group, *args, **kwargs) \ No newline at end of file + if not all([redf.instrument == polarimetry_group[0].instrument for redf in polarimetry_group]): + raise Exception("All reduced fits in a polarimetry group must be from the same instrument") + + return Instrument.by_name(polarimetry_group[0].telescope).compute_relative_polarimetry(polarimetry_group, *args, **kwargs) \ No newline at end of file diff --git a/iop4lib/instruments/__init__.py b/iop4lib/instruments/__init__.py new file mode 100644 index 00000000..eec5c9ea --- /dev/null +++ b/iop4lib/instruments/__init__.py @@ -0,0 +1,3 @@ +from .instrument import Instrument +from .andor_cameras import AndorT90, AndorT150 +from .cafos import CAFOS \ No newline at end of file diff --git a/iop4lib/instruments/andor_cameras.py b/iop4lib/instruments/andor_cameras.py new file mode 100644 index 
00000000..26da674e --- /dev/null +++ b/iop4lib/instruments/andor_cameras.py @@ -0,0 +1,418 @@ +# iop4lib config +import iop4lib.config +iop4conf = iop4lib.Config(config_db=False) + +# django imports +from abc import ABCMeta + +# other imports +import astropy.units as u +from astropy.coordinates import Angle, SkyCoord +import astrometry +import numpy as np +import math + +# iop4lib imports +from iop4lib.enums import * +from .instrument import Instrument +from iop4lib.telescopes import OSNT090, OSNT150 + +# logging +import logging +logger = logging.getLogger(__name__) + + +class Andor(Instrument, metaclass=ABCMeta): + r""" Abstract class for OSN Andor cameras.""" + + @classmethod + def classify_juliandate_rawfit(cls, rawfit): + """ + Andor cameras fits has JD keyword + """ + import astropy.io.fits as fits + jd = fits.getheader(rawfit.filepath, ext=0)["JD"] + rawfit.juliandate = jd + + @classmethod + def classify_imgtype_rawfit(cls, rawfit): + """ + Andor cameras fits has IMAGETYP keyword: FLAT, BIAS, LIGHT + + .. note:: + **Sometimes, the IMAGETYP keyword is wrong**, it has LIGHT on it but the filename and the OBJECT keyword contain the word "Flat". In those ocassions, it should be classified as + a FLAT. + """ + from iop4lib.db.rawfit import RawFit + import astropy.io.fits as fits + + with fits.open(rawfit.filepath) as hdul: + if hdul[0].header['IMAGETYP'] == 'FLAT': + rawfit.imgtype = IMGTYPES.FLAT + elif hdul[0].header['IMAGETYP'] == 'BIAS': + rawfit.imgtype = IMGTYPES.BIAS + elif hdul[0].header['IMAGETYP'] == 'LIGHT': + rawfit.imgtype = IMGTYPES.LIGHT + # workarounds for wrong keyword in OSN (see note in docstring) + if "FLAT" in hdul[0].header["OBJECT"].upper() and "FLAT" in rawfit.filename.upper(): + rawfit.imgtype = IMGTYPES.FLAT + elif "BIAS" in hdul[0].header["OBJECT"].upper() and "BIAS" in rawfit.filename.upper(): + rawfit.imgtype = IMGTYPES.BIAS + else: + logger.error(f"Unknown image type for {rawfit.fileloc}.") + rawfit.imgtype = IMGTYPES.ERROR + raise ValueError + + @classmethod + def classify_band_rawfit(cls, rawfit): + """ + OSN Files have no FILTER keyword if they are BIAS, FILTER=Clear if they are FLAT, and FILTER=FilterName if they are LIGHT. + For our DB, we have R, U, ..., None, ERROR. + + For polarimetry, which is done by taking four images with the R filter at different angles, we have R_45, R0, R45, R90. + """ + + from iop4lib.db.rawfit import RawFit + + if 'FILTER' not in rawfit.header: + if rawfit.imgtype == IMGTYPES.BIAS: + rawfit.band = BANDS.NONE + else: + rawfit.band = BANDS.ERROR + raise ValueError(f"Missing FILTER keyword for {rawfit.fileloc} which is not a bias (it is a {rawfit.imgtype}).") + elif rawfit.header['FILTER'] in {"Clear", ""}: + if rawfit.imgtype == IMGTYPES.FLAT: + rawfit.band = BANDS.NONE + else: + rawfit.band = BANDS.ERROR + raise ValueError(f"FILTER keyword is 'Clear' for {rawfit.fileloc} which is not a flat (it is a {rawfit.imgtype}).") + else: + rawfit.band = rawfit.header['FILTER'][0] # First letter of the filter name (R, U, ...) includes cases as R45, R_45, etc + + @classmethod + def classify_obsmode_rawfit(cls, rawfit): + """ + In OSN Andor Polarimetry, we only have polarimetry for filter R, and it is indicated as R_45, R0, R45, R90 (-45, 0, 45 and 90 degrees). They correspond + to the different angles of the polarimeter. + + For photometry, the filter keyword willl be simply the letter R, U, etc. + + The values for angles are -45, 0, 45 and 90. + + Lately we have seen "R-45" instead of "R_45", so we have to take care of that too. 
+ """ + + from iop4lib.db.rawfit import RawFit + import re + + if rawfit.band == BANDS.ERROR: + raise ValueError("Cannot classify obsmode if band is ERROR.") + + if rawfit.band == BANDS.R: + if rawfit.header['FILTER'] == "R": + rawfit.obsmode = OBSMODES.PHOTOMETRY + else: + logger.debug("Band is R, but FILTER is not exactly R, for OSN this must mean it is polarimetry. Trying to extract angle from FILTER keyword.") + + rawfit.obsmode = OBSMODES.POLARIMETRY + + if rawfit.header['FILTER'] == "R_45" or rawfit.header['FILTER'] == "R-45": + rawfit.rotangle = -45 + elif rawfit.header['FILTER'] == "R0": + rawfit.rotangle = 0 + elif rawfit.header['FILTER'] == "R45" or rawfit.header['FILTER'] == "R+45": + rawfit.rotangle = 45 + elif rawfit.header['FILTER'] == "R90": + rawfit.rotangle = 90 + else: + raise ValueError(f"Cannot extract angle from FILTER keyword '{rawfit.header['FILTER']}'.") + else: + logger.debug("Band is not R, assuming it is photometry.") + rawfit.obsmode = OBSMODES.PHOTOMETRY + + @classmethod + def get_header_hintcoord(cls, rawfit): + """ Get the position hint from the fits header as a SkyCoord. + + OSN T090 / AndorT090 have keywords OBJECT, OBJECTRA, OBJECTDEC in the header; e.g: + OBJECT TXS0506 + OBJCTRA 05 09 20 ---> this can be input with unit u.hourangle + OBJCTDEC +05 41 16 ---> this can be input with unit u.deg + """ + + hint_coord = SkyCoord(Angle(rawfit.header['OBJCTRA'], unit=u.hourangle), Angle(rawfit.header['OBJCTDEC'], unit=u.degree), frame='icrs') + return hint_coord + + @classmethod + def get_astrometry_position_hint(cls, rawfit, allsky=False, n_field_width=1.5): + """ Get the position hint from the FITS header as an astrometry.PositionHint.""" + + hintcoord = cls.get_header_hintcoord(rawfit) + + if allsky: + hintsep = 180.0 + else: + hintsep = (n_field_width * cls.field_width_arcmin*u.Unit("arcmin")).to_value(u.deg) + + return astrometry.PositionHint(ra_deg=hintcoord.ra.deg, dec_deg=hintcoord.dec.deg, radius_deg=hintsep) + + @classmethod + def get_astrometry_size_hint(cls, rawfit): + """ Get the size hint for this telescope / rawfit. + + According to OSN T090 cameras information (https://www.osn.iaa.csic.es/page/camaras-ccdt150-y-ccdt90) + the camera pixels are 0.387as/px and it has a field of view of 13,20' x 13,20'. So we provide close values + for the hint. If the files are 1x1 it will be 0.387as/px, if 2x2 it will be twice. + + """ + + if rawfit.header['NAXIS1'] == 2048: + return astrometry.SizeHint(lower_arcsec_per_pixel=0.95*cls.arcsec_per_pix, upper_arcsec_per_pixel=1.05*cls.arcsec_per_pix) + elif rawfit.header['NAXIS1'] == 1024: + return astrometry.SizeHint(lower_arcsec_per_pixel=2*0.95*cls.arcsec_per_pix, upper_arcsec_per_pixel=2*1.05*cls.arcsec_per_pix) + + + @classmethod + def compute_relative_polarimetry(cls, polarimetry_group): + """ Computes the relative polarimetry for a polarimetry group for OSNT090 observations. + + .. note:: + The rotation angle in OSNT090 refers to the angle between the polarized filter and some reference direction. This is different + to the rotation angle for CAHA-T220 which is the angle between the half-wave plate (HW) and its fast (extraordinary) axis. See the docs + for ``CAHAT220.compute_relative_polarimetry`` for more information. + + Instrumental polarization is corrected. Currently values are hardcoded in qoff, uoff, dqoff, duoff, Phi, dPhi (see code), + but the values without any correction are stored in the DB so the correction can be automatically obtained in the future. 
+ """ + + from iop4lib.db.aperphotresult import AperPhotResult + from iop4lib.db.photopolresult import PhotoPolResult + from iop4lib.utils import get_target_fwhm_aperpix + + logger.debug("Computing %s relative polarimetry for group: %s", cls.name, "".join(map(str,polarimetry_group))) + + # Instrumental polarization + + ## values for T090 TODO: update them manually or (preferibly) dinamically (TODO) + ## to compute the instrumental polarization we need to get the mean of the Q and U images, use zero + ## (done in the _X_nocorr variables) + + qoff = 0.0579 + uoff = 0.0583 + dqoff = 0.003 + duoff = 0.0023 + Phi = math.radians(-18) + dPhi = math.radians(0.001) + + # Perform some checks on the group + + ## get the band of the group + + bands = [reducedfit.band for reducedfit in polarimetry_group] + + if len(set(bands)) == 1: + band = bands[0] + else: # should not happens + raise Exception(f"Can not compute relative polarimetry for a group with different bands: {bands}") + + ## check obsmodes + + if not all([reducedfit.obsmode == OBSMODES.POLARIMETRY for reducedfit in polarimetry_group]): + raise Exception(f"This method is only for polarimetry images.") + + ## check sources in the fields + + sources_in_field_qs_list = [reducedfit.sources_in_field.all() for reducedfit in polarimetry_group] + group_sources = set.intersection(*map(set, sources_in_field_qs_list)) + + if len(group_sources) == 0: + logger.error("No common sources in field for all polarimetry groups.") + return + + if group_sources != set.union(*map(set, sources_in_field_qs_list)): + logger.warning(f"Sources in field do not match for all polarimetry groups: {set.difference(*map(set, sources_in_field_qs_list))}") + + ## check rotation angles + + rot_angles_available = set([redf.rotangle for redf in polarimetry_group]) + rot_angles_required = {0.0, 45.0, 90.0, -45.0} + + if not rot_angles_required.issubset(rot_angles_available): + logger.error(f"Rotation angles missing: {rot_angles_required - rot_angles_available}; returning early.") + return + + # 1. Compute all aperture photometries + + target_fwhm, aperpix, r_in, r_out = get_target_fwhm_aperpix(polarimetry_group, reductionmethod=REDUCTIONMETHODS.RELPOL) + + logger.debug(f"Computing aperture photometries for the {len(polarimetry_group)} reducedfits in the group with target {aperpix:.1f}.") + + for reducedfit in polarimetry_group: + reducedfit.compute_aperture_photometry(aperpix, r_in, r_out) + + # 2. 
Compute relative polarimetry for each source (uses the computed aperture photometries) + + logger.debug("Computing relative polarimetry.") + + photopolresult_L = list() + + for astrosource in group_sources: + + flux_0 = AperPhotResult.objects.get(reducedfit__in=polarimetry_group, astrosource=astrosource, aperpix=aperpix, pairs="O", reducedfit__rotangle=0.0).flux_counts + flux_0_err = AperPhotResult.objects.get(reducedfit__in=polarimetry_group, astrosource=astrosource, pairs="O", aperpix=aperpix, reducedfit__rotangle=0.0).flux_counts_err + + flux_45 = AperPhotResult.objects.get(reducedfit__in=polarimetry_group, astrosource=astrosource, pairs="O", aperpix=aperpix, reducedfit__rotangle=45.0).flux_counts + flux_45_err = AperPhotResult.objects.get(reducedfit__in=polarimetry_group, astrosource=astrosource, pairs="O", aperpix=aperpix, reducedfit__rotangle=45.0).flux_counts_err + + flux_90 = AperPhotResult.objects.get(reducedfit__in=polarimetry_group, astrosource=astrosource, pairs="O", aperpix=aperpix, reducedfit__rotangle=90.0).flux_counts + flux_90_err = AperPhotResult.objects.get(reducedfit__in=polarimetry_group, astrosource=astrosource, pairs="O", aperpix=aperpix, reducedfit__rotangle=90.0).flux_counts_err + + flux_n45 = AperPhotResult.objects.get(reducedfit__in=polarimetry_group, astrosource=astrosource, pairs="O", aperpix=aperpix, reducedfit__rotangle=-45.0).flux_counts + flux_n45_err = AperPhotResult.objects.get(reducedfit__in=polarimetry_group, astrosource=astrosource, pairs="O", aperpix=aperpix, reducedfit__rotangle=-45.0).flux_counts_err + + # from IOP3 polarimetry_osn() : + + fluxes = np.array([flux_0, flux_45, flux_90, flux_n45]) + flux_mean = fluxes.mean() + flux_err = fluxes.std() / math.sqrt(len(fluxes)) + + qraw = (flux_0 - flux_90) / (flux_0 + flux_90) + uraw = (flux_45 - flux_n45) / (flux_45 + flux_n45) + + #Applying error propagation... + + dqraw = qraw * math.sqrt(((flux_0_err**2+flux_90_err**2)/(flux_0+flux_90)**2)+(((flux_0_err**2+flux_90_err**2))/(flux_0-flux_90)**2)) + duraw = uraw * math.sqrt(((flux_45_err**2+flux_n45_err**2)/(flux_45+flux_n45)**2)+(((flux_45_err**2+flux_n45_err**2))/(flux_45-flux_n45)**2)) + + qc = qraw - qoff + uc = uraw - uoff + + dqc = math.sqrt(dqraw**2 + dqoff**2) + duc = math.sqrt(duraw**2 + duoff**2) + + q = qc*math.cos(2*Phi) - uc*math.sin(2*Phi) + u = qc*math.sin(2*Phi) + uc*math.cos(2*Phi) + + dqa = qc*math.cos(2*Phi) * math.sqrt((dqc/qc)**2+((2*dPhi*math.sin(2*Phi))/(math.cos(2*Phi)))**2) + dqb = uc*math.sin(2*Phi) * math.sqrt((duc/uc)**2+((2*dPhi*math.cos(2*Phi))/(math.sin(2*Phi)))**2) + dua = qc*math.sin(2*Phi) * math.sqrt((dqc/qc)**2+((2*dPhi*math.cos(2*Phi))/(math.sin(2*Phi)))**2) + dub = uc*math.cos(2*Phi) * math.sqrt((duc/uc)**2+((2*dPhi*math.sin(2*Phi))/(math.cos(2*Phi)))**2) + + dq = np.sqrt(dqa**2+dqb**2) + du = np.sqrt(dua**2+dub**2) + + P = math.sqrt(q**2 + u**2) + dP = P * (1/(q**2+u**2)) * math.sqrt((q*dq)**2+(u*du)**2) + + Theta_0 = 0 + Theta = (1/2) * math.degrees(math.atan2(u,q) + Theta_0) + dTheta = (0.5 * 180.0 / math.pi) * dP/P + + # compute also non-corrected values for computation of instrumental polarization + + _Phi_nocorr = 0 # no rotation correction? 
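            # (editorial note, not part of the patch) the _*_nocorr values here and below re-run the
            # q/u computation with zero offsets and zero rotation; as stated in the method docstring,
            # they are stored so the instrumental polarization can be re-derived from the DB later.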
+ _qc_nocorr = qraw # no offset correction + _uc_nocorr = uraw # no offset correction + _q_nocorr = _qc_nocorr*math.cos(2*_Phi_nocorr) - _uc_nocorr*math.sin(2*_Phi_nocorr) + _u_nocorr = _qc_nocorr*math.sin(2*_Phi_nocorr) + _uc_nocorr*math.cos(2*_Phi_nocorr) + _p_nocorr = math.sqrt(_q_nocorr**2 + _u_nocorr**2) + _Theta_0_nocorr = 0 + _Theta_nocorr = (1/2) * math.degrees(math.atan2(_u_nocorr,_q_nocorr) + _Theta_0_nocorr) + _x_px, _y_px = astrosource.coord.to_pixel(polarimetry_group[0].wcs) + + # compute instrumental magnitude (same as for CAHA) + + if flux_mean <= 0.0: + logger.warning(f"{polarimetry_group=}: negative flux mean encountered while relative polarimetry for {astrosource=} ??!! It will be nan, but maybe we should look into this...") + + mag_inst = -2.5 * np.log10(flux_mean) # slower than math.log10 but returns nan when flux < 0 instead of throwing error (see https://github.com/juanep97/iop4/issues/24) + mag_inst_err = math.fabs(2.5 / math.log(10) * flux_err / flux_mean) + + # if the source is a calibrator, compute also the zero point + + if astrosource.srctype == SRCTYPES.CALIBRATOR: + mag_known = getattr(astrosource, f"mag_{band}") + mag_known_err = getattr(astrosource, f"mag_{band}_err", None) or 0.0 + + if mag_known is None: + logger.warning(f"Calibrator {astrosource} has no magnitude for band {band}.") + mag_zp = np.nan + mag_zp_err = np.nan + else: + mag_zp = mag_known - mag_inst + mag_zp_err = math.sqrt(mag_known_err ** 2 + mag_inst_err ** 2) + else: + mag_zp = None + mag_zp_err = None + + # save the results + + result = PhotoPolResult.create(reducedfits=polarimetry_group, + astrosource=astrosource, + reduction=REDUCTIONMETHODS.RELPOL, + mag_inst=mag_inst, mag_inst_err=mag_inst_err, mag_zp=mag_zp, mag_zp_err=mag_zp_err, + flux_counts=flux_mean, p=P, p_err=dP, chi=Theta, chi_err=dTheta, + _x_px=_x_px, _y_px=_y_px, _q_nocorr=_q_nocorr, _u_nocorr=_u_nocorr, _p_nocorr=_p_nocorr, _chi_nocorr=_Theta_nocorr, + aperpix=aperpix) + + photopolresult_L.append(result) + + + # 3. Get average zero point from zp of all calibrators in the group + + calib_mag_zp_array = np.array([result.mag_zp or np.nan for result in photopolresult_L if result.astrosource.srctype == SRCTYPES.CALIBRATOR]) # else it fills with None also and the dtype becomes object + calib_mag_zp_array = calib_mag_zp_array[~np.isnan(calib_mag_zp_array)] + + calib_mag_zp_array_err = np.array([result.mag_zp_err or np.nan for result in photopolresult_L if result.astrosource.srctype == SRCTYPES.CALIBRATOR]) + calib_mag_zp_array_err = calib_mag_zp_array_err[~np.isnan(calib_mag_zp_array_err)] + + if len(calib_mag_zp_array) == 0: + logger.error(f"Can not compute magnitude during relative photo-polarimetry without any calibrators for this reduced fit.") + + zp_avg = np.nanmean(calib_mag_zp_array) + zp_std = np.nanstd(calib_mag_zp_array) + + zp_err = np.sqrt(np.nansum(calib_mag_zp_array_err ** 2)) / len(calib_mag_zp_array_err) + zp_err = math.sqrt(zp_err ** 2 + zp_std ** 2) + + # 4. Compute the calibrated magnitudes for non-calibrators in the group using the averaged zero point + + for result in photopolresult_L: + + if result.astrosource.srctype == SRCTYPES.CALIBRATOR: + continue + + result.mag_zp = zp_avg + result.mag_zp_err = zp_err + + result.mag = result.mag_inst + zp_avg + result.mag_err = math.sqrt(result.mag_inst_err ** 2 + zp_err ** 2) + + result.save() + + # 5. 
Save results + for result in photopolresult_L: + result.save() + + +class AndorT90(Andor): + + name = "AndorT90" + instrument_kw = "AndorT90" + telescope = OSNT090.name + + + field_width_arcmin = 13.2 + arcsec_per_pix = 0.387 + gain_e_adu = 4.5 + + +class AndorT150(Andor): + + name = "AndorT150" + instrument_kw = "AndorT150" + telescope = OSNT150.name + + arcsec_per_pix = 0.232 + gain_e_adu = 4.5 + field_width_arcmin = 7.92 diff --git a/iop4lib/instruments/cafos.py b/iop4lib/instruments/cafos.py new file mode 100644 index 00000000..b333f9a6 --- /dev/null +++ b/iop4lib/instruments/cafos.py @@ -0,0 +1,379 @@ +# iop4lib config +import iop4lib.config +iop4conf = iop4lib.Config(config_db=False) + +# django imports + +# other imports +import astropy.units as u +from astropy.coordinates import Angle, SkyCoord +import astrometry +import numpy as np +import math + +# iop4lib imports +from iop4lib.enums import * +from .instrument import Instrument +from iop4lib.telescopes import CAHAT220 + +# logging +import logging +logger = logging.getLogger(__name__) + +class CAFOS(Instrument): + + name = "CAFOS2.2" + instrument_kw = "CAFOS 2.2" + telescope = CAHAT220.name + + arcsec_per_pix = 0.530 + gain_e_adu = 1.45 + field_width_arcmin = 34.0 + + + @classmethod + def classify_juliandate_rawfit(cls, rawfit): + """ + CAHA T220 has a DATE keyword in the header in ISO format. + """ + import astropy.io.fits as fits + from astropy.time import Time + date = fits.getheader(rawfit.filepath, ext=0)["DATE"] + jd = Time(date, format='isot', scale='utc').jd + rawfit.juliandate = jd + + @classmethod + def classify_imgtype_rawfit(cls, rawfit): + """ + CAHA T220 has a IMAGETYP keyword in the header: flat, bias, science + """ + from iop4lib.db.rawfit import RawFit + import astropy.io.fits as fits + + with fits.open(rawfit.filepath) as hdul: + if hdul[0].header['IMAGETYP'] == 'flat': + rawfit.imgtype = IMGTYPES.FLAT + elif hdul[0].header['IMAGETYP'] == 'bias': + rawfit.imgtype = IMGTYPES.BIAS + elif hdul[0].header['IMAGETYP'] == 'science': + rawfit.imgtype = IMGTYPES.LIGHT + else: + logger.error(f"Unknown image type for {rawfit.fileloc}.") + rawfit.imgtype = IMGTYPES.ERROR + raise ValueError + + @classmethod + def classify_band_rawfit(cls, rawfit): + """ + INSFLNAM is BesselR ?? + """ + + from iop4lib.db.rawfit import RawFit + import astropy.io.fits as fits + + if 'INSFLNAM' in rawfit.header: + if rawfit.header['INSFLNAM'] == 'BessellR': + rawfit.band = BANDS.R + else: + logger.error(f"{rawfit}: unknown filter {rawfit.header['INSFLNAM']}.") + rawfit.band = BANDS.ERROR + raise ValueError(f"{rawfit}: unknown filter {rawfit.header['INSFLNAM']}.") + else: + rawfit.band = BANDS.ERROR + raise ValueError(f"{rawfit}: INSFLNAM keyword not present.") + + @classmethod + def classify_obsmode_rawfit(cls, rawfit): + """ + For CAHA T220, if we are dealing with polarimetry, we have: + INSTRMOD: Polarizer + INSPOFPI Wollaston + INSPOROT 0.0, 22.48, 67.48 + + I HAVE NOT FOUND YET OTHER VALUES THAT ARE NOT THIS, PRINT A WARNING OTHERWISE. 
+ """ + from iop4lib.db.rawfit import RawFit + + if rawfit.header['INSTRMOD'] == 'Polarizer' and rawfit.header['INSPOFPI'] == 'Wollaston': + rawfit.obsmode = OBSMODES.POLARIMETRY + rawfit.rotangle = float(rawfit.header['INSPOROT']) + + if rawfit.imgtype == IMGTYPES.BIAS: + logger.debug(f"Probably not important, but {rawfit.fileloc} is BIAS but has polarimetry keywords, does it makes sense?") + else: + logger.error("Not implemented, please check the code.") + + @classmethod + def get_header_hintcoord(cls, rawfit): + """ Get the position hint from the FITS header as a coordinate. + + Images from CAFOS T2.2 have RA, DEC in the header, both in degrees. + """ + + hint_coord = SkyCoord(Angle(rawfit.header['RA'], unit=u.deg), Angle(rawfit.header['DEC'], unit=u.deg), frame='icrs') + return hint_coord + + @classmethod + def get_astrometry_position_hint(cls, rawfit, allsky=False, n_field_width=1.5): + """ Get the position hint from the FITS header as an astrometry.PositionHint object. """ + + hintcoord = cls.get_header_hintcoord(rawfit) + + if allsky: + hintsep = 180 + else: + hintsep = n_field_width * u.Quantity("16 arcmin").to_value(u.deg) # 16 arcmin is the full field size of the CAFOS T2.2, our cut is smaller (6.25, 800x800, but the pointing kws might be from anywhere in the full field) + + return astrometry.PositionHint(ra_deg=hintcoord.ra.deg, dec_deg=hintcoord.dec.deg, radius_deg=hintsep) + + @classmethod + def get_astrometry_size_hint(cls, rawfit): + """ Get the size hint for this telescope / rawfit. + + from http://w3.caha.es/CAHA/Instruments/CAFOS/cafos22.html + pixel size in arcmin is around : ~0.530 arcsec + field size (diameter is) 16.0 arcmin (for 2048 pixels) + it seems that this is for 2048x2048 images, our images are 800x800 but the fitsheader DATASEC + indicates it is a cut + """ + + return astrometry.SizeHint(lower_arcsec_per_pixel=0.95*cls.arcsec_per_pix, upper_arcsec_per_pixel=1.05*cls.arcsec_per_pix) + + @classmethod + def compute_relative_polarimetry(cls, polarimetry_group): + """ Computes the relative polarimetry for a polarimetry group for CAFOS observations. + + .. note:: + CAFOS Polarimetry observations are done with a system consisting of a half-wave plate (HW) and a Wollaston prism (P). + + The rotation angle theta_i refers to the angle theta_i between the HW plate and its fast (extraordinary) axes. + + The effect of the HW is to rotate the polarization vector by 2*theta_i, and the effect of the Wollaston prism is to split + the beam into two beams polarized in orthogonal directions (ordinary and extraordinary). + + An input polarized beam with direction v will be rotated by HW by 2*theta_i. The O and E fluxes will be the projections of the + rotated vector onto the ordinary and extraordinary directions of the Wollaston prism (in absolute values since -45 and 45 + polarization directions are equivalent). A way to write this is: + + fo(theta_i) = abs( ) = abs ( ), where <,> denotes the scalar product and R is the rotation matrix. + + Therefore the following observed fluxes should be the same (ommiting the abs for clarity): + + fo(0º) = = = = = fe(45º) + fo(22º) = = = = = = fe(67º) + fo(45º) = = = = - = fe(0º) + fo(67º) = = = = = = fe(22º) + + See https://arxiv.org/pdf/astro-ph/0509153.pdf (doi 10.1086/497581) for the formulas relating these fluxes to + the Stokes parameters. + + .. note:: + This rotation angle has a different meaning than for OSN-T090 Polarimetry observations. 
For them, it is the rotation angle of a polarized filter + with respect to some reference direction. Therefore we have the equivalencies (again ommiting the abs for clarity): + + OSN(45º) = = = = fE(22º) = fO(67º) + OSN(90º) = = = fO(45º) + OSN(-45º) = OSN(135º) = abs() = = = fE(67º) = fO(22º) + OSN(0º) = = = fO(0º) + """ + + from iop4lib.db.aperphotresult import AperPhotResult + from iop4lib.db.photopolresult import PhotoPolResult + from iop4lib.utils import get_target_fwhm_aperpix + + # Perform some checks on the group + + ## get the band of the group + + bands = [reducedfit.band for reducedfit in polarimetry_group] + + if len(set(bands)) == 1: + band = bands[0] + else: # should not happens + raise Exception(f"Can not compute relative polarimetry for a group with different bands: {bands}") + + ## check obsmodes + + if not all([reducedfit.obsmode == OBSMODES.POLARIMETRY for reducedfit in polarimetry_group]): + raise Exception(f"This method is only for polarimetry images.") + + ## check sources in the fields + + sources_in_field_qs_list = [reducedfit.sources_in_field.all() for reducedfit in polarimetry_group] + group_sources = set.intersection(*map(set, sources_in_field_qs_list)) + + if len(group_sources) == 0: + logger.error("No common sources in field for all polarimetry groups.") + return + + if group_sources != set.union(*map(set, sources_in_field_qs_list)): + logger.warning(f"Sources in field do not match for all polarimetry groups: {set.difference(*map(set, sources_in_field_qs_list))}") + + ## check rotation angles + + rot_angles_available = set([redf.rotangle for redf in polarimetry_group]) + rot_angles_required = {0.0, 22.48, 44.98, 67.48} + + if not rot_angles_available.issubset(rot_angles_required): + logger.warning(f"Rotation angles missing: {rot_angles_required - rot_angles_available}") + + # 1. Compute all aperture photometries + + target_fwhm, aperpix, r_in, r_out = get_target_fwhm_aperpix(polarimetry_group, reductionmethod=REDUCTIONMETHODS.RELPOL) + + logger.debug(f"Computing aperture photometries for the {len(polarimetry_group)} reducedfits in the group with target aperpix {aperpix:.1f}.") + + for reducedfit in polarimetry_group: + reducedfit.compute_aperture_photometry(aperpix, r_in, r_out) + + # 2. 
Compute relative polarimetry for each source (uses the computed aperture photometries) + + logger.debug("Computing relative polarimetry.") + + photopolresult_L = list() + + for astrosource in group_sources: + logger.debug(f"Computing relative polarimetry for {astrosource}.") + + # if any angle is missing for some pair, it uses the equivalent angle of the other pair + + qs = AperPhotResult.objects.filter(reducedfit__in=polarimetry_group, astrosource=astrosource, aperpix=aperpix, flux_counts__isnull=False) + + equivs = ((('O',0.0), ('E',44.98)), + (('O',22.48), ('E',67.48)), + (('O',44.98), ('E',0.0)), + (('O',67.48), ('E',22.48)), + (('E',0.0), ('O',44.98)), + (('E',22.48), ('O',67.48)), + (('E',44.98), ('O',0.0)), + (('E',67.48), ('O',22.48))) + + flux_D = dict() + for equiv in equivs: + if qs.filter(pairs=equiv[0][0], reducedfit__rotangle=equiv[0][1]).exists(): + flux_D[(equiv[0][0], equiv[0][1])] = qs.filter(pairs=equiv[0][0], reducedfit__rotangle=equiv[0][1]).values_list("flux_counts", "flux_counts_err").last() + elif qs.filter(pairs=equiv[1][0], reducedfit__rotangle=equiv[1][1]).exists(): + logger.warning(f"Missing flux for {astrosource} {equiv[0][0]} {equiv[0][1]}, using {equiv[1][0]} {equiv[1][1]}") + flux_D[(equiv[0][0], equiv[0][1])] = qs.filter(pairs=equiv[1][0], reducedfit__rotangle=equiv[1][1]).values_list("flux_counts", "flux_counts_err").last() + else: + logger.error(f"Missing flux for {astrosource} {equiv[0][0]} {equiv[0][1]} and {equiv[1][0]} {equiv[1][1]}") + return + + flux_O_0, flux_O_0_err = flux_D[('O',0.0)] + flux_O_22, flux_O_22_err = flux_D[('O',22.48)] + flux_O_45, flux_O_45_err = flux_D[('O',44.98)] + flux_O_67, flux_O_67_err = flux_D[('O',67.48)] + flux_E_0, flux_E_0_err = flux_D[('E',0.0)] + flux_E_22, flux_E_22_err = flux_D[('E',22.48)] + flux_E_45, flux_E_45_err = flux_D[('E',44.98)] + flux_E_67, flux_E_67_err = flux_D[('E',67.48)] + + fluxes_O = np.array([flux_O_0, flux_O_22, flux_O_45, flux_O_67]) + fluxes_E = np.array([flux_E_0, flux_E_22, flux_E_45, flux_E_67]) + + # logger.debug(f"Fluxes_O: {fluxes_O}") + # logger.debug(f"Fluxes_E: {fluxes_E}") + + fluxes = (fluxes_O + fluxes_E) / 2. + flux_mean = fluxes.mean() + flux_err = fluxes.std() / math.sqrt(len(fluxes)) + + RQ = np.sqrt((flux_O_0 / flux_E_0) / (flux_O_45 / flux_E_45)) + dRQ = RQ / 2 * math.sqrt((flux_O_0_err / flux_O_0) ** 2 + (flux_E_0_err / flux_E_0) ** 2 + (flux_O_45_err / flux_O_45) ** 2 + (flux_E_45_err / flux_E_45) ** 2) + + RU = np.sqrt((flux_O_22 / flux_E_22) / (flux_O_67 / flux_E_67)) + dRU = RU / 2 * math.sqrt((flux_O_22_err / flux_O_22) ** 2 + (flux_E_22_err / flux_E_22) ** 2 + (flux_O_67_err / flux_O_67) ** 2 + (flux_E_67_err / flux_E_67) ** 2) + + Q_I = (RQ - 1) / (RQ + 1) + dQ_I = math.fabs( RQ / (RQ + 1) ** 2 * dRQ) + U_I = (RU - 1) / (RU + 1) + dU_I = math.fabs( RU / (RU + 1) ** 2 * dRU) + + P = math.sqrt(Q_I ** 2 + U_I ** 2) + dP = 1/P * math.sqrt(Q_I**2 * dQ_I**2 + U_I**2 * dU_I**2) + + Theta_0 = 0 + + if Q_I >= 0: + Theta_0 = math.pi + if U_I > 0: + Theta_0 = -1 * math.pi + # if Q_I < 0: + # Theta_0 = math.pi / 2 + + Theta = 0.5 * math.degrees(math.atan(U_I / Q_I) + Theta_0) + dTheta = 0.5 * 180.0 / math.pi * (1 / (1 + (U_I/Q_I) ** 2)) * math.sqrt( (dU_I/Q_I)**2 + (U_I*dQ_I/Q_I**2)**2 ) + + # compute instrumental magnitude + + if flux_mean <= 0.0: + logger.warning(f"{polarimetry_group=}: negative flux mean encountered while relative polarimetry for {astrosource=} ??!! 
It will be nan, but maybe we should look into this...") + + mag_inst = -2.5 * np.log10(flux_mean) # slower than math.log10 but returns nan when flux < 0 instead of throwing error (see https://github.com/juanep97/iop4/issues/24) + mag_inst_err = math.fabs(2.5 / math.log(10) * flux_err / flux_mean) + + # if the source is a calibrator, compute also the zero point + + if astrosource.srctype == SRCTYPES.CALIBRATOR: + mag_known = getattr(astrosource, f"mag_{band}") + mag_known_err = getattr(astrosource, f"mag_{band}_err", None) or 0.0 + + if mag_known is None: + logger.warning(f"Calibrator {astrosource} has no magnitude for band {band}.") + mag_zp = np.nan + mag_zp_err = np.nan + else: + mag_zp = mag_known - mag_inst + # mag_zp_err = math.sqrt(mag_known_err ** 2 + mag_inst_err ** 2) + mag_zp_err = math.fabs(mag_inst_err) # do not add error on literature magnitude + else: + mag_zp = None + mag_zp_err = None + + # save the results + + result = PhotoPolResult.create(reducedfits=polarimetry_group, + astrosource=astrosource, + reduction=REDUCTIONMETHODS.RELPOL, + mag_inst=mag_inst, mag_inst_err=mag_inst_err, mag_zp=mag_zp, mag_zp_err=mag_zp_err, + flux_counts=flux_mean, p=P, p_err=dP, chi=Theta, chi_err=dTheta, + aperpix=aperpix) + + photopolresult_L.append(result) + + + # 3. Get average zero point from zp of all calibrators in the group + + calib_mag_zp_array = np.array([result.mag_zp or np.nan for result in photopolresult_L if result.astrosource.srctype == SRCTYPES.CALIBRATOR]) # else it fills with None also and the dtype becomes object + calib_mag_zp_array = calib_mag_zp_array[~np.isnan(calib_mag_zp_array)] + + calib_mag_zp_array_err = np.array([result.mag_zp_err or np.nan for result in photopolresult_L if result.astrosource.srctype == SRCTYPES.CALIBRATOR]) + calib_mag_zp_array_err = calib_mag_zp_array_err[~np.isnan(calib_mag_zp_array_err)] + + if len(calib_mag_zp_array) == 0: + logger.error(f"Can not compute magnitude during relative photo-polarimetry without any calibrators for this reduced fit.") + + zp_avg = np.nanmean(calib_mag_zp_array) + zp_std = np.nanstd(calib_mag_zp_array) + + zp_err = np.sqrt(np.nansum(calib_mag_zp_array_err ** 2)) / len(calib_mag_zp_array_err) + zp_err = math.sqrt(zp_err ** 2 + zp_std ** 2) + + # 4. Compute the calibrated magnitudes for non-calibrators in the group using the averaged zero point + + for result in photopolresult_L: + + if result.astrosource.srctype == SRCTYPES.CALIBRATOR: + continue + + result.mag_zp = zp_avg + result.mag_zp_err = zp_err + + result.mag = result.mag_inst + zp_avg + result.mag_err = math.sqrt(result.mag_inst_err ** 2 + zp_err ** 2) + + result.save() + + # 5. Save results + for result in photopolresult_L: + result.save() diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py new file mode 100644 index 00000000..2372d74d --- /dev/null +++ b/iop4lib/instruments/dipol.py @@ -0,0 +1,42 @@ +# iop4lib config +import iop4lib.config +iop4conf = iop4lib.Config(config_db=False) + +# django imports + +# other imports +import astrometry + +# iop4lib imports +from iop4lib.enums import * +from .instrument import Instrument + +# logging +import logging +logger = logging.getLogger(__name__) + + +class DIPOL(Instrument): + + name = "DIPOL-1" + + arcsec_per_pix = 0.134 + + @classmethod + def get_astrometry_size_hint(cls, rawfit): + """ Get the size hint for this telescope / rawfit. 
+ + For DIPOL-1 in OSN-T090, according to preliminary investigation of OSN crew is: + Las posiciones que he tomado y el ángulo de rotación en cada caso son estos: + Dec= -10º HA=+3h rotación=-177.3º + Zenit rotación=-177.3º + Dec=+60º HA=-6h rotación=-177.7º + Dec=+70º HA=+5h rotación=-177.2º + + El campo es de 9.22 x 6.28 arcmin y el tamaño de pixel de 0.134"/pix + + El ángulo de la imagen cambia muy poco entre las posiciones muy separadas del telescopio, y es de 177.5º ± 0.3º + Así que como mucho se produce un error de ± 0.3º en las imágenes, y el punto cero es de 2.5º. + """ + + return astrometry.SizeHint(lower_arcsec_per_pixel=0.95*cls.arcsec_per_pix, upper_arcsec_per_pixel=1.05*cls.arcsec_per_pix) \ No newline at end of file diff --git a/iop4lib/instruments/instrument.py b/iop4lib/instruments/instrument.py new file mode 100644 index 00000000..b53a2c4f --- /dev/null +++ b/iop4lib/instruments/instrument.py @@ -0,0 +1,322 @@ +# iop4lib config +import iop4lib.config +iop4conf = iop4lib.Config(config_db=False) + +# django imports + +# other imports +from abc import ABCMeta, abstractmethod + +import re +import numpy as np +import math + +# iop4lib imports +from iop4lib.enums import * + +# logging +import logging +logger = logging.getLogger(__name__) + +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from iop4lib.db import ReducedFit + +class Instrument(metaclass=ABCMeta): + """ Base class for instruments. + + Inherit this class to provide instrument specific functionality (e.g. classification of images, + reduction, etc). + + """ + + # Instrument identification (subclasses must implement these) + + @property + @abstractmethod + def name(self): + pass + + @property + @abstractmethod + def telescope(self): + pass + + @property + @abstractmethod + def instrument_kw(self): + pass + + # Instrument specific properties (subclasses must implement these) + + @property + @abstractmethod + def field_width_arcmin(self): + pass + + @property + @abstractmethod + def arcsec_per_pix(self): + pass + + @property + @abstractmethod + def gain_e_adu(self): + pass + + # Class methods (you should be using these from the Instrument class, not subclasses) + + @classmethod + @abstractmethod + def get_known(cls): + from .andor_cameras import AndorT90, AndorT150 + from .cafos import CAFOS + + return [AndorT90, AndorT150, CAFOS] + + @classmethod + def by_name(cls, name: str) -> 'Instrument': + """ + Try to get instrument by name, else raise Exception. + """ + for instr in Instrument.get_known(): + if instr.name == name: + return instr + raise NotImplementedError(f"Instrument {name} not implemented.") + + # Common instrument functionality + # You should be using these from the subclasses already + # these don't need to be overriden in subclasses, but they can be + + @classmethod + def classify_rawfit(cls, rawfit): + cls.check_instrument_kw(rawfit) + cls.classify_juliandate_rawfit(rawfit) + cls.classify_imgtype_rawfit(rawfit) + cls.classify_band_rawfit(rawfit) + cls.classify_obsmode_rawfit(rawfit) + cls.classify_imgsize(rawfit) + cls.classify_exptime(rawfit) + + @classmethod + def check_instrument_kw(cls, rawfit): + """ Check that the instrument keyword is correct. 
""" + if rawfit.header["INSTRUME"] != cls.instrument_kw: + raise ValueError(f"Raw fit file {rawfit.fileloc} has INSTRUME != {cls.instrument_kw}.") + + @classmethod + def classify_imgsize(cls, rawfit): + import astropy.io.fits as fits + from iop4lib.db import RawFit + + with fits.open(rawfit.filepath) as hdul: + if hdul[0].header["NAXIS"] == 2: + sizeX = hdul[0].header["NAXIS1"] + sizeY = hdul[0].header["NAXIS2"] + rawfit.imgsize = f"{sizeX}x{sizeY}" + return rawfit.imgsize + else: + raise ValueError(f"Raw fit file {rawfit.fileloc} has NAXIS != 2, cannot get imgsize.") + + @classmethod + def classify_exptime(cls, rawfit): + """ + EXPTIME is an standard FITS keyword, measured in seconds. + """ + import astropy.io.fits as fits + from iop4lib.db import RawFit + + with fits.open(rawfit.filepath) as hdul: + rawfit.exptime = hdul[0].header["EXPTIME"] + + + @classmethod + def get_header_objecthint(self, rawfit): + r""" Get a hint for the AstroSource in this image from the header. OBJECT is a standard keyword. Return None if none found. + + At the moment his only tries to match sources + with the IAU name format `[0-9]*\+[0-9]*`. + """ + + from iop4lib.db import AstroSource + + object_header = rawfit.header["OBJECT"] + + matchs = re.findall(r".*?([0-9]*\+[0-9]*).*", object_header) + if len(matchs) > 0: + return AstroSource.objects.filter(name__contains=matchs[0]).first() + else: + return None + + @classmethod + def compute_aperture_photometry(cls, redf, aperpix, r_in, r_out): + + from iop4lib.db.aperphotresult import AperPhotResult + from iop4lib.utils.sourcedetection import get_bkg, get_segmentation + from photutils.utils import circular_footprint + from photutils.aperture import CircularAperture, CircularAnnulus, ApertureStats, aperture_photometry + from photutils.utils import calc_total_error + from astropy.stats import SigmaClip + from iop4lib.utils import get_target_fwhm_aperpix + + if redf.mdata.shape[0] == 1024: + bkg_box_size = 128 + elif redf.mdata.shape[0] == 2048: + bkg_box_size = 256 + elif redf.mdata.shape[0] == 800: + bkg_box_size = 100 + else: + logger.warning(f"Image size {redf.mdata.shape[0]} not expected.") + bkg_box_size = redf.mdata.shape[0]//10 + + bkg = get_bkg(redf.mdata, filter_size=1, box_size=bkg_box_size) + img = redf.mdata + + if np.sum(redf.mdata <= 0.0) >= 1: + logger.debug(f"{redf}: {np.sum(redf.mdata <= 0.0):.0f} px < 0 ({math.sqrt(np.sum(redf.mdata <= 0.0)):.0f} px2) in IMAGE.") + + if np.sum(img <= 0.0) >= 1: + logger.debug(f"{redf}: {np.sum(img <= 0.0)} px < 0 ({math.sqrt(np.sum(img <= 0.0)):.0f} px2) in BKG-SUBSTRACTED IMG, after masking.") + + error = calc_total_error(img, bkg.background_rms, cls.gain_e_adu) + + for astrosource in redf.sources_in_field.all(): + for pairs, wcs in (('O', redf.wcs1), ('E', redf.wcs2)) if redf.with_pairs else (('O',redf.wcs),): + + ap = CircularAperture(astrosource.coord.to_pixel(wcs), r=aperpix) + annulus = CircularAnnulus(astrosource.coord.to_pixel(wcs), r_in=r_in, r_out=r_out) + + annulus_stats = ApertureStats(redf.mdata, annulus, error=error, sigma_clip=SigmaClip(sigma=5.0, maxiters=10)) + ap_stats = ApertureStats(redf.mdata, ap, error=error) + + bkg_flux_counts = annulus_stats.median*ap_stats.sum_aper_area.value + bkg_flux_counts_err = annulus_stats.sum_err / annulus_stats.sum_aper_area.value * ap_stats.sum_aper_area.value + + flux_counts = ap_stats.sum - annulus_stats.mean*ap_stats.sum_aper_area.value + flux_counts_err = ap_stats.sum_err + + AperPhotResult.create(reducedfit=redf, + astrosource=astrosource, + aperpix=aperpix, + 
pairs=pairs, + bkg_flux_counts=bkg_flux_counts, bkg_flux_counts_err=bkg_flux_counts_err, + flux_counts=flux_counts, flux_counts_err=flux_counts_err) + + + @classmethod + def compute_relative_photometry(cls, redf: 'ReducedFit') -> None: + """ Common relative photometry method for all instruments. """ + + from iop4lib.db.aperphotresult import AperPhotResult + from iop4lib.db.photopolresult import PhotoPolResult + from iop4lib.utils import get_target_fwhm_aperpix + + if redf.obsmode != OBSMODES.PHOTOMETRY: + raise Exception(f"{redf}: this method is only for plain photometry images.") + + target_fwhm, aperpix, r_in, r_out = get_target_fwhm_aperpix([redf], reductionmethod=REDUCTIONMETHODS.RELPHOT) + + if target_fwhm is None: + logger.error("Could not estimate a target FWHM, aborting relative photometry.") + return + + # 1. Compute all aperture photometries + + logger.debug(f"{redf}: computing aperture photometries for {redf}.") + + redf.compute_aperture_photometry(aperpix, r_in, r_out) + + # 2. Compute relative polarimetry for each source (uses the computed aperture photometries) + + logger.debug(f"{redf}: computing relative photometry.") + + # 2. Compute the flux in counts and the instrumental magnitude + + photopolresult_L = list() + + for astrosource in redf.sources_in_field.all(): + + result = PhotoPolResult.create(reducedfits=[redf], astrosource=astrosource, reduction=REDUCTIONMETHODS.RELPHOT) + + aperphotresult = AperPhotResult.objects.get(reducedfit=redf, astrosource=astrosource, aperpix=aperpix, pairs="O") + + result.bkg_flux_counts = aperphotresult.bkg_flux_counts + result.bkg_flux_counts_err = aperphotresult.bkg_flux_counts_err + result.flux_counts = aperphotresult.flux_counts + result.flux_counts_err = aperphotresult.flux_counts_err + + # logger.debug(f"{self}: {result.flux_counts=}") + + if result.flux_counts is None: # when does this happen? when there is a source whose apertue falls partially outside the image? https://github.com/juanep97/iop4/issues/24 + logger.error(f"{redf}: during relative photometry, encountered flux_counts=None for source {astrosource.name}, aperphotresult {aperphotresult.id}!!!") + result.flux_counts = np.nan + result.flux_counts_err = np.nan + + if result.flux_counts <= 0.0: + logger.warning(f"{redf}: negative flux counts encountered while relative photometry for {astrosource=} ??!! They will be nans, but maybe we should look into this...") + + result.mag_inst = -2.5 * np.log10(result.flux_counts) # np.nan if result.flux_counts <= 0.0 + result.mag_inst_err = math.fabs(2.5 / math.log(10) / result.flux_counts * result.flux_counts_err) + + # if the source is a calibrator, compute also the zero point + if result.astrosource.srctype == SRCTYPES.CALIBRATOR: + result.mag_known = getattr(result.astrosource, f"mag_{redf.band}") + result.mag_known_err = getattr(result.astrosource, f"mag_{redf.band}_err", None) or 0.0 + + if result.mag_known is None: + logger.warning(f"Relative Photometry over {redf}: calibrator {result.astrosource} has no magnitude for band {redf.band}.") + result.mag_zp = np.nan + result.mag_zp_err = np.nan + else: + result.mag_zp = result.mag_known - result.mag_inst + result.mag_zp_err = math.sqrt(result.mag_inst_err**2 + result.mag_known_err**2) + else: + # if it is not a calibrator, we can not save the COMPUTED zp, it will be computed and the USED zp will be stored. + result.mag_zp = None + result.mag_zp_err = None + + result.save() + + photopolresult_L.append(result) + + # 3. 
Average the zero points + + calib_mag_zp_array = np.array([result.mag_zp or np.nan for result in photopolresult_L if result.astrosource.srctype == SRCTYPES.CALIBRATOR]) # else it fills with None also and the dtype becomes object + calib_mag_zp_array = calib_mag_zp_array[~np.isnan(calib_mag_zp_array)] + + calib_mag_zp_array_err = np.array([result.mag_zp_err or np.nan for result in photopolresult_L if result.astrosource.srctype == SRCTYPES.CALIBRATOR]) + calib_mag_zp_array_err = calib_mag_zp_array_err[~np.isnan(calib_mag_zp_array_err)] + + if len(calib_mag_zp_array) == 0: + logger.error(f"{redf}: can not perform relative photometry without any calibrators for this reduced fit. Deleting results.") + [result.delete() for result in redf.photopolresults.all()] + return #raise Exception(f"{self}: can not perform relative photometry without any calibrators for this reduced fit.") + + zp_avg = np.nanmean(calib_mag_zp_array) + zp_std = np.nanstd(calib_mag_zp_array) + + zp_err = math.sqrt(np.sum(calib_mag_zp_array_err**2)) / len(calib_mag_zp_array_err) + zp_err = math.sqrt(zp_std**2 + zp_err**2) + + # 4. Compute the calibrated magnitudes + + for result in photopolresult_L: + + if result.astrosource.srctype == SRCTYPES.CALIBRATOR: + continue + + # save the zp (to be) used + result.mag_zp = zp_avg + result.mag_zp_err = zp_err + + # compute the calibrated magnitude + result.mag = zp_avg + result.mag_inst + result.mag_err = math.sqrt(result.mag_inst_err**2 + zp_err**2) + + result.save() + + # 5. Save the results + + for result in photopolresult_L: + result.save() + diff --git a/iop4lib/telescopes/cahat220.py b/iop4lib/telescopes/cahat220.py index 21661acc..b2eecc1e 100644 --- a/iop4lib/telescopes/cahat220.py +++ b/iop4lib/telescopes/cahat220.py @@ -43,10 +43,7 @@ class CAHAT220(Telescope, metaclass=ABCMeta): abbrv = "T220" telescop_kw = "CA-2.2" - # telescope / instrument specific properties - - cafos_arcsec_per_pix = 0.530 - cafos_gain_e_adu = 1.45 + # telescope specific properties # telescope specific methods @@ -139,355 +136,3 @@ def download_rawfits(cls, rawfits): ftp.quit() except Exception as e: raise Exception(f"Error downloading {rawfits}: {e}.") - - @classmethod - def classify_juliandate_rawfit(cls, rawfit): - """ - CAHA T220 has a DATE keyword in the header in ISO format. - """ - import astropy.io.fits as fits - from astropy.time import Time - date = fits.getheader(rawfit.filepath, ext=0)["DATE"] - jd = Time(date, format='isot', scale='utc').jd - rawfit.juliandate = jd - - @classmethod - def classify_imgtype_rawfit(cls, rawfit): - """ - CAHA T220 has a IMAGETYP keyword in the header: flat, bias, science - """ - from iop4lib.db.rawfit import RawFit - import astropy.io.fits as fits - - with fits.open(rawfit.filepath) as hdul: - if hdul[0].header['IMAGETYP'] == 'flat': - rawfit.imgtype = IMGTYPES.FLAT - elif hdul[0].header['IMAGETYP'] == 'bias': - rawfit.imgtype = IMGTYPES.BIAS - elif hdul[0].header['IMAGETYP'] == 'science': - rawfit.imgtype = IMGTYPES.LIGHT - else: - logger.error(f"Unknown image type for {rawfit.fileloc}.") - rawfit.imgtype = IMGTYPES.ERROR - raise ValueError - - @classmethod - def classify_band_rawfit(cls, rawfit): - """ - INSFLNAM is BesselR ?? 
- """ - - from iop4lib.db.rawfit import RawFit - import astropy.io.fits as fits - - if 'INSFLNAM' in rawfit.header: - if rawfit.header['INSFLNAM'] == 'BessellR': - rawfit.band = BANDS.R - else: - logger.error(f"{rawfit}: unknown filter {rawfit.header['INSFLNAM']}.") - rawfit.band = BANDS.ERROR - raise ValueError(f"{rawfit}: unknown filter {rawfit.header['INSFLNAM']}.") - else: - rawfit.band = BANDS.ERROR - raise ValueError(f"{rawfit}: INSFLNAM keyword not present.") - - @classmethod - def classify_obsmode_rawfit(cls, rawfit): - """ - For CAHA T220, if we are dealing with polarimetry, we have: - INSTRMOD: Polarizer - INSPOFPI Wollaston - INSPOROT 0.0, 22.48, 67.48 - - I HAVE NOT FOUND YET OTHER VALUES THAT ARE NOT THIS, PRINT A WARNING OTHERWISE. - """ - from iop4lib.db.rawfit import RawFit - - if rawfit.header['INSTRMOD'] == 'Polarizer' and rawfit.header['INSPOFPI'] == 'Wollaston': - rawfit.obsmode = OBSMODES.POLARIMETRY - rawfit.rotangle = float(rawfit.header['INSPOROT']) - - if rawfit.imgtype == IMGTYPES.BIAS: - logger.debug(f"Probably not important, but {rawfit.fileloc} is BIAS but has polarimetry keywords, does it makes sense?") - else: - logger.error("Not implemented, please check the code.") - - @classmethod - def get_header_hintcoord(cls, rawfit): - """ Get the position hint from the FITS header as a coordinate. - - Images from CAFOS T2.2 have RA, DEC in the header, both in degrees. - """ - - hint_coord = SkyCoord(Angle(rawfit.header['RA'], unit=u.deg), Angle(rawfit.header['DEC'], unit=u.deg), frame='icrs') - return hint_coord - - @classmethod - def get_astrometry_position_hint(cls, rawfit, allsky=False, n_field_width=1.5): - """ Get the position hint from the FITS header as an astrometry.PositionHint object. """ - - hintcoord = cls.get_header_hintcoord(rawfit) - - if allsky: - hintsep = 180 - else: - hintsep = n_field_width * u.Quantity("16 arcmin").to_value(u.deg) # 16 arcmin is the full field size of the CAFOS T2.2, our cut is smaller (6.25, 800x800, but the pointing kws might be from anywhere in the full field) - - return astrometry.PositionHint(ra_deg=hintcoord.ra.deg, dec_deg=hintcoord.dec.deg, radius_deg=hintsep) - - @classmethod - def get_astrometry_size_hint(cls, rawfit): - """ Get the size hint for this telescope / rawfit. - - from http://w3.caha.es/CAHA/Instruments/CAFOS/cafos22.html - pixel size in arcmin is around : ~0.530 arcsec - field size (diameter is) 16.0 arcmin (for 2048 pixels) - it seems that this is for 2048x2048 images, our images are 800x800 but the fitsheader DATASEC - indicates it is a cut - """ - - return astrometry.SizeHint(lower_arcsec_per_pixel=0.95*cls.cafos_arcsec_per_pix, upper_arcsec_per_pixel=1.05*cls.cafos_arcsec_per_pix) - - @classmethod - def get_gain_e_adu(cls, rawfit): - return cls.cafos_gain_e_adu - - @classmethod - def compute_relative_polarimetry(cls, polarimetry_group): - """ Computes the relative polarimetry for a polarimetry group for CAHA T220 observations. - - .. note:: - CAHA-T220 Polarimetry observations are done with a system consisting of a half-wave plate (HW) and a Wollaston prism (P). - - The rotation angle theta_i refers to the angle theta_i between the HW plate and its fast (extraordinary) axes. - - The effect of the HW is to rotate the polarization vector by 2*theta_i, and the effect of the Wollaston prism is to split - the beam into two beams polarized in orthogonal directions (ordinary and extraordinary). - - An input polarized beam with direction v will be rotated by HW by 2*theta_i. 
The O and E fluxes will be the projections of the - rotated vector onto the ordinary and extraordinary directions of the Wollaston prism (in absolute values since -45 and 45 - polarization directions are equivalent). A way to write this is: - - fo(theta_i) = abs( ) = abs ( ), where <,> denotes the scalar product and R is the rotation matrix. - - Therefore the following observed fluxes should be the same (ommiting the abs for clarity): - - fo(0º) = = = = = fe(45º) - fo(22º) = = = = = = fe(67º) - fo(45º) = = = = - = fe(0º) - fo(67º) = = = = = = fe(22º) - - See https://arxiv.org/pdf/astro-ph/0509153.pdf (doi 10.1086/497581) for the formulas relating these fluxes to - the Stokes parameters. - - .. note:: - This rotation angle has a different meaning than for OSN-T090 Polarimetry observations. For them, it is the rotation angle of a polarized filter - with respect to some reference direction. Therefore we have the equivalencies (again ommiting the abs for clarity): - - OSN(45º) = = = = fE(22º) = fO(67º) - OSN(90º) = = = fO(45º) - OSN(-45º) = OSN(135º) = abs() = = = fE(67º) = fO(22º) - OSN(0º) = = = fO(0º) - """ - - from iop4lib.db.aperphotresult import AperPhotResult - from iop4lib.db.photopolresult import PhotoPolResult - from iop4lib.utils import get_target_fwhm_aperpix - - # Perform some checks on the group - - ## get the band of the group - - bands = [reducedfit.band for reducedfit in polarimetry_group] - - if len(set(bands)) == 1: - band = bands[0] - else: # should not happens - raise Exception(f"Can not compute relative polarimetry for a group with different bands: {bands}") - - ## check obsmodes - - if not all([reducedfit.obsmode == OBSMODES.POLARIMETRY for reducedfit in polarimetry_group]): - raise Exception(f"This method is only for polarimetry images.") - - ## check sources in the fields - - sources_in_field_qs_list = [reducedfit.sources_in_field.all() for reducedfit in polarimetry_group] - group_sources = set.intersection(*map(set, sources_in_field_qs_list)) - - if len(group_sources) == 0: - logger.error("No common sources in field for all polarimetry groups.") - return - - if group_sources != set.union(*map(set, sources_in_field_qs_list)): - logger.warning(f"Sources in field do not match for all polarimetry groups: {set.difference(*map(set, sources_in_field_qs_list))}") - - ## check rotation angles - - rot_angles_available = set([redf.rotangle for redf in polarimetry_group]) - rot_angles_required = {0.0, 22.48, 44.98, 67.48} - - if not rot_angles_available.issubset(rot_angles_required): - logger.warning(f"Rotation angles missing: {rot_angles_required - rot_angles_available}") - - # 1. Compute all aperture photometries - - target_fwhm, aperpix, r_in, r_out = get_target_fwhm_aperpix(polarimetry_group, reductionmethod=REDUCTIONMETHODS.RELPOL) - - logger.debug(f"Computing aperture photometries for the {len(polarimetry_group)} reducedfits in the group with target aperpix {aperpix:.1f}.") - - for reducedfit in polarimetry_group: - reducedfit.compute_aperture_photometry(aperpix, r_in, r_out) - - # 2. 
Compute relative polarimetry for each source (uses the computed aperture photometries) - - logger.debug("Computing relative polarimetry.") - - photopolresult_L = list() - - for astrosource in group_sources: - logger.debug(f"Computing relative polarimetry for {astrosource}.") - - # if any angle is missing for some pair, it uses the equivalent angle of the other pair - - qs = AperPhotResult.objects.filter(reducedfit__in=polarimetry_group, astrosource=astrosource, aperpix=aperpix, flux_counts__isnull=False) - - equivs = ((('O',0.0), ('E',44.98)), - (('O',22.48), ('E',67.48)), - (('O',44.98), ('E',0.0)), - (('O',67.48), ('E',22.48)), - (('E',0.0), ('O',44.98)), - (('E',22.48), ('O',67.48)), - (('E',44.98), ('O',0.0)), - (('E',67.48), ('O',22.48))) - - flux_D = dict() - for equiv in equivs: - if qs.filter(pairs=equiv[0][0], reducedfit__rotangle=equiv[0][1]).exists(): - flux_D[(equiv[0][0], equiv[0][1])] = qs.filter(pairs=equiv[0][0], reducedfit__rotangle=equiv[0][1]).values_list("flux_counts", "flux_counts_err").last() - elif qs.filter(pairs=equiv[1][0], reducedfit__rotangle=equiv[1][1]).exists(): - logger.warning(f"Missing flux for {astrosource} {equiv[0][0]} {equiv[0][1]}, using {equiv[1][0]} {equiv[1][1]}") - flux_D[(equiv[0][0], equiv[0][1])] = qs.filter(pairs=equiv[1][0], reducedfit__rotangle=equiv[1][1]).values_list("flux_counts", "flux_counts_err").last() - else: - logger.error(f"Missing flux for {astrosource} {equiv[0][0]} {equiv[0][1]} and {equiv[1][0]} {equiv[1][1]}") - return - - flux_O_0, flux_O_0_err = flux_D[('O',0.0)] - flux_O_22, flux_O_22_err = flux_D[('O',22.48)] - flux_O_45, flux_O_45_err = flux_D[('O',44.98)] - flux_O_67, flux_O_67_err = flux_D[('O',67.48)] - flux_E_0, flux_E_0_err = flux_D[('E',0.0)] - flux_E_22, flux_E_22_err = flux_D[('E',22.48)] - flux_E_45, flux_E_45_err = flux_D[('E',44.98)] - flux_E_67, flux_E_67_err = flux_D[('E',67.48)] - - fluxes_O = np.array([flux_O_0, flux_O_22, flux_O_45, flux_O_67]) - fluxes_E = np.array([flux_E_0, flux_E_22, flux_E_45, flux_E_67]) - - # logger.debug(f"Fluxes_O: {fluxes_O}") - # logger.debug(f"Fluxes_E: {fluxes_E}") - - fluxes = (fluxes_O + fluxes_E) / 2. - flux_mean = fluxes.mean() - flux_err = fluxes.std() / math.sqrt(len(fluxes)) - - RQ = np.sqrt((flux_O_0 / flux_E_0) / (flux_O_45 / flux_E_45)) - dRQ = RQ / 2 * math.sqrt((flux_O_0_err / flux_O_0) ** 2 + (flux_E_0_err / flux_E_0) ** 2 + (flux_O_45_err / flux_O_45) ** 2 + (flux_E_45_err / flux_E_45) ** 2) - - RU = np.sqrt((flux_O_22 / flux_E_22) / (flux_O_67 / flux_E_67)) - dRU = RU / 2 * math.sqrt((flux_O_22_err / flux_O_22) ** 2 + (flux_E_22_err / flux_E_22) ** 2 + (flux_O_67_err / flux_O_67) ** 2 + (flux_E_67_err / flux_E_67) ** 2) - - Q_I = (RQ - 1) / (RQ + 1) - dQ_I = math.fabs( RQ / (RQ + 1) ** 2 * dRQ) - U_I = (RU - 1) / (RU + 1) - dU_I = math.fabs( RU / (RU + 1) ** 2 * dRU) - - P = math.sqrt(Q_I ** 2 + U_I ** 2) - dP = 1/P * math.sqrt(Q_I**2 * dQ_I**2 + U_I**2 * dU_I**2) - - Theta_0 = 0 - - if Q_I >= 0: - Theta_0 = math.pi - if U_I > 0: - Theta_0 = -1 * math.pi - # if Q_I < 0: - # Theta_0 = math.pi / 2 - - Theta = 0.5 * math.degrees(math.atan(U_I / Q_I) + Theta_0) - dTheta = 0.5 * 180.0 / math.pi * (1 / (1 + (U_I/Q_I) ** 2)) * math.sqrt( (dU_I/Q_I)**2 + (U_I*dQ_I/Q_I**2)**2 ) - - # compute instrumental magnitude - - if flux_mean <= 0.0: - logger.warning(f"{polarimetry_group=}: negative flux mean encountered while relative polarimetry for {astrosource=} ??!! 
It will be nan, but maybe we should look into this...") - - mag_inst = -2.5 * np.log10(flux_mean) # slower than math.log10 but returns nan when flux < 0 instead of throwing error (see https://github.com/juanep97/iop4/issues/24) - mag_inst_err = math.fabs(2.5 / math.log(10) * flux_err / flux_mean) - - # if the source is a calibrator, compute also the zero point - - if astrosource.srctype == SRCTYPES.CALIBRATOR: - mag_known = getattr(astrosource, f"mag_{band}") - mag_known_err = getattr(astrosource, f"mag_{band}_err", None) or 0.0 - - if mag_known is None: - logger.warning(f"Calibrator {astrosource} has no magnitude for band {band}.") - mag_zp = np.nan - mag_zp_err = np.nan - else: - mag_zp = mag_known - mag_inst - # mag_zp_err = math.sqrt(mag_known_err ** 2 + mag_inst_err ** 2) - mag_zp_err = math.fabs(mag_inst_err) # do not add error on literature magnitude - else: - mag_zp = None - mag_zp_err = None - - # save the results - - result = PhotoPolResult.create(reducedfits=polarimetry_group, - astrosource=astrosource, - reduction=REDUCTIONMETHODS.RELPOL, - mag_inst=mag_inst, mag_inst_err=mag_inst_err, mag_zp=mag_zp, mag_zp_err=mag_zp_err, - flux_counts=flux_mean, p=P, p_err=dP, chi=Theta, chi_err=dTheta, - aperpix=aperpix) - - photopolresult_L.append(result) - - - # 3. Get average zero point from zp of all calibrators in the group - - calib_mag_zp_array = np.array([result.mag_zp or np.nan for result in photopolresult_L if result.astrosource.srctype == SRCTYPES.CALIBRATOR]) # else it fills with None also and the dtype becomes object - calib_mag_zp_array = calib_mag_zp_array[~np.isnan(calib_mag_zp_array)] - - calib_mag_zp_array_err = np.array([result.mag_zp_err or np.nan for result in photopolresult_L if result.astrosource.srctype == SRCTYPES.CALIBRATOR]) - calib_mag_zp_array_err = calib_mag_zp_array_err[~np.isnan(calib_mag_zp_array_err)] - - if len(calib_mag_zp_array) == 0: - logger.error(f"Can not compute magnitude during relative photo-polarimetry without any calibrators for this reduced fit.") - - zp_avg = np.nanmean(calib_mag_zp_array) - zp_std = np.nanstd(calib_mag_zp_array) - - zp_err = np.sqrt(np.nansum(calib_mag_zp_array_err ** 2)) / len(calib_mag_zp_array_err) - zp_err = math.sqrt(zp_err ** 2 + zp_std ** 2) - - # 4. Compute the calibrated magnitudes for non-calibrators in the group using the averaged zero point - - for result in photopolresult_L: - - if result.astrosource.srctype == SRCTYPES.CALIBRATOR: - continue - - result.mag_zp = zp_avg - result.mag_zp_err = zp_err - - result.mag = result.mag_inst + zp_avg - result.mag_err = math.sqrt(result.mag_inst_err ** 2 + zp_err ** 2) - - result.save() - - # 5. 
Save results - for result in photopolresult_L: - result.save() - diff --git a/iop4lib/telescopes/osnt090.py b/iop4lib/telescopes/osnt090.py index ed53033a..f07f3971 100644 --- a/iop4lib/telescopes/osnt090.py +++ b/iop4lib/telescopes/osnt090.py @@ -33,11 +33,7 @@ class OSNT090(Telescope, metaclass=ABCMeta): abbrv = "T090" telescop_kw = "T90-OSN" - # telescope / instrument specific properties - - andort90_field_width_arcmin = 13.2 - andort90_arcsec_per_pix = 0.387 - andort90_gain_e_adu = 4.5 + # telescope specific properties ftp_address = iop4conf.osn_t090_address ftp_user = iop4conf.osn_t090_user @@ -140,402 +136,3 @@ def check_telescop_kw(cls, rawfit): return super().check_telescop_kw(rawfit) - - @classmethod - def classify_juliandate_rawfit(cls, rawfit): - """ - OSN-T090 fits has JD keyword - """ - import astropy.io.fits as fits - jd = fits.getheader(rawfit.filepath, ext=0)["JD"] - rawfit.juliandate = jd - - @classmethod - def classify_imgtype_rawfit(cls, rawfit): - """ - OSN-T090 fits has IMAGETYP keyword: FLAT, BIAS, LIGHT - - .. note:: - **Sometimes, the IMAGETYP keyword is wrong**, it has LIGHT on it but the filename and the OBJECT keyword contain the word "Flat". In those ocassions, it should be classified as - a FLAT. - """ - from iop4lib.db.rawfit import RawFit - import astropy.io.fits as fits - - with fits.open(rawfit.filepath) as hdul: - if hdul[0].header['IMAGETYP'] == 'FLAT': - rawfit.imgtype = IMGTYPES.FLAT - elif hdul[0].header['IMAGETYP'] == 'BIAS': - rawfit.imgtype = IMGTYPES.BIAS - elif hdul[0].header['IMAGETYP'] == 'LIGHT': - rawfit.imgtype = IMGTYPES.LIGHT - # workarounds for wrong keyword in OSN (see note in docstring) - if "FLAT" in hdul[0].header["OBJECT"].upper() and "FLAT" in rawfit.filename.upper(): - rawfit.imgtype = IMGTYPES.FLAT - elif "BIAS" in hdul[0].header["OBJECT"].upper() and "BIAS" in rawfit.filename.upper(): - rawfit.imgtype = IMGTYPES.BIAS - else: - logger.error(f"Unknown image type for {rawfit.fileloc}.") - rawfit.imgtype = IMGTYPES.ERROR - raise ValueError - - @classmethod - def classify_band_rawfit(cls, rawfit): - """ - OSN Files have no FILTER keyword if they are BIAS, FILTER=Clear if they are FLAT, and FILTER=FilterName if they are LIGHT. - For our DB, we have R, U, ..., None, ERROR. - - For polarimetry, which is done by taking four images with the R filter at different angles, we have R_45, R0, R45, R90. - """ - - from iop4lib.db.rawfit import RawFit - - if 'FILTER' not in rawfit.header: - if rawfit.imgtype == IMGTYPES.BIAS: - rawfit.band = BANDS.NONE - else: - rawfit.band = BANDS.ERROR - raise ValueError(f"Missing FILTER keyword for {rawfit.fileloc} which is not a bias (it is a {rawfit.imgtype}).") - elif rawfit.header['FILTER'] in {"Clear", ""}: - if rawfit.imgtype == IMGTYPES.FLAT: - rawfit.band = BANDS.NONE - else: - rawfit.band = BANDS.ERROR - raise ValueError(f"FILTER keyword is 'Clear' for {rawfit.fileloc} which is not a flat (it is a {rawfit.imgtype}).") - else: - rawfit.band = rawfit.header['FILTER'][0] # First letter of the filter name (R, U, ...) includes cases as R45, R_45, etc - - @classmethod - def classify_obsmode_rawfit(cls, rawfit): - """ - In OSN, we only have polarimetry for filter R, and it is indicated as R_45, R0, R45, R90 (-45, 0, 45 and 90 degrees). They correspond - to the different angles of the polarimeter. - - For photometry, the filter keyword willl be simply the letter R, U, etc. - - The values for angles are -45, 0, 45 and 90. - - Lately we have seen "R-45" instead of "R_45", so we have to take care of that too. 
- """ - - from iop4lib.db.rawfit import RawFit - import re - - if rawfit.band == BANDS.ERROR: - raise ValueError("Cannot classify obsmode if band is ERROR.") - - if rawfit.band == BANDS.R: - if rawfit.header['FILTER'] == "R": - rawfit.obsmode = OBSMODES.PHOTOMETRY - else: - logger.debug("Band is R, but FILTER is not exactly R, for OSN this must mean it is polarimetry. Trying to extract angle from FILTER keyword.") - - rawfit.obsmode = OBSMODES.POLARIMETRY - - if rawfit.header['FILTER'] == "R_45" or rawfit.header['FILTER'] == "R-45": - rawfit.rotangle = -45 - elif rawfit.header['FILTER'] == "R0": - rawfit.rotangle = 0 - elif rawfit.header['FILTER'] == "R45" or rawfit.header['FILTER'] == "R+45": - rawfit.rotangle = 45 - elif rawfit.header['FILTER'] == "R90": - rawfit.rotangle = 90 - else: - raise ValueError(f"Cannot extract angle from FILTER keyword '{rawfit.header['FILTER']}'.") - else: - logger.debug("Band is not R, assuming it is photometry.") - rawfit.obsmode = OBSMODES.PHOTOMETRY - - @classmethod - def get_header_hintcoord(cls, rawfit): - """ Get the position hint from the fits header as a SkyCoord. - - OSN T090 / AndorT090 have keywords OBJECT, OBJECTRA, OBJECTDEC in the header; e.g: - OBJECT TXS0506 - OBJCTRA 05 09 20 ---> this can be input with unit u.hourangle - OBJCTDEC +05 41 16 ---> this can be input with unit u.deg - """ - - hint_coord = SkyCoord(Angle(rawfit.header['OBJCTRA'], unit=u.hourangle), Angle(rawfit.header['OBJCTDEC'], unit=u.degree), frame='icrs') - return hint_coord - - @classmethod - def get_astrometry_position_hint(cls, rawfit, allsky=False, n_field_width=1.5): - """ Get the position hint from the FITS header as an astrometry.PositionHint.""" - - hintcoord = cls.get_header_hintcoord(rawfit) - - if allsky: - hintsep = 180.0 - else: - hintsep = (n_field_width * cls.andort90_field_width_arcmin*u.Unit("arcmin")).to_value(u.deg) - - return astrometry.PositionHint(ra_deg=hintcoord.ra.deg, dec_deg=hintcoord.dec.deg, radius_deg=hintsep) - - @classmethod - def get_astrometry_size_hint(cls, rawfit): - """ Get the size hint for this telescope / rawfit. - - According to OSN T090 camera information (https://www.osn.iaa.csic.es/page/camaras-ccdt150-y-ccdt90) - the camera pixels are 0.387as/px and it has a field of view of 13,20' x 13,20'. So we provide close values - for the hint. If the files are 1x1 it will be 0.387as/px, if 2x2 it will be twice. - - For DIPOL-1 in OSN-T090, according to preliminary investigation of OSN crew is: - Las posiciones que he tomado y el ángulo de rotación en cada caso son estos: - Dec= -10º HA=+3h rotación=-177.3º - Zenit rotación=-177.3º - Dec=+60º HA=-6h rotación=-177.7º - Dec=+70º HA=+5h rotación=-177.2º - - El campo es de 9.22 x 6.28 arcmin y el tamaño de pixel de 0.134"/pix - - El ángulo de la imagen cambia muy poco entre las posiciones muy separadas del telescopio, y es de 177.5º ± 0.3º - Así que como mucho se produce un error de ± 0.3º en las imágenes, y el punto cero es de 2.5º. 
- """ - - if rawfit.instrument == INSTRUMENTS.DIPOL1: - - return astrometry.SizeHint(lower_arcsec_per_pixel=0.95*0.134, upper_arcsec_per_pixel=1.05*0.134) - - elif rawfit.instrument == INSTRUMENTS.AndorT90: - - if rawfit.header['NAXIS1'] == 2048: - return astrometry.SizeHint(lower_arcsec_per_pixel=0.95*cls.andort90_arcsec_per_pix, upper_arcsec_per_pixel=1.05*cls.andort90_arcsec_per_pix) - elif rawfit.header['NAXIS1'] == 1024: - return astrometry.SizeHint(lower_arcsec_per_pixel=2*0.95*cls.andort90_arcsec_per_pix, upper_arcsec_per_pixel=2*1.05*cls.andort90_arcsec_per_pix) - - else: - raise ValueError("Unexpected or unknown instrument for OSN-T090") - - - - @classmethod - def get_gain_e_adu(cls, rawfit): - if rawfit.instrument == INSTRUMENTS.AndorT90: - return cls.andort90_gain_e_adu - elif rawfit.instrument == INSTRUMENTS.DIPOL1: - logger.error("DIPOL-1 gain not implemented yet... returning 1.0") - return 1.0 - - @classmethod - def compute_relative_polarimetry(cls, polarimetry_group): - """ Computes the relative polarimetry for a polarimetry group for OSNT090 observations. - - .. note:: - The rotation angle in OSNT090 refers to the angle between the polarized filter and some reference direction. This is different - to the rotation angle for CAHA-T220 which is the angle between the half-wave plate (HW) and its fast (extraordinary) axis. See the docs - for ``CAHAT220.compute_relative_polarimetry`` for more information. - - Instrumental polarization is corrected. Currently values are hardcoded in qoff, uoff, dqoff, duoff, Phi, dPhi (see code), - but the values without any correction are stored in the DB so the correction can be automatically obtained in the future. - """ - - from iop4lib.db.aperphotresult import AperPhotResult - from iop4lib.db.photopolresult import PhotoPolResult - from iop4lib.utils import get_target_fwhm_aperpix - - logger.debug("Computing OSN-T090 relative polarimetry for group: %s", "".join(map(str,polarimetry_group))) - - # Instrumental polarization - - ## values for T090 TODO: update them manually or (preferibly) dinamically (TODO) - ## to compute the instrumental polarization we need to get the mean of the Q and U images, use zero - ## (done in the _X_nocorr variables) - - qoff = 0.0579 - uoff = 0.0583 - dqoff = 0.003 - duoff = 0.0023 - Phi = math.radians(-18) - dPhi = math.radians(0.001) - - # Perform some checks on the group - - ## get the band of the group - - bands = [reducedfit.band for reducedfit in polarimetry_group] - - if len(set(bands)) == 1: - band = bands[0] - else: # should not happens - raise Exception(f"Can not compute relative polarimetry for a group with different bands: {bands}") - - ## check obsmodes - - if not all([reducedfit.obsmode == OBSMODES.POLARIMETRY for reducedfit in polarimetry_group]): - raise Exception(f"This method is only for polarimetry images.") - - ## check sources in the fields - - sources_in_field_qs_list = [reducedfit.sources_in_field.all() for reducedfit in polarimetry_group] - group_sources = set.intersection(*map(set, sources_in_field_qs_list)) - - if len(group_sources) == 0: - logger.error("No common sources in field for all polarimetry groups.") - return - - if group_sources != set.union(*map(set, sources_in_field_qs_list)): - logger.warning(f"Sources in field do not match for all polarimetry groups: {set.difference(*map(set, sources_in_field_qs_list))}") - - ## check rotation angles - - rot_angles_available = set([redf.rotangle for redf in polarimetry_group]) - rot_angles_required = {0.0, 45.0, 90.0, -45.0} - - if not 
rot_angles_required.issubset(rot_angles_available): - logger.error(f"Rotation angles missing: {rot_angles_required - rot_angles_available}; returning early.") - return - - # 1. Compute all aperture photometries - - target_fwhm, aperpix, r_in, r_out = get_target_fwhm_aperpix(polarimetry_group, reductionmethod=REDUCTIONMETHODS.RELPOL) - - logger.debug(f"Computing aperture photometries for the {len(polarimetry_group)} reducedfits in the group with target {aperpix:.1f}.") - - for reducedfit in polarimetry_group: - reducedfit.compute_aperture_photometry(aperpix, r_in, r_out) - - # 2. Compute relative polarimetry for each source (uses the computed aperture photometries) - - logger.debug("Computing relative polarimetry.") - - photopolresult_L = list() - - for astrosource in group_sources: - - flux_0 = AperPhotResult.objects.get(reducedfit__in=polarimetry_group, astrosource=astrosource, aperpix=aperpix, pairs="O", reducedfit__rotangle=0.0).flux_counts - flux_0_err = AperPhotResult.objects.get(reducedfit__in=polarimetry_group, astrosource=astrosource, pairs="O", aperpix=aperpix, reducedfit__rotangle=0.0).flux_counts_err - - flux_45 = AperPhotResult.objects.get(reducedfit__in=polarimetry_group, astrosource=astrosource, pairs="O", aperpix=aperpix, reducedfit__rotangle=45.0).flux_counts - flux_45_err = AperPhotResult.objects.get(reducedfit__in=polarimetry_group, astrosource=astrosource, pairs="O", aperpix=aperpix, reducedfit__rotangle=45.0).flux_counts_err - - flux_90 = AperPhotResult.objects.get(reducedfit__in=polarimetry_group, astrosource=astrosource, pairs="O", aperpix=aperpix, reducedfit__rotangle=90.0).flux_counts - flux_90_err = AperPhotResult.objects.get(reducedfit__in=polarimetry_group, astrosource=astrosource, pairs="O", aperpix=aperpix, reducedfit__rotangle=90.0).flux_counts_err - - flux_n45 = AperPhotResult.objects.get(reducedfit__in=polarimetry_group, astrosource=astrosource, pairs="O", aperpix=aperpix, reducedfit__rotangle=-45.0).flux_counts - flux_n45_err = AperPhotResult.objects.get(reducedfit__in=polarimetry_group, astrosource=astrosource, pairs="O", aperpix=aperpix, reducedfit__rotangle=-45.0).flux_counts_err - - # from IOP3 polarimetry_osn() : - - fluxes = np.array([flux_0, flux_45, flux_90, flux_n45]) - flux_mean = fluxes.mean() - flux_err = fluxes.std() / math.sqrt(len(fluxes)) - - qraw = (flux_0 - flux_90) / (flux_0 + flux_90) - uraw = (flux_45 - flux_n45) / (flux_45 + flux_n45) - - #Applying error propagation... 
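        # The two expressions below are standard uncorrelated error propagation for a
        # ratio: writing q_raw = A/B with A = F_0 - F_90 and B = F_0 + F_90 (and the
        # analogous u_raw built from F_45 and F_-45), sigma_A^2 = sigma_B^2 =
        # sigma_F0^2 + sigma_F90^2, so that
        #   dq_raw / q_raw = sqrt( (sigma_A/A)^2 + (sigma_B/B)^2 ),
        # neglecting the covariance between A and B.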
- - dqraw = qraw * math.sqrt(((flux_0_err**2+flux_90_err**2)/(flux_0+flux_90)**2)+(((flux_0_err**2+flux_90_err**2))/(flux_0-flux_90)**2)) - duraw = uraw * math.sqrt(((flux_45_err**2+flux_n45_err**2)/(flux_45+flux_n45)**2)+(((flux_45_err**2+flux_n45_err**2))/(flux_45-flux_n45)**2)) - - qc = qraw - qoff - uc = uraw - uoff - - dqc = math.sqrt(dqraw**2 + dqoff**2) - duc = math.sqrt(duraw**2 + duoff**2) - - q = qc*math.cos(2*Phi) - uc*math.sin(2*Phi) - u = qc*math.sin(2*Phi) + uc*math.cos(2*Phi) - - dqa = qc*math.cos(2*Phi) * math.sqrt((dqc/qc)**2+((2*dPhi*math.sin(2*Phi))/(math.cos(2*Phi)))**2) - dqb = uc*math.sin(2*Phi) * math.sqrt((duc/uc)**2+((2*dPhi*math.cos(2*Phi))/(math.sin(2*Phi)))**2) - dua = qc*math.sin(2*Phi) * math.sqrt((dqc/qc)**2+((2*dPhi*math.cos(2*Phi))/(math.sin(2*Phi)))**2) - dub = uc*math.cos(2*Phi) * math.sqrt((duc/uc)**2+((2*dPhi*math.sin(2*Phi))/(math.cos(2*Phi)))**2) - - dq = np.sqrt(dqa**2+dqb**2) - du = np.sqrt(dua**2+dub**2) - - P = math.sqrt(q**2 + u**2) - dP = P * (1/(q**2+u**2)) * math.sqrt((q*dq)**2+(u*du)**2) - - Theta_0 = 0 - Theta = (1/2) * math.degrees(math.atan2(u,q) + Theta_0) - dTheta = (0.5 * 180.0 / math.pi) * dP/P - - # compute also non-corrected values for computation of instrumental polarization - - _Phi_nocorr = 0 # no rotation correction? - _qc_nocorr = qraw # no offset correction - _uc_nocorr = uraw # no offset correction - _q_nocorr = _qc_nocorr*math.cos(2*_Phi_nocorr) - _uc_nocorr*math.sin(2*_Phi_nocorr) - _u_nocorr = _qc_nocorr*math.sin(2*_Phi_nocorr) + _uc_nocorr*math.cos(2*_Phi_nocorr) - _p_nocorr = math.sqrt(_q_nocorr**2 + _u_nocorr**2) - _Theta_0_nocorr = 0 - _Theta_nocorr = (1/2) * math.degrees(math.atan2(_u_nocorr,_q_nocorr) + _Theta_0_nocorr) - _x_px, _y_px = astrosource.coord.to_pixel(polarimetry_group[0].wcs) - - # compute instrumental magnitude (same as for CAHA) - - if flux_mean <= 0.0: - logger.warning(f"{polarimetry_group=}: negative flux mean encountered while relative polarimetry for {astrosource=} ??!! It will be nan, but maybe we should look into this...") - - mag_inst = -2.5 * np.log10(flux_mean) # slower than math.log10 but returns nan when flux < 0 instead of throwing error (see https://github.com/juanep97/iop4/issues/24) - mag_inst_err = math.fabs(2.5 / math.log(10) * flux_err / flux_mean) - - # if the source is a calibrator, compute also the zero point - - if astrosource.srctype == SRCTYPES.CALIBRATOR: - mag_known = getattr(astrosource, f"mag_{band}") - mag_known_err = getattr(astrosource, f"mag_{band}_err", None) or 0.0 - - if mag_known is None: - logger.warning(f"Calibrator {astrosource} has no magnitude for band {band}.") - mag_zp = np.nan - mag_zp_err = np.nan - else: - mag_zp = mag_known - mag_inst - mag_zp_err = math.sqrt(mag_known_err ** 2 + mag_inst_err ** 2) - else: - mag_zp = None - mag_zp_err = None - - # save the results - - result = PhotoPolResult.create(reducedfits=polarimetry_group, - astrosource=astrosource, - reduction=REDUCTIONMETHODS.RELPOL, - mag_inst=mag_inst, mag_inst_err=mag_inst_err, mag_zp=mag_zp, mag_zp_err=mag_zp_err, - flux_counts=flux_mean, p=P, p_err=dP, chi=Theta, chi_err=dTheta, - _x_px=_x_px, _y_px=_y_px, _q_nocorr=_q_nocorr, _u_nocorr=_u_nocorr, _p_nocorr=_p_nocorr, _chi_nocorr=_Theta_nocorr, - aperpix=aperpix) - - photopolresult_L.append(result) - - - # 3. 
Get average zero point from zp of all calibrators in the group - - calib_mag_zp_array = np.array([result.mag_zp or np.nan for result in photopolresult_L if result.astrosource.srctype == SRCTYPES.CALIBRATOR]) # else it fills with None also and the dtype becomes object - calib_mag_zp_array = calib_mag_zp_array[~np.isnan(calib_mag_zp_array)] - - calib_mag_zp_array_err = np.array([result.mag_zp_err or np.nan for result in photopolresult_L if result.astrosource.srctype == SRCTYPES.CALIBRATOR]) - calib_mag_zp_array_err = calib_mag_zp_array_err[~np.isnan(calib_mag_zp_array_err)] - - if len(calib_mag_zp_array) == 0: - logger.error(f"Can not compute magnitude during relative photo-polarimetry without any calibrators for this reduced fit.") - - zp_avg = np.nanmean(calib_mag_zp_array) - zp_std = np.nanstd(calib_mag_zp_array) - - zp_err = np.sqrt(np.nansum(calib_mag_zp_array_err ** 2)) / len(calib_mag_zp_array_err) - zp_err = math.sqrt(zp_err ** 2 + zp_std ** 2) - - # 4. Compute the calibrated magnitudes for non-calibrators in the group using the averaged zero point - - for result in photopolresult_L: - - if result.astrosource.srctype == SRCTYPES.CALIBRATOR: - continue - - result.mag_zp = zp_avg - result.mag_zp_err = zp_err - - result.mag = result.mag_inst + zp_avg - result.mag_err = math.sqrt(result.mag_inst_err ** 2 + zp_err ** 2) - - result.save() - - # 5. Save results - for result in photopolresult_L: - result.save() - diff --git a/iop4lib/telescopes/osnt150.py b/iop4lib/telescopes/osnt150.py index 1f6e51f4..5972cf2d 100644 --- a/iop4lib/telescopes/osnt150.py +++ b/iop4lib/telescopes/osnt150.py @@ -35,11 +35,7 @@ class OSNT150(OSNT090, Telescope, metaclass=ABCMeta): abbrv = "T150" telescop_kw = "T150-OSN" - # telescope / instrument specific properties - - andort150_arcsec_per_pix = 0.232 - andort150_gain_e_adu = 4.5 - andort150_field_width_arcmin = 7.92 + # telescope specific properties ftp_address = iop4conf.osn_t150_address ftp_user = iop4conf.osn_t150_user diff --git a/iop4lib/telescopes/telescope.py b/iop4lib/telescopes/telescope.py index a0e28679..012feb3b 100644 --- a/iop4lib/telescopes/telescope.py +++ b/iop4lib/telescopes/telescope.py @@ -26,12 +26,13 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: - from iop4lib.db import RawFit, ReducedFit + from iop4lib.db import RawFit, ReducedFit class Telescope(metaclass=ABCMeta): - """ - Inherit this class to provide telescope specific functionality and - translations. + """ Base class for telescopes. + + Inherit this class to provide telescope specific functionality (e.g. discovering and + downloading new data, classification of instruments, etc). Attributes and methods that must be implemented are marked as abstract (they will give error if the class is inherited and the method is not implemented in the subclass). 
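As a minimal sketch of what this refactor leaves to a telescope subclass (the class name, keyword values and comments here are invented for illustration; only the attribute and method names mirror the abstract members kept below):

from abc import ABCMeta
from iop4lib.telescopes.telescope import Telescope

class ExampleTelescope(Telescope, metaclass=ABCMeta):
    """Hypothetical telescope, for illustration only."""

    name = "EXA-T100"
    abbrv = "T100"
    telescop_kw = "T100-EXA"

    @classmethod
    def list_remote_epochnames(cls):
        ...  # query the observatory archive for the available nights

    @classmethod
    def list_remote_raw_fnames(cls, epoch):
        ...  # list the raw FITS files of one epoch

    @classmethod
    def download_rawfits(cls, epoch):
        ...  # fetch the raw files into the local archive

Classification of individual files is now handed off to the corresponding Instrument class, as the new classify_rawfit docstring below explains.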
@@ -65,8 +66,6 @@ def name(self): def abbrv(self): pass - # telescope / instrument specific properties - @property @abstractmethod def telescop_kw(self): @@ -92,60 +91,6 @@ def download_rawfits(cls, epoch): def list_remote_epochnames(cls): pass - @classmethod - @abstractmethod - def classify_juliandate_rawfit(cls, rawfit): - pass - - @classmethod - @abstractmethod - def classify_imgtype_rawfit(cls, rawfit): - pass - - @classmethod - @abstractmethod - def classify_band_rawfit(cls, rawfit): - pass - - @classmethod - @abstractmethod - def classify_obsmode_rawfit(cls, rawfit): - pass - - @classmethod - @abstractmethod - def get_header_hintcoord(cls, rawfit, *args, **kwargs): - pass - - @classmethod - @abstractmethod - def get_astrometry_position_hint(cls, rawfit, *args, **kwargs): - pass - - @classmethod - @abstractmethod - def get_astrometry_size_hint(cls, rawfit): - pass - - @classmethod - @abstractmethod - def get_gain_e_adu(cls, rawfit): - pass - - # Not Implemented Methods (skeleton) - - # @classmethod - # def compute_relative_photometry(cls): - # raise NotImplementedError - - @classmethod - def compute_absolute_photometry(cls): - raise NotImplementedError - - @classmethod - def compute_relative_polarimetry(cls): - raise NotImplementedError - # Class methods (you should be using these only from this Telescope class, not from subclasses) @classmethod @@ -181,14 +126,18 @@ def is_known(self, name): @classmethod def classify_rawfit(cls, rawfit: 'RawFit'): + r""" Try to classify a RawFit object. + + This method will first check that the rawfit belongs to this telescope, + classify the instrument, then hand off classification to the instrument + class. + """ + + from iop4lib.instruments import Instrument + cls.check_telescop_kw(rawfit) cls.classify_instrument_kw(rawfit) - cls.classify_juliandate_rawfit(rawfit) - cls.classify_imgtype_rawfit(rawfit) - cls.classify_band_rawfit(rawfit) - cls.classify_obsmode_rawfit(rawfit) - cls.classify_imgsize(rawfit) - cls.classify_exptime(rawfit) + Instrument.by_name(rawfit.instrument).classify_rawfit(rawfit) @classmethod def check_telescop_kw(cls, rawfit): @@ -218,227 +167,4 @@ def classify_instrument_kw(cls, rawfit): rawfit.instrument = INSTRUMENTS.DIPOL1 else: raise ValueError(f"INSTRUME in fits header ({instrume_header}) not known.") - - @classmethod - def classify_imgsize(cls, rawfit): - import astropy.io.fits as fits - from iop4lib.db import RawFit - - with fits.open(rawfit.filepath) as hdul: - if hdul[0].header["NAXIS"] == 2: - sizeX = hdul[0].header["NAXIS1"] - sizeY = hdul[0].header["NAXIS2"] - rawfit.imgsize = f"{sizeX}x{sizeY}" - return rawfit.imgsize - else: - raise ValueError(f"Raw fit file {rawfit.fileloc} has NAXIS != 2, cannot get imgsize.") - - @classmethod - def classify_exptime(cls, rawfit): - """ - EXPTIME is an standard FITS keyword, measured in seconds. - """ - import astropy.io.fits as fits - from iop4lib.db import RawFit - - with fits.open(rawfit.filepath) as hdul: - rawfit.exptime = hdul[0].header["EXPTIME"] - - - @classmethod - def get_header_objecthint(self, rawfit): - r""" Get a hint for the AstroSource in this image from the header. OBJECT is a standard keyword. Return None if none found. - - At the moment his only tries to match sources - with the IAU name format `[0-9]*\+[0-9]*`. 
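For instance (the OBJECT value below is made up; only the regular expression is taken from the method body that follows):

import re
object_header = "BL Lac 2200+420 R"   # hypothetical OBJECT keyword value
matches = re.findall(r".*?([0-9]*\+[0-9]*).*", object_header)
print(matches)  # ['2200+420'], which is then used to look up the AstroSource whose name contains it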
- """ - - from iop4lib.db import AstroSource - - object_header = rawfit.header["OBJECT"] - - matchs = re.findall(r".*?([0-9]*\+[0-9]*).*", object_header) - if len(matchs) > 0: - return AstroSource.objects.filter(name__contains=matchs[0]).first() - else: - return None - - - - - # these implemente more complex functionality related to data reduction - # the ones implemented should not depend on the telescope - # but again can be overriden to customize them - # other reduction procedure like must necessarily be implemented in the subclass (like polarimetry) - - @classmethod - def compute_aperture_photometry(cls, redf, aperpix, r_in, r_out): - - from iop4lib.db.aperphotresult import AperPhotResult - from iop4lib.utils.sourcedetection import get_bkg, get_segmentation - from photutils.utils import circular_footprint - from photutils.aperture import CircularAperture, CircularAnnulus, ApertureStats, aperture_photometry - from photutils.utils import calc_total_error - from astropy.stats import SigmaClip - from iop4lib.utils import get_target_fwhm_aperpix - - if redf.mdata.shape[0] == 1024: - bkg_box_size = 128 - elif redf.mdata.shape[0] == 2048: - bkg_box_size = 256 - elif redf.mdata.shape[0] == 800: - bkg_box_size = 100 - else: - logger.warning(f"Image size {redf.mdata.shape[0]} not expected.") - bkg_box_size = redf.mdata.shape[0]//10 - - bkg = get_bkg(redf.mdata, filter_size=1, box_size=bkg_box_size) - img = redf.mdata - - if np.sum(redf.mdata <= 0.0) >= 1: - logger.debug(f"{redf}: {np.sum(redf.mdata <= 0.0):.0f} px < 0 ({math.sqrt(np.sum(redf.mdata <= 0.0)):.0f} px2) in IMAGE.") - - if np.sum(img <= 0.0) >= 1: - logger.debug(f"{redf}: {np.sum(img <= 0.0)} px < 0 ({math.sqrt(np.sum(img <= 0.0)):.0f} px2) in BKG-SUBSTRACTED IMG, after masking.") - - error = calc_total_error(img, bkg.background_rms, cls.get_gain_e_adu(redf)) - - for astrosource in redf.sources_in_field.all(): - for pairs, wcs in (('O', redf.wcs1), ('E', redf.wcs2)) if redf.with_pairs else (('O',redf.wcs),): - - ap = CircularAperture(astrosource.coord.to_pixel(wcs), r=aperpix) - annulus = CircularAnnulus(astrosource.coord.to_pixel(wcs), r_in=r_in, r_out=r_out) - - annulus_stats = ApertureStats(redf.mdata, annulus, error=error, sigma_clip=SigmaClip(sigma=5.0, maxiters=10)) - ap_stats = ApertureStats(redf.mdata, ap, error=error) - - bkg_flux_counts = annulus_stats.median*ap_stats.sum_aper_area.value - bkg_flux_counts_err = annulus_stats.sum_err / annulus_stats.sum_aper_area.value * ap_stats.sum_aper_area.value - - flux_counts = ap_stats.sum - annulus_stats.mean*ap_stats.sum_aper_area.value - flux_counts_err = ap_stats.sum_err - - AperPhotResult.create(reducedfit=redf, - astrosource=astrosource, - aperpix=aperpix, - pairs=pairs, - bkg_flux_counts=bkg_flux_counts, bkg_flux_counts_err=bkg_flux_counts_err, - flux_counts=flux_counts, flux_counts_err=flux_counts_err) - - - @classmethod - def compute_relative_photometry(cls, redf: 'ReducedFit') -> None: - - from iop4lib.db.aperphotresult import AperPhotResult - from iop4lib.db.photopolresult import PhotoPolResult - from iop4lib.utils import get_target_fwhm_aperpix - - if redf.obsmode != OBSMODES.PHOTOMETRY: - raise Exception(f"{redf}: this method is only for plain photometry images.") - - target_fwhm, aperpix, r_in, r_out = get_target_fwhm_aperpix([redf], reductionmethod=REDUCTIONMETHODS.RELPHOT) - - if target_fwhm is None: - logger.error("Could not estimate a target FWHM, aborting relative photometry.") - return - - # 1. 
Compute all aperture photometries - - logger.debug(f"{redf}: computing aperture photometries for {redf}.") - - redf.compute_aperture_photometry(aperpix, r_in, r_out) - - # 2. Compute relative polarimetry for each source (uses the computed aperture photometries) - - logger.debug(f"{redf}: computing relative photometry.") - - # 2. Compute the flux in counts and the instrumental magnitude - - photopolresult_L = list() - - for astrosource in redf.sources_in_field.all(): - - result = PhotoPolResult.create(reducedfits=[redf], astrosource=astrosource, reduction=REDUCTIONMETHODS.RELPHOT) - - aperphotresult = AperPhotResult.objects.get(reducedfit=redf, astrosource=astrosource, aperpix=aperpix, pairs="O") - - result.bkg_flux_counts = aperphotresult.bkg_flux_counts - result.bkg_flux_counts_err = aperphotresult.bkg_flux_counts_err - result.flux_counts = aperphotresult.flux_counts - result.flux_counts_err = aperphotresult.flux_counts_err - - # logger.debug(f"{self}: {result.flux_counts=}") - - if result.flux_counts is None: # when does this happen? when there is a source whose apertue falls partially outside the image? https://github.com/juanep97/iop4/issues/24 - logger.error(f"{redf}: during relative photometry, encountered flux_counts=None for source {astrosource.name}, aperphotresult {aperphotresult.id}!!!") - result.flux_counts = np.nan - result.flux_counts_err = np.nan - - if result.flux_counts <= 0.0: - logger.warning(f"{redf}: negative flux counts encountered while relative photometry for {astrosource=} ??!! They will be nans, but maybe we should look into this...") - - result.mag_inst = -2.5 * np.log10(result.flux_counts) # np.nan if result.flux_counts <= 0.0 - result.mag_inst_err = math.fabs(2.5 / math.log(10) / result.flux_counts * result.flux_counts_err) - - # if the source is a calibrator, compute also the zero point - if result.astrosource.srctype == SRCTYPES.CALIBRATOR: - result.mag_known = getattr(result.astrosource, f"mag_{redf.band}") - result.mag_known_err = getattr(result.astrosource, f"mag_{redf.band}_err", None) or 0.0 - - if result.mag_known is None: - logger.warning(f"Relative Photometry over {redf}: calibrator {result.astrosource} has no magnitude for band {redf.band}.") - result.mag_zp = np.nan - result.mag_zp_err = np.nan - else: - result.mag_zp = result.mag_known - result.mag_inst - result.mag_zp_err = math.sqrt(result.mag_inst_err**2 + result.mag_known_err**2) - else: - # if it is not a calibrator, we can not save the COMPUTED zp, it will be computed and the USED zp will be stored. - result.mag_zp = None - result.mag_zp_err = None - - result.save() - - photopolresult_L.append(result) - - # 3. Average the zero points - - calib_mag_zp_array = np.array([result.mag_zp or np.nan for result in photopolresult_L if result.astrosource.srctype == SRCTYPES.CALIBRATOR]) # else it fills with None also and the dtype becomes object - calib_mag_zp_array = calib_mag_zp_array[~np.isnan(calib_mag_zp_array)] - - calib_mag_zp_array_err = np.array([result.mag_zp_err or np.nan for result in photopolresult_L if result.astrosource.srctype == SRCTYPES.CALIBRATOR]) - calib_mag_zp_array_err = calib_mag_zp_array_err[~np.isnan(calib_mag_zp_array_err)] - - if len(calib_mag_zp_array) == 0: - logger.error(f"{redf}: can not perform relative photometry without any calibrators for this reduced fit. 
Deleting results.") - [result.delete() for result in redf.photopolresults.all()] - return #raise Exception(f"{self}: can not perform relative photometry without any calibrators for this reduced fit.") - - zp_avg = np.nanmean(calib_mag_zp_array) - zp_std = np.nanstd(calib_mag_zp_array) - - zp_err = math.sqrt(np.sum(calib_mag_zp_array_err**2)) / len(calib_mag_zp_array_err) - zp_err = math.sqrt(zp_std**2 + zp_err**2) - - # 4. Compute the calibrated magnitudes - - for result in photopolresult_L: - - if result.astrosource.srctype == SRCTYPES.CALIBRATOR: - continue - - # save the zp (to be) used - result.mag_zp = zp_avg - result.mag_zp_err = zp_err - - # compute the calibrated magnitude - result.mag = zp_avg + result.mag_inst - result.mag_err = math.sqrt(result.mag_inst_err**2 + zp_err**2) - - result.save() - - # 5. Save the results - - for result in photopolresult_L: - result.save() - + \ No newline at end of file diff --git a/iop4lib/utils/astrometry.py b/iop4lib/utils/astrometry.py index 455a6387..ea43d683 100644 --- a/iop4lib/utils/astrometry.py +++ b/iop4lib/utils/astrometry.py @@ -369,7 +369,8 @@ def _build_wcs_params_shotgun_helper(redf, with_pairs=None, if bm is None: return {'success':False} else: - logger.debug(f"{redf}: {msg} worked") + logger.debug(f"{redf}: {msg} worked.") + logger.debug(f"{redf}: {bm.index_path=}") logger.debug(f"{redf}: {bm.center_ra_deg=}") logger.debug(f"{redf}: {bm.center_dec_deg=}") logger.debug(f"{redf}: {bm.scale_arcsec_per_pixel=}") From aff9c93326a17afe3af69166c0a8b069fce98dfb Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Mon, 16 Oct 2023 01:27:30 +0200 Subject: [PATCH 017/168] improve tests descriptions --- tests/test_cahat220.py | 20 ++++++++++++++++---- tests/test_osnt090.py | 9 +++++++-- 2 files changed, 23 insertions(+), 6 deletions(-) diff --git a/tests/test_cahat220.py b/tests/test_cahat220.py index 5b8e5928..a2bd1b26 100644 --- a/tests/test_cahat220.py +++ b/tests/test_cahat220.py @@ -19,10 +19,17 @@ from .fixtures import load_test_catalog -@pytest.mark.skipif(os.getenv("CI") != "true", reason="only neccesary for actions CI as a workaround for httpdirfs") +@pytest.mark.skipif(os.getenv("CI") != "true", reason="only for actions CI") @pytest.mark.django_db(transaction=True) def test_build_single_proc(load_test_catalog): - """ Test the whole building process of reduced fits through multiprocessing """ + """ Test the whole building process of reduced fits in a single process + + This test is not really necessary since single-process reduction is already tested + in OSN-T090, therefore it is skipped by default. However CI actions at the momemnt + will fail without it, because if the astrometry index files are accessed by multiple + processes at the same time before they are catched httpdirfs will fail. To run it + locally set the environment variable CI=true. + """ from iop4lib.db import Epoch, RawFit, ReducedFit from iop4lib.enums import IMGTYPES, SRCTYPES @@ -50,8 +57,13 @@ def test_build_single_proc(load_test_catalog): @pytest.mark.django_db(transaction=True) -def test_build_multi_proc(load_test_catalog): - """ Test the whole building process of reduced fits through multiprocessing """ +def test_build_multi_proc_photopol(load_test_catalog): + """ Test the whole building process of reduced fits through multiprocessing + + Also tests here relative photometry and polarimetry results and their + quality (value + uncertainties) (to avoud losing time reducing them + in another test function). 
+ """ from iop4lib.db import Epoch, RawFit, ReducedFit from iop4lib.enums import IMGTYPES, SRCTYPES diff --git a/tests/test_osnt090.py b/tests/test_osnt090.py index 27f41d2a..27028059 100644 --- a/tests/test_osnt090.py +++ b/tests/test_osnt090.py @@ -85,8 +85,13 @@ def test_build_single_proc(load_test_catalog): @pytest.mark.django_db(transaction=True) -def test_build_multi_proc(load_test_catalog): - """ Test the whole building process of reduced fits through multiprocessing """ +def test_build_multi_proc_photopol(load_test_catalog): + """ Test the whole building process of reduced fits through multiprocessing + + Also tests here relative photometry and polarimetry results and their + quality (value + uncertainties) (to avoud losing time reducing them + in another test function). + """ from iop4lib.db import Epoch, RawFit, ReducedFit from iop4lib.enums import IMGTYPES, SRCTYPES From cee94793514473b13f3db4a97ff6acd201cea006 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Mon, 16 Oct 2023 04:18:52 +0200 Subject: [PATCH 018/168] iop4admin: improve browsability --- iop4admin/modeladmins/masterflat.py | 10 +++++++++- iop4admin/modeladmins/reducedfit.py | 6 +++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/iop4admin/modeladmins/masterflat.py b/iop4admin/modeladmins/masterflat.py index a0f7b2b1..7b41980e 100644 --- a/iop4admin/modeladmins/masterflat.py +++ b/iop4admin/modeladmins/masterflat.py @@ -13,7 +13,7 @@ class AdminMasterFlat(AdminFitFile): model = MasterFlat - list_display = ['id', 'telescope', 'night', 'imgsize', 'band', 'obsmode', 'rotangle', 'exptime', 'masterbias', 'get_built_from', 'options'] + list_display = ['id', 'telescope', 'night', 'imgsize', 'band', 'obsmode', 'rotangle', 'exptime', 'get_masterbias', 'get_built_from', 'options'] @@ -31,6 +31,14 @@ def telescope(self, obj): def night(self, obj): return obj.epoch.night + @admin.display(description='MasterBias') + def get_masterbias(self, obj): + self.allow_tags = True + if obj.masterbias is None: + return "-" + url = reverse('iop4admin:%s_%s_changelist' % (MasterBias._meta.app_label, MasterBias._meta.model_name)) + f"?id={obj.masterbias.id}" + return mark_safe(rf'{obj.masterbias.id}') + @admin.display(description="Built from") def get_built_from(self, obj): self.allow_tags = True diff --git a/iop4admin/modeladmins/reducedfit.py b/iop4admin/modeladmins/reducedfit.py index 8149b7d3..4844c36c 100644 --- a/iop4admin/modeladmins/reducedfit.py +++ b/iop4admin/modeladmins/reducedfit.py @@ -70,7 +70,11 @@ def get_targets_in_field(self, obj): if len(cat_targets) > 0: return cat_targets - kw_obj_val = obj.rawfit.header['OBJECT'] + try: + kw_obj_val = obj.rawfit.header['OBJECT'] + except FileNotFoundError: + return format_html(f"rawfit not found") + guessed_target = AstroSource.objects.filter(Q(name__icontains=kw_obj_val) | Q(other_name__icontains=kw_obj_val)).values_list('name', flat=True) if len(guessed_target) > 0: From f23bf8b0c0bb18c440f5d7809c3559e07584ef19 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Mon, 16 Oct 2023 04:30:22 +0200 Subject: [PATCH 019/168] tests: bump version, include polarimetry --- tests/conftest.py | 2 +- tests/test_cahat220.py | 18 +++++++++------ tests/test_osnt090.py | 50 +++++++++++++++++++++++++++--------------- 3 files changed, 44 insertions(+), 26 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 55617e86..d5b11136 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -12,7 +12,7 @@ TEST_CONFIG = str(Path(iop4conf.datadir) / 
"config.tests.yaml") TESTDATA_FPATH = str(Path("~/iop4testdata.tar.gz").expanduser()) -TESTDATA_MD5SUM = '2c6986919012bd4c8ccf99e3a058855e' +TESTDATA_MD5SUM = '8cf7f2cd3b072f16749fb2c04bc5fb48' TEST_DATADIR = str(Path(iop4conf.datadir) / "iop4testdata") TEST_DB_PATH = str(Path(iop4conf.db_path).expanduser().parent / ("test_" + str(Path(iop4conf.db_path).name))) diff --git a/tests/test_cahat220.py b/tests/test_cahat220.py index a2bd1b26..988152a3 100644 --- a/tests/test_cahat220.py +++ b/tests/test_cahat220.py @@ -34,7 +34,7 @@ def test_build_single_proc(load_test_catalog): from iop4lib.db import Epoch, RawFit, ReducedFit from iop4lib.enums import IMGTYPES, SRCTYPES - epochname_L = ["CAHA-T220/2022-09-18", "CAHA-T220/2022-08-27"] + epochname_L = ["CAHA-T220/2022-08-27", "CAHA-T220/2022-09-18"] epoch_L = [Epoch.create(epochname=epochname, check_remote_list=False) for epochname in epochname_L] @@ -68,7 +68,7 @@ def test_build_multi_proc_photopol(load_test_catalog): from iop4lib.db import Epoch, RawFit, ReducedFit from iop4lib.enums import IMGTYPES, SRCTYPES - epochname_L = ["CAHA-T220/2022-09-18", "CAHA-T220/2022-08-27"] + epochname_L = ["CAHA-T220/2022-08-27", "CAHA-T220/2022-09-18"] epoch_L = [Epoch.create(epochname=epochname, check_remote_list=False) for epochname in epochname_L] @@ -92,7 +92,6 @@ def test_build_multi_proc_photopol(load_test_catalog): epoch = Epoch.by_epochname("CAHA-T220/2022-09-18") - epoch.compute_relative_photometry() epoch.compute_relative_polarimetry() qs_res = PhotoPolResult.objects.filter(epoch=epoch, astrosource__name="2200+420").all() @@ -102,8 +101,13 @@ def test_build_multi_proc_photopol(load_test_catalog): res = qs_res[0] - # check that the result is correct to 1.5 sigma compared to IOP3 - assert res.mag == approx(13.38, abs=1.5*res.mag_err) - + # check that the result is correct to 1.5 sigma or 0.02 mag compared to IOP3 + assert res.mag == approx(13.38, abs=max(1.5*res.mag_err, 0.02)) # check that uncertainty of the result is less than 0.08 mag - assert res.mag_err < 0.08 \ No newline at end of file + assert res.mag_err < 0.08 + + assert res.p == approx(10.9/100, abs=max(1.5*res.p_err, 1.0/100)) + assert res.p_err < 0.5 + + assert res.chi == approx(25.2, abs=max(1.5*res.chi_err, 1.0)) + assert res.chi_err < 1.0 diff --git a/tests/test_osnt090.py b/tests/test_osnt090.py index 27028059..9fef5263 100644 --- a/tests/test_osnt090.py +++ b/tests/test_osnt090.py @@ -26,26 +26,26 @@ def test_epoch_creation(load_test_catalog): assert (Epoch.objects.count() == 0) - epoch = Epoch.create(epochname="OSN-T090/2023-06-11") + epoch = Epoch.create(epochname="OSN-T090/2022-09-08") assert (Epoch.objects.count() == 1) assert (epoch.rawfits.count() > 0) - assert (epoch.rawfits.count() == len(os.listdir(Path(iop4conf.datadir) / "raw" / "OSN-T090" / "2023-06-11"))) + assert (epoch.rawfits.count() == len(os.listdir(Path(iop4conf.datadir) / "raw" / "OSN-T090" / "2022-09-08"))) @pytest.mark.django_db(transaction=True) def test_epoch_masterbias_masterflats(load_test_catalog): """ Test masterbias and masterflats creation """ from iop4lib.db import Epoch - epoch = Epoch.create(epochname="OSN-T090/2023-06-11") + epoch = Epoch.create(epochname="OSN-T090/2022-09-23") - assert (epoch.rawfits.count() == len(os.listdir(Path(iop4conf.datadir) / "raw" / "OSN-T090" / "2023-06-11"))) + assert (epoch.rawfits.count() == len(os.listdir(Path(iop4conf.datadir) / "raw" / "OSN-T090" / "2022-09-23"))) epoch.build_master_biases() epoch.build_master_flats() - assert (epoch.masterbias.count() == 1) - assert 
(epoch.masterflats.count() == 5) + assert (epoch.masterbias.count() > 0) + assert (epoch.masterflats.count() > 0) @pytest.mark.skip(reason="Not implemented yet") @@ -69,15 +69,20 @@ def test_build_single_proc(load_test_catalog): from iop4lib.db import Epoch, ReducedFit - epoch = Epoch.create(epochname="OSN-T090/2023-06-11", check_remote_list=False) - epoch.build_master_biases() - epoch.build_master_flats() + epochname_L = ["OSN-T090/2022-09-23", "OSN-T090/2022-09-18"] + epoch_L = [Epoch.create(epochname=epochname, check_remote_list=False) for epochname in epochname_L] + + for epoch in epoch_L: + epoch.build_master_biases() + epoch.build_master_flats() iop4conf.max_concurrent_threads = 1 + epoch = Epoch.by_epochname("OSN-T090/2022-09-18") + epoch.reduce() - assert (ReducedFit.objects.filter(epoch=epoch).count() == 5) + assert (ReducedFit.objects.filter(epoch=epoch).count() == 1) for redf in ReducedFit.objects.filter(epoch=epoch).all(): assert (redf.has_flag(ReducedFit.FLAGS.BUILT_REDUCED)) @@ -95,7 +100,7 @@ def test_build_multi_proc_photopol(load_test_catalog): from iop4lib.db import Epoch, RawFit, ReducedFit from iop4lib.enums import IMGTYPES, SRCTYPES - epochname_L = ["OSN-T090/2022-09-18", "OSN-T090/2023-06-11"] + epochname_L = ["OSN-T090/2022-09-23", "OSN-T090/2022-09-08", "OSN-T090/2022-09-18"] epoch_L = [Epoch.create(epochname=epochname, check_remote_list=False) for epochname in epochname_L] @@ -109,7 +114,7 @@ def test_build_multi_proc_photopol(load_test_catalog): Epoch.reduce_rawfits(rawfits) - assert (ReducedFit.objects.filter(epoch__in=epoch_L).count() == 6) + assert (ReducedFit.objects.filter(epoch__in=epoch_L).count() == 5) for redf in ReducedFit.objects.filter(epoch__in=epoch_L).all(): assert (redf.has_flag(ReducedFit.FLAGS.BUILT_REDUCED)) @@ -130,15 +135,15 @@ def test_build_multi_proc_photopol(load_test_catalog): res = qs_res[0] - # check that the result is correct to 1.5 sigma compared to IOP3 - assert res.mag == approx(13.35, abs=1.5*res.mag_err) + # check that the result is correct to 1.5 sigma or 0.02 mag compared to IOP3 + assert res.mag == approx(13.35, abs=max(1.5*res.mag_err, 0.02)) # check that uncertainty of the result is less than 0.08 mag assert res.mag_err < 0.08 # 2. 
test relative polarimetry - epoch = epoch.by_epochname("OSN-T090/2023-06-11") + epoch = epoch.by_epochname("OSN-T090/2022-09-08") epoch.compute_relative_polarimetry() @@ -147,7 +152,16 @@ def test_build_multi_proc_photopol(load_test_catalog): # we expect only one polarimetry result target in this test dataset for this epoch assert qs_res.exclude(astrosource__srctype=SRCTYPES.CALIBRATOR).count() == 1 - res = qs_res[0] + res = qs_res.get(astrosource__name="2200+420") + + # logger.debug(f"{res}\n" + # f" mag {res.mag} +- {res.mag_err}\n" + # f" p {res.p} % +- {res.p_err} %\n" + # f" chi {res.chi} +- {res.chi_err}") + + # check that the result is correct to 1.5 sigma or 0.02 compared to IOP3 + assert res.mag == approx(13.38, abs=max(1.5*res.mag_err, 0.02)) - # check that the result is correct to 1.5 sigma compared to IOP3 - # TODO + # for polarimetry, we expect a higher uncertainty than for photometry + assert res.p == approx(14.0/100, abs=max(2*res.p_err, 2/100)) # 2 sigma or 2% of polarization degree + assert res.chi == approx(14.7, abs=max(2*res.chi_err, 5)) # 2 sigma or 5 degrees of polarization angle From 53e1361ee76c4b48ba1c61563420996632b5ba90 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Mon, 16 Oct 2023 05:32:26 +0200 Subject: [PATCH 020/168] iop4.py: add option to re-classify rawfits --- iop4lib/iop4.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/iop4lib/iop4.py b/iop4lib/iop4.py index 0f86f21a..c78824cd 100644 --- a/iop4lib/iop4.py +++ b/iop4lib/iop4.py @@ -143,6 +143,7 @@ def main(): ## other options parser.add_argument('--retry-failed', dest='retry_failed', action='store_true', help=' Retry failed reduced fits', required=False) parser.add_argument('--skip-remote-file-list', dest='skip_remote_file_list', action='store_true', help=' Skip remote file list check', required=False) + parser.add_argument('--reclasify-rawfits', dest="reclassify_rawfits", action="store_true", help=" Re-classify rawfits", required=False) parser.add_argument("--force-rebuild", dest="force_rebuild", action="store_true", help=" Force re-building of files (pass force_rebuild=True)", required=False) args = parser.parse_args() @@ -172,7 +173,7 @@ def main(): ROOT_LOGGER.addHandler(logger_h1) ROOT_LOGGER.addHandler(logger_h2) - ## parallelization: + ## read cli config options if args.nthreads is not None: iop4conf.max_concurrent_threads = args.nthreads @@ -199,6 +200,15 @@ def main(): logger.info("Invoked with --list-only:") logger.info(f"{epochs_to_process=}") + # Classify rawfits if indicated + + if args.reclassify_rawfits: + logger.info("Classifying rawfits.") + for epochname in epochs_to_process: + epoch = Epoch.by_epochname(epochname) + for rawfit in epoch.rawfits.all(): + rawfit.classify() + # Retry failed files if indicated if args.retry_failed: From d4c36f126c9c79a62c5448b85948ac1a9deb03ff Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Mon, 16 Oct 2023 05:42:59 +0200 Subject: [PATCH 021/168] instrument.py: add dipol to known instruments --- iop4lib/enums.py | 2 +- iop4lib/instruments/dipol.py | 4 ++-- iop4lib/instruments/instrument.py | 3 ++- iop4lib/telescopes/osnt090.py | 2 +- iop4lib/telescopes/telescope.py | 2 +- 5 files changed, 7 insertions(+), 6 deletions(-) diff --git a/iop4lib/enums.py b/iop4lib/enums.py index ed105fdb..db202f89 100644 --- a/iop4lib/enums.py +++ b/iop4lib/enums.py @@ -43,7 +43,7 @@ class INSTRUMENTS(models.TextChoices): CAFOS = 'CAFOS2.2', "CAFOS2.2" AndorT90 = 'AndorT90', "AndorT90" AndorT150 = 'AndorT150', "AndorT150" - 
DIPOL1 = 'DIPOL-1', "DIPOL-1" + DIPOL = 'DIPOL', "DIPOL" class TELESCOPES(models.TextChoices): """ diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index 2372d74d..c0de74c1 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -18,7 +18,7 @@ class DIPOL(Instrument): - name = "DIPOL-1" + name = "DIPOL" arcsec_per_pix = 0.134 @@ -26,7 +26,7 @@ class DIPOL(Instrument): def get_astrometry_size_hint(cls, rawfit): """ Get the size hint for this telescope / rawfit. - For DIPOL-1 in OSN-T090, according to preliminary investigation of OSN crew is: + For DIPOL in OSN-T090, according to preliminary investigation of OSN crew is: Las posiciones que he tomado y el ángulo de rotación en cada caso son estos: Dec= -10º HA=+3h rotación=-177.3º Zenit rotación=-177.3º diff --git a/iop4lib/instruments/instrument.py b/iop4lib/instruments/instrument.py index b53a2c4f..424c0618 100644 --- a/iop4lib/instruments/instrument.py +++ b/iop4lib/instruments/instrument.py @@ -71,8 +71,9 @@ def gain_e_adu(self): def get_known(cls): from .andor_cameras import AndorT90, AndorT150 from .cafos import CAFOS + from .dipol import DIPOL - return [AndorT90, AndorT150, CAFOS] + return [AndorT90, AndorT150, CAFOS, DIPOL] @classmethod def by_name(cls, name: str) -> 'Instrument': diff --git a/iop4lib/telescopes/osnt090.py b/iop4lib/telescopes/osnt090.py index f07f3971..23732354 100644 --- a/iop4lib/telescopes/osnt090.py +++ b/iop4lib/telescopes/osnt090.py @@ -132,7 +132,7 @@ def check_telescop_kw(cls, rawfit): """ if rawfit.header["TELESCOP"] == "": cls.classify_instrument_kw(rawfit) - if rawfit.instrument == INSTRUMENTS.DIPOL1: + if rawfit.instrument == INSTRUMENTS.DIPOL: return super().check_telescop_kw(rawfit) diff --git a/iop4lib/telescopes/telescope.py b/iop4lib/telescopes/telescope.py index 012feb3b..60c539ba 100644 --- a/iop4lib/telescopes/telescope.py +++ b/iop4lib/telescopes/telescope.py @@ -164,7 +164,7 @@ def classify_instrument_kw(cls, rawfit): elif instrume_header == "CAFOS 2.2": rawfit.instrument = INSTRUMENTS.CAFOS elif instrume_header == "ASI Camera (1)": - rawfit.instrument = INSTRUMENTS.DIPOL1 + rawfit.instrument = INSTRUMENTS.DIPOL else: raise ValueError(f"INSTRUME in fits header ({instrume_header}) not known.") \ No newline at end of file From 470ee022d78fa555edc26e1787571e895390563e Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Mon, 16 Oct 2023 05:45:01 +0200 Subject: [PATCH 022/168] dipol.py: add intrument_kw --- iop4lib/instruments/dipol.py | 1 + 1 file changed, 1 insertion(+) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index c0de74c1..74244c86 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -19,6 +19,7 @@ class DIPOL(Instrument): name = "DIPOL" + instrument_kw = "ASI Camera (1)" arcsec_per_pix = 0.134 From 787cb39baf51372b5a0d651857ff9cae0b8a4417 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Mon, 16 Oct 2023 05:56:10 +0200 Subject: [PATCH 023/168] dipol.py: improve classification --- iop4lib/instruments/dipol.py | 71 ++++++++++++++++++++++++++++++++++++ 1 file changed, 71 insertions(+) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index 74244c86..cc1a466d 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -23,6 +23,77 @@ class DIPOL(Instrument): arcsec_per_pix = 0.134 + @classmethod + def classify_juliandate_rawfit(cls, rawfit): + """ + DIPOL files have JD keyword + """ + import astropy.io.fits as fits + jd = 
fits.getheader(rawfit.filepath, ext=0)["JD"] + rawfit.juliandate = jd + + + @classmethod + def classify_imgtype_rawfit(cls, rawfit): + """ + DIPOL files have IMAGETYP keyword: Light Frame, Bias Frame + + """ + from iop4lib.db.rawfit import RawFit + import astropy.io.fits as fits + + with fits.open(rawfit.filepath) as hdul: + if hdul[0].header['IMAGETYP'] == 'Bias Frame': + rawfit.imgtype = IMGTYPES.BIAS + elif hdul[0].header['IMAGETYP'] == 'Light Frame': + rawfit.imgtype = IMGTYPES.LIGHT + else: + logger.error(f"Unknown image type for {rawfit.fileloc}.") + rawfit.imgtype = IMGTYPES.ERROR + raise ValueError + + @classmethod + def classify_band_rawfit(cls, rawfit): + """ + OSN Files have no FILTER keyword if they are BIAS, FILTER=Clear if they are FLAT, and FILTER=FilterName if they are LIGHT. + For our DB, we have R, U, ..., None, ERROR. + + For polarimetry, which is done by taking four images with the R filter at different angles, we have R_45, R0, R45, R90. + """ + + from iop4lib.db.rawfit import RawFit + + if 'FILTER' not in rawfit.header: + if rawfit.imgtype == IMGTYPES.BIAS: + rawfit.band = BANDS.NONE + else: + rawfit.band = BANDS.ERROR + raise ValueError(f"Missing FILTER keyword for {rawfit.fileloc} which is not a bias (it is a {rawfit.imgtype}).") + elif rawfit.header['FILTER'] == "Red": + rawfit.band = BANDS.R + else: + rawfit.band = BANDS.ERROR + raise ValueError(f"Unknown FILTER keyword for {rawfit.fileloc}: {rawfit.header['FILTER']}.") + + + @classmethod + def classify_obsmode_rawfit(cls, rawfit): + """ + In OSN Andor Polarimetry, we only have polarimetry for filter R, and it is indicated as R_45, R0, R45, R90 (-45, 0, 45 and 90 degrees). They correspond + to the different angles of the polarimeter. + + For photometry, the filter keyword willl be simply the letter R, U, etc. + + The values for angles are -45, 0, 45 and 90. + + Lately we have seen "R-45" instead of "R_45", so we have to take care of that too. + """ + + from iop4lib.db.rawfit import RawFit + import re + + raise NotImplementedError("DIPOL obsmode not implemented yet") + @classmethod def get_astrometry_size_hint(cls, rawfit): """ Get the size hint for this telescope / rawfit. 
From 41f0b715370e98f7d548d103a57c91362103be06 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Mon, 16 Oct 2023 06:04:10 +0200 Subject: [PATCH 024/168] ci.yml: fix command that was making tests fail --- .github/workflows/ci.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 28bb7993..707c290a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -80,10 +80,11 @@ jobs: - name: Run tests (with -o log_cli=true -o log_cli_level=DEBUG to debug CI actions) run: pytest -o log_cli=true -o log_cli_level=DEBUG -vxs tests/ - - name: Output some info for debugging + - name: Output some info for debugging + # | true so erros in this step are ignored run: | - df -h - du -sh $HOME/.cache/httpdirfs/ + df -h || true + du -sh $HOME/.cache/httpdirfs/ || true From e5aa216a34a7343d9d90f05a531c9949b844742d Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Mon, 16 Oct 2023 06:05:44 +0200 Subject: [PATCH 025/168] iop4admin: show instrument in model admins --- iop4admin/modeladmins/masterbias.py | 2 +- iop4admin/modeladmins/masterflat.py | 2 +- iop4admin/modeladmins/rawfit.py | 2 +- iop4admin/modeladmins/reducedfit.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/iop4admin/modeladmins/masterbias.py b/iop4admin/modeladmins/masterbias.py index 980d9804..25d567bc 100644 --- a/iop4admin/modeladmins/masterbias.py +++ b/iop4admin/modeladmins/masterbias.py @@ -12,7 +12,7 @@ class AdminMasterBias(AdminFitFile): model = MasterBias - list_display = ['id', 'telescope', 'night', 'imgsize', 'get_built_from', 'options'] + list_display = ['id', 'telescope', 'night', 'instrument', 'imgsize', 'get_built_from', 'options'] diff --git a/iop4admin/modeladmins/masterflat.py b/iop4admin/modeladmins/masterflat.py index 7b41980e..d22b7854 100644 --- a/iop4admin/modeladmins/masterflat.py +++ b/iop4admin/modeladmins/masterflat.py @@ -13,7 +13,7 @@ class AdminMasterFlat(AdminFitFile): model = MasterFlat - list_display = ['id', 'telescope', 'night', 'imgsize', 'band', 'obsmode', 'rotangle', 'exptime', 'get_masterbias', 'get_built_from', 'options'] + list_display = ['id', 'telescope', 'night', 'instrument', 'imgsize', 'band', 'obsmode', 'rotangle', 'exptime', 'get_masterbias', 'get_built_from', 'options'] diff --git a/iop4admin/modeladmins/rawfit.py b/iop4admin/modeladmins/rawfit.py index c3e80243..f6299619 100644 --- a/iop4admin/modeladmins/rawfit.py +++ b/iop4admin/modeladmins/rawfit.py @@ -14,7 +14,7 @@ class AdminRawFit(AdminFitFile): model = RawFit - list_display = ["id", 'filename', 'telescope', 'night', 'status', 'imgtype', 'imgsize', 'band', 'obsmode', 'rotangle', 'exptime', 'options'] + list_display = ["id", 'filename', 'telescope', 'night', 'instrument', 'status', 'imgtype', 'imgsize', 'band', 'obsmode', 'rotangle', 'exptime', 'options'] readonly_fields = [field.name for field in RawFit._meta.fields] search_fields = ['id', 'filename', 'epoch__telescope', 'epoch__night'] ordering = ['-epoch__night','-epoch__telescope'] diff --git a/iop4admin/modeladmins/reducedfit.py b/iop4admin/modeladmins/reducedfit.py index 4844c36c..cbdf4a59 100644 --- a/iop4admin/modeladmins/reducedfit.py +++ b/iop4admin/modeladmins/reducedfit.py @@ -18,7 +18,7 @@ class AdminReducedFit(AdminFitFile): model = ReducedFit - list_display = ["id", 'filename', 'telescope', 'night', 'status', 'imgtype', 'imgsize', 'band', 'obsmode', 'rotangle', 'exptime', 'get_targets_in_field', 'options', 'modified'] + list_display = ["id", 
'filename', 'telescope', 'night', 'instrument', 'status', 'imgtype', 'imgsize', 'band', 'obsmode', 'rotangle', 'exptime', 'get_targets_in_field', 'options', 'modified'] readonly_fields = [field.name for field in ReducedFit._meta.fields] search_fields = ['id', 'filename', 'epoch__telescope', 'epoch__night', 'sources_in_field__name'] ordering = ['-epoch__night', '-epoch__telescope', '-juliandate'] From 4c257dc770523f8c07cf34bf7497a9e2803031e7 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Mon, 16 Oct 2023 06:12:52 +0200 Subject: [PATCH 026/168] iop4admin: fix bug --- iop4admin/modeladmins/rawfit.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/iop4admin/modeladmins/rawfit.py b/iop4admin/modeladmins/rawfit.py index f6299619..d1d751da 100644 --- a/iop4admin/modeladmins/rawfit.py +++ b/iop4admin/modeladmins/rawfit.py @@ -35,15 +35,17 @@ class AdminRawFit(AdminFitFile): @admin.display(description='OPTIONS') def options(self, obj): - if obj.imgtype == IMGTYPES.LIGHT: + html_src = str() + + if obj.imgtype == IMGTYPES.LIGHT and hasattr(obj, "reduced"): url_reduced = reverse('iop4admin:%s_%s_changelist' % (ReducedFit._meta.app_label, ReducedFit._meta.model_name)) + f"?id={obj.reduced.id}" - url_details = reverse('iop4admin:iop4api_rawfit_details', args=[obj.id]) - url_viewer= reverse('iop4admin:iop4api_rawfit_viewer', args=[obj.id]) - return format_html(rf'reduced / details / advanced viewer') - else: - url_details = reverse('iop4admin:iop4api_rawfit_details', args=[obj.id]) - url_viewer= reverse('iop4admin:iop4api_rawfit_viewer', args=[obj.id]) - return format_html(rf'details / advanced viewer') + html_src += rf'reduced / ' + + url_details = reverse('iop4admin:iop4api_rawfit_details', args=[obj.id]) + url_viewer= reverse('iop4admin:iop4api_rawfit_viewer', args=[obj.id]) + html_src += rf'details / advanced viewer' + + return format_html(html_src) def image_preview(self, obj, allow_tags=True): url_img_preview = reverse('iop4admin:iop4api_rawfit_preview', args=[obj.id]) From 9109b02b84d58b535f444b0fcb8b4520aab4cc9f Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Mon, 16 Oct 2023 09:20:28 +0200 Subject: [PATCH 027/168] iop4admin: fix plot with only one pt --- iop4api/views/plot.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/iop4api/views/plot.py b/iop4api/views/plot.py index 9f05c2a7..10ce8b37 100644 --- a/iop4api/views/plot.py +++ b/iop4api/views/plot.py @@ -176,6 +176,12 @@ def f_x1_to_x2(x1_val): # and for the subplots x1_range = min(x1)-0.05*(max(x1)-min(x1)), max(x1)+0.05*(max(x1)-min(x1)) x2_range = f_x1_to_x2(x1_range) + elif len(x1) == 1: + x1_lims = x1[0]-0.2, x1[0]+0.2 + x2_lims = f_x1_to_x2(x1_lims) + + x1_range = x1[0]-0.05, x1[0]+0.05 + x2_range = f_x1_to_x2(x1_range) else: x1_lims = np.nan, np.nan x2_lims = np.nan, np.nan @@ -185,6 +191,8 @@ def f_x1_to_x2(x1_val): # also the freeze y axis range of the main plot if len(y1) >= 2: y1_lims = np.nanmin(y1)-0.05*(np.nanmax(y1)-np.nanmin(y1)), np.nanmax(y1)+0.05*(np.nanmax(y1)-np.nanmin(y1)) + elif len(y1) == 1: + y1_lims = y1[0]-0.05, y1[0]+0.05 else: y1_lims = np.nan, np.nan From b99c08d0b6aab08df9384591ca449c1535f905fc Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Mon, 16 Oct 2023 10:13:31 +0200 Subject: [PATCH 028/168] dipol.py: continue on obsmode not implemented --- iop4lib/instruments/dipol.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index cc1a466d..d0aab1e1 
100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -92,7 +92,8 @@ def classify_obsmode_rawfit(cls, rawfit): from iop4lib.db.rawfit import RawFit import re - raise NotImplementedError("DIPOL obsmode not implemented yet") + # raise NotImplementedError("DIPOL obsmode not implemented yet") + logger.error(f"OSN DIPOL obsmode not implemented yet.") @classmethod def get_astrometry_size_hint(cls, rawfit): From 81f5cf257868dcc71473bb464d9ad8b8ab451546 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Mon, 16 Oct 2023 11:22:07 +0200 Subject: [PATCH 029/168] ci.yml: try to keep cache httpdirfs to speed up CI --- .github/workflows/ci.yml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 707c290a..c1b5fd55 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -66,6 +66,15 @@ jobs: - name: Install httpdirfs to access astrometry index files without downloading them run: sudo apt install httpdirfs + - name: Try to restore httpdirfs cache data + # this should make subsuquent commits in the same PR faster + uses: actions/cache@v3 + with: + path: $HOME/.cache/httpdirfs/ + key: httpdirfs-astrometry-5200-1-2-3-4-4 + restore-keys: | + httpdirfs-astrometry-5200-1-2-3-4-4 + - name: Mount astrometry index file in default location run: mkdir -p $HOME/.astrometry_cache/5200 && httpdirfs --cache 'https://portal.nersc.gov/project/cosmo/temp/dstn/index-5200/LITE/' $HOME/.astrometry_cache/5200/ @@ -86,6 +95,12 @@ jobs: df -h || true du -sh $HOME/.cache/httpdirfs/ || true + - name: Save httpdirfs cache data + uses: actions/cache@v3 + with: + path: $HOME/.cache/httpdirfs/ + key: httpdirfs-astrometry-5200-1-2-3-4-4 + From 1559a21fa07eba7cdc972d5b8bd99d608b18282d Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Tue, 17 Oct 2023 00:32:28 +0200 Subject: [PATCH 030/168] improve typehinting --- iop4lib/db/epoch.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/iop4lib/db/epoch.py b/iop4lib/db/epoch.py index b71f3b38..734d1458 100644 --- a/iop4lib/db/epoch.py +++ b/iop4lib/db/epoch.py @@ -34,7 +34,9 @@ import logging logger = logging.getLogger(__name__) - +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from iop4lib.db import RawFit, ReducedFit, Epoch class Epoch(models.Model): """A class representing an epoch. 
@@ -185,9 +187,9 @@ def __str__(self): # creator @staticmethod - def epochname_to_tel_night(epochname): + def epochname_to_tel_night(epochname : str) -> tuple[str, datetime.date]: """Parses an epochname to a telescope and night.""" - + matches = re.findall(r"([a-zA-Z0-9]+)/([0-9]{2,4}-?[0-9]{2}-?[0-9]{2})$", epochname) if len(matches) != 1: From 1398d7f52694bcc1947ecf0faeab90c4854ae718 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Tue, 17 Oct 2023 00:34:08 +0200 Subject: [PATCH 031/168] formatting --- iop4lib/db/rawfit.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/iop4lib/db/rawfit.py b/iop4lib/db/rawfit.py index 2e608db3..34b2d727 100644 --- a/iop4lib/db/rawfit.py +++ b/iop4lib/db/rawfit.py @@ -158,12 +158,16 @@ def _repr_html_(self): def fileloc_to_tel_night_filename(fileloc): """Parses a fileloc to telescope, night and filename.""" from .epoch import Epoch + matches = re.findall(r"(([a-zA-Z0-9]+)/([0-9]{2,4}-?[0-9]{2}-?[0-9]{2}))/([^/\\]+)$", fileloc) + if len(matches) != 1: raise Exception(f"fileloc {fileloc} is not EPOCHNAME/filename") epochname = matches[0][0] - telescope, night = Epoch.epochname_to_tel_night(matches[0][0]) + + telescope, night = Epoch.epochname_to_tel_night(epochname) + filename = matches[0][-1] return telescope, night, filename From 163c746ac2ae438b514c65582b3cc3516657171a Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Tue, 17 Oct 2023 00:35:25 +0200 Subject: [PATCH 032/168] type hinting --- iop4lib/telescopes/osnt090.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/iop4lib/telescopes/osnt090.py b/iop4lib/telescopes/osnt090.py index 23732354..124e6e5e 100644 --- a/iop4lib/telescopes/osnt090.py +++ b/iop4lib/telescopes/osnt090.py @@ -25,6 +25,11 @@ import logging logger = logging.getLogger(__name__) +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from iop4lib.db import RawFit, ReducedFit, Epoch + + class OSNT090(Telescope, metaclass=ABCMeta): # telescope identification From d9b7bdcd65914a570083145f9aa6637bd172796f Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Tue, 17 Oct 2023 00:36:52 +0200 Subject: [PATCH 033/168] set encoding to latin-1 in OSN ftp servers --- iop4lib/telescopes/osnt090.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/iop4lib/telescopes/osnt090.py b/iop4lib/telescopes/osnt090.py index 124e6e5e..d6943665 100644 --- a/iop4lib/telescopes/osnt090.py +++ b/iop4lib/telescopes/osnt090.py @@ -51,7 +51,7 @@ def list_remote_epochnames(cls): try: logger.debug(f"Loging to {cls.name} FTP server") - ftp = ftplib.FTP(cls.ftp_address) + ftp = ftplib.FTP(cls.ftp_address, encoding='latin-1') ftp.login(cls.ftp_user, cls.ftp_password) remote_dirnameL_all = ftp.nlst() ftp.quit() @@ -80,7 +80,7 @@ def list_remote_raw_fnames(cls, epoch): try: logger.debug(f"Loging to {cls.name} FTP server") - ftp = ftplib.FTP(cls.ftp_address) + ftp = ftplib.FTP(cls.ftp_address, encoding='latin-1') ftp.login(cls.ftp_user, cls.ftp_password) logger.debug(f"Changing to OSN dir {epoch.yyyymmdd}") From dd57009fe5843d851d54a33c960867480b177492 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Tue, 17 Oct 2023 00:42:41 +0200 Subject: [PATCH 034/168] add Telescope.list_remote_filelocs method to list all remote filelocs from a list of epochnames --- iop4lib/telescopes/cahat220.py | 40 +++++++++++++++++++++++++++++++++ iop4lib/telescopes/osnt090.py | 38 ++++++++++++++++++++++++++----- iop4lib/telescopes/telescope.py | 13 +++++++---- 3 files changed, 82 insertions(+), 9 deletions(-) 
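A possible use of the new method (a sketch only; it assumes an already configured iop4 environment with valid OSN FTP credentials, and the epoch names are the ones used in the tests):

from iop4lib.telescopes.osnt090 import OSNT090

filelocs = OSNT090.list_remote_filelocs(["OSN-T090/2022-09-08", "OSN-T090/2022-09-18"])
# each entry is a fileloc string of the form "TELESCOPE/NIGHT/filename", ready to be
# compared against the local raw archive or parsed with RawFit.fileloc_to_tel_night_filename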
diff --git a/iop4lib/telescopes/cahat220.py b/iop4lib/telescopes/cahat220.py index b2eecc1e..bc24bb6c 100644 --- a/iop4lib/telescopes/cahat220.py +++ b/iop4lib/telescopes/cahat220.py @@ -24,6 +24,10 @@ import logging logger = logging.getLogger(__name__) +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from iop4lib.db import RawFit, ReducedFit, Epoch + class CAHAT220(Telescope, metaclass=ABCMeta): """ CAHA T220 telescope. @@ -136,3 +140,39 @@ def download_rawfits(cls, rawfits): ftp.quit() except Exception as e: raise Exception(f"Error downloading {rawfits}: {e}.") + + @classmethod + def list_remote_filelocs(cls, epochnames: list[str]) -> list[str]: + from iop4lib.db import Epoch + + ftp = ftplib.FTP(iop4conf.caha_address) + + ftp.login(iop4conf.caha_user, iop4conf.caha_password) + + dirnames = ftp.nlst() + + re_expr = re.compile(r".*\.fits?") + + fileloc_list = list() + + for epochname in epochnames: + + tel, night = Epoch.epochname_to_tel_night(epochname) + yymmdd = night.strftime("%y%m%d") + + if f"{yymmdd}_CAFOS" not in dirnames: + logger.error(f"CAHA remote dir {yymmdd}_CAFOS does not exist.") + continue + + try: + ftp.cwd(f"/{yymmdd}_CAFOS") + + fileloc_list.extend([f"{epochname}/{fname}" for fname in ftp.nlst() if re_expr.search(fname) and fname != '.' and fname != '..']) + + + except Exception as e: + logger.error(f"Error listing CAHA remote dir for {epochname}: {e}.") + + ftp.quit() + + return fileloc_list \ No newline at end of file diff --git a/iop4lib/telescopes/osnt090.py b/iop4lib/telescopes/osnt090.py index d6943665..8ab252d8 100644 --- a/iop4lib/telescopes/osnt090.py +++ b/iop4lib/telescopes/osnt090.py @@ -91,11 +91,6 @@ def list_remote_raw_fnames(cls, epoch): logger.debug(f"Total of {len(remote_fnameL_all)} files in OSN {epoch.epochname}: {remote_fnameL_all}.") - # if iop4conf.osn_download_all_then_check_owner: - # remote_fnameL = remote_fnameL_all - # else: - # remote_fnameL = [s for s in remote_fnameL_all if re.compile('|'.join(iop4conf.osn_fnames_patterns)).search(s)] # Filter by filename pattern (get only our files) - remote_fnameL = [s for s in remote_fnameL_all if re.compile('|'.join(iop4conf.osn_fnames_patterns)).search(s)] # Filter by filename pattern (get only our files) logger.debug(f"Filtered to {len(remote_fnameL)} files in OSN {epoch.epochname}.") @@ -127,6 +122,39 @@ def download_rawfits(cls, rawfits): except Exception as e: raise Exception(f"Error downloading file {rawfit.filename}: {e}.") + @classmethod + def list_remote_filelocs(cls, epochnames: list[str]) -> list[str]: + + from iop4lib.db import Epoch + + ftp = ftplib.FTP(cls.ftp_address, cls.ftp_user, cls.ftp_password, encoding='latin-1') + + re_expr = re.compile('|'.join(iop4conf.osn_fnames_patterns)) + + dirnames = ftp.nlst() + + fileloc_list = list() + + for epochname in epochnames: + + tel, night = Epoch.epochname_to_tel_night(epochname) + yyyymmdd = night.strftime("%Y%m%d") + + if yyyymmdd not in dirnames: + logger.warning(f"Could not find {yyyymmdd} in {cls.name} remote.") + continue + + try: + + fileloc_list.extend([f"{epochname}/{fname}" for fname in ftp.nlst(yyyymmdd) if re_expr.search(fname) and fname != '.' 
and fname != '..']) + + except Exception as e: + logger.error(f"Error listing OSN remote dir for {epochname}: {e}.") + + ftp.quit() + + return fileloc_list + @classmethod def check_telescop_kw(cls, rawfit): r""" Subclassed to account for DIPOL files, that have empty TELESCOP keyword as of 2023-10-11 diff --git a/iop4lib/telescopes/telescope.py b/iop4lib/telescopes/telescope.py index 60c539ba..5839c0b7 100644 --- a/iop4lib/telescopes/telescope.py +++ b/iop4lib/telescopes/telescope.py @@ -26,7 +26,7 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: - from iop4lib.db import RawFit, ReducedFit + from iop4lib.db import RawFit, ReducedFit, Epoch class Telescope(metaclass=ABCMeta): """ Base class for telescopes. @@ -78,17 +78,22 @@ def telescop_kw(self): @classmethod @abstractmethod - def list_remote_raw_fnames(cls, epoch): + def list_remote_raw_fnames(cls, epoch: 'Epoch') -> list[str] : pass @classmethod @abstractmethod - def download_rawfits(cls, epoch): + def download_rawfits(cls, epoch: 'Epoch') -> None : pass @classmethod @abstractmethod - def list_remote_epochnames(cls): + def list_remote_epochnames(cls) -> list[str] : + pass + + @classmethod + @abstractmethod + def list_remote_filelocs(cls, epochnames: list[str]) -> list[str] : pass # Class methods (you should be using these only from this Telescope class, not from subclasses) From b096eb1ec51fffa2b4ea6e349f3e61a982b74780 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Wed, 18 Oct 2023 22:17:24 +0200 Subject: [PATCH 035/168] plot.py: tidy up a bit --- iop4api/views/plot.py | 35 ++++++++++++++--------------------- 1 file changed, 14 insertions(+), 21 deletions(-) diff --git a/iop4api/views/plot.py b/iop4api/views/plot.py index 10ce8b37..cc2eed11 100644 --- a/iop4api/views/plot.py +++ b/iop4api/views/plot.py @@ -88,28 +88,21 @@ def f_x1_to_x2(x1_val): # choose the x and y if qs.count() > 0: vals = get_column_values(qs, column_names) + else: + vals = {k:np.array([]) for k in column_names} - vals["instrument"] = np.array(vals["instrument"]) - vals["id"] = np.array(vals["id"]) - vals["juliandate"] = np.array(vals["juliandate"]) - vals["mag"] = np.array(vals["mag"]) - vals["mag_err"] = np.array(vals["mag_err"]) - vals["p"] = np.array(vals["p"]) - vals["p_err"] = np.array(vals["p_err"]) - vals["chi"] = np.array(vals["chi"]) - vals["chi_err"] = np.array(vals["chi_err"]) - - if enable_iop3: - vals["instrument"] = np.append(vals["instrument"], list(map(lambda x: "IOP3-"+x, iop3_df["Telescope"]))) - vals["id"] = np.append(vals["id"], -np.arange(len(iop3_df))) - vals["juliandate"] = np.append(vals["juliandate"], Time(iop3_df["mjd_obs"], format="mjd").jd) - vals["mag"] = np.append(vals["mag"], iop3_df['Mag']) - vals["mag_err"] = np.append(vals["mag_err"], iop3_df['dMag']) - vals["p"] = np.append(vals["p"], iop3_df['P']/100) - vals["p_err"] = np.append(vals["p_err"], iop3_df['dP']/100) - vals["chi"] = np.append(vals["chi"], iop3_df['Theta']) - vals["chi_err"] = np.append(vals["chi_err"], iop3_df['dTheta']) - + if enable_iop3: + vals["instrument"] = np.append(vals["instrument"], list(map(lambda x: "IOP3-"+x, iop3_df["Telescope"]))) + vals["id"] = np.append(vals["id"], -np.arange(len(iop3_df))) + vals["juliandate"] = np.append(vals["juliandate"], Time(iop3_df["mjd_obs"], format="mjd").jd) + vals["mag"] = np.append(vals["mag"], iop3_df['Mag']) + vals["mag_err"] = np.append(vals["mag_err"], iop3_df['dMag']) + vals["p"] = np.append(vals["p"], iop3_df['P']/100) + vals["p_err"] = np.append(vals["p_err"], iop3_df['dP']/100) + vals["chi"] = 
np.append(vals["chi"], iop3_df['Theta']) + vals["chi_err"] = np.append(vals["chi_err"], iop3_df['dTheta']) + + if len(vals['id']) > 0: pks = vals['id'] x1 = Time(vals['juliandate'], format='jd').mjd x2 = f_x1_to_x2(x1) From ebf8b478c8c575906ae556b1b700931799ae1ac3 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Wed, 18 Oct 2023 22:24:04 +0200 Subject: [PATCH 036/168] config and telescopes: improve file name matching --- config/config.example.yaml | 51 +++++++++++++++++++++++++++------- iop4lib/config.py | 4 +-- iop4lib/telescopes/cahat220.py | 8 +++--- iop4lib/telescopes/osnt090.py | 8 +++--- 4 files changed, 51 insertions(+), 20 deletions(-) diff --git a/config/config.example.yaml b/config/config.example.yaml index 0d5e0cb3..d13f40d5 100644 --- a/config/config.example.yaml +++ b/config/config.example.yaml @@ -1,43 +1,74 @@ # This is an example of the config file, copy it to config/config.yaml and edit it. # IOP4LIB will attempt to use config/config.yaml if it exists, otherwise it will use this one. -# GENERAL +############### +### GENERAL ### +############### + datadir: ~/.iop4data/ # Path to iop4data data folder. set_rawdata_readonly: False # True / False (sets raw fits file to readonly when downloading them or creating a RawFit object). db_path: ~/.iop4data/iop4.db # Path to iop4 sqlite database file. astrometry_cache_path: ~/.astrometry_cache/ # Path to store the astromery index files. max_concurrent_threads: 4 # Number of threads / processes to use (e.g. 4). -# RAY CLUSTER +################### +### RAY CLUSTER ### +################### + ray_use_cluster: False # True/False (use ray for parallelization), let it to false if you have not configured it. ray_cluster_address: null # Aaddress for the cluster, needs ssh keys for current user; e.g. 'user@address'. ray_cluster_config: null # Path to ray cluster config file, e.g. /path/to/iop4/priv.rayconfig.yaml'. ray_db_path: null # Path in ray cluster, e.g. '~/iop4data/iop4.db'. ray_datadir: null # Path in ray cluster, e.g. '~/iop4data/'. -# GRAPHICS +################ +### GRAPHICS ### +################ + mplt_default_dpi: 100 # dpi for matplotlib (e.g. 100) -# LOGGING +############### +### LOGGING ### +############### + log_fname: ~/.iop4data/iop4.log # log_date_format: '%Y-%m-%d %H:%M:%S' # log_format: '%(asctime)s - %(name)s [%(filename)s:%(lineno)d] - %(levelname)s - %(message)s' # log_level: 20 # . Possible values are: 10 (DEBUG), 20 (INFO), 30 (WARNING), 40 (ERROR), 50 (CRITICAL). -# CAHA +############ +### CAHA ### +############ + caha_address: null # caha_password: null # caha_user: null # -# OSN +########### +### OSN ### +########### + osn_t090_address: null # osn_t090_user: null # osn_t090_password: null # + osn_t150_address: null # osn_t150_user: null # osn_t150_password: null # + +# List of names to download from OSN. +# The names will be introduced in osn_fnames_patterns as 'name.*\.fi?ts?$' (see below). + osn_source_list_path: null # -osn_fnames_patterns: # List of regex patterns. Files matching any of these patterns will be downloaded from OSN. For example, (^BLLac.*\.fits?$) will match file names starting with BLLac and ending in .fit or .fits. You can include your initials here, if you want to download only your files and the observers include it in the file name, e.g. (.*mfz.*\.fits?$) for files containing mfz in the name could belong to Menganito Fulano Zutano. 
-- (^Flat.*\.fits?$) -- (^Bias.*\.fits?$) -# osn_download_all_then_check_owner: null # Alternatively, for OSN, just download all files (^.*\.fits?$) and then remove those that contain this word in the OBSERVER keyword in the header (except for files that contain 'bias' or 'flat' in the name). Leave null for default behaviour. \ No newline at end of file + +# List of (case insentitive) regex patterns. +# Files matching any of these patterns will be downloaded from OSN. +# For example, (^BLLac.*\.fits?$) will match file names starting with BLLac +# and ending in .fit, .fits, or .fts. You can include your initials here, +# If you want to download only your files and the observers include it in the +# filename, e.g. (.*mfz.*\.fits?$) for files containing mfz in the name +# could belong to Menganito Fulano Zutano. The re.IGNORECASE flag will be passed. + +osn_fnames_patterns: +- (^Flat.*\.fi?ts?$) +- (^Bias.*\.fi?ts?$) \ No newline at end of file diff --git a/iop4lib/config.py b/iop4lib/config.py index 5a9f113a..0ba8893b 100644 --- a/iop4lib/config.py +++ b/iop4lib/config.py @@ -120,11 +120,11 @@ def configure(self, config_path=None, config_db=False, gonogui=True, jupytermode if not os.path.exists(self.datadir): os.makedirs(self.datadir) - # Load OSN names from external file if indicated + # Load OSN names from external file if indicated, load them into patterns like name*.fit, name*.fits, or name*.fts. if self.osn_source_list_path is not None and os.path.exists(self.osn_source_list_path): with open(self.osn_source_list_path, 'r') as f: - self.osn_fnames_patterns += [fr"(^{s[:-1]}.*\.fits?$)" for s in f.readlines() if s[0] != '#'] + self.osn_fnames_patterns += [fr"(^{s[:-1]}.*\.fi?ts?$)" for s in f.readlines() if s[0] != '#'] if gonogui: matplotlib.use("Agg") diff --git a/iop4lib/telescopes/cahat220.py b/iop4lib/telescopes/cahat220.py index bc24bb6c..ea3d3240 100644 --- a/iop4lib/telescopes/cahat220.py +++ b/iop4lib/telescopes/cahat220.py @@ -49,6 +49,8 @@ class CAHAT220(Telescope, metaclass=ABCMeta): # telescope specific properties + fnames_re_expr = re.compile(r".*\.fi?ts?", flags=re.IGNORECASE) + # telescope specific methods @classmethod @@ -109,7 +111,7 @@ def list_remote_raw_fnames(cls, epoch): logger.debug(f"Total of {len(remote_fnameL_all)} files in CAHA {epoch.epochname}.") - remote_fnameL = [s for s in remote_fnameL_all if re.compile(r".*\.fits?").search(s)] # Filter by filename pattern (get only our files) + remote_fnameL = [s for s in remote_fnameL_all if cls.fnames_re_expr.search(s)] logger.debug(f"Filtered to {len(remote_fnameL)} *.fit(s) files in CAHA {epoch.epochname}.") @@ -151,8 +153,6 @@ def list_remote_filelocs(cls, epochnames: list[str]) -> list[str]: dirnames = ftp.nlst() - re_expr = re.compile(r".*\.fits?") - fileloc_list = list() for epochname in epochnames: @@ -167,7 +167,7 @@ def list_remote_filelocs(cls, epochnames: list[str]) -> list[str]: try: ftp.cwd(f"/{yymmdd}_CAFOS") - fileloc_list.extend([f"{epochname}/{fname}" for fname in ftp.nlst() if re_expr.search(fname) and fname != '.' and fname != '..']) + fileloc_list.extend([f"{epochname}/{fname}" for fname in ftp.nlst() if cls.fnames_re_expr.search(fname) and fname != '.' 
and fname != '..']) except Exception as e: diff --git a/iop4lib/telescopes/osnt090.py b/iop4lib/telescopes/osnt090.py index 8ab252d8..2e856cce 100644 --- a/iop4lib/telescopes/osnt090.py +++ b/iop4lib/telescopes/osnt090.py @@ -44,6 +44,8 @@ class OSNT090(Telescope, metaclass=ABCMeta): ftp_user = iop4conf.osn_t090_user ftp_password = iop4conf.osn_t090_password + fnames_re_expr = re.compile('|'.join(iop4conf.osn_fnames_patterns), flags=re.IGNORECASE) + # telescope specific methods @classmethod @@ -91,7 +93,7 @@ def list_remote_raw_fnames(cls, epoch): logger.debug(f"Total of {len(remote_fnameL_all)} files in OSN {epoch.epochname}: {remote_fnameL_all}.") - remote_fnameL = [s for s in remote_fnameL_all if re.compile('|'.join(iop4conf.osn_fnames_patterns)).search(s)] # Filter by filename pattern (get only our files) + remote_fnameL = [s for s in remote_fnameL_all if cls.fnames_re_expr.search(s)] # Filter by filename pattern (get only our files) logger.debug(f"Filtered to {len(remote_fnameL)} files in OSN {epoch.epochname}.") @@ -129,8 +131,6 @@ def list_remote_filelocs(cls, epochnames: list[str]) -> list[str]: ftp = ftplib.FTP(cls.ftp_address, cls.ftp_user, cls.ftp_password, encoding='latin-1') - re_expr = re.compile('|'.join(iop4conf.osn_fnames_patterns)) - dirnames = ftp.nlst() fileloc_list = list() @@ -146,7 +146,7 @@ def list_remote_filelocs(cls, epochnames: list[str]) -> list[str]: try: - fileloc_list.extend([f"{epochname}/{fname}" for fname in ftp.nlst(yyyymmdd) if re_expr.search(fname) and fname != '.' and fname != '..']) + fileloc_list.extend([f"{epochname}/{fname}" for fname in ftp.nlst(yyyymmdd) if cls.fnames_re_expr.search(fname) and fname != '.' and fname != '..']) except Exception as e: logger.error(f"Error listing OSN remote dir for {epochname}: {e}.") From 2ca357646f4324a3d1b78493a13490851f118c14 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Thu, 19 Oct 2023 13:50:09 +0200 Subject: [PATCH 037/168] more changes for instrument class refactoring --- docs/citations.bib | 4 ++++ iop4lib/db/reducedfit.py | 16 +++++++++------- iop4lib/instruments/andor_cameras.py | 9 +++++++-- iop4lib/instruments/cafos.py | 3 ++- iop4lib/instruments/instrument.py | 24 +++++++++++++++++++++++- iop4lib/telescopes/osnt150.py | 14 -------------- iop4lib/utils/astrometry.py | 2 +- 7 files changed, 46 insertions(+), 26 deletions(-) diff --git a/docs/citations.bib b/docs/citations.bib index 38b4853a..abffda1b 100644 --- a/docs/citations.bib +++ b/docs/citations.bib @@ -45,3 +45,7 @@ @article{dipol:2020 pages = "A46", } +@online{astropy:ccd_data_reduction_guide, + title = {CCD Data Reduction Guide}, + url = {https://www.astropy.org/ccd-reduction-and-photometry-guide/}, +} \ No newline at end of file diff --git a/iop4lib/db/reducedfit.py b/iop4lib/db/reducedfit.py index 4772e7ec..702b5edc 100644 --- a/iop4lib/db/reducedfit.py +++ b/iop4lib/db/reducedfit.py @@ -287,9 +287,8 @@ def astrometric_calibration(self): If the are both ordinary and extraordinary sources in the field, one WCS will be built for each, and the will be saved in the first and second extensions of the FITS file. 
""" - from iop4lib.utils.astrometry import build_wcs - build_wcs_result = build_wcs(self) + build_wcs_result = Instrument.by_name(self.instrument).build_wcs(self) if build_wcs_result['success']: @@ -362,25 +361,28 @@ def header_hintcoord(self): def header_objecthint(self): return self.rawfit.header_objecthint + + # REDUCTION METHODS + + ## Delegated to telescopes or instrument classes + def get_astrometry_position_hint(self, allsky=False, n_field_width=1.5): return Instrument.by_name(self.instrument).get_astrometry_position_hint(self.rawfit, allsky=allsky, n_field_width=n_field_width) def get_astrometry_size_hint(self): return Instrument.by_name(self.instrument).get_astrometry_size_hint(self.rawfit) - - - # REDUCTION METHODS - - ## Delegated to telescopes def compute_aperture_photometry(self, *args, **kwargs): + """ Delegated to the instrument. """ return Instrument.by_name(self.instrument).compute_aperture_photometry(self, *args, **kwargs) def compute_relative_photometry(self, *args, **kwargs): + """ Delegated to the instrument. """ return Instrument.by_name(self.instrument).compute_relative_photometry(self, *args, **kwargs) @classmethod def compute_relative_polarimetry(cls, polarimetry_group, *args, **kwargs): + """ Delegated to the instrument. """ if not all([redf.telescope == polarimetry_group[0].telescope for redf in polarimetry_group]): raise Exception("All reduced fits in a polarimetry group must be from the same telescope") diff --git a/iop4lib/instruments/andor_cameras.py b/iop4lib/instruments/andor_cameras.py index 26da674e..9e35cd9c 100644 --- a/iop4lib/instruments/andor_cameras.py +++ b/iop4lib/instruments/andor_cameras.py @@ -160,10 +160,13 @@ def get_astrometry_position_hint(cls, rawfit, allsky=False, n_field_width=1.5): def get_astrometry_size_hint(cls, rawfit): """ Get the size hint for this telescope / rawfit. - According to OSN T090 cameras information (https://www.osn.iaa.csic.es/page/camaras-ccdt150-y-ccdt90) + According to OSN T090 camera information (https://www.osn.iaa.csic.es/page/camaras-ccdt150-y-ccdt90) the camera pixels are 0.387as/px and it has a field of view of 13,20' x 13,20'. So we provide close values for the hint. If the files are 1x1 it will be 0.387as/px, if 2x2 it will be twice. + According to OSN T0150 camera information (https://www.osn.iaa.csic.es/page/camaras-ccdt150-y-ccdt90) + camera pixels are 0.232as/px and it has a field of view of 7.92' x 7.92'. + If the files are 1x1 it will be that, if they are 2x2 it will be twice. """ if rawfit.header['NAXIS1'] == 2048: @@ -248,7 +251,7 @@ def compute_relative_polarimetry(cls, polarimetry_group): logger.debug(f"Computing aperture photometries for the {len(polarimetry_group)} reducedfits in the group with target {aperpix:.1f}.") for reducedfit in polarimetry_group: - reducedfit.compute_aperture_photometry(aperpix, r_in, r_out) + cls.compute_aperture_photometry(reducedfit, aperpix, r_in, r_out) # 2. 
Compute relative polarimetry for each source (uses the computed aperture photometries) @@ -403,6 +406,7 @@ class AndorT90(Andor): field_width_arcmin = 13.2 + field_height_arcmin = 13.2 arcsec_per_pix = 0.387 gain_e_adu = 4.5 @@ -416,3 +420,4 @@ class AndorT150(Andor): arcsec_per_pix = 0.232 gain_e_adu = 4.5 field_width_arcmin = 7.92 + field_height_arcmin = 7.92 diff --git a/iop4lib/instruments/cafos.py b/iop4lib/instruments/cafos.py index b333f9a6..122fa1d0 100644 --- a/iop4lib/instruments/cafos.py +++ b/iop4lib/instruments/cafos.py @@ -29,6 +29,7 @@ class CAFOS(Instrument): arcsec_per_pix = 0.530 gain_e_adu = 1.45 field_width_arcmin = 34.0 + field_height_arcmin = 34.0 @classmethod @@ -224,7 +225,7 @@ def compute_relative_polarimetry(cls, polarimetry_group): logger.debug(f"Computing aperture photometries for the {len(polarimetry_group)} reducedfits in the group with target aperpix {aperpix:.1f}.") for reducedfit in polarimetry_group: - reducedfit.compute_aperture_photometry(aperpix, r_in, r_out) + cls.compute_aperture_photometry(reducedfit, aperpix, r_in, r_out) # 2. Compute relative polarimetry for each source (uses the computed aperture photometries) diff --git a/iop4lib/instruments/instrument.py b/iop4lib/instruments/instrument.py index 424c0618..81eac5ec 100644 --- a/iop4lib/instruments/instrument.py +++ b/iop4lib/instruments/instrument.py @@ -148,7 +148,29 @@ def get_header_objecthint(self, rawfit): return AstroSource.objects.filter(name__contains=matchs[0]).first() else: return None + + @classmethod + @abstractmethod + def get_astrometry_position_hint(cls, rawfit, allsky=False, n_field_width=1.5): + """ Get the position hint from the FITS header as an astrometry.PositionHint object. """ + pass + + @classmethod + @abstractmethod + def get_astrometry_size_hint(cls, rawfit): + """ Get the size hint for this telescope / rawfit.""" + pass + + @classmethod + def build_wcs(self, reducedfit: 'ReducedFit'): + """ Build a WCS for a reduced fit from this instrument. + By default (Instrument class), this will just call the build_wcs from iop4lib.utils.astrometry. + """ + from iop4lib.utils.astrometry import build_wcs + return build_wcs(reducedfit) + + @classmethod def compute_aperture_photometry(cls, redf, aperpix, r_in, r_out): @@ -225,7 +247,7 @@ def compute_relative_photometry(cls, redf: 'ReducedFit') -> None: logger.debug(f"{redf}: computing aperture photometries for {redf}.") - redf.compute_aperture_photometry(aperpix, r_in, r_out) + cls.compute_aperture_photometry(redf, aperpix, r_in, r_out) # 2. Compute relative polarimetry for each source (uses the computed aperture photometries) diff --git a/iop4lib/telescopes/osnt150.py b/iop4lib/telescopes/osnt150.py index 5972cf2d..4026fa4f 100644 --- a/iop4lib/telescopes/osnt150.py +++ b/iop4lib/telescopes/osnt150.py @@ -43,20 +43,6 @@ class OSNT150(OSNT090, Telescope, metaclass=ABCMeta): # telescope specific methods - @classmethod - def get_astrometry_size_hint(cls, rawfit): - r""" Get the size hint for this telescope / rawfit. - - According to OSN T0150 camera information (https://www.osn.iaa.csic.es/page/camaras-ccdt150-y-ccdt90) - camera pixels are 0.232as/px and it has a field of view of 7.92' x 7.92'. - If the files are 1x1 it will be that, if they are 2x2 it will be twice. 
- """ - - if rawfit.header['NAXIS1'] == 2048: - return astrometry.SizeHint(lower_arcsec_per_pixel=0.95*cls.andort150_arcsec_per_pix, upper_arcsec_per_pixel=1.05*cls.andort150_arcsec_per_pix) - elif rawfit.header['NAXIS1'] == 1024: - return astrometry.SizeHint(lower_arcsec_per_pixel=2*0.95*cls.andort150_arcsec_per_pix, upper_arcsec_per_pixel=2*1.05*cls.andort150_arcsec_per_pix) - @classmethod def compute_relative_photometry(cls, rawfit): diff --git a/iop4lib/utils/astrometry.py b/iop4lib/utils/astrometry.py index ea43d683..b8bb1f11 100644 --- a/iop4lib/utils/astrometry.py +++ b/iop4lib/utils/astrometry.py @@ -133,7 +133,7 @@ def build_wcs_params_shotgun(redf, shotgun_params_kwargs=None, hard=False): - Implement a more robust way to choose the parameters for source extraction such that the astrometry solver works with less attempts. - Explore other detectors and solvers if necessary to improve speed, sucess rate and accuracy. - + - Use pre-computed pair distances. """ param_dicts_L = [] From 07f25361bc6eda3cef80bbaa6222aeae3ffa10dd Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Thu, 19 Oct 2023 13:57:27 +0200 Subject: [PATCH 038/168] dipol: classify obsmode, override header obj hint --- iop4lib/instruments/dipol.py | 54 +++++++++++++++++++++++++----------- 1 file changed, 38 insertions(+), 16 deletions(-) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index d0aab1e1..b8ea0e47 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -5,6 +5,7 @@ # django imports # other imports +import re import astrometry # iop4lib imports @@ -16,15 +17,21 @@ logger = logging.getLogger(__name__) +import typing +if typing.TYPE_CHECKING: + from iop4lib.db import RawFit, ReducedFit, Epoch + class DIPOL(Instrument): name = "DIPOL" instrument_kw = "ASI Camera (1)" arcsec_per_pix = 0.134 + field_width_arcmin = 9.22 + field_height_arcmin = 6.28 @classmethod - def classify_juliandate_rawfit(cls, rawfit): + def classify_juliandate_rawfit(cls, rawfit: 'RawFit'): """ DIPOL files have JD keyword """ @@ -34,7 +41,7 @@ def classify_juliandate_rawfit(cls, rawfit): @classmethod - def classify_imgtype_rawfit(cls, rawfit): + def classify_imgtype_rawfit(cls, rawfit: 'RawFit'): """ DIPOL files have IMAGETYP keyword: Light Frame, Bias Frame @@ -53,7 +60,7 @@ def classify_imgtype_rawfit(cls, rawfit): raise ValueError @classmethod - def classify_band_rawfit(cls, rawfit): + def classify_band_rawfit(cls, rawfit: 'RawFit'): """ OSN Files have no FILTER keyword if they are BIAS, FILTER=Clear if they are FLAT, and FILTER=FilterName if they are LIGHT. For our DB, we have R, U, ..., None, ERROR. @@ -77,26 +84,41 @@ def classify_band_rawfit(cls, rawfit): @classmethod - def classify_obsmode_rawfit(cls, rawfit): + def classify_obsmode_rawfit(cls, rawfit: 'RawFit'): + """ + As of 2023-10-28, DIPOL polarimetry files have NOTES keyword with the angle like 'xxxx deg', + photometry files have empty NOTES keyword. """ - In OSN Andor Polarimetry, we only have polarimetry for filter R, and it is indicated as R_45, R0, R45, R90 (-45, 0, 45 and 90 degrees). They correspond - to the different angles of the polarimeter. - For photometry, the filter keyword willl be simply the letter R, U, etc. 
+ if 'NOTES' in rawfit.header and not 'deg' in rawfit.header['NOTES']: + rawfit.obsmode = OBSMODES.PHOTOMETRY + else: + rawfit.obsmode = OBSMODES.POLARIMETRY + try: + rawfit.rotangle = float(rawfit.header['NOTES'].split(' ')[0]) + except Exception as e: + logger.error(f"Error parsing NOTES keyword for {rawfit.fileloc} as a float: {e}.") - The values for angles are -45, 0, 45 and 90. - Lately we have seen "R-45" instead of "R_45", so we have to take care of that too. - """ - from iop4lib.db.rawfit import RawFit - import re - - # raise NotImplementedError("DIPOL obsmode not implemented yet") - logger.error(f"OSN DIPOL obsmode not implemented yet.") + @classmethod + def get_header_objecthint(self, rawfit): + r""" Get a hint for the AstroSource in this image from the header. OBJECT is a standard keyword. Return None if none found. + + Overriden for DIPOL, which are using the other_name field. + """ + + from iop4lib.db import AstroSource + matchs = rawfit.header["OBJECT"].split('_')[0] + + if len(matchs) > 0: + return AstroSource.objects.filter(other_name__icontains=matchs[0]).first() + else: + return None + @classmethod - def get_astrometry_size_hint(cls, rawfit): + def get_astrometry_size_hint(cls, rawfit: 'RawFit'): """ Get the size hint for this telescope / rawfit. For DIPOL in OSN-T090, according to preliminary investigation of OSN crew is: From 198dcfe5c8a6e0be11a79a8b6e561ace6109bcb9 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Thu, 19 Oct 2023 13:57:41 +0200 Subject: [PATCH 039/168] dipol: override build_wcs --- iop4lib/instruments/dipol.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index b8ea0e47..f83179ee 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -134,4 +134,19 @@ def get_astrometry_size_hint(cls, rawfit: 'RawFit'): Así que como mucho se produce un error de ± 0.3º en las imágenes, y el punto cero es de 2.5º. """ - return astrometry.SizeHint(lower_arcsec_per_pixel=0.95*cls.arcsec_per_pix, upper_arcsec_per_pixel=1.05*cls.arcsec_per_pix) \ No newline at end of file + return astrometry.SizeHint(lower_arcsec_per_pixel=0.95*cls.arcsec_per_pix, upper_arcsec_per_pixel=1.05*cls.arcsec_per_pix) + + @classmethod + def build_wcs(self, reducedfit: 'ReducedFit'): + """ Override Instrument build_wcs. + + While for PHOTOMETRY observations, DIPOL has a wide field which can be astrometrically calibrated, + POLARIMETRY files are small with only the source field ordinary and extraordianty images in the center (to save up space). + In some ocassions, there might be some close source also in the field. + + Therefore, to calibrate polarimetry files, we just give it a WCS centered on the source. 
+ """ + if reducedfit.obsmode == OBSMODES.PHOTOMETRY: + return super().build_wcs(reducedfit) + elif reducedfit.obsmode == OBSMODES.POLARIMETRY: + raise NotImplementedError("Polarimetry WCS not implemented yet for DIPOL") \ No newline at end of file From 87e92e268b6c83ce96afb09871e08b09059d4ef4 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Thu, 19 Oct 2023 18:14:15 +0200 Subject: [PATCH 040/168] ci.yml: download test dataset specific version --- .github/workflows/ci.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c1b5fd55..3b2e2010 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -84,7 +84,9 @@ jobs: - name: Download test data env: TEST_DATA_PASSWORD: ${{ secrets.test_data_password }} - run: wget --post-data "pass=$TEST_DATA_PASSWORD" 'https://vhega.iaa.es/iop4/iop4testdata.tar.gz' -O $HOME/iop4testdata.tar.gz + run: | + export TESTDATA_MD5SUM=`grep 'TESTDATA_MD5SUM' ./tests/conftest.py | awk -F"'" '{print $2}' | tr -d)` + wget --post-data "pass=$TEST_DATA_PASSWORD" "https://vhega.iaa.es/iop4/iop4testdata.tar.gz?md5sum=$TESTDATA_MD5SUM" -O $HOME/iop4testdata.tar.gz - name: Run tests (with -o log_cli=true -o log_cli_level=DEBUG to debug CI actions) run: pytest -o log_cli=true -o log_cli_level=DEBUG -vxs tests/ From 2e6d25cd38631debce1121eb662d92cb33aeb73f Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Thu, 19 Oct 2023 18:25:56 +0200 Subject: [PATCH 041/168] ci.yml: typo --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3b2e2010..751793c7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -85,7 +85,7 @@ jobs: env: TEST_DATA_PASSWORD: ${{ secrets.test_data_password }} run: | - export TESTDATA_MD5SUM=`grep 'TESTDATA_MD5SUM' ./tests/conftest.py | awk -F"'" '{print $2}' | tr -d)` + export TESTDATA_MD5SUM=`grep 'TESTDATA_MD5SUM' ./tests/conftest.py | awk -F"'" '{print $2}' | tr -d '\n'` wget --post-data "pass=$TEST_DATA_PASSWORD" "https://vhega.iaa.es/iop4/iop4testdata.tar.gz?md5sum=$TESTDATA_MD5SUM" -O $HOME/iop4testdata.tar.gz - name: Run tests (with -o log_cli=true -o log_cli_level=DEBUG to debug CI actions) From 765878897c127f5af6be736124ad67b9b5f5ae42 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Thu, 19 Oct 2023 19:02:35 +0200 Subject: [PATCH 042/168] ci.yml: attempt again caching astrometry files --- .github/workflows/ci.yml | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 751793c7..83d8432f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -68,12 +68,10 @@ jobs: - name: Try to restore httpdirfs cache data # this should make subsuquent commits in the same PR faster - uses: actions/cache@v3 + uses: actions/cache@/restore@v3 with: path: $HOME/.cache/httpdirfs/ - key: httpdirfs-astrometry-5200-1-2-3-4-4 - restore-keys: | - httpdirfs-astrometry-5200-1-2-3-4-4 + key: httpdirfs-astrometry-5200-1-2-3-4 - name: Mount astrometry index file in default location run: mkdir -p $HOME/.astrometry_cache/5200 && httpdirfs --cache 'https://portal.nersc.gov/project/cosmo/temp/dstn/index-5200/LITE/' $HOME/.astrometry_cache/5200/ @@ -98,10 +96,10 @@ jobs: du -sh $HOME/.cache/httpdirfs/ || true - name: Save httpdirfs cache data - uses: actions/cache@v3 + uses: actions/cache/save@v3 with: path: $HOME/.cache/httpdirfs/ - key: 
httpdirfs-astrometry-5200-1-2-3-4-4 + key: httpdirfs-astrometry-5200-1-2-3-4 From a8d4647082f7a8518fc782d664caa142e7ee2f03 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Fri, 20 Oct 2023 12:08:42 +0200 Subject: [PATCH 043/168] README.md: typo --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 979e662f..7a237f93 100644 --- a/README.md +++ b/README.md @@ -56,7 +56,7 @@ To run the tests, first follow the previous steps to configure IOP4. At the mome ``` If it is the first time executing IOP4, the astrometry index files will be downloaded to `astrometry_cache_path` (see `config/config.example.yaml`). This will take some time and a few tens of GB, depending on the exact version. -**Warning**: in some macOS systems, the process [might hang up](https://github.com/juanep97/iop4/issues/14#issuecomment-1748465276). Execute `export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES` or add that line to you shell init script. +**Warning**: in some macOS systems, the process [might hang up](https://github.com/juanep97/iop4/issues/14#issuecomment-1748465276). Execute `export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES` or add that line to your shell init script. ## Usage From fab01fe87640081155fb99cb453007dbcd34cb19 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Fri, 20 Oct 2023 14:55:37 +0200 Subject: [PATCH 044/168] add support for darks and masterdarks frames --- iop4admin/modeladmins/masterdark.py | 47 ++++++ iop4admin/sites.py | 2 + iop4lib/db/__init__.py | 1 + iop4lib/db/epoch.py | 79 ++++------ iop4lib/db/masterbias.py | 28 ++-- iop4lib/db/masterdark.py | 219 ++++++++++++++++++++++++++++ iop4lib/db/masterflat.py | 50 ++++--- iop4lib/db/rawfit.py | 94 ++++-------- iop4lib/db/reducedfit.py | 65 ++------- iop4lib/enums.py | 1 + iop4lib/instruments/dipol.py | 19 ++- iop4lib/telescopes/osnt090.py | 9 +- iop4lib/utils/parallel.py | 5 +- 13 files changed, 414 insertions(+), 205 deletions(-) create mode 100644 iop4admin/modeladmins/masterdark.py create mode 100644 iop4lib/db/masterdark.py diff --git a/iop4admin/modeladmins/masterdark.py b/iop4admin/modeladmins/masterdark.py new file mode 100644 index 00000000..403f40c8 --- /dev/null +++ b/iop4admin/modeladmins/masterdark.py @@ -0,0 +1,47 @@ +from django.contrib import admin + +from django.utils.html import format_html +from django.urls import reverse +from django.utils.safestring import mark_safe + +from iop4api.filters import * +from iop4api.models import * +from .fitfile import AdminFitFile + +import logging +logger = logging.getLogger(__name__) + +class AdminMasterDark(AdminFitFile): + model = MasterDark + list_display = ['id', 'telescope', 'night', 'instrument', 'imgsize', 'exptime', 'get_masterbias', 'get_built_from', 'options'] + + @admin.display(description='Options') + def options(self, obj): + url_details = reverse('iop4admin:iop4api_masterdark_details', args=[obj.id]) + url_viewer= reverse('iop4admin:iop4api_masterdark_details', args=[obj.id]) + return format_html(rf'details / advanced viewer') + + @admin.display(description='Telescope') + def telescope(self, obj): + return obj.epoch.telescope + + @admin.display(description='Night') + def night(self, obj): + return obj.epoch.night + + @admin.display(description='MasterBias') + def get_masterbias(self, obj): + self.allow_tags = True + if obj.masterbias is None: + return "-" + url = reverse('iop4admin:%s_%s_changelist' % (MasterBias._meta.app_label, MasterBias._meta.model_name)) + f"?id={obj.masterbias.id}" + return 
mark_safe(rf'{obj.masterbias.id}') + + @admin.display(description="Built from") + def get_built_from(self, obj): + self.allow_tags = True + link_L = list() + for rawfit in obj.rawfits.all(): + url = reverse('iop4admin:%s_%s_changelist' % (RawFit._meta.app_label, RawFit._meta.model_name)) + f"?id={rawfit.id}" + link_L.append(rf'{rawfit.id}') + return mark_safe(", ".join(link_L)) \ No newline at end of file diff --git a/iop4admin/sites.py b/iop4admin/sites.py index fd6a199b..66031268 100644 --- a/iop4admin/sites.py +++ b/iop4admin/sites.py @@ -17,6 +17,7 @@ class IOP4AdminSite(admin.AdminSite): from .modeladmins.epoch import AdminEpoch from .modeladmins.rawfit import AdminRawFit from .modeladmins.masterflat import AdminMasterFlat +from .modeladmins.masterdark import AdminMasterDark from .modeladmins.masterbias import AdminMasterBias from .modeladmins.reducedfit import AdminReducedFit from .modeladmins.astrosource import AdminAstroSource @@ -27,6 +28,7 @@ class IOP4AdminSite(admin.AdminSite): iop4admin_site.register(RawFit, AdminRawFit) iop4admin_site.register(MasterBias, AdminMasterBias) iop4admin_site.register(MasterFlat, AdminMasterFlat) +iop4admin_site.register(MasterDark, AdminMasterDark) iop4admin_site.register(ReducedFit, AdminReducedFit) iop4admin_site.register(AstroSource, AdminAstroSource) iop4admin_site.register(PhotoPolResult, AdminPhotoPolResult) diff --git a/iop4lib/db/__init__.py b/iop4lib/db/__init__.py index 7dbdbcda..f2b5865b 100644 --- a/iop4lib/db/__init__.py +++ b/iop4lib/db/__init__.py @@ -4,6 +4,7 @@ from .astrosource import AstroSource from .masterbias import MasterBias from .masterflat import MasterFlat +from .masterdark import MasterDark from .reducedfit import ReducedFit from .photopolresult import PhotoPolResult, PhotoPolResultReducedFitRelation from .aperphotresult import AperPhotResult \ No newline at end of file diff --git a/iop4lib/db/epoch.py b/iop4lib/db/epoch.py index 734d1458..302faff9 100644 --- a/iop4lib/db/epoch.py +++ b/iop4lib/db/epoch.py @@ -100,6 +100,11 @@ def masterflatdir(self): """Returns the path to the directory where the masterflat files of this epoch are stored.""" return os.path.join(iop4conf.datadir, "masterflat", self.epochname) + @property + def masterdarkdir(self): + """Returns the path to the directory where the masterdark files of this epoch are stored.""" + return os.path.join(iop4conf.datadir, "masterdark", self.epochname) + @property def yyyymmdd(self): """Returns the date of the epoch in the format YYYYMMDD.""" @@ -361,71 +366,47 @@ def get_summary_rawfits_status(self): - def build_master_biases(self, force_rebuild=False): - from iop4lib.db import RawFit, MasterBias + def build_masters(self, model, force_rebuild=False): import itertools - # define keywords to be used to build master biases - kw_L = MasterBias.mbargs_kwL + # define keywords to be used to build the master + kw_L = model.margs_kwL # for each keyword, get the set of values in the rawfits of this epoch - kw_set_D = {kw:set(self.rawfits.filter(imgtype=IMGTYPES.BIAS).values_list(kw, flat=True).distinct()) for kw in kw_L} + kw_set_D = {kw:set(self.rawfits.filter(imgtype=model.imgtype).values_list(kw, flat=True).distinct()) for kw in kw_L} # create a list of dictionaries with all the combinations of values for each keyword - mbargs_L = [dict(zip(kw_L, prod)) for prod in itertools.product(*kw_set_D.values())] - - # create master biases - - try: - for mbargs in mbargs_L: - mbargs['epoch'] = self - logger.debug(f"{mbargs=}") - if self.rawfits.filter(imgtype=IMGTYPES.BIAS, 
**mbargs).count() > 0: - logger.info(f"Building masterbias for {MasterBias.mbargs2str(mbargs)}.") - MasterBias.create(**mbargs, force_rebuild=force_rebuild) - else: - logger.debug(f"No masterbias will be built for this mbargs since there are no files for it.") - except Exception as e: - logger.error(f"Error building masterbias for {self.epochname}: {e}.") - self.set_flag(Epoch.FLAGS.ERROR) - - if self.auto_merge_to_db: - self.save() - - - - def build_master_flats(self, force_rebuild=False): - from iop4lib.db import RawFit, MasterFlat - import itertools - - # define keywords to be used to build master flats - kw_L = MasterFlat.mfargs_kwL - - # for each keyword, get the set of values in the rawfits of this epoch - kw_set_D = {kw:set(self.rawfits.filter(imgtype=IMGTYPES.FLAT).values_list(kw, flat=True).distinct()) for kw in kw_L} - - # create a list of dictionaries with all the combinations of values for each keyword - mfargs_L = [dict(zip(kw_L, prod)) for prod in itertools.product(*kw_set_D.values())] + margs_L = [dict(zip(kw_L, prod)) for prod in itertools.product(*kw_set_D.values())] # create master flats try: - for mfargs in mfargs_L: - mfargs['epoch'] = self - logger.debug(f"{mfargs=}") - if self.rawfits.filter(imgtype=IMGTYPES.FLAT, **mfargs).count() > 0: - logger.info(f"Building masterflat for {MasterFlat.mfargs2str(mfargs)}.") - MasterFlat.create(**mfargs, force_rebuild=force_rebuild) + for margs in margs_L: + margs['epoch'] = self + logger.debug(f"{margs=}") + if self.rawfits.filter(imgtype=model.imgtype, **margs).count() > 0: + logger.info(f"Building {model._meta.verbose_name} for {model.margs2str(margs)}.") + model.create(**margs, force_rebuild=force_rebuild) else: - logger.debug(f"No masterflat will be built for this mfargs since there are no files for it.") + logger.debug(f"No {model._meta.verbose_name} will be built for this margs since there are no files for it.") except Exception as e: - logger.error(f"Error building masterflats for {self.epochname}: {e}.") + logger.error(f"Error building {model._meta.verbose_name} for {self.epochname}: {e}.") self.set_flag(Epoch.FLAGS.ERROR) if self.auto_merge_to_db: - self.save() - + self.save() + def build_master_biases(self, **kwargs): + from iop4lib.db import MasterBias + return self.build_masters(MasterBias, **kwargs) + + def build_master_flats(self, **kwargs): + from iop4lib.db import MasterFlat + return self.build_masters(MasterFlat, **kwargs) + + def build_master_darks(self, **kwargs): + from iop4lib.db import MasterDark + return self.build_masters(MasterDark, **kwargs) def reduce(self, force_rebuild=False): """ Reduces all (LIGHT) rawfits of this epoch. 
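# --- Illustrative sketch of how the generic Epoch.build_masters() above enumerates
# candidate master frames: one dict per element of the cartesian product of the distinct
# values found for each keyword in model.margs_kwL. The values below are toy assumptions.
import itertools

kw_L = ['instrument', 'imgsize', 'exptime']                # e.g. MasterDark.margs_kwL without 'epoch'
kw_set_D = {'instrument': {'AndorT90'},                    # distinct values found in this epoch's raw fits
            'imgsize': {'1024x1024', '2048x2048'},
            'exptime': {2.0, 10.0}}

margs_L = [dict(zip(kw_L, prod)) for prod in itertools.product(*kw_set_D.values())]
# 1 x 2 x 2 = 4 candidate masters; only combinations with matching raw files get built.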
diff --git a/iop4lib/db/masterbias.py b/iop4lib/db/masterbias.py index 8cacefd4..366a9f7f 100644 --- a/iop4lib/db/masterbias.py +++ b/iop4lib/db/masterbias.py @@ -18,12 +18,14 @@ class MasterBias(FitFileModel): """A class representing a master bias for an epoch.""" - mbargs_kwL = ['epoch', 'instrument', 'imgsize'] + margs_kwL = ['epoch', 'instrument', 'imgsize'] + + imgtype = IMGTYPES.BIAS # Database fields rawfits = models.ManyToManyField('RawFit', related_name='built_masterbias') - # fields corresponding to MasterBias kw arguments (mbargs_kwL) + # fields corresponding to MasterBias kw arguments (margs_kwL) epoch = models.ForeignKey('Epoch', on_delete=models.CASCADE, related_name='masterbias') instrument = models.CharField(max_length=20, choices=INSTRUMENTS.choices) @@ -57,7 +59,7 @@ def filedpropdir(self): self.__class__.__name__) @property - def mbargs(self): + def margs(self): """Return a dict of the arguments used to build this MasterFlat. """ @@ -66,17 +68,17 @@ def mbargs(self): # repr and str @classmethod - def mbargs2str(cls, mbargs): + def margs2str(cls, margs): """Class method to build a nice string rep of the arguments of a MasterFlat. """ - return f"{mbargs['epoch'].epochname} | {mbargs['instrument']} | {mbargs['imgsize']}" + return f"{margs['epoch'].epochname} | {margs['instrument']} | {margs['imgsize']}" def __repr__(self): return f"MasterBias.objects.get(id={self.id!r})" def __str__(self): - return f"" + return f"" def _repr_pretty_(self, p, cycle): if cycle: @@ -84,7 +86,7 @@ def _repr_pretty_(self, p, cycle): else: with p.group(4, f'<{self.__class__.__name__}(', ')>'): p.text(f"id: {self.id},") - for k,v in self.mbargs.items(): + for k,v in self.margs.items(): p.breakable() p.text(f"{k}: {v}") @@ -118,19 +120,19 @@ def create(cls, """ - mbargs = {k:kwargs[k] for k in MasterBias.mbargs_kwL if k in kwargs} + margs = {k:kwargs[k] for k in MasterBias.margs_kwL if k in kwargs} from iop4lib.db import RawFit - if (mb := MasterBias.objects.filter(**mbargs).first()) is not None: + if (mb := MasterBias.objects.filter(**margs).first()) is not None: logger.debug(f"DB entry for {mb} already exists, using it instead.") else: - logger.debug(f"A DB entry for MasterBias {MasterBias.mbargs2str(mbargs)} will be created.") - mb = cls(**mbargs) + logger.debug(f"A DB entry for MasterBias {MasterBias.margs2str(margs)} will be created.") + mb = cls(**margs) mb.save() if rawfits is None: - rawfits = RawFit.objects.filter(imgtype=IMGTYPES.BIAS, **mbargs) + rawfits = RawFit.objects.filter(imgtype=IMGTYPES.BIAS, **margs) logger.debug(f"Found {len(rawfits)} bias raw files for {mb}.") else: logger.debug(f"Using {len(rawfits)} bias raw files for {mb}.") @@ -145,7 +147,7 @@ def create(cls, except Exception as e: logger.error("An error occurred while building the masterbias, deleting it and raising Exception") mb.delete() - raise Exception(f"An error occurred while building the MasterBias for {mbargs}: {e}") + raise Exception(f"An error occurred while building the MasterBias for {margs}: {e}") else: logger.info("Built masterbias successfully") diff --git a/iop4lib/db/masterdark.py b/iop4lib/db/masterdark.py new file mode 100644 index 00000000..2cc8789c --- /dev/null +++ b/iop4lib/db/masterdark.py @@ -0,0 +1,219 @@ +import iop4lib.config +iop4conf = iop4lib.Config(config_db=False) + +from django.db import models + +import os +import datetime +import numpy as np +import astropy.io.fits as fits + +from .fitfilemodel import FitFileModel +from ..enums import * + +import logging +logger = 
logging.getLogger(__name__) + +class MasterDark(FitFileModel): + """ + A class representing a master dark for an epoch. + """ + + margs_kwL = ['epoch', 'instrument', 'imgsize', 'exptime'] + + imgtype = IMGTYPES.DARK + + # Database fields + rawfits = models.ManyToManyField('RawFit', related_name='built_masterdarks') + + # fields corresponding to MasterDark kw arguments (margs_kwL) + + epoch = models.ForeignKey('Epoch', on_delete=models.CASCADE, related_name='masterdarks') + instrument = models.CharField(max_length=20, choices=INSTRUMENTS.choices) + imgsize = models.CharField(max_length=12, null=True) + exptime = models.FloatField(null=True) + + masterbias = models.ForeignKey('MasterBias', null=True, on_delete=models.CASCADE, related_name='masterdarks') + + class Meta: + app_label = 'iop4api' + verbose_name = "Master Dark" + verbose_name_plural = "Master Darks" + + # Properties + + @property + def filename(self): + return f"masterdark_id{self.id}.fits" + + @property + def filepath(self): + return os.path.join(self.epoch.masterdarkdir, self.filename) + + @property + def fileloc(self): + return f"{self.epoch.epochname}/{self.filename}" + + # Filed Properties location + + @property + def filedpropdir(self): + return os.path.join(self.epoch.calibrationdir, + self.filename + '.d', + self.__class__.__name__) + + # some helper properties + + @property + def margs(self): + """ + Return a dict of the arguments used to build this MasterDark. + """ + return {'epoch':self.epoch, 'instrument':self.instrument, 'imgsize':self.imgsize, 'exptime':self.exptime} + + # repr and str + + @classmethod + def margs2str(cls, margs): + """ + Class method to build a nice string rep of the arguments of a MasterDark. + """ + return f"{margs['epoch'].epochname} | {margs['instrument']} | {margs['imgsize']} | {margs['exptime']}s" + + def __repr__(self): + return f"MasterDark.objects.get(id={self.id!r})" + + def __str__(self): + return f"" + + def _repr_pretty_(self, p, cycle): + if cycle: + p.text(f"{self!r}") + else: + with p.group(4, f'<{self.__class__.__name__}(', ')>'): + p.text(f"id: {self.id},") + for k,v in self.margs.items(): + p.breakable() + p.text(f"{k}: {v}") + + + # Constructor + @classmethod + def create(cls, + rawfits=None, + masterbias=None, + auto_merge_to_db=True, + force_rebuild=True, + **kwargs): + """ + Create a MasterDark object for Epoch epoch. 
+ + Parameters + ---------- + epoch: Epoch + instrument: str + imgsize: "WidthxHeight" str + band: str + obsmode: str + rotangle: float + exptime: float + + Other Parameters + ---------------- + rawfits: list of RawFit (optional) + masterbias: MasterBias (optional) + auto_merge_to_db: bool (optional) Default: True + """ + + #margs = {k:kwargs[k] for k in MasterDark.margs_kwL if k in kwargs} + margs = {k:kwargs[k] for k in MasterDark.margs_kwL} # so it gives error if some margs kw missing + + from iop4lib.db import RawFit + + if (md := MasterDark.objects.filter(**margs).first()) is not None: + logger.info(f"{md} exists, using it instead.") + else: + logger.info(f"A MasterDark entry for {MasterDark.margs2str(margs)} will be created.") + md = cls(**margs) + logger.debug("Saving MasterDark to DB so that it has an id.") + md.save() + + if rawfits is None: + rawfits = RawFit.objects.filter(imgtype=IMGTYPES.DARK, **margs) + logger.debug(f"Found {len(rawfits)} dark raw files for {md}.") + else: + logger.debug(f"Using {len(rawfits)} dark raw files for {md}.") + + md.rawfits.set(rawfits) + + if masterbias is None: + from .masterbias import MasterBias + margs = {k:kwargs[k] for k in MasterBias.margs_kwL if k in kwargs} + masterbias = MasterBias.objects.filter(**margs).first() + logger.debug(f"Using {masterbias} as MasterBias for {md}.") + md.masterbias = masterbias + + # Build the file + if not md.fileexists or force_rebuild: + logger.info(f"Building file") + try: + md.build_file() + except Exception as e: + logger.error(f"An error ocurred while building the MasterDark, deleting it and raising Exception.") + md.delete() + raise Exception(f"An error ocurred while building the MasterDark for {margs}: {e}") + else: + logger.info("MasterDark created successfully.") + + # merge to DB + + if auto_merge_to_db: + md.save() + + return md + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.auto_merge_to_db=True + + # Methods + + def build_file(self): + logger.debug(f"Getting data from files") + + if self.rawfits.count() == 0: + raise Exception(f"No rawfits for {self}") + + data_L = [] + for rf in self.rawfits.all(): + with fits.open(rf.filepath) as hdul: + data = (hdul[0].data - self.masterbias.data) / self.exptime + data_L.append(data) + + data = np.nanmedian(data_L, axis=0) + + logger.debug(f"Building header") + + header = fits.Header() + + header['TELESCOP'] = self.epoch.telescope + header['NIGHT'] = self.epoch.yyyymmdd + header['EPOCH'] = self.epoch.epochname + header['IMGSIZE'] = self.imgsize + header['IMGTYPE'] = 'masterdark' + header['DATECREA'] = datetime.datetime.utcnow().isoformat(timespec="milliseconds") + header['NRAWFITS'] = self.rawfits.count() + header['INSTRUME'] = self.instrument + header['EXPTIME'] = self.exptime + + logger.debug(f"Building HDU") + + if not os.path.isdir(self.epoch.masterdarkdir): + logger.debug(f"Creating directory {self.epoch.masterdarkdir}") + os.makedirs(self.epoch.masterdarkdir) + + hdu = fits.PrimaryHDU(data, header=header) + + logger.debug(f"Writing MasterDark to {self.filepath}") + + hdu.writeto(self.filepath, overwrite=True) \ No newline at end of file diff --git a/iop4lib/db/masterflat.py b/iop4lib/db/masterflat.py index 80e81b5c..f46cb51d 100644 --- a/iop4lib/db/masterflat.py +++ b/iop4lib/db/masterflat.py @@ -19,12 +19,14 @@ class MasterFlat(FitFileModel): A class representing a master flat for an epoch. 
""" - mfargs_kwL = ['epoch', 'instrument', 'imgsize', 'band', 'obsmode', 'rotangle', 'exptime'] + margs_kwL = ['epoch', 'instrument', 'imgsize', 'band', 'obsmode', 'rotangle', 'exptime'] + imgtype = IMGTYPES.FLAT + # Database fields rawfits = models.ManyToManyField('RawFit', related_name='built_masterflats') - # fields corresponding to MasterFlat kw arguments (mfargs_kwL) + # fields corresponding to MasterFlat kw arguments (margs_kwL) epoch = models.ForeignKey('Epoch', on_delete=models.CASCADE, related_name='masterflats') instrument = models.CharField(max_length=20, choices=INSTRUMENTS.choices) @@ -66,7 +68,7 @@ def filedpropdir(self): # some helper properties @property - def mfargs(self): + def margs(self): """ Return a dict of the arguments used to build this MasterFlat. """ @@ -75,17 +77,17 @@ def mfargs(self): # repr and str @classmethod - def mfargs2str(cls, mfargs): + def margs2str(cls, margs): """ Class method to build a nice string rep of the arguments of a MasterFlat. """ - return f"{mfargs['epoch'].epochname} | {mfargs['instrument']} | {mfargs['imgsize']} | {mfargs['band']} | {mfargs['obsmode']} | {mfargs['rotangle']} º | {mfargs['exptime']}s" + return f"{margs['epoch'].epochname} | {margs['instrument']} | {margs['imgsize']} | {margs['band']} | {margs['obsmode']} | {margs['rotangle']} º | {margs['exptime']}s" def __repr__(self): return f"MasterFlat.objects.get(id={self.id!r})" def __str__(self): - return f"" + return f"" def _repr_pretty_(self, p, cycle): if cycle: @@ -93,7 +95,7 @@ def _repr_pretty_(self, p, cycle): else: with p.group(4, f'<{self.__class__.__name__}(', ')>'): p.text(f"id: {self.id},") - for k,v in self.mfargs.items(): + for k,v in self.margs.items(): p.breakable() p.text(f"{k}: {v}") @@ -103,6 +105,7 @@ def _repr_pretty_(self, p, cycle): def create(cls, rawfits=None, masterbias=None, + masterdark=None, auto_merge_to_db=True, force_rebuild=True, **kwargs): @@ -126,21 +129,21 @@ def create(cls, auto_merge_to_db: bool (optional) Default: True """ - #mfargs = {k:kwargs[k] for k in MasterFlat.mfargs_kwL if k in kwargs} - mfargs = {k:kwargs[k] for k in MasterFlat.mfargs_kwL} # so it gives error if some mfargs kw missing + #margs = {k:kwargs[k] for k in MasterFlat.margs_kwL if k in kwargs} + margs = {k:kwargs[k] for k in MasterFlat.margs_kwL} # so it gives error if some margs kw missing from iop4lib.db import RawFit - if (mf := MasterFlat.objects.filter(**mfargs).first()) is not None: + if (mf := MasterFlat.objects.filter(**margs).first()) is not None: logger.info(f"{mf} exists, using it instead.") else: - logger.info(f"A MasterFlat entry for {MasterFlat.mfargs2str(mfargs)} will be created.") - mf = cls(**mfargs) + logger.info(f"A MasterFlat entry for {MasterFlat.margs2str(margs)} will be created.") + mf = cls(**margs) logger.debug("Saving MasterFlat to DB so that it has an id.") mf.save() if rawfits is None: - rawfits = RawFit.objects.filter(imgtype=IMGTYPES.FLAT, **mfargs) + rawfits = RawFit.objects.filter(imgtype=IMGTYPES.FLAT, **margs) logger.debug(f"Found {len(rawfits)} flat raw files for {mf}.") else: logger.debug(f"Using {len(rawfits)} flat raw files for {mf}.") @@ -149,11 +152,18 @@ def create(cls, if masterbias is None: from .masterbias import MasterBias - mbargs = {k:kwargs[k] for k in MasterBias.mbargs_kwL if k in kwargs} - masterbias = MasterBias.objects.filter(**mbargs).first() + margs = {k:kwargs[k] for k in MasterBias.margs_kwL if k in kwargs} + masterbias = MasterBias.objects.filter(**margs).first() logger.debug(f"Using {masterbias} as MasterBias for 
{mf}.") mf.masterbias = masterbias + if masterdark is None: + from .masterdark import MasterDark + margs = {k:kwargs[k] for k in MasterDark.margs_kwL if k in kwargs} + masterdark = MasterDark.objects.filter(**margs).first() + logger.debug(f"Using {masterdark} as MasterDark for {mf}.") + mf.masterdark = masterdark + # Build the file if not mf.fileexists or force_rebuild: logger.info(f"Building file") @@ -162,7 +172,7 @@ def create(cls, except Exception as e: logger.error(f"An error ocurred while building the MasterFlat, deleting it and raising Exception.") mf.delete() - raise Exception(f"An error ocurred while building the MasterFlat for {mfargs}: {e}") + raise Exception(f"An error ocurred while building the MasterFlat for {margs}: {e}") else: logger.info("MasterFlat created successfully.") @@ -188,7 +198,13 @@ def build_file(self): data_L = [] for rf in self.rawfits.all(): with fits.open(rf.filepath) as hdul: - data = (hdul[0].data - self.masterbias.data) + + if self.masterdark is not None: + data = hdul[0].data - self.masterbias.data - self.masterdark.data * self.exptime + else: + logger.warning(f"No MasterDark for {self}, is this a CCD and it is cold?") + data = (hdul[0].data - self.masterbias.data) + data = data / np.nanmedian(data) data_L.append(data) diff --git a/iop4lib/db/rawfit.py b/iop4lib/db/rawfit.py index 34b2d727..6be96e50 100644 --- a/iop4lib/db/rawfit.py +++ b/iop4lib/db/rawfit.py @@ -308,87 +308,57 @@ def classify(self): if self.auto_merge_to_db: self.save() - - - def request_masterbias(self, other_epochs=False): - """ Returns the master bias for this raw fit. - - Notes - ----- - See also iop4lib.db.MasterBias.request_masterbias(). - """ - from iop4lib.db import MasterBias - - rf_vals = RawFit.objects.filter(id=self.id).values().get() - args = {k:rf_vals[k] for k in rf_vals if k in MasterBias.mbargs_kwL} - args["epoch"] = self.epoch # from values we only get epoch__id - - mb = MasterBias.objects.filter(**args).first() - - if mb is None and other_epochs == True: - args.pop("epoch") - - mb_other_epochs = np.array(MasterBias.objects.filter(**args).all()) - - if len(mb_other_epochs) == 0: - logger.debug(f"No master bias for {args} in DB, None will be returned.") - return None - - mb_other_epochs_jyear = np.array([mb.epoch.jyear for mb in mb_other_epochs]) - mb = mb_other_epochs[np.argsort(np.abs(mb_other_epochs_jyear - self.epoch.jyear))[0]] - - if (mb.epoch.jyear - self.epoch.jyear) > 7/365: - #logger.debug(f"Master bias from epoch {mb.epoch} is more than 1 week away from epoch {self.epoch}, None will be returned.") - #return None - logger.warning(f"Master bias from epoch {mb.epoch} is more than 1 week away from epoch {self.epoch}.") - - return mb - - - - def request_masterflat(self, other_epochs=False): - """ Searchs in the DB and returns an appropiate masterflat for this rawfit. + def request_master(self, model, other_epochs=False): + """ Searchs in the DB and returns an appropiate master bias / flat / dark for this rawfit. Notes ----- - It takes into account the parameters (band, size, etc) defined in MaserFlat.mfargs_kwL; except - for exptime, which is not taken into account (flats with different extime can and must be used). - By default, it looks for masterflats in the same epoch, but if other_epochs is set to True, it - will look for masterflats in other epochs. If more than one masterflat is found, it returns the + It takes into account the parameters (band, size, etc) defined in Master' margs_kwL; except + for exptime, which is not taken into account. 
+ By default, it looks for masters in the same epoch, but if other_epochs is set to True, it + will look for masters in other epochs. If more than one master is found, it returns the one from the closest night. It will print a warning even with other_epochs if it is more than 1 week away from the rawfit epoch. - If no masterflat is found, it returns None. + If no master is found, it returns None. """ - from iop4lib.db import MasterFlat - rf_vals = RawFit.objects.filter(id=self.id).values().get() - args = {k:rf_vals[k] for k in rf_vals if k in MasterFlat.mfargs_kwL} + args = {k:rf_vals[k] for k in rf_vals if k in model.margs_kwL} - args.pop("exptime", None) - args["epoch"] = self.epoch # from values we only get epoch__id + args.pop("exptime", None) # exptime might be a building keywords (for flats and darks), but masters with different exptime can be applied + args["epoch"] = self.epoch # from .values() we only get epoch__id - mf = MasterFlat.objects.filter(**args).first() + master = model.objects.filter(**args).first() - if mf is None and other_epochs == True: + if master is None and other_epochs == True: args.pop("epoch") - mf_other_epochs = np.array(MasterFlat.objects.filter(**args).all()) + master_other_epochs = np.array(model.objects.filter(**args).all()) - if len(mf_other_epochs) == 0: - logger.debug(f"No master flat for {args} in DB, None will be returned.") + if len(master_other_epochs) == 0: + logger.debug(f"No {model._meta.verbose_name} for {args} in DB, None will be returned.") return None - mf_other_epochs_jyear = np.array([mf.epoch.jyear for mf in mf_other_epochs]) - mf = mf_other_epochs[np.argsort(np.abs(mf_other_epochs_jyear - self.epoch.jyear))[0]] + master_other_epochs_jyear = np.array([md.epoch.jyear for md in master_other_epochs]) + master = master_other_epochs[np.argsort(np.abs(master_other_epochs_jyear - self.epoch.jyear))[0]] - if (mf.epoch.jyear - self.epoch.jyear) > 7/365: - #logger.debug(f"Master flat from epoch {mf.epoch} is more than 1 week away from epoch {self.epoch}, None will be returned.") - #return None - logger.warning(f"Master flat from epoch {mf.epoch} is more than 1 week away from epoch {self.epoch}.") + if (master.epoch.jyear - self.epoch.jyear) > 7/365: + logger.warning(f"{model._meta.verbose_name} from epoch {master.epoch} is more than 1 week away from epoch {self.epoch}.") - return mf + return master + + def request_masterbias(self, *args, **kwargs): + from iop4lib.db import MasterBias + return self.request_master(MasterBias, *args, **kwargs) + + def request_masterflat(self, *args, **kwargs): + from iop4lib.db import MasterFlat + return self.request_master(MasterFlat, *args, **kwargs) + + def request_masterdark(self, *args, **kwargs): + from iop4lib.db import MasterDark + return self.request_master(MasterDark, *args, **kwargs) @property def header_hintcoord(self): diff --git a/iop4lib/db/reducedfit.py b/iop4lib/db/reducedfit.py index 702b5edc..3684178c 100644 --- a/iop4lib/db/reducedfit.py +++ b/iop4lib/db/reducedfit.py @@ -42,6 +42,7 @@ class ReducedFit(RawFit): # ReducedFit specific fields masterbias = models.ForeignKey('MasterBias', null=True, on_delete=models.CASCADE, related_name='reduced', help_text="MasterBias to be used for the reduction.") masterflat = models.ForeignKey('MasterFlat', null=True, on_delete=models.CASCADE, related_name='reduced', help_text="MasterFlat to be used for the reduction.") + masterdark = models.ForeignKey('MasterDark', null=True, on_delete=models.CASCADE, related_name='reduced', help_text="MasterDark to be used for 
the reduction.") sources_in_field = models.ManyToManyField('AstroSource', related_name='in_reducedfits', blank=True, help_text="Sources in the field of this FITS.") modified = models.DateTimeField(auto_now=True, help_text="Last time this entry was modified.") @@ -187,11 +188,7 @@ def build_file(self): self.unset_flag(ReducedFit.FLAGS.BUILT_REDUCED) - logger.debug(f"{self}: applying masterbias {self.masterbias}") - self.apply_masterbias() - - logger.debug(f"{self}: applying masterflat {self.masterflat}") - self.apply_masterflat() + self.apply_masters() logger.debug(f"{self}: performing astrometric calibration") @@ -219,61 +216,31 @@ def build_file(self): self.save() - def apply_masterbias(self): - """ Applies the masterbias to the rawfit. - - It starts from the RawFit FITS file. This creates the ReducedFit file for the first time. - """ - + def apply_masters(self): import astropy.io.fits as fits + logger.debug(f"{self}: applying masters") + rf_data = fits.getdata(self.rawfit.filepath) mb_data = fits.getdata(self.masterbias.filepath) + mf_data = fits.getdata(self.masterflat.filepath) - data_new = rf_data - mb_data - - if not os.path.exists(os.path.dirname(self.filepath)): - logger.debug(f"{self}: creating directory {os.path.dirname(self.filepath)}") - os.makedirs(os.path.dirname(self.filepath)) - - # header_new = self.rawfit.header - - # # remove blank keyword - # # unlinke the rawfit, the reduced fit now contains float values, so the blank keyword is non standard - # # and will cause warnings, we remove it from the rawfit header. - # if 'BLANK' in header_new: - # del header_new['BLANK'] + if self.masterdark is not None: + md_dark = fits.getdata(self.masterdark.filepath) + else : + logger.warning(f"{self}: no masterdark found, assuming dark current = 0, is this a CCD camera and it's cold?") + md_dark = 0 - # better create a new header beacuse the previous one had bad keywords for wcs, they will give problems + data_new = (rf_data - mb_data - md_dark*self.rawfit.exptime) / (mf_data) header_new = fits.Header() + if not os.path.exists(os.path.dirname(self.filepath)): + logger.debug(f"{self}: creating directory {os.path.dirname(self.filepath)}") + os.makedirs(os.path.dirname(self.filepath)) + fits.writeto(self.filepath, data_new, header=header_new, overwrite=True) - - def apply_masterflat(self): - """ Applies the masterflat to the rawfit. - Notes - ----- - #no longer: - This normalizes by exposure time. - - self.apply_masterbias() must have been called before, as it creates the reduced FIT file. 
- """ - - import numpy as np - import astropy.io.fits as fits - - data = fits.getdata(self.filepath) - mf_data = fits.getdata(self.masterflat.filepath) - - import warnings - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - #data_new = (data/self.rawfit.exptime) / (mf_data) - data_new = (data) / (mf_data) - - fits.writeto(self.filepath, data_new, header=self.header, overwrite=True) - - @property def with_pairs(self): """ Indicates whether both ordinary and extraordinary sources are present diff --git a/iop4lib/enums.py b/iop4lib/enums.py index db202f89..d57c2e46 100644 --- a/iop4lib/enums.py +++ b/iop4lib/enums.py @@ -8,6 +8,7 @@ class IMGTYPES(models.TextChoices): NONE = None, "None" ERROR = "ERROR", "Error" FLAT = 'FLAT', "Flat" + DARK = 'DARK', "Dark" BIAS = 'BIAS', "Bias" LIGHT = 'LIGHT', "Light" diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index f83179ee..519cf5af 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -43,7 +43,7 @@ def classify_juliandate_rawfit(cls, rawfit: 'RawFit'): @classmethod def classify_imgtype_rawfit(cls, rawfit: 'RawFit'): """ - DIPOL files have IMAGETYP keyword: Light Frame, Bias Frame + DIPOL files have IMAGETYP keyword: Light Frame (it can b, Bias Frame """ from iop4lib.db.rawfit import RawFit @@ -52,8 +52,13 @@ def classify_imgtype_rawfit(cls, rawfit: 'RawFit'): with fits.open(rawfit.filepath) as hdul: if hdul[0].header['IMAGETYP'] == 'Bias Frame': rawfit.imgtype = IMGTYPES.BIAS + elif hdul[0].header['IMAGETYP'] == 'Dark Frame': + rawfit.imgtype = IMGTYPES.DARK elif hdul[0].header['IMAGETYP'] == 'Light Frame': - rawfit.imgtype = IMGTYPES.LIGHT + if 'skyflat' in rawfit.header['OBJECT'].lower(): + rawfit.imgtype = IMGTYPES.FLAT + else: + rawfit.imgtype = IMGTYPES.LIGHT else: logger.error(f"Unknown image type for {rawfit.fileloc}.") rawfit.imgtype = IMGTYPES.ERROR @@ -62,20 +67,18 @@ def classify_imgtype_rawfit(cls, rawfit: 'RawFit'): @classmethod def classify_band_rawfit(cls, rawfit: 'RawFit'): """ - OSN Files have no FILTER keyword if they are BIAS, FILTER=Clear if they are FLAT, and FILTER=FilterName if they are LIGHT. - For our DB, we have R, U, ..., None, ERROR. - - For polarimetry, which is done by taking four images with the R filter at different angles, we have R_45, R0, R45, R90. + .. warning: + Red is in a differnt photometric system. 
""" from iop4lib.db.rawfit import RawFit if 'FILTER' not in rawfit.header: - if rawfit.imgtype == IMGTYPES.BIAS: + if rawfit.imgtype == IMGTYPES.BIAS or rawfit.imgtype == IMGTYPES.DARK: rawfit.band = BANDS.NONE else: rawfit.band = BANDS.ERROR - raise ValueError(f"Missing FILTER keyword for {rawfit.fileloc} which is not a bias (it is a {rawfit.imgtype}).") + raise ValueError(f"Missing FILTER keyword for {rawfit.fileloc} which is not a bias or dark (it is a {rawfit.imgtype}).") elif rawfit.header['FILTER'] == "Red": rawfit.band = BANDS.R else: diff --git a/iop4lib/telescopes/osnt090.py b/iop4lib/telescopes/osnt090.py index 2e856cce..66d4ddd7 100644 --- a/iop4lib/telescopes/osnt090.py +++ b/iop4lib/telescopes/osnt090.py @@ -16,6 +16,7 @@ import astrometry import numpy as np import math +import datetime # iop4lib imports from iop4lib.enums import * @@ -157,13 +158,13 @@ def list_remote_filelocs(cls, epochnames: list[str]) -> list[str]: @classmethod def check_telescop_kw(cls, rawfit): - r""" Subclassed to account for DIPOL files, that have empty TELESCOP keyword as of 2023-10-11 + r""" Subclassed to account for DIPOL files, that may have empty TELESCOP keyword as of 2023-10-11 - TODO: this kw should not be empty. + If it is empty, check first the instrument, and if it is DIPOL and the night is before 2023-10-11, then continue. - If it is empty, check first the instrument, and if it is DIPOL, then continue. + Otherwise just call the parent method. """ - if rawfit.header["TELESCOP"] == "": + if rawfit.header["TELESCOP"] == "" and rawfit.night < datetime.date(2023, 10, 11): cls.classify_instrument_kw(rawfit) if rawfit.instrument == INSTRUMENTS.DIPOL: return diff --git a/iop4lib/utils/parallel.py b/iop4lib/utils/parallel.py index 2a0814bc..da469b7f 100644 --- a/iop4lib/utils/parallel.py +++ b/iop4lib/utils/parallel.py @@ -288,10 +288,9 @@ def _buildfile(redf_id): if not redf.fileexists: try: - redf.apply_masterbias() - redf.apply_masterflat() + redf.apply_masters() except Exception as e: - logger.error(f"{redf}: exception during .apply_masterbias(), .apply_masterflat(): {e}") + logger.error(f"{redf}: exception during .apply_masters(): {e}") pass if success: From fd763fd91e1036d033544e32361a85b925eb1077 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Fri, 20 Oct 2023 14:58:03 +0200 Subject: [PATCH 045/168] ci.yml: fix typo --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 83d8432f..876a2dde 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -68,7 +68,7 @@ jobs: - name: Try to restore httpdirfs cache data # this should make subsuquent commits in the same PR faster - uses: actions/cache@/restore@v3 + uses: actions/cache/restore@v3 with: path: $HOME/.cache/httpdirfs/ key: httpdirfs-astrometry-5200-1-2-3-4 From 313a039fc4c3d9de3e49d6de6f60ca45c64e9e81 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Fri, 20 Oct 2023 16:10:23 +0200 Subject: [PATCH 046/168] ci.yml: comment caching index files bc not working --- .github/workflows/ci.yml | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 876a2dde..a9b34b6f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -66,12 +66,12 @@ jobs: - name: Install httpdirfs to access astrometry index files without downloading them run: sudo apt install httpdirfs - - name: Try to restore httpdirfs cache data - # this 
should make subsuquent commits in the same PR faster - uses: actions/cache/restore@v3 - with: - path: $HOME/.cache/httpdirfs/ - key: httpdirfs-astrometry-5200-1-2-3-4 + # - name: Try to restore httpdirfs cache data + # # this should make subsuquent commits in the same PR faster + # uses: actions/cache@/restore@v3 + # with: + # path: $HOME/.cache/httpdirfs/ + # key: httpdirfs-astrometry-5200-1-2-3-4 - name: Mount astrometry index file in default location run: mkdir -p $HOME/.astrometry_cache/5200 && httpdirfs --cache 'https://portal.nersc.gov/project/cosmo/temp/dstn/index-5200/LITE/' $HOME/.astrometry_cache/5200/ @@ -95,11 +95,11 @@ jobs: df -h || true du -sh $HOME/.cache/httpdirfs/ || true - - name: Save httpdirfs cache data - uses: actions/cache/save@v3 - with: - path: $HOME/.cache/httpdirfs/ - key: httpdirfs-astrometry-5200-1-2-3-4 + # - name: Save httpdirfs cache data + # uses: actions/cache/save@v3 + # with: + # path: $HOME/.cache/httpdirfs/ + # key: httpdirfs-astrometry-5200-1-2-3-4 From 11e5ef3b438e53ac62ad2e3d98775f3ee48d11e0 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Sat, 21 Oct 2023 14:05:06 +0200 Subject: [PATCH 047/168] iop4admin: add filters to masters admins --- iop4admin/modeladmins/masterbias.py | 12 +++++++++++- iop4admin/modeladmins/masterdark.py | 11 +++++++++++ iop4admin/modeladmins/masterflat.py | 10 ++++++++++ 3 files changed, 32 insertions(+), 1 deletion(-) diff --git a/iop4admin/modeladmins/masterbias.py b/iop4admin/modeladmins/masterbias.py index 25d567bc..593178ad 100644 --- a/iop4admin/modeladmins/masterbias.py +++ b/iop4admin/modeladmins/masterbias.py @@ -11,10 +11,20 @@ logger = logging.getLogger(__name__) class AdminMasterBias(AdminFitFile): + model = MasterBias - list_display = ['id', 'telescope', 'night', 'instrument', 'imgsize', 'get_built_from', 'options'] + list_display = ['id', 'telescope', 'night', 'instrument', 'imgsize', 'get_built_from', 'options'] + list_filter = ( + RawFitIdFilter, + RawFitTelescopeFilter, + RawFitNightFilter, + RawFitInstrumentFilter, + RawFitFlagFilter, + "imgsize", + ) + @admin.display(description='Options') def options(self, obj): diff --git a/iop4admin/modeladmins/masterdark.py b/iop4admin/modeladmins/masterdark.py index 403f40c8..5653e457 100644 --- a/iop4admin/modeladmins/masterdark.py +++ b/iop4admin/modeladmins/masterdark.py @@ -12,9 +12,20 @@ logger = logging.getLogger(__name__) class AdminMasterDark(AdminFitFile): + model = MasterDark + list_display = ['id', 'telescope', 'night', 'instrument', 'imgsize', 'exptime', 'get_masterbias', 'get_built_from', 'options'] + list_filter = ( + RawFitIdFilter, + RawFitTelescopeFilter, + RawFitNightFilter, + RawFitInstrumentFilter, + RawFitFlagFilter, + "imgsize", + ) + @admin.display(description='Options') def options(self, obj): url_details = reverse('iop4admin:iop4api_masterdark_details', args=[obj.id]) diff --git a/iop4admin/modeladmins/masterflat.py b/iop4admin/modeladmins/masterflat.py index d22b7854..34af7272 100644 --- a/iop4admin/modeladmins/masterflat.py +++ b/iop4admin/modeladmins/masterflat.py @@ -12,9 +12,19 @@ logger = logging.getLogger(__name__) class AdminMasterFlat(AdminFitFile): + model = MasterFlat + list_display = ['id', 'telescope', 'night', 'instrument', 'imgsize', 'band', 'obsmode', 'rotangle', 'exptime', 'get_masterbias', 'get_built_from', 'options'] + list_filter = ( + RawFitIdFilter, + RawFitTelescopeFilter, + RawFitNightFilter, + RawFitInstrumentFilter, + RawFitFlagFilter, + "imgsize", + ) @admin.display(description='Options') From 
cbc5e446206723cd8fc07df5f3d9db8d2e801a04 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Sat, 21 Oct 2023 16:51:05 +0200 Subject: [PATCH 048/168] instrument refactoring, dark frame support --- iop4lib/db/reducedfit.py | 156 +++------------------------ iop4lib/instruments/andor_cameras.py | 5 + iop4lib/instruments/cafos.py | 6 ++ iop4lib/instruments/dipol.py | 2 + iop4lib/instruments/instrument.py | 154 +++++++++++++++++++++++++- iop4lib/utils/astrometry.py | 22 ++-- iop4lib/utils/plotting.py | 10 +- 7 files changed, 194 insertions(+), 161 deletions(-) diff --git a/iop4lib/db/reducedfit.py b/iop4lib/db/reducedfit.py index 3684178c..6578a8e8 100644 --- a/iop4lib/db/reducedfit.py +++ b/iop4lib/db/reducedfit.py @@ -69,6 +69,7 @@ def filepath(self): def create(cls, rawfit, masterbias=None, masterflat=None, + masterdark=None, auto_build=False, force_rebuild=False, auto_merge_to_db=True): @@ -122,35 +123,7 @@ def create(cls, rawfit, # instance only attributes reduced.auto_merge_to_db = auto_merge_to_db - # associate a masterbias to this reducedfit - - if masterbias is not None: - reduced.masterbias = masterbias - else: - if (mb := rawfit.request_masterbias()) is not None: - reduced.masterbias = mb - else: - logger.warning(f"{reduced}: MasterBias in this epoch could not be found, attemptying adjacent epochs.") - if (mb := rawfit.request_masterbias(other_epochs=True)) is not None: - reduced.masterbias = mb - else: - logger.error(f"{reduced}: Could not find any MasterBias, not even in adjacent epochs.") - reduced.set_flag(ReducedFit.FLAGS.ERROR) - - # associate a masterflat to this reducedfit - - if masterflat is not None: - reduced.masterflat = masterflat - else: - if (mf := rawfit.request_masterflat()) is not None: - reduced.masterflat = mf - else: - logger.warning(f"{reduced}: MasterFlat in this epoch could not be found, attemptying adjacent epochs.") - if (mf := rawfit.request_masterflat(other_epochs=True)) is not None: - reduced.masterflat = mf - else: - logger.error(f"{reduced}: Could not find any MasterFlat, not even in adjacent epochs.") - reduced.set_flag(ReducedFit.FLAGS.ERROR) + reduced.associate_masters(masterbias=masterbias, masterdark=masterdark, masterflat=masterflat) # build file @@ -170,121 +143,27 @@ def __init__(self, *args, **kwargs): self.auto_merge_to_db = True - # Calibration methods - - def build_file(self): - """ Builds the ReducedFit FITS file. - - Notes - ----- - The file is built by: - - applying masterbias.. - - applying masterflat. - - try to astrometerically calibrate the reduced fit, giving it a WCS. - - find the catalog sources in the field. 
- """ - - logger.debug(f"{self}: building file") - - self.unset_flag(ReducedFit.FLAGS.BUILT_REDUCED) - - self.apply_masters() - - logger.debug(f"{self}: performing astrometric calibration") - - try: - self.astrometric_calibration() - except Exception as e: - logger.error(f"{self}: could not perform astrometric calibration on {self}: {e}") - self.set_flag(ReducedFit.FLAGS.ERROR_ASTROMETRY) - if self.auto_merge_to_db: - self.save() - raise e - else: - logger.debug(f"{self}: astrometric calibration was successful.") - self.unset_flag(ReducedFit.FLAGS.ERROR_ASTROMETRY) - - logger.debug(f"{self}: searching for sources in field...") - sources_in_field = AstroSource.get_sources_in_field(fit=self) - - logger.debug(f"{self}: found {len(sources_in_field)} sources in field.") - self.sources_in_field.set(sources_in_field, clear=True) - - self.set_flag(ReducedFit.FLAGS.BUILT_REDUCED) + # REDUCTION METHODS - if self.auto_merge_to_db: - self.save() + ## Delegated to telescopes or instrument classes + def associate_masters(self, *args, **kwargs): + return Instrument.by_name(self.instrument).associate_masters(self, *args, **kwargs) def apply_masters(self): - import astropy.io.fits as fits - - logger.debug(f"{self}: applying masters") - - rf_data = fits.getdata(self.rawfit.filepath) - mb_data = fits.getdata(self.masterbias.filepath) - mf_data = fits.getdata(self.masterflat.filepath) - - if self.masterdark is not None: - md_dark = fits.getdata(self.masterdark.filepath) - else : - logger.warning(f"{self}: no masterdark found, assuming dark current = 0, is this a CCD camera and it's cold?") - md_dark = 0 - - data_new = (rf_data - mb_data - md_dark*self.rawfit.exptime) / (mf_data) + return Instrument.by_name(self.instrument).apply_masters(self) + + def build_file(self): + return Instrument.by_name(self.instrument).build_file(self) - header_new = fits.Header() + def astrometric_calibration(self): + return Instrument.by_name(self.instrument).astrometric_calibration(self) - if not os.path.exists(os.path.dirname(self.filepath)): - logger.debug(f"{self}: creating directory {os.path.dirname(self.filepath)}") - os.makedirs(os.path.dirname(self.filepath)) - - fits.writeto(self.filepath, data_new, header=header_new, overwrite=True) - @property - def with_pairs(self): - """ Indicates whether both ordinary and extraordinary sources are present - in the file. At the moment, this happens only for CAFOS polarimetry - """ - return (self.rawfit.instrument == INSTRUMENTS.CAFOS and self.rawfit.obsmode == OBSMODES.POLARIMETRY) + def has_pairs(self): + """ Indicates whether both ordinary and extraordinary sources are present in the file. """ + return Instrument.by_name(self.instrument).has_pairs(self) - def astrometric_calibration(self): - """ Performs astrometric calibration on the reduced fit, giving it the appropriate WCS. - - If the are both ordinary and extraordinary sources in the field, one WCS will be built for each, - and the will be saved in the first and second extensions of the FITS file. 
- """ - - build_wcs_result = Instrument.by_name(self.instrument).build_wcs(self) - - if build_wcs_result['success']: - - logger.debug(f"{self}: saving WCSs to FITS header.") - - wcs1 = build_wcs_result['wcslist'][0] - - header = fits.Header() - - header.update(wcs1.to_header(relax=True, key="A")) - - if self.with_pairs: - wcs2 = build_wcs_result['wcslist'][1] - header.update(wcs2.to_header(relax=True, key="B")) - - # if available, save also some info about the astrometry solution - if 'bm' in build_wcs_result['info']: - bm = build_wcs_result['info']['bm'] - # adding HIERARCH avoids a warning, they can be accessed without HIERARCH - header['HIERARCH AS_ARCSEC_PER_PIX'] = bm.scale_arcsec_per_pixel - header['HIERARCH AS_CENTER_RA_DEG'] = bm.center_ra_deg - header['HIERARCH AS_CENTER_DEC_DEG'] = bm.center_dec_deg - - with fits.open(self.filepath, 'update') as hdul: - hdul[0].header.update(header) - - else: - raise Exception(f"Could not perform astrometric calibration on {self}: {build_wcs_result=}") - @property def wcs(self): """ Returns the WCS of the reduced fit. """ @@ -327,11 +206,6 @@ def header_hintcoord(self): @property def header_objecthint(self): return self.rawfit.header_objecthint - - - # REDUCTION METHODS - - ## Delegated to telescopes or instrument classes def get_astrometry_position_hint(self, allsky=False, n_field_width=1.5): return Instrument.by_name(self.instrument).get_astrometry_position_hint(self.rawfit, allsky=allsky, n_field_width=n_field_width) diff --git a/iop4lib/instruments/andor_cameras.py b/iop4lib/instruments/andor_cameras.py index 9e35cd9c..5f436e7f 100644 --- a/iop4lib/instruments/andor_cameras.py +++ b/iop4lib/instruments/andor_cameras.py @@ -25,6 +25,8 @@ class Andor(Instrument, metaclass=ABCMeta): r""" Abstract class for OSN Andor cameras.""" + required_masters = ['masterbias', 'masterflat'] + @classmethod def classify_juliandate_rawfit(cls, rawfit): """ @@ -174,6 +176,9 @@ def get_astrometry_size_hint(cls, rawfit): elif rawfit.header['NAXIS1'] == 1024: return astrometry.SizeHint(lower_arcsec_per_pixel=2*0.95*cls.arcsec_per_pix, upper_arcsec_per_pixel=2*1.05*cls.arcsec_per_pix) + @classmethod + def has_pairs(cls, fit_instance): + return False @classmethod def compute_relative_polarimetry(cls, polarimetry_group): diff --git a/iop4lib/instruments/cafos.py b/iop4lib/instruments/cafos.py index 122fa1d0..867adad9 100644 --- a/iop4lib/instruments/cafos.py +++ b/iop4lib/instruments/cafos.py @@ -31,6 +31,7 @@ class CAFOS(Instrument): field_width_arcmin = 34.0 field_height_arcmin = 34.0 + required_masters = ['masterbias', 'masterflat'] @classmethod def classify_juliandate_rawfit(cls, rawfit): @@ -140,6 +141,11 @@ def get_astrometry_size_hint(cls, rawfit): return astrometry.SizeHint(lower_arcsec_per_pixel=0.95*cls.arcsec_per_pix, upper_arcsec_per_pixel=1.05*cls.arcsec_per_pix) + @classmethod + def has_pairs(cls, fit_instance): + """ At the moment, CAFOS polarimetry. """ + return (fit_instance.obsmode == OBSMODES.POLARIMETRY) + @classmethod def compute_relative_polarimetry(cls, polarimetry_group): """ Computes the relative polarimetry for a polarimetry group for CAFOS observations. 
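(Illustrative sketch, not part of the patch: the refactor in this commit routes per-instrument behaviour through Instrument.by_name() and the new has_pairs() hook, so calling code no longer special-cases CAFOS polarimetry. The snippet below shows how that dispatch might be exercised; it assumes an already-configured IOP4/Django session, e.g. the --interactive shell, that Instrument is importable from iop4lib.instruments, and that a ReducedFit with id=1 exists. Those names and the id are assumptions for illustration only.)

    # Sketch under the assumptions stated above (hypothetical id and import path).
    from iop4lib.db import ReducedFit
    from iop4lib.instruments import Instrument

    redf = ReducedFit.objects.get(id=1)           # hypothetical ReducedFit
    instr = Instrument.by_name(redf.instrument)   # resolve the instrument subclass by its name
    print(instr.has_pairs(redf))                  # True only for CAFOS polarimetry frames at this point
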
diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index 519cf5af..0830cd93 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -30,6 +30,8 @@ class DIPOL(Instrument): field_width_arcmin = 9.22 field_height_arcmin = 6.28 + required_masters = ['masterbias', 'masterflat', 'masterdark'] + @classmethod def classify_juliandate_rawfit(cls, rawfit: 'RawFit'): """ diff --git a/iop4lib/instruments/instrument.py b/iop4lib/instruments/instrument.py index 81eac5ec..bc180ad0 100644 --- a/iop4lib/instruments/instrument.py +++ b/iop4lib/instruments/instrument.py @@ -7,9 +7,11 @@ # other imports from abc import ABCMeta, abstractmethod +import os import re import numpy as np import math +import astropy.io.fits as fits # iop4lib imports from iop4lib.enums import * @@ -20,7 +22,7 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: - from iop4lib.db import ReducedFit + from iop4lib.db import RawFit, ReducedFit class Instrument(metaclass=ABCMeta): """ Base class for instruments. @@ -64,6 +66,11 @@ def arcsec_per_pix(self): def gain_e_adu(self): pass + @property + @abstractmethod + def required_masters(self): + pass + # Class methods (you should be using these from the Instrument class, not subclasses) @classmethod @@ -130,7 +137,6 @@ def classify_exptime(cls, rawfit): with fits.open(rawfit.filepath) as hdul: rawfit.exptime = hdul[0].header["EXPTIME"] - @classmethod def get_header_objecthint(self, rawfit): r""" Get a hint for the AstroSource in this image from the header. OBJECT is a standard keyword. Return None if none found. @@ -161,6 +167,14 @@ def get_astrometry_size_hint(cls, rawfit): """ Get the size hint for this telescope / rawfit.""" pass + @classmethod + @abstractmethod + def has_pairs(cls, fit_instance: 'ReducedFit' or 'RawFit') -> bool: + """ Indicates whether both ordinary and extraordinary sources are present + in the file. At the moment, this happens only for CAFOS polarimetry + """ + pass + @classmethod def build_wcs(self, reducedfit: 'ReducedFit'): """ Build a WCS for a reduced fit from this instrument. 
@@ -171,6 +185,139 @@ def build_wcs(self, reducedfit: 'ReducedFit'): return build_wcs(reducedfit) + @classmethod + def associate_masters(cls, reducedfit, **masters_dict): + """ Associate a masterbias, masterdark and masterflat to this reducedfit.""" + + from iop4lib.db import MasterBias, MasterDark, MasterFlat + + for (attrname, model) in zip(['masterbias', 'masterdark', 'masterflat'], [MasterBias, MasterDark, MasterFlat]): + + if attrname not in cls.required_masters: + continue + + if masters_dict.pop(attrname) is not None: + setattr(reducedfit, attrname, masters_dict.pop(attrname)) + else: + if (master := reducedfit.rawfit.request_master(model)) is not None: + setattr(reducedfit, attrname, master) + else: + logger.warning(f"{reducedfit}: {attrname} in this epoch could not be found, attemptying adjacent epochs.") + if (master := reducedfit.rawfit.request_master(model, other_epochs=True)) is not None: + setattr(reducedfit, attrname, master) + else: + logger.error(f"{reducedfit}: Could not find any {attrname}, not even in adjacent epochs.") + reducedfit.set_flag(ReducedFit.FLAGS.ERROR) + + @classmethod + def apply_masters(cls, reducedfit): + import astropy.io.fits as fits + + logger.debug(f"{reducedfit}: applying masters") + + rf_data = fits.getdata(reducedfit.rawfit.filepath) + mb_data = fits.getdata(reducedfit.masterbias.filepath) + mf_data = fits.getdata(reducedfit.masterflat.filepath) + + if reducedfit.masterdark is not None: + md_dark = fits.getdata(reducedfit.masterdark.filepath) + else : + logger.warning(f"{reducedfit}: no masterdark found, assuming dark current = 0, is this a CCD camera and it's cold?") + md_dark = 0 + + data_new = (rf_data - mb_data - md_dark*reducedfit.rawfit.exptime) / (mf_data) + + header_new = fits.Header() + + if not os.path.exists(os.path.dirname(reducedfit.filepath)): + logger.debug(f"{reducedfit}: creating directory {os.path.dirname(reducedfit.filepath)}") + os.makedirs(os.path.dirname(reducedfit.filepath)) + + fits.writeto(reducedfit.filepath, data_new, header=header_new, overwrite=True) + + @classmethod + def astrometric_calibration(cls, reducedfit: 'ReducedFit'): + """ Performs astrometric calibration on the reduced fit, giving it the appropriate WCS. + + If the are both ordinary and extraordinary sources in the field, one WCS will be built for each, + and the will be saved in the first and second extensions of the FITS file. + """ + + build_wcs_result = cls.build_wcs(reducedfit) + + if build_wcs_result['success']: + + logger.debug(f"{reducedfit}: saving WCSs to FITS header.") + + wcs1 = build_wcs_result['wcslist'][0] + + header = fits.Header() + + header.update(wcs1.to_header(relax=True, key="A")) + + if reducedfit.has_pairs: + wcs2 = build_wcs_result['wcslist'][1] + header.update(wcs2.to_header(relax=True, key="B")) + + # if available, save also some info about the astrometry solution + if 'bm' in build_wcs_result['info']: + bm = build_wcs_result['info']['bm'] + # adding HIERARCH avoids a warning, they can be accessed without HIERARCH + header['HIERARCH AS_ARCSEC_PER_PIX'] = bm.scale_arcsec_per_pixel + header['HIERARCH AS_CENTER_RA_DEG'] = bm.center_ra_deg + header['HIERARCH AS_CENTER_DEC_DEG'] = bm.center_dec_deg + + with fits.open(reducedfit.filepath, 'update') as hdul: + hdul[0].header.update(header) + + else: + raise Exception(f"Could not perform astrometric calibration on {reducedfit}: {build_wcs_result=}") + + @classmethod + def build_file(cls, reducedfit: 'ReducedFit'): + """ Builds the ReducedFit FITS file. 
+ + Notes + ----- + The file is built by: + - applying masters + - try to astrometerically calibrate the reduced fit, giving it a WCS. + - find the catalog sources in the field. + """ + + from iop4lib.db import AstroSource, ReducedFit + + logger.debug(f"{reducedfit}: building file") + + reducedfit.unset_flag(ReducedFit.FLAGS.BUILT_REDUCED) + + reducedfit.apply_masters() + + logger.debug(f"{reducedfit}: performing astrometric calibration") + + try: + reducedfit.astrometric_calibration() + except Exception as e: + logger.error(f"{reducedfit}: could not perform astrometric calibration on {reducedfit}: {e}") + reducedfit.set_flag(ReducedFit.FLAGS.ERROR_ASTROMETRY) + if reducedfit.auto_merge_to_db: + reducedfit.save() + raise e + else: + logger.debug(f"{reducedfit}: astrometric calibration was successful.") + reducedfit.unset_flag(ReducedFit.FLAGS.ERROR_ASTROMETRY) + + logger.debug(f"{reducedfit}: searching for sources in field...") + sources_in_field = AstroSource.get_sources_in_field(fit=reducedfit) + + logger.debug(f"{reducedfit}: found {len(sources_in_field)} sources in field.") + reducedfit.sources_in_field.set(sources_in_field, clear=True) + + reducedfit.set_flag(ReducedFit.FLAGS.BUILT_REDUCED) + + if reducedfit.auto_merge_to_db: + reducedfit.save() + @classmethod def compute_aperture_photometry(cls, redf, aperpix, r_in, r_out): @@ -204,7 +351,7 @@ def compute_aperture_photometry(cls, redf, aperpix, r_in, r_out): error = calc_total_error(img, bkg.background_rms, cls.gain_e_adu) for astrosource in redf.sources_in_field.all(): - for pairs, wcs in (('O', redf.wcs1), ('E', redf.wcs2)) if redf.with_pairs else (('O',redf.wcs),): + for pairs, wcs in (('O', redf.wcs1), ('E', redf.wcs2)) if redf.has_pairs else (('O',redf.wcs),): ap = CircularAperture(astrosource.coord.to_pixel(wcs), r=aperpix) annulus = CircularAnnulus(astrosource.coord.to_pixel(wcs), r_in=r_in, r_out=r_out) @@ -225,7 +372,6 @@ def compute_aperture_photometry(cls, redf, aperpix, r_in, r_out): bkg_flux_counts=bkg_flux_counts, bkg_flux_counts_err=bkg_flux_counts_err, flux_counts=flux_counts, flux_counts_err=flux_counts_err) - @classmethod def compute_relative_photometry(cls, redf: 'ReducedFit') -> None: """ Common relative photometry method for all instruments. 
""" diff --git a/iop4lib/utils/astrometry.py b/iop4lib/utils/astrometry.py index b8bb1f11..ad0ca53b 100644 --- a/iop4lib/utils/astrometry.py +++ b/iop4lib/utils/astrometry.py @@ -157,7 +157,7 @@ def build_wcs_params_shotgun(redf, shotgun_params_kwargs=None, hard=False): # FAST VERSION - if redf.with_pairs: + if redf.has_pairs: params["keep_n_seg"] = [300] else: params["keep_n_seg"] = [150] @@ -268,7 +268,7 @@ def build_wcs_params_shotgun(redf, shotgun_params_kwargs=None, hard=False): -def _build_wcs_params_shotgun_helper(redf, with_pairs=None, +def _build_wcs_params_shotgun_helper(redf, has_pairs=None, bkg_filter_size = 11, bkg_box_size = 16, seg_kernel_size = None, @@ -286,8 +286,8 @@ def _build_wcs_params_shotgun_helper(redf, with_pairs=None, imgdata = redf.mdata - if with_pairs is None: - with_pairs = redf.with_pairs + if has_pairs is None: + has_pairs = redf.has_pairs if size_hint is None: size_hint = redf.get_astrometry_size_hint() @@ -295,7 +295,7 @@ def _build_wcs_params_shotgun_helper(redf, with_pairs=None, if position_hint is None: position_hint = redf.get_astrometry_position_hint(allsky=allsky) - if with_pairs: + if has_pairs: if bins is None: bins = int( 0.75 * max(imgdata.shape) ) if hist_range is None: @@ -329,7 +329,7 @@ def _build_wcs_params_shotgun_helper(redf, with_pairs=None, # Pair finding with results from image segmentation - if with_pairs: + if has_pairs: seg1, seg2, seg_d0, seg_disp_sign = get_pairs_d(pos_seg, d_eps=d_eps, bins=bins, hist_range=hist_range) logger.debug(f"{redf}: seg pairs -> {len(seg1)} ({len(seg1)/len(pos_seg)*100:.1f}%), seg_disp_sign={seg_disp_sign}") seg1_best, seg2_best, seg_disp_best, seg_disp_sign_best = get_best_pairs(seg1, seg2, seg_disp_sign) @@ -343,7 +343,7 @@ def _build_wcs_params_shotgun_helper(redf, with_pairs=None, bm = None - if with_pairs: ## Attempt both with D pairs and XY pairs + if has_pairs: ## Attempt both with D pairs and XY pairs attempts = ((f"Seg Best XY Pairs (n={len(seg1xy_best)})", seg1xy_best, seg_disp_sign_xy_best), (f"Seg Best D Pairs (n={len(seg1_best)})", seg1_best, seg_disp_sign_best),) else: ## Use the positions of the segments @@ -380,7 +380,7 @@ def _build_wcs_params_shotgun_helper(redf, with_pairs=None, wcs1 = WCS(bm.wcs_fields) - if with_pairs: + if has_pairs: # Build WCS for pairs (just displace the center pixel by the disp_sign) wcs2 = wcs1.deepcopy() wcs2.wcs.crpix[0] += disp_sign[0] @@ -389,7 +389,7 @@ def _build_wcs_params_shotgun_helper(redf, with_pairs=None, # save results and return return {'success':True, - 'wcslist': [wcs1, wcs2] if with_pairs else [wcs1], + 'wcslist': [wcs1, wcs2] if has_pairs else [wcs1], 'info': _save_astrocalib_proc_vars(locals())} @@ -401,7 +401,7 @@ def _save_astrocalib_proc_vars(locals_dict): astrocalib_proc_vars = dict() save_list = [ - 'with_pairs', + 'has_pairs', 'bkg_box_size', 'bkg_filter_size', 'bkg', 'imgdata_bkg_substracted', @@ -411,7 +411,7 @@ def _save_astrocalib_proc_vars(locals_dict): 'stars', 'disp_sign', ] - if locals_dict['with_pairs']: + if locals_dict['has_pairs']: save_list += [ 'wcs2', 'hist_range', 'bins', 'd_eps', diff --git a/iop4lib/utils/plotting.py b/iop4lib/utils/plotting.py index f3c56ae7..bdbb0cff 100644 --- a/iop4lib/utils/plotting.py +++ b/iop4lib/utils/plotting.py @@ -185,9 +185,9 @@ def get_matched_sources(match, pos, d_eps=1.412): else: wcs1 = redf.wcs1 - with_pairs = astrocalib_proc_vars.pop('with_pairs', redf.with_pairs) + has_pairs = astrocalib_proc_vars.pop('has_pairs', redf.has_pairs) - if with_pairs: + if has_pairs: if 'wcs2' in 
astrocalib_proc_vars: wcs2 = astrocalib_proc_vars['wcs2'] else: @@ -260,7 +260,7 @@ def get_matched_sources(match, pos, d_eps=1.412): for i, source in enumerate(sources_in_field): ap = CircularAperture([*source.coord.to_pixel(wcs1)], r=20) h = ap.plot(color="r", lw=1, alpha=1, linestyle='-', ax=ax, label=f"{source.name}") - if with_pairs: + if has_pairs: ax.plot(*source.coord.to_pixel(wcs2), 'rx', alpha=1) x, y = source.coord.to_pixel(wcs1) ax.annotate(text=source.name if names_over else f"{i}", @@ -511,7 +511,7 @@ def build_astrometry_summary_images(redf, astrocalib_proc_vars, summary_kwargs): logger.debug(f"{redf}: plotting astrometry summary image of segmentation results") - if astrocalib_proc_vars['with_pairs']: + if astrocalib_proc_vars['has_pairs']: fig = mplt.figure.Figure(figsize=(12,6), dpi=iop4conf.mplt_default_dpi) axs = fig.subplots(nrows=2, ncols=4) @@ -562,7 +562,7 @@ def build_astrometry_summary_images(redf, astrocalib_proc_vars, summary_kwargs): logger.debug(f"{redf}: plotting astrometry summary image of daofind results") - if astrocalib_proc_vars['with_pairs']: + if astrocalib_proc_vars['has_pairs']: fig = mplt.figure.Figure(figsize=(12,6), dpi=iop4conf.mplt_default_dpi, layout="constrained") axs = fig.subplot_mosaic([["A", "B", "C", "D"], ["A", "E", "F", "G"]]) From d71dd076dc0b0810fc1cea67822b79791fcc0648 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Sat, 21 Oct 2023 17:17:26 +0200 Subject: [PATCH 049/168] instrument.py: add docstrings --- iop4lib/instruments/instrument.py | 28 +++++++++++++++++++++++----- 1 file changed, 23 insertions(+), 5 deletions(-) diff --git a/iop4lib/instruments/instrument.py b/iop4lib/instruments/instrument.py index bc180ad0..0b1c1259 100644 --- a/iop4lib/instruments/instrument.py +++ b/iop4lib/instruments/instrument.py @@ -37,16 +37,19 @@ class Instrument(metaclass=ABCMeta): @property @abstractmethod def name(self): + """ The name of the instrument.""" pass @property @abstractmethod def telescope(self): + """ The telescope this instrument is mounted on.""" pass @property @abstractmethod def instrument_kw(self): + """ The keyword in the FITS header that identifies this instrument.""" pass # Instrument specific properties (subclasses must implement these) @@ -54,28 +57,36 @@ def instrument_kw(self): @property @abstractmethod def field_width_arcmin(self): + """ Field width in arcmin.""" pass @property @abstractmethod def arcsec_per_pix(self): + """ Pixel size in arcseconds per pixel.""" pass @property @abstractmethod def gain_e_adu(self): + """ Gain in e-/ADU. Used to compute the error in aperture photometry.""" pass @property @abstractmethod def required_masters(self): + r""" List of calibration frames needed. + + Cooled CCD cameras will only need `required_masters = ['masterbias', 'masterflat']` in the subclass, since dark current is close to zero. + If dark current is not negligible, set `required_masters = ['masterbias', 'masterdark', 'masterflat']` in the subclass. + """ pass # Class methods (you should be using these from the Instrument class, not subclasses) @classmethod - @abstractmethod def get_known(cls): + """ Return a list of all known instruments subclasses.""" from .andor_cameras import AndorT90, AndorT150 from .cafos import CAFOS from .dipol import DIPOL @@ -85,7 +96,7 @@ def get_known(cls): @classmethod def by_name(cls, name: str) -> 'Instrument': """ - Try to get instrument by name, else raise Exception. + Try to get instrument subclass by name, else raise Exception. 
""" for instr in Instrument.get_known(): if instr.name == name: @@ -96,6 +107,8 @@ def by_name(cls, name: str) -> 'Instrument': # You should be using these from the subclasses already # these don't need to be overriden in subclasses, but they can be + # classification methods + @classmethod def classify_rawfit(cls, rawfit): cls.check_instrument_kw(rawfit) @@ -114,6 +127,7 @@ def check_instrument_kw(cls, rawfit): @classmethod def classify_imgsize(cls, rawfit): + """ Read the size of the image from the FITS header, and save it in rawfit.imgsize.""" import astropy.io.fits as fits from iop4lib.db import RawFit @@ -137,6 +151,8 @@ def classify_exptime(cls, rawfit): with fits.open(rawfit.filepath) as hdul: rawfit.exptime = hdul[0].header["EXPTIME"] + # reduction methods + @classmethod def get_header_objecthint(self, rawfit): r""" Get a hint for the AstroSource in this image from the header. OBJECT is a standard keyword. Return None if none found. @@ -189,7 +205,7 @@ def build_wcs(self, reducedfit: 'ReducedFit'): def associate_masters(cls, reducedfit, **masters_dict): """ Associate a masterbias, masterdark and masterflat to this reducedfit.""" - from iop4lib.db import MasterBias, MasterDark, MasterFlat + from iop4lib.db import ReducedFit, MasterBias, MasterDark, MasterFlat for (attrname, model) in zip(['masterbias', 'masterdark', 'masterflat'], [MasterBias, MasterDark, MasterFlat]): @@ -211,6 +227,7 @@ def associate_masters(cls, reducedfit, **masters_dict): @classmethod def apply_masters(cls, reducedfit): + """ Apply the associated calibration frames to the raw fit to obtain the reduced fit.""" import astropy.io.fits as fits logger.debug(f"{reducedfit}: applying masters") @@ -280,8 +297,8 @@ def build_file(cls, reducedfit: 'ReducedFit'): Notes ----- The file is built by: - - applying masters - - try to astrometerically calibrate the reduced fit, giving it a WCS. + - applying master calibration frames. + - astrometrically calibrate the reduced fit, giving it a WCS. - find the catalog sources in the field. 
""" @@ -320,6 +337,7 @@ def build_file(cls, reducedfit: 'ReducedFit'): @classmethod def compute_aperture_photometry(cls, redf, aperpix, r_in, r_out): + """ Common aperture photometry method for all instruments.""" from iop4lib.db.aperphotresult import AperPhotResult from iop4lib.utils.sourcedetection import get_bkg, get_segmentation From e57a57dc8bf5c2c736f4c8981cffb4785691f95d Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Sun, 22 Oct 2023 12:37:59 +0200 Subject: [PATCH 050/168] iop4.py: fix bug --- iop4lib/iop4.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/iop4lib/iop4.py b/iop4lib/iop4.py index c78824cd..7e5dd15f 100644 --- a/iop4lib/iop4.py +++ b/iop4lib/iop4.py @@ -219,7 +219,9 @@ def main(): if args.interactive: logger.info("Jumping to IPython shell.") import IPython - IPython.embed(header="Start IOP4ing!", module=sys.modules['__main__']) + _ns = dict(globals()) + _ns.update(locals()) + IPython.embed(header="Start IOP4ing!", module=sys.modules['__main__'], user_ns=_ns) sys.exit(0) From 76b2219c45f99f67488c0163600b349c55693293 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Sun, 22 Oct 2023 12:39:36 +0200 Subject: [PATCH 051/168] keep implementing darks --- .../templates/iop4admin/view_fitdetails.html | 5 +- iop4lib/db/epoch.py | 5 +- iop4lib/db/masterflat.py | 2 +- iop4lib/db/rawfit.py | 39 +--------------- iop4lib/instruments/instrument.py | 46 +++++++++++++++++++ 5 files changed, 56 insertions(+), 41 deletions(-) diff --git a/iop4admin/templates/iop4admin/view_fitdetails.html b/iop4admin/templates/iop4admin/view_fitdetails.html index 71700e16..aebd104d 100644 --- a/iop4admin/templates/iop4admin/view_fitdetails.html +++ b/iop4admin/templates/iop4admin/view_fitdetails.html @@ -115,7 +115,7 @@

 Reduction information:
-MasterBias and MasterFlat
+Calibration Frames

diff --git a/iop4lib/db/epoch.py b/iop4lib/db/epoch.py index 302faff9..73bee129 100644 --- a/iop4lib/db/epoch.py +++ b/iop4lib/db/epoch.py @@ -377,8 +377,11 @@ def build_masters(self, model, force_rebuild=False): # create a list of dictionaries with all the combinations of values for each keyword margs_L = [dict(zip(kw_L, prod)) for prod in itertools.product(*kw_set_D.values())] + + if len(margs_L) == 0: + logger.error(f"No {model._meta.verbose_name} will be built for this epoch since there are no files for it.") - # create master flats + # create master try: for margs in margs_L: diff --git a/iop4lib/db/masterflat.py b/iop4lib/db/masterflat.py index f46cb51d..153b83a8 100644 --- a/iop4lib/db/masterflat.py +++ b/iop4lib/db/masterflat.py @@ -159,7 +159,7 @@ def create(cls, if masterdark is None: from .masterdark import MasterDark - margs = {k:kwargs[k] for k in MasterDark.margs_kwL if k in kwargs} + margs = {k:kwargs[k] for k in MasterDark.margs_kwL if k in kwargs if k != 'exptime'} # exptime is a build parameter, but darks with different exptime can be used masterdark = MasterDark.objects.filter(**margs).first() logger.debug(f"Using {masterdark} as MasterDark for {mf}.") mf.masterdark = masterdark diff --git a/iop4lib/db/rawfit.py b/iop4lib/db/rawfit.py index 6be96e50..42eea369 100644 --- a/iop4lib/db/rawfit.py +++ b/iop4lib/db/rawfit.py @@ -309,44 +309,7 @@ def classify(self): self.save() def request_master(self, model, other_epochs=False): - """ Searchs in the DB and returns an appropiate master bias / flat / dark for this rawfit. - - Notes - ----- - It takes into account the parameters (band, size, etc) defined in Master' margs_kwL; except - for exptime, which is not taken into account. - By default, it looks for masters in the same epoch, but if other_epochs is set to True, it - will look for masters in other epochs. If more than one master is found, it returns the - one from the closest night. It will print a warning even with other_epochs if it is more than 1 - week away from the rawfit epoch. - - If no master is found, it returns None. 
- """ - - rf_vals = RawFit.objects.filter(id=self.id).values().get() - args = {k:rf_vals[k] for k in rf_vals if k in model.margs_kwL} - - args.pop("exptime", None) # exptime might be a building keywords (for flats and darks), but masters with different exptime can be applied - args["epoch"] = self.epoch # from .values() we only get epoch__id - - master = model.objects.filter(**args).first() - - if master is None and other_epochs == True: - args.pop("epoch") - - master_other_epochs = np.array(model.objects.filter(**args).all()) - - if len(master_other_epochs) == 0: - logger.debug(f"No {model._meta.verbose_name} for {args} in DB, None will be returned.") - return None - - master_other_epochs_jyear = np.array([md.epoch.jyear for md in master_other_epochs]) - master = master_other_epochs[np.argsort(np.abs(master_other_epochs_jyear - self.epoch.jyear))[0]] - - if (master.epoch.jyear - self.epoch.jyear) > 7/365: - logger.warning(f"{model._meta.verbose_name} from epoch {master.epoch} is more than 1 week away from epoch {self.epoch}.") - - return master + return Instrument.by_name(self.instrument).request_master(self, model, other_epochs=other_epochs) def request_masterbias(self, *args, **kwargs): from iop4lib.db import MasterBias diff --git a/iop4lib/instruments/instrument.py b/iop4lib/instruments/instrument.py index 0b1c1259..ee0ca86c 100644 --- a/iop4lib/instruments/instrument.py +++ b/iop4lib/instruments/instrument.py @@ -200,6 +200,51 @@ def build_wcs(self, reducedfit: 'ReducedFit'): from iop4lib.utils.astrometry import build_wcs return build_wcs(reducedfit) + @classmethod + def request_master(cls, rawfit, model, other_epochs=False): + """ Searchs in the DB and returns an appropiate master bias / flat / dark for this rawfit. + + Notes + ----- + + It takes into account the parameters (band, size, etc) defined in Master' margs_kwL; except + for exptime, since master calibration frames with different exptime can be applied. + + By default, it looks for masters in the same epoch, but if other_epochs is set to True, it + will look for masters in other epochs. If more than one master is found, it returns the + one from the closest night. It will print a warning even with other_epochs if it is more than 1 + week away from the rawfit epoch. + + If no master is found, it returns None. 
+ """ + + from iop4lib.db import RawFit + + rf_vals = RawFit.objects.filter(id=rawfit.id).values().get() + args = {k:rf_vals[k] for k in rf_vals if k in model.margs_kwL} + + args.pop("exptime", None) # exptime might be a building keywords (for flats and darks), but masters with different exptime can be applied + args["epoch"] = rawfit.epoch # from .values() we only get epoch__id + + master = model.objects.filter(**args).first() + + if master is None and other_epochs == True: + args.pop("epoch") + + master_other_epochs = np.array(model.objects.filter(**args).all()) + + if len(master_other_epochs) == 0: + logger.debug(f"No {model._meta.verbose_name} for {args} in DB, None will be returned.") + return None + + master_other_epochs_jyear = np.array([md.epoch.jyear for md in master_other_epochs]) + master = master_other_epochs[np.argsort(np.abs(master_other_epochs_jyear - rawfit.epoch.jyear))[0]] + + if (master.epoch.jyear - rawfit.epoch.jyear) > 7/365: + logger.warning(f"{model._meta.verbose_name} from epoch {master.epoch} is more than 1 week away from epoch {rawfit.epoch}.") + + return master + @classmethod def associate_masters(cls, reducedfit, **masters_dict): @@ -228,6 +273,7 @@ def associate_masters(cls, reducedfit, **masters_dict): @classmethod def apply_masters(cls, reducedfit): """ Apply the associated calibration frames to the raw fit to obtain the reduced fit.""" + import astropy.io.fits as fits logger.debug(f"{reducedfit}: applying masters") From 44dccea94dbef18105a2a1d08283cda671f86952 Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Sun, 22 Oct 2023 12:40:57 +0200 Subject: [PATCH 052/168] dipol.py: use full width masters in polarimetry --- iop4lib/instruments/dipol.py | 104 +++++++++++++++++++++++++++++++++-- 1 file changed, 98 insertions(+), 6 deletions(-) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index 0830cd93..291b44ea 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -5,8 +5,10 @@ # django imports # other imports +import os import re import astrometry +import numpy as np # iop4lib imports from iop4lib.enums import * @@ -105,13 +107,99 @@ def classify_obsmode_rawfit(cls, rawfit: 'RawFit'): logger.error(f"Error parsing NOTES keyword for {rawfit.fileloc} as a float: {e}.") + @classmethod + def request_master(cls, rawfit, model, other_epochs=False): + r""" Overriden Instrument associate_masters. + + DIPOL POLARIMETRY files are a cut of the full field, so when associating master calibration files, it needs to search a different size of images. + For DIPOL PHOTOMETRY files, everything is the same as in the parent class. 
+ + The full field images are 4144x2822, polarimetry images are 1100x900, the cut + position is saved in the images as + XORGSUBF 0 + YORGSUBF 0 + """ + + if rawfit.obsmode == OBSMODES.PHOTOMETRY: + return super().request_master(rawfit, model, other_epochs=other_epochs) + + # POLARIMETRY (see docstring) + # everything should be the same as in the parent class except for the line changing args["imgsize"] + + from iop4lib.db import RawFit + + rf_vals = RawFit.objects.filter(id=rawfit.id).values().get() + args = {k:rf_vals[k] for k in rf_vals if k in model.margs_kwL} + + args.pop("exptime", None) # exptime might be a building keywords (for flats and darks), but masters with different exptime can be applied + args["epoch"] = rawfit.epoch # from .values() we only get epoch__id + + args["imgsize"] = "4144x2822" # search for full field calibration frames + + master = model.objects.filter(**args).first() + + if master is None and other_epochs == True: + args.pop("epoch") + + master_other_epochs = np.array(model.objects.filter(**args).all()) + + if len(master_other_epochs) == 0: + logger.debug(f"No {model._meta.verbose_name} for {args} in DB, None will be returned.") + return None + + master_other_epochs_jyear = np.array([md.epoch.jyear for md in master_other_epochs]) + master = master_other_epochs[np.argsort(np.abs(master_other_epochs_jyear - rawfit.epoch.jyear))[0]] + + if (master.epoch.jyear - rawfit.epoch.jyear) > 7/365: + logger.warning(f"{model._meta.verbose_name} from epoch {master.epoch} is more than 1 week away from epoch {rawfit.epoch}.") + + return master @classmethod - def get_header_objecthint(self, rawfit): - r""" Get a hint for the AstroSource in this image from the header. OBJECT is a standard keyword. Return None if none found. + def apply_masters(cls, reducedfit): + """ Overriden for DIPOL (see DIPOL.request_master). - Overriden for DIPOL, which are using the other_name field. + The cut position is saved in the raw fit header as: + XORGSUBF 1500 + YORGSUBF 1000 """ + + x_start = reducedfit.rawfit.header['XORGSUBF'] + y_start = reducedfit.rawfit.header['YORGSUBF'] + + x_end = x_start + reducedfit.rawfit.header['NAXIS1'] + y_end = y_start + reducedfit.rawfit.header['NAXIS2'] + + idx = np.s_[y_start:y_end, x_start:x_end] + + import astropy.io.fits as fits + + logger.debug(f"{reducedfit}: applying masters") + + rf_data = fits.getdata(reducedfit.rawfit.filepath) + mb_data = fits.getdata(reducedfit.masterbias.filepath)[idx] + mf_data = fits.getdata(reducedfit.masterflat.filepath)[idx] + + if reducedfit.masterdark is not None: + md_dark = fits.getdata(reducedfit.masterdark.filepath)[idx] + else : + logger.warning(f"{reducedfit}: no masterdark found, assuming dark current = 0, is this a CCD camera and it's cold?") + md_dark = 0 + + data_new = (rf_data - mb_data - md_dark*reducedfit.rawfit.exptime) / (mf_data) + + header_new = fits.Header() + + if not os.path.exists(os.path.dirname(reducedfit.filepath)): + logger.debug(f"{reducedfit}: creating directory {os.path.dirname(reducedfit.filepath)}") + os.makedirs(os.path.dirname(reducedfit.filepath)) + + fits.writeto(reducedfit.filepath, data_new, header=header_new, overwrite=True) + + + @classmethod + def get_header_objecthint(self, rawfit): + r""" Overriden for DIPOL, which are using the convention for the other_name field. """ from iop4lib.db import AstroSource @@ -124,7 +212,7 @@ def get_header_objecthint(self, rawfit): @classmethod def get_astrometry_size_hint(cls, rawfit: 'RawFit'): - """ Get the size hint for this telescope / rawfit. 
+ """ Implement Instrument.get_astrometry_size_hint for DIPOL. For DIPOL in OSN-T090, according to preliminary investigation of OSN crew is: Las posiciones que he tomado y el ángulo de rotación en cada caso son estos: @@ -143,7 +231,7 @@ def get_astrometry_size_hint(cls, rawfit: 'RawFit'): @classmethod def build_wcs(self, reducedfit: 'ReducedFit'): - """ Override Instrument build_wcs. + """ Overriden Instrument build_wcs. While for PHOTOMETRY observations, DIPOL has a wide field which can be astrometrically calibrated, POLARIMETRY files are small with only the source field ordinary and extraordianty images in the center (to save up space). @@ -151,7 +239,11 @@ def build_wcs(self, reducedfit: 'ReducedFit'): Therefore, to calibrate polarimetry files, we just give it a WCS centered on the source. """ + if reducedfit.obsmode == OBSMODES.PHOTOMETRY: return super().build_wcs(reducedfit) elif reducedfit.obsmode == OBSMODES.POLARIMETRY: - raise NotImplementedError("Polarimetry WCS not implemented yet for DIPOL") \ No newline at end of file + if ((src_header_obj := reducedfit.rawfit.header_objecthint) is None): + raise Exception(f"I dont know which object is this supposed to be.") + + \ No newline at end of file From 97a80b1faa44762a05309d821d1abefe53e4eadc Mon Sep 17 00:00:00 2001 From: Juan Escudero Pedrosa Date: Sun, 22 Oct 2023 13:42:49 +0200 Subject: [PATCH 053/168] iop4.py: improve cli --- iop4lib/iop4.py | 268 +++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 222 insertions(+), 46 deletions(-) diff --git a/iop4lib/iop4.py b/iop4lib/iop4.py index 7e5dd15f..7932b0eb 100644 --- a/iop4lib/iop4.py +++ b/iop4lib/iop4.py @@ -17,6 +17,10 @@ import pandas as pd import matplotlib as mplt import matplotlib.pyplot as plt +import itertools +from astropy.time import Time +import datetime +import time # iop4lib imports from iop4lib.db import * @@ -31,23 +35,28 @@ def process_epochs(epochname_list, force_rebuild, check_remote_list): - epoch_L = list() + epoch_L : list[Epoch] = list() logger.info("Epochs will be created.") for epochname in epochname_list: - epoch = Epoch.create(epochname=epochname, check_remote_list=check_remote_list) - epoch_L.append(epoch) + epoch = Epoch.create(epochname=epochname, check_remote_list=check_remote_list) + epoch_L.append(epoch) - logger.info("Creating Master Biases.") + logger.info("Creating Master Biases") for epoch in epoch_L: - epoch.build_master_biases(force_rebuild=force_rebuild) + + epoch.build_master_biases(force_rebuild=force_rebuild) logger.info("Creating Master Flats.") for epoch in epoch_L: - epoch.build_master_flats(force_rebuild=force_rebuild) + epoch.build_master_flats(force_rebuild=force_rebuild) + + logger.info("Creating Master Darks.") + for epoch in epoch_L: + epoch.build_master_darks(force_rebuild=force_rebuild) logger.info("Science files will be reduced.") @@ -65,44 +74,74 @@ def process_epochs(epochname_list, force_rebuild, check_remote_list): epoch.compute_relative_polarimetry() +def list_local_epochnames(): + """ List all local epochnames in local archives (by looking at the raw directory).""" + + local_epochnames = list() -def discover_new_epochs(add_local_epochs_to_list=False): + for tel_cls in Telescope.get_known(): + if os.path.isdir(f"{iop4conf.datadir}/raw/{tel_cls.name}/"): + local_epochnames.extend([f"{tel_cls.name}/{night}" for night in os.listdir(f"{iop4conf.datadir}/raw/{tel_cls.name}/")]) - new_epochnames_all = set() + return local_epochnames + +def list_remote_epochnames(): + """ List all remote epochnames in remote 
archives. + """ + epochnames = list() for tel_cls in Telescope.get_known(): - logger.info(f"Listing remote epochs for {tel_cls.name}...") - - remote_epochnames = tel_cls.list_remote_epochnames() - logger.info(f"Found {len(remote_epochnames)} remote epochs for {tel_cls.name}.") + epochnames.extend(tel_cls.list_remote_epochnames()) + + return epochnames - if os.path.isdir(f"{iop4conf.datadir}/raw/{tel_cls.name}/"): - local_epochnames = [f"{tel_cls.name}/{night}" for night in os.listdir(f"{iop4conf.datadir}/raw/{tel_cls.name}/")] - else: - local_epochnames = list() - logger.info(f"Found {len(local_epochnames)} epochs for {tel_cls.name} in local raw archive.") +def discover_missing_epochs(): + """ Discover missing epochs in remote archive.""" + return list(set(list_remote_epochnames()).difference(list_local_epochnames())) - if not add_local_epochs_to_list: - new_epochnames = set(remote_epochnames).difference(local_epochnames) - - logger.info(f"New epochs discovered in {tel_cls.name} (n={len(new_epochnames)}): {new_epochnames}") - new_epochnames_all = new_epochnames_all.union(new_epochnames) +def list_remote_filelocs(epochnames: None | list[str] = None): + """ Discover files in remote archive for the given epochs. - return new_epochnames_all + Use this function to list all files in the remote archive for the given epochs. + It avoids calling list_remote_raw_fnames() for each epoch. + """ + + if epochnames is None: + epochnames = list_remote_epochnames() + + grouped_epochnames = group_epochnames_by_telescope(epochnames) + filelocs = list() + for tel, epochnames in grouped_epochnames.items(): + if len(epochnames) > 0: + filelocs.extend(Telescope.by_name(tel).list_remote_filelocs(epochnames)) -def discover_local_epochs(): + return filelocs - local_epochs = set() +def list_local_filelocs(): + """ Discover local filelocs in local archive.""" + + local_filelocs = list() for tel_cls in Telescope.get_known(): - local_epochs = local_epochs.union([f"{tel_cls.name}/{night}" for night in os.listdir(f"{iop4conf.datadir}/raw/{tel_cls.name}/")]) + if os.path.isdir(f"{iop4conf.datadir}/raw/{tel_cls.name}/"): + for d in os.scandir(f"{iop4conf.datadir}/raw/{tel_cls.name}/"): + local_filelocs.extend([f"{tel_cls.name}/{d.name}/{f.name}" for f in os.scandir(f"{iop4conf.datadir}/raw/{tel_cls.name}/{d.name}")]) + + return local_filelocs - return local_epochs +def discover_missing_filelocs(): + """ Discover missing files in remote archive. + Compares the lists of remote files with the list of local files and returns the fileloc of the missing files. + + If epochnames is None, all remote epochs will be checked. 
+ """ + + return list(set(list_remote_filelocs()).difference(list_local_filelocs())) def retry_failed_files(): @@ -114,6 +153,47 @@ def retry_failed_files(): +def filter_epochname_by_date(epochname_list, date_start=None, date_end=None): + if date_start is not None: + epochname_list = [epochname for epochname in epochname_list if Epoch.epochname_to_tel_night(epochname)[1] > datetime.date.fromisoformat(date_start)] + + if date_end is not None: + epochname_list = [epochname for epochname in epochname_list if Epoch.epochname_to_tel_night(epochname)[1] < datetime.date.fromisoformat(date_end)] + + return epochname_list + +def filter_filelocs_by_date(fileloc_list, date_start=None, date_end=None): + + if date_start is not None: + fileloc_list = [fileloc for fileloc in fileloc_list if RawFit.fileloc_to_tel_night_filename(fileloc)[1] > datetime.date.fromisoformat(date_start)] + + if date_end is not None: + fileloc_list = [fileloc for fileloc in fileloc_list if RawFit.fileloc_to_tel_night_filename(fileloc)[1] < datetime.date.fromisoformat(date_end)] + + return fileloc_list + + +def group_epochnames_by_telescope(epochnames): + + epochnames_by_telescope = {tel_cls.name:list() for tel_cls in Telescope.get_known()} + + for epochname in epochnames: + tel, night = Epoch.epochname_to_tel_night(epochname) + epochnames_by_telescope[tel].append(epochname) + + return epochnames_by_telescope + +def group_filelocs_by_telescope(filelocs): + + filelocs_by_telescope = {tel_cls.name:list() for tel_cls in Telescope.get_known()} + + for fileloc in filelocs: + tel, night, filename = RawFit.fileloc_to_tel_night_filename(fileloc) + filelocs_by_telescope[tel].append(fileloc) + + return filelocs_by_telescope + + def main(): # Parse args: @@ -134,17 +214,30 @@ def main(): parser.add_argument("--nthreads", dest="nthreads", type=int, default=None, help=" Number of threads to use when possible (default: %(default)s)", required=False) parser.add_argument("--use-ray-cluster", dest="ray_use_cluster", action="store_true", help=" Use ray for parallelization", required=False) - # processing options - parser.add_argument('-l', '--epoch-list', dest='epochname_list', nargs='+', help=' List of epochs (e.g: T090/230102 T090/230204)', required=False) - parser.add_argument('--discover-new', dest='discover_new', action='store_true', help=' Discover new epochs to process them', required=False) - parser.add_argument('--discover-local', dest='discover_local', action='store_true', help=' Discover local epochs to process them', required=False) + # epoch processing options + parser.add_argument('--epoch-list', dest='epochname_list', nargs='+', help=' List of epochs (e.g: T090/230102 T090/230204)', required=False) + parser.add_argument('--discover-missing', dest='discover_missing', action='store_true', help=' Discover new epochs to process them', required=False) + parser.add_argument('--list-local', dest='list_local', action='store_true', help=' Discover local epochs to process them', required=False) parser.add_argument('--list-only', dest='list_only', action='store_true', help=' If given, the built list of epochs will be printed but not processed', required=False) + parser.add_argument('--no-check-db', dest='keep_epochs_in_db', action='store_true', help=' Process discovered epochs even if they existed in archive', required=False) + + ## file processing options + parser.add_argument('--file-list', dest='fileloc_list', nargs='+', help=' List of files (e.g: tel/yyyy-mm-dd/name))', required=False) + parser.add_argument('--discover-missing-files', 
dest='discover_missing_files', action='store_true', help=' Discover files in remote archives that are not present in archive', required=False) + parser.add_argument('--list-local-files', dest='list_local_files', action='store_true', help=' Discover local files to process them', required=False) + parser.add_argument('--list-files-only', dest='list_files_only', action='store_true', help=' If given, the built list of filelocs will be printed but not processed', required=False) + parser.add_argument('--no-check-db-files', dest='keep_files_in_db', action='store_true', help=' Process discovered files even if they existed in archive', required=False) + - ## other options - parser.add_argument('--retry-failed', dest='retry_failed', action='store_true', help=' Retry failed reduced fits', required=False) + # other options parser.add_argument('--skip-remote-file-list', dest='skip_remote_file_list', action='store_true', help=' Skip remote file list check', required=False) - parser.add_argument('--reclasify-rawfits', dest="reclassify_rawfits", action="store_true", help=" Re-classify rawfits", required=False) parser.add_argument("--force-rebuild", dest="force_rebuild", action="store_true", help=" Force re-building of files (pass force_rebuild=True)", required=False) + parser.add_argument('--retry-failed', dest='retry_failed', action='store_true', help=' Retry failed reduced fits', required=False) + parser.add_argument('--reclassify-rawfits', dest="reclassify_rawfits", action="store_true", help=" Re-classify rawfits", required=False) + + # range + parser.add_argument('--date_start', '-s', dest='date_start', type=str, default=None, help=' Start date (YYYY-MM-DD)', required=False) + parser.add_argument('--date_end', '-e', dest='date_end', type=str, default=None, help=' End date (YYYY-MM-DD)', required=False) args = parser.parse_args() @@ -181,34 +274,117 @@ def main(): if args.ray_use_cluster: iop4conf.ray_use_cluster = True - # Reduce indicated epochs + # Epochs - epochs_to_process = set() + epochnames_to_process = set() - if args.discover_new: - epochs_to_process = epochs_to_process.union(discover_new_epochs()) + if args.list_local: + logger.info("Listing local epochs...") + local_epochs = list_local_epochnames() + epochnames_to_process = epochnames_to_process.union(local_epochs) + logger.info(f"Listed {len(local_epochs)} local epochs.") - if args.discover_local: - epochs_to_process = epochs_to_process.union(discover_local_epochs()) + if args.discover_missing: + logger.info("Discovering missing epochs...") + missing_epochs = discover_missing_epochs() + epochnames_to_process = epochnames_to_process.union(missing_epochs) + logger.info(f"Discovered {len(missing_epochs)} missing epochs.") if args.epochname_list is not None: - epochs_to_process = epochs_to_process.union(args.epochname_list) + logger.info("Adding epochs from command line...") + epochnames_to_process = epochnames_to_process.union(args.epochname_list) + logger.info(f"Added {len(args.epochname_list)} epochs from command line.") + + if len(epochnames_to_process) > 0 and not args.keep_epochs_in_db: + logger.info("Removing epochs already in the DB...") + epochnames_in_db = set([epoch.epochname for epoch in Epoch.objects.all()]) + epochnames_to_process = epochnames_to_process.difference(epochnames_in_db) + logger.info(f"Left {len(epochnames_to_process)} epochs to process.") + + logger.info(f"Gathered {len(epochnames_to_process)} epochs to process between {args.date_start} and {args.date_end}.") + + if args.date_start is not None or args.date_end is 
not None: + logger.info("Filtering epochs by date.") + epochnames_to_process = filter_epochname_by_date(epochnames_to_process, args.date_start, args.date_end) + logger.info(f"Filtered to {len(epochnames_to_process)} epochs to process between {args.date_start} and {args.date_end}.") + + logger.debug(f"{epochnames_to_process=}") + + if not args.list_only: + if len(epochnames_to_process) > 0: + logger.info("Processing epochs.") + process_epochs(epochnames_to_process, args.force_rebuild, check_remote_list=~args.skip_remote_file_list) + else: + logger.info("Invoked with --list-only!") + + # Files + + filelocs_to_process = set() + + if args.list_local_files: + logger.info("Listing local files...") + filelocs_local = list_local_filelocs() + filelocs_to_process = filelocs_to_process.union(filelocs_local) + logger.info(f"Listed {len(filelocs_local)} local files.") + + if args.discover_missing_files: + logger.info("Discovering missing files...") + filelocs_missing = discover_missing_filelocs() + filelocs_to_process = filelocs_to_process.union(filelocs_missing) + logger.info(f"Discovered {len(filelocs_missing)} missing files.") + + if args.fileloc_list is not None: + logger.info("Adding files from command line...") + filelocs_to_process = filelocs_to_process.union(args.file_list) + logger.info(f"Added {len(args.file_list)} files from command line.") + + if len(filelocs_to_process) > 0 and not args.keep_files_in_db: + logger.info(f"Removing files already in the DB ({RawFit.objects.count()}).") + filelocs_in_db = set([rawfit.fileloc for rawfit in RawFit.objects.all()]) + filelocs_to_process = filelocs_to_process.difference(filelocs_in_db) + logger.info(f"Left {len(filelocs_to_process)} files to process.") + + logger.info(f"Gathered {len(filelocs_to_process)} files to process.") + + if args.date_start is not None or args.date_end is not None: + logger.info("Filtering files by date...") + filelocs_to_process = filter_filelocs_by_date(filelocs_to_process, args.date_start, args.date_end) + logger.info(f"Filtered to {len(filelocs_to_process)} filelocs_to_process between {args.date_start} and {args.date_end}.") + + logger.debug(f"{filelocs_to_process=}") + + if not args.list_files_only: + + if args.discover_missing_files and len(filelocs_missing) > 0: + logger.info("Downloading missing files.") + for telname, filelocs in group_filelocs_by_telescope(filelocs_missing): + Telescope.by_name(telname).download_rawfits(filelocs) + + for fileloc in filelocs_missing: + rawfit = RawFit.create(fileloc=fileloc) + + if len(filelocs_to_process) > 0: + logger.info("Processing files.") + pass - if len(epochs_to_process) > 0 and not args.list_only: - process_epochs(epochs_to_process, args.force_rebuild, check_remote_list=~args.skip_remote_file_list) else: - logger.info("Invoked with --list-only:") - logger.info(f"{epochs_to_process=}") + logger.info("Invoked with --list-files-only") # Classify rawfits if indicated if args.reclassify_rawfits: + logger.info("Classifying rawfits.") - for epochname in epochs_to_process: + + for epochname in epochnames_to_process: epoch = Epoch.by_epochname(epochname) for rawfit in epoch.rawfits.all(): rawfit.classify() + for fileloc in filelocs_to_process: + rawfit = RawFit.by_fileloc(fileloc) + rawfit.classify() + # Retry failed files if indicated if args.retry_failed: From 6850e9d870f359b75457ece3ad212888a4e9fe6a Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Sun, 22 Oct 2023 12:59:48 +0000 Subject: [PATCH 054/168] andort150: fix instrument_kw --- iop4lib/instruments/andor_cameras.py | 2 
+- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/iop4lib/instruments/andor_cameras.py b/iop4lib/instruments/andor_cameras.py index 5f436e7f..9156d17c 100644 --- a/iop4lib/instruments/andor_cameras.py +++ b/iop4lib/instruments/andor_cameras.py @@ -419,7 +419,7 @@ class AndorT90(Andor): class AndorT150(Andor): name = "AndorT150" - instrument_kw = "AndorT150" + instrument_kw = "Andor" telescope = OSNT150.name arcsec_per_pix = 0.232 From f3831d6aef20458469e2ca31334b1469f504c1a1 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Sun, 22 Oct 2023 13:56:19 +0000 Subject: [PATCH 055/168] customize iop4admin style, add debug warnings --- iop4admin/templates/admin/base.html | 7 +++++++ iop4admin/templates/admin/base_site.html | 8 ++++++++ iop4admin/templates/iop4admin/base.custom.css | 8 ++++++++ iop4admin/templates/iop4admin/base_site.html | 9 --------- iop4admin/templates/iop4admin/singleobj.html | 2 +- iop4api/templates/iop4api/about.html | 6 ++++++ iop4api/templates/iop4api/index.html | 3 +++ iop4site/iop4site/settings.py | 1 + 8 files changed, 34 insertions(+), 10 deletions(-) create mode 100644 iop4admin/templates/admin/base.html create mode 100644 iop4admin/templates/admin/base_site.html create mode 100644 iop4admin/templates/iop4admin/base.custom.css delete mode 100644 iop4admin/templates/iop4admin/base_site.html diff --git a/iop4admin/templates/admin/base.html b/iop4admin/templates/admin/base.html new file mode 100644 index 00000000..505ccca6 --- /dev/null +++ b/iop4admin/templates/admin/base.html @@ -0,0 +1,7 @@ +{% extends "admin/base.html" %} + +{% if site_title == "IOP4 admin" %} + {% block extrastyle %}{{ block.super }} + {% include 'iop4admin/base.custom.css' %} + {% endblock %} +{% endif %} diff --git a/iop4admin/templates/admin/base_site.html b/iop4admin/templates/admin/base_site.html new file mode 100644 index 00000000..f190a4f7 --- /dev/null +++ b/iop4admin/templates/admin/base_site.html @@ -0,0 +1,8 @@ +{% extends "admin/base_site.html" %} + +{% if site_title == "IOP4 admin" %} + {% block branding %} + {{ block.super }} + {% if debug %} DEBUG*{% endif %} + {% endblock %} +{% endif %} \ No newline at end of file diff --git a/iop4admin/templates/iop4admin/base.custom.css b/iop4admin/templates/iop4admin/base.custom.css new file mode 100644 index 00000000..79501578 --- /dev/null +++ b/iop4admin/templates/iop4admin/base.custom.css @@ -0,0 +1,8 @@ + \ No newline at end of file diff --git a/iop4admin/templates/iop4admin/base_site.html b/iop4admin/templates/iop4admin/base_site.html deleted file mode 100644 index 9e8cc604..00000000 --- a/iop4admin/templates/iop4admin/base_site.html +++ /dev/null @@ -1,9 +0,0 @@ -{% extends "admin/base.html" %} - -{% block title %}{% if subtitle %}{{ subtitle }} | {% endif %}{{ title }} | {{ site_title|default:_('Django site admin') }}{% endblock %} - -{% block branding %} -

{{ site_header|default:_('Django administration') }}

-{% endblock %} - -{% block nav-global %}{% endblock %} diff --git a/iop4admin/templates/iop4admin/singleobj.html b/iop4admin/templates/iop4admin/singleobj.html index cf79a62e..6baa99d9 100644 --- a/iop4admin/templates/iop4admin/singleobj.html +++ b/iop4admin/templates/iop4admin/singleobj.html @@ -1,4 +1,4 @@ -{% extends "iop4admin/base_site.html" %} +{% extends "admin/base_site.html" %} {% load i18n admin_urls %} diff --git a/iop4api/templates/iop4api/about.html b/iop4api/templates/iop4api/about.html index 51c690c8..1d711d97 100644 --- a/iop4api/templates/iop4api/about.html +++ b/iop4api/templates/iop4api/about.html @@ -16,6 +16,12 @@

About  rocket_launch

This site is currently running IOP4 {{ git_branch }} @ {{ git_describe }} .

+ + {% if debug %} +

+ It appears that you are running this site in the DEBUG server. +

+ {% endif %} diff --git a/iop4api/templates/iop4api/index.html b/iop4api/templates/iop4api/index.html index 63624347..3b40ee2e 100644 --- a/iop4api/templates/iop4api/index.html +++ b/iop4api/templates/iop4api/index.html @@ -41,7 +41,7 @@ From 2858253937cb2f01e0773864b32d3776b7667141 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Sun, 22 Oct 2023 15:15:06 +0000 Subject: [PATCH 057/168] improve iop4 portal navigation --- iop4api/templates/iop4api/index.html | 41 ++++++++++++++++++++++++++-- 1 file changed, 38 insertions(+), 3 deletions(-) diff --git a/iop4api/templates/iop4api/index.html b/iop4api/templates/iop4api/index.html index 3b40ee2e..cf15e839 100644 --- a/iop4api/templates/iop4api/index.html +++ b/iop4api/templates/iop4api/index.html @@ -91,14 +91,17 @@

VHEGA@IAA-CSIC

const tab_tree = JSON.parse('{{ tab_tree | escapejs }}'); const { ref } = Vue + + C1selectedTab_0 = {% if 'C1selectedTab' in tabs %} "{{ tabs.C1selectedTab }}" {% else %} "about" {% endif %}; + C2selectedTab_0 = {% if 'C2selectedTab' in tabs %} "{{ tabs.C2selectedTab }}" {% else %} "plot" {% endif %}; vueApp = Vue.createApp({ delimiters: ['[[', ']]'], data() { return { //tabs - C1selectedTab: {% if 'C1selectedTab' in tabs %} "{{ tabs.C1selectedTab }}" {% else %} "about" {% endif %}, - C2selectedTab: {% if 'C2selectedTab' in tabs %} "{{ tabs.C2selectedTab }}" {% else %} "plot" {% endif %}, + C1selectedTab: C1selectedTab_0, + C2selectedTab: C2selectedTab_0, // log logEntries: [], // plot @@ -151,8 +154,8 @@

VHEGA@IAA-CSIC

}, }, methods: { - // Update the URL based on the selected tabs updateURL() { + // Update the URL based on the selected tabs (called when C1selectedTab or C2selectedTab change) let newURL = `/iop4/${this.C1selectedTab}/` if (this.C1selectedTab in tab_tree) { @@ -162,6 +165,30 @@

VHEGA@IAA-CSIC

} window.history.pushState({}, '', newURL); + + this.updateSelectedTabFromPath(); + }, + updateSelectedTabFromPath() { + // Called when the URL changes (e.g. back/forward button) + const path = window.location.pathname; + const segments = path.split('/').filter(Boolean); + + // Reset to default values + this.C1selectedTab = C1selectedTab_0; + this.C2selectedTab = C2selectedTab_0; + + if (segments.length >= 2) { + this.C1selectedTab = segments[1]; + } + if (segments.length >= 3) { + this.C2selectedTab = segments[2]; + } else if (this.C1selectedTab) { + // Set default second-level tab based on the first-level tab using tab_tree + const subTabs = tab_tree[this.C1selectedTab]; + if (subTabs) { + this.C2selectedTab = Object.keys(subTabs)[0]; // Set to the first key as the default + } + } }, // Add a new entry to the log addLogEntry(title, content, log_title=null) { @@ -179,6 +206,14 @@

VHEGA@IAA-CSIC

this.input_astrosource = row.name; }, }, + beforeMount() { + this.updateSelectedTabFromPath(); + }, + mounted() { + window.addEventListener('popstate', () => { + this.updateSelectedTabFromPath(); + }); + } }).use(Quasar).mount('#app') //Quasar.iconSet.set(Quasar.iconSet.materialIconsOutlined); From 54475d968a984c6cb809b3ce76ac88a883d981ae Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Sun, 22 Oct 2023 15:15:21 +0000 Subject: [PATCH 058/168] improve iop4 portal navigation --- iop4api/views/index.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/iop4api/views/index.py b/iop4api/views/index.py index f658d563..db35450b 100644 --- a/iop4api/views/index.py +++ b/iop4api/views/index.py @@ -28,7 +28,8 @@ def index(request, tabs=None): context = {} - context["tab_tree"] = json.dumps({"about":{}, "login":{}, "explore": {"catalog":{}, "query":{}, "plot":{}, "data":{}, "logs":{}}}) + tab_tree = {"about":{}, "login":{}, "explore": {"catalog":{}, "query":{}, "plot":{}, "data":{}, "logs":{}}} + context["tab_tree"] = json.dumps(tab_tree) # pass the tabs to the template (e.g. /iop4/tab1/tab2/, see urls.py) if tabs is not None: @@ -37,6 +38,11 @@ def index(request, tabs=None): # redirect to login if they are trying to see a tab that requires login if not request.user.is_authenticated and "C1selectedTab" in context['tabs'] and context['tabs']["C1selectedTab"] not in ["about", "login"]: return redirect("{}".format(reverse('iop4api:index', args=[["login",]]))) + + # # # if the tab is not in the tab tree, redirect to the index + # for i, tab in enumerate(tabs): + # if i == 0 and tab not in tab_tree.keys(): + # return redirect("{}".format(reverse('iop4api:index'))) # if the user is logged, pass source names to the template if request.user.is_authenticated: From 18674a61e78b98bec7f08e2a11202c164f94a323 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Sun, 22 Oct 2023 15:42:13 +0000 Subject: [PATCH 059/168] improve iop4 portal navigation --- iop4api/templates/iop4api/explore.html | 12 ++-- iop4api/templates/iop4api/index.html | 93 ++++++++++++++------------ 2 files changed, 57 insertions(+), 48 deletions(-) diff --git a/iop4api/templates/iop4api/explore.html b/iop4api/templates/iop4api/explore.html index b7a9f082..10816a87 100644 --- a/iop4api/templates/iop4api/explore.html +++ b/iop4api/templates/iop4api/explore.html @@ -1,7 +1,7 @@

Explore data  

- + {% comment %} {% endcomment %} @@ -19,23 +19,23 @@

Explore data   -
+
{% include 'iop4api/catalog.html' %}
- {% comment %}
+ {% comment %}
{% include 'iop4api/query.html' %}
{% endcomment %} -
+
{% include 'iop4api/plot.html' %}
-
+
{% include 'iop4api/data.html' %}
-
+
{% include 'iop4api/logs.html' %}
diff --git a/iop4api/templates/iop4api/index.html b/iop4api/templates/iop4api/index.html index cf15e839..7af6ee46 100644 --- a/iop4api/templates/iop4api/index.html +++ b/iop4api/templates/iop4api/index.html @@ -46,7 +46,7 @@

VHEGA@IAA-CSIC

{% endif %}
- + @@ -66,16 +66,16 @@

VHEGA@IAA-CSIC

-
+
{% include 'iop4api/about.html' %}
-
+
{% include 'iop4api/login.html' %}
{% if request.user.is_authenticated %} -
+
{% include 'iop4api/explore.html' %}
{% endif %} @@ -100,8 +100,7 @@

VHEGA@IAA-CSIC

data() { return { //tabs - C1selectedTab: C1selectedTab_0, - C2selectedTab: C2selectedTab_0, + selectedTabs: [C1selectedTab_0, C2selectedTab_0], // log logEntries: [], // plot @@ -122,26 +121,28 @@

VHEGA@IAA-CSIC

} }, watch: { - // Watch for changes in C1selectedTab - C1selectedTab(newVal, oldVal) { - this.updateURL(); - }, - // Watch for changes in C2selectedTab - //C2selectedTab(newVal, oldVal) { - // this.updateURL(); - //}, - // needed to load the catalog -only- if necessary - C2selectedTab: { - handler(newVal) { + // Watch for changes in selectedTabs + selectedTabs: { + handler(newTabs, oldTabs) { this.updateURL(); - if ((this.catalog == null) & (newVal == 'catalog')) { - load_catalog(); + + // Handling specific cases based on the new tab selections + if (newTabs.length > 0) { + // first level tab } - if ((this.pipeline_log.data == null) & (newVal == 'logs')) { + if (newTabs.length > 1) { + const newVal = newTabs[1]; // second level tab + + if (this.catalog == null && newVal === 'catalog') { + load_catalog(); + } + if (this.pipeline_log.data == null && newVal === 'logs') { load_pipeline_log(); + } } }, - immediate: true + deep: true, + immediate: true, }, pipeline_log_options: { handler(newValue, oldValue) { @@ -155,38 +156,46 @@

VHEGA@IAA-CSIC

}, methods: { updateURL() { - // Update the URL based on the selected tabs (called when C1selectedTab or C2selectedTab change) - let newURL = `/iop4/${this.C1selectedTab}/` - - if (this.C1selectedTab in tab_tree) { - if (this.C2selectedTab in tab_tree[this.C1selectedTab]) { - newURL = newURL + `${this.C2selectedTab}/`; + // Start forming the new URL based on the selected tabs + let newURL = '/iop4/'; + + let currentTabTree = tab_tree; + + for (let i = 0; i < this.selectedTabs.length; i++) { + const tab = this.selectedTabs[i]; + if (tab in currentTabTree) { + newURL += `${tab}/`; + currentTabTree = currentTabTree[tab]; + } else { + // Set to the first key in the object as default if not matching + if (Object.keys(currentTabTree).length > 0) { + const defaultTab = Object.keys(currentTabTree)[0]; + newURL += `${defaultTab}/`; + currentTabTree = currentTabTree[defaultTab]; + this.selectedTabs[i] = defaultTab; + } else { + // No sub-levels, so don't set anything + } } } window.history.pushState({}, '', newURL); - - this.updateSelectedTabFromPath(); }, updateSelectedTabFromPath() { // Called when the URL changes (e.g. back/forward button) const path = window.location.pathname; const segments = path.split('/').filter(Boolean); + + let currentTabTree = tab_tree; - // Reset to default values - this.C1selectedTab = C1selectedTab_0; - this.C2selectedTab = C2selectedTab_0; + // Skip the first segment if it's "iop4" + const startIndex = segments[0] === 'iop4' ? 1 : 0; - if (segments.length >= 2) { - this.C1selectedTab = segments[1]; - } - if (segments.length >= 3) { - this.C2selectedTab = segments[2]; - } else if (this.C1selectedTab) { - // Set default second-level tab based on the first-level tab using tab_tree - const subTabs = tab_tree[this.C1selectedTab]; - if (subTabs) { - this.C2selectedTab = Object.keys(subTabs)[0]; // Set to the first key as the default + for (let index = startIndex; index < segments.length; index++) { + const segment = segments[index]; + if (segment in currentTabTree) { + this.selectedTabs[index - startIndex] = segment; + currentTabTree = currentTabTree[segment]; } } }, From 66db9ab1be2a60b9334e2b448e3d9b556334f7c0 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Sun, 22 Oct 2023 16:06:26 +0000 Subject: [PATCH 060/168] iop4 portal fix header --- iop4api/static/iop4api/base.css | 15 ++++++++++++--- iop4api/templates/iop4api/index.html | 10 ++++++---- 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/iop4api/static/iop4api/base.css b/iop4api/static/iop4api/base.css index fdecfd6c..18cb2c51 100644 --- a/iop4api/static/iop4api/base.css +++ b/iop4api/static/iop4api/base.css @@ -110,16 +110,25 @@ body { } } -#navbar > div { +#site-header { display: flex; flex-direction: row; align-items: center; } -#navbar h2 { - font-size: var(--navbar-h2-fontsize-initial); +#site-header > div { + display: flex; + flex-direction: row; + flex-wrap: wrap; margin-left: var(--navbar-gaps-initial); margin-right: var(--navbar-gaps-initial); +} + +#site-header > div h2, #site-header > div span { + display: flex; + align-items: center; + padding: 0 10px; + font-size: var(--navbar-h2-fontsize-initial); transition: font-size 0.3s, margin-left 0.3s; white-space: nowrap; } diff --git a/iop4api/templates/iop4api/index.html b/iop4api/templates/iop4api/index.html index 7af6ee46..5e413cd1 100644 --- a/iop4api/templates/iop4api/index.html +++ b/iop4api/templates/iop4api/index.html @@ -40,10 +40,12 @@ @@ -236,8 +236,8 @@

Header {{forloop.counter }}:

(scroll down)
//console.log(value2.value) // Update displayed values - value1_label.innerHTML = Number(value1.value).toFixed(5); - value2_label.innerHTML = Number(value2.value).toFixed(5); + value1_label.innerHTML = Number(value1.value).toFixed(4).padStart(Math.ceil(Math.log10(vabsmax))+7); + value2_label.innerHTML = Number(value2.value).toFixed(4).padStart(Math.ceil(Math.log10(vabsmax))+7); } a = 10 From dacd9a7d414ddd0acb860f991aca404128a174ee Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Mon, 23 Oct 2023 22:51:43 +0000 Subject: [PATCH 068/168] astrometry: fix bug in border removal --- iop4lib/utils/astrometry.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/iop4lib/utils/astrometry.py b/iop4lib/utils/astrometry.py index ad0ca53b..9312bda5 100644 --- a/iop4lib/utils/astrometry.py +++ b/iop4lib/utils/astrometry.py @@ -325,7 +325,7 @@ def _build_wcs_params_shotgun_helper(redf, has_pairs=None, if border_margin_px is not None: logger.debug(f"{redf}: Removing segments within {border_margin_px} px from border.") - pos_seg = [pos for pos in pos_seg if ( (border_margin_px < pos[0] < imgdata.shape[0]-border_margin_px) and (border_margin_px < pos[1] < imgdata.shape[1]-border_margin_px))] + pos_seg = [pos for pos in pos_seg if ( (border_margin_px < pos[0] < imgdata.shape[1]-border_margin_px) and (border_margin_px < pos[1] < imgdata.shape[0]-border_margin_px))] # Pair finding with results from image segmentation From 62a87425111f7f2f68c1c41a92d23bae7b40939b Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Tue, 24 Oct 2023 03:55:50 +0000 Subject: [PATCH 069/168] dipol: fix get_header_objecthint --- iop4lib/instruments/dipol.py | 44 +++++++++++++++++++++++++++++++----- 1 file changed, 38 insertions(+), 6 deletions(-) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index f54efe2e..1bea4892 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -9,6 +9,7 @@ import re import astrometry import numpy as np +import astropy.units as u # iop4lib imports from iop4lib.enums import * @@ -201,16 +202,47 @@ def apply_masters(cls, reducedfit): @classmethod def get_header_objecthint(self, rawfit): - r""" Overriden for DIPOL, which are using the convention for the other_name field. """ + r""" Overriden for DIPOL, which are using the convention for the other_name field. + The regex used has been obtained from the notebook checking all keywords. 
+ """ + + from iop4lib.db import AstroSource - matchs = rawfit.header["OBJECT"].split('_')[0] + catalog = AstroSource.objects.exclude(srctype=SRCTYPES.CALIBRATOR).values('name', 'other_name') + + #pattern = re.compile(r"^([a-zA-Z0-9]{4,}|[a-zA-Z0-9]{1,3}(_[a-zA-Z0-9]+)?)(?=_|$)") + pattern = re.compile(r"^([a-zA-Z0-9]{1,3}_[a-zA-Z0-9]+|[a-zA-Z0-9]{4,})(?=_|$)") - if len(matchs) > 0: - return AstroSource.objects.filter(other_name__icontains=matchs[0]).first() - else: - return None + obj_kw = rawfit.header['OBJECT'] + + match = pattern.match(obj_kw) + + def get_invariable_str(s): + return s.replace(' ', '').replace('-','').replace('+','').replace('_','').upper() + + if match: + + search_str = match.group(0) + + for source in catalog: + if not source['other_name']: + continue + if get_invariable_str(search_str) in get_invariable_str(source['other_name']): + return AstroSource.objects.get(name=source['name']) + + for source in catalog: + if not source['other_name']: + continue + if get_invariable_str(search_str) in get_invariable_str(source['name']): + return AstroSource.objects.get(name=source['name']) + + return None + + + + @classmethod def get_astrometry_size_hint(cls, rawfit: 'RawFit'): From ac3f707af5a5bc093ac549c3091709c3efe4e7ac Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Tue, 24 Oct 2023 03:58:41 +0000 Subject: [PATCH 070/168] dipol: fix get_astrometry_position_hint --- iop4lib/instruments/dipol.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index 1bea4892..314dc710 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -263,6 +263,23 @@ def get_astrometry_size_hint(cls, rawfit: 'RawFit'): return astrometry.SizeHint(lower_arcsec_per_pixel=0.95*cls.arcsec_per_pix, upper_arcsec_per_pixel=1.05*cls.arcsec_per_pix) + @classmethod + def get_astrometry_position_hint(cls, rawfit: 'RawFit', allsky=False, n_field_width=1.5): + """ Get the position hint from the FITS header as an astrometry.PositionHint.""" + + hintcoord = cls.get_header_hintcoord(rawfit) + + if rawfit.header["XBINNING"] != 2: + logger.error(f"Cannot compute astrometry for {rawfit} because of the binning: {rawfit.header['XBINNING']}.") + return None + + if allsky: + hintsep = 180.0 + else: + hintsep = (n_field_width * cls.field_width_arcmin*u.Unit("arcmin")).to_value(u.deg) + + return astrometry.PositionHint(ra_deg=hintcoord.ra.deg, dec_deg=hintcoord.dec.deg, radius_deg=hintsep) + @classmethod def build_wcs(self, reducedfit: 'ReducedFit'): """ Overriden Instrument build_wcs. From 8c58ddf17427b45111cd8200b03ec14cec63f60a Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Tue, 24 Oct 2023 03:59:12 +0000 Subject: [PATCH 071/168] dipol: fix has_pairs --- iop4lib/instruments/dipol.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index 314dc710..aba1becf 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -280,6 +280,11 @@ def get_astrometry_position_hint(cls, rawfit: 'RawFit', allsky=False, n_field_wi return astrometry.PositionHint(ra_deg=hintcoord.ra.deg, dec_deg=hintcoord.dec.deg, radius_deg=hintsep) + @classmethod + def has_pairs(cls, fit_instance: 'ReducedFit' or 'RawFit') -> bool: + """ DIPOL ALWAYS HAS PAIRS?!!!! """ + return True + @classmethod def build_wcs(self, reducedfit: 'ReducedFit'): """ Overriden Instrument build_wcs. 
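The OBJECT-keyword matching added in the preceding patches can be exercised outside Django with the same regular expression and name normalization. Below is a minimal sketch, assuming a plain list of dictionaries stands in for the AstroSource name/other_name queryset; the catalog entries, the example OBJECT values and the helper match_object_keyword are invented for illustration and are not part of iop4lib.

import re

# same pattern and normalization as DIPOL.get_header_objecthint in the patch above
pattern = re.compile(r"^([a-zA-Z0-9]{1,3}_[a-zA-Z0-9]+|[a-zA-Z0-9]{4,})(?=_|$)")

def get_invariable_str(s):
    return s.replace(' ', '').replace('-', '').replace('+', '').replace('_', '').upper()

# invented stand-in for AstroSource.objects...values('name', 'other_name')
catalog = [{"name": "2200+420", "other_name": "BL Lacertae"},
           {"name": "1652+398", "other_name": "Mrk 501"}]

def match_object_keyword(obj_kw):
    m = pattern.match(obj_kw)
    if m is None:
        return None
    search = get_invariable_str(m.group(0))
    for key in ("other_name", "name"):          # other_name checked first, as in the patch
        for src in catalog:
            if src[key] and search in get_invariable_str(src[key]):
                return src["name"]
    return None

print(match_object_keyword("bllacertae_R_001"))  # -> "2200+420"
print(match_object_keyword("mrk501_30deg_002"))  # -> "1652+398"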
From a1a9a199637f79000607df902d81fd3c6868bdc7 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Tue, 24 Oct 2023 03:59:33 +0000 Subject: [PATCH 072/168] dipol: fix get_header_hintcoord --- iop4lib/instruments/dipol.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index aba1becf..0f95cd9c 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -302,4 +302,12 @@ def build_wcs(self, reducedfit: 'ReducedFit'): if ((src_header_obj := reducedfit.rawfit.header_objecthint) is None): raise Exception(f"I dont know which object is this supposed to be.") - \ No newline at end of file + + @classmethod + def get_header_hintcoord(cls, rawfit): + """ Overriden for DIPOL + + As of 2023-10-23, DIPOL does not inclide RA and DEC in the header, RA and DEC will be derived from the object name. + """ + + return rawfit.header_objecthint.coord \ No newline at end of file From 48b6c2a8919b204c0cdfe620940ecb3c9e4035b2 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Tue, 24 Oct 2023 04:04:29 +0000 Subject: [PATCH 073/168] epoch: do not stop build_masters if one margs fail --- iop4lib/db/epoch.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/iop4lib/db/epoch.py b/iop4lib/db/epoch.py index d179d285..928f46be 100644 --- a/iop4lib/db/epoch.py +++ b/iop4lib/db/epoch.py @@ -383,8 +383,8 @@ def build_masters(self, model, force_rebuild=False): # create master - try: - for margs in margs_L: + for margs in margs_L: + try: margs['epoch'] = self logger.debug(f"{margs=}") if self.rawfits.filter(imgtype=model.imgtype, **margs).count() > 0: @@ -392,9 +392,9 @@ def build_masters(self, model, force_rebuild=False): model.create(**margs, force_rebuild=force_rebuild) else: logger.debug(f"No {model._meta.verbose_name} will be built for this margs since there are no files for it.") - except Exception as e: - logger.error(f"Error building {model._meta.verbose_name} for {self.epochname}: {e}.") - self.set_flag(Epoch.FLAGS.ERROR) + except Exception as e: + logger.error(f"Error building {model._meta.verbose_name} for {self.epochname} with args {margs}: {e}.") + self.set_flag(Epoch.FLAGS.ERROR) if self.auto_merge_to_db: self.save() From f9b610921cea9cb7e1911c8d720a5918b5539e3e Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Tue, 24 Oct 2023 04:05:59 +0000 Subject: [PATCH 074/168] plotting: imshow_w_sources add kwarg for r aper --- iop4lib/utils/plotting.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/iop4lib/utils/plotting.py b/iop4lib/utils/plotting.py index bdbb0cff..a1d4ddd2 100644 --- a/iop4lib/utils/plotting.py +++ b/iop4lib/utils/plotting.py @@ -47,7 +47,7 @@ def hist_data(data, log=True, ax=None): ax.xaxis.set_major_locator(mplt.ticker.MaxNLocator(4)) -def imshow_w_sources(imgdata, pos1=None, pos2=None, normtype="log", vmin=None, vmax=None, a=10, cmap=None, ax=None): +def imshow_w_sources(imgdata, pos1=None, pos2=None, normtype="log", vmin=None, vmax=None, a=10, cmap=None, ax=None, r_aper=20): if ax is None: ax = plt.gca() @@ -85,14 +85,14 @@ def imshow_w_sources(imgdata, pos1=None, pos2=None, normtype="log", vmin=None, v pos2_present = pos2 is not None and len(pos2) > 0 if pos1_present and not pos2_present: - apertures1 = CircularAperture(pos1, r=20.0) + apertures1 = CircularAperture(pos1, r=r_aper) apertures1.plot(color="r", lw=1, alpha=0.9, linestyle='--', ax=ax) if pos1_present and pos2_present: if len(pos1) < 300: - apertures1 = 
CircularAperture(pos1, r=20.0) - apertures2 = CircularAperture(pos2, r=20.0) + apertures1 = CircularAperture(pos1, r=r_aper) + apertures2 = CircularAperture(pos2, r=r_aper) color_cycle = itertools.cycle(plt.rcParams['axes.prop_cycle'].by_key()['color']) colors = [next(color_cycle) for _ in range(len(apertures1))] @@ -101,8 +101,8 @@ def imshow_w_sources(imgdata, pos1=None, pos2=None, normtype="log", vmin=None, v ap1.plot(color=colors[i], lw=1, alpha=0.9, linestyle='--', ax=ax) ap2.plot(color=colors[i], lw=1, alpha=0.9, linestyle='-', ax=ax) else: - apertures1 = CircularAperture(pos1, r=20.0) - apertures2 = CircularAperture(pos2, r=20.0) + apertures1 = CircularAperture(pos1, r=r_aper) + apertures2 = CircularAperture(pos2, r=r_aper) apertures1.plot(color="m", lw=1, alpha=0.9, linestyle='--', ax=ax) apertures2.plot(color="y", lw=1, alpha=0.9, linestyle='-', ax=ax) From ce5f35b571e4219ab622cc3dc128aad9dd861165 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Tue, 24 Oct 2023 04:11:51 +0000 Subject: [PATCH 075/168] sourcepairing.py: add d_min and d_max, x, y equivs --- iop4lib/utils/sourcepairing.py | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/iop4lib/utils/sourcepairing.py b/iop4lib/utils/sourcepairing.py index 43ceebcd..bb5c75b5 100644 --- a/iop4lib/utils/sourcepairing.py +++ b/iop4lib/utils/sourcepairing.py @@ -12,7 +12,7 @@ -def get_pairs_d(pos, d_eps=0.8, d_min=60, d0=None, bins=None, hist_range=None, redf=None, doplot=False, ax=None): +def get_pairs_d(pos, d_eps=0.8, d_min=0, d_max=60, d0=None, bins=None, hist_range=None, redf=None, doplot=False, ax=None): """ From a list of positions, finds the most common distance between them (d0), and pairs the points that are at such distance. If d0 is given, it is used instead of computing it. @@ -42,7 +42,7 @@ def get_pairs_d(pos, d_eps=0.8, d_min=60, d0=None, bins=None, hist_range=None, r hist, edges = np.histogram(distances, bins=bins, range=hist_range) centers = (edges[:-1]+edges[1:])/2 - idx = centers < d_min + idx = (d_min <= centers) & (centers <= d_max) idx_max = np.argmax(hist[idx]) d0 = centers[idx][idx_max] @@ -70,7 +70,7 @@ def get_pairs_d(pos, d_eps=0.8, d_min=60, d0=None, bins=None, hist_range=None, r -def get_pairs_dxy(pos, d_eps=0.8, disp=None, d_min=60, bins=None, hist_range=None, redf=None, doplot=False, axs=None, fig=None): +def get_pairs_dxy(pos, dx_eps=0.8, dy_eps=0.8, d_eps=None, disp=None, dx_min=0, dx_max=60, dy_min=0, dy_max=60, d_min=None, d_max=None, bins=None, hist_range=None, redf=None, doplot=False, axs=None, fig=None): """ From a list of positions, finds the most common distances between them in both x and y axes (disp), and pairs the points that are at such distances. 
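# A self-contained toy example of the histogram-mode pairing that get_pairs_d
# implements, including the [d_min, d_max] window introduced by this patch: pair
# distances are histogrammed, the most common distance d0 inside the window is
# taken, and point pairs within d_eps of d0 are kept. All numbers below are
# invented; only numpy is assumed.
import itertools
import numpy as np

rng = np.random.default_rng(0)
ordinary = rng.uniform(100, 900, size=(20, 2))                 # fake ordinary positions
pos = np.vstack([ordinary, ordinary + np.array([35.0, 0.0])])  # partner images 35 px away

d_min, d_max, d_eps, bins, hist_range = 10, 60, 0.8, 800, (0, 800)

pairs = list(itertools.combinations(pos, 2))
distances = [np.linalg.norm(p1 - p2) for p1, p2 in pairs]

hist, edges = np.histogram(distances, bins=bins, range=hist_range)
centers = (edges[:-1] + edges[1:]) / 2
idx = (d_min <= centers) & (centers <= d_max)        # same windowing as the hunk above
d0 = centers[idx][np.argmax(hist[idx])]              # most common pair distance

paired = [(p1, p2) for p1, p2 in pairs if abs(np.linalg.norm(p1 - p2) - d0) < d_eps]
print(f"d0 ~ {d0:.1f} px, {len(paired)} candidate pairs")    # d0 should come out near 35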
@@ -96,25 +96,31 @@ def get_pairs_dxy(pos, d_eps=0.8, disp=None, d_min=60, bins=None, hist_range=Non hist_range = (0, min(redf.data.shape)) else: raise ValueError("hist_range must be specified if redf is not given") - - + + if d_min: + dx_min = dy_min = d_min + if d_max: + dx_max = dy_max = d_max + if d_eps: + dx_eps = dy_eps = d_eps + pairs = list(itertools.combinations(pos, 2)) if disp is None: disp = list() - for i in [0, 1]: # for each axis + for i, d_min, d_max in zip([0, 1], [dx_min, dy_min], [dx_max, dy_max]): # for each axis distances = [abs(p1[i]-p2[i]) for p1,p2 in pairs] hist, edges = np.histogram(distances, bins=bins, range=hist_range) centers = (edges[:-1]+edges[1:])/2 - idx = centers < d_min + idx = (d_min <= centers) & (centers <= d_max) idx_max = np.argmax(hist[idx]) d0 = centers[idx][idx_max] - + disp.append(d0) - paired = [(p1,p2) for p1,p2 in pairs if ( abs( abs( p1[0] - p2[0] ) - disp[0] ) < d_eps and abs( (abs( p1[1] - p2[1] ) - disp[1] ) ) < d_eps )] + paired = [(p1,p2) for p1,p2 in pairs if ( abs( abs( p1[0] - p2[0] ) - disp[0] ) < dx_eps and abs( (abs( p1[1] - p2[1] ) - disp[1] ) ) < dy_eps )] paired = [[p1,p2] if p1[0]>p2[0] else [p2,p1] for (p1,p2) in paired] if len(paired) == 0: From c594369a6a37c727d6607b02198ad0a7e3f2923d Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Tue, 24 Oct 2023 04:52:15 +0000 Subject: [PATCH 076/168] astrometric calibration refactor, type hinting --- iop4lib/instruments/andor_cameras.py | 19 ++++ iop4lib/instruments/instrument.py | 61 +++++++++++-- iop4lib/utils/astrometry.py | 126 ++++++--------------------- 3 files changed, 97 insertions(+), 109 deletions(-) diff --git a/iop4lib/instruments/andor_cameras.py b/iop4lib/instruments/andor_cameras.py index 9156d17c..5d931c6b 100644 --- a/iop4lib/instruments/andor_cameras.py +++ b/iop4lib/instruments/andor_cameras.py @@ -21,6 +21,9 @@ import logging logger = logging.getLogger(__name__) +import typing +if typing.TYPE_CHECKING: + from iop4lib.db.reducedfit import ReducedFit, RawFit class Andor(Instrument, metaclass=ABCMeta): r""" Abstract class for OSN Andor cameras.""" @@ -415,6 +418,22 @@ class AndorT90(Andor): arcsec_per_pix = 0.387 gain_e_adu = 4.5 + @classmethod + def build_wcs(self, reducedfit: 'ReducedFit', *args, **kwargs): + r""" Overriden for OSN-T090 + + Overriden to account for HIP2838 U band images with exptime < 2.5, which are known to fail. + """ + from iop4lib.utils.astrometry import BuildWCSResult + + # OSN-T090 images of HIP2838 in band U with < 2.5s + # they can not get automatically calibrated because there are almost no sources visible, just return error so we dont loose time trying parameters. + + if reducedfit.band == "U" and "HIP2838" in reducedfit.filename and reducedfit.exptime < 2.5: + logger.error("Skipping WCS build for HIP2838 U band image with exptime < 2.5 , as it is known to fail, and we will only lose time. Manual calibration is needed for this image. 
See build_wcs_for_HIP2838_U_band_images.ipynb for more info.") + return BuildWCSResult(success=False) + + return super().build_wcs(reducedfit, *args, **kwargs) class AndorT150(Andor): diff --git a/iop4lib/instruments/instrument.py b/iop4lib/instruments/instrument.py index ee0ca86c..82915e9d 100644 --- a/iop4lib/instruments/instrument.py +++ b/iop4lib/instruments/instrument.py @@ -12,6 +12,8 @@ import numpy as np import math import astropy.io.fits as fits +import astropy.units as u +import itertools # iop4lib imports from iop4lib.enums import * @@ -23,6 +25,7 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: from iop4lib.db import RawFit, ReducedFit + from iop4lib.utils.astrometry import BuildWCSResult class Instrument(metaclass=ABCMeta): """ Base class for instruments. @@ -192,13 +195,53 @@ def has_pairs(cls, fit_instance: 'ReducedFit' or 'RawFit') -> bool: pass @classmethod - def build_wcs(self, reducedfit: 'ReducedFit'): + def build_wcs(self, reducedfit: 'ReducedFit', shotgun_params_kwargs : dict = dict(), build_summary_images : bool = True, summary_kwargs : dict = {'with_simbad':True}) -> 'BuildWCSResult': """ Build a WCS for a reduced fit from this instrument. - By default (Instrument class), this will just call the build_wcs from iop4lib.utils.astrometry. + By default (Instrument class), this will just call the build_wcs_params_shotgun from iop4lib.utils.astrometry. + + Keyword Arguments + ----------------- + shotgun_params_kwargs : dict, optional + The parameters to pass to the shotgun_params function. + build_summary_images : bool, optional + Whether to build summary images of the process. Default is True. + summary_kwargs : dict, optional + with_simbad : bool, default True + Whether to query and plot a few Simbad sources in the image. Might be useful to + check whether the found coordinates are correct. Default is True. 
""" - from iop4lib.utils.astrometry import build_wcs - return build_wcs(reducedfit) + from iop4lib.utils.astrometry import build_wcs_params_shotgun + from iop4lib.utils.plotting import build_astrometry_summary_images + + if reducedfit.header_objecthint is not None and 'allsky' not in shotgun_params_kwargs: + if reducedfit.header_objecthint.coord.separation(reducedfit.header_hintcoord) > u.Quantity("20 arcmin"): + logger.debug(f"{reducedfit}: large pointing mismatch detected, setting allsky = True for the position hint.") + shotgun_params_kwargs["allsky"] = [True] + + + build_wcs_result = build_wcs_params_shotgun(reducedfit, shotgun_params_kwargs) + + if build_wcs_result.success and build_summary_images: + logger.debug(f"{reducedfit}: building summary images.") + build_astrometry_summary_images(reducedfit, build_wcs_result.info, summary_kwargs=summary_kwargs) + + # Save only some variables and return + + if build_wcs_result.success: + to_save_from_info_kw_L = ['params', 'bm', 'seg_d0', 'seg_disp_sign', 'seg_disp_xy', 'seg_disp_sign_xy', 'seg_disp_xy_best'] + to_save = {k:build_wcs_result.info[k] for k in to_save_from_info_kw_L if k in build_wcs_result.info} + to_save['logodds'] = build_wcs_result.info['bm'].logodds + try: + # redf.astrometry_info = [to_save] + if isinstance(reducedfit.astrometry_info, list): + reducedfit.astrometry_info = list(itertools.chain(reducedfit.astrometry_info, [to_save])) + else: + reducedfit.astrometry_info = [to_save] + except NameError: + reducedfit.astrometry_info = [to_save] + + return build_wcs_result @classmethod def request_master(cls, rawfit, model, other_epochs=False): @@ -308,23 +351,23 @@ def astrometric_calibration(cls, reducedfit: 'ReducedFit'): build_wcs_result = cls.build_wcs(reducedfit) - if build_wcs_result['success']: + if build_wcs_result.success: logger.debug(f"{reducedfit}: saving WCSs to FITS header.") - wcs1 = build_wcs_result['wcslist'][0] + wcs1 = build_wcs_result.wcslist[0] header = fits.Header() header.update(wcs1.to_header(relax=True, key="A")) if reducedfit.has_pairs: - wcs2 = build_wcs_result['wcslist'][1] + wcs2 = build_wcs_result.wcslist[1] header.update(wcs2.to_header(relax=True, key="B")) # if available, save also some info about the astrometry solution - if 'bm' in build_wcs_result['info']: - bm = build_wcs_result['info']['bm'] + if 'bm' in build_wcs_result.info: + bm = build_wcs_result.info['bm'] # adding HIERARCH avoids a warning, they can be accessed without HIERARCH header['HIERARCH AS_ARCSEC_PER_PIX'] = bm.scale_arcsec_per_pixel header['HIERARCH AS_CENTER_RA_DEG'] = bm.center_ra_deg diff --git a/iop4lib/utils/astrometry.py b/iop4lib/utils/astrometry.py index 9312bda5..509c3ab3 100644 --- a/iop4lib/utils/astrometry.py +++ b/iop4lib/utils/astrometry.py @@ -19,6 +19,9 @@ import multiprocessing import functools import itertools +import dataclasses + +# iop4lib imports from iop4lib.utils.sourcepairing import (get_pairs_d, get_pairs_dxy, get_best_pairs) from iop4lib.utils.sourcedetection import (get_bkg, get_segmentation, get_cat_sources_from_segment_map) @@ -29,100 +32,23 @@ import logging logger = logging.getLogger(__name__) +import typing +if typing.TYPE_CHECKING: + from iop4lib.db.reducedfit import ReducedFit - -def build_wcs(redf, shotgun_params_kwargs=None, build_summary_images=True, summary_kwargs=None): - """ Build the appropiate WCSs for a ReducedFit image. - - This functions tries to calibrate astrometrically a FITS image (given as a ReducedFit object). 
In the case of - images with Ordinary and Extraordinary sources, it returns two WCS, one in the first FITS extension and another - in the second one. - - Note: this function acts as a common function to call the appropiate WCS builder depending on the image type, the kind - of parallelization and the parameters. The actual WCS building is done by helpers functions, see the code. - - Parameters - ---------- - redf : ReducedFit - The ReducedFit object to build the WCSs for. - - Returns - ------- - dict - The result, of the form - { - 'success': bool, # whether the appropiate WCSs was built successfully - # if success is True, the following keys are also present: - 'wcslist': list # list of WCS objects built (usually one, two if there are extraordinary sources in the image) - 'info': dict or None # dict with extra information about the process - } - - Keyword Arguments - ----------------- - shotgun_params_kwargs : dict, optional - The parameters to pass to the shotgun_params function. - build_summary_images : bool, optional - Whether to build summary images of the process. Default is True. - summary_kwargs : dict, optional - with_simbad : bool, default True - Whether to query and plot a few Simbad sources in the image. Might be useful to - check whether the found coordinates are correct. Default is True. - +@dataclasses.dataclass +class BuildWCSResult(): + r""" + 'success': bool, # whether the appropiate WCSs was built successfully + 'wcslist': list # list of WCS objects built (usually one, two if there are extraordinary sources in the image) + 'info': dict or None # dict with extra information about the process """ - - if summary_kwargs is None: - summary_kwargs = {'with_simbad':True} - - if shotgun_params_kwargs is None: - shotgun_params_kwargs = dict() - - # OSN-T090 images of HIP2838 in band U with < 2.5s - # they can not get automatically calibrated because there are almost no sources visible, just return error so we dont loose time trying parameters. - - if redf.band == "U" and "HIP2838" in redf.filename and redf.exptime < 2.5: - logger.error("Skipping WCS build for HIP2838 U band image with exptime < 2.5 , as it is known to fail, and we will only lose time. Manual calibration is needed for this image. See build_wcs_for_HIP2838_U_band_images.ipynb for more info.") - build_wcs_result = {'success':False, 'wcs_list':[], 'info':dict()} - return build_wcs_result - - # For the rest, try to build the WCSs trying with different parameters. 
- - ## if there might be a big pointing mismatch, try with other larger separation: - - if redf.header_objecthint is not None and 'allsky' not in shotgun_params_kwargs: - if redf.header_objecthint.coord.separation(redf.header_hintcoord) > u.Quantity("20 arcmin"): - logger.debug(f"{redf}: large pointing mismatch detected, setting allsky = True for the position hint.") - shotgun_params_kwargs["allsky"] = [True] - - ## try with different parameters - - build_wcs_result = build_wcs_params_shotgun(redf, shotgun_params_kwargs) - - ## if it worked, build summary images - - if build_wcs_result['success'] and build_summary_images: - logger.debug(f"{redf}: building summary images.") - build_astrometry_summary_images(redf, build_wcs_result['info'], summary_kwargs=summary_kwargs) - - # Save only some variables and return - - if build_wcs_result['success']: - to_save_from_info_kw_L = ['params', 'bm', 'seg_d0', 'seg_disp_sign', 'seg_disp_xy', 'seg_disp_sign_xy', 'seg_disp_xy_best'] - to_save = {k:build_wcs_result['info'][k] for k in to_save_from_info_kw_L if k in build_wcs_result['info']} - to_save['logodds'] = build_wcs_result['info']['bm'].logodds - try: - # redf.astrometry_info = [to_save] - if isinstance(redf.astrometry_info, list): - redf.astrometry_info = list(itertools.chain(redf.astrometry_info, [to_save])) - else: - redf.astrometry_info = [to_save] - except NameError: - redf.astrometry_info = [to_save] - - return build_wcs_result - + success: bool + wcslist: list[WCS] = dataclasses.field(default_factory=list) + info: dict = dataclasses.field(default_factory=dict) -def build_wcs_params_shotgun(redf, shotgun_params_kwargs=None, hard=False): +def build_wcs_params_shotgun(redf: 'ReducedFit', shotgun_params_kwargs : dict() = None, hard : bool =False) -> BuildWCSResult: """ Build the appropiate WCSs for a ReducedFit image, trying different parameters. See `build_wcs` for more info. Note: at the moment, this function tries source extraction with different combination of parameters and thresholds for @@ -252,16 +178,16 @@ def build_wcs_params_shotgun(redf, shotgun_params_kwargs=None, hard=False): # logger.error(f"{redf}: some error ocurred during attempt {i+1} / {len(param_dicts_L)}, ({params_dict}), ignoring. Error: {e}") # build_wcs_result = {'success': False} - if build_wcs_result['success']: + if build_wcs_result.success: logger.debug(f"{redf}: WCS built with attempt {i+1} / {len(param_dicts_L)} ({params_dict}).") break else: # if none worked logger.error(f"{redf}: could not solve astrometry with any of the {len(param_dicts_L)} default parameter combinations for source extraction.") - return {'success': False} + return BuildWCSResult(success=False) # add the parameters that worked to the result - build_wcs_result['info']['params'] = params_dict + build_wcs_result.info['params'] = params_dict return build_wcs_result @@ -281,8 +207,8 @@ def _build_wcs_params_shotgun_helper(redf, has_pairs=None, bins=None, hist_range=None, position_hint=None, size_hint=None, allsky=False, - output_logodds_threshold=21): - """ helper func. See build_wcs_params_shotgun for more info. Same signature. """ + output_logodds_threshold=21) -> BuildWCSResult: + """ helper func, see build_wcs_params_shotgun for more info. 
""" imgdata = redf.mdata @@ -313,7 +239,7 @@ def _build_wcs_params_shotgun_helper(redf, has_pairs=None, if segment_map is None: logger.debug(f"{redf}: No segments found, returning early.") - return {'success': False} + return BuildWCSResult(success=False) seg_cat, pos_seg, tb = get_cat_sources_from_segment_map(segment_map, imgdata_bkg_substracted, convolved_data) @@ -367,7 +293,7 @@ def _build_wcs_params_shotgun_helper(redf, has_pairs=None, ## If not, desist and return early; else we continue and build and save the wcs. if bm is None: - return {'success':False} + return BuildWCSResult(success=False) else: logger.debug(f"{redf}: {msg} worked.") logger.debug(f"{redf}: {bm.index_path=}") @@ -388,9 +314,9 @@ def _build_wcs_params_shotgun_helper(redf, has_pairs=None, # save results and return - return {'success':True, - 'wcslist': [wcs1, wcs2] if has_pairs else [wcs1], - 'info': _save_astrocalib_proc_vars(locals())} + return BuildWCSResult(success=True, + wcslist=[wcs1, wcs2] if has_pairs else [wcs1], + info=_save_astrocalib_proc_vars(locals())) From a7d02fcef36694110709d0737293a0db6e0bcc81 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Tue, 24 Oct 2023 05:27:35 +0000 Subject: [PATCH 077/168] astrometry, sourcepairing: add min, max, eps dists --- iop4lib/utils/astrometry.py | 14 ++++++++++--- iop4lib/utils/sourcepairing.py | 38 +++++++++++++++++++++------------- 2 files changed, 35 insertions(+), 17 deletions(-) diff --git a/iop4lib/utils/astrometry.py b/iop4lib/utils/astrometry.py index 509c3ab3..9313771b 100644 --- a/iop4lib/utils/astrometry.py +++ b/iop4lib/utils/astrometry.py @@ -203,7 +203,15 @@ def _build_wcs_params_shotgun_helper(redf, has_pairs=None, n_rms_seg = 1.0, keep_n_seg = 200, border_margin_px = 20, - d_eps=0.8, + dx_eps=None, + dy_eps=None, + d_eps=None, + dx_min=None, + dx_max=None, + dy_min=None, + dy_max=None, + d_min=None, + d_max=None, bins=None, hist_range=None, position_hint=None, size_hint=None, allsky=False, @@ -256,11 +264,11 @@ def _build_wcs_params_shotgun_helper(redf, has_pairs=None, # Pair finding with results from image segmentation if has_pairs: - seg1, seg2, seg_d0, seg_disp_sign = get_pairs_d(pos_seg, d_eps=d_eps, bins=bins, hist_range=hist_range) + seg1, seg2, seg_d0, seg_disp_sign = get_pairs_d(pos_seg, bins=bins, hist_range=hist_range, d_min=d_min, d_eps=d_eps, d_max=d_max) logger.debug(f"{redf}: seg pairs -> {len(seg1)} ({len(seg1)/len(pos_seg)*100:.1f}%), seg_disp_sign={seg_disp_sign}") seg1_best, seg2_best, seg_disp_best, seg_disp_sign_best = get_best_pairs(seg1, seg2, seg_disp_sign) logger.debug(f"{redf}: seg pairs best -> {len(seg1_best)} ({len(seg1_best)/len(pos_seg)*100:.1f}%), seg_disp_sign_best={seg_disp_sign_best}") - seg1xy, seg2xy, seg_disp_xy, seg_disp_sign_xy = get_pairs_dxy(pos_seg, d_eps=d_eps, bins=bins, hist_range=hist_range) + seg1xy, seg2xy, seg_disp_xy, seg_disp_sign_xy = get_pairs_dxy(pos_seg, bins=bins, hist_range=hist_range, dx_min=dx_min, dx_max=dx_max, dy_min=dy_min, dy_max=dy_max, dx_eps=dx_eps, dy_eps=dy_eps) logger.debug(f"{redf}: seg pairs xy -> {len(seg1xy)}, disp_sign_xy={seg_disp_sign_xy}") seg1xy_best, seg2xy_best, seg_disp_xy_best, seg_disp_sign_xy_best = get_best_pairs(seg1xy, seg2xy, seg_disp_sign_xy) logger.debug(f"{redf}: seg pairs xy best -> {len(seg1xy_best)} ({len(seg1xy_best)/len(pos_seg)*100:.1f}%), seg_disp_sign_xy_best={seg_disp_sign_xy_best}") diff --git a/iop4lib/utils/sourcepairing.py b/iop4lib/utils/sourcepairing.py index bb5c75b5..4cf43c77 100644 --- a/iop4lib/utils/sourcepairing.py +++ 
b/iop4lib/utils/sourcepairing.py @@ -12,7 +12,9 @@ -def get_pairs_d(pos, d_eps=0.8, d_min=0, d_max=60, d0=None, bins=None, hist_range=None, redf=None, doplot=False, ax=None): +def get_pairs_d(pos, d0=None, + d_eps=None, d_min=None, d_max=None, + bins=None, hist_range=None, redf=None, doplot=False, ax=None): """ From a list of positions, finds the most common distance between them (d0), and pairs the points that are at such distance. If d0 is given, it is used instead of computing it. @@ -20,8 +22,9 @@ def get_pairs_d(pos, d_eps=0.8, d_min=0, d_max=60, d0=None, bins=None, hist_rang The pairs are ordered such that for pair (p1, p2), p1 is always to the left (smaller x value) than p2. """ - if pos is None or len(pos) < 2: - return [], [], None, None + d_eps = d_eps or 0.8 + d_min = d_min or 0 + d_max = d_max or 60 if bins is None: if redf is not None: @@ -34,6 +37,9 @@ def get_pairs_d(pos, d_eps=0.8, d_min=0, d_max=60, d0=None, bins=None, hist_rang hist_range = (0, min(redf.data.shape)) else: raise ValueError("hist_range must be specified if redf is not given") + + if pos is None or len(pos) < 2: + return [], [], None, None pairs = list(itertools.combinations(pos, 2)) distances = [np.linalg.norm(p1-p2) for p1,p2 in pairs] @@ -70,7 +76,9 @@ def get_pairs_d(pos, d_eps=0.8, d_min=0, d_max=60, d0=None, bins=None, hist_rang -def get_pairs_dxy(pos, dx_eps=0.8, dy_eps=0.8, d_eps=None, disp=None, dx_min=0, dx_max=60, dy_min=0, dy_max=60, d_min=None, d_max=None, bins=None, hist_range=None, redf=None, doplot=False, axs=None, fig=None): +def get_pairs_dxy(pos, disp=None, + dx_eps=None, dy_eps=None, d_eps=None, dx_min=None, dx_max=None, dy_min=None, dy_max=None, d_min=None, + bins=None, hist_range=None, redf=None, doplot=False, axs=None, fig=None): """ From a list of positions, finds the most common distances between them in both x and y axes (disp), and pairs the points that are at such distances. @@ -82,8 +90,14 @@ def get_pairs_dxy(pos, dx_eps=0.8, dy_eps=0.8, d_eps=None, disp=None, dx_min=0, Note: this function is similar to get_pairs_d(), but finds the most common distances both in x and y axes. """ - if pos is None or len(pos) < 2: - return [], [], None, None + dx_eps = dx_eps or 0.8 + dy_eps = dy_eps or 0.8 + d_eps = d_eps or 0.8 + dx_min = dx_min or 0 + dx_max = dx_max or 60 + dy_min = dy_min or 0 + dy_max = dy_max or 60 + d_min = d_min or 0 if bins is None: if redf is not None: @@ -96,14 +110,10 @@ def get_pairs_dxy(pos, dx_eps=0.8, dy_eps=0.8, d_eps=None, disp=None, dx_min=0, hist_range = (0, min(redf.data.shape)) else: raise ValueError("hist_range must be specified if redf is not given") - - if d_min: - dx_min = dy_min = d_min - if d_max: - dx_max = dy_max = d_max - if d_eps: - dx_eps = dy_eps = d_eps - + + if pos is None or len(pos) < 2: + return [], [], None, None + pairs = list(itertools.combinations(pos, 2)) if disp is None: From 9fbbe89d3bacf27ca97db1ad8af07780802f0358 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Tue, 24 Oct 2023 05:28:04 +0000 Subject: [PATCH 078/168] dipol: custom shotgun parameters --- iop4lib/instruments/dipol.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index 0f95cd9c..8fe6cd8b 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -294,9 +294,37 @@ def build_wcs(self, reducedfit: 'ReducedFit'): In some ocassions, there might be some close source also in the field. 
Therefore, to calibrate polarimetry files, we just give it a WCS centered on the source. + + For PHOTOMETRY files, we use the parent class method, but we set some custom shotgun_params_kwargs to account + for the low flux and big size of the images. + """ if reducedfit.obsmode == OBSMODES.PHOTOMETRY: + + shotgun_params_kwargs = dict() + + shotgun_params_kwargs["keep_n_seg"] = [300] + shotgun_params_kwargs["border_margin_px"] = [20] + shotgun_params_kwargs["output_logodds_threshold"] = [14] + shotgun_params_kwargs["n_rms_seg"] = [1.5, 1.2, 1.0] + shotgun_params_kwargs["bkg_filter_size"] = [11] + shotgun_params_kwargs["bkg_box_size"] = [32] + shotgun_params_kwargs["seg_fwhm"] = [1.0] + shotgun_params_kwargs["npixels"] = [32, 8, 16] + shotgun_params_kwargs["allsky"] = [False] + + shotgun_params_kwargs["d_eps"] = [1.2, 4.0] + shotgun_params_kwargs["dx_min"] = [150] + shotgun_params_kwargs["dx_max"] = [300] + shotgun_params_kwargs["dy_min"] = [0] + shotgun_params_kwargs["dy_max"] = [50] + shotgun_params_kwargs["bins"] = int(500) + shotgun_params_kwargs["hist_range"] = [(0,500)] + + shotgun_params_kwargs["position_hint"] = [reducedfit.position_hint] + shotgun_params_kwargs["size_hint"] = [reducedfit.size_hint] + return super().build_wcs(reducedfit) elif reducedfit.obsmode == OBSMODES.POLARIMETRY: if ((src_header_obj := reducedfit.rawfit.header_objecthint) is None): From fa32332ca713bcb12123825f374c21afe547255f Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Tue, 24 Oct 2023 05:55:34 +0000 Subject: [PATCH 079/168] improve Instrument and DIPOL build_wcs --- iop4lib/instruments/dipol.py | 16 ++++++++++------ iop4lib/instruments/instrument.py | 2 +- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index 8fe6cd8b..b304b417 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -311,21 +311,25 @@ def build_wcs(self, reducedfit: 'ReducedFit'): shotgun_params_kwargs["bkg_filter_size"] = [11] shotgun_params_kwargs["bkg_box_size"] = [32] shotgun_params_kwargs["seg_fwhm"] = [1.0] - shotgun_params_kwargs["npixels"] = [32, 8, 16] + shotgun_params_kwargs["npixels"] = [32, 8] + shotgun_params_kwargs["seg_kernel_size"] = [None] shotgun_params_kwargs["allsky"] = [False] - shotgun_params_kwargs["d_eps"] = [1.2, 4.0] + shotgun_params_kwargs["d_eps"] = [4.0] + shotgun_params_kwargs["dx_eps"] = [4.0] + shotgun_params_kwargs["dy_eps"] = [2.0] shotgun_params_kwargs["dx_min"] = [150] shotgun_params_kwargs["dx_max"] = [300] shotgun_params_kwargs["dy_min"] = [0] shotgun_params_kwargs["dy_max"] = [50] - shotgun_params_kwargs["bins"] = int(500) + shotgun_params_kwargs["bins"] = [400] shotgun_params_kwargs["hist_range"] = [(0,500)] - shotgun_params_kwargs["position_hint"] = [reducedfit.position_hint] - shotgun_params_kwargs["size_hint"] = [reducedfit.size_hint] + shotgun_params_kwargs["position_hint"] = [reducedfit.get_astrometry_position_hint(allsky=False)] + shotgun_params_kwargs["size_hint"] = [reducedfit.get_astrometry_size_hint()] - return super().build_wcs(reducedfit) + return super().build_wcs(reducedfit, shotgun_params_kwargs=shotgun_params_kwargs) + elif reducedfit.obsmode == OBSMODES.POLARIMETRY: if ((src_header_obj := reducedfit.rawfit.header_objecthint) is None): raise Exception(f"I dont know which object is this supposed to be.") diff --git a/iop4lib/instruments/instrument.py b/iop4lib/instruments/instrument.py index 82915e9d..a8288717 100644 --- a/iop4lib/instruments/instrument.py +++ 
b/iop4lib/instruments/instrument.py @@ -214,7 +214,7 @@ def build_wcs(self, reducedfit: 'ReducedFit', shotgun_params_kwargs : dict = di from iop4lib.utils.astrometry import build_wcs_params_shotgun from iop4lib.utils.plotting import build_astrometry_summary_images - if reducedfit.header_objecthint is not None and 'allsky' not in shotgun_params_kwargs: + if reducedfit.header_objecthint is not None and 'allsky' not in shotgun_params_kwargs and 'position_hint' not in shotgun_params_kwargs: if reducedfit.header_objecthint.coord.separation(reducedfit.header_hintcoord) > u.Quantity("20 arcmin"): logger.debug(f"{reducedfit}: large pointing mismatch detected, setting allsky = True for the position hint.") shotgun_params_kwargs["allsky"] = [True] From 397112a46bf9a6ceef0d10a7d2dea725bb2d345a Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Thu, 26 Oct 2023 13:48:49 +0000 Subject: [PATCH 080/168] slightly improve fit detail view speed --- .../templates/iop4admin/view_fitdetails.html | 13 ++--- iop4admin/views/fitfile.py | 25 ++++++-- iop4lib/db/fitfilemodel.py | 58 ++++++++++++------- 3 files changed, 64 insertions(+), 32 deletions(-) diff --git a/iop4admin/templates/iop4admin/view_fitdetails.html b/iop4admin/templates/iop4admin/view_fitdetails.html index ff5bd772..dfa6f352 100644 --- a/iop4admin/templates/iop4admin/view_fitdetails.html +++ b/iop4admin/templates/iop4admin/view_fitdetails.html @@ -61,14 +61,14 @@

Summary statistics


- {% for key in object.stats %} + {% for key in stats %} {% endfor %} - {% for value in object.stats.values %} + {% for value in stats.values %} {% endfor %} @@ -132,6 +132,7 @@

Calibration Frames

Astrometric calibration

+ {{ astrometry_info.working_msg }}
{{ astrometry_info.bm }}
@@ -147,7 +148,6 @@

Astrometric calibration

-

Sources in the field

{% if sources_in_field_L %}
    @@ -157,7 +157,7 @@

    Sources in the field

    {{ source.name }} {% if source.other_name %} ({{ source.other_name }}) - {% endif %}. + {% endif %} [{{ source.srctype | upper }}] @@ -198,7 +198,6 @@

Header {{ forloop.counter }}:

    (scroll down)
{{ key }}
{{ value | stringformat:".2g" }}
{% endfor %} - {% else %}

Local file does not exist; the FIT file cannot be opened.

{% endif %} @@ -210,8 +209,8 @@

Header {{ forloop.counter }}:

(scroll down)
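The template changes in this patch read precomputed stats and astrometry_info entries from the view context rather than evaluating object.stats repeatedly in the template, presumably so the statistics are computed once per request. A minimal, self-contained sketch of that idea, with illustrative helper and key names that are not taken from the actual view code:

import numpy as np
from astropy.stats import sigma_clipped_stats


def compute_fit_stats(data):
    """Summary statistics for a FITS image, computed a single time per request."""
    mean, median, std = sigma_clipped_stats(data, sigma=3.0)
    return {
        "mean": float(mean),
        "median": float(median),
        "std": float(std),
        "min": float(np.min(data)),
        "max": float(np.max(data)),
    }


if __name__ == "__main__":
    rng = np.random.default_rng(0)
    fake_image = rng.normal(loc=100.0, scale=5.0, size=(64, 64))
    context = {"stats": compute_fit_stats(fake_image)}  # e.g. passed to render()
    print(context["stats"])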
+ + + + + @@ -30,6 +35,7 @@ + @@ -111,6 +117,9 @@

VHEGA@IAA-CSIC

enable_full_lc: false, enable_iop3: false, enable_errorbars: false, + selected_plot_idx: [], + show_selected_plot_pts: false, + selected_refresh: 0, // data showDataTable: false, // catalog @@ -122,6 +131,92 @@

VHEGA@IAA-CSIC

pipeline_log_options: {'errors':true, 'warnings':true, 'info':true, 'debug':false, 'filter_text': null}, } }, + computed: { + bokeh_document() { + return Bokeh.documents.slice(-1)[0]; + }, + bokeh_source: { + get() { + if (this.bokeh_document == null) { + return null; + } else { + return this.bokeh_document.get_model_by_name('source').data; + } + }, + set(newValue) { + new Error('bokeh_source is read-only'); + } + }, + selected_plot_pts() { + + console.log("Computing selected_plot_pts"); + + this.selected_refresh++; + // Ensure that selected_plot_idx is reactive and triggers this computed property + const newIndices = this.selected_plot_idx; + + // Get the last document in the Bokeh documents array + const bokehDocument = Bokeh.documents.slice(-1)[0]; + if (!bokehDocument) { + return []; + } + + // Get the data source from the Bokeh document by its name + const dataSource = bokehDocument.get_model_by_name('source').data; + if (!dataSource) { + return []; + } + + // Use map to transform the indices into an array of point data + return newIndices.map(index => { + let pointData = {}; + for (let field in dataSource) { + if (dataSource.hasOwnProperty(field)) { + pointData[field] = dataSource[field][index]; + } + } + + // add fake but useful flag str representation field + pointData['flagsstr'] = flags_to_str(pointData['flags']); + + return pointData; + }); + + }, + selected_plot_pts_flagstate() { + if (this.selected_plot_pts.length == 0) { + notset = null; + bad_photometry = null; + bad_polarimetry = null; + } else { + if (this.selected_plot_pts.every(pt => (pt['flags'] === 0))) { + notset = true; + } else if (this.selected_plot_pts.some(pt => (pt['flags'] === 0))) { + notset = null; + } else { + notset = false; + } + + if (this.selected_plot_pts.every(pt => (pt['flags'] & (1<<0)))) { + bad_photometry = true; + } else if (this.selected_plot_pts.some(pt => (pt['flags'] & (1<<0)))) { + bad_photometry = null; + } else { + bad_photometry = false; + } + + if (this.selected_plot_pts.every(pt => (pt['flags'] & (1<<1)))) { + bad_polarimetry = true; + } else if (this.selected_plot_pts.some(pt => (pt['flags'] & (1<<1)))) { + bad_polarimetry = null; + } else { + bad_polarimetry = false; + } + } + + return {notset:notset, bad_photometry:bad_photometry, bad_polarimetry:bad_polarimetry}; + } + }, watch: { // Watch for changes in selectedTabs selectedTabs: { @@ -216,6 +311,9 @@

VHEGA@IAA-CSIC

catalog_row_clicked(evt,row) { this.input_astrosource = row.name; }, + // flagging + toggle_flag, + set_flag, }, beforeMount() { this.updateSelectedTabFromPath(); diff --git a/iop4api/templates/iop4api/plot.html b/iop4api/templates/iop4api/plot.html index d3125efe..bf0dfe35 100644 --- a/iop4api/templates/iop4api/plot.html +++ b/iop4api/templates/iop4api/plot.html @@ -2,7 +2,7 @@
-
+ {% csrf_token %}
@@ -30,7 +30,7 @@
-
+
@@ -44,10 +44,63 @@
+
+
+ + +
+ Flag GUI +
+ +
+ +
+ +
+ [[ selected_plot_pts.length ]] points selected +
+ +
+
+ + +
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + +
pkflag (int)flags (str)
[[ point.pk ]][[ point.flags ]][[ point.flagsstr ]] + bad photometry + bad polarimetry +
+
+
+
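In the flag GUI added by this patch, the flags column is treated as a bitmask, with bit 0 marking bad photometry and bit 1 bad polarimetry, and a selection of points is summarized as fully flagged, unflagged or indeterminate. A small standalone sketch of the same bookkeeping in Python; the names are illustrative and not the pipeline's actual API:

FLAG_BAD_PHOTOMETRY = 1 << 0   # bit 0, as used by the GUI checkboxes
FLAG_BAD_POLARIMETRY = 1 << 1  # bit 1


def set_flag(flags, flag, value):
    """Return the bitmask with the given flag set or cleared."""
    return flags | flag if value else flags & ~flag


def summarize_selection(flag_values, flag):
    """True if every selected point carries the flag, False if none does,
    None (indeterminate) for a mixed selection."""
    if not flag_values:
        return None
    hits = [bool(v & flag) for v in flag_values]
    if all(hits):
        return True
    if not any(hits):
        return False
    return None


if __name__ == "__main__":
    selection = [0, FLAG_BAD_PHOTOMETRY, FLAG_BAD_PHOTOMETRY | FLAG_BAD_POLARIMETRY]
    print(summarize_selection(selection, FLAG_BAD_PHOTOMETRY))   # None (mixed)
    print(set_flag(0, FLAG_BAD_POLARIMETRY, True))               # 2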
From 74a4625f08bb0811ecf4ae315b0e91839debf984 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Sat, 11 Nov 2023 20:10:00 +0000 Subject: [PATCH 139/168] iop4api: plot, flag GUI, data and logs --- iop4api/static/iop4api/gui.js | 186 +++++++++++++++++++++------ iop4api/templates/iop4api/data.html | 58 +++++++++ iop4api/templates/iop4api/index.html | 84 +++++++++++- iop4api/templates/iop4api/logs.html | 17 +-- iop4api/templates/iop4api/plot.html | 34 +++-- iop4api/views/catalog.py | 17 +-- iop4api/views/data.py | 39 ++++-- iop4api/views/plot.py | 106 ++++++++++----- iop4lib/utils/__init__.py | 32 +++++ 9 files changed, 461 insertions(+), 112 deletions(-) diff --git a/iop4api/static/iop4api/gui.js b/iop4api/static/iop4api/gui.js index 9962f6d4..04a2aa85 100644 --- a/iop4api/static/iop4api/gui.js +++ b/iop4api/static/iop4api/gui.js @@ -53,7 +53,29 @@ function load_source_datatable(form_element) { if (request.readyState === 4) { if (request.status === 200) { vueApp.addLogEntry(null, "", "Query - Resonse OK"); - make_nice_table(JSON.parse(request.responseText)); + vueApp.$data.tableData = JSON.parse(request.responseText) + vueApp.$data.tableData.tabulatorjs_coldefs = vueApp.$data.tableData.columns.map(function(c) { + cdef = { + field: c.name, + title: c.title, + visible: c.visible, + headerTooltip: c.help, + }; + + if (c.type == 'float') { + // if it is a general float field + cdef['formatter'] = function(cell, formatterParams, onRendered) { + val = cell.getValue(); + if (val == null) { + return ""; + } else { + return val.toFixed(3); + } + } + } + return cdef; + }); + make_nice_table(); vueApp.$data.showDataTable = true; } else { vueApp.addLogEntry("Error loading data", request.responseText, "Query - Error"); @@ -65,11 +87,27 @@ function load_source_datatable(form_element) { request.send(formdata); } -function make_nice_table(tableData) { +// extend tabulator with custom filters +Tabulator.extendModule("filter", "filters", { + "null":function(filterVal, rowVal){ + return rowVal == null ? true : false; + }, + "notnull":function(filterVal, rowVal){ + return rowVal != null ? true : false; + }, + "after":function(filterVal, rowVal){ + return Date.parse(rowVal) > Date.parse(filterVal) ? true : false; + }, + "before":function(filterVal, rowVal){ + return Date.parse(rowVal) < Date.parse(filterVal) ? 
true : false; + } +}); + +function make_nice_table() { var table = new Tabulator("#tableDiv", { - data: tableData.data, - columns: tableData.columns, + data: vueApp.$data.tableData.data, + columns: vueApp.$data.tableData.tabulatorjs_coldefs, // autoColumns: true, layout: "fitDataFill", // "fitDataStretch", pagination: true, @@ -89,8 +127,8 @@ function make_nice_table(tableData) { }); // link table controls to this table - document.getElementById("download-csv").onclick = function() { table.download("csv", "data.csv"); }; - document.getElementById("download-pdf").onclick = function() { table.download("pdf", "data.pdf", { orientation:"landscape", title:"title", }); }; + document.getElementById("download-csv").onclick = function() { table.download("csv", `IOP4_data_${vueApp.$data.tableData.query.source_name}.csv`); }; + document.getElementById("download-pdf").onclick = function() { table.download("pdf", `IOP4_data_${vueApp.$data.tableData.query.source_name}.pdf`, { orientation:"landscape", title:"title", }); }; //filters = array of filters currently applied, rows = array of row components that pass the filters table.on("dataFiltered", function(filters, rows){ @@ -372,8 +410,9 @@ function get_ymin_ymax(field_y, field_y_err) { function check_plot_layout() { console.log((new Date).toLocaleTimeString(), "Checking plot") + console.log("Is idle", Bokeh.documents.slice(-1)[0].is_idle) - // compute the y_min and y_max columns from the source data if not present + /* compute the y_min and y_max columns from the source data if not present */ source = Bokeh.documents.slice(-1)[0].get_model_by_name('source') @@ -400,7 +439,39 @@ function check_plot_layout() { // source.change.emit(); - // check error bar status + /* reorder table and flag gui, we want the flag gui to be as closest as posible to the plot */ + + function _pos_at_center(element) { + const {top, left, width, height} = element.getBoundingClientRect(); + return { + x: left + width / 2, + y: top + height / 2 + }; + } + + function _distance(a, b) { + const aPosition = _pos_at_center(a); + const bPosition = _pos_at_center(b); + + return Math.hypot(aPosition.x - bPosition.x, aPosition.y - bPosition.y); + } + + plotDiv = document.getElementById("plotDiv"); + tb = document.getElementById("tablePlotDiv"); + flaggui = document.getElementById("FlagGUI"); + + if (_distance(plotDiv, flaggui) > _distance(plotDiv, tb)) { + if (FlagGUI.parentElement.compareDocumentPosition(tb.parentElement.parentElement) & Node.DOCUMENT_POSITION_PRECEDING) { + FlagGUI.parentElement.after(tb.parentElement.parentElement); + } else { + FlagGUI.parentElement.before(tb.parentElement.parentElement); + } + } + + // force table to redraw by emmitting the change + Bokeh.documents.slice(-1)[0]._roots[1].change.emit(); + + /* check error bar status */ console.log("Checking errorbar status") @@ -426,27 +497,53 @@ function check_plot_layout() { // emit all changes - for (let plot of Bokeh.documents.slice(-1)[0]._roots[0].children) { - plot[0].left[0].change.emit(); - plot[0].title.change.emit(); + function _check_plot_emit_changes() { + // emit changes on the altered elements so the redraw + + for (let plot of Bokeh.documents.slice(-1)[0]._roots[0].children) { + plot[0].left[0].change.emit(); + plot[0].title.change.emit(); + } + + Bokeh.documents.slice(-1)[0]._roots[0].children[0][0].above[0].change.emit(); } - Bokeh.documents.slice(-1)[0]._roots[0].children[0][0].above[0].change.emit(); - Bokeh.documents.slice(-1)[0]._roots[0].children[0][0].change.emit(); + _check_plot_emit_changes(); 
+ + // work around because some times the plot takes a bit longer to update + // there should be a function to just redraw or trigger the resize for + // but i can not find when its the right time. 200ms should be enough + // Basically repeats the above code after 200ms. + + setTimeout(() => { + _check_plot_emit_changes(); + }, 200); + + // alternatively, emit resize event to force the plot to redraw + // but implement some flag to avoid infinite recursion (not done yet) + // window.dispatchEvent(new Event('resize')) /* plot table size */ + tb_container = document.getElementById("plotTableContainerDiv") tb_r = tb_container.parentElement tb_r.style.height = 0 y = 0 Array.from(tb_r.parentElement.children).forEach( x => { if (x != tb_r) { y += x.offsetHeight + 20; }}); // 20 is the gap - // let maxheight = clamp(tb_r.parentElement.offsetHeight - y, parseInt(getComputedStyle(tb_container)['min-height']), parseInt(getComputedStyle(tb_container)['max-height'])) + 'px'; let maxHeight = parseInt(window.getComputedStyle(tb_r.parentElement).height) - y; tb_r.style.height = ""; - tb_container.style.maxHeight = maxHeight + 'px'; + if (maxHeight > 100) { // of it is = 0 it will wrapped in column, 100 as error margin + tb_container.style.maxHeight = maxHeight + 'px'; + } else { + tb_container.style.maxHeight = "10 em"; + } + + + + console.log("Plot checked") } @@ -566,13 +663,19 @@ function load_catalog() { if (request.readyState === 4) { if (request.status === 200) { vueApp.$data.catalog = JSON.parse(request.responseText); - vueApp.$data.catalog.columns = vueApp.$data.catalog.columns.map((c) => ({ - name: c.name, - align: 'left', - label: c.title, - field: c.field, - style: 'min-width: min-content;', - })); + // map the columns data provided by the api endpoint to the columns data required by the quasar table component + vueApp.$data.catalog.columns = vueApp.$data.catalog.columns.map(function(c) { + cdef = {}; + cdef = Object.assign(c, c); + cdef = Object.assign(cdef, { + name: c.name, + field: c.name, + label: c.title, + align: 'left', + style: 'min-width: min-content;', + }); + return cdef; + }); } else { Quasar.Notify.create("Error loading catalog"); } @@ -626,19 +729,23 @@ function extractTextFromHTML(html) { highlight_parser = new DOMParser(); function highlightTextInHTML(html, re_expr) { - let doc = highlight_parser.parseFromString(html, 'text/html'); - - //if (re_expr.test(doc.textContent)) { - doc.body.querySelectorAll('*').forEach(el => { - // Check if it's a innermost text node - if (el.childNodes.length === 1 && el.firstChild.nodeType === 3) { - el.innerHTML = el.innerHTML.replaceAll(re_expr, function(match) { - return `${match}`; - }); + let parser = new DOMParser(); + let doc = parser.parseFromString(html, 'text/html'); + + function processNode(node) { + if (node.nodeType === 3) { // Node.TEXT_NODE + const matches = node.nodeValue.match(re_expr); + if (matches) { + const span = document.createElement('span'); + span.innerHTML = node.nodeValue.replaceAll(re_expr, '$&'); + node.parentNode.replaceChild(span, node); } - }); - //} + } else if (node.nodeType === 1) { // Node.ELEMENT_NODE + Array.from(node.childNodes).forEach(processNode); + } + } + Array.from(doc.body.childNodes).forEach(processNode); return doc.body.innerHTML; } @@ -649,9 +756,13 @@ function highlightTextInHTML2(html, re_expr) { } function show_pipeline_log() { + + console.log("vueApp.$data.pipeline_log_options", vueApp.$data.pipeline_log_options) + + // Filter the log lines vueApp.$data.pipeline_log.items = 
vueApp.$data.pipeline_log.data.split('\n').filter((txt) => { // if the filter text is not empty, hide lines that do not contain it - if ((vueApp.$data.pipeline_log_options.filter_text != null) && (vueApp.$data.pipeline_log_options.filter_text != '') && (vueApp.$data.pipeline_log_options.filter_text.length > 2)) { + if (!!(vueApp.$data.pipeline_log_options.filter_text) && (vueApp.$data.pipeline_log_options.filter_text.length > 0)) { if (!extractTextFromHTML(txt).toUpperCase().includes(vueApp.$data.pipeline_log_options.filter_text.toUpperCase())) { return false; } } // show only lines of the selected logging levels @@ -662,11 +773,12 @@ function show_pipeline_log() { return false }); - // if the filter text is not empty, highlight the text - if ((vueApp.$data.pipeline_log_options.filter_text != null) && (vueApp.$data.pipeline_log_options.filter_text != '' && vueApp.$data.pipeline_log_options.filter_text.length > 2)) { + // Highlight the searched text + if ((vueApp.$data.pipeline_log_options.filter_text != null) && (vueApp.$data.pipeline_log_options.filter_text != '' && vueApp.$data.pipeline_log_options.filter_text.length > 0)) { re_expr = new RegExp(vueApp.$data.pipeline_log_options.filter_text, 'gi'); for (let i = 0; i < vueApp.$data.pipeline_log.items.length; i++) { - vueApp.$data.pipeline_log.items[i] = highlightTextInHTML2(vueApp.$data.pipeline_log.items[i], re_expr); + vueApp.$data.pipeline_log.items[i] = highlightTextInHTML(vueApp.$data.pipeline_log.items[i], re_expr); } } + } \ No newline at end of file diff --git a/iop4api/templates/iop4api/data.html b/iop4api/templates/iop4api/data.html index cb152f37..446b3739 100644 --- a/iop4api/templates/iop4api/data.html +++ b/iop4api/templates/iop4api/data.html @@ -12,6 +12,7 @@ + + + + + + No filters yet + + + + + + + + + + + + + + + + + + + + + + + + +
fieldtypevalueaction
+ + + + + + +
+
+
+
diff --git a/iop4api/templates/iop4api/index.html b/iop4api/templates/iop4api/index.html index 18d02489..e7807f9e 100644 --- a/iop4api/templates/iop4api/index.html +++ b/iop4api/templates/iop4api/index.html @@ -117,11 +117,16 @@

VHEGA@IAA-CSIC

enable_full_lc: false, enable_iop3: false, enable_errorbars: false, + // plot (flagging) selected_plot_idx: [], show_selected_plot_pts: false, selected_refresh: 0, + flag_gui_popup: false, // data showDataTable: false, + showDataTableFilters: false, + DataTableFilters: [], + tableData: null, // catalog catalog: null, // query @@ -151,22 +156,30 @@

VHEGA@IAA-CSIC

console.log("Computing selected_plot_pts"); - this.selected_refresh++; + console.log("this.selected_refresh", this.selected_refresh++); + // Ensure that selected_plot_idx is reactive and triggers this computed property const newIndices = this.selected_plot_idx; // Get the last document in the Bokeh documents array const bokehDocument = Bokeh.documents.slice(-1)[0]; if (!bokehDocument) { + console.log("no bokeh document"); return []; } // Get the data source from the Bokeh document by its name const dataSource = bokehDocument.get_model_by_name('source').data; if (!dataSource) { + console.log("no data source"); return []; } + // If the flag GUI is activated, show the popup table + if (this.flag_gui_popup) { + this.show_selected_plot_pts = true; + } + // Use map to transform the indices into an array of point data return newIndices.map(index => { let pointData = {}; @@ -179,11 +192,14 @@

VHEGA@IAA-CSIC

// add fake but useful flag str representation field pointData['flagsstr'] = flags_to_str(pointData['flags']); + // to make it easier + pointData['has_flag_bad_photometry'] = (pointData['flags'] & (1<<0)); + pointData['has_flag_bad_polarimetry'] = (pointData['flags'] & (1<<1)); + return pointData; }); - }, - selected_plot_pts_flagstate() { + selected_plot_pts_flagsummary() { if (this.selected_plot_pts.length == 0) { notset = null; bad_photometry = null; @@ -241,15 +257,65 @@

VHEGA@IAA-CSIC

deep: true, immediate: true, }, + // log viewer gui pipeline_log_options: { handler(newValue, oldValue) { if (this.pipeline_log.isLoaded) { - show_pipeline_log(); + // filter on input with: + //show_pipeline_log(); + // but this can be slow if the log is large and there's only 1 or 2 chars written + // better filter on enter (implemented on the @change) + // but if filter options are changed, we should trigger filter (done in @update:selected on the q-chips) + // if the update is done here, remove those @events } this.pipeline_log_options.filter_timeout = null; }, deep: true }, + // data table filters + DataTableFilters: { + handler(newValue, oldValue) { + // get only filters with non empty field and type + let filter_array = newValue.filter(function (el) { + return el.field != '' && el.type != ''; + }); + + // interpret some filters + filter_array = Array.from(filter_array).map(function (e) { + let field = e.field; + let type = e.type; + let value = e.value; + + // debug + console.log('input [field, type, value] = ' + [field, type, value]); // debug + + if (value === '') { // if empty, null (works with = or != default filters) + value = null; + } else if (type != 'after') { // if values can be numbers, fix type + if (!isNaN(value) && !isNaN(Number(value))) { + value = Number(value); + } + } + + // debug + console.log(`was interpreted as [${field}, ${type}, ${value}] = [${field}, ${type}, ${value}] (${typeof value})`); // debug + + return {'field':field, 'type':type, 'value':value}; + }); + + // get tabulator table + let table = Tabulator.findTable('#tableDiv')[0]; + + // set new filters or clear them if there are none + if (filter_array.length == 0) { + table.clearFilter(); + } else { + table.setFilter(filter_array); + } + + }, + deep: true + }, }, methods: { updateURL() { @@ -311,9 +377,19 @@

VHEGA@IAA-CSIC

catalog_row_clicked(evt,row) { this.input_astrosource = row.name; }, + // also make some gui.js available inside vue // flagging toggle_flag, set_flag, + // logs, + show_pipeline_log, + // to format floats to give precision or return nan + formatFloat(value, precision) { + if (value === null || isNaN(Number(value))) { + return 'NaN'; + } + return Number(value).toFixed(precision); + }, }, beforeMount() { this.updateSelectedTabFromPath(); diff --git a/iop4api/templates/iop4api/logs.html b/iop4api/templates/iop4api/logs.html index b102725f..125b80bb 100644 --- a/iop4api/templates/iop4api/logs.html +++ b/iop4api/templates/iop4api/logs.html @@ -29,19 +29,20 @@
- - errors + + errors - - warning + + warning - - info + + info - + debug - + +
diff --git a/iop4api/templates/iop4api/plot.html b/iop4api/templates/iop4api/plot.html index bf0dfe35..6b6d3379 100644 --- a/iop4api/templates/iop4api/plot.html +++ b/iop4api/templates/iop4api/plot.html @@ -16,7 +16,7 @@ - +
@@ -48,11 +48,11 @@
-
- +
+
- Flag GUI + Flag GUI
@@ -65,34 +65,44 @@
- - + +
- + + + + + + - + - + + + + + + - + diff --git a/iop4api/views/catalog.py b/iop4api/views/catalog.py index a97d3254..fddfa24b 100644 --- a/iop4api/views/catalog.py +++ b/iop4api/views/catalog.py @@ -6,10 +6,12 @@ # django imports from django.http import JsonResponse, HttpResponseBadRequest from django.contrib.auth.decorators import permission_required +from django.db import models # iop4lib imports from iop4lib.db import AstroSource from iop4lib.enums import SRCTYPES +from iop4lib.utils import qs_to_table # other imports @@ -23,17 +25,8 @@ @permission_required(["iop4api.view_astrosource"]) def catalog(request): - qs = AstroSource.objects.exclude(srctype=SRCTYPES.CALIBRATOR).exclude(srctype=SRCTYPES.UNPOLARIZED_FIELD_STAR).values() + qs = AstroSource.objects.exclude(srctype=SRCTYPES.CALIBRATOR).exclude(srctype=SRCTYPES.UNPOLARIZED_FIELD_STAR) - data = list(qs) + all_column_names = [f.name for f in AstroSource._meta.get_fields() if hasattr(f, 'verbose_name')] - if len(data) > 0: - all_column_names = data[0].keys() - default_column_names = set(all_column_names) - else: - all_column_names = [] - default_column_names = [] - - columns = [{"name": k, "title": AstroSource._meta.get_field(k).verbose_name, "field":k, "visible": (k in default_column_names)} for k in all_column_names] - - return JsonResponse({'data': data, 'columns': columns}) + return JsonResponse(qs_to_table(qs=qs, column_names=all_column_names)) diff --git a/iop4api/views/data.py b/iop4api/views/data.py index 098b2817..e47ee09f 100644 --- a/iop4api/views/data.py +++ b/iop4api/views/data.py @@ -5,11 +5,14 @@ # django imports from django.http import JsonResponse, HttpResponseBadRequest from django.contrib.auth.decorators import permission_required +from django.db import models # iop4lib imports from iop4lib.db import AstroSource, PhotoPolResult +from iop4lib.utils import qs_to_table # other imports +from astropy.time import Time #logging import logging @@ -20,21 +23,37 @@ def data(request): source_name = request.POST.get("source_name", None) - band = request.POST.get("band", "R") if not AstroSource.objects.filter(name=source_name).exists(): return HttpResponseBadRequest(f"Source '{source_name}' does not exist".format(source_name=source_name)) - vals = PhotoPolResult.objects.filter(astrosource__name=source_name, band=band).values() + qs = PhotoPolResult.objects.filter(astrosource__name=source_name) - if len(vals) > 0: - all_column_names = vals[0].keys() - default_column_names = set(all_column_names).intersection(['juliandate', 'instrument', 'band', 'mag', 'mag_err', 'p', 'p_err', 'chi', 'chi_err']) - else: - all_column_names = [] - default_column_names = [] + data = qs.values() - columns = [{"name": k, "title": PhotoPolResult._meta.get_field(k).verbose_name, "field":k, "visible": (k in default_column_names)} for k in all_column_names] + all_column_names = data[0].keys() + default_column_names = ['juliandate', 'instrument', 'band', 'mag', 'mag_err', 'p', 'p_err', 'chi', 'chi_err'] - return JsonResponse({'data': list(vals), 'columns': columns}) + table_and_columns = qs_to_table(data=data, model=PhotoPolResult, column_names=all_column_names, default_column_names=default_column_names) + + result = { + "data": table_and_columns["data"], + "columns": table_and_columns["columns"], + "query": { + "source_name": source_name, + "count": qs.count() + } + } + + # annotate with date fromt the julian date + for r in result["data"]: + r["date"] = Time(r["juliandate"], format='jd').iso + result["columns"].append({"name": "date", "title": "date", 
"type": "date", "help": "date and time in ISO 8601 format, from the julian date"}) + + # annotate with flag labels + for r in result["data"]: + r["flag_labels"] = ",".join(PhotoPolResult.FLAGS.get_labels(r["flags"])) + result["columns"].append({"name": "flag_labels", "title": "flag labels", "type": "string", "help": "flags as human readable labels"}) + + return JsonResponse(result) diff --git a/iop4api/views/plot.py b/iop4api/views/plot.py index 77c21152..4477a37f 100644 --- a/iop4api/views/plot.py +++ b/iop4api/views/plot.py @@ -38,6 +38,7 @@ def plot(request): enable_full_lc = request.POST.get("enable_full_lc", False) enable_errorbars = request.POST.get("enable_errorbars", False) + save_filename = f"IOP4_{source_name}_{band}" # comment these lines to allow for empty plot if not AstroSource.objects.filter(name=source_name).exists(): @@ -49,13 +50,15 @@ def plot(request): from bokeh.document import Document from bokeh.transform import factor_cmap from bokeh.layouts import column, gridplot - from bokeh.models import CategoricalColorMapper, LinearColorMapper, RangeTool, Range1d, LinearAxis, CustomJS, ColumnDataSource, Whisker, DatetimeAxis, DatetimeTickFormatter, Scatter, Segment, CDSView, GroupFilter, AllIndices, HoverTool, NumeralTickFormatter, BoxZoomTool, DataTable, TableColumn, CategoricalMarkerMapper, CustomJSTransform + from bokeh.models import CategoricalColorMapper, LinearColorMapper, RangeTool, Range1d, LinearAxis, CustomJS, ColumnDataSource, Whisker, DatetimeAxis, DatetimeTickFormatter, Scatter, Segment, CDSView, GroupFilter, AllIndices, HoverTool, NumeralTickFormatter, BoxZoomTool, DataTable, TableColumn, CategoricalMarkerMapper, CustomJSTransform, CustomJSHover, SaveTool, Toolbar from bokeh.plotting import figure, show from bokeh.embed import components, json_item from bokeh.models import Div, Circle, Column, Row from bokeh.plotting import figure from bokeh.models import ColumnDataSource, Styles, InlineStyleSheet, GlobalInlineStyleSheet from bokeh.transform import factor_cmap, factor_mark, transform + from bokeh.events import Event, PanStart, PanEnd, Press, PressUp, RangesUpdate + from bokeh.models.widgets import HTMLTemplateFormatter, NumberFormatter lod_threshold = 2000 lod_factor = 10 @@ -177,8 +180,8 @@ def f_x1_to_x2(x1_val): source = ColumnDataSource(data=dict(pk = pks, instrument = vals['instrument'], - x1 = x1, - x2 = x2, + x1 = x1, # mjd + x2 = x2, # datetime datestr = Time(vals["juliandate"],format="jd").strftime("%Y/%m/%d %H:%M"), y1 = vals['mag'], y1_min = vals['mag']-vals['mag_err'], # if not sent it is computed in JS in check_plot() @@ -237,25 +240,66 @@ def f_x1_to_x2(x1_val): range_tool.overlay.fill_alpha = 0.2 # Create a hover tool with custom JS callback - hover = HoverTool(renderers=[]) - hover.callback = CustomJS(args = dict(source = source, hover = hover), code = ''' - if (cb_data.index.indices.length > 0) { - let index = cb_data.index.indices[0]; - let p = source.data.y2[index]; - let flags = source.data.flags[index]; - - if (isFinite(p)) { - hover.tooltips = [["id", "@pk"], ["instrument", "@instrument"], ["date", "@datestr"], ["mag", "@y1"], ["p", "@y2{0.0 %}"], ["chi", "@y3"], ["flags", flags_to_str(flags)]]; - } else { - hover.tooltips = [["id", "@pk"], ["instrument", "@instrument"], ["date", "@datestr"], ["mag", "@y1"], ["flags", flags_to_str(flags)]]; - } - } ''') + + hover_formatter = CustomJSHover(args=dict(source=source), + code=""" + let idx = special_vars.index; + + const [name, s1, s2] = format.split(','); + const y = source.data[s1][idx]; + 
const dy = source.data[s2][idx]; + + if (name == "mag") { + return parseFloat(y).toFixed(2) + " ± " + parseFloat(dy).toFixed(2); + } else if ((name == "p")) { + return parseFloat(100*y).toFixed(2) + " ± " + parseFloat(100*dy).toFixed(2) + " %"; + } else if ((name == "chi")) { + return parseFloat(y).toFixed(2) + " ± " + parseFloat(dy).toFixed(2) + " º"; + } + + return "error in hover formatter"; + """) + + hover_tool = HoverTool(renderers=[], formatters={"@pk":hover_formatter}) + + hover_tool.callback = CustomJS(args = dict(source = source, hover = hover_tool), code = ''' + if (cb_data.index.indices.length > 0) { + let index = cb_data.index.indices[0]; + + let mag = source.data.y1[index]; + let p = source.data.y2[index]; + let chi = source.data.y3[index]; + let flags = source.data.flags[index]; + + let tooltips = [["id", "@pk"], ["instrument", "@instrument"], ["date", "@datestr"]] + + if (isFinite(mag)) { + //tooltips.push(["mag", "@y1"]); + tooltips.push(["mag", "@pk{mag,y1,y1_err}"]); + } + + if (isFinite(p)) { + //tooltips.push(["mag", "@y2{0.0 %}"]); + tooltips.push(["p", "@pk{p,y2,y2_err}"]); + } + + if (isFinite(chi)) { + //tooltips.push(["mag", "@y2"]); + tooltips.push(["chi", "@pk{chi,y3,y3_err}"]); + } + + tooltips.push(["flags", flags_to_str(flags)]); + + hover.tooltips = tooltips + + } ''') # other tools - #tools = ["fullscreen", "reset", "save", "pan", "auto_box_zoom", "wheel_zoom", "box_select", "lasso_select"] - tools = ["fullscreen", "reset", "save", "pan", "wheel_zoom", "box_select", "tap", "lasso_select"] - box_zoom_tool = BoxZoomTool(dimensions="auto") + save_tool = SaveTool(filename=save_filename) # customize filename of saved plot + # tools = ["fullscreen", "reset", "pan", "wheel_zoom", "box_select", "tap", "lasso_select", save_tool, "auto_box_zoom"] + box_zoom_tool = BoxZoomTool(dimensions="auto") # added separately in each figure to make it the default active tool + tools = ["fullscreen", "reset", "pan", "wheel_zoom", "box_select", "tap", "lasso_select", save_tool] if enable_full_lc: # Create the main plot with range slider and secondary x-axis, fixed styles of markers @@ -339,7 +383,7 @@ def f_x1_to_x2(x1_val): for axLabel, axDict in pltDict.items(): - p = figure(title=None, x_range=selected_range, toolbar_location=None, tools=tools, lod_threshold=lod_threshold, lod_factor=lod_factor, lod_timeout=lod_timeout, output_backend="webgl") + p = figure(title=None, x_range=selected_range, toolbar_location=None, tools=tools, lod_threshold=lod_threshold, lod_factor=lod_factor, lod_timeout=lod_timeout, output_backend="webgl", name=f"plot_{axLabel}") scatter_initial = Scatter(x=axDict['x'], y=axDict['y'], size=axDict["size"], fill_color=axDict["color"], line_color=axDict["color"], marker=axDict["marker"], fill_alpha=axDict["alpha"], line_alpha=axDict["alpha"]) # scatter_selected = Scatter(x=axDict['x'], y=axDict['y'], size=axDict["size"], fill_color=axDict["selected_color"], line_color=axDict["selected_color"], marker=axDict["marker"], fill_alpha=axDict["selected_alpha"], line_alpha=axDict["selected_alpha"]) @@ -383,8 +427,8 @@ def f_x1_to_x2(x1_val): p.title.visible = False # Add common hover tool with custom JS callback - hover.renderers += [pt_renderers] # it has no renderers, append the scatter - p.add_tools(hover) + hover_tool.renderers += [pt_renderers] # it has no renderers, append the scatter + p.add_tools(hover_tool) # make box_zoom the default active tool p.add_tools(box_zoom_tool) @@ -412,9 +456,16 @@ def f_x1_to_x2(x1_val): else: plot_layout = 
gridplot([[pltDict["ax1"]["p"]], [pltDict["ax2"]["p"]], [pltDict["ax3"]["p"]]], merge_tools=True, toolbar_location="right", sizing_mode='stretch_both') - # Add a callback to hide errorbars when panning (it makes the plot smoother) + plot_layout.name = "gridplot" - from bokeh.events import Event, PanStart, PanEnd, Press, PressUp, RangesUpdate + # this is a fix for the save tool of the gridplot not behaving as expected + # and having the filename of the configured plot + for tool in plot_layout.toolbar.tools: + if isinstance(tool, SaveTool): + tool.filename = save_filename + break + + # Add a callback to hide errorbars when panning (it makes the plot smoother) if enable_errorbars: cb_hide_errorbars = """window.were_errorbars_active = document.querySelector('#cbox_errorbars').checked; plot_hide_errorbars();""" @@ -442,7 +493,6 @@ def f_x1_to_x2(x1_val): # Data table # ############## - from bokeh.models.widgets import HTMLTemplateFormatter, NumberFormatter table_link_formatter = HTMLTemplateFormatter(template="""<%= value %>""") float_formatter = NumberFormatter(format='0.00') percent_formatter = NumberFormatter(format='0.0 %') @@ -461,12 +511,10 @@ def f_x1_to_x2(x1_val): TableColumn(field="flags", title="flags", formatter=flags_to_str_formatter), ] - # tb_full_height = 6 + 25 + 25*max(len(source.data['pk']),30) # css padding + header + rows (and a max) data_table = DataTable(source=source, view=view, columns=table_columns, index_position=None, - sortable=True, reorderable=True, scroll_to_selection=True, sizing_mode="stretch_width", - # min_height=tb_full_height, height=tb_full_height, height_policy="fixed", + sortable=True, reorderable=True, scroll_to_selection=True, #sizing_mode="stretch_width", # this works and is fast + sizing_mode="inherit", # this also works fast while respecting the container size stylesheets=[InlineStyleSheet(css=""".slick-cell.selected { background-color: #d2eaff; }""")]) - # the last two commented rows make the table unusable, the page becomes too slow. 
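# The data table and the scatter renderers above are wired to a single shared
# ColumnDataSource, which is what keeps table-row selection and plotted-point
# selection in sync. A stripped-down, standalone sketch of that shared-source
# pattern follows; the column names and output filename are illustrative only:
from bokeh.layouts import column
from bokeh.models import ColumnDataSource, DataTable, TableColumn
from bokeh.plotting import figure, save

shared = ColumnDataSource(data=dict(mjd=[59000, 59001, 59002],
                                    mag=[14.1, 14.3, 14.2]))

fig = figure(width=400, height=300, tools="tap,box_select,reset")
fig.scatter(x="mjd", y="mag", size=8, source=shared)

table = DataTable(source=shared, width=400,
                  columns=[TableColumn(field="mjd", title="MJD"),
                           TableColumn(field="mag", title="mag")])

# selecting glyphs in the figure highlights the matching rows and vice versa,
# because both widgets render the same underlying data source
save(column(fig, table), filename="linked_selection_sketch.html")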
################################################# # Create a legend (it will be a different plot) # diff --git a/iop4lib/utils/__init__.py b/iop4lib/utils/__init__.py index 81cc5ef7..23210ea3 100644 --- a/iop4lib/utils/__init__.py +++ b/iop4lib/utils/__init__.py @@ -31,6 +31,38 @@ def get_column_values(qs, column_names): return {k: v for k, v in zip(column_names, map(np.array, values_lists))} +def qs_to_table(qs=None, model=None, data=None, column_names=None, default_column_names=None): + """ Specify either queryset, or data and models.""" + + from django.db import models + + if data is None or model is None: + if qs is None: + raise Exception("Either qs or data and model must be specified") + else: + model = qs.model + data = qs.values() + + if column_names is None: + column_names = [f.name for f in model._meta.get_fields() if hasattr(f, 'name') and f.name in data[0].keys()] + + if default_column_names is None: + default_column_names = column_names + + columns = [{ + "name": k, + "title": model._meta.get_field(k).verbose_name, + "visible": (k in default_column_names), + "type": "int" if isinstance(model._meta.get_field(k), models.IntegerField) else \ + "float" if isinstance(model._meta.get_field(k), models.FloatField) else \ + "str" if isinstance(model._meta.get_field(k), models.CharField) else \ + "str" if isinstance(model._meta.get_field(k), models.TextField) else \ + "date" if isinstance(model._meta.get_field(k), models.DateField) else \ + "unknown", + "help": model._meta.get_field(k).help_text, + } for k in column_names] + + return {'data': list(data), 'columns': columns} def divisorGenerator(n): """Generator for divisors of n""" From 2476c46f0d8b688ad0658894be76cb59e197aa81 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Sat, 11 Nov 2023 20:53:46 +0000 Subject: [PATCH 140/168] photopolresult admin: single link to all redfs --- iop4admin/modeladmins/photopolresult.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/iop4admin/modeladmins/photopolresult.py b/iop4admin/modeladmins/photopolresult.py index 0fb2e63c..7fb779b0 100644 --- a/iop4admin/modeladmins/photopolresult.py +++ b/iop4admin/modeladmins/photopolresult.py @@ -42,11 +42,11 @@ def get_src_type(self, obj): @admin.display(description="ReducedFits") def get_reducedfits(self, obj): self.allow_tags = True - link_L = list() - for reducedfit in obj.reducedfits.all(): - url = reverse('iop4admin:%s_%s_changelist' % (ReducedFit._meta.app_label, ReducedFit._meta.model_name)) + f"?id={reducedfit.id}" - link_L.append(rf'{reducedfit.id}') - return mark_safe(", ".join(link_L)) + + ids_str_L = [str(reducedfit.id) for reducedfit in obj.reducedfits.all()] + a_href = reverse('iop4admin:%s_%s_changelist' % (ReducedFit._meta.app_label, ReducedFit._meta.model_name)) + "?id__in=%s" % ",".join(ids_str_L) + a_text = ", ".join(ids_str_L) + return mark_safe(f'{a_text}') @admin.display(description="JD") def get_juliandate(self, obj): From 6097d62c5ec122293b9417e5834da6ba4f3515b3 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Sun, 12 Nov 2023 14:21:42 +0000 Subject: [PATCH 141/168] make_polarimetry_groups: fix bug it was not respecting the order of reducedfits by juliandate --- iop4lib/db/epoch.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/iop4lib/db/epoch.py b/iop4lib/db/epoch.py index 928f46be..b74f98d3 100644 --- a/iop4lib/db/epoch.py +++ b/iop4lib/db/epoch.py @@ -548,7 +548,8 @@ def make_polarimetry_groups(self): # output some debug info about the groups made for key_T, redf_L in 
groups_D.items(): key_D = dict(key_T) - logger.debug(f"{len(redf_L)=}; {key_D.values()}, {set([redf.rotangle for redf in redf_L])}") + rotangles_S = sorted(set([redf.rotangle for redf in redf_L])) + logger.debug(f"{len(redf_L)=}; {key_D.values()}, {rotangles_S}") # split the groups into subgroups such that every subgroup has at most 4 elements and all rotangles are present in the subgroup @@ -558,12 +559,12 @@ def make_polarimetry_groups(self): for key_T, redf_L in groups_D.items(): key_D = dict(key_T) - rotangles_S = set([redf.rotangle for redf in redf_L]) # rotangles available in the redf_L + rotangles_S = sorted(set([redf.rotangle for redf in redf_L])) # rotangles available in the redf_L split_rotangle_D = {rotangle:[redf for redf in redf_L if redf.rotangle==rotangle] for rotangle in rotangles_S} # to access the redfs in the redfL by rotangle while any([len(split_rotangle_D[rotangle])>0 for rotangle in rotangles_S]): # while there are redfs for some rotangle, create groups by popping one of each rotangle - split_groups.append([split_rotangle_D[rotangle].pop() for rotangle in rotangles_S if len(split_rotangle_D[rotangle]) > 0]) + split_groups.append([split_rotangle_D[rotangle].pop(0) for rotangle in rotangles_S if len(split_rotangle_D[rotangle]) > 0]) split_groups_keys.append(key_D) # sort the groups by min(juliandate) @@ -581,7 +582,7 @@ def make_polarimetry_groups(self): t1 = Time(min([redf.juliandate for redf in redf_L]), format="jd").datetime.strftime("%H:%M:%S") t2 = Time(max([redf.juliandate for redf in redf_L]), format="jd").datetime.strftime("%H:%M:%S") - logging.debug(f"{len(redf_L)=}; {key_D.values()}, {set([redf.rotangle for redf in redf_L])} ({t1}, {t2})") + logging.debug(f"Group {len(redf_L)=}; {key_D.values()}, {set([redf.rotangle for redf in redf_L])} ({t1}, {t2})") for redf in redf_L: logging.debug(f" -> {redf.rotangle}: {Time(redf.juliandate, format='jd').datetime.strftime('%H:%M:%S')}") From e4f92cd9623d9044c7119d00134b54529b6f236d Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Sun, 12 Nov 2023 16:19:38 +0000 Subject: [PATCH 142/168] dipol shotgun params: constraint absolute distance * it was causing bad astrometric using best D pairs --- iop4lib/instruments/dipol.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index b8365690..ba94336b 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -414,6 +414,8 @@ def _build_shotgun_params(cls, redf: 'ReducedFit'): shotgun_params_kwargs["dx_max"] = [300] shotgun_params_kwargs["dy_min"] = [0] shotgun_params_kwargs["dy_max"] = [50] + shotgun_params_kwargs["d_min"] = [150] + shotgun_params_kwargs["d_max"] = [250] shotgun_params_kwargs["bins"] = [400] shotgun_params_kwargs["hist_range"] = [(0,500)] From 7f2449b058721cbdc959cd748f36b06e702cfc6e Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Sun, 12 Nov 2023 16:22:32 +0000 Subject: [PATCH 143/168] small changes in reduced fit admin --- iop4admin/modeladmins/reducedfit.py | 2 +- iop4admin/templates/iop4admin/view_fitdetails.html | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/iop4admin/modeladmins/reducedfit.py b/iop4admin/modeladmins/reducedfit.py index cbdf4a59..743e1a2b 100644 --- a/iop4admin/modeladmins/reducedfit.py +++ b/iop4admin/modeladmins/reducedfit.py @@ -18,7 +18,7 @@ class AdminReducedFit(AdminFitFile): model = ReducedFit - list_display = ["id", 'filename', 'telescope', 'night', 'instrument', 'status', 'imgtype', 'imgsize', 'band', 'obsmode', 
'rotangle', 'exptime', 'get_targets_in_field', 'options', 'modified'] + list_display = ["id", 'filename', 'telescope', 'night', 'instrument', 'status', 'imgtype', 'imgsize', 'band', 'obsmode', 'rotangle', 'exptime', 'get_targets_in_field', 'juliandate', 'options', 'modified'] readonly_fields = [field.name for field in ReducedFit._meta.fields] search_fields = ['id', 'filename', 'epoch__telescope', 'epoch__night', 'sources_in_field__name'] ordering = ['-epoch__night', '-epoch__telescope', '-juliandate'] diff --git a/iop4admin/templates/iop4admin/view_fitdetails.html b/iop4admin/templates/iop4admin/view_fitdetails.html index 48fa6ba3..214739ad 100644 --- a/iop4admin/templates/iop4admin/view_fitdetails.html +++ b/iop4admin/templates/iop4admin/view_fitdetails.html @@ -139,7 +139,7 @@

Astrometry info:

{% for k,v in D.items %}
- + {% endfor %} {% endfor %} From 5a56cd3f2cb1dd4c4721a15891ecc12f37a73630 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Sun, 12 Nov 2023 16:25:11 +0000 Subject: [PATCH 144/168] BuildWCSResult: add __bool__ method --- iop4lib/utils/astrometry.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/iop4lib/utils/astrometry.py b/iop4lib/utils/astrometry.py index 3c151272..b6db500a 100644 --- a/iop4lib/utils/astrometry.py +++ b/iop4lib/utils/astrometry.py @@ -39,13 +39,21 @@ @dataclasses.dataclass class BuildWCSResult(): r""" - 'success': bool, # whether the appropiate WCSs was built successfully - 'wcslist': list # list of WCS objects built (usually one, two if there are extraordinary sources in the image) - 'info': dict or None # dict with extra information about the process + 'success': bool + whether the appropiate WCSs was built successfully + 'wcslist': list + list of WCS objects built (usually one, two if there are extraordinary sources in the image) + 'info': dict or None + dict with extra information about the process + + Boolean evaluation of this object returns the value of 'success'. """ success: bool wcslist: list[WCS] = dataclasses.field(default_factory=list) - info: dict = dataclasses.field(default_factory=dict) + info: dict = dataclasses.field(default_factory=dict) + + def __bool__(self): + return self.success def build_wcs_params_shotgun(redf: 'ReducedFit', shotgun_params_kwargs : dict() = None, hard : bool = False, summary_kwargs : dict = {'build_summary_images':True, 'with_simbad':True}) -> BuildWCSResult: @@ -346,6 +354,7 @@ def _save_astrocalib_proc_vars(locals_dict): astrocalib_proc_vars = dict() save_list = [ + 'msg', 'has_pairs', 'bkg_box_size', 'bkg_filter_size', 'bkg', @@ -358,6 +367,7 @@ def _save_astrocalib_proc_vars(locals_dict): if locals_dict['has_pairs']: save_list += [ + 'msg', 'wcs2', 'hist_range', 'bins', 'd_eps', 'seg1', 'seg2', 'seg_d0', 'seg_disp_sign', From cc07b291ad0b35af26da87c17be2b4e1d975a002 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Mon, 13 Nov 2023 19:36:37 +0000 Subject: [PATCH 145/168] iop4: fix --- iop4lib/db/reducedfit.py | 8 +- iop4lib/instruments/andor_cameras.py | 5 +- iop4lib/instruments/cafos.py | 5 +- iop4lib/instruments/dipol.py | 228 ++++++++++++++++++--------- iop4lib/instruments/instrument.py | 28 ++-- iop4lib/iop4.py | 11 +- iop4lib/utils/__init__.py | 3 +- 7 files changed, 183 insertions(+), 105 deletions(-) diff --git a/iop4lib/db/reducedfit.py b/iop4lib/db/reducedfit.py index 7626306d..cdcca543 100644 --- a/iop4lib/db/reducedfit.py +++ b/iop4lib/db/reducedfit.py @@ -153,11 +153,11 @@ def associate_masters(self, *args, **kwargs): def apply_masters(self): return Instrument.by_name(self.instrument).apply_masters(self) - def build_file(self): - return Instrument.by_name(self.instrument).build_file(self) + def build_file(self, **build_wcs_kwargs): + return Instrument.by_name(self.instrument).build_file(self, **build_wcs_kwargs) - def astrometric_calibration(self): - return Instrument.by_name(self.instrument).astrometric_calibration(self) + def astrometric_calibration(self, **build_wcs_kwargs): + return Instrument.by_name(self.instrument).astrometric_calibration(self, **build_wcs_kwargs) @property def has_pairs(self): diff --git a/iop4lib/instruments/andor_cameras.py b/iop4lib/instruments/andor_cameras.py index adef65ad..54322ad4 100644 --- a/iop4lib/instruments/andor_cameras.py +++ b/iop4lib/instruments/andor_cameras.py @@ -253,8 +253,9 @@ def compute_relative_polarimetry(cls, 
polarimetry_group): # 1. Compute all aperture photometries - target_fwhm, aperpix, r_in, r_out = cls.estimate_common_apertures(polarimetry_group, reductionmethod=REDUCTIONMETHODS.RELPOL) - + aperpix, r_in, r_out, fit_res_dict = cls.estimate_common_apertures(polarimetry_group, reductionmethod=REDUCTIONMETHODS.RELPHOT) + target_fwhm = fit_res_dict['mean_fwhm'] + logger.debug(f"Computing aperture photometries for the {len(polarimetry_group)} reducedfits in the group with target {aperpix:.1f}.") for reducedfit in polarimetry_group: diff --git a/iop4lib/instruments/cafos.py b/iop4lib/instruments/cafos.py index e3308c79..8c52816a 100644 --- a/iop4lib/instruments/cafos.py +++ b/iop4lib/instruments/cafos.py @@ -225,8 +225,9 @@ def compute_relative_polarimetry(cls, polarimetry_group): # 1. Compute all aperture photometries - target_fwhm, aperpix, r_in, r_out = cls.estimate_common_apertures(polarimetry_group, reductionmethod=REDUCTIONMETHODS.RELPOL) - + aperpix, r_in, r_out, fit_res_dict = cls.estimate_common_apertures(polarimetry_group, reductionmethod=REDUCTIONMETHODS.RELPHOT) + target_fwhm = fit_res_dict['mean_fwhm'] + logger.debug(f"Computing aperture photometries for the {len(polarimetry_group)} reducedfits in the group with target aperpix {aperpix:.1f}.") for reducedfit in polarimetry_group: diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index ba94336b..311e1a58 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -29,7 +29,7 @@ from iop4lib.enums import * from .instrument import Instrument from iop4lib.utils import imshow_w_sources, get_candidate_rank_by_matchs, get_angle_from_history, build_wcs_centered_on, get_simbad_sources -from iop4lib.utils.sourcedetection import get_sources_daofind, get_segmentation, get_cat_sources_from_segment_map +from iop4lib.utils.sourcedetection import get_sources_daofind, get_segmentation, get_cat_sources_from_segment_map, get_bkg from iop4lib.utils.plotting import plot_preview_astrometry from iop4lib.utils.astrometry import BuildWCSResult @@ -56,6 +56,15 @@ class DIPOL(Instrument): required_masters = ['masterbias', 'masterflat', 'masterdark'] + + # pre computed pairs distances to use in the astrometric calibrations + # obtained from calibrated photometry fields + + disp_sign_mean, disp_sign_std = np.array([-2.09032765e+02, 1.65384209e-02]), np.array([4.13289109, 0.66159702]) + disp_mean, disp_std = np.abs(disp_sign_mean), disp_sign_std + disp_std = np.array([15, 5]) + + @classmethod def classify_juliandate_rawfit(cls, rawfit: 'RawFit'): """ @@ -107,8 +116,12 @@ def classify_band_rawfit(cls, rawfit: 'RawFit'): else: rawfit.band = BANDS.ERROR raise ValueError(f"Missing FILTER keyword for {rawfit.fileloc} which is not a bias or dark (it is a {rawfit.imgtype}).") - elif rawfit.header['FILTER'] == "Red": + elif rawfit.header['FILTER'] == "Red": # TODO: they are not exacty red, they are in a different photometric system. Conversion must be implemented. rawfit.band = BANDS.R + elif rawfit.header['FILTER'] == "Green": + rawfit.band = BANDS.V + elif rawfit.header['FILTER'] == "Blue": + rawfit.band = BANDS.B else: rawfit.band = BANDS.ERROR raise ValueError(f"Unknown FILTER keyword for {rawfit.fileloc}: {rawfit.header['FILTER']}.") @@ -313,8 +326,39 @@ def has_pairs(cls, fit_instance: 'ReducedFit' or 'RawFit') -> bool: """ DIPOL ALWAYS HAS PAIRS?!!!! 
""" return True + + + + @classmethod - def build_wcs(cls, reducedfit: 'ReducedFit', summary_kwargs : dict = {'build_summary_images':True, 'with_simbad':True}): + def _estimate_positions_from_segments(cls, redf, fwhm=1.0, npixels=64, n_seg_threshold=3.0, centered=True): + # get the sources positions + + data = redf.data + + mean, median, std = sigma_clipped_stats(data, sigma=5.0) + + bkg = get_bkg(redf.mdata, filter_size=5, box_size=redf.width//10) + imgdata_bkg_substracted = redf.mdata - bkg.background + seg_threshold = n_seg_threshold * bkg.background_rms + + segment_map, convolved_data = get_segmentation(imgdata_bkg_substracted, fwhm=fwhm, npixels=npixels, threshold=seg_threshold) + if segment_map is None: + return list() + else: + seg_cat, positions, tb = get_cat_sources_from_segment_map(segment_map, imgdata_bkg_substracted, convolved_data) + + if centered: + # select only the sources in the center + cx, cy = redf.width//2, redf.height//2 + idx = np.abs(positions[:,0]-cx) < 1/3 * redf.width + idx = idx & (np.abs(positions[:,1]-cy) < 1/3 * redf.height) + positions = positions[idx] + + return positions + + @classmethod + def build_wcs(cls, reducedfit: 'ReducedFit', summary_kwargs : dict = {'build_summary_images':True, 'with_simbad':True}, method=None): """ Overriden Instrument build_wcs. While for PHOTOMETRY observations, DIPOL has a wide field which can be astrometrically calibrated, @@ -327,65 +371,97 @@ def build_wcs(cls, reducedfit: 'ReducedFit', summary_kwargs : dict = {'build_sum for the low flux and big size of the images. """ - from iop4lib.db import ReducedFit + + if method is not None: + logger.warning(f"Calling {method} for {reducedfit}.") + return method(reducedfit, summary_kwargs=summary_kwargs) + + from iop4lib.db import ReducedFit, AstroSource + + target_src = reducedfit.header_hintobject if reducedfit.obsmode == OBSMODES.PHOTOMETRY: return super().build_wcs(reducedfit, shotgun_params_kwargs=cls._build_shotgun_params(reducedfit), summary_kwargs=summary_kwargs) elif reducedfit.obsmode == OBSMODES.POLARIMETRY: - # First attempt: this fill find the sources in the image, if there are only two, that is. 
- - build_wcs = cls._build_wcs_for_polarimetry_from_target_O_and_E(reducedfit, summary_kwargs=summary_kwargs) - if build_wcs.success: - return build_wcs - else: - n_bright_sources = build_wcs.info['n_bright_sources'] + # Gather some info to perform a good decision on which methods to use + + n_estimate = len(cls._estimate_positions_from_segments(redf=reducedfit, n_seg_threshold=1.5, centered=False)) + n_estimate_centered = len(cls._estimate_positions_from_segments(redf=reducedfit, n_seg_threshold=1.5, centered=True)) + redf_phot = ReducedFit.objects.filter(instrument=reducedfit.instrument, + sources_in_field__in=[reducedfit.header_hintobject], + obsmode=OBSMODES.PHOTOMETRY, + flags__has=ReducedFit.FLAGS.BUILT_REDUCED).first() + n_expected_simbad_sources = len(get_simbad_sources(reducedfit.header_hintobject.coord, radius=(reducedfit.width*cls.arcsec_per_pix*u.arcsec))) + n_expected_calibrators = AstroSource.objects.filter(calibrates__in=[reducedfit.header_hintobject]).count() + + # log the variables above - logger.debug(f"Found {n_bright_sources} bright (>10 sigma) sources near the center of the field.") + logger.debug(f"{n_estimate=}") + logger.debug(f"{n_estimate_centered=}") + logger.debug(f"{redf_phot=}") + logger.debug(f"{n_expected_simbad_sources=}") + logger.debug(f"{n_expected_calibrators=}") - # Second attempt: this method will try to match the relative positions of sources to the ones in the catalog or simbad. + def _try_EO_method(): - if n_bright_sources < 4: - build_wcs = cls._build_wcs_for_polarimetry_images_catalog_matching(reducedfit, summary_kwargs=summary_kwargs) - if build_wcs.success: - return build_wcs + if target_src.srctype == SRCTYPES.STAR: + n_seg_threshold_L = [500, 300, 200, 100, 50] + npixels_L = [64] else: - logger.debug(f"Catalog matching failed.") - - # Third attempt: if there are many sources in the catalog and a previous photometry field has been solved, we can try to use the new method. + n_seg_threshold_L = [6.0, 3.0, 1.5, 1.0] + npixels_L = [64] + + for npixels, n_seg_threshold in zip(npixels_L, n_seg_threshold_L): + if (build_wcs_result := cls._build_wcs_for_polarimetry_from_target_O_and_E(reducedfit, summary_kwargs=summary_kwargs, n_seg_threshold=n_seg_threshold, npixels=npixels)): + break + return build_wcs_result + + def _try_quad_method(): + if redf_phot is not None: + + if target_src.srctype == SRCTYPES.STAR: + n_threshold_L = [500, 300, 200, 100] + else: + n_threshold_L = [15,5,3] + + for fwhm, n_threshold in itertools.product([30,15], n_threshold_L): + if (build_wcs_result := cls._build_wcs_for_polarimetry_images_photo_quads(reducedfit, summary_kwargs=summary_kwargs, n_threshold=n_threshold, find_fwhm=fwhm, smooth_fwhm=4)): + break + else: + build_wcs_result = BuildWCSResult(success=False) + return build_wcs_result - # build_wcs = cls._build_wcs_for_polarimetry_images_photo_quads(reducedfit, summary_kwargs=summary_kwargs) - # if build_wcs.success: - # return build_wcs - # else: - # logger.debug(f"Photo quads matching failed.") + def _try_catalog_method(): - # TODO: change it to use segmentation with only one threshold, and remove the loop below. 
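The rewritten build_wcs above boils down to an ordered-fallback strategy: each candidate astrometry method sweeps its own grid of detection parameters, returns a result object that is truthy only on success, and the first sweep that succeeds wins. A minimal sketch of that control-flow pattern, with placeholder names (Result, try_method, the parameter grids) rather than the actual iop4lib API:

import itertools
from dataclasses import dataclass

@dataclass
class Result:
    success: bool = False

    def __bool__(self) -> bool:
        # lets "if (res := attempt())" read as "did the attempt work?"
        return self.success

def try_method(n_threshold: float, npixels: int) -> Result:
    # stand-in for one astrometric attempt at a given detection threshold / source size
    return Result(success=False)

def sweep() -> Result:
    res = Result(success=False)
    for npixels, n_threshold in itertools.product([64, 128], [6.0, 3.0, 1.5]):
        if (res := try_method(n_threshold, npixels)):
            break  # keep the first successful attempt
    return res

def calibrate() -> Result:
    # the real code orders several sweeps by expected reliability for the source type
    for attempt in (sweep,):
        if (res := attempt()):
            return res
    return Result(success=False)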
- - if (redf_phot := ReducedFit.objects.filter(instrument=reducedfit.instrument, - sources_in_field__in=[reducedfit.header_hintobject], - obsmode=OBSMODES.PHOTOMETRY, - flags__has=ReducedFit.FLAGS.BUILT_REDUCED).first()) is not None: - - logger.debug(f"Found solved photometry field for {reducedfit}, I will try the new method.") + if target_src.srctype == SRCTYPES.STAR: + n_seg_threshold_L = [500, 300, 200, 100, 50] + npixels_L = [64] + else: + n_seg_threshold_L = [1.5, 1.0] + npixels_L = [64, 32] + + if n_expected_calibrators > 0 or n_expected_simbad_sources > 0: + for npixels, n_seg_threshold in itertools.product(npixels_L, n_seg_threshold_L): + if (build_wcs := cls._build_wcs_for_polarimetry_images_catalog_matching(reducedfit, summary_kwargs=summary_kwargs, n_seg_threshold=n_seg_threshold, npixels=npixels)): + break + else: + build_wcs = BuildWCSResult(success=False) + return build_wcs + - for fwhm, n_threshold in itertools.product([30,15], [15,5,3]): - - sources, _ = get_sources_daofind(reducedfit.mdata, fwhm=fwhm, n_threshold=n_threshold, brightest=100, exclude_border=True) + method_try_order = [_try_EO_method, _try_quad_method, _try_catalog_method] - if len(sources) >= 5: - build_wcs_result = cls._build_wcs_for_polarimetry_images_photo_quads(reducedfit, - summary_kwargs=summary_kwargs, - n_threshold=n_threshold, - find_fwhm=fwhm, - smooth_fwhm=4) + if n_estimate_centered < 3: + method_try_order = [_try_EO_method, _try_quad_method, _try_catalog_method] + else: + method_try_order = [_try_quad_method, _try_EO_method, _try_catalog_method] - if build_wcs_result.success: - return build_wcs_result - - logger.warning(f"The new method did not work, trying with the old one") + for m in method_try_order: + if (build_wcs := m()): + break - return cls._build_wcs_for_polarimetry_images_catalog_matching(reducedfit, summary_kwargs=summary_kwargs) + return build_wcs else: logger.error(f"Unknown obsmode {reducedfit.obsmode} for {reducedfit}.") @@ -798,12 +874,12 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', # tests for pairs +-30, should be more than enough # if they are not pairs, the procedure definitely failed, raise exception - if not np.isclose(np.abs(target_E[0]-target_O[0]), disp_mean[0], atol=60): - logger.error(f"These are not pairs, x mismatch detected according to hard-coded pair distance: err x = {np.abs(target_E[0]-target_O[0]):.0f} px") + if not np.isclose(np.abs(target_E[0]-target_O[0]), cls.disp_mean[0], atol=disp_allowed_err[0]): + logger.error(f"These are not pairs, x mismatch detected according to hard-coded pair distance: disp x = {np.abs(target_E[0]-target_O[0]):.0f} px)") return BuildWCSResult(success=False) - if not np.isclose(np.abs(target_E[1]-target_O[1]), disp_mean[1], atol=60): - logger.error(f"These are not pairs, y mismatch detected according to hard-coded pair distance: err y = {np.abs(target_E[1]-target_O[1]):.0f} px") + if not np.isclose(np.abs(target_E[1]-target_O[1]), cls.disp_mean[1], atol=disp_allowed_err[1]): + logger.error(f"These are not pairs, y mismatch detected according to hard-coded pair distance: disp y = {np.abs(target_E[1]-target_O[1]):.0f} px") return BuildWCSResult(success=False) # WCS for Ordinary and Extraordinary images @@ -824,6 +900,9 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', logger.debug(f"Using {len(calibrators_in_field)} calibrators in field to fit WCS for {target_src}.") wcslist = list() + + _, (fits_O, fits_E) = zip(*[get_candidate_rank_by_matchs(redf, pos, 
angle=angle, r_search=30, calibrators=expected_sources_in_field) for pos in [target_O, target_E]]) + for target_px, fits in zip([target_O, target_E], [fits_O, fits_E]): known_pos_skycoord = [target_src.coord] # fit[0] is the astro source fitted, fit[1] (fit[1][0] is the gaussian, fit[1][1] is the constant @@ -879,30 +958,15 @@ def _build_wcs_for_polarimetry_from_target_O_and_E(cls, redf: 'ReducedFit', summ raise Exception("No target source found in header, cannot build WCS.") data = redf.mdata - disp_sign_mean, disp_sign_std = np.array([-2.09032765e+02, 1.65384209e-02]), np.array([4.13289109, 0.66159702]) - disp_mean, disp_std = np.abs(disp_sign_mean), disp_sign_std # get the sources positions - mean, median, std = sigma_clipped_stats(data, sigma=5.0) - img_bkg_substracted = data - median - - if target_src.srctype == SRCTYPES.STAR: - seg_threshold = 3.0 * std - else: - seg_threshold = 1.0 * std - - segment_map, convolved_data = get_segmentation(img_bkg_substracted, fwhm=1, npixels=64, threshold=seg_threshold) - if segment_map is None: - logger.error(f"{redf}: No sources found, returning success = False.") - return BuildWCSResult(success=False, wcslist=list(), info={'n_bright_sources':0}) - else: - seg_cat, positions, tb = get_cat_sources_from_segment_map(segment_map, img_bkg_substracted, convolved_data) - cx, cy = redf.width//2, redf.height//2 - idx = np.abs(positions[:,0]-cx) < 1/3 * redf.width - idx = idx & (np.abs(positions[:,1]-cy) < 1/3 * redf.height) - positions = positions[idx] + positions = cls._estimate_positions_from_segments(redf, n_seg_threshold=n_seg_threshold, npixels=npixels, centered=True) + + if len(positions) == 0: + logger.error(f"{redf}: Found no sources in the field, cannot build WCS.") + return BuildWCSResult(success=False) if summary_kwargs['build_summary_images']: # plot summary of detected sources @@ -925,13 +989,12 @@ def _build_wcs_for_polarimetry_from_target_O_and_E(cls, redf: 'ReducedFit', summ logger.error(f"{redf}: {len(positions)} sources found, expected 2.") return BuildWCSResult(success=False, wcslist=list(), info={'n_bright_sources':len(positions)}) - # Check that the sources are pairs - if not np.isclose(np.abs(positions[0][0]-positions[1][0]), disp_mean[0], atol=70): - logger.error(f"These are not pairs, x mismatch detected according to hard-coded pair distance: err x = {np.abs(positions[0][0]-positions[1][0]):.0f} px") + if not np.isclose(np.abs(positions[0][0]-positions[1][0]), cls.disp_mean[0], atol=3*cls.disp_std[0]): + logger.error(f"These are not pairs, x mismatch detected according to hard-coded pair distance: disp x = {np.abs(positions[0][0]-positions[1][0]):.0f} px") return BuildWCSResult(success=False, info={'n_bright_sources':len(positions)}) - if not np.isclose(np.abs(positions[0][1]-positions[1][1]), disp_mean[1], atol=70): - logger.error(f"These are not pairs, y mismatch detected according to hard-coded pair distance: err y = {np.abs(positions[0][1]-positions[1][1]):.0f} px") + if not np.isclose(np.abs(positions[0][1]-positions[1][1]), cls.disp_mean[1], atol=3*cls.disp_std[1]): + logger.error(f"These are not pairs, y mismatch detected according to hard-coded pair distance: disp y = {np.abs(positions[0][1]-positions[1][1]):.0f} px") return BuildWCSResult(success=False, info={'n_bright_sources':len(positions)}) # define the targets to be the two positions found, with the ordinary on the right @@ -970,7 +1033,15 @@ def _build_wcs_for_polarimetry_from_target_O_and_E(cls, redf: 'ReducedFit', summ return BuildWCSResult(success=True, 
wcslist=[wcs1,wcs2], info={'n_bright_sources':len(positions)}) + + + @classmethod + def estimate_common_apertures(cls, reducedfits, reductionmethod=None, fit_boxsize=None, search_boxsize=(90,90)): + aperpix, r_in, r_out, fit_res_dict = super().estimate_common_apertures(reducedfits, reductionmethod=reductionmethod, fit_boxsize=fit_boxsize, search_boxsize=search_boxsize) + sigma = fit_res_dict['sigma'] + return 1.8*sigma, 5*sigma, 10*sigma, fit_res_dict + @classmethod def compute_relative_polarimetry(cls, polarimetry_group): """ Computes the relative polarimetry for a polarimetry group for DIPOL @@ -1024,8 +1095,9 @@ def compute_relative_polarimetry(cls, polarimetry_group): # 1. Compute all aperture photometries - target_fwhm, aperpix, r_in, r_out = cls.estimate_common_apertures(polarimetry_group, reductionmethod=REDUCTIONMETHODS.RELPOL) - + aperpix, r_in, r_out, fit_res_dict = cls.estimate_common_apertures(polarimetry_group, reductionmethod=REDUCTIONMETHODS.RELPHOT) + target_fwhm = fit_res_dict['mean_fwhm'] + logger.debug(f"Computing aperture photometries for the {len(polarimetry_group)} reducedfits in the group with target aperpix {aperpix:.1f}.") for reducedfit in polarimetry_group: diff --git a/iop4lib/instruments/instrument.py b/iop4lib/instruments/instrument.py index 86a519ff..d6fdb4c1 100644 --- a/iop4lib/instruments/instrument.py +++ b/iop4lib/instruments/instrument.py @@ -326,7 +326,7 @@ def apply_masters(cls, reducedfit): fits.writeto(reducedfit.filepath, data_new, header=header_new, overwrite=True) @classmethod - def astrometric_calibration(cls, reducedfit: 'ReducedFit'): + def astrometric_calibration(cls, reducedfit: 'ReducedFit', **build_wcs_kwargs): """ Performs astrometric calibration on the reduced fit, giving it the appropriate WCS. If the are both ordinary and extraordinary sources in the field, one WCS will be built for each, @@ -338,7 +338,7 @@ def astrometric_calibration(cls, reducedfit: 'ReducedFit'): os.remove(fpath) # build the WCS - build_wcs_result = cls.build_wcs(reducedfit) + build_wcs_result = cls.build_wcs(reducedfit, **build_wcs_kwargs) if build_wcs_result.success: @@ -387,7 +387,7 @@ def astrometric_calibration(cls, reducedfit: 'ReducedFit'): raise Exception(f"Could not perform astrometric calibration on {reducedfit}: {build_wcs_result=}") @classmethod - def build_file(cls, reducedfit: 'ReducedFit'): + def build_file(cls, reducedfit: 'ReducedFit', **build_wcs_kwargs): """ Builds the ReducedFit FITS file. 
Notes @@ -409,7 +409,7 @@ def build_file(cls, reducedfit: 'ReducedFit'): logger.debug(f"{reducedfit}: performing astrometric calibration") try: - reducedfit.astrometric_calibration() + reducedfit.astrometric_calibration(**build_wcs_kwargs) except Exception as e: logger.error(f"{reducedfit}: could not perform astrometric calibration on {reducedfit}: {e}") reducedfit.set_flag(ReducedFit.FLAGS.ERROR_ASTROMETRY) @@ -441,14 +441,17 @@ def compute_aperture_photometry(cls, redf, aperpix, r_in, r_out): from photutils.aperture import CircularAperture, CircularAnnulus, ApertureStats, aperture_photometry from photutils.utils import calc_total_error from astropy.stats import SigmaClip - from iop4lib.utils import estimate_common_apertures - if redf.mdata.shape[0] == 1024: + if redf.mdata.shape[0] == 1024: # andor cameras bkg_box_size = 128 - elif redf.mdata.shape[0] == 2048: + elif redf.mdata.shape[0] == 2048: # andor cameras bkg_box_size = 256 - elif redf.mdata.shape[0] == 800: + elif redf.mdata.shape[0] == 800: # cafos bkg_box_size = 100 + elif redf.mdata.shape[0] == 900: # dipol polarimetry + bkg_box_size = 90 + elif redf.mdata.shape[0] == 4144: # dipol photometry + bkg_box_size = 518 else: logger.warning(f"Image size {redf.mdata.shape[0]} not expected.") bkg_box_size = redf.mdata.shape[0]//10 @@ -493,8 +496,9 @@ def compute_relative_photometry(cls, redf: 'ReducedFit') -> None: if redf.obsmode != OBSMODES.PHOTOMETRY: raise Exception(f"{redf}: this method is only for plain photometry images.") - target_fwhm, aperpix, r_in, r_out = cls.estimate_common_apertures([redf], reductionmethod=REDUCTIONMETHODS.RELPHOT) - + aperpix, r_in, r_out, fit_res_dict = cls.estimate_common_apertures([redf], reductionmethod=REDUCTIONMETHODS.RELPHOT) + target_fwhm = fit_res_dict['mean_fwhm'] + if target_fwhm is None: logger.error("Could not estimate a target FWHM, aborting relative photometry.") return @@ -625,7 +629,7 @@ def estimate_common_apertures(cls, reducedfits, reductionmethod=None, fit_boxsiz elif len(astrosource_S) > 0: target = astrosource_S.pop() else: - return np.nan, np.nan, np.nan, np.nan + return np.nan, np.nan, np.nan, {'mean_fwhm':np.nan, 'sigma':np.nan} fwhm_L = list() @@ -651,6 +655,6 @@ def estimate_common_apertures(cls, reducedfits, reductionmethod=None, fit_boxsiz sigma = mean_fwhm / (2*np.sqrt(2*math.log(2))) r = sigma - return mean_fwhm, 3.0*r, 7.0*r, 15.0*r + return 3.0*r, 7.0*r, 15.0*r, {'mean_fwhm':mean_fwhm, 'sigma':sigma} diff --git a/iop4lib/iop4.py b/iop4lib/iop4.py index 3b19c4b4..0875c867 100644 --- a/iop4lib/iop4.py +++ b/iop4lib/iop4.py @@ -25,7 +25,8 @@ # iop4lib imports from iop4lib.db import * from iop4lib.enums import * -from iop4lib.telescopes import Telescope +from iop4lib.telescopes import * +from iop4lib.instruments import * # logging import logging @@ -49,15 +50,15 @@ def process_epochs(epochname_list, force_rebuild, check_remote_list): epoch.build_master_biases(force_rebuild=force_rebuild) + logger.info("Creating Master Darks.") + for epoch in epoch_L: + epoch.build_master_darks(force_rebuild=force_rebuild) + logger.info("Creating Master Flats.") for epoch in epoch_L: epoch.build_master_flats(force_rebuild=force_rebuild) - logger.info("Creating Master Darks.") - for epoch in epoch_L: - epoch.build_master_darks(force_rebuild=force_rebuild) - logger.info("Science files will be reduced.") rawfits = RawFit.objects.filter(epoch__in=epoch_L, imgtype=IMGTYPES.LIGHT).all() diff --git a/iop4lib/utils/__init__.py b/iop4lib/utils/__init__.py index 23210ea3..1eb1adec 100644 --- 
a/iop4lib/utils/__init__.py +++ b/iop4lib/utils/__init__.py @@ -202,8 +202,7 @@ def estimate_common_apertures(redfL, reductionmethod=None): sigma = mean_fwhm / (2*np.sqrt(2*math.log(2))) r = sigma - return mean_fwhm, 5.0*r, 15.0*r, 20.0*r - + return 5.0*r, 7.0*r, 15.0*r, {'mean_fwhm':mean_fwhm, 'sigma':sigma} def fit_fwhm(pos_px: (float,float), data: NDArray = None, redf: 'ReducedFit' = None, px_max: int = None) -> float: From f780433320c038fc1e06bbf9c9cbbb9346cdc2ff Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Mon, 13 Nov 2023 19:49:02 +0000 Subject: [PATCH 146/168] add dipol astrometric calibration to tests --- iop4lib/db/epoch.py | 8 +- iop4lib/instruments/dipol.py | 271 ++++++++++++------ iop4lib/instruments/instrument.py | 4 +- iop4lib/utils/__init__.py | 15 +- tests/builld_test_dataset.py | 180 +++++++++--- tests/conftest.py | 3 +- .../{test_cahat220.py => test_caha_cafos.py} | 0 tests/test_generic.py | 2 +- ...{test_osnt090.py => test_osnt090_andor.py} | 0 tests/test_osnt090_dipol.py | 91 ++++++ 10 files changed, 434 insertions(+), 140 deletions(-) rename tests/{test_cahat220.py => test_caha_cafos.py} (100%) rename tests/{test_osnt090.py => test_osnt090_andor.py} (100%) create mode 100644 tests/test_osnt090_dipol.py diff --git a/iop4lib/db/epoch.py b/iop4lib/db/epoch.py index b74f98d3..f968dc15 100644 --- a/iop4lib/db/epoch.py +++ b/iop4lib/db/epoch.py @@ -602,7 +602,13 @@ def compute_relative_polarimetry(self, *args, **kwargs): f = lambda x: Instrument.by_name(x[1]['instrument']).compute_relative_polarimetry(x[0], *args, **kwargs) - return list(map(f, zip(clusters_L, groupkeys_L))) + for i, (group, keys) in enumerate(zip(clusters_L, groupkeys_L)): + try: + Instrument.by_name(keys['instrument']).compute_relative_polarimetry(group, *args, **kwargs) + except Exception as e: + logger.error(f"{self}: error computing relative polarimetry for group n {i} {keys}: {e}") + finally: + logger.info(f"{self}: computed relative polarimetry for group n {i} {keys}.") diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index 311e1a58..e4cc53ce 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -397,6 +397,7 @@ def build_wcs(cls, reducedfit: 'ReducedFit', summary_kwargs : dict = {'build_sum # log the variables above + logger.debug(f"{target_src.srctype=}") logger.debug(f"{n_estimate=}") logger.debug(f"{n_estimate_centered=}") logger.debug(f"{redf_phot=}") @@ -406,13 +407,16 @@ def build_wcs(cls, reducedfit: 'ReducedFit', summary_kwargs : dict = {'build_sum def _try_EO_method(): if target_src.srctype == SRCTYPES.STAR: - n_seg_threshold_L = [500, 300, 200, 100, 50] - npixels_L = [64] + n_seg_threshold_L = [700, 600, 500, 400, 300, 200, 100, 50] + if reducedfit.exptime <= 5: + npixels_L = [128, 256, 64] + else: + npixels_L = [64, 128] else: n_seg_threshold_L = [6.0, 3.0, 1.5, 1.0] npixels_L = [64] - for npixels, n_seg_threshold in zip(npixels_L, n_seg_threshold_L): + for npixels, n_seg_threshold in itertools.product(npixels_L, n_seg_threshold_L): if (build_wcs_result := cls._build_wcs_for_polarimetry_from_target_O_and_E(reducedfit, summary_kwargs=summary_kwargs, n_seg_threshold=n_seg_threshold, npixels=npixels)): break return build_wcs_result @@ -421,7 +425,7 @@ def _try_quad_method(): if redf_phot is not None: if target_src.srctype == SRCTYPES.STAR: - n_threshold_L = [500, 300, 200, 100] + n_threshold_L = [700, 600, 500, 400, 300, 200, 100] else: n_threshold_L = [15,5,3] @@ -435,10 +439,10 @@ def _try_quad_method(): def _try_catalog_method(): if 
target_src.srctype == SRCTYPES.STAR: - n_seg_threshold_L = [500, 300, 200, 100, 50] - npixels_L = [64] + n_seg_threshold_L = [700, 500, 400, 300, 200, 100, 50] + npixels_L = [128, 64] else: - n_seg_threshold_L = [1.5, 1.0] + n_seg_threshold_L = [1.0] npixels_L = [64, 32] if n_expected_calibrators > 0 or n_expected_simbad_sources > 0: @@ -452,12 +456,13 @@ def _try_catalog_method(): method_try_order = [_try_EO_method, _try_quad_method, _try_catalog_method] - if n_estimate_centered < 3: + if target_src.srctype == SRCTYPES.STAR: method_try_order = [_try_EO_method, _try_quad_method, _try_catalog_method] else: - method_try_order = [_try_quad_method, _try_EO_method, _try_catalog_method] + method_try_order = [_try_quad_method, _try_catalog_method, _try_EO_method] for m in method_try_order: + logger.debug(f"Trying {m.__name__} for {reducedfit}.") if (build_wcs := m()): break @@ -711,7 +716,7 @@ def _build_wcs_for_polarimetry_images_photo_quads(cls, redf: 'ReducedFit', summa @classmethod - def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', summary_kwargs : dict = {'build_summary_images':True, 'with_simbad':True}): + def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', summary_kwargs : dict = {'build_summary_images':True, 'with_simbad':True}, n_seg_threshold=1.5, npixels=64): r""" Deprecated. Build WCS for DIPOL polarimetry images by matching the found sources positions with the catalog. .. warning:: @@ -723,12 +728,13 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', """ + # disp_allowed_err = 1.5*cls.disp_std + disp_allowed_err = np.array([30,30]) # most times, should be much smaller (1.5*std) + # but in bad cases, this is ~1 sigma of the gaussians + logger.debug(f"{redf}: building WCS for DIPOL polarimetry images.") from iop4lib.db import AstroSource - - disp_sign_mean, disp_sign_std = np.array([-2.09032765e+02, 1.65384209e-02]), np.array([4.13289109, 0.66159702]) - disp_mean, disp_std = np.abs(disp_sign_mean), disp_sign_std # define target astro source target_src = redf.header_hintobject @@ -737,22 +743,21 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', raise Exception("No target source found in header, cannot build WCS.") data = redf.mdata - cx, cy = data.shape[1]//2, data.shape[0]//2 - - sources, _ = get_sources_daofind(data, fwhm=30.0, n_threshold=10, brightest=100) + cx, cy = redf.width//2, redf.height//2 - # Consider as candidates only the sources that are close to the center, and also only the brightest ones - # cut sources that are 1/3 of the image away from the image - idx = np.abs(sources['xcentroid']-cx) < 1/3 * redf.width - idx = idx & (np.abs(sources['ycentroid']-cy) < 1/3 * redf.height) - sources = sources[idx] + positions = cls._estimate_positions_from_segments(redf, n_seg_threshold=n_seg_threshold, npixels=npixels, centered=True) + positions_non_centered = cls._estimate_positions_from_segments(redf, n_seg_threshold=n_seg_threshold, npixels=npixels, centered=False) + if len(positions) == 0: + logger.error(f"{redf}: Found no sources in the field, cannot build WCS.") + return BuildWCSResult(success=False) + else: + logger.debug(f"{redf}: Found {len(positions)} with {n_seg_threshold=} {npixels=} sources in the field.") + # if the are more than 100, work with only 20 brightest (they are already sorted by flux) - if len(sources) > 20: - sources = sources[:20] - # build the position [(x,y), ...)] array - positions = np.transpose((sources['xcentroid'], 
sources['ycentroid'])) + if len(positions) > 20: + positions = positions[:20] if summary_kwargs['build_summary_images']: # plot summary of detected sources @@ -768,11 +773,9 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', ax.axhline(cy+1/3*redf.height, xmin=0, xmax=redf.width, color='y', linestyle='--') ax.axvline(cx-1/3*redf.width, ymin=0, ymax=redf.height, color='y', linestyle='--') ax.axvline(cx+1/3*redf.width, ymin=0, ymax=redf.height, color='y', linestyle='--') + ax.set_title(f"Detected sources {npixels=}, {n_seg_threshold=}") fig.savefig(Path(redf.filedpropdir) / "astrometry_detected_sources.png", bbox_inches="tight") fig.clear() - - #distances_to_center = [np.sqrt((x-cx)**2+(y-cy)**2) for x,y in positions] - # idx_sorted_by_distance = np.argsort(distances_to_center) # non stable sorting! use np.argsort(-array) better angle_mean, angle_std = get_angle_from_history(redf, target_src) @@ -793,9 +796,6 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', # WCS (pre_wcs below). If no calibrators in the DB is found we can try querying simbad, to see if # there is some known star close, and them use them. - # If also no SIMBAD sources are found, we can only constraint the distances as much as possible, - # obtain a candidate, we can check that they are indeed pairs. But this is not a good solution. - pre_wcs = build_wcs_centered_on((cx,cy), redf=redf, angle=angle) # get list if calibrators for this source in the DB expected to be inside the subframe @@ -809,6 +809,7 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', logger.debug("Found only two sources in the field, assuming they are the ordinary and extraordinary images.") target_O, target_E = positions else: + if len(expected_sources_in_field) == 0: logger.warning(f"{redf}: No other DB sources in the field to check. Checking SIMBAD sources...") simbad_search_radius = Angle(cls.arcsec_per_pix*redf.width/3600, unit="deg") @@ -819,44 +820,70 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', # The function get_candidate_rank_by_matchs returns a rank for each candidate, the higher the rank, # the more likely it is to be the target source according to the matches with the catalog sources. 
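Both the catalog-matching method and the O/E method ultimately rely on the same geometric constraint: every source appears twice in a DIPOL polarimetry frame, with the ordinary and extraordinary images separated by the pre-calibrated displacement stored in disp_sign_mean, up to a generous per-axis tolerance. A minimal sketch of that pair search, independent of the iop4lib get_best_pairs helper (whose internals are not shown in this patch); positions is an assumed (N, 2) array of detected pixel coordinates:

import itertools
import numpy as np

def find_pairs(positions, disp_sign_mean, disp_err=(30.0, 30.0)):
    """Return couples of positions whose displacement matches the expected O/E offset (either sign)."""
    pairs = []
    for p1, p2 in itertools.product(positions, positions):
        disp = np.subtract(p2, p1)
        if (np.all(np.abs(disp - disp_sign_mean) < disp_err)
                or np.all(np.abs(disp + disp_sign_mean) < disp_err)):
            # note: each physical pair shows up once per ordering of (p1, p2)
            pairs.append((tuple(p1), tuple(p2)))
    return pairs

# e.g. with the pre-computed DIPOL offset of roughly (-209, 0) px:
# find_pairs(detected_positions, disp_sign_mean=np.array([-2.09032765e+02, 1.65384209e-02]))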
- ranks, calibrators_fits = zip(*[get_candidate_rank_by_matchs(redf, pos, angle=angle, r_search=15, calibrators=expected_sources_in_field) for pos in positions[:6]]) + # filter the positionsto rank only to those that could be pairs + from iop4lib.utils.quadmatching import distance + from iop4lib.utils.sourcepairing import get_best_pairs + + list1, list2, _, _ = get_best_pairs(*zip(*itertools.product(positions,positions)), cls.disp_sign_mean, disp_sign_err=disp_allowed_err) + + positions_to_rank = list() + positions_to_rank.extend(list1) + positions_to_rank.extend(list2) + positions_to_rank = np.array(positions_to_rank) + logger.debug(f"{positions_to_rank=}") + + # N_max_to_rank = 10 + # positions_to_rank = positions_to_rank[:N_max_to_rank] + + ranks, _ = zip(*[get_candidate_rank_by_matchs(redf, pos, angle=angle, r_search=15, calibrators=expected_sources_in_field) for pos in positions_to_rank]) ranks = np.array(ranks) # idx_sorted_by_rank = np.argsort(ranks)[::-1] # non stable sort idx_sort_by_rank = np.argsort(-ranks, kind="stable") # stable sort - sorted_fits_by_rank = [calibrators_fits[i] for i in idx_sort_by_rank] - sorted_positions_by_rank = positions[idx_sort_by_rank] + with np.printoptions(precision=2, suppress=True): + logger.debug(f"{positions_to_rank=}") + logger.debug(f"{ranks=}") + logger.debug(f"{idx_sort_by_rank=}") # If the procedure worked, the first two sources should be the # ordinary and extraordinary images, which should have the most similar # fluxes, there fore check if they are next to each others. # if they are not, the procedure might have failed, give a warning. - logger.debug(f"Ranks: {ranks}, {idx_sort_by_rank=}") - if abs(idx_sort_by_rank[0] - idx_sort_by_rank[1]) != 1: - logger.warning("These might not be pairs, adyacent by rank flux mismatch detected") - - # if the first higher ranks are not higher than the rest, it did not discriminate well, give a warning - if not np.all(min(ranks[idx_sort_by_rank][0:2]) > ranks[idx_sort_by_rank][2:]): - logger.warning(f"Ranks did not discriminate well: {ranks=}. 
Using pairs.") - target_O = sorted_positions_by_rank[0] - err_pair = [np.sqrt((np.abs(target_O[0]-pos[0])-disp_mean[0])**2 + (np.abs(target_O[1]-pos[1])-disp_mean[1])**2) for pos in sorted_positions_by_rank] - idx_min_err_pair = np.argsort(err_pair)[0] - - if err_pair[idx_min_err_pair] > 60: - logger.error(f"Best pair has error {err_pair[idx_min_err_pair]:.0f} px > 60 px, returning success = False.") - return BuildWCSResult(success=False) - elif err_pair[np.argsort(err_pair)[1]] < 60: - logger.warning(f"Second best pair has error {err_pair[np.argsort(err_pair)[1]]:.0f} px < 60 px, I can not discriminate, returning success = False.") - return BuildWCSResult(success=False) + logger.warning("adyacent by rank flux mismatch detected") - target_E = sorted_positions_by_rank[idx_min_err_pair] - fits_O, fits_E = sorted_fits_by_rank[0], sorted_fits_by_rank[idx_min_err_pair] + if not any([(np.isfinite(r) and r>0) for r in ranks]): + logger.error("None of the ranks worked, returning success = False.") + return BuildWCSResult(success=False) else: - target_O, target_E = sorted_positions_by_rank[:2] - fits_O, fits_E = sorted_fits_by_rank[0], sorted_fits_by_rank[1] + logger.debug(f"Ranks discriminated well") + pre_list1, pre_list2 = zip(*itertools.product([positions_to_rank[i] for i in range(len(positions_to_rank)) if np.isfinite(ranks[i]) and ranks[i] >= np.nanmax(ranks)], positions)) + + # log some debug info about the pairs diference and the difference with respect the expected disp_sign_mean + + for i, (pos1, pos2) in enumerate(zip(pre_list1, pre_list2)): + dist = distance(pos1, pos2) + disp = np.abs(np.subtract(pos1, pos2)) + diff = np.abs(np.subtract(pos1, pos2))-np.abs(cls.disp_sign_mean) + with np.printoptions(precision=1, suppress=True): + logger.debug(f"{i=}, {pos1=!s}, {pos2=!s}, {dist=!s}, {disp=!s}, {diff=!s}") + + # get the best pairs according to the disp_sign_mean + # since we dont know if pre_list1 is the ordinary or extraordinary image, try with + # disp_sign_mean and -disp_sign_mean + + list1, list2, d0_new, disp_sign_new = get_best_pairs(pre_list1, pre_list2, cls.disp_sign_mean, disp_sign_err=disp_allowed_err) + logger.debug(f"{list1=}, {list2=}, {d0_new=}, {disp_sign_new=}") + if len(list1) == 0: + list1, list2, d0_new, disp_sign_new = get_best_pairs(pre_list1, pre_list2, -cls.disp_sign_mean, disp_sign_err=disp_allowed_err) + logger.debug(f"{list1=}, {list2=}, {d0_new=}, {disp_sign_new=}") + if len(list1) == 0: + logger.error("No pairs found, returning success = False.") + return BuildWCSResult(success=False) + + target_O, target_E = list1[0], list2[0] else: @@ -864,14 +891,12 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', return BuildWCSResult(success=False) - # Make the ordinary image the one ro the right always. + # Make the ordinary image the one in the right always. 
if target_O[0] < target_E[0]: target_O, target_E = target_E, target_O # from preliminary astrometry of photometry images - # pair distance should be [-2.09032765e+02, 1.65384209e-02] +- [4.13289109, 0.66159702])) - # tests for pairs +-30, should be more than enough # if they are not pairs, the procedure definitely failed, raise exception if not np.isclose(np.abs(target_E[0]-target_O[0]), cls.disp_mean[0], atol=disp_allowed_err[0]): @@ -911,10 +936,15 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', known_pos_px = [target_px] known_pos_px.extend([(fit[1][0].x_mean.value, fit[1][0].y_mean.value) for fit in fits]) - logger.debug("Fitting " + ", ".join([f"ra {coord.ra.deg} dec {coord.dec.deg} to {pos}" for coord, pos in zip(known_pos_skycoord, known_pos_px)])) + try: + logger.debug("Fitting " + ", ".join([f"ra {coord.ra.deg} dec {coord.dec.deg} to {pos}" for coord, pos in zip(known_pos_skycoord, known_pos_px)])) - wcs_fitted = fit_wcs_from_points(np.array(known_pos_px).T, SkyCoord(known_pos_skycoord), projection=build_wcs_centered_on(target_px, redf=redf, angle=angle)) - wcslist.append(wcs_fitted) + wcs_fitted = fit_wcs_from_points(np.array(known_pos_px).T, SkyCoord(known_pos_skycoord), projection=build_wcs_centered_on(target_px, redf=redf, angle=angle)) + wcslist.append(wcs_fitted) + except Exception as e: + logger.error(f"Exception {e} while fitting WCS, using pre-computed angle {angle:.2f} deg for {target_src}.") + wcslist = [build_wcs_centered_on(target_px, redf=redf, angle=angle) for target_px in [target_O, target_E]] + if summary_kwargs['build_summary_images']: @@ -925,16 +955,13 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', plot_preview_astrometry(redf, with_simbad=True, has_pairs=True, wcs1=wcslist[0], wcs2=wcslist[1], ax=ax, fig=fig) fig.savefig(Path(redf.filedpropdir) / "astrometry_summary.png", bbox_inches="tight") fig.clear() - - import gc - gc.collect() return BuildWCSResult(success=True, wcslist=wcslist, info={'method':'_build_wcs_for_polarimetry_images_catalog_matching'}) @classmethod - def _build_wcs_for_polarimetry_from_target_O_and_E(cls, redf: 'ReducedFit', summary_kwargs : dict = {'build_summary_images':True, 'with_simbad':True}): + def _build_wcs_for_polarimetry_from_target_O_and_E(cls, redf: 'ReducedFit', summary_kwargs : dict = {'build_summary_images':True, 'with_simbad':True}, n_seg_threshold=3.0, npixels=64) -> BuildWCSResult: r""" Deprecated. Build WCS for DIPOL polarimetry images by matching the found sources positions with the catalog. .. 
warning:: @@ -946,9 +973,13 @@ def _build_wcs_for_polarimetry_from_target_O_and_E(cls, redf: 'ReducedFit', summ """ + disp_allowed_err = 1.5*cls.disp_std + from iop4lib.db import AstroSource + from iop4lib.utils.sourcepairing import get_best_pairs + from iop4lib.utils.quadmatching import distance - logger.debug(f"{redf}: building WCS for DIPOL polarimetry images.") + logger.debug(f"{redf}: building WCS for DIPOL polarimetry images from target_O and target_E with {npixels=}, {n_seg_threshold=}.") # definitions @@ -985,15 +1016,42 @@ def _build_wcs_for_polarimetry_from_target_O_and_E(cls, redf: 'ReducedFit', summ fig.savefig(Path(redf.filedpropdir) / "astrometry_detected_sources.png", bbox_inches="tight") fig.clear() - if not len(positions) == 2: - logger.error(f"{redf}: {len(positions)} sources found, expected 2.") - return BuildWCSResult(success=False, wcslist=list(), info={'n_bright_sources':len(positions)}) + if len(positions) == 1: + logger.error(f"{redf}: Found only one source in the field, cannot build WCS.") + return BuildWCSResult(success=False) + if len(positions) > 2: + logger.warning(f"{redf}: {len(positions)} sources found, expected 2. Maybe after looking at pairs only, we can find the right ones.") + + pre_list1, pre_list2 = zip(*itertools.product(positions, positions)) + # log some debug info about the pairs diference and the difference with respect the expected disp_sign_mean + for i, (pos1, pos2) in enumerate(zip(pre_list1, pre_list2)): + dist = distance(pos1, pos2) + disp = np.abs(np.subtract(pos1, pos2)) + diff = np.abs(np.subtract(pos1, pos2))-np.abs(cls.disp_sign_mean) + with np.printoptions(precision=1, suppress=True): + logger.debug(f"{i=}, {pos1=!s}, {pos2=!s}, {dist=!s}, {disp=!s}, {diff=!s}") + + list1, list2, d0_new, disp_sign_new = get_best_pairs(pre_list1, pre_list2, cls.disp_sign_mean, disp_sign_err=disp_allowed_err) + + logger.debug(f"{list1=}, {list2=}, {d0_new=}, {disp_sign_new=}") + + if len(list1) == 0: + list1, list2, d0_new, disp_sign_new = get_best_pairs(pre_list1, pre_list2, -cls.disp_sign_mean, disp_sign_err=disp_allowed_err) + logger.debug(f"{list1=}, {list2=}, {d0_new=}, {disp_sign_new=}") + + if len(list1) != 1: + logger.error(f"We expected exactly one source, but we found {len(list1)} pairs, returning success = False.") + return BuildWCSResult(success=False) + else: + positions = [list1[0], list2[0]] + + # Check that the sources are pairs - if not np.isclose(np.abs(positions[0][0]-positions[1][0]), cls.disp_mean[0], atol=3*cls.disp_std[0]): + if not np.isclose(np.abs(positions[0][0]-positions[1][0]), cls.disp_mean[0], atol=disp_allowed_err[0]): logger.error(f"These are not pairs, x mismatch detected according to hard-coded pair distance: disp x = {np.abs(positions[0][0]-positions[1][0]):.0f} px") return BuildWCSResult(success=False, info={'n_bright_sources':len(positions)}) - if not np.isclose(np.abs(positions[0][1]-positions[1][1]), cls.disp_mean[1], atol=3*cls.disp_std[1]): + if not np.isclose(np.abs(positions[0][1]-positions[1][1]), cls.disp_mean[1], atol=disp_allowed_err[1]): logger.error(f"These are not pairs, y mismatch detected according to hard-coded pair distance: disp y = {np.abs(positions[0][1]-positions[1][1]):.0f} px") return BuildWCSResult(success=False, info={'n_bright_sources':len(positions)}) @@ -1037,7 +1095,7 @@ def _build_wcs_for_polarimetry_from_target_O_and_E(cls, redf: 'ReducedFit', summ @classmethod def estimate_common_apertures(cls, reducedfits, reductionmethod=None, fit_boxsize=None, search_boxsize=(90,90)): - aperpix, 
r_in, r_out, fit_res_dict = super().estimate_common_apertures(reducedfits, reductionmethod=reductionmethod, fit_boxsize=fit_boxsize, search_boxsize=search_boxsize) + aperpix, r_in, r_out, fit_res_dict = super().estimate_common_apertures(reducedfits, reductionmethod=reductionmethod, fit_boxsize=fit_boxsize, search_boxsize=search_boxsize, fwhm_min=5.0, fwhm_max=60) sigma = fit_res_dict['sigma'] return 1.8*sigma, 5*sigma, 10*sigma, fit_res_dict @@ -1135,37 +1193,66 @@ def compute_relative_polarimetry(cls, polarimetry_group): fluxD[pair] = {} fluxD[pair][angle] = (flux, flux_err) - F = np.array([(fluxD['O'][angle][0] - fluxD['E'][angle][0]) / (fluxD['O'][angle][0] + fluxD['E'][angle][0]) for angle in angles_L]) - I = np.mean([(fluxD['O'][angle][0] + fluxD['E'][angle][0]) for angle in angles_L]) + F_O = np.array([(fluxD['O'][angle][0]) for angle in angles_L]) + dF_O = np.array([(fluxD['O'][angle][1]) for angle in angles_L]) + + F_E = np.array([(fluxD['E'][angle][0]) for angle in angles_L]) + dF_E = np.array([(fluxD['E'][angle][1]) for angle in angles_L]) + + F = (F_O - F_E) / (F_O + F_E) + dF = 1 / ( F_O**2 + F_E**2 ) * np.sqrt(dF_O**2 + dF_E**2) + + I = (F_O + F_E) + dI = np.sqrt(dF_O**2 + dF_E**2) N = len(angles_L) # Compute both the uncorrected and corrected values - Qr_uncorr = 2/N * np.sum([F[i]*np.cos(np.pi/2*i) for i in range(N)]) - Qr = Qr_uncorr + 3.77/100 # TODO: check and derive this value - Ur_uncorr = 2/N * np.sum([F[i]*np.sin(np.pi/2*i) for i in range(N)]) - Ur = Ur_uncorr - 0.057/100 # TODO: check and derive this value + Qr_uncorr = 2/N * sum([F[i] * math.cos(math.pi/2*i) for i in range(N)]) + dQr_uncorr = 2/N * math.sqrt(sum([dF[i]**2 * math.cos(math.pi/2*i)**2 for i in range(N)])) + + logger.debug(f"{Qr_uncorr=}, {dQr_uncorr=}") + + Ur_uncorr = 2/N * sum([F[i] * math.sin(math.pi/2*i) for i in range(N)]) + dUr_uncorr = 2/N * math.sqrt(sum([dF[i]**2 * math.sin(math.pi/2*i)**2 for i in range(N)])) + + + Q_inst = +0.057/100 + dQ_inst = 0 + + logger.debug(f"{Q_inst=}, {dQ_inst=}") + U_inst = -3.77/100 + dU_inst = 0 - def _get_p_and_chi(Qr, Ur): + Qr = Qr_uncorr + Q_inst # TODO: check and derive this value + dQr = math.sqrt(dQr_uncorr**2 + dQ_inst**2) + + logger.debug(f"{Qr=}, {dQr=}") + + Ur = Ur_uncorr + U_inst # TODO: check and derive this value + dUr = math.sqrt(dUr_uncorr**2 + dU_inst**2) + + logger.debug(f"{Ur=}, {dUr=}") + + def _get_p_and_chi(Qr, Ur, dQr, dUr): # linear polarization (0 to 1) - P = np.sqrt(Qr**2+Ur**2) - dP = None + P = math.sqrt(Qr**2+Ur**2) + dP = 1/P * math.sqrt((Qr*dQr)**2 + (Ur*dUr)**2) # polarization angle (degrees) - Theta_0 = 0 - if Qr >= 0: - Theta_0 = math.pi - if Ur > 0: - Theta_0 = -1 * math.pi - chi = - 0.5 * np.rad2deg(np.arctan(Qr/Ur) + Theta_0) - dchi = None - - return P, dP, chi, dchi + x = -Qr/Ur + dx = math.sqrt( (-1/Ur)**2+dUr**2 + (+Qr/Ur**2)**2*dQr**2 ) + chi = 0.5 * math.degrees(math.atan2(-Qr, Ur)) + dchi = 0.5 * 1/(1 + x**2) * dx + + return P, chi, dP, dchi # linear polarization (0 to 1) - P_uncorr, dP_uncorr, chi_uncorr, dchi_uncorr = _get_p_and_chi(Qr_uncorr, Ur_uncorr) - P, dP, chi, dchi = _get_p_and_chi(Qr, Ur) + P_uncorr, chi_uncorr, dP_uncorr, dchi_uncorr = _get_p_and_chi(Qr_uncorr, Ur_uncorr, dQr_uncorr, dUr_uncorr) + P, chi, dP, dchi = _get_p_and_chi(Qr, Ur, dQr, dUr) + + logger.debug(f"{P=}, {chi=}, {dP=}, {dchi=}") # No attempt to get magnitude from polarimetry fields in dipol, they have too low exposure, and many times there are no calibrators in the subframe. 
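The rewritten compute_relative_polarimetry above is the standard double-image polarimeter reduction: one normalised flux difference per half-wave-plate position, discrete cosine/sine sums for Q and U, an instrumental offset, and finally the degree and angle of linear polarisation. A compact standalone sketch of that arithmetic, using textbook first-order error propagation (not guaranteed to match the patch's dF expression term for term); the Q_inst/U_inst defaults are placeholders, not calibrated DIPOL values:

import math
import numpy as np

def double_image_polarimetry(F_O, dF_O, F_E, dF_E, Q_inst=0.0, U_inst=0.0):
    """F_O/F_E: ordinary/extraordinary fluxes at the N rotator positions, i = 0..N-1."""
    F_O, dF_O, F_E, dF_E = map(np.asarray, (F_O, dF_O, F_E, dF_E))
    N = len(F_O)

    # normalised flux differences and their propagated uncertainties
    F = (F_O - F_E) / (F_O + F_E)
    dF = 2.0 / (F_O + F_E) ** 2 * np.sqrt(F_E**2 * dF_O**2 + F_O**2 * dF_E**2)

    # for N = 4 positions, cos = (1, 0, -1, 0) and sin = (0, 1, 0, -1)
    i = np.arange(N)
    Q = 2.0 / N * np.sum(F * np.cos(np.pi / 2 * i)) + Q_inst
    U = 2.0 / N * np.sum(F * np.sin(np.pi / 2 * i)) + U_inst
    dQ = 2.0 / N * np.sqrt(np.sum(dF**2 * np.cos(np.pi / 2 * i) ** 2))
    dU = 2.0 / N * np.sqrt(np.sum(dF**2 * np.sin(np.pi / 2 * i) ** 2))

    # degree (0..1) and angle (degrees) of linear polarisation
    P = math.hypot(Q, U)
    dP = math.sqrt((Q * dQ) ** 2 + (U * dU) ** 2) / P
    chi = 0.5 * math.degrees(math.atan2(-Q, U))
    dchi = 0.5 * math.degrees(math.sqrt((U * dQ) ** 2 + (Q * dU) ** 2) / (Q**2 + U**2))

    return P, dP, chi, dchi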
diff --git a/iop4lib/instruments/instrument.py b/iop4lib/instruments/instrument.py index d6fdb4c1..49e9ef31 100644 --- a/iop4lib/instruments/instrument.py +++ b/iop4lib/instruments/instrument.py @@ -610,7 +610,7 @@ def compute_relative_photometry(cls, redf: 'ReducedFit') -> None: @classmethod - def estimate_common_apertures(cls, reducedfits, reductionmethod=None, fit_boxsize=None, search_boxsize=(90,90)): + def estimate_common_apertures(cls, reducedfits, reductionmethod=None, fit_boxsize=None, search_boxsize=(90,90), fwhm_min=2, fwhm_max=50): r"""estimate an appropriate common aperture for a list of reduced fits. It fits the target source profile in the fields and returns some multiples of the fwhm which are used as the aperture and as the inner and outer radius of the annulus for local bkg estimation). @@ -637,7 +637,7 @@ def estimate_common_apertures(cls, reducedfits, reductionmethod=None, fit_boxsiz try: gaussian = fit_gaussian(px_start=redf.wcs.world_to_pixel(target.coord), redf=redf) fwhm = (2*np.sqrt(2*math.log(2))) * np.sqrt(gaussian[0].x_stddev.value**2+gaussian[0].y_stddev.value**2) - if not (2 < fwhm < 50): + if not (fwhm_min < fwhm < fwhm_max): logger.warning(f"ReducedFit {redf.id} {target.name}: fwhm = {fwhm} px, skipping this reduced fit") continue logger.debug(f"{target.name}: Gaussian FWHM: {fwhm:.1f} px") diff --git a/iop4lib/utils/__init__.py b/iop4lib/utils/__init__.py index 1eb1adec..90dddd85 100644 --- a/iop4lib/utils/__init__.py +++ b/iop4lib/utils/__init__.py @@ -486,15 +486,15 @@ def get_candidate_rank_by_matchs(redf: 'ReducedFit', ap = CircularAperture(xycen, r=aperpix) annulus = CircularAnnulus(xycen, r_in=r_in, r_out=r_out) - annulus_stats = ApertureStats(redf.mdata, annulus, sigma_clip=SigmaClip(sigma=5.0, maxiters=10)) + annulus_stats = ApertureStats(redf.mdata, annulus, sigma_clip=SigmaClip(sigma=5.0)) ap_stats = ApertureStats(redf.mdata, ap) flux_counts = ap_stats.sum - annulus_stats.mean*ap_stats.sum_aper_area.value if not flux_counts > 0: continue - if not ap_stats.max > 3*annulus_stats.mean: + elif not ap_stats.max > 2*annulus_stats.std: continue - if not 2 < sigma < 50: + if not 8 < sigma < 60: continue calibrators_fluxes.append(flux_counts) @@ -505,18 +505,13 @@ def get_candidate_rank_by_matchs(redf: 'ReducedFit', gc.collect() if len(calibrators_fluxes) > 0: - # this one is pretty nice, just look at how many matches with the calibs you find + # just look at how many matches with the calibs you find rank_1 = 1 - 0.5**np.sum(~np.isnan(calibrators_fluxes)) - # but if you have one or tow calibrators, you might be unlucy, look at their brigtness - rank_2 = np.nansum(calibrators_fluxes) if np.sum(~np.isnan(calibrators_fluxes)) > 0 else 1 - # however one single of thos brigh star can turn the rank to shit if not len(calibrators) > 0: logger.debug(f"No calibrators to rank, raising exception.") raise Exception - if len(calibrators) == 1: - rank = rank_1 * rank_2 - elif len(calibrators) > 1: + else: rank = rank_1 else: # if no calibrators could be fitted, return -np.inf rank = -np.inf diff --git a/tests/builld_test_dataset.py b/tests/builld_test_dataset.py index 41a23db7..045abccf 100644 --- a/tests/builld_test_dataset.py +++ b/tests/builld_test_dataset.py @@ -8,6 +8,9 @@ including raw light, bias, darks, flats and a test catalog file. Edit the pk_L list to add the pk of the results you want to include in the test dataset. + +Edit the raw_fileloc_L list to add the paths of additional files you want to include in the test dataset. 
+ """ import iop4lib.config @@ -22,78 +25,126 @@ import hashlib from pathlib import Path + +###################### +### Configuration #### +###################### + + +clean_workdir = False + +Nmax_raw_per_master = 1 + +# only Nmax_raw_per_master will be used for these files + +fileloc_L = [ + # some dipol files to test calibration + "OSN-T090/2023-11-06/BLLac_IAR-0001R.fit", # DIPIL photometry astrocalibration / shotgun + "OSN-T090/2023-10-11/OJ248_R_IAR-0111.fts", # DIPOL polarimetry astrocalibration / catalog matching in a blazar + "OSN-T090/2023-10-25/HD204827_R_IAR-0384.fts", # DIPOL polarimetry astrocalibration / target E, O in a star + # DIPOL polarimetry astrocalibration / quad matching in a blazar + "OSN-T090/2023-11-06/BLLac_IAR-0001R.fit", # the photometry file + "OSN-T090/2023-11-06/BLLAC_R_IAR-0760.fts", # the polarimetry file +] + +# for this, Nmax_raw_per_master will be ignored (to keep the result as close to the real one as possible) + pk_L = [ - #48572, # OSN-T090 Andor Polarimetry 2022-09-08 23:03:14 2200+420 reduced fits 6565, 6566, 6567, 6568 mag R 13.328 \pm 0.164 p 12.48 % \pm 0.35 % chi 15.38 \pm 0.79, iop3 mag 13.38, p 14.0, chi 14.7 - #34354, # OSN-T090 Andor Photometry 2022-09-18 23:05:05 2200+420 reduced fits 39071 mag R 13.292 \pm 0.034, iop3 mag 13.35 - #64092, # CAHA-T220 CAFOS Polarimetry 2022-09-18 23:02:53 2200+420 reduced fits 40827, 40828, 40829, 40830 mag R 13.369 \pm 0.036 p 11.18 % \pm 0.12 % chi 25.45 \pm 0.30, iop3 mag 13.38, p 10.9, chi 25.2 - # OSN-T090 DIPOL Polarimetry 2023-10-11 1641+ + # 48572, # OSN-T090 Andor Polarimetry 2022-09-08 23:03:14 2200+420 reduced fits 6565, 6566, 6567, 6568 mag R 13.328 \pm 0.164 p 12.48 % \pm 0.35 % chi 15.38 \pm 0.79, iop3 mag 13.38, p 14.0, chi 14.7 + # 34354, # OSN-T090 Andor Photometry 2022-09-18 23:05:05 2200+420 reduced fits 39071 mag R 13.292 \pm 0.034, iop3 mag 13.35 + # 64092, # CAHA-T220 CAFOS Polarimetry 2022-09-18 23:02:53 2200+420 reduced fits 40827, 40828, 40829, 40830 mag R 13.369 \pm 0.036 p 11.18 % \pm 0.12 % chi 25.45 \pm 0.30, iop3 mag 13.38, p 10.9, chi 25.2 + # ?????, # OSN-T090 DIPOL Polarimetry 2023-10-11 3C345 [mjd 60228.82199 datetime 2023/10/11 19:43 p 29.238 ± 0.000 chi 38.379 ± 0.392] ] output_file = Path("~/iop4testdata.tar.gz").expanduser() workdir = Path("~/iop4testdata").expanduser() -# remove the workdir if exists, and create it again +# remove workdir if configured, and create it again if necessary -if os.path.exists(workdir): +if clean_workdir and os.path.exists(workdir): shutil.rmtree(workdir) -os.makedirs(workdir) +os.makedirs(workdir, exist_ok=True) + + +############################################### +### Files for the PhotoPolResults pk_L list ### +############################################### + + +print("Copying files for the PhotoPolResults pk_L list") # get the list of files needed and copy them to the workdir +rawfitL = list() + for pk in pk_L: res = PhotoPolResult.objects.get(id=pk) - rawfitL = list() + if res.instrument == "DIPOL": + Nmax = 1 # too many files otherwise + else: + Nmax = None for redf in res.reducedfits.all(): # append all rawfits needed for this reducedfit rawfitL.append(redf.rawfit) # the bias for its masterbias - for bias in redf.masterbias.rawfits.all(): + for bias in redf.masterbias.rawfits.all()[:None]: rawfitL.append(bias) # the darks for its masterdark - for dark in redf.masterdark.rawfits.all(): - rawfitL.append(dark) + if redf.masterdark is not None: + for dark in redf.masterdark.rawfits.all()[:None]: + rawfitL.append(dark) # the flats for 
its masterflat - for flat in redf.masterflat.rawfits.all(): + for flat in redf.masterflat.rawfits.all()[:None]: rawfitL.append(flat) # the bias for the masterbias of its masterflat - for bias in redf.masterflat.masterbias.rawfits.all(): + for bias in redf.masterflat.masterbias.rawfits.all()[:None]: rawfitL.append(bias) # the bias for the masterbias of its masterdark - for bias in redf.masterdark.masterbias.rawfits.all(): - rawfitL.append(bias) + if redf.masterdark is not None: + for bias in redf.masterdark.masterbias.rawfits.all()[:None]: + rawfitL.append(bias) # the bias and the darks for the masterdark of its masterflat - for dark in redf.masterflat.masterdark.rawfits.all(): - rawfitL.append(dark) + if redf.masterflat.masterdark is not None: + for dark in redf.masterflat.masterdark.rawfits.all()[:None]: + rawfitL.append(dark) - files_to_download = set([rawfit for rawfit in rawfitL if not os.path.exists(rawfit.filepath)]) +files_to_download = set([rawfit for rawfit in rawfitL if not os.path.exists(rawfit.filepath)]) - if len(files_to_download) > 0: - # download the files if needed - Telescope.get_by_name(rawfitL[0].epoch.telescope).download_files(files_to_download) +if len(files_to_download) > 0: + # download the files if needed + Telescope.get_by_name(rawfitL[0].epoch.telescope).download_rawfits(files_to_download) - for rawfit in rawfitL: +for rawfit in rawfitL: - relative_path = Path(rawfit.filepath).relative_to(iop4conf.datadir) - - dest = workdir / relative_path + print(f" cp {rawfit.fileloc}") + + relative_path = Path(rawfit.filepath).relative_to(iop4conf.datadir) + + dest = workdir / relative_path + + if not os.path.exists(os.path.dirname(dest)): + os.makedirs(os.path.dirname(dest)) - if not os.path.exists(os.path.dirname(dest)): - os.makedirs(os.path.dirname(dest)) + if not os.path.exists(dest): + shutil.copy(rawfit.filepath, dest) - if not os.path.exists(dest): - shutil.copy(rawfit.filepath, dest) -# create the catalog file + +############################### +### create the catalog file ### +############################### + +print("Creating the catalog file") astrosources_ids_L = list() @@ -104,10 +155,75 @@ astrosources_ids_L.append(calibrator.id) from django.core.serializers import serialize -with open(workdir / "testcatalog.yaml", "w") as f: +with open(workdir / "testcatalog.yaml", "a") as f: f.write(serialize("yaml", AstroSource.objects.filter(id__in=set(astrosources_ids_L)), use_natural_foreign_keys=True, use_natural_primary_keys=True)) + f.write('\n') + + + +############################# +### copy additional files ### +############################# + +print("Copying additional files") + +rawfitL = list() + +for fileloc in fileloc_L: + + redf = ReducedFit.by_fileloc(fileloc) + + # append all rawfits needed for this reducedfit + rawfitL.append(redf.rawfit) + + # the bias for its masterbias + for bias in redf.masterbias.rawfits.all()[:Nmax_raw_per_master]: + rawfitL.append(bias) + + # the darks for its masterdark + if redf.masterdark is not None: + for dark in redf.masterdark.rawfits.all()[:Nmax_raw_per_master]: + rawfitL.append(dark) + + # the flats for its masterflat + for flat in redf.masterflat.rawfits.all()[:Nmax_raw_per_master]: + rawfitL.append(flat) + + # the bias for the masterbias of its masterflat + for bias in redf.masterflat.masterbias.rawfits.all()[:Nmax_raw_per_master]: + rawfitL.append(bias) + + # the bias for the masterbias of its masterdark + if redf.masterdark is not None: + for bias in redf.masterdark.masterbias.rawfits.all()[:Nmax_raw_per_master]: + 
rawfitL.append(bias) + + # the bias and the darks for the masterdark of its masterflat + if redf.masterflat.masterdark is not None: + for dark in redf.masterflat.masterdark.rawfits.all()[:Nmax_raw_per_master]: + rawfitL.append(dark) + + +for rawfit in rawfitL: + + print(f" cp {rawfit.fileloc}") + + relative_path = Path(rawfit.filepath).relative_to(iop4conf.datadir) + + dest = workdir / relative_path + + if not os.path.exists(os.path.dirname(dest)): + os.makedirs(os.path.dirname(dest)) + + if not os.path.exists(dest): + shutil.copy(rawfit.filepath, dest) + + +########################### +### Create .tar.gz file ### +########################### -# Create .tar.gz file +print("Creating .tar.gz file") # in mac-os, additional ._* files are created to save xattrs, avoid it setting COPYFILE_DISABLE os.environ["COPYFILE_DISABLE"] = "true" diff --git a/tests/conftest.py b/tests/conftest.py index d5b11136..81c0f99d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -12,7 +12,7 @@ TEST_CONFIG = str(Path(iop4conf.datadir) / "config.tests.yaml") TESTDATA_FPATH = str(Path("~/iop4testdata.tar.gz").expanduser()) -TESTDATA_MD5SUM = '8cf7f2cd3b072f16749fb2c04bc5fb48' +TESTDATA_MD5SUM = '0f5432c54d6173f861b64d3d4280e2f7' TEST_DATADIR = str(Path(iop4conf.datadir) / "iop4testdata") TEST_DB_PATH = str(Path(iop4conf.db_path).expanduser().parent / ("test_" + str(Path(iop4conf.db_path).name))) @@ -80,4 +80,3 @@ def setUpClass(): def tearDownClass(): # remove test data dir os.system(f"rm -rf {iop4conf.datadir}") - diff --git a/tests/test_cahat220.py b/tests/test_caha_cafos.py similarity index 100% rename from tests/test_cahat220.py rename to tests/test_caha_cafos.py diff --git a/tests/test_generic.py b/tests/test_generic.py index 5c82e4ee..c1eb0856 100644 --- a/tests/test_generic.py +++ b/tests/test_generic.py @@ -44,5 +44,5 @@ def test_testconfig_testdb(load_test_catalog): assert (PhotoPolResult.objects.count() == 0) # there should be some test sources in the DB, and their calibrators - assert (0 < AstroSource.objects.count() < 20) + assert AstroSource.objects.count() > 0 assert AstroSource.objects.filter(name="2200+420").exists() \ No newline at end of file diff --git a/tests/test_osnt090.py b/tests/test_osnt090_andor.py similarity index 100% rename from tests/test_osnt090.py rename to tests/test_osnt090_andor.py diff --git a/tests/test_osnt090_dipol.py b/tests/test_osnt090_dipol.py new file mode 100644 index 00000000..03a73d0d --- /dev/null +++ b/tests/test_osnt090_dipol.py @@ -0,0 +1,91 @@ +import pytest +from pathlib import Path + +from .conftest import TEST_CONFIG + +# iop4lib config +import iop4lib.config +iop4conf = iop4lib.Config(config_path=TEST_CONFIG) + +# other imports +import os +from pytest import approx + +# logging +import logging +logger = logging.getLogger(__name__) + +# fixtures +from .fixtures import load_test_catalog + + + + +@pytest.mark.django_db(transaction=True) +def test_astrometric_calibration(load_test_catalog): + + from iop4lib.db import Epoch, RawFit, ReducedFit, AstroSource + from iop4lib.enums import IMGTYPES, SRCTYPES + from iop4lib.utils.quadmatching import distance + + epochname_L = ["OSN-T090/2023-09-26", "OSN-T090/2023-10-11", "OSN-T090/2023-10-12", "OSN-T090/2023-11-06"] + epoch_L = [Epoch.create(epochname=epochname) for epochname in epochname_L] + + for epoch in epoch_L: + epoch.build_master_biases() + + for epoch in epoch_L: + epoch.build_master_darks() + + for epoch in epoch_L: + epoch.build_master_flats() + + + # Test 1. 
Photometry field + + fileloc = "OSN-T090/2023-11-06/BLLac_IAR-0001R.fit" + rawfit = RawFit.by_fileloc(fileloc=fileloc) + redf = ReducedFit.create(rawfit=rawfit) + redf.build_file() + + # Test 2. Polarimetry field with quad matching (uses previous photometry field) + + fileloc = "OSN-T090/2023-11-06/BLLAC_R_IAR-0760.fts" + rawfit = RawFit.by_fileloc(fileloc=fileloc) + redf = ReducedFit.create(rawfit=rawfit) + redf.build_file() + + # check source position in the image + + src = AstroSource.objects.get(name="2200+420") + + assert redf.header_hintobject.name == src.name + assert redf.sources_in_field.filter(name=src.name).exists() + + pos_O = src.coord.to_pixel(wcs=redf.wcs1) + pos_E = src.coord.to_pixel(wcs=redf.wcs2) + + assert (distance(pos_O, [634, 297]) < 25) # O position + assert (distance(pos_E, [437, 319]) < 50) # E position # might be worse b.c. of companion star + + # Test 3. Polarimetry field using catalog matching + # This one is quite weak, so it might fail + + fileloc = "OSN-T090/2023-10-11/OJ248_R_IAR-0111.fts" + rawfit = RawFit.by_fileloc(fileloc=fileloc) + redf = ReducedFit.create(rawfit=rawfit) + redf.build_file() + + # check source position in the image + + src = AstroSource.objects.get(name="0827+243") + + assert redf.header_hintobject.name == src.name + assert redf.sources_in_field.filter(name=src.name).exists() + + pos_O = src.coord.to_pixel(wcs=redf.wcs1) + pos_E = src.coord.to_pixel(wcs=redf.wcs2) + + assert (distance(pos_O, [618, 259]) < 50) # O position + assert (distance(pos_E, [402, 268]) < 50) # E position + \ No newline at end of file From 61071836092a2250f4d0a2575b99493b6823fd60 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Mon, 13 Nov 2023 19:51:43 +0000 Subject: [PATCH 147/168] do not run tests in github CI dipol tests add too much weight --- .github/workflows/ci.yml | 59 ++++++++++++++++------------------------ 1 file changed, 23 insertions(+), 36 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a9b34b6f..957c1006 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -63,43 +63,30 @@ jobs: - name: Install the package in editable mode with all additional dependencies run: pip install --editable .[all] - - name: Install httpdirfs to access astrometry index files without downloading them - run: sudo apt install httpdirfs - - # - name: Try to restore httpdirfs cache data - # # this should make subsuquent commits in the same PR faster - # uses: actions/cache@/restore@v3 - # with: - # path: $HOME/.cache/httpdirfs/ - # key: httpdirfs-astrometry-5200-1-2-3-4 - - - name: Mount astrometry index file in default location - run: mkdir -p $HOME/.astrometry_cache/5200 && httpdirfs --cache 'https://portal.nersc.gov/project/cosmo/temp/dstn/index-5200/LITE/' $HOME/.astrometry_cache/5200/ - - - name: Check that it was correctly mounted - run: ls $HOME/.astrometry_cache/5200/index-5200-00.fits - - - name: Download test data - env: - TEST_DATA_PASSWORD: ${{ secrets.test_data_password }} - run: | - export TESTDATA_MD5SUM=`grep 'TESTDATA_MD5SUM' ./tests/conftest.py | awk -F"'" '{print $2}' | tr -d '\n'` - wget --post-data "pass=$TEST_DATA_PASSWORD" "https://vhega.iaa.es/iop4/iop4testdata.tar.gz?md5sum=$TESTDATA_MD5SUM" -O $HOME/iop4testdata.tar.gz - - - name: Run tests (with -o log_cli=true -o log_cli_level=DEBUG to debug CI actions) - run: pytest -o log_cli=true -o log_cli_level=DEBUG -vxs tests/ + # - name: Install httpdirfs to access astrometry index files without downloading them + # run: sudo apt install httpdirfs - - 
name: Output some info for debugging - # | true so erros in this step are ignored - run: | - df -h || true - du -sh $HOME/.cache/httpdirfs/ || true - - # - name: Save httpdirfs cache data - # uses: actions/cache/save@v3 - # with: - # path: $HOME/.cache/httpdirfs/ - # key: httpdirfs-astrometry-5200-1-2-3-4 + # - name: Mount astrometry index file in default location + # run: mkdir -p $HOME/.astrometry_cache/5200 && httpdirfs --cache 'https://portal.nersc.gov/project/cosmo/temp/dstn/index-5200/LITE/' $HOME/.astrometry_cache/5200/ + + # - name: Check that it was correctly mounted + # run: ls $HOME/.astrometry_cache/5200/index-5200-00.fits + + # - name: Download test data + # env: + # TEST_DATA_PASSWORD: ${{ secrets.test_data_password }} + # run: | + # export TESTDATA_MD5SUM=`grep 'TESTDATA_MD5SUM' ./tests/conftest.py | awk -F"'" '{print $2}' | tr -d '\n'` + # wget --post-data "pass=$TEST_DATA_PASSWORD" "https://vhega.iaa.es/iop4/iop4testdata.tar.gz?md5sum=$TESTDATA_MD5SUM" -O $HOME/iop4testdata.tar.gz + + # - name: Run tests (with -o log_cli=true -o log_cli_level=DEBUG to debug CI actions) + # run: pytest -o log_cli=true -o log_cli_level=DEBUG -vxs tests/ + + # - name: Output some info for debugging + # # | true so erros in this step are ignored + # run: | + # df -h || true + # du -sh $HOME/.cache/httpdirfs/ || true From 9da32e8ea8ba04fa22d7877a9690ade5638f21da Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Mon, 13 Nov 2023 23:38:05 +0000 Subject: [PATCH 148/168] sourcedetection: convolve_fft for larger kernels --- iop4lib/utils/sourcedetection.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/iop4lib/utils/sourcedetection.py b/iop4lib/utils/sourcedetection.py index f1030a52..29c21b4c 100644 --- a/iop4lib/utils/sourcedetection.py +++ b/iop4lib/utils/sourcedetection.py @@ -4,7 +4,7 @@ import numpy as np from photutils.detection import DAOStarFinder -from astropy.convolution import convolve +from astropy.convolution import convolve, convolve_fft from photutils.segmentation import make_2dgaussian_kernel from astropy.stats import SigmaClip, sigma_clipped_stats from photutils.background import Background2D, MedianBackground, SExtractorBackground @@ -30,8 +30,13 @@ def apply_gaussian_smooth(data, fwhm, kernel_size=None): if kernel_size is None: kernel_size = 2*int(fwhm)+1 + if kernel_size < 30: + fconv = convolve + else: + fconv = convolve_fft + kernel = make_2dgaussian_kernel(fwhm, size=kernel_size) - data = convolve(data, kernel) + data = fconv(data, kernel) return data def get_sources_daofind(data, threshold=None, fwhm=8.0, n_threshold=5.0, brightest=100, exclude_border=True): @@ -84,8 +89,13 @@ def get_segmentation(imgdata_bkg_substracted, threshold, fwhm=1.0, kernel_size=N if kernel_size is None: kernel_size = 2*int(fwhm)+1 + if kernel_size < 30: + fconv = convolve + else: + fconv = convolve_fft + kernel = make_2dgaussian_kernel(fwhm, size=kernel_size) - convolved_data = convolve(imgdata_bkg_substracted, kernel) + convolved_data = fconv(imgdata_bkg_substracted, kernel) if deblend: finder = SourceFinder(npixels=npixels, deblend=deblend, progress_bar=False) From 1e9c954152db12a784360eeb8ec134dc725a04a8 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Mon, 13 Nov 2023 23:43:59 +0000 Subject: [PATCH 149/168] dipol: convolve star images before calibration they are too bright and usually are defocused this causes the image to appear ring-like if not convolved, the star position is not correctly estimated --- iop4lib/instruments/dipol.py | 22 
++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index e4cc53ce..bb3d7bbe 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -267,8 +267,6 @@ def get_invariable_str(s): return AstroSource.objects.get(name=source['name']) for source in catalog: - if not source['other_name']: - continue if get_invariable_str(search_str) in get_invariable_str(source['name']): return AstroSource.objects.get(name=source['name']) @@ -331,7 +329,13 @@ def has_pairs(cls, fit_instance: 'ReducedFit' or 'RawFit') -> bool: @classmethod - def _estimate_positions_from_segments(cls, redf, fwhm=1.0, npixels=64, n_seg_threshold=3.0, centered=True): + def _estimate_positions_from_segments(cls, redf, fwhm=None, npixels=64, n_seg_threshold=3.0, centered=True): + + if redf.header_hintobject.srctype == SRCTYPES.STAR and redf.exptime <= 5: + fwhm = 80.0 + else: + fwhm = 1.0 + # get the sources positions data = redf.data @@ -407,7 +411,7 @@ def build_wcs(cls, reducedfit: 'ReducedFit', summary_kwargs : dict = {'build_sum def _try_EO_method(): if target_src.srctype == SRCTYPES.STAR: - n_seg_threshold_L = [700, 600, 500, 400, 300, 200, 100, 50] + n_seg_threshold_L = [300, 200, 100, 50, 25, 12, 6] if reducedfit.exptime <= 5: npixels_L = [128, 256, 64] else: @@ -425,7 +429,7 @@ def _try_quad_method(): if redf_phot is not None: if target_src.srctype == SRCTYPES.STAR: - n_threshold_L = [700, 600, 500, 400, 300, 200, 100] + n_threshold_L = [300, 200, 100, 50, 25, 12, 6] else: n_threshold_L = [15,5,3] @@ -729,7 +733,7 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', """ # disp_allowed_err = 1.5*cls.disp_std - disp_allowed_err = np.array([30,30]) # most times, should be much smaller (1.5*std) + disp_allowed_err = np.array([30,30]) # most times should be much smaller (1.5*std) # but in bad cases, this is ~1 sigma of the gaussians logger.debug(f"{redf}: building WCS for DIPOL polarimetry images.") @@ -868,7 +872,7 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', disp = np.abs(np.subtract(pos1, pos2)) diff = np.abs(np.subtract(pos1, pos2))-np.abs(cls.disp_sign_mean) with np.printoptions(precision=1, suppress=True): - logger.debug(f"{i=}, {pos1=!s}, {pos2=!s}, {dist=!s}, {disp=!s}, {diff=!s}") + logger.debug(f"{i=},\t{pos1=!s},\t{pos2=!s},\t{dist=!s},\t{disp=!s},\t{diff=!s}") # get the best pairs according to the disp_sign_mean # since we dont know if pre_list1 is the ordinary or extraordinary image, try with @@ -973,7 +977,9 @@ def _build_wcs_for_polarimetry_from_target_O_and_E(cls, redf: 'ReducedFit', summ """ - disp_allowed_err = 1.5*cls.disp_std + # disp_allowed_err = 1.5*cls.disp_std + disp_allowed_err = np.array([30,30]) # most times should be much smaller (1.5*std) + # but in bad cases, this is ~1 sigma of the gaussians from iop4lib.db import AstroSource from iop4lib.utils.sourcepairing import get_best_pairs From 45ec0f84a4e7dde3e46866c7dba1bc297497c32c Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Tue, 14 Nov 2023 16:59:31 +0000 Subject: [PATCH 150/168] auto build finding chart --- .../iop4admin/view_astrosourcedetails.html | 4 ++ iop4admin/views/__init__.py | 45 +----------- iop4admin/views/astrosource.py | 65 +++++++++++++++++ iop4admin/views/epoch.py | 37 ++++++++++ iop4admin/views/fitfile.py | 1 - iop4lib/db/astrosource.py | 17 +++-- iop4lib/utils/plotting.py | 69 ++++++++++++++++++- 7 files changed, 187 insertions(+), 51 deletions(-) 
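The patch below ("auto build finding chart") adds a plot_finding_chart helper to iop4lib/utils/plotting.py and an admin view that caches the rendered PNG under the source's filedpropdir. As a usage illustration only, here is a minimal sketch of calling the helper outside the admin view; it is not part of the patch, and the Config(config_db=True) flag, the output filename and the choice of source "2200+420" are assumptions made for the example.

# illustrative sketch, not IOP4 code: render a finding chart for one catalog source
import iop4lib.config
iop4conf = iop4lib.Config(config_db=True)  # assumed flag to set up Django DB access

import matplotlib as mplt
from iop4lib.db import AstroSource
from iop4lib.utils.plotting import plot_finding_chart

src = AstroSource.objects.get(name="2200+420")  # example source, also used in the tests

fig = mplt.figure.Figure(figsize=(8, 8), dpi=iop4conf.mplt_default_dpi)
ax = fig.subplots()
plot_finding_chart(src, fig=fig, ax=ax)  # IOP4 calibrators in red, SIMBAD neighbours in blue
fig.savefig("finding_chart_2200+420.png", bbox_inches="tight")
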
create mode 100644 iop4admin/views/astrosource.py create mode 100644 iop4admin/views/epoch.py diff --git a/iop4admin/templates/iop4admin/view_astrosourcedetails.html b/iop4admin/templates/iop4admin/view_astrosourcedetails.html index 3491a4d4..a825eca0 100644 --- a/iop4admin/templates/iop4admin/view_astrosourcedetails.html +++ b/iop4admin/templates/iop4admin/view_astrosourcedetails.html @@ -31,5 +31,9 @@

 [hunk body garbled in extraction: the existing markup renders a "Comment" heading followed by {{ object.comment_html | safe }}; the four added lines render a new "Finding chart" heading with an image embedding the base64-encoded chart exposed by the view as finding_chart_b64]
+ + {{ block.super }} {% endblock %} \ No newline at end of file diff --git a/iop4admin/views/__init__.py b/iop4admin/views/__init__.py index d9de2862..c4bddf24 100644 --- a/iop4admin/views/__init__.py +++ b/iop4admin/views/__init__.py @@ -1,45 +1,4 @@ -from iop4lib.db import * from .singleobj import * from .fitfile import * - - -class AstroSourceDetailsView(SingleObjView): - model = AstroSource - template_name = "iop4admin/view_astrosourcedetails.html" - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - - obj = self.get_object() - - fields_and_values = {field.name:field.value_to_string(obj) for field in AstroSource._meta.fields if field.name != "comment" and getattr(obj, field.name) is not None} - context['fields_and_values'] = fields_and_values - - return context - - -class EpochDetailsView(SingleObjView): - model = Epoch - template_name = "iop4admin/view_epochdetails.html" - - def get_context_data(self, **kwargs): - import itertools - - context = super().get_context_data(**kwargs) - - obj = self.get_object() - - try: - header_key_S = set(itertools.chain.from_iterable([rawfit.header.keys() for rawfit in obj.rawfits.all()])) - context["header_key_S"] = list(header_key_S) - except Exception as e: - pass - - #header_key_D = dict() - #for key in header_key_S: - # header_key_D[key] = len([rawfit.id for rawfit in obj.rawfits.all() if key in rawfit.header]) - - - context["rawfitsummarystatus"] = obj.get_summary_rawfits_status() - - return context \ No newline at end of file +from .epoch import * +from .astrosource import * \ No newline at end of file diff --git a/iop4admin/views/astrosource.py b/iop4admin/views/astrosource.py new file mode 100644 index 00000000..8671f0a9 --- /dev/null +++ b/iop4admin/views/astrosource.py @@ -0,0 +1,65 @@ +# iop4lib config +import iop4lib.config +iop4conf = iop4lib.Config(config_db=False) + +# other imports +import os +from pathlib import Path +import io +import base64 +import numpy as np +import matplotlib as mplt +import matplotlib.pyplot as plt + +# iop4lib +from iop4lib.db import AstroSource +from .singleobj import SingleObjView +from iop4lib.utils.plotting import plot_finding_chart + +# logging +import logging +logger = logging.getLogger(__name__) + +class AstroSourceDetailsView(SingleObjView): + model = AstroSource + template_name = "iop4admin/view_astrosourcedetails.html" + + def get_context_data(self, **kwargs): + context = super().get_context_data(**kwargs) + + obj = self.get_object() + + fields_and_values = {field.name:field.value_to_string(obj) for field in AstroSource._meta.fields if field.name != "comment" and getattr(obj, field.name) is not None} + context['fields_and_values'] = fields_and_values + + # finding chart + finding_char_path = Path(obj.filedpropdir) / "finding_chart.png" + + if True: #not os.path.exists(finding_char_path): + buf = io.BytesIO() + + width, height = 800, 800 + + fig = mplt.figure.Figure(figsize=(width/100, height/100), dpi=iop4conf.mplt_default_dpi) + ax = fig.subplots() + + plot_finding_chart(obj, ax=ax, fig=fig) + + fig.savefig(buf, format='png', bbox_inches='tight') + fig.clf() + + buf.seek(0) + imgbytes = buf.read() + + if not os.path.exists(obj.filedpropdir): + os.makedirs(obj.filedpropdir) + with open(finding_char_path, 'wb') as f: + f.write(imgbytes) + else: + with open(finding_char_path, 'rb') as f: + imgbytes = f.read() + + context['finding_chart_b64'] = base64.b64encode(imgbytes).decode('utf-8') + + return context + diff --git a/iop4admin/views/epoch.py 
b/iop4admin/views/epoch.py new file mode 100644 index 00000000..116a2c6b --- /dev/null +++ b/iop4admin/views/epoch.py @@ -0,0 +1,37 @@ +# iop4lib config +import iop4lib.config +iop4conf = iop4lib.Config(config_db=False) + +# other imports + +import itertools + +# iop4lib + +from iop4lib.db import Epoch +from .singleobj import SingleObjView + +# logging + +import logging +logger = logging.getLogger(__name__) + +class EpochDetailsView(SingleObjView): + model = Epoch + template_name = "iop4admin/view_epochdetails.html" + + def get_context_data(self, **kwargs): + + context = super().get_context_data(**kwargs) + + obj = self.get_object() + + try: + header_key_S = set(itertools.chain.from_iterable([rawfit.header.keys() for rawfit in obj.rawfits.all()])) + context["header_key_S"] = list(header_key_S) + except Exception as e: + pass + + context["rawfitsummarystatus"] = obj.get_summary_rawfits_status() + + return context \ No newline at end of file diff --git a/iop4admin/views/fitfile.py b/iop4admin/views/fitfile.py index 618cb2fa..ad702849 100644 --- a/iop4admin/views/fitfile.py +++ b/iop4admin/views/fitfile.py @@ -1,4 +1,3 @@ - # iop4lib config import iop4lib.config iop4conf = iop4lib.Config(config_db=False) diff --git a/iop4lib/db/astrosource.py b/iop4lib/db/astrosource.py index 9605a28c..678fe4e3 100644 --- a/iop4lib/db/astrosource.py +++ b/iop4lib/db/astrosource.py @@ -1,11 +1,15 @@ import iop4lib.config iop4conf = iop4lib.Config(config_db=False) +# django imports from django.db import models from django.db.models import Q -# other imports +# iop4lib imports from ..enums import * + +# other imports +import os import pypandoc import warnings import astropy.io.fits as fits @@ -40,6 +44,7 @@ class AstroSource(models.Model): comment = models.TextField(null=True, blank=True) # Blazar fields + redshift = models.FloatField(null=True, blank=True) # Calibration stars fields @@ -63,11 +68,10 @@ class AstroSource(models.Model): # Natural key - # allows us te relate the sources and calibration stars by names only + # allows us to relate the sources and calibration stars by names only # custom manager allows us to use natural keys when loading fixtures objects = AstroSourceManager() - # this method allows us to dump using natural keys def natural_key(self): return (self.name,) @@ -112,9 +116,6 @@ def is_in_field(self, wcs, height, width): return False # helper properties - - def get_aperpix(self): - return 12 @property def coord(self): @@ -131,6 +132,10 @@ def comment_html(self): return html_src + @property + def filedpropdir(self): + return os.path.join(iop4conf.datadir, "astrosource", self.name) + # Class methods @classmethod diff --git a/iop4lib/utils/plotting.py b/iop4lib/utils/plotting.py index a47a5fa0..d729d7f6 100644 --- a/iop4lib/utils/plotting.py +++ b/iop4lib/utils/plotting.py @@ -585,4 +585,71 @@ def build_astrometry_summary_images(redf, astrocalib_proc_vars, summary_kwargs): ax = fig.subplots(nrows=1, ncols=1, subplot_kw={'projection': astrocalib_proc_vars['wcs1']}) plot_preview_astrometry(redf, **astrocalib_proc_vars, ax=ax, fig=fig, with_simbad=summary_kwargs['with_simbad']) fig.savefig(Path(redf.filedpropdir) / "astrometry_4_img_result.png", bbox_inches="tight") - fig.clf() \ No newline at end of file + fig.clf() + + + + +def plot_finding_chart(target_src, fig=None, ax=None): + + from iop4lib.db import AstroSource + from iop4lib.utils import get_simbad_sources + + radius = u.Quantity("6 arcmin") + + if fig is None: + fig = plt.gcf() + + if ax is None: + ax = plt.gca() + + simbad_sources = 
get_simbad_sources(target_src.coord, radius=radius, Nmax=5, exclude_self=False) + calibrators = AstroSource.objects.filter(calibrates=target_src) + + for src in calibrators: + ax.plot([src.coord.ra.deg], [src.coord.dec.deg], 'rx', alpha=1) + ax.annotate(text=src.name, xy=(src.coord.ra.deg+0.001, src.coord.dec.deg), xytext=(+30,0), textcoords="offset pixels", color="red", fontsize=10, weight="bold", verticalalignment="center", horizontalalignment="left", arrowprops=dict(color="red", width=0.5, headwidth=1, headlength=3)) + + for src in simbad_sources: + ax.plot([src.coord.ra.deg], [src.coord.dec.deg], 'b+', alpha=1) + ax.annotate(text=src.name, xy=(src.coord.ra.deg-0.001, src.coord.dec.deg), xytext=(-30,0), textcoords="offset pixels", color="blue", fontsize=10, weight="bold", verticalalignment="center", horizontalalignment="right", arrowprops=dict(color="blue", width=0.5, headwidth=1, headlength=3)) + + ax.plot([target_src.coord.ra.deg], [target_src.coord.dec.deg], 'ro', alpha=1) + + # limits (center around target source) + + ax.set_xlim([target_src.coord.ra.deg - radius.to_value("deg")/2, target_src.coord.ra.deg + radius.to_value("deg")/2]) + ax.set_ylim([target_src.coord.dec.deg - radius.to_value("deg")/2, target_src.coord.dec.deg + radius.to_value("deg")/2]) + + # labels + + ax.set_xlabel("RA [deg]") + ax.set_ylabel("DEC [deg]") + ax.set_title(f"{target_src.name} ({target_src.other_name})") + + # legend + + target_h = ax.plot([],[], 'ro', label=target_src.name)[0] + simbad_h = ax.plot([],[], 'b+', label="SIMBAD sources")[0] + calibrators_h = ax.plot([],[], 'rx', label="IOP4 Calibrators")[0] + ax.legend(handles=[target_h, calibrators_h, simbad_h], loc="upper right") + + ax.grid(True, color='gray', ls='dashed') + + # secondary axes in hms and dms + + lims_ra = ax.get_xlim() + ax_x2 = ax.twiny() + ax_x2_ticks = ax.get_xticks() + ax_x2.set_xticks(ax_x2_ticks) + ax_x2.set_xticklabels([Angle(x, unit="deg").to_string(unit="hourangle", sep="hms") for x in ax_x2_ticks]) + ax_x2.set_xlabel("RA [hms]") + ax_x2.set_xlim(lims_ra) + + lims_dec = ax.get_ylim() + ax_y2 = ax.twinx() + ax_y2_ticks = ax.get_yticks() + ax_y2.set_yticks(ax_y2_ticks) + ax_y2.set_yticklabels([Angle(x, unit="deg").to_string(unit="deg", sep="dms") for x in ax_y2_ticks]) + ax_y2.set_ylabel("DEC [dms]") + ax_y2.set_ylim(lims_dec) \ No newline at end of file From 579a950ee2a18b5045f86ce5d90a1f331612cf34 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Tue, 14 Nov 2023 18:39:34 +0000 Subject: [PATCH 151/168] improve dipol astro calibration --- iop4lib/instruments/dipol.py | 155 +++++++++++++++++------------- iop4lib/instruments/instrument.py | 9 +- iop4lib/utils/__init__.py | 37 ++++--- 3 files changed, 111 insertions(+), 90 deletions(-) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index bb3d7bbe..b1802192 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -321,7 +321,7 @@ def get_astrometry_position_hint(cls, rawfit: 'RawFit', allsky=False, n_field_wi @classmethod def has_pairs(cls, fit_instance: 'ReducedFit' or 'RawFit') -> bool: - """ DIPOL ALWAYS HAS PAIRS?!!!! 
""" + """ DIPOL ALWAYS HAS PAIRS """ return True @@ -329,21 +329,22 @@ def has_pairs(cls, fit_instance: 'ReducedFit' or 'RawFit') -> bool: @classmethod - def _estimate_positions_from_segments(cls, redf, fwhm=None, npixels=64, n_seg_threshold=3.0, centered=True): + def _estimate_positions_from_segments(cls, redf=None, data=None, fwhm=None, npixels=64, n_seg_threshold=3.0, centered=True): - if redf.header_hintobject.srctype == SRCTYPES.STAR and redf.exptime <= 5: + if redf is not None and redf.header_hintobject.srctype == SRCTYPES.STAR and redf.exptime <= 5: fwhm = 80.0 else: fwhm = 1.0 # get the sources positions - data = redf.data - - mean, median, std = sigma_clipped_stats(data, sigma=5.0) + if data is None: + data = redf.mdata + + height, width = data.shape - bkg = get_bkg(redf.mdata, filter_size=5, box_size=redf.width//10) - imgdata_bkg_substracted = redf.mdata - bkg.background + bkg = get_bkg(data, filter_size=5, box_size=width//10) + imgdata_bkg_substracted = data - bkg.background seg_threshold = n_seg_threshold * bkg.background_rms segment_map, convolved_data = get_segmentation(imgdata_bkg_substracted, fwhm=fwhm, npixels=npixels, threshold=seg_threshold) @@ -354,9 +355,9 @@ def _estimate_positions_from_segments(cls, redf, fwhm=None, npixels=64, n_seg_th if centered: # select only the sources in the center - cx, cy = redf.width//2, redf.height//2 - idx = np.abs(positions[:,0]-cx) < 1/3 * redf.width - idx = idx & (np.abs(positions[:,1]-cy) < 1/3 * redf.height) + cx, cy = width//2, height//2 + idx = np.abs(positions[:,0]-cx) < 1/3 * width + idx = idx & (np.abs(positions[:,1]-cy) < 1/3 * height) positions = positions[idx] return positions @@ -424,27 +425,29 @@ def _try_EO_method(): if (build_wcs_result := cls._build_wcs_for_polarimetry_from_target_O_and_E(reducedfit, summary_kwargs=summary_kwargs, n_seg_threshold=n_seg_threshold, npixels=npixels)): break return build_wcs_result - + def _try_quad_method(): if redf_phot is not None: - + if target_src.srctype == SRCTYPES.STAR: - n_threshold_L = [300, 200, 100, 50, 25, 12, 6] + n_seg_threshold_L = [300, 200, 200, 100, 50, 25, 12, 6] + npixels_L = [128, 64] else: - n_threshold_L = [15,5,3] + n_seg_threshold_L = [1.0] + npixels_L = [64, 32] - for fwhm, n_threshold in itertools.product([30,15], n_threshold_L): - if (build_wcs_result := cls._build_wcs_for_polarimetry_images_photo_quads(reducedfit, summary_kwargs=summary_kwargs, n_threshold=n_threshold, find_fwhm=fwhm, smooth_fwhm=4)): + for npixels, n_seg_threshold in itertools.product(npixels_L, n_seg_threshold_L): + if (build_wcs_result := cls._build_wcs_for_polarimetry_images_photo_quads(reducedfit, summary_kwargs=summary_kwargs, n_seg_threshold=n_seg_threshold, npixels=npixels)): break else: build_wcs_result = BuildWCSResult(success=False) return build_wcs_result - + def _try_catalog_method(): if target_src.srctype == SRCTYPES.STAR: - n_seg_threshold_L = [700, 500, 400, 300, 200, 100, 50] - npixels_L = [128, 64] + n_seg_threshold_L = [300, 200, 200, 100, 50, 25, 12, 6] + npixels_L = [128, 64] else: n_seg_threshold_L = [1.0] npixels_L = [64, 32] @@ -462,14 +465,24 @@ def _try_catalog_method(): if target_src.srctype == SRCTYPES.STAR: method_try_order = [_try_EO_method, _try_quad_method, _try_catalog_method] - else: - method_try_order = [_try_quad_method, _try_catalog_method, _try_EO_method] + elif target_src.srctype == SRCTYPES.BLAZAR: + ## reduce flase positives by forcing to use quad method if it must work + if redf_phot is not None and n_estimate > 6: + method_try_order = [_try_quad_method] 
+ else: + method_try_order = [_try_catalog_method, _try_quad_method, _try_EO_method] for m in method_try_order: logger.debug(f"Trying {m.__name__} for {reducedfit}.") if (build_wcs := m()): break + build_wcs.info["m.__name__"] = m.__name__ + build_wcs.info["n_estimate"] = n_estimate + build_wcs.info["n_estimate_centered"] = n_estimate_centered + build_wcs.info["n_expected_simbad_sources"] = n_expected_simbad_sources + build_wcs.info["n_expected_calibrators"] = n_expected_calibrators + return build_wcs else: @@ -511,8 +524,8 @@ def _build_shotgun_params(cls, redf: 'ReducedFit'): @classmethod - def _build_wcs_for_polarimetry_images_photo_quads(cls, redf: 'ReducedFit', summary_kwargs : dict = {'build_summary_images':True, 'with_simbad':True}, n_threshold=5.0, find_fwhm=30, smooth_fwhm=4): - + def _build_wcs_for_polarimetry_images_photo_quads(cls, redf: 'ReducedFit', summary_kwargs : dict = {'build_summary_images':True, 'with_simbad':True}, n_seg_threshold=1.5, npixels=32): + from iop4lib.db import ReducedFit if (target_src := redf.header_hintobject) is None: @@ -533,6 +546,8 @@ def _build_wcs_for_polarimetry_images_photo_quads(cls, redf: 'ReducedFit', summa logger.error(f"No astro-calibrated photometry field found for {redf_pol}.") return BuildWCSResult(success=False) + logger.debug(f"Invoked with {n_seg_threshold=}, {npixels=}") + # get the subframe of the photometry field that corresponds to this polarimetry field, (approx) x_start = redf_pol.rawfit.header['XORGSUBF'] y_start = redf_pol.rawfit.header['YORGSUBF'] @@ -548,28 +563,14 @@ def _build_wcs_for_polarimetry_images_photo_quads(cls, redf: 'ReducedFit', summa sets_L = list() - for data in [redf_pol.mdata, photdata_subframe]: - - if smooth_fwhm: - kernel_size = 2*int(smooth_fwhm)+1 - kernel = make_2dgaussian_kernel(smooth_fwhm, size=kernel_size) - data = convolve(data, kernel) - - mean, median, std = sigma_clipped_stats(data, sigma=5.0) - - daofind = DAOStarFinder(fwhm=find_fwhm, threshold=n_threshold*std, brightest=100, exclude_border=True) - sources = daofind(data - median) - - if len(sources) < 4: - return BuildWCSResult(success=False) - - sources.sort('flux', reverse=True) + for redf, data in zip([redf_pol, redf_phot], [redf_pol.mdata, photdata_subframe]): - sources = sources[:10] - - positions = np.transpose((sources['xcentroid'], sources['ycentroid'])) + positions = cls._estimate_positions_from_segments(redf=redf, data=data, n_seg_threshold=n_seg_threshold, npixels=npixels, centered=False) + positions = positions[:10] sets_L.append(positions) + + logger.debug(f"Using {len(sets_L[0])} sources in polarimetry field and {len(sets_L[1])} in photometry field.") if summary_kwargs['build_summary_images']: fig = mplt.figure.Figure(figsize=(12,6), dpi=iop4conf.mplt_default_dpi) @@ -622,7 +623,7 @@ def _build_wcs_for_polarimetry_images_photo_quads(cls, redf: 'ReducedFit', summa distances_selected = all_distances[idx_selected] if np.sum(idx_selected) == 0: - logger.error(f"No quads with distance < 4.0, returning success = False.") + logger.error(f"No quads with distance < 4.0, minimum at {min(all_distances)=} returning success = False.") return BuildWCSResult(success=False, wcslist=None, info={'redf_phot__pk':redf_phot.pk, 'redf_phot__fileloc':redf_phot.fileloc}) else: idx_selected = np.argsort(distances_selected)[:5] @@ -713,7 +714,7 @@ def _build_wcs_for_polarimetry_images_photo_quads(cls, redf: 'ReducedFit', summa fig.clear() - result = BuildWCSResult(success=True, wcslist=wcslist, 
info={'method':'_build_wcs_for_polarimetry_images_photo_quads', 'redf_phot__pk':redf_phot.pk, 'redf_phot__fileloc':redf_phot.fileloc, 'smooth_fwhm':smooth_fwhm, 'n_threshold':n_threshold, 'find_fwhm':find_fwhm}) + result = BuildWCSResult(success=True, wcslist=wcslist, info={'redf_phot__pk':redf_phot.pk, 'redf_phot__fileloc':redf_phot.fileloc, n_seg_threshold:n_seg_threshold, npixels:npixels}) return result @@ -749,8 +750,8 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', data = redf.mdata cx, cy = redf.width//2, redf.height//2 - positions = cls._estimate_positions_from_segments(redf, n_seg_threshold=n_seg_threshold, npixels=npixels, centered=True) - positions_non_centered = cls._estimate_positions_from_segments(redf, n_seg_threshold=n_seg_threshold, npixels=npixels, centered=False) + positions = cls._estimate_positions_from_segments(redf=redf, n_seg_threshold=n_seg_threshold, npixels=npixels, centered=True) + positions_non_centered = cls._estimate_positions_from_segments(redf=redf, n_seg_threshold=n_seg_threshold, npixels=npixels, centered=False) if len(positions) == 0: logger.error(f"{redf}: Found no sources in the field, cannot build WCS.") @@ -790,6 +791,8 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', else: angle = angle_mean + logger.debug(f"Angle {angle=}") + # Now, if there is only two sources, they must be the ordinary and extraordinary images. We # use them, if they are not, the procedure failed, raise exception. @@ -872,17 +875,23 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', disp = np.abs(np.subtract(pos1, pos2)) diff = np.abs(np.subtract(pos1, pos2))-np.abs(cls.disp_sign_mean) with np.printoptions(precision=1, suppress=True): - logger.debug(f"{i=},\t{pos1=!s},\t{pos2=!s},\t{dist=!s},\t{disp=!s},\t{diff=!s}") + logger.debug(f"{i=},\t{pos1=!s},\t{pos2=!s},\t{dist=:.2f},\t{disp=!s},\t{diff=!s}") # get the best pairs according to the disp_sign_mean # since we dont know if pre_list1 is the ordinary or extraordinary image, try with # disp_sign_mean and -disp_sign_mean list1, list2, d0_new, disp_sign_new = get_best_pairs(pre_list1, pre_list2, cls.disp_sign_mean, disp_sign_err=disp_allowed_err) - logger.debug(f"{list1=}, {list2=}, {d0_new=}, {disp_sign_new=}") + + with np.printoptions(precision=1, suppress=True): + logger.debug(f"{list1=}, {list2=}, {d0_new=}, {disp_sign_new=}") + if len(list1) == 0: list1, list2, d0_new, disp_sign_new = get_best_pairs(pre_list1, pre_list2, -cls.disp_sign_mean, disp_sign_err=disp_allowed_err) - logger.debug(f"{list1=}, {list2=}, {d0_new=}, {disp_sign_new=}") + + with np.printoptions(precision=1, suppress=True): + logger.debug(f"{list1=}, {list2=}, {d0_new=}, {disp_sign_new=}") + if len(list1) == 0: logger.error("No pairs found, returning success = False.") return BuildWCSResult(success=False) @@ -920,11 +929,11 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', calibrators_in_field = [src for src in AstroSource.objects.filter(calibrates__in=[target_src]).all() if src.is_in_field(pre_wcs, redf.height, redf.width)] - logger.debug(f"Found {len(calibrators_in_field)} calibrators in field for {target_src}") + logger.debug(f"Found {len(calibrators_in_field)} calibrators in field for {target_src.name}: {calibrators_in_field}") if len(calibrators_in_field) <= 1 or len(positions) <= 2: logger.warning(f"Using pre-computed angle {angle:.2f} deg for {target_src}.") - wcslist = [build_wcs_centered_on(target_px, redf=redf, 
angle=angle) for target_px in [target_O, target_E]] + wcslist = [build_wcs_centered_on(target_px, redf=redf, angle=angle) for target_px in [target_O, target_E]] else: logger.debug(f"Using {len(calibrators_in_field)} calibrators in field to fit WCS for {target_src}.") @@ -932,24 +941,31 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', _, (fits_O, fits_E) = zip(*[get_candidate_rank_by_matchs(redf, pos, angle=angle, r_search=30, calibrators=expected_sources_in_field) for pos in [target_O, target_E]]) - for target_px, fits in zip([target_O, target_E], [fits_O, fits_E]): - known_pos_skycoord = [target_src.coord] - # fit[0] is the astro source fitted, fit[1] (fit[1][0] is the gaussian, fit[1][1] is the constant - known_pos_skycoord.extend([fit[0].coord for fit in fits]) - known_pos_px = [target_px] - known_pos_px.extend([(fit[1][0].x_mean.value, fit[1][0].y_mean.value) for fit in fits]) + for img_label, target_px, fits in zip(["Ordinary", "Extraordinary"], [target_O, target_E], [fits_O, fits_E]): - try: - logger.debug("Fitting " + ", ".join([f"ra {coord.ra.deg} dec {coord.dec.deg} to {pos}" for coord, pos in zip(known_pos_skycoord, known_pos_px)])) + if len(fits) > 1: + # fit[0] is the astro source fitted, fit[1] (fit[1][0] is the gaussian, fit[1][1] is the constant - wcs_fitted = fit_wcs_from_points(np.array(known_pos_px).T, SkyCoord(known_pos_skycoord), projection=build_wcs_centered_on(target_px, redf=redf, angle=angle)) - wcslist.append(wcs_fitted) - except Exception as e: - logger.error(f"Exception {e} while fitting WCS, using pre-computed angle {angle:.2f} deg for {target_src}.") - wcslist = [build_wcs_centered_on(target_px, redf=redf, angle=angle) for target_px in [target_O, target_E]] - + known_pos_src = [target_src] + known_pos_src.extend([fit[0] for fit in fits]) + known_pos_px = [target_px] + known_pos_px.extend([(fit[1][0].x_mean.value, fit[1][0].y_mean.value) for fit in fits]) + + try: + with np.printoptions(precision=2, suppress=True): + for px, src in zip(known_pos_px, known_pos_src): + logger.debug(f"Fitting {img_label} {src.name} ({src.coord.ra.deg:.2f} ra, {src.coord.dec.deg:.2f} dec) to ({px[0]:.1f}, {px[1]:.1f})") + + wcs_fitted = fit_wcs_from_points(np.array(known_pos_px).T, SkyCoord([src.coord for src in known_pos_src]), projection=build_wcs_centered_on(target_px, redf=redf, angle=angle)) + wcslist.append(wcs_fitted) + except Exception as e: + logger.error(f"Exception {e} while fitting WCS, using pre-computed angle {angle:.2f} deg for {target_src}.") + wcslist.append(build_wcs_centered_on(target_px, redf=redf, angle=angle)) + else: + logger.debug("Using pre-computed wcs.") + wcslist.append(build_wcs_centered_on(target_px, redf=redf, angle=angle)) if summary_kwargs['build_summary_images']: logger.debug(f"Building summary images for {redf}.") @@ -960,7 +976,7 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', fig.savefig(Path(redf.filedpropdir) / "astrometry_summary.png", bbox_inches="tight") fig.clear() - return BuildWCSResult(success=True, wcslist=wcslist, info={'method':'_build_wcs_for_polarimetry_images_catalog_matching'}) + return BuildWCSResult(success=True, wcslist=wcslist, info={}) @@ -999,7 +1015,7 @@ def _build_wcs_for_polarimetry_from_target_O_and_E(cls, redf: 'ReducedFit', summ # get the sources positions cx, cy = redf.width//2, redf.height//2 - positions = cls._estimate_positions_from_segments(redf, n_seg_threshold=n_seg_threshold, npixels=npixels, centered=True) + positions = 
cls._estimate_positions_from_segments(redf=redf, n_seg_threshold=n_seg_threshold, npixels=npixels, centered=True) if len(positions) == 0: logger.error(f"{redf}: Found no sources in the field, cannot build WCS.") @@ -1030,13 +1046,14 @@ def _build_wcs_for_polarimetry_from_target_O_and_E(cls, redf: 'ReducedFit', summ logger.warning(f"{redf}: {len(positions)} sources found, expected 2. Maybe after looking at pairs only, we can find the right ones.") pre_list1, pre_list2 = zip(*itertools.product(positions, positions)) + # log some debug info about the pairs diference and the difference with respect the expected disp_sign_mean for i, (pos1, pos2) in enumerate(zip(pre_list1, pre_list2)): dist = distance(pos1, pos2) disp = np.abs(np.subtract(pos1, pos2)) diff = np.abs(np.subtract(pos1, pos2))-np.abs(cls.disp_sign_mean) with np.printoptions(precision=1, suppress=True): - logger.debug(f"{i=}, {pos1=!s}, {pos2=!s}, {dist=!s}, {disp=!s}, {diff=!s}") + logger.debug(f"{i=}, {pos1=!s}, {pos2=!s}, dist={dist:.2f}, {disp=!s}, {diff=!s}") list1, list2, d0_new, disp_sign_new = get_best_pairs(pre_list1, pre_list2, cls.disp_sign_mean, disp_sign_err=disp_allowed_err) diff --git a/iop4lib/instruments/instrument.py b/iop4lib/instruments/instrument.py index 49e9ef31..c864e679 100644 --- a/iop4lib/instruments/instrument.py +++ b/iop4lib/instruments/instrument.py @@ -369,18 +369,15 @@ def astrometric_calibration(cls, reducedfit: 'ReducedFit', **build_wcs_kwargs): # Save some extra info (not in the header) - try: - # redf.astrometry_info = [to_save] - - if not 'date' in build_wcs_result.info: - build_wcs_result.info['date'] = datetime.datetime.now() + if not 'date' in build_wcs_result.info: + build_wcs_result.info['date'] = datetime.datetime.now() + try: if isinstance(reducedfit.astrometry_info, list): reducedfit.astrometry_info = list(itertools.chain(reducedfit.astrometry_info, [build_wcs_result.info])) else: reducedfit.astrometry_info = [build_wcs_result.info] except NameError: - logger.debug("Could not save astrometry info to filed property.") reducedfit.astrometry_info = [build_wcs_result.info] else: diff --git a/iop4lib/utils/__init__.py b/iop4lib/utils/__init__.py index 90dddd85..9a7e06de 100644 --- a/iop4lib/utils/__init__.py +++ b/iop4lib/utils/__init__.py @@ -246,7 +246,7 @@ def fit_sigma(pos_px: (float, float), *args, **kwargs) -> float: -def fit_gaussian(px_start, redf, sigma_start=7, r_max=90, r_search=None): +def fit_gaussian(px_start, redf=None, data=None, sigma_start=7, r_max=None, r_search=None): r""" Fits a 2D gaussian + constant to the data around the given position, and returns the fitted model. 
Parameters @@ -268,29 +268,36 @@ def fit_gaussian(px_start, redf, sigma_start=7, r_max=90, r_search=None): from astropy.modeling.models import Const2D, Gaussian2D from iop4lib.instruments import Instrument - mdata = redf.mdata + if redf is not None: + if data is None: + data = redf.mdata - if r_max is None: - # 0.4 arcsecs is excellent seeing - r_max = int((30*0.4) / Instrument.by_name(redf.instrument).arcsec_per_pix) + if r_max is None: + # 0.4 arcsecs is excellent seeing + r_max = int((30*0.4) / Instrument.by_name(redf.instrument).arcsec_per_pix) + else: + if r_max is None: + r_max = 90 + + height, width = data.shape x_start, y_start = px_start - X, Y = np.meshgrid(np.arange(redf.width), np.arange(redf.height)) + X, Y = np.meshgrid(np.arange(width), np.arange(height)) if r_search is not None: idx_region = np.sqrt((X-x_start)**2 + (Y-y_start)**2) < r_search - idx_region_max = np.argmax(mdata[idx_region]) + idx_region_max = np.argmax(data[idx_region]) x_start, y_start = X[idx_region][idx_region_max], Y[idx_region][idx_region_max] idx_fit_region = np.sqrt((X-x_start)**2 + (Y-y_start)**2) < r_max X = X[idx_fit_region].flatten() Y = Y[idx_fit_region].flatten() - Z = mdata[idx_fit_region].compressed() + Z = np.ma.array(data[idx_fit_region]).compressed() fit = fitting.LevMarLSQFitter() - gaussian = Gaussian2D(amplitude=mdata[int(y_start), int(x_start)], x_mean=x_start, y_mean=y_start, x_stddev=sigma_start, y_stddev=sigma_start) + Const2D(np.median(Z)) + gaussian = Gaussian2D(amplitude=data[int(y_start), int(x_start)], x_mean=x_start, y_mean=y_start, x_stddev=sigma_start, y_stddev=sigma_start) + Const2D(np.median(Z)) gaussian[0].x_stddev.tied = lambda model: model[0].y_stddev gaussian_fit = fit(gaussian, X, Y, Z) @@ -469,7 +476,7 @@ def get_candidate_rank_by_matchs(redf: 'ReducedFit', for src in calibrators: try: logger.debug(f"Trying to fit calibrator {src.name} at {src.coord.to_pixel(wcs)}") - fitted_gaussian = fit_gaussian(px_start=src.coord.to_pixel(wcs), redf=redf, r_search=r_search) + fitted_gaussian = fit_gaussian(px_start=src.coord.to_pixel(wcs), redf=redf, r_search=r_search, sigma_start=15) # 15px is closer to dipol's sigmas xycen = fitted_gaussian[0].x_mean.value, fitted_gaussian[0].y_mean.value sigma = np.sqrt(fitted_gaussian[0].x_stddev.value**2 + fitted_gaussian[0].y_stddev.value**2) #logger.debug(f"Sigma for calibrator {src.name}: {sigma} px") @@ -490,6 +497,9 @@ def get_candidate_rank_by_matchs(redf: 'ReducedFit', ap_stats = ApertureStats(redf.mdata, ap) flux_counts = ap_stats.sum - annulus_stats.mean*ap_stats.sum_aper_area.value + with np.printoptions(precision=2, suppress=True): + logger.debug(f"Calibrator {src.name} at {src.coord.to_pixel(wcs)}, sigma {sigma:.2f} SNR {ap_stats.max/annulus_stats.std:.2f}") + if not flux_counts > 0: continue elif not ap_stats.max > 2*annulus_stats.std: @@ -500,10 +510,6 @@ def get_candidate_rank_by_matchs(redf: 'ReducedFit', calibrators_fluxes.append(flux_counts) calibrators_fit_L.append((src, fitted_gaussian)) - logger.debug(f"Calibrator {src.name} at {src.coord.to_pixel(wcs)}, sigma = {sigma}: {flux_counts:.1f} counts") - - gc.collect() - if len(calibrators_fluxes) > 0: # just look at how many matches with the calibs you find rank_1 = 1 - 0.5**np.sum(~np.isnan(calibrators_fluxes)) @@ -516,7 +522,8 @@ def get_candidate_rank_by_matchs(redf: 'ReducedFit', else: # if no calibrators could be fitted, return -np.inf rank = -np.inf - logger.debug(f"Rank of candidate {candidate_pos} for src {target_src.name} (n={len(calibrators_fluxes)}): {rank}") + 
with np.printoptions(precision=2, suppress=True): + logger.debug(f"Rank of candidate {candidate_pos} for src {target_src.name} (n={len(calibrators_fluxes)}): {rank}") return rank, calibrators_fit_L From c8c8185c0b43b2dc1cbf77e2f1bc7cc8fb2633c6 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Tue, 14 Nov 2023 18:42:52 +0000 Subject: [PATCH 152/168] fix --- iop4lib/instruments/dipol.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index b1802192..2f98e410 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -446,8 +446,8 @@ def _try_quad_method(): def _try_catalog_method(): if target_src.srctype == SRCTYPES.STAR: - n_seg_threshold_L = [300, 200, 200, 100, 50, 25, 12, 6] - npixels_L = [128, 64] + n_seg_threshold_L = [300, 200, 200, 100, 50, 25, 12, 6] + npixels_L = [128, 64] else: n_seg_threshold_L = [1.0] npixels_L = [64, 32] From 26c2b1d2c2d5b06bad96ce30cefa34b88874df63 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Tue, 14 Nov 2023 19:52:54 +0000 Subject: [PATCH 153/168] reduce first photometry, then polarimetry --- iop4lib/iop4.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/iop4lib/iop4.py b/iop4lib/iop4.py index 0875c867..dc48977b 100644 --- a/iop4lib/iop4.py +++ b/iop4lib/iop4.py @@ -62,7 +62,8 @@ def process_epochs(epochname_list, force_rebuild, check_remote_list): logger.info("Science files will be reduced.") rawfits = RawFit.objects.filter(epoch__in=epoch_L, imgtype=IMGTYPES.LIGHT).all() - Epoch.reduce_rawfits(rawfits, force_rebuild=force_rebuild) + Epoch.reduce_rawfits(rawfits.filter(obsmode=OBSMODES.PHOTOMETRY), force_rebuild=force_rebuild) + Epoch.reduce_rawfits(rawfits.filter(obsmode=OBSMODES.POLARIMETRY), force_rebuild=force_rebuild) logger.info("Computing relative photometry results.") From b2849367fa557990cdafc499a2018403d8ba9a9f Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Wed, 15 Nov 2023 08:44:59 +0000 Subject: [PATCH 154/168] process epochs in reverse order --- iop4lib/iop4.py | 1 + 1 file changed, 1 insertion(+) diff --git a/iop4lib/iop4.py b/iop4lib/iop4.py index dc48977b..27435298 100644 --- a/iop4lib/iop4.py +++ b/iop4lib/iop4.py @@ -315,6 +315,7 @@ def main(): if not args.list_only: if len(epochnames_to_process) > 0: logger.info("Processing epochs.") + epochnames_to_process, _ = sorted(zip(*[(epochname, Epoch.epochname_to_tel_night(epochname)[1]) for epochname in epochnames_to_process]), reverse=True) process_epochs(epochnames_to_process, args.force_rebuild, check_remote_list=~args.skip_remote_file_list) else: logger.info("Invoked with --list-only!") From aa52fcb1d0cde83b1a108120fa88344a16889e42 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Wed, 15 Nov 2023 08:46:59 +0000 Subject: [PATCH 155/168] fix --- iop4lib/iop4.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/iop4lib/iop4.py b/iop4lib/iop4.py index 27435298..021e1231 100644 --- a/iop4lib/iop4.py +++ b/iop4lib/iop4.py @@ -315,7 +315,7 @@ def main(): if not args.list_only: if len(epochnames_to_process) > 0: logger.info("Processing epochs.") - epochnames_to_process, _ = sorted(zip(*[(epochname, Epoch.epochname_to_tel_night(epochname)[1]) for epochname in epochnames_to_process]), reverse=True) + epochnames_to_process, _ = sorted(zip(*[(epochname, Epoch.epochname_to_tel_night(epochname)[1].strftime("%Y-%m-%d")) for epochname in epochnames_to_process]), reverse=True) process_epochs(epochnames_to_process, args.force_rebuild, 
check_remote_list=~args.skip_remote_file_list) else: logger.info("Invoked with --list-only!") From c13eece02524c01757bc685834a603699b546d1f Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Wed, 15 Nov 2023 08:57:31 +0000 Subject: [PATCH 156/168] fix order --- iop4lib/iop4.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/iop4lib/iop4.py b/iop4lib/iop4.py index 021e1231..87118eb5 100644 --- a/iop4lib/iop4.py +++ b/iop4lib/iop4.py @@ -315,7 +315,7 @@ def main(): if not args.list_only: if len(epochnames_to_process) > 0: logger.info("Processing epochs.") - epochnames_to_process, _ = sorted(zip(*[(epochname, Epoch.epochname_to_tel_night(epochname)[1].strftime("%Y-%m-%d")) for epochname in epochnames_to_process]), reverse=True) + epochnames_to_process, _ = sorted(zip(*[(epochname, Epoch.epochname_to_tel_night(epochname)[1].strftime("%Y-%m-%d")) for epochname in epochnames_to_process]), key=lambda x: x[1], reverse=True) process_epochs(epochnames_to_process, args.force_rebuild, check_remote_list=~args.skip_remote_file_list) else: logger.info("Invoked with --list-only!") From 11ac9747e6fd8c90b2edce78495ce055d8484865 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Wed, 15 Nov 2023 09:39:54 +0000 Subject: [PATCH 157/168] configurable timeout for parallel.py --- config/config.example.yaml | 1 + iop4lib/utils/parallel.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/config/config.example.yaml b/config/config.example.yaml index f05a1afc..b949c237 100644 --- a/config/config.example.yaml +++ b/config/config.example.yaml @@ -10,6 +10,7 @@ set_rawdata_readonly: False # True / False (sets raw fits file to readonl db_path: ~/.iop4data/iop4.db # Path to iop4 sqlite database file. astrometry_cache_path: ~/.astrometry_cache/ #
Path to store the astromery index files. max_concurrent_threads: 4 # Number of threads / processes to use (e.g. 4). +astrometry_timeout: 7 # Timeout in minutes for astrometry solving. ################### ### RAY CLUSTER ### diff --git a/iop4lib/utils/parallel.py b/iop4lib/utils/parallel.py index f24cbec6..5366eb4b 100644 --- a/iop4lib/utils/parallel.py +++ b/iop4lib/utils/parallel.py @@ -148,7 +148,7 @@ def _epoch_bulkreduce_multiprocessing_worker(reduced_fit: 'ReducedFit'): try: # Start a timer that will send SIGALRM in 20 minutes signal.signal(signal.SIGALRM, _epoch_bulkreduce_multiprocessing_worker_timeout_handler) - signal.alarm(20*60) + signal.alarm(iop4conf.astrometry_timeout*60) reduced_fit.build_file() signal.alarm(0) # cancel the alarm except Exception as e: From e7b2fcb44c9826fbcb50880f986d6c6e07a6a304 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Wed, 15 Nov 2023 09:46:07 +0000 Subject: [PATCH 158/168] clear results before recomputing for each epoch --- iop4lib/iop4.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/iop4lib/iop4.py b/iop4lib/iop4.py index 87118eb5..5fea413b 100644 --- a/iop4lib/iop4.py +++ b/iop4lib/iop4.py @@ -65,15 +65,10 @@ def process_epochs(epochname_list, force_rebuild, check_remote_list): Epoch.reduce_rawfits(rawfits.filter(obsmode=OBSMODES.PHOTOMETRY), force_rebuild=force_rebuild) Epoch.reduce_rawfits(rawfits.filter(obsmode=OBSMODES.POLARIMETRY), force_rebuild=force_rebuild) - logger.info("Computing relative photometry results.") - - for epoch in epoch_L: - epoch.compute_relative_photometry() - - logger.info("Computing relative polarimetry results.") - + logger.info("Computing results.") for epoch in epoch_L: - epoch.compute_relative_polarimetry() + PhotoPolResult.objects.filter(epoch=epoch).delete() + epoch.compute_relative_polarimetry() def list_local_epochnames(): From e681c2d61e28909e1ce969d8f2a04b6521123202 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Wed, 15 Nov 2023 12:17:19 +0000 Subject: [PATCH 159/168] fix iop4 --- iop4lib/iop4.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/iop4lib/iop4.py b/iop4lib/iop4.py index 5fea413b..787fa37f 100644 --- a/iop4lib/iop4.py +++ b/iop4lib/iop4.py @@ -310,7 +310,7 @@ def main(): if not args.list_only: if len(epochnames_to_process) > 0: logger.info("Processing epochs.") - epochnames_to_process, _ = sorted(zip(*[(epochname, Epoch.epochname_to_tel_night(epochname)[1].strftime("%Y-%m-%d")) for epochname in epochnames_to_process]), key=lambda x: x[1], reverse=True) + epochnames_to_process, _ = list(zip(*sorted([(epochname, Epoch.epochname_to_tel_night(epochname)[1].strftime("%Y-%m-%d")) for epochname in epochnames_to_process], key=lambda x: x[1], reverse=True))) process_epochs(epochnames_to_process, args.force_rebuild, check_remote_list=~args.skip_remote_file_list) else: logger.info("Invoked with --list-only!") From bcb2ca8cca81b5c101466342a647b15d684c6a3e Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Wed, 15 Nov 2023 12:17:33 +0000 Subject: [PATCH 160/168] fixed aperture for bl lacertae --- iop4lib/instruments/dipol.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index 2f98e410..32c978d7 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -1120,6 +1120,13 @@ def _build_wcs_for_polarimetry_from_target_O_and_E(cls, redf: 'ReducedFit', summ def estimate_common_apertures(cls, reducedfits, reductionmethod=None, fit_boxsize=None, search_boxsize=(90,90)): 
aperpix, r_in, r_out, fit_res_dict = super().estimate_common_apertures(reducedfits, reductionmethod=reductionmethod, fit_boxsize=fit_boxsize, search_boxsize=search_boxsize, fwhm_min=5.0, fwhm_max=60) sigma = fit_res_dict['sigma'] + + if reducedfits[0].header_hintobject.name == "2200+420": + r = min(1.8*sigma, 17) + r_in = max(5*sigma, 80) + r_out = 2*r_in + return r, r_in, r_out, fit_res_dict + return 1.8*sigma, 5*sigma, 10*sigma, fit_res_dict From 2a792bcf470030190dfff051dbddf76bf9574ed4 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Wed, 15 Nov 2023 19:39:36 +0000 Subject: [PATCH 161/168] fix dipol uncerts --- iop4lib/instruments/dipol.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index 32c978d7..1df90565 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -1230,7 +1230,7 @@ def compute_relative_polarimetry(cls, polarimetry_group): dF_E = np.array([(fluxD['E'][angle][1]) for angle in angles_L]) F = (F_O - F_E) / (F_O + F_E) - dF = 1 / ( F_O**2 + F_E**2 ) * np.sqrt(dF_O**2 + dF_E**2) + dF = 2 / ( F_O + F_E )**2 * np.sqrt(F_E**2 * dF_O**2 + F_O**2 * dF_E**2) I = (F_O + F_E) dI = np.sqrt(dF_O**2 + dF_E**2) From 118955ea9e47b96baa267b66cc48a7da0e58a071 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Wed, 15 Nov 2023 19:52:30 +0000 Subject: [PATCH 162/168] Fix dipol uncertainties, again --- iop4lib/instruments/dipol.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index 1df90565..ca1ece0b 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -1270,11 +1270,10 @@ def _get_p_and_chi(Qr, Ur, dQr, dUr): # linear polarization (0 to 1) P = math.sqrt(Qr**2+Ur**2) dP = 1/P * math.sqrt((Qr*dQr)**2 + (Ur*dUr)**2) + # polarization angle (degrees) - x = -Qr/Ur - dx = math.sqrt( (-1/Ur)**2+dUr**2 + (+Qr/Ur**2)**2*dQr**2 ) chi = 0.5 * math.degrees(math.atan2(-Qr, Ur)) - dchi = 0.5 * 1/(1 + x**2) * dx + dchi = 0.5 * math.degrees( 1 / (Qr**2 + Ur**2) * math.sqrt((Qr*dUr)**2 + (Ur*dQr)**2) ) return P, chi, dP, dchi From b424035271e629876a27b7aa42fca03e6bc8fb51 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Wed, 15 Nov 2023 19:57:18 +0000 Subject: [PATCH 163/168] invoke GC explictly --- iop4lib/instruments/dipol.py | 14 +++++++++++--- iop4lib/utils/__init__.py | 2 ++ 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py index ca1ece0b..95acb7e2 100644 --- a/iop4lib/instruments/dipol.py +++ b/iop4lib/instruments/dipol.py @@ -24,6 +24,7 @@ import itertools import datetime import math +import gc # iop4lib imports from iop4lib.enums import * @@ -418,7 +419,7 @@ def _try_EO_method(): else: npixels_L = [64, 128] else: - n_seg_threshold_L = [6.0, 3.0, 1.5, 1.0] + n_seg_threshold_L = [6.0, 3.0, 1.5, 1.0, 0.9, 0.8, 0.7, 0.6] npixels_L = [64] for npixels, n_seg_threshold in itertools.product(npixels_L, n_seg_threshold_L): @@ -433,7 +434,7 @@ def _try_quad_method(): n_seg_threshold_L = [300, 200, 200, 100, 50, 25, 12, 6] npixels_L = [128, 64] else: - n_seg_threshold_L = [1.0] + n_seg_threshold_L = [1.0, 0.9] npixels_L = [64, 32] for npixels, n_seg_threshold in itertools.product(npixels_L, n_seg_threshold_L): @@ -449,7 +450,7 @@ def _try_catalog_method(): n_seg_threshold_L = [300, 200, 200, 100, 50, 25, 12, 6] npixels_L = [128, 64] else: - n_seg_threshold_L = [1.0] + n_seg_threshold_L = [1.0, 0.9, 0.8, 0.7, 0.6] npixels_L = 
[64, 32] if n_expected_calibrators > 0 or n_expected_simbad_sources > 0: @@ -839,6 +840,10 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', positions_to_rank = np.array(positions_to_rank) logger.debug(f"{positions_to_rank=}") + if len(positions_to_rank) < 2: + logger.error("No pairs found, returning success = False.") + return BuildWCSResult(success=False) + # N_max_to_rank = 10 # positions_to_rank = positions_to_rank[:N_max_to_rank] @@ -976,6 +981,9 @@ def _build_wcs_for_polarimetry_images_catalog_matching(cls, redf: 'ReducedFit', fig.savefig(Path(redf.filedpropdir) / "astrometry_summary.png", bbox_inches="tight") fig.clear() + + gc.collect() + return BuildWCSResult(success=True, wcslist=wcslist, info={}) diff --git a/iop4lib/utils/__init__.py b/iop4lib/utils/__init__.py index 9a7e06de..b9e54943 100644 --- a/iop4lib/utils/__init__.py +++ b/iop4lib/utils/__init__.py @@ -510,6 +510,8 @@ def get_candidate_rank_by_matchs(redf: 'ReducedFit', calibrators_fluxes.append(flux_counts) calibrators_fit_L.append((src, fitted_gaussian)) + gc.collect() + if len(calibrators_fluxes) > 0: # just look at how many matches with the calibs you find rank_1 = 1 - 0.5**np.sum(~np.isnan(calibrators_fluxes)) From ececc11892569595b73b36fe92da8ca0906631ae Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Wed, 15 Nov 2023 20:06:43 +0000 Subject: [PATCH 164/168] attempt to run tests in CI --- .github/workflows/ci.yml | 40 ++++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 957c1006..42320852 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -63,30 +63,30 @@ jobs: - name: Install the package in editable mode with all additional dependencies run: pip install --editable .[all] - # - name: Install httpdirfs to access astrometry index files without downloading them - # run: sudo apt install httpdirfs + - name: Install httpdirfs to access astrometry index files without downloading them + run: sudo apt install httpdirfs - # - name: Mount astrometry index file in default location - # run: mkdir -p $HOME/.astrometry_cache/5200 && httpdirfs --cache 'https://portal.nersc.gov/project/cosmo/temp/dstn/index-5200/LITE/' $HOME/.astrometry_cache/5200/ + - name: Mount astrometry index file in default location + run: mkdir -p $HOME/.astrometry_cache/5200 && httpdirfs --cache 'https://portal.nersc.gov/project/cosmo/temp/dstn/index-5200/LITE/' $HOME/.astrometry_cache/5200/ - # - name: Check that it was correctly mounted - # run: ls $HOME/.astrometry_cache/5200/index-5200-00.fits + - name: Check that it was correctly mounted + run: ls $HOME/.astrometry_cache/5200/index-5200-00.fits - # - name: Download test data - # env: - # TEST_DATA_PASSWORD: ${{ secrets.test_data_password }} - # run: | - # export TESTDATA_MD5SUM=`grep 'TESTDATA_MD5SUM' ./tests/conftest.py | awk -F"'" '{print $2}' | tr -d '\n'` - # wget --post-data "pass=$TEST_DATA_PASSWORD" "https://vhega.iaa.es/iop4/iop4testdata.tar.gz?md5sum=$TESTDATA_MD5SUM" -O $HOME/iop4testdata.tar.gz + - name: Download test data + env: + TEST_DATA_PASSWORD: ${{ secrets.test_data_password }} + run: | + export TESTDATA_MD5SUM=`grep 'TESTDATA_MD5SUM' ./tests/conftest.py | awk -F"'" '{print $2}' | tr -d '\n'` + wget --post-data "pass=$TEST_DATA_PASSWORD" "https://vhega.iaa.es/iop4/iop4testdata.tar.gz?md5sum=$TESTDATA_MD5SUM" -O $HOME/iop4testdata.tar.gz - # - name: Run tests (with -o log_cli=true -o log_cli_level=DEBUG to debug CI 
actions) - # run: pytest -o log_cli=true -o log_cli_level=DEBUG -vxs tests/ - - # - name: Output some info for debugging - # # | true so erros in this step are ignored - # run: | - # df -h || true - # du -sh $HOME/.cache/httpdirfs/ || true + - name: Run tests (with -o log_cli=true -o log_cli_level=DEBUG to debug CI actions) + run: pytest -o log_cli=true -o log_cli_level=DEBUG -vxs tests/ + + - name: Output some info for debugging + # | true so erros in this step are ignored + run: | + df -h || true + du -sh $HOME/.cache/httpdirfs/ || true From 651434350a7403faf08e4db9d6b1572f9305e5f5 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Wed, 15 Nov 2023 20:41:40 +0000 Subject: [PATCH 165/168] dipol tests: more tests for astro calibration * using the target E,O method and a star image --- tests/test_osnt090_dipol.py | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/tests/test_osnt090_dipol.py b/tests/test_osnt090_dipol.py index 03a73d0d..896c6c4a 100644 --- a/tests/test_osnt090_dipol.py +++ b/tests/test_osnt090_dipol.py @@ -28,7 +28,7 @@ def test_astrometric_calibration(load_test_catalog): from iop4lib.enums import IMGTYPES, SRCTYPES from iop4lib.utils.quadmatching import distance - epochname_L = ["OSN-T090/2023-09-26", "OSN-T090/2023-10-11", "OSN-T090/2023-10-12", "OSN-T090/2023-11-06"] + epochname_L = ["OSN-T090/2023-10-25", "OSN-T090/2023-09-26", "OSN-T090/2023-10-11", "OSN-T090/2023-10-12", "OSN-T090/2023-11-06"] epoch_L = [Epoch.create(epochname=epochname) for epochname in epochname_L] for epoch in epoch_L: @@ -88,4 +88,23 @@ def test_astrometric_calibration(load_test_catalog): assert (distance(pos_O, [618, 259]) < 50) # O position assert (distance(pos_E, [402, 268]) < 50) # E position - \ No newline at end of file + + # Test 4. 
Polarimetry field using target E, O + + fileloc = "OSN-T090/2023-10-25/HD204827_R_IAR-0384.fts" + rawfit = RawFit.by_fileloc(fileloc=fileloc) + redf = ReducedFit.create(rawfit=rawfit) + redf.build_file() + + # check source position in the image + + src = AstroSource.objects.get(name="HD 204827") + + assert redf.header_hintobject.name == src.name + assert redf.sources_in_field.filter(name=src.name).exists() + + pos_O = src.coord.to_pixel(wcs=redf.wcs1) + pos_E = src.coord.to_pixel(wcs=redf.wcs2) + + assert (distance(pos_O, [684, 397]) < 50) # O position + assert (distance(pos_E, [475, 411]) < 50) # E position \ No newline at end of file From 2eef18c32ca452ecac0a88375fec6a1408f69e97 Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Wed, 15 Nov 2023 21:01:27 +0000 Subject: [PATCH 166/168] bump test dataset version --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 81c0f99d..0edfd7b6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -12,7 +12,7 @@ TEST_CONFIG = str(Path(iop4conf.datadir) / "config.tests.yaml") TESTDATA_FPATH = str(Path("~/iop4testdata.tar.gz").expanduser()) -TESTDATA_MD5SUM = '0f5432c54d6173f861b64d3d4280e2f7' +TESTDATA_MD5SUM = '4d393377f8c659e2ead2fa252a9a38b2' TEST_DATADIR = str(Path(iop4conf.datadir) / "iop4testdata") TEST_DB_PATH = str(Path(iop4conf.db_path).expanduser().parent / ("test_" + str(Path(iop4conf.db_path).name))) From 1dc5add12d239366e0d752ba6a94b4110d74239e Mon Sep 17 00:00:00 2001 From: Juan Escudero Date: Wed, 15 Nov 2023 21:45:43 +0000 Subject: [PATCH 167/168] increase astrometry timeout in default config --- config/config.example.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/config.example.yaml b/config/config.example.yaml index b949c237..0351a59f 100644 --- a/config/config.example.yaml +++ b/config/config.example.yaml @@ -10,7 +10,7 @@ set_rawdata_readonly: False # True / False (sets raw fits file to readonl db_path: ~/.iop4data/iop4.db # Path to iop4 sqlite database file. astrometry_cache_path: ~/.astrometry_cache/ # Path to store the astromery index files. max_concurrent_threads: 4 # Number of threads / processes to use (e.g. 4). -astrometry_timeout: 7 # Timeout in minutes for astrometry solving. +astrometry_timeout: 20 # Timeout in minutes for astrometry solving. 
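For context, the astrometry_timeout setting edited above is consumed in iop4lib/utils/parallel.py (patch 157 earlier in this series) through signal.alarm. The following is a minimal standalone sketch of that alarm-based timeout pattern, assuming a POSIX system; run_with_timeout, _timeout_handler and ASTROMETRY_TIMEOUT_MIN are illustrative stand-ins, not IOP4 names.

# minimal sketch of the SIGALRM timeout pattern (POSIX only; signal.alarm is unavailable on Windows)
import signal

ASTROMETRY_TIMEOUT_MIN = 20  # stand-in for iop4conf.astrometry_timeout

def _timeout_handler(signum, frame):
    raise TimeoutError("astrometric calibration took too long")

def run_with_timeout(func, *args, **kwargs):
    signal.signal(signal.SIGALRM, _timeout_handler)
    signal.alarm(ASTROMETRY_TIMEOUT_MIN * 60)  # schedule SIGALRM after N minutes
    try:
        return func(*args, **kwargs)           # e.g. the per-file reduction call
    finally:
        signal.alarm(0)                        # always cancel the pending alarm
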

 ###################
 ### RAY CLUSTER ###

From 3f3276c364bf59a7b0bd791525cfc0f969a2434e Mon Sep 17 00:00:00 2001
From: Juan Escudero
Date: Wed, 15 Nov 2023 23:47:58 +0000
Subject: [PATCH 168/168] rename hash function

---
 iop4lib/instruments/dipol.py  | 10 +++++-----
 iop4lib/utils/quadmatching.py | 12 ++++++------
 2 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/iop4lib/instruments/dipol.py b/iop4lib/instruments/dipol.py
index 95acb7e2..bd31099d 100644
--- a/iop4lib/instruments/dipol.py
+++ b/iop4lib/instruments/dipol.py
@@ -558,7 +558,7 @@ def _build_wcs_for_polarimetry_images_photo_quads(cls, redf: 'ReducedFit', summa

         idx = np.s_[y_start:y_end, x_start:x_end]

-        photdata_subframe = redf_phot.mdata[idx] # if we use the hash_juan_old, which is not invariant under flipping, we need to flip the image in y (redf_phot.mdata[idx][::-1,:])
+        photdata_subframe = redf_phot.mdata[idx] # if we use the hash_ish_old, which is not invariant under flipping, we need to flip the image in y (redf_phot.mdata[idx][::-1,:])

         # find 10 brightest sources in each field

@@ -594,8 +594,8 @@ def _build_wcs_for_polarimetry_images_photo_quads(cls, redf: 'ReducedFit', summa
         quads_1 = np.array(list(itertools.combinations(sets_L[0], 4)))
         quads_2 = np.array(list(itertools.combinations(sets_L[1], 4)))

-        from iop4lib.utils.quadmatching import hash_juan, distance, order, qorder_juan, find_linear_transformation
-        hash_func, qorder = hash_juan, qorder_juan
+        from iop4lib.utils.quadmatching import hash_ish, distance, order, qorder_ish, find_linear_transformation
+        hash_func, qorder = hash_ish, qorder_ish

         hashes_1 = np.array([hash_func(quad) for quad in quads_1])
         hashes_2 = np.array([hash_func(quad) for quad in quads_2])
@@ -682,8 +682,8 @@ def _build_wcs_for_polarimetry_images_photo_quads(cls, redf: 'ReducedFit', summa

         # give a unique ordering to the quads

-        quads_1 = [qorder_juan(quad) for quad in quads_1]
-        quads_2 = [qorder_juan(quad) for quad in quads_2]
+        quads_1 = [qorder_ish(quad) for quad in quads_1]
+        quads_2 = [qorder_ish(quad) for quad in quads_2]

         # get the pre wcs with the target in the center of the image

diff --git a/iop4lib/utils/quadmatching.py b/iop4lib/utils/quadmatching.py
index 19aba3f0..a25d36ed 100644
--- a/iop4lib/utils/quadmatching.py
+++ b/iop4lib/utils/quadmatching.py
@@ -65,7 +65,7 @@ def order(points):
     return sorted_points


-def hash_juan_old(points):
+def hash_ish_old(points):
     P1,P2,P3,P4 = points
     P1,P2,P3,P4 = order(points)
     d1,d2,d3,d4 = map(np.linalg.norm, [P2-P1,P3-P2,P4-P3,P1-P4])
@@ -73,7 +73,7 @@ def hash_juan_old(points):


-def quad_coords_juan(A,B,C,D):
+def quad_coords_ish(A,B,C,D):
     P = (A+B)/2

     A, B, C, D = A-P,B-P,C-P,D-P
@@ -118,10 +118,10 @@ def force_AB_maxdist(points):
     return result


-def hash_juan(points):
+def hash_ish(points):
     A,B,C,D = points
     A,B,C,D = force_AB_maxdist([A,B,C,D])
-    A,B,C,D = quad_coords_juan(A,B,C,D)
+    A,B,C,D = quad_coords_ish(A,B,C,D)

     FX = np.array([[-1,0],[0,1]])
     FY = np.array([[1,0],[0,-1]])
@@ -141,11 +141,11 @@ def hash_juan(points):
     return d1,d2,d3,d4


-def qorder_juan(points):
+def qorder_ish(points):
     A,B,C,D = points
     A,B,C,D = force_AB_maxdist([A,B,C,D])
-    Ap,Bp,Cp,Dp = quad_coords_juan(A,B,C,D)
 if not distance(Ap,Bp)
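
Note on the quad matching touched by this last patch: DIPOL matches quads (4-point asterisms) of bright sources between the polarimetry frame and the photometry field by comparing a geometric hash of each quad, and only then fits the transformation between the frames (presumably what find_linear_transformation is imported for). The sketch below is only an illustration of why such a hash works; simple_quad_hash is a hypothetical stand-in built from sorted pairwise-distance ratios, not the actual hash_ish / quad_coords_ish implementation from iop4lib.utils.quadmatching. It checks that the descriptor is unchanged when a quad is rotated, flipped, scaled and translated (flip invariance being the property the dipol.py comment contrasts with hash_ish_old).

import numpy as np

def simple_quad_hash(points):
    # Sorted pairwise distances of the 4 points, normalized by the largest one.
    # Distance ratios are unchanged by translation, rotation, scaling and flips.
    pts = np.asarray(points, dtype=float)
    d = sorted(np.linalg.norm(pts[i] - pts[j]) for i in range(4) for j in range(i + 1, 4))
    return np.array(d[:-1]) / d[-1]

rng = np.random.default_rng(1)
quad = rng.uniform(0, 1024, size=(4, 2))      # four detected source positions, in pixels

theta = np.deg2rad(37.0)
R = np.array([[np.cos(theta), -np.sin(theta)],
              [np.sin(theta),  np.cos(theta)]])
F = np.diag([1.0, -1.0])                      # flip in y

quad_t = 0.8 * (quad @ R.T @ F) + np.array([150.0, -40.0])   # rotate, flip, scale, shift

print(np.allclose(simple_quad_hash(quad), simple_quad_hash(quad_t)))   # expected: True

Because the descriptor only needs to be discriminative and invariant under these transformations, comparing hashes of all candidate quads from both frames is enough to pick matching asterisms before solving for the frame-to-frame transformation.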