diff --git a/axidence/dtypes.py b/axidence/dtypes.py
index 568ad63..3dea0e0 100644
--- a/axidence/dtypes.py
+++ b/axidence/dtypes.py
@@ -7,10 +7,12 @@
 kind_colors.update(
     {
+        "run_meta": "#ffff00",
         "events_salting": "#0080ff",
         "peaks_salted": "#00c0ff",
         "events_salted": "#00ffff",
         "peaks_paired": "#ff00ff",
+        "truth_paired": "#ff00ff",
         "events_paired": "#ffccff",
         "isolated_s1": "#80ff00",
         "isolated_s2": "#80ff00",
diff --git a/axidence/plugins/meta/run_meta.py b/axidence/plugins/meta/run_meta.py
index 4147254..ee7d374 100644
--- a/axidence/plugins/meta/run_meta.py
+++ b/axidence/plugins/meta/run_meta.py
@@ -10,6 +10,7 @@ class RunMeta(ExhaustPlugin):
     __version__ = "0.0.0"
     depends_on = "event_basics"
     provides = "run_meta"
+    data_kind = "run_meta"
     save_when = strax.SaveWhen.EXPLICIT
     dtype = strax.time_fields
diff --git a/axidence/plugins/pairing/peaks_paired.py b/axidence/plugins/pairing/peaks_paired.py
index 921b695..53c650d 100644
--- a/axidence/plugins/pairing/peaks_paired.py
+++ b/axidence/plugins/pairing/peaks_paired.py
@@ -9,10 +9,10 @@
 from ...utils import copy_dtype
 from ...dtypes import peak_positions_dtype
-from ...plugin import ExhaustPlugin, RunMetaPlugin
+from ...plugin import ExhaustPlugin


-class PeaksPaired(ExhaustPlugin, RunMetaPlugin):
+class PeaksPaired(ExhaustPlugin):
     __version__ = "0.0.0"
     depends_on = ("isolated_s1", "isolated_s2", "cut_event_building_salted", "event_shadow_salted")
     provides = ("peaks_paired", "truth_paired")
@@ -25,24 +25,6 @@ class PeaksPaired(ExhaustPlugin, RunMetaPlugin):
         help="Seed for pairing",
     )

-    real_run_start = straxen.URLConfig(
-        default=None,
-        type=(int, None),
-        help="Real start time of run [ns]",
-    )
-
-    real_run_end = straxen.URLConfig(
-        default=None,
-        type=(int, None),
-        help="Real start time of run [ns]",
-    )
-
-    strict_real_run_time_check = straxen.URLConfig(
-        default=True,
-        type=bool,
-        help="Whether to strictly check the real run time is provided",
-    )
-
     min_drift_length = straxen.URLConfig(
         default=0,
         type=(int, float),
@@ -135,7 +117,6 @@ def infer_dtype(self):
         return dict(peaks_paired=peaks_dtype, truth_paired=truth_dtype)

     def setup(self, prepare=True):
-        self.init_run_meta()
         self.min_drift_time = int(self.min_drift_length / self.electron_drift_velocity)
         self.max_drift_time = int(self.max_drift_length / self.electron_drift_velocity)
         if self.pairing_seed is None:
@@ -211,6 +192,8 @@ def split_chunks(self, n_peaks):

     def build_arrays(
         self,
+        start,
+        end,
         drift_time,
         s1_group_number,
         s2_group_number,
@@ -223,9 +206,7 @@ def build_arrays(
         # set center time of S1 & S2
         # paired events are separated by roughly `event_interval`
-        s1_center_time = (
-            np.arange(len(drift_time)).astype(int) * self.paring_event_interval + self.run_start
-        )
+        s1_center_time = np.arange(len(drift_time)).astype(int) * self.paring_event_interval + start
         s2_center_time = s1_center_time + drift_time
         # total number of isolated S1 & S2 peaks
         peaks_arrays = np.zeros(n_peaks.sum(), dtype=self.dtype["peaks_paired"])
@@ -322,7 +303,7 @@ def build_arrays(
         return peaks_arrays, truth_arrays

-    def compute(self, isolated_s1, isolated_s2, events_salted):
+    def compute(self, isolated_s1, isolated_s2, events_salted, start, end):
         for i, s in enumerate([isolated_s1, isolated_s2]):
             if np.any(np.diff(s["group_number"]) < 0):
                 raise ValueError(f"Group number is not sorted in isolated S{i}!")
@@ -350,7 +331,7 @@ def compute(self, isolated_s1, isolated_s2, events_salted):
         paring_rate_full, s1_group_number, s2_group_number, drift_time = self.simple_pairing(
             isolated_s1,
             main_isolated_s2,
-            self.run_time,
+            (end - start) / units.s,
             self.max_drift_time,
             self.min_drift_time,
             paring_rate_correction,
         )
@@ -377,6 +358,8 @@ def compute(self, isolated_s1, isolated_s2, events_salted):
         chunk_i = 0
         left_i, right_i = slices[chunk_i]
         peaks_arrays, truth_arrays = self.build_arrays(
+            start,
+            end,
             drift_time[left_i:right_i],
             s1_group_number[left_i:right_i],
             s2_group_number[left_i:right_i],
@@ -389,18 +372,14 @@ def compute(self, isolated_s1, isolated_s2, events_salted):
         peaks_arrays["event_number"] += left_i
         truth_arrays["event_number"] += left_i

-        start = (
-            self.run_start + left_i * self.paring_event_interval - self.paring_event_interval // 2
-        )
-        end = (
-            self.run_start + right_i * self.paring_event_interval - self.paring_event_interval // 2
-        )
+        _start = start + left_i * self.paring_event_interval - int(self.paring_event_interval // 2)
+        _end = start + right_i * self.paring_event_interval - int(self.paring_event_interval // 2)
         result = dict()
         result["peaks_paired"] = self.chunk(
-            start=start, end=end, data=peaks_arrays, data_type="peaks_paired"
+            start=_start, end=_end, data=peaks_arrays, data_type="peaks_paired"
         )
         result["truth_paired"] = self.chunk(
-            start=start, end=end, data=truth_arrays, data_type="truth_paired"
+            start=_start, end=_end, data=truth_arrays, data_type="truth_paired"
         )
         # chunk size should be less than default chunk size in strax
         assert result["peaks_paired"].nbytes < self.chunk_target_size_mb * 1e6
diff --git a/axidence/plugins/salting/event_fields.py b/axidence/plugins/salting/event_fields.py
index 15fc857..5f86bf2 100644
--- a/axidence/plugins/salting/event_fields.py
+++ b/axidence/plugins/salting/event_fields.py
@@ -7,6 +7,7 @@
 class EventFieldsSalted(Plugin):
+    child_plugin = True
     def compute(self, events_salted, peaks_salted, peaks):
         _peaks = merge_salted_real(peaks_salted, peaks, peaks.dtype)
diff --git a/axidence/plugins/salting/events_salting.py b/axidence/plugins/salting/events_salting.py
index 771ff3c..06c326a 100644
--- a/axidence/plugins/salting/events_salting.py
+++ b/axidence/plugins/salting/events_salting.py
@@ -10,6 +10,7 @@ class EventsSalting(ExhaustPlugin, DownChunkingPlugin, EventPositions, EventBasics):
     __version__ = "0.0.0"
+    child_plugin = True
     depends_on = "run_meta"
     provides = "events_salting"
     data_kind = "events_salting"
@@ -114,8 +115,8 @@ def sample_time(self, start, end):
         """Sample the time according to the start and end of the run."""
         self.event_time_interval = int(units.s // self.salting_rate)

-        # if units.s / self.salting_rate < self.drift_time_max * self.n_drift_time_window * 2:
-        #     raise ValueError("Salting rate is too high according the drift time window!")
+        if units.s / self.salting_rate < self.drift_time_max * self.n_drift_time_window * 2:
+            raise ValueError("Salting rate is too high according the drift time window!")

         time = np.arange(
             start + self.veto_length_run_start,
@@ -189,7 +190,7 @@ def sampling(self, start, end):
         self.set_chunk_splitting()

-    def compute(self, events, start, end):
+    def compute(self, run_meta, start, end):
         """Copy and assign the salting events into chunk."""
         self.sampling(start, end)
         for chunk_i in range(len(self.slices)):
@@ -204,6 +205,7 @@ def compute(self, events, start, end):
                 _end = end
             else:
                 _end = self.events_salting["time"][indices[1] - 1] + self.time_right
+
             yield self.chunk(
                 start=_start, end=_end, data=self.events_salting[indices[0] : indices[1]]
             )
diff --git a/axidence/plugins/salting/peaks_salted.py b/axidence/plugins/salting/peaks_salted.py
index afd9d13..f9f5dbb 100644
--- a/axidence/plugins/salting/peaks_salted.py
+++ b/axidence/plugins/salting/peaks_salted.py
@@ -8,6 +8,7 @@ class PeaksSalted(PeakBasics):
     __version__ = "0.0.0"
+    child_plugin = True
     depends_on = "events_salting"
     provides = "peaks_salted"
     data_kind = "peaks_salted"