Change base classes of PeaksPaired
dachengx committed Apr 29, 2024
1 parent e41ced5 · commit 6a91637
Showing 6 changed files with 23 additions and 37 deletions.
2 changes: 2 additions & 0 deletions axidence/dtypes.py
@@ -7,10 +7,12 @@

kind_colors.update(
{
+ "run_meta": "#ffff00",
"events_salting": "#0080ff",
"peaks_salted": "#00c0ff",
"events_salted": "#00ffff",
"peaks_paired": "#ff00ff",
+ "truth_paired": "#ff00ff",
"events_paired": "#ffccff",
"isolated_s1": "#80ff00",
"isolated_s2": "#80ff00",
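The two added entries register display colors for the run_meta and truth_paired data kinds handled by the plugins changed in this commit. A minimal sketch of how such a kind-to-color dict can be consumed, assuming kind_colors is a plain dict used for plots or dependency graphs (the helper below is hypothetical, not part of axidence):

    # Hypothetical helper: look up the display color registered for a data kind.
    kind_colors = {
        "run_meta": "#ffff00",
        "truth_paired": "#ff00ff",
    }

    def color_for_kind(kind: str, default: str = "#888888") -> str:
        """Return the registered color, falling back to grey for unknown kinds."""
        return kind_colors.get(kind, default)

    print(color_for_kind("truth_paired"))  # -> #ff00ff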
1 change: 1 addition & 0 deletions axidence/plugins/meta/run_meta.py
@@ -10,6 +10,7 @@ class RunMeta(ExhaustPlugin):
__version__ = "0.0.0"
depends_on = "event_basics"
provides = "run_meta"
+ data_kind = "run_meta"
save_when = strax.SaveWhen.EXPLICIT

dtype = strax.time_fields
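Declaring data_kind = "run_meta" makes the plugin's output its own data kind, which is what lets EventsSalting further down declare depends_on = "run_meta". A minimal sketch of such a plugin, assuming the standard strax plugin API (start and end are passed to compute when the signature asks for them); this is not the actual RunMeta implementation:

    import numpy as np
    import strax

    class RunMetaSketch(strax.Plugin):
        """Sketch only: emit one row spanning the chunk it sees."""
        __version__ = "0.0.0"
        depends_on = "event_basics"
        provides = "run_meta"
        data_kind = "run_meta"  # output is its own kind, so others can depend on "run_meta"
        save_when = strax.SaveWhen.EXPLICIT
        dtype = strax.time_fields

        def compute(self, events, start, end):
            result = np.zeros(1, dtype=self.dtype)
            result["time"], result["endtime"] = start, end
            return result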
47 changes: 13 additions & 34 deletions axidence/plugins/pairing/peaks_paired.py
@@ -9,10 +9,10 @@

from ...utils import copy_dtype
from ...dtypes import peak_positions_dtype
- from ...plugin import ExhaustPlugin, RunMetaPlugin
+ from ...plugin import ExhaustPlugin


- class PeaksPaired(ExhaustPlugin, RunMetaPlugin):
+ class PeaksPaired(ExhaustPlugin):
__version__ = "0.0.0"
depends_on = ("isolated_s1", "isolated_s2", "cut_event_building_salted", "event_shadow_salted")
provides = ("peaks_paired", "truth_paired")
@@ -25,24 +25,6 @@ class PeaksPaired(ExhaustPlugin, RunMetaPlugin):
help="Seed for pairing",
)

- real_run_start = straxen.URLConfig(
- default=None,
- type=(int, None),
- help="Real start time of run [ns]",
- )
-
- real_run_end = straxen.URLConfig(
- default=None,
- type=(int, None),
- help="Real start time of run [ns]",
- )
-
- strict_real_run_time_check = straxen.URLConfig(
- default=True,
- type=bool,
- help="Whether to strictly check the real run time is provided",
- )
-
min_drift_length = straxen.URLConfig(
default=0,
type=(int, float),
@@ -135,7 +117,6 @@ def infer_dtype(self):
return dict(peaks_paired=peaks_dtype, truth_paired=truth_dtype)

def setup(self, prepare=True):
- self.init_run_meta()
self.min_drift_time = int(self.min_drift_length / self.electron_drift_velocity)
self.max_drift_time = int(self.max_drift_length / self.electron_drift_velocity)
if self.pairing_seed is None:
@@ -211,6 +192,8 @@ def split_chunks(self, n_peaks):

def build_arrays(
self,
+ start,
+ end,
drift_time,
s1_group_number,
s2_group_number,
@@ -223,9 +206,7 @@

# set center time of S1 & S2
# paired events are separated by roughly `event_interval`
- s1_center_time = (
- np.arange(len(drift_time)).astype(int) * self.paring_event_interval + self.run_start
- )
+ s1_center_time = np.arange(len(drift_time)).astype(int) * self.paring_event_interval + start
s2_center_time = s1_center_time + drift_time
# total number of isolated S1 & S2 peaks
peaks_arrays = np.zeros(n_peaks.sum(), dtype=self.dtype["peaks_paired"])
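After this change the synthetic event times are anchored to the chunk start handed to build_arrays instead of self.run_start: paired event i gets an S1 center time of start + i * paring_event_interval, and its S2 follows one drift time later. A quick numerical illustration with made-up values:

    import numpy as np

    start = 1_000_000_000               # chunk start [ns], made-up
    paring_event_interval = 10_000_000  # spacing between paired events [ns], made-up
    drift_time = np.array([50_000, 120_000, 80_000])  # [ns], made-up

    s1_center_time = np.arange(len(drift_time)).astype(int) * paring_event_interval + start
    s2_center_time = s1_center_time + drift_time

    print(s1_center_time)  # [1000000000 1010000000 1020000000]
    print(s2_center_time)  # [1000050000 1010120000 1020080000]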
@@ -322,7 +303,7 @@ def build_arrays(

return peaks_arrays, truth_arrays

- def compute(self, isolated_s1, isolated_s2, events_salted):
+ def compute(self, isolated_s1, isolated_s2, events_salted, start, end):
for i, s in enumerate([isolated_s1, isolated_s2]):
if np.any(np.diff(s["group_number"]) < 0):
raise ValueError(f"Group number is not sorted in isolated S{i}!")
@@ -350,7 +331,7 @@ def compute(self, isolated_s1, isolated_s2, events_salted):
paring_rate_full, s1_group_number, s2_group_number, drift_time = self.simple_pairing(
isolated_s1,
main_isolated_s2,
- self.run_time,
+ (end - start) / units.s,
self.max_drift_time,
self.min_drift_time,
paring_rate_correction,
@@ -377,6 +358,8 @@ def compute(self, isolated_s1, isolated_s2, events_salted):
chunk_i = 0
left_i, right_i = slices[chunk_i]
peaks_arrays, truth_arrays = self.build_arrays(
+ start,
+ end,
drift_time[left_i:right_i],
s1_group_number[left_i:right_i],
s2_group_number[left_i:right_i],
@@ -389,18 +372,14 @@ def compute(self, isolated_s1, isolated_s2, events_salted):
peaks_arrays["event_number"] += left_i
truth_arrays["event_number"] += left_i

- start = (
- self.run_start + left_i * self.paring_event_interval - self.paring_event_interval // 2
- )
- end = (
- self.run_start + right_i * self.paring_event_interval - self.paring_event_interval // 2
- )
+ _start = start + left_i * self.paring_event_interval - int(self.paring_event_interval // 2)
+ _end = start + right_i * self.paring_event_interval - int(self.paring_event_interval // 2)
result = dict()
result["peaks_paired"] = self.chunk(
- start=start, end=end, data=peaks_arrays, data_type="peaks_paired"
+ start=_start, end=_end, data=peaks_arrays, data_type="peaks_paired"
)
result["truth_paired"] = self.chunk(
- start=start, end=end, data=truth_arrays, data_type="truth_paired"
+ start=_start, end=_end, data=truth_arrays, data_type="truth_paired"
)
# chunk size should be less than default chunk size in strax
assert result["peaks_paired"].nbytes < self.chunk_target_size_mb * 1e6
1 change: 1 addition & 0 deletions axidence/plugins/salting/event_fields.py
@@ -7,6 +7,7 @@


class EventFieldsSalted(Plugin):
+ child_plugin = True

def compute(self, events_salted, peaks_salted, peaks):
_peaks = merge_salted_real(peaks_salted, peaks, peaks.dtype)
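child_plugin = True marks EventFieldsSalted (and, below, EventsSalting and PeaksSalted) as strax child plugins, i.e. subclasses that are registered as plugins in their own right while reusing the parent's machinery; as I understand the strax mechanism, the flag is needed so inherited options are resolved correctly. A generic sketch with made-up class and data-type names:

    import numpy as np
    import strax

    class ParentSketch(strax.Plugin):
        """Made-up parent plugin."""
        __version__ = "0.0.0"
        depends_on = "peaks"
        provides = "parent_sketch"
        dtype = strax.time_fields

        def compute(self, peaks):
            return np.zeros(0, dtype=self.dtype)

    class ChildSketch(ParentSketch):
        """Registered separately, but inherits compute and options from the parent."""
        __version__ = "0.0.0"
        child_plugin = True        # flag strax that this subclass is a child plugin
        provides = "child_sketch"
        data_kind = "child_sketch"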
8 changes: 5 additions & 3 deletions axidence/plugins/salting/events_salting.py
@@ -10,6 +10,7 @@

class EventsSalting(ExhaustPlugin, DownChunkingPlugin, EventPositions, EventBasics):
__version__ = "0.0.0"
+ child_plugin = True
depends_on = "run_meta"
provides = "events_salting"
data_kind = "events_salting"
@@ -114,8 +115,8 @@ def sample_time(self, start, end):
"""Sample the time according to the start and end of the run."""
self.event_time_interval = int(units.s // self.salting_rate)

- # if units.s / self.salting_rate < self.drift_time_max * self.n_drift_time_window * 2:
- # raise ValueError("Salting rate is too high according the drift time window!")
+ if units.s / self.salting_rate < self.drift_time_max * self.n_drift_time_window * 2:
+     raise ValueError("Salting rate is too high according the drift time window!")

time = np.arange(
start + self.veto_length_run_start,
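Re-enabling this check ties the salting rate to the drift-time window: salted events are spaced roughly units.s / salting_rate nanoseconds apart, and that spacing must exceed 2 * n_drift_time_window * drift_time_max, otherwise neighbouring salted events could fall inside each other's drift window. A quick numeric check, assuming units.s is one second expressed in nanoseconds and using made-up values:

    units_s = 1_000_000_000        # 1 s in ns (assumed time base)
    salting_rate = 100             # [Hz], made-up
    drift_time_max = 2_500_000     # [ns], made-up (~2.5 ms)
    n_drift_time_window = 1        # made-up

    spacing = units_s / salting_rate                  # 1e7 ns between salted events
    limit = drift_time_max * n_drift_time_window * 2  # 5e6 ns
    print(spacing < limit)  # False -> this rate would pass the check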
@@ -189,7 +190,7 @@ def sampling(self, start, end):

self.set_chunk_splitting()

- def compute(self, events, start, end):
+ def compute(self, run_meta, start, end):
"""Copy and assign the salting events into chunk."""
self.sampling(start, end)
for chunk_i in range(len(self.slices)):
@@ -204,6 +205,7 @@
_end = end
else:
_end = self.events_salting["time"][indices[1] - 1] + self.time_right

yield self.chunk(
start=_start, end=_end, data=self.events_salting[indices[0] : indices[1]]
)
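Because EventsSalting is a DownChunkingPlugin, compute is a generator: the run_meta input (a single chunk for an ExhaustPlugin) is split into several smaller output chunks whose [start, end) ranges must be contiguous and cover the sampled events. A stripped-down sketch of that yield pattern, with made-up names, not the real plugin:

    import numpy as np
    import strax

    class SaltingSketch(strax.DownChunkingPlugin):
        """Sketch only: yield several sub-chunks for one input chunk."""
        __version__ = "0.0.0"
        depends_on = "run_meta"
        provides = "salting_sketch"
        data_kind = "salting_sketch"
        dtype = strax.time_fields

        def compute(self, run_meta, start, end):
            edges = np.linspace(start, end, 5).astype(np.int64)  # 4 sub-chunks
            for lo, hi in zip(edges[:-1], edges[1:]):
                data = np.zeros(1, dtype=self.dtype)
                data["time"], data["endtime"] = lo, hi
                yield self.chunk(start=int(lo), end=int(hi), data=data)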
1 change: 1 addition & 0 deletions axidence/plugins/salting/peaks_salted.py
@@ -8,6 +8,7 @@

class PeaksSalted(PeakBasics):
__version__ = "0.0.0"
+ child_plugin = True
depends_on = "events_salting"
provides = "peaks_salted"
data_kind = "peaks_salted"
