-
Notifications
You must be signed in to change notification settings - Fork 1
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Peak processing: restructured plugins; fixed area-fraction-top calculation; peak classification works well and was moved to peak_basics (to tune with options); introduced gain correction (to tune with options, for now); introduced position reconstruction with center of gravity. Event processing: restructured plugins; added a simple S2 area correction for electron lifetime (to tune with options, for now); added positions at event level; added new peak info at event level (waveform and area per channel). Extra: a new function to see default options per plugin (st.get_config_defaults); more plugins registered in the context (and tested!). --------- Co-authored-by: acolijn <[email protected]> Co-authored-by: tobiasdenhollander <[email protected]>
- Loading branch information
1 parent
64d0378
commit 39ade32
Showing
19 changed files
with
422 additions
and
403 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file was deleted.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,100 @@ | ||
from immutabledict import immutabledict
import numpy as np
import strax
import amstrax


# strax.exporter() returns a decorator that appends decorated names to
# __all__, making @export the module's mechanism for declaring its public API.
export, __all__ = strax.exporter()
@export
@strax.takes_config(
    strax.Option(
        "channel_map",
        track=False,
        type=immutabledict,
        help="immutabledict mapping subdetector to (min, max) "
    ),
)
class EventAreaPerChannel(strax.Plugin):
    """Simple plugin that provides area per channel for main and alternative S1/S2 in the event."""

    depends_on = ("event_basics", "peaks")
    provides = ("event_area_per_channel", "event_n_channel")
    data_kind = immutabledict(zip(provides, ("events", "events")))
    __version__ = "0.1.1"

    compressor = "zstd"
    # area-per-channel arrays are large, so only store them on explicit request;
    # the small n-channel output is always saved.
    save_when = immutabledict({
        "event_area_per_channel": strax.SaveWhen.EXPLICIT,
        "event_n_channel": strax.SaveWhen.ALWAYS,
    })

    def infer_dtype(self):
        """Derive output dtypes from the upstream peaks dtype.

        For each of the four peak roles (main/alternative S1/S2) the
        area-per-channel output carries the per-channel area array plus the
        peak's length and sample width; the n-channel output only carries
        the main-S1 contributing-PMT count.
        """
        # Field definitions of the peaks data type; reused so the
        # area_per_channel / length / dt columns match the peaks exactly.
        peak_fields = self.deps["peaks"].dtype_for("peaks").fields
        labels = {
            "s1": "main S1",
            "s2": "main S2",
            "alt_s1": "alternative S1",
            "alt_s2": "alternative S2",
        }
        apc_dtype = []
        for key, label in labels.items():
            apc_dtype.append((
                (f"Area per channel for {label}", f"{key}_area_per_channel"),
                peak_fields["area_per_channel"][0],
            ))
            apc_dtype.append((
                (f"Length of the interval in samples for {label}", f"{key}_length"),
                peak_fields["length"][0],
            ))
            apc_dtype.append((
                (f"Width of one sample for {label} [ns]", f"{key}_dt"),
                peak_fields["dt"][0],
            ))
        n_channel_dtype = [
            (("Main S1 count of contributing PMTs", "s1_n_channels"), np.int16),
        ]
        return {
            "event_area_per_channel": apc_dtype + n_channel_dtype + strax.time_fields,
            "event_n_channel": n_channel_dtype + strax.time_fields,
        }

    def compute(self, events, peaks):
        """Fill per-event outputs from the peaks contained in each event.

        For every event, copy area-per-channel, length and dt from the peak
        selected by each `<type>_index` field (skipped when the index is -1,
        i.e. no such peak was found), and count the channels with positive
        area for the main S1.
        """
        apc = np.zeros(len(events), self.dtype["event_area_per_channel"])
        n_ch = np.zeros(len(events), self.dtype["event_n_channel"])
        for out in (apc, n_ch):
            out["time"] = events["time"]
            out["endtime"] = strax.endtime(events)

        peaks_per_event = strax.split_by_containment(peaks, events)
        for i, (event, event_peaks) in enumerate(zip(events, peaks_per_event)):
            for kind in ("s1", "s2", "alt_s1", "alt_s2"):
                idx = event[f"{kind}_index"]
                if idx == -1:
                    # No peak of this kind in the event; leave zeros.
                    continue
                per_channel = event_peaks["area_per_channel"][idx]
                apc[f"{kind}_area_per_channel"][i] = per_channel
                apc[f"{kind}_length"][i] = event_peaks["length"][idx]
                apc[f"{kind}_dt"][i] = event_peaks["dt"][idx]
                if kind == "s1":
                    # A channel "contributes" when it collected positive area.
                    apc["s1_n_channels"][i] = (per_channel > 0).sum()

        # Mirror the shared field into the lightweight always-saved output.
        n_ch["s1_n_channels"] = apc["s1_n_channels"]
        return {
            "event_area_per_channel": apc,
            "event_n_channel": n_ch,
        }
Oops, something went wrong.