# -*- coding: utf-8 -*-
"""
Created on Fri Jul 16 10:59:33 2021
@author: Danny Petschke
@email: [email protected]
"""
#*************************************************************************************************
#**
#** Copyright (c) 2021 Danny Petschke. All rights reserved.
#**
#** This program is free software: you can redistribute it and/or modify
#** it under the terms of the GNU General Public License as published by
#** the Free Software Foundation, either version 3 of the License, or
#** (at your option) any later version.
#**
#** This program is distributed in the hope that it will be useful,
#** but WITHOUT ANY WARRANTY; without even the implied warranty of
#** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#** GNU General Public License for more details.
#**
#** You should have received a copy of the GNU General Public License
#** along with this program. If not, see http://www.gnu.org/licenses/.
#**
#** Contact: [email protected]
#**
#*************************************************************************************************
from random import randrange, uniform
import matplotlib.pyplot as plt
from scipy.stats import expon, norm, lognorm
import numpy as np
# convolution of numerical data using the convolution theorem ...
def convolveData(a, b):
    """
    Circularly convolve two equal-length 1D signals via the convolution theorem.

    Pointwise-multiplies the DFTs of `a` and `b` and transforms back; only the
    real part is returned, since the imaginary residue is pure FFT round-off
    for real-valued inputs.
    """
    spectrum = np.fft.fft(a) * np.fft.fft(b)
    return np.fft.ifft(spectrum).real
# poisson noise(λ) = gaussian(μ = λ, σ² = λ) ...
def poissonNoise(mean, noise=1.0):
    """
    Draw one sample of Gaussian-approximated Poisson noise.

    For counting statistics, Poisson(lambda) ~ Gaussian(mu=lambda, sigma^2=lambda);
    this returns only the zero-mean fluctuation term, scaled by `noise` (in units
    of the standard deviation). The +0.00001 keeps sigma strictly positive for
    mean == 0.
    """
    sigma = noise * np.sqrt(mean + 0.00001)
    return np.random.normal(loc=0.0, scale=sigma)
# generate a single normalized Gaussian instrumental response (IRF) without noise and background ...
def generateGaussianIRF(numberOfBins = 500,
                        binWidth_in_ps = 25,
                        t0_in_ps = 3000.,
                        fwhm_in_ps = 200.):
    """
    Build a single Gaussian instrumental response function (IRF).

    Returns (time axis in ps at the bin centres, IRF normalized to unit sum).
    No noise and no background are added.
    """
    # bin centres: 0.5*bw, 1.5*bw, ..., (numberOfBins - 0.5)*bw
    bin_centres_ps = np.arange(0.5*binWidth_in_ps, (numberOfBins + 0.5)*binWidth_in_ps, binWidth_in_ps)
    sigma_ps = fwhm_in_ps/2.3548  # FWHM -> sigma conversion for a Gaussian
    standardized = (bin_centres_ps - t0_in_ps)/sigma_ps
    response = norm.pdf(standardized)/sigma_ps
    response /= response.sum()
    return bin_centres_ps, response
# generates a lifetime spectrum with or without (w/o) convoluted kernel ...
def generateLTSpectrum(numberOfBins = 500,
                       binWidth_in_ps = 25,
                       integralCounts = 5000000,
                       constBkgrdCounts = 5,
                       tau_in_ps = [160.,385.,2500.],
                       I = [0.85,0.145,0.005],
                       irf_data = [], # numerical IRF data to be convoluted with the exp-decays (requires equal length of bins)
                       noise = True, # apply noise ?
                       noise_level = 1., # units of stddev
                       convoluteWithIRF = True):
    """
    Generate a multi-exponential lifetime spectrum.

    Parameters:
        numberOfBins / binWidth_in_ps : channel count and channel width [ps]
        integralCounts                : total counts in the spectrum INCLUDING background
        constBkgrdCounts              : constant background counts per bin
        tau_in_ps, I                  : characteristic lifetimes [ps] and their intensities
        irf_data                      : IRF to convolve with (must match numberOfBins when
                                        convoluteWithIRF is True); normalized internally
        noise, noise_level            : apply Gaussian-approximated Poisson noise (in stddev units)
        convoluteWithIRF              : convolve the ideal decay with irf_data

    Returns:
        (time axis in ps at bin centres, counts per bin)
    """
    time_in_ps_x = np.arange(0.5*binWidth_in_ps, (numberOfBins + 0.5)*binWidth_in_ps, binWidth_in_ps)
    if convoluteWithIRF:
        assert len(time_in_ps_x) == len(irf_data)
    # the requested integral includes the background, so the decay part gets the remainder ...
    if constBkgrdCounts > 0:
        integralCounts -= constBkgrdCounts*numberOfBins
    assert integralCounts > 0
    # sum of normalized exponential decay components ...
    counts_y = np.zeros(numberOfBins)
    for i in range(0, len(tau_in_ps)):
        counts_y += I[i]*(1./tau_in_ps[i])*expon.pdf((time_in_ps_x)/tau_in_ps[i])
    # normalize data ...
    counts_y /= sum(counts_y)
    if convoluteWithIRF:
        # BUG FIX: the normalization result was previously discarded
        # (`irf_data / sum(irf_data)` without assignment), so an unnormalized
        # IRF silently rescaled the whole spectrum. Reassign (out-of-place, so
        # the caller's array is not mutated) ...
        irf_data = irf_data / sum(irf_data)
        # convolute with IRF data ...
        counts_y = convolveData(counts_y, irf_data)
    counts_y = counts_y*integralCounts + constBkgrdCounts
    if noise:
        # per-bin truncation to integer counts plus an integer-truncated noise draw ...
        for i in range(0, len(counts_y)):
            counts_y[i] = int(counts_y[i]) + int(poissonNoise(mean=counts_y[i],noise=noise_level))
    return time_in_ps_x,counts_y
# generates a Gaussian distribution (pdf) of characteristic lifetimes ...
def generateGaussianDistributionOfLifetimes(number_of_tau_grid_points = 1000,
                                            tau_grid_range_in_ps = [10.,10000.],
                                            loc_tau_in_ps = [380.,1600.], # mean of Gaussian
                                            scale_tau_in_ps = [5.,50.], # standard deviation of Gaussian
                                            I = [0.15,0.85]): # contribution of the individual Gaussian distributions
    """
    Build a normalized mixture of Gaussian pdfs over a characteristic-lifetime grid.

    Each mean/stddev pair in `loc_tau_in_ps`/`scale_tau_in_ps` contributes with
    the matching weight from `I`. Returns (tau grid in ps, pdf normalized to unit sum).
    """
    tau_grid = np.linspace(tau_grid_range_in_ps[0], tau_grid_range_in_ps[1], number_of_tau_grid_points)
    mixture = np.zeros_like(tau_grid)
    for idx, mu in enumerate(loc_tau_in_ps):
        mixture += I[idx]*norm.pdf(x=tau_grid, loc=mu, scale=scale_tau_in_ps[idx])
    mixture /= mixture.sum()
    return tau_grid, mixture
# generates a log-normal distribution (pdf) of characteristic lifetimes: (1/(s x sqrt(2 PI))) exp( -0.5 (log(x)/s)² ) ...
def generateLogNormalDistributionOfLifetimes(number_of_tau_grid_points = 1000,
                                             tau_grid_range_in_ps = [10.,10000.],
                                             loc_tau_in_ps = [380.,1600.], # location (median) in ps
                                             scale_tau_in_ps = [5.,50.], # absolute spread in ps
                                             I = [0.15,0.85]): # contribution of the individual distributions
    """
    Build a normalized mixture of log-normal pdfs over a characteristic-lifetime grid.

    Each component is a log-normal with median `loc_tau_in_ps[i]` and shape
    parameter s = scale_tau_in_ps[i]/loc_tau_in_ps[i], so `scale_tau_in_ps`
    keeps its meaning of an (approximate) absolute spread in ps.
    Returns (tau grid in ps, pdf normalized to unit sum).

    BUG FIX: the previous implementation used scale = exp(loc_tau_in_ps[i]),
    which overflows for any lifetime above ~710 ps (exp(1600) = inf with the
    defaults), yielding a degenerate pdf and NaNs after normalization.
    """
    tau_x = np.linspace(tau_grid_range_in_ps[0],tau_grid_range_in_ps[1],number_of_tau_grid_points)
    pdf_y = np.zeros(number_of_tau_grid_points)
    for i in range(0,len(loc_tau_in_ps)):
        # relative width as shape; scale = median places the component at loc_tau ...
        s_i = scale_tau_in_ps[i]/loc_tau_in_ps[i]
        pdf_y += I[i]*lognorm.pdf(x=tau_x,s=s_i,scale=loc_tau_in_ps[i])
    pdf_y /= sum(pdf_y)
    return tau_x,pdf_y