
Pendf single process #255

Open · wants to merge 16 commits into base: v1.0
286 changes: 239 additions & 47 deletions sandy/core/endf6.py
@@ -2175,56 +2175,174 @@ def apply_perturbations(self, smp, processes=1, **kwargs):
else:
return self._mp_apply_perturbations(smp, processes=processes, **kwargs)

@staticmethod
def pendf_perturb(pxs, pendf):
Owner review comment: call it perturb_xs

"""
Perturb a PENDF file according to the perturbations generated by
`tape.get_perturbations()`.

Parameters
----------
pxs: perturbation coefficients for the sample.
pendf: PENDF file to perturb.

Returns
-------
Perturbed pendf file in ENDF6 format.

"""
xs = sandy.Xs.from_endf6(pendf)
xs_pert = xs._perturb(pxs)
pendf_pert = xs_pert.reconstruct_sums(drop=True).to_endf6(pendf).update_intro()
return pendf_pert
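
A minimal usage sketch of the static method above, assuming that the sample container returned by `get_perturbations()` is keyed by MF number and exposes `iterate_xs_samples()`, as used later in this diff; the input file name and sample count are placeholders, not part of this PR.

import sandy

tape = sandy.Endf6.from_file("n-Fe056.jeff33")   # placeholder input file
smps = tape.get_perturbations(2)                 # placeholder sample count
pendf = tape.get_pendf()                         # PENDF to be perturbed

# one perturbed PENDF per sample, keyed by sample index
perturbed = {}
for n, pxs in smps[33].iterate_xs_samples():     # 33: cross-section covariance samples
    perturbed[n] = tape.pendf_perturb(pxs, pendf)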

def _mp_apply_perturbations(self, smps,
processes=1,
temperature=0,
to_ace=False,
implicit_effect=False,
to_file=False,
filename="{ZA}_{SMP}",
verbose=False,
njoy_kws={},
**kwargs
):
"""

Apply perturbations to the data contained in an ENDF6 file. At the
moment only the procedure for cross sections is implemented. Options
are included to directly convert the perturbed PENDF to ACE and write
the data to files.

Parameters
----------
smps : samples obtained by taking the relative covariances from the
evaluated files and a unit vector as mean.
processes : number of processes employed to complete the task.
Used to convert ENDF-6 to ACE format in parallel if > 1.
The default is 1.
temperature: temperature at which perturbed xs are evaluated.
The default is 0.
to_ace: option to write ace files from perturbed pendf.
The default is False.
implicit_effect: if True, the PENDF at the requested temperature is
generated and the njoy module "broadr" is not called again for the
generation of the ace file.
If False, a PENDF at 0 K is produced and the "broadr"
module is then called during the conversion to ace to obtain the
perturbed file at the requested temperature.
The default is False.

Owner review comment: clarify that broadr is used on one hand for the production of the pendf file, and on the other hand for the production of the ace file.
to_file: option to write endf6 or ace to a file.
The default is False.

filename: used when to_file is set, to customize the file name.
The default is "{ZA}_{SMP}".
verbose : `bool`, optional
flag to print a reminder of file generation to screen.
The default is False.
njoy_kws: keyword arguments to pass to `tape.get_pendf()`.
**kwargs : keyword arguments to pass to `tape.get_ace()`.

def _mp_apply_xs_perturbations(self, smp, processes, **kwargs):
# need to pass xs.data (dataframe), because sandy.Xs instance cannot be pickled
xs = sandy.Xs.from_endf6(self).data
seq = smp.iterate_xs_samples()

pool = mp.Pool(processes=processes)
outs = {n: pool.apply_async(pendf_xs_perturb_worker, (self, xs, n, p), kwargs) for n, p in seq}
outs = {n: out.get() for n, out in outs.items()}
pool.close()
pool.join()
return outs

def _mp_apply_perturbations(self, smps, processes,
pendf_kws={},
**kwargs):
# Passed ENDF-6 and PENDF as dictionaries because class instances cannot be pickled
endf6 = self.data
if 33 in smps:
pendf = self.get_pendf(**pendf_kws).data
kwargs["process_xs"] = True

# Samples passed as generator in apply_async
def null_gen():
"generator mimicking dictionary and returning only None"
while True:
yield None, None

def get_key(*lst):
return np.array([x for x in lst if x is not None]).item()

seq_xs = smps[33].iterate_xs_samples() if 33 in smps else null_gen()
seq_nu = smps[31].iterate_xs_samples() if 31 in smps else null_gen()
seqs = zip(seq_xs, seq_nu)


pool = mp.Pool(processes=processes)
outs = {get_key(nxs, nnu): pool.apply_async(endf6_perturb_worker, (endf6, pendf, nxs, pxs), kwargs)
for (nxs, pxs), (nnu, pnu) in seqs}
outs = {n: out.get() for n, out in outs.items()}
pool.close()
pool.join()
return outs
Returns
-------
A dictionary of endf/pendf files or ace files, depending on to_ace.

def _apply_xs_perturbations(self, smp, **kwargs):
xs = sandy.Xs.from_endf6(self)
for n, x in xs.perturb(smp, **kwargs):
# instead of defining the function twice, just call the worker also here
yield n, pendf_perturb_worker(self, n, x, **kwargs)
Examples
--------

"""

Owner review comment: in the intro data preprocessing, add a comment saying that kwargs are used for the workers and njoy_kws for get_pendf (pendf creation).

# to transfer keywords to the worker
kwargs["filename"] = filename
kwargs["to_file"] = to_file
kwargs["verbose"] = verbose
Owner review comment: include verbose in the input kwargs and then access it using kwargs["verbose"].


if to_ace:
if implicit_effect:
njoy_kws["temperature"] = kwargs["temperature"] = temperature
Owner review comment: comment on this.

kwargs["broadr"] = False
else:
njoy_kws["temperature"] = 0
kwargs["temperature"] = temperature
kwargs["broadr"] = True
else:
# if not creating an ace file, directly generate the pendf at the
# requested temperature
njoy_kws["temperature"] = temperature


Owner review comment: specific case for xs that requires PENDF generation.

Owner review comment: will be extended (with more if clauses) for other data types.

if 33 in smps:
seq_xs = smps[33].iterate_xs_samples()
pendf_ = self.get_pendf(**njoy_kws)

# if the flag is set, proceed to the creation of the ace file;
# multiprocessing can be used
if to_ace:
outs = {}
if processes == 1:
# Passed ENDF-6 and PENDF as dictionaries because class
# instances cannot be pickled
outs = {nxs: to_ace_worker(self.data,
Owner review comment: to be deleted.

self.pendf_perturb(pxs, pendf_).data,
Owner review comment: need a function/method to englobe perturb_xs, perturb_nu, perturb_chi, perturb_mu.
Inputs are endf6 + pendf + perturbed xs, nu, chi, mu.
Outputs are perturbed endf6 and perturbed pendf.
Possibly it returns an iterator.

nxs, **kwargs)
for (nxs, pxs) in seq_xs}
for (nxs, pxs) in seq_xs:
pendf_pert = self.pendf_perturb(pxs, pendf_)
outs[nxs] = to_ace_worker(self.data,
pendf_pert.data,
nxs, **kwargs)
Owner review comment: to_ace_worker(self.data, pendf_pert.data, nxs, filename=filename, to_file=to_file, **kwargs)?

elif processes > 1:

pool = mp.Pool(processes=processes)
# Passed ENDF-6 and PENDF as dictionaries because class
# instances cannot be pickled

for (nxs, pxs) in seq_xs:
pendf_pert = self.pendf_perturb(pxs, pendf_)
Owner review comment: change to regular method.

outs[nxs] = pool.apply_async(to_ace_worker,
(self.data,
pendf_pert.data,
nxs),
kwargs)

outs = {n: out.get() for n, out in outs.items()}
pool.close()
pool.join()

return outs
else:

# filename options, in case writing to file
mat = self.mat[0]
intro = self.read_section(mat, 1, 451)
za = int(intro["ZA"])
meta = int(intro["LISO"])
zam = sandy.zam.za2zam(za, meta=meta)
params = dict(
MAT=mat,
ZAM=zam,
ZA=za,
META=meta,
)
outs = {}
for (nxs, pxs) in seq_xs:
pendf_pert = self.pendf_perturb(pxs, pendf_)
outs[nxs] = dict(endf=self, pendf=pendf_pert)
if to_file:
params["SMP"] = np.array(nxs).item()
fn = filename.format(**params)
file = f"{fn}.endf6"
if verbose:
print(f"writing to file '{file}'")
self.to_file(file)
if pendf_pert:
file = f"{fn}.pendf"
if verbose:
print(f"writing to file '{file}'")
pendf_pert.to_file(file)

return outs
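
An illustrative call of the method above, mirroring how sampling.py invokes it later in this diff; the input file, sample count, MT number and temperature are placeholders, not part of this PR.

import sandy

endf6 = sandy.Endf6.from_file("n-U235.jeff33")   # placeholder input file
smps = endf6.get_perturbations(
    2,                                           # placeholder sample count
    njoy_kws={"errorr33_kws": dict(mt=18)},      # restrict MF33 processing to MT=18 (placeholder)
)

# implicit_effect=True: "broadr" runs while producing the PENDF at 300 K and
# is not called again during the ACE generation
outs = endf6._mp_apply_perturbations(
    smps,
    processes=2,
    temperature=300,
    to_ace=True,
    implicit_effect=True,
    to_file=True,
    filename="{ZA}_{SMP}",
    verbose=True,
)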


def pendf_perturb_worker(e6, n, xs, **kwargs):
# This is the function that needs to be changed for any concatenation
@@ -2350,3 +2468,77 @@ def endf6_perturb_worker(e6, pendf, nxs, pxs,
return

return out

def to_ace_worker(e6, pendf, n,
ace_kws={},
**kwargs):

"""
Convert a perturbed pendf instance into an ACE file and optionally write
it to a file.

Parameters
----------
e6: Endf6 instance.
pendf: perturbed pendf.
n: sample number.
ace_kws: keywords to pass to "tape.get_ace()". The default is {}.
**kwargs:
to_file: write the output to a file.
verbose: show njoy output instead of running it in the background.
temperature: temperature at which the ace file is generated.
filename: name to give to the external file.

Returns
-------
Dict containing the "ace" and "xsdir" strings. If to_file, the two
strings are written to files.

"""


# to endf6
endf6_pert = sandy.Endf6(e6.copy())


filename = kwargs["filename"]
to_file = kwargs.get("to_file", False)
verbose = kwargs.get("verbose", False)
ace_kws["broadr"] = kwargs["broadr"]
temperature = ace_kws["temperature"] = kwargs.get("temperature",0)
suffix = ace_kws.get("suffix", sandy.njoy.get_temperature_suffix(temperature))
ace_kws["suffix"] = "." + suffix
pendf_ = sandy.Endf6(pendf.copy())
ace = endf6_pert.get_ace(pendf=pendf_, **ace_kws)
ace_kws.pop("suffix") #to avoid appending during the loop
if verbose:
print(f"ACE file of sample {n} generated. Process ID: {os.getpid()}")
if to_file:
mat = endf6_pert.mat[0]
intro = endf6_pert.read_section(mat, 1, 451)
za = int(intro["ZA"])
meta = int(intro["LISO"])
zam = sandy.zam.za2zam(za, meta=meta)
ismp = n
params = dict(
MAT=mat,
ZAM=zam,
ZA=za,
META=meta,
SMP=ismp,
)

fn = filename.format(**params)
file = f"{fn}.{suffix}c"
with open(file, "w") as f:
if verbose:
print(f"writing to file '{file}'")
f.write(ace["ace"])
file = f"{file}.xsd"
with open(file, "w") as f:
if verbose:
print(f"writing to file '{file}'")
f.write(ace["xsdir"])
return
return ace
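
A hedged sketch of a direct call to to_ace_worker, mirroring how it is invoked from _mp_apply_perturbations above; the keyword names follow the docstring and the function body, and the concrete values are placeholders.

# endf6 and pendf_pert are assumed to be sandy.Endf6 instances built as in the
# method above; the worker receives their .data dictionaries because class
# instances cannot be pickled
out = to_ace_worker(
    endf6.data,
    pendf_pert.data,
    0,                       # sample number
    ace_kws={},
    filename="{ZA}_{SMP}",
    to_file=False,
    verbose=True,
    temperature=300,         # placeholder temperature
    broadr=True,             # broaden during ACE generation (implicit_effect=False path)
)
# out is a dict with "ace" and "xsdir" strings; when to_file=True the worker
# writes them to disk and returns None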

16 changes: 7 additions & 9 deletions sandy/sampling.py
@@ -204,11 +204,7 @@ def run(cli):
iargs = parse(cli.split())

endf6 = sandy.Endf6.from_file(iargs.file)

njoy_kws = dict(
err=0.01,
temperature=0,
)
njoy_kws = {}
Owner review comment: Why did you remove these default options?
I think the reconstruction tolerance err does not need to be the default value of 0.001.

These keywords are only used to produce the ERRORR file right?
Then a low tolerance should be ok.

Also, removing the temperature would just assume by default that the temperature is 0.
Is this the reason why you removed it?
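
For reference, a sketch of how the removed defaults could still be passed explicitly by a caller; the values mirror the lines deleted in this hunk and the call follows the one shown below.

njoy_kws = dict(err=0.01, temperature=0)   # defaults removed by this change
smps = endf6.get_perturbations(iargs.samples, njoy_kws=njoy_kws, smp_kws=smp_kws)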

if iargs.mt33:
njoy_kws["errorr33_kws"] = dict(mt=iargs.mt33)

@@ -224,16 +220,18 @@

smps = endf6.get_perturbations(iargs.samples, njoy_kws=njoy_kws, smp_kws=smp_kws)

ace_kws = dict(
temperature=iargs.temperatures[0] if hasattr(iargs.temperatures, "__len__") else iargs.temperatures
)
if iargs.temperatures:
temperature = iargs.temperatures[0] if hasattr(iargs.temperatures, "__len__") else iargs.temperatures
else:
temperature = 0

endf6._mp_apply_perturbations(
smps,
processes=iargs.processes,
to_file=True,
to_ace=iargs.acer,
filename=iargs.outname,
ace_kws=ace_kws,
temperature=temperature,
verbose=iargs.verbose,
)
