diff --git a/src/aiida_quantumespresso_hp/workflows/hubbard.py b/src/aiida_quantumespresso_hp/workflows/hubbard.py
index 05a3dc6..0789695 100644
--- a/src/aiida_quantumespresso_hp/workflows/hubbard.py
+++ b/src/aiida_quantumespresso_hp/workflows/hubbard.py
@@ -120,6 +120,8 @@ def define(cls, spec):
         spec.input('skip_relax_iterations', valid_type=orm.Int, required=False, validator=validate_positive,
             help=('The number of iterations for skipping the `relax` '
             'step without performing check on parameters convergence.'))
+        spec.input('radial_analysis', valid_type=orm.Dict, required=False,
+            help='If specified, it performs a nearest neighbour analysis and feeds the radius to hp.x')
         spec.input('relax_frequency', valid_type=orm.Int, required=False, validator=validate_positive,
             help='Integer value referring to the number of iterations to wait before performing the `relax` step.')
         spec.expose_inputs(PwRelaxWorkChain, namespace='relax',
@@ -255,6 +257,7 @@ def get_builder_from_protocol(
         builder.hubbard = hubbard
         builder.tolerance_onsite = orm.Float(inputs['tolerance_onsite'])
         builder.tolerance_intersite = orm.Float(inputs['tolerance_intersite'])
+        builder.radial_analysis = orm.Dict(inputs['radial_analysis'])
         builder.max_iterations = orm.Int(inputs['max_iterations'])
         builder.meta_convergence = orm.Bool(inputs['meta_convergence'])
         builder.clean_workdir = orm.Bool(inputs['clean_workdir'])
@@ -550,6 +553,20 @@ def run_hp(self):
         workchain = self.ctx.workchains_scf[-1]
 
         inputs = AttributeDict(self.exposed_inputs(HpWorkChain, namespace='hubbard'))
+
+        if 'radial_analysis' in self.inputs:
+            from qe_tools import CONSTANTS
+
+            kwargs = self.inputs.radial_analysis.get_dict()
+            hubbard_utils = HubbardUtils(self.ctx.current_hubbard_structure)
+            radius = hubbard_utils.get_intersites_radius(**kwargs)  # in Angstrom
+
+            parameters = inputs.hp.parameters.get_dict()
+            parameters['INPUTHP'].pop('num_neigh', None)
+            parameters['INPUTHP']['rmax'] = radius / CONSTANTS.bohr_to_ang
+
+            inputs.hp.parameters = orm.Dict(parameters)
+
         inputs.clean_workdir = self.inputs.clean_workdir
         inputs.hp.parent_scf = workchain.outputs.remote_folder
         inputs.hp.hubbard_structure = self.ctx.current_hubbard_structure
@@ -558,7 +575,7 @@ def run_hp(self):
         running = self.submit(HpWorkChain, **inputs)
 
         self.report(f'launching HpWorkChain<{running.pk}> iteration #{self.ctx.iteration}')
-        return ToContext(workchains_hp=append_(running))
+        self.to_context(**{'workchains_hp': append_(running)})
 
     def inspect_hp(self):
         """Analyze the last completed HpWorkChain.
diff --git a/src/aiida_quantumespresso_hp/workflows/protocols/hubbard.yaml b/src/aiida_quantumespresso_hp/workflows/protocols/hubbard.yaml
index cb32044..9c39c49 100644
--- a/src/aiida_quantumespresso_hp/workflows/protocols/hubbard.yaml
+++ b/src/aiida_quantumespresso_hp/workflows/protocols/hubbard.yaml
@@ -4,6 +4,10 @@ default_inputs:
     meta_convergence: True
     tolerance_onsite: 0.1
     tolerance_intersite: 0.01
+    radial_analysis:
+        radius_max: 10.0  # in Angstrom
+        thr: 0.01
+        nn_finder: 'crystal'
     scf:
         kpoints_distance: 0.4
 
diff --git a/tests/conftest.py b/tests/conftest.py
index 31950db..a64a69e 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -58,7 +58,7 @@ def _fixture_code(entry_point_name):
 
         try:
             return load_code(label=label)
-        except exceptions.NotExistent:
+        except (exceptions.NotExistent, exceptions.MultipleObjectsError):
             return InstalledCode(
                 label=label,
                 computer=fixture_localhost,
diff --git a/tests/workflows/protocols/test_hubbard/test_default.yml b/tests/workflows/protocols/test_hubbard/test_default.yml
index ee18383..59bad83 100644
--- a/tests/workflows/protocols/test_hubbard/test_default.yml
+++ b/tests/workflows/protocols/test_hubbard/test_default.yml
@@ -19,6 +19,10 @@ hubbard:
 hubbard_structure: CoLiO2
 max_iterations: 10
 meta_convergence: true
+radial_analysis:
+  nn_finder: 'crystal'
+  radius_max: 10.0
+  thr: 0.01
 relax:
   base:
     kpoints_distance: 0.15
diff --git a/tests/workflows/test_hubbard.py b/tests/workflows/test_hubbard.py
index 11b5152..13dc817 100644
--- a/tests/workflows/test_hubbard.py
+++ b/tests/workflows/test_hubbard.py
@@ -247,6 +247,28 @@ def test_relax_frequency(generate_workchain_hubbard, generate_inputs_hubbard):
         assert not process.should_run_relax()  # skip
 
 
+@pytest.mark.usefixtures('aiida_profile')
+def test_radial_analysis(
+    generate_workchain_hubbard,
+    generate_inputs_hubbard,
+    generate_scf_workchain_node,
+):
+    """Test `SelfConsistentHubbardWorkChain` outline when radial analysis is activated.
+
+    We want to make sure `rmax` is in `hp.parameters`.
+    """
+    inputs = generate_inputs_hubbard()
+    inputs['radial_analysis'] = Dict({})  # no need to specify inputs, it will use the defaults
+    process = generate_workchain_hubbard(inputs=inputs)
+
+    process.setup()
+    process.ctx.workchains_scf = [generate_scf_workchain_node(remote_folder=True)]
+    process.run_hp()
+
+    # parameters = process.ctx['workchains_hp'][-1].inputs['hp']['parameters'].get_dict()
+    # assert 'rmax' in parameters
+
+
 @pytest.mark.usefixtures('aiida_profile')
 def test_should_check_convergence(generate_workchain_hubbard, generate_inputs_hubbard):
     """Test `SelfConsistentHubbardWorkChain.should_check_convergence`."""
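
Usage note (not part of the patch): a minimal Python sketch of what the new `radial_analysis` input carries and of the unit conversion that `run_hp` applies before writing `rmax` to the `INPUTHP` namelist. The radius value below is hypothetical, for illustration only; the dictionary keys mirror the protocol defaults added in hubbard.yaml above.

    from qe_tools import CONSTANTS

    # Protocol defaults for the new `radial_analysis` input (see hubbard.yaml above).
    radial_analysis = {'radius_max': 10.0, 'thr': 0.01, 'nn_finder': 'crystal'}

    # `run_hp` passes these keys to `HubbardUtils.get_intersites_radius` and converts
    # the returned radius from Angstrom to Bohr before setting `INPUTHP.rmax`,
    # removing `num_neigh` so hp.x selects neighbours by distance instead.
    radius = 5.2  # hypothetical radius in Angstrom, for illustration only
    rmax = radius / CONSTANTS.bohr_to_ang  # ~9.83 Bohr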