From 0d82c1ec5438fe3179f9794c31f1808557636e2e Mon Sep 17 00:00:00 2001
From: Lorenzo <79980269+bastonero@users.noreply.github.com>
Date: Mon, 22 May 2023 14:52:53 +0200
Subject: [PATCH] `Hubbard`: fix not overridden `max_iterations` (#37)

The `max_iterations` input was not set on the builder in
`get_builder_from_protocol`, so protocol defaults and overrides for it
were ignored. We also change `skip_first_relax` from a plain `bool` with
`non_db=True` to a storable `orm.Bool` input.
---
 src/aiida_quantumespresso_hp/workflows/hubbard.py | 10 +++++-----
 .../workflows/protocols/hubbard.yaml              |  1 +
 tests/workflows/protocols/test_hubbard.py         | 12 +++++++++---
 .../protocols/test_hubbard/test_default.yml       |  1 +
 tests/workflows/test_hubbard.py                   |  4 +++-
 5 files changed, 19 insertions(+), 9 deletions(-)

diff --git a/src/aiida_quantumespresso_hp/workflows/hubbard.py b/src/aiida_quantumespresso_hp/workflows/hubbard.py
index 52d5235..7ad2b81 100644
--- a/src/aiida_quantumespresso_hp/workflows/hubbard.py
+++ b/src/aiida_quantumespresso_hp/workflows/hubbard.py
@@ -116,9 +116,8 @@ def define(cls, spec):
         )
         spec.input(
             'skip_first_relax',
-            valid_type=bool,
-            default=lambda: False,
-            non_db=True,
+            valid_type=orm.Bool,
+            default=lambda: orm.Bool(False),
             help='If True, skip the first relaxation'
         )
         spec.input(
@@ -288,9 +287,10 @@ def get_builder_from_protocol(
         builder.relax = relax
         builder.scf = scf
         builder.hubbard = hubbard
-        builder.skip_first_relax = inputs['skip_first_relax']
+        builder.skip_first_relax = orm.Bool(inputs['skip_first_relax'])
         builder.tolerance_onsite = orm.Float(inputs['tolerance_onsite'])
         builder.tolerance_intersite = orm.Float(inputs['tolerance_intersite'])
+        builder.max_iterations = orm.Int(inputs['max_iterations'])
         builder.meta_convergence = orm.Bool(inputs['meta_convergence'])
         builder.clean_workdir = orm.Bool(inputs['clean_workdir'])
 
@@ -305,7 +305,7 @@ def setup(self):
         self.ctx.is_insulator = None
         self.ctx.is_magnetic = False
         self.ctx.iteration = 0
-        self.ctx.skip_first_relax = self.inputs.skip_first_relax
+        self.ctx.skip_first_relax = self.inputs.skip_first_relax.value
         self.ctx.relax_frequency = 1
         if 'relax_frequency' in self.inputs:
             self.ctx.relax_frequency = self.inputs.relax_frequency.value
diff --git a/src/aiida_quantumespresso_hp/workflows/protocols/hubbard.yaml b/src/aiida_quantumespresso_hp/workflows/protocols/hubbard.yaml
index 6ec25ed..b4aa3ad 100644
--- a/src/aiida_quantumespresso_hp/workflows/protocols/hubbard.yaml
+++ b/src/aiida_quantumespresso_hp/workflows/protocols/hubbard.yaml
@@ -1,5 +1,6 @@
 default_inputs:
     clean_workdir: True
+    max_iterations: 10
     meta_convergence: True
     tolerance_onsite: 0.1
     tolerance_intersite: 0.01
diff --git a/tests/workflows/protocols/test_hubbard.py b/tests/workflows/protocols/test_hubbard.py
index 3c3a7bb..9da437b 100644
--- a/tests/workflows/protocols/test_hubbard.py
+++ b/tests/workflows/protocols/test_hubbard.py
@@ -32,15 +32,21 @@ def test_default(fixture_code, data_regression, generate_hubbard_structure, seri
 
 @pytest.mark.parametrize(
     'overrides', (
-        {
-            'relax_frequency': 3
-        },
         {
             'tolerance_onsite': 1
         },
         {
            'tolerance_intersite': 1
         },
+        {
+            'skip_first_relax': True
+        },
+        {
+            'relax_frequency': 3
+        },
+        {
+            'max_iterations': 1
+        },
         {
             'meta_convergence': False
         },
diff --git a/tests/workflows/protocols/test_hubbard/test_default.yml b/tests/workflows/protocols/test_hubbard/test_default.yml
index 44bc63f..275708d 100644
--- a/tests/workflows/protocols/test_hubbard/test_default.yml
+++ b/tests/workflows/protocols/test_hubbard/test_default.yml
@@ -17,6 +17,7 @@ hubbard:
     parallelize_qpoints: true
     qpoints_distance: 0.8
 hubbard_structure: CoLiO2
+max_iterations: 10
 meta_convergence: true
 relax:
   base:
diff --git a/tests/workflows/test_hubbard.py b/tests/workflows/test_hubbard.py
index 3a6df09..f350bba 100644
--- a/tests/workflows/test_hubbard.py
+++ b/tests/workflows/test_hubbard.py
@@ -164,8 +164,10 @@ def test_magnetic_setup(generate_workchain_hubbard, generate_inputs_hubbard):
 
 @pytest.mark.usefixtures('aiida_profile')
 def test_skip_first_relax(generate_workchain_hubbard, generate_inputs_hubbard):
     """Test `SelfConsistentHubbardWorkChain` when skipping only the first relax."""
+    from aiida.orm import Bool
+
     inputs = generate_inputs_hubbard()
-    inputs['skip_first_relax'] = True
+    inputs['skip_first_relax'] = Bool(True)
     process = generate_workchain_hubbard(inputs=inputs)
     process.setup()
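
A minimal usage sketch of the changed inputs, assuming the `pw_code`, `hp_code` and
`hubbard_structure` argument names of `get_builder_from_protocol`; the code labels and
the structure PK below are placeholders:

    from aiida import load_profile, orm
    from aiida.engine import submit
    from aiida_quantumespresso_hp.workflows.hubbard import SelfConsistentHubbardWorkChain

    load_profile()

    # Placeholders: use real code labels and the PK of an existing HubbardStructureData node.
    hubbard_structure = orm.load_node(1234)

    builder = SelfConsistentHubbardWorkChain.get_builder_from_protocol(
        pw_code=orm.load_code('pw@localhost'),
        hp_code=orm.load_code('hp@localhost'),
        hubbard_structure=hubbard_structure,
        overrides={'max_iterations': 5, 'skip_first_relax': True},
    )
    # With this patch both overrides reach the builder as storable nodes
    # (orm.Int for max_iterations, orm.Bool for skip_first_relax).
    submit(builder)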