From c94c46500acc1f792858ed9550edbebdfc851e32 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
<3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Fri, 10 Nov 2023 14:55:37 +0200
Subject: [PATCH] Fix misspellings and modernize Python 2-era idioms
---
capsul/application.py | 6 +-
capsul/config/configuration.py | 2 +-
capsul/database/__init__.py | 4 +-
capsul/database/redis.py | 2 +-
capsul/database/sqlite.py | 2 +-
capsul/dataset.py | 6 +-
capsul/engine/__init__.py | 2 +-
capsul/info.py | 23 ++-
capsul/pipeline/custom_nodes/cv_node.py | 2 +-
capsul/pipeline/custom_nodes/loo_node.py | 3 +-
capsul/pipeline/custom_nodes/map_node.py | 3 +-
capsul/pipeline/custom_nodes/reduce_node.py | 2 +-
capsul/pipeline/custom_nodes/strcat_node.py | 3 +-
capsul/pipeline/custom_nodes/strconv.py | 2 +-
capsul/pipeline/pipeline.py | 58 +++---
capsul/pipeline/pipeline_nodes.py | 8 +-
capsul/pipeline/pipeline_tools.py | 26 +--
capsul/pipeline/process_iteration.py | 4 +-
capsul/pipeline/python_export.py | 26 ++-
.../test/fake_morphologist/acpcorientation.py | 2 +-
.../test/fake_morphologist/aimsconverter.py | 2 +-
.../baladinnormalizationtoaims.py | 2 +-
.../fake_morphologist/brainsegmentation.py | 2 +-
.../test/fake_morphologist/brainvolumes.py | 2 +-
.../fslnormalizationtoaims.py | 2 +-
.../greywhiteclassificationhemi.py | 2 +-
.../test/fake_morphologist/greywhitemesh.py | 2 +-
.../fake_morphologist/greywhitetopology.py | 2 +-
.../test/fake_morphologist/histoanalysis.py | 2 +-
.../test/fake_morphologist/importt1mri.py | 2 +-
.../test/fake_morphologist/morpho_report.py | 2 +-
.../normalization_aimsmiregister.py | 2 +-
.../normalization_baladin.py | 2 +-
.../normalization_fsl_reinit.py | 2 +-
.../normalization_t1_spm12_reinit.py | 2 +-
.../normalization_t1_spm8_reinit.py | 2 +-
.../test/fake_morphologist/pialmesh.py | 2 +-
.../test/fake_morphologist/reorientanatomy.py | 2 +-
.../test/fake_morphologist/scalpmesh.py | 2 +-
.../test/fake_morphologist/skullstripping.py | 2 +-
.../test/fake_morphologist/splitbrain.py | 2 +-
.../test/fake_morphologist/spmsn3dtoaims.py | 2 +-
.../fake_morphologist/sulcideeplabeling.py | 2 +-
.../test/fake_morphologist/sulcigraph.py | 2 +-
.../sulcigraphmorphometrybysubject.py | 2 +-
.../fake_morphologist/sulcilabellingann.py | 2 +-
.../sulcilabellingspamglobal.py | 2 +-
.../sulcilabellingspamlocal.py | 2 +-
.../sulcilabellingspammarkov.py | 2 +-
.../test/fake_morphologist/sulciskeleton.py | 2 +-
.../fake_morphologist/t1biascorrection.py | 2 +-
.../talairachtransformation.py | 2 +-
...alairachtransformationfromnormalization.py | 2 +-
capsul/pipeline/test/test_activation.py | 2 +-
.../test/test_complex_pipeline_activations.py | 8 +-
capsul/pipeline/test/test_custom_nodes.py | 12 +-
capsul/pipeline/test/test_double_switch.py | 2 +-
.../pipeline/test/test_iterative_process.py | 2 +-
.../test/test_optional_output_switch.py | 2 +-
capsul/pipeline/test/test_pipeline.py | 4 +-
.../pipeline/test/test_pipeline_parameters.py | 2 +-
.../pipeline/test/test_pipeline_workflow.py | 4 +-
capsul/pipeline/test/test_switch_pipeline.py | 2 +-
.../pipeline/test/test_switch_subpipeline.py | 8 +-
capsul/pipeline/test/test_temporary.py | 1 -
capsul/pipeline/topological_sort.py | 12 +-
capsul/process/nipype_process.py | 16 +-
capsul/process/node.py | 4 +-
capsul/process/process.py | 30 +--
.../test/test_load_from_description.py | 12 +-
capsul/process/test/test_metadata_schema.py | 4 +-
capsul/process/test/test_runprocess.py | 2 +-
capsul/qt_apps/pipeline_viewer_app.py | 3 +-
capsul/qt_apps/resources/icones.py | 1 -
capsul/qt_apps/utils/application.py | 3 +-
capsul/qt_apps/utils/fill_treectrl.py | 5 +-
capsul/qt_apps/utils/find_pipelines.py | 18 +-
capsul/qt_apps/utils/window.py | 3 +-
capsul/qt_gui/widgets/activation_inspector.py | 24 +--
.../widgets/attributed_process_widget.py | 2 +-
capsul/qt_gui/widgets/config_gui.py | 4 +-
capsul/qt_gui/widgets/links_debugger.py | 6 +-
.../qt_gui/widgets/pipeline_developer_view.py | 184 +++++++++---------
.../widgets/pipeline_file_warning_widget.py | 2 +-
capsul/qt_gui/widgets/pipeline_user_view.py | 12 +-
capsul/qt_gui/widgets/settings_editor.py | 10 +-
capsul/qt_gui/widgets/viewer_widget.py | 2 +-
capsul/run.py | 2 +-
capsul/sphinxext/__init__.py | 2 +-
capsul/sphinxext/capsul_pipeline_rst.py | 22 +--
capsul/sphinxext/capsul_pipeline_view.py | 19 +-
capsul/sphinxext/capsul_sphinx_layout.py | 19 +-
capsul/sphinxext/capsul_usecases_rst.py | 14 +-
capsul/sphinxext/layoutdocgen.py | 35 ++--
capsul/sphinxext/load_pilots.py | 3 +-
capsul/sphinxext/pipelinedocgen.py | 32 +--
.../resources/custom_ext/hidden_code_block.py | 3 +-
.../custom_ext/hidden_technical_block.py | 13 +-
.../resources/custom_ext/link_to_block.py | 7 +-
capsul/sphinxext/resources/installation.rst | 2 +-
.../resources/numpy_ext/docscrape.py | 29 ++-
.../resources/numpy_ext/docscrape_sphinx.py | 16 +-
.../sphinxext/resources/numpy_ext/numpydoc.py | 9 +-
capsul/sphinxext/test/test_usercases_doc.py | 4 -
capsul/sphinxext/usecasesdocgen.py | 22 +--
capsul/test/test_completion.py | 6 +-
capsul/test/test_fake_morphologist.py | 6 +-
capsul/test/test_tiny_morphologist.py | 6 +-
capsul/ui/static/engine.html | 2 +-
completion.md | 6 +-
doc/source/conf.py | 9 +-
doc/source/installation.rst | 2 +-
doc/source/sphinxext/numpy_ext/docscrape.py | 29 ++-
.../sphinxext/numpy_ext/docscrape_sphinx.py | 16 +-
doc/source/sphinxext/numpy_ext/numpydoc.py | 7 +-
doc/source/status.rst | 2 +-
doc/source/user_guide_tree/advanced_usage.rst | 4 +-
doc/source/user_guide_tree/xml_spec.rst | 10 +-
readme.md | 2 +-
119 files changed, 485 insertions(+), 535 deletions(-)
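Reviewer note (placed below the diffstat, where "git am" ignores it): besides
the spelling corrections, this patch mechanically modernizes several Python
2-era idioms. The rewrites look like the output of tools such as
"codespell -w" and "pyupgrade --py3-plus" — an assumption, since the commit
does not say how they were generated. A minimal runnable sketch of the idiom
changes applied throughout, using hypothetical names not taken from Capsul:

    # Illustrative only: each rewrite below mirrors a pattern in this patch.

    class Base:
        def __init__(self, name):
            self.name = name

    class Node(Base):
        def __init__(self, name):
            # before: super(Node, self).__init__(name)
            super().__init__(name)  # zero-argument super() (Python 3 only)

    # before: "{0}.{1}.{2}".format(3, 1, 0)  or  "%s.%s.%s" % (3, 1, 0)
    version = "{}.{}.{}".format(3, 1, 0)  # auto-numbered format fields

    # before: set(["model", "base"])
    plugs = {"model", "base"}  # set literal

    # before: dict((p, v) for p, v in [("a", 1)] if v is not None)
    params = {p: v for p, v in [("a", 1)] if v is not None}  # dict comprehension

    # before: io.open(path, "r", encoding="utf8") -- io.open and mode "r" are
    # redundant in Python 3, so the patch calls the open() builtin directly.

    # before: a, b = [int(i) for i in row] -- a generator expression avoids
    # building a throwaway list before unpacking.
    a, b = (int(i) for i in ("1", "2"))

    print(Node("n").name, version, sorted(plugs), params, a, b)
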
diff --git a/capsul/application.py b/capsul/application.py
index fb43d6285..801557353 100644
--- a/capsul/application.py
+++ b/capsul/application.py
@@ -55,7 +55,7 @@ def _is_nipype_interface_subclass(obj):
class Capsul:
"""User entry point to Capsul features.
This objects reads Capsul configuration in site and user environments.
- It allows configuration customization and instanciation of a
+ It allows configuration customization and instantiation of a
CapsulEngine instance to reach an execution environment.
If database_path is given, it replaces
@@ -255,13 +255,13 @@ def executable(definition, **kwargs):
except ImportError as e:
raise TypeError(
f"Class {definition} cannot be used to create a Process "
- "beacause its module cannot be imported : {e}"
+ "because its module cannot be imported : {e}"
)
cls = getattr(module, object_name, None)
if cls is not definition:
raise TypeError(
f"Class {definition} cannot be used to create a Process "
- f"beacause variable {object_name} of module {module_name} "
+ f"because variable {object_name} of module {module_name} "
f"contains {cls}"
)
result = definition(definition=f"{module_name}.{object_name}")
diff --git a/capsul/config/configuration.py b/capsul/config/configuration.py
index 2f38c0f23..a8d5b1a4d 100644
--- a/capsul/config/configuration.py
+++ b/capsul/config/configuration.py
@@ -168,7 +168,7 @@ class EngineConfiguration(Controller):
)
def add_module(self, module_name, allow_existing=False):
- """Loads a modle and adds it in the engine configuration.
+ """Loads a module and adds it in the engine configuration.
This operation is performed automatically, thus should not need to be
called manually.
diff --git a/capsul/database/__init__.py b/capsul/database/__init__.py
index 948073eb4..e9e00e69d 100644
--- a/capsul/database/__init__.py
+++ b/capsul/database/__init__.py
@@ -499,7 +499,7 @@ def worker_ended(self, engine_id, worker_id):
def persistent(self, engine_id):
"""
- Return wether an engine is persistent or not.
+ Return whether an engine is persistent or not.
"""
raise NotImplementedError
@@ -512,7 +512,7 @@ def set_persistent(self, engine_id, persistent):
def dispose_engine(self, engine_id):
"""
Tell Capsul that this engine will not be used anymore by any client.
- The ressource it uses must be freed as soon as possible. If no
+ The resource it uses must be freed as soon as possible. If no
execution is running, engine is destroyed. Otherwise, workers will
process ongoing executions and cleanup when done.
"""
diff --git a/capsul/database/redis.py b/capsul/database/redis.py
index 2e2b4626a..b5f5d1f45 100644
--- a/capsul/database/redis.py
+++ b/capsul/database/redis.py
@@ -559,7 +559,7 @@ def dispose_engine(self, engine_id):
# Removes association between label and engine_id
self.redis.hdel("capsul:engine", label)
self.redis.hdel(f"capsul:{engine_id}", "label")
- # Check if some executions had been submited or are ongoing
+ # Check if some executions had been submitted or are ongoing
# An empty list modified with Redis Lua scripts may be encoded as empty dict
executions = json.loads(
self.redis.hget(f"capsul:{engine_id}", "executions")
diff --git a/capsul/database/sqlite.py b/capsul/database/sqlite.py
index 458877f3f..04eb7b69e 100644
--- a/capsul/database/sqlite.py
+++ b/capsul/database/sqlite.py
@@ -471,7 +471,7 @@ def job_finished_json(
sql = "SELECT ready, ongoing, failed, waiting, done FROM capsul_execution WHERE engine_id=? AND execution_id=?"
row = sqlite.execute(sql, [engine_id, execution_id]).fetchone()
- ready, ongoing, failed, waiting, done = [json.loads(i) for i in row]
+ ready, ongoing, failed, waiting, done = (json.loads(i) for i in row)
ongoing.remove(job_id)
if return_code != 0:
failed.append(job_id)
diff --git a/capsul/dataset.py b/capsul/dataset.py
index 351c528bc..8a9cc7166 100644
--- a/capsul/dataset.py
+++ b/capsul/dataset.py
@@ -29,7 +29,7 @@ class Dataset(Controller):
Dataset representation.
You don't need to define or instantiate this class yourself, it will be done automatically and internally in the path generation system.
- Instead, users need to define datsets in the Capsul config. See :func:`generate_paths`.
+ Instead, users need to define datasets in the Capsul config. See :func:`generate_paths`.
"""
path: Directory
@@ -676,7 +676,7 @@ def apply(self, metadata, process, parameter, initial_meta):
continue
if callable(v):
if debug:
- print("call modifier funciton for", k)
+ print("call modifier function for", k)
print(
":",
v(
@@ -700,7 +700,7 @@ def apply(self, metadata, process, parameter, initial_meta):
setattr(metadata, k, v)
else:
if debug:
- print("call modifier funciton")
+ print("call modifier function")
modifier(metadata, process, parameter, initial_meta=initial_meta)
diff --git a/capsul/engine/__init__.py b/capsul/engine/__init__.py
index 36493ead0..ddf7149f6 100644
--- a/capsul/engine/__init__.py
+++ b/capsul/engine/__init__.py
@@ -19,7 +19,7 @@ def execution_context(engine_label, engine_config, executable):
# {'spm': {'spm12-standalone': {...}, 'spm8': {...}}
# whereas EXecutionContext expects an execution-side single, filtered
# config: {'spm': {...}}
- # Thie filtering is done here in this function, but later after the context
+ # This filtering is done here in this function, but later, after the context
# is built.
# So for now, give it only the dataset and config_modules part, removing
# all modules config.
diff --git a/capsul/info.py b/capsul/info.py
index 55c653b6b..3c01c1e28 100644
--- a/capsul/info.py
+++ b/capsul/info.py
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
import os.path
import sys
@@ -19,7 +18,7 @@
_version_extra = version_extra
# Expected by setup.py: string of form "X.Y.Z"
-__version__ = "{0}.{1}.{2}".format(version_major, version_minor, version_micro)
+__version__ = "{}.{}.{}".format(version_major, version_minor, version_micro)
brainvisa_dependencies = [
"soma-base",
@@ -84,25 +83,25 @@
PROVIDES = ["capsul"]
REQUIRES = [
"redis <4.5.0",
- "pydantic >={0}".format(PYDANTIC_MIN_VERSION),
- "soma-base >={0}".format(SOMA_MIN_VERSION),
- "soma-workflow >={0}".format(SOMA_WORKFLOW_MIN_VERSION),
- "populse-db >={0}".format(POPULSE_DB_MIN_VERSION),
+ "pydantic >={}".format(PYDANTIC_MIN_VERSION),
+ "soma-base >={}".format(SOMA_MIN_VERSION),
+ "soma-workflow >={}".format(SOMA_WORKFLOW_MIN_VERSION),
+ "populse-db >={}".format(POPULSE_DB_MIN_VERSION),
"PyYAML",
]
EXTRA_REQUIRES = {
"test": ["pytest", "jupyter"],
"doc": [
"sphinx >=1.0",
- "numpy >={0}".format(NUMPY_MIN_VERSION),
+ "numpy >={}".format(NUMPY_MIN_VERSION),
],
"nipype": [
"traits >={}".format(TRAITS_MIN_VERSION),
- "numpy >={0}".format(NUMPY_MIN_VERSION),
- "scipy >={0}".format(SCIPY_MIN_VERSION),
- "nibabel >={0}".format(NIBABEL_MIN_VERSION),
- "networkx >={0}".format(NETWORKX_MIN_VERSION),
- "nipype =={0}".format(NIPYPE_VERSION),
+ "numpy >={}".format(NUMPY_MIN_VERSION),
+ "scipy >={}".format(SCIPY_MIN_VERSION),
+ "nibabel >={}".format(NIBABEL_MIN_VERSION),
+ "networkx >={}".format(NETWORKX_MIN_VERSION),
+ "nipype =={}".format(NIPYPE_VERSION),
],
}
diff --git a/capsul/pipeline/custom_nodes/cv_node.py b/capsul/pipeline/custom_nodes/cv_node.py
index 896a01104..ed07ba0de 100644
--- a/capsul/pipeline/custom_nodes/cv_node.py
+++ b/capsul/pipeline/custom_nodes/cv_node.py
@@ -26,7 +26,7 @@ def __init__(self, pipeline, name, input_type=None):
in_fields.append({"name": tr, "optional": True})
for tr in out_fieldsl:
out_fields.append({"name": tr, "optional": True})
- super(CrossValidationFoldNode, self).__init__(
+ super().__init__(
None, pipeline, name, in_fields, out_fields
)
if input_type:
diff --git a/capsul/pipeline/custom_nodes/loo_node.py b/capsul/pipeline/custom_nodes/loo_node.py
index 12f61d2b3..143fd87e8 100644
--- a/capsul/pipeline/custom_nodes/loo_node.py
+++ b/capsul/pipeline/custom_nodes/loo_node.py
@@ -4,7 +4,6 @@
"""
-from __future__ import absolute_import
from capsul.process.node import Node
from soma.controller import Controller, Any, type_from_str
@@ -47,7 +46,7 @@ def __init__(
in_fields.append({"name": tr, "optional": True})
for tr in out_fieldsl:
out_fields.append({"name": tr, "optional": True})
- super(LeaveOneOutNode, self).__init__(
+ super().__init__(
None, pipeline, name, in_fields, out_fields
)
if input_type:
diff --git a/capsul/pipeline/custom_nodes/map_node.py b/capsul/pipeline/custom_nodes/map_node.py
index 0b7f4ac0c..55fafae4a 100644
--- a/capsul/pipeline/custom_nodes/map_node.py
+++ b/capsul/pipeline/custom_nodes/map_node.py
@@ -4,7 +4,6 @@
"""
-from __future__ import absolute_import
from capsul.process.node import Node, Plug
from soma.controller import Controller, File, undefined, field, type_from_str
@@ -54,7 +53,7 @@ def __init__(
for tr in input_names:
in_fields.append({"name": tr, "optional": False})
- super(MapNode, self).__init__(None, pipeline, name, in_fields, out_fields)
+ super().__init__(None, pipeline, name, in_fields, out_fields)
for tr, ptype in zip(input_names, ptypes):
self.add_field(tr, list[ptype], output=False, default_factory=list)
diff --git a/capsul/pipeline/custom_nodes/reduce_node.py b/capsul/pipeline/custom_nodes/reduce_node.py
index 8d49044fc..1b7df4ae4 100644
--- a/capsul/pipeline/custom_nodes/reduce_node.py
+++ b/capsul/pipeline/custom_nodes/reduce_node.py
@@ -51,7 +51,7 @@ def __init__(
for tr in output_names:
out_fields.append({"name": tr, "optional": False})
- super(ReduceNode, self).__init__(None, pipeline, name, in_fields, out_fields)
+ super().__init__(None, pipeline, name, in_fields, out_fields)
for tr, ptype in zip(output_names, ptypes):
self.add_field(
diff --git a/capsul/pipeline/custom_nodes/strcat_node.py b/capsul/pipeline/custom_nodes/strcat_node.py
index 8c7eb7202..150c58483 100644
--- a/capsul/pipeline/custom_nodes/strcat_node.py
+++ b/capsul/pipeline/custom_nodes/strcat_node.py
@@ -3,7 +3,6 @@
-------------------
"""
-from __future__ import absolute_import
from capsul.process.node import Node
from soma.controller import Controller, Any, type_from_str
@@ -62,7 +61,7 @@ def __init__(
node_inputs.append(
{"name": concat_plug, "optional": concat_plug in make_optional}
)
- super(StrCatNode, self).__init__(
+ super().__init__(
None, pipeline, name, node_inputs, node_outputs
)
self._concat_sequence = params
diff --git a/capsul/pipeline/custom_nodes/strconv.py b/capsul/pipeline/custom_nodes/strconv.py
index c8b6a0a74..a47267d0a 100644
--- a/capsul/pipeline/custom_nodes/strconv.py
+++ b/capsul/pipeline/custom_nodes/strconv.py
@@ -24,7 +24,7 @@ def __init__(self, pipeline, name, input_type=None):
in_fields.append({"name": tr, "optional": True})
for tr in out_fieldsl:
out_fields.append({"name": tr, "optional": True})
- super(StrConvNode, self).__init__(None, pipeline, name, in_fields, out_fields)
+ super().__init__(None, pipeline, name, in_fields, out_fields)
if input_type:
ptype = input_type
else:
diff --git a/capsul/pipeline/pipeline.py b/capsul/pipeline/pipeline.py
index e484aa508..de8101148 100644
--- a/capsul/pipeline/pipeline.py
+++ b/capsul/pipeline/pipeline.py
@@ -143,7 +143,7 @@ def pipeline_definition(self):
Attributes
----------
nodes: dict {node_name: node}
- a dictionary containing the pipline nodes and where the pipeline node
+ a dictionary containing the pipeline nodes, where the pipeline node
name is ''
"""
@@ -217,8 +217,8 @@ def __init__(self, autoexport_nodes_parameters=None, **kwargs):
raise TypeError("No definition string given to Pipeline constructor")
# Inheritance
- super(Pipeline, self).__init__(**kwargs)
- super(Pipeline, self).add_field(
+ super().__init__(**kwargs)
+ super().add_field(
"nodes_activation", Controller, hidden=self.hide_nodes_activation
)
@@ -400,7 +400,7 @@ def add_process(
if name in self.nodes:
raise ValueError(
"Pipeline cannot have two nodes with the "
- "same name : {0}".format(name)
+ "same name : {}".format(name)
)
if skip_invalid:
@@ -464,7 +464,7 @@ def remove_node(self, node_name):
if not plug.output:
for link_def in list(plug.links_from):
src_node, src_plug = link_def[:2]
- link_descr = "%s.%s->%s.%s" % (
+ link_descr = "{}.{}->{}.{}".format(
src_node,
src_plug,
node_name,
@@ -474,7 +474,7 @@ def remove_node(self, node_name):
else:
for link_def in list(plug.links_to):
dst_node, dst_plug = link_def[:2]
- link_descr = "%s.%s->%s.%s" % (
+ link_descr = "{}.{}->{}.{}".format(
node_name,
plug_name,
dst_node,
@@ -574,7 +574,7 @@ def create_switch(
Each key of this dictionary is a possible value for
the switch parameter. The corresponding dictionary value contains
all the links between other nodes plugs and switch outputs that are
- activated when the value is selected. Theses links are given as
+ activated when the value is selected. These links are given as
a dictionary whose items are (output, source) where output is the
name of an output parameter and source is a string containing a
node name and a parameter name separated by a dot (or just a
@@ -589,7 +589,7 @@ def create_switch(
default the value is taken from the first connected source.
switch_value: str (optional)
Initial value of the switch parameter (one of the inputs names).
- Defaults to fisrt possible switch value.
+ Defaults to first possible switch value.
Examples
--------
@@ -669,13 +669,13 @@ def add_switch(
output_types=None,
switch_value=None,
):
- """Obsolete. May create a non functionnal switch. Use create_switch()
+ """Obsolete. May create a non functional switch. Use create_switch()
instead.
"""
# Check the unicity of the name we want to insert
if name in self.nodes:
raise ValueError(
- "Pipeline cannot have two nodes with the same " "name: {0}".format(name)
+ "Pipeline cannot have two nodes with the same " "name: {}".format(name)
)
# Create the node
@@ -864,7 +864,7 @@ def parse_parameter(self, name, check=True):
node = None
plug = None
else:
- raise ValueError("{0} is not a valid node name".format(node_name))
+ raise ValueError("{} is not a valid node name".format(node_name))
plug_name = name[dot + 1 :]
# Check if plug nexists
@@ -885,8 +885,8 @@ def parse_parameter(self, name, check=True):
break
if err and check:
raise ValueError(
- "'{0}' is not a valid parameter name for "
- "node '{1}'".format(
+ "'{}' is not a valid parameter name for "
+ "node '{}'".format(
plug_name, (node_name if node_name else "pipeline")
)
)
@@ -968,16 +968,16 @@ def add_link(self, link, weak_link=False, allow_export=False):
# Assure that pipeline plugs are not linked
if not source_plug.output and source_node is not self:
- raise ValueError("Cannot link from an input plug: {0}".format(link))
+ raise ValueError("Cannot link from an input plug: {}".format(link))
if source_plug.output and source_node is self:
raise ValueError(
- "Cannot link from a pipeline output " "plug: {0}".format(link)
+ "Cannot link from a pipeline output " "plug: {}".format(link)
)
if dest_plug.output and dest_node is not self:
- raise ValueError("Cannot link to an output plug: {0}".format(link))
+ raise ValueError("Cannot link to an output plug: {}".format(link))
if not dest_plug.output and dest_node is self:
raise ValueError(
- "Cannot link to a pipeline input " "plug: {0}".format(link)
+ "Cannot link to a pipeline input " "plug: {}".format(link)
)
# Propagate the plug value from source to destination
@@ -1924,10 +1924,10 @@ def pipeline_state(self):
)
plugs_list.append((plug_name, plug_dict))
for nn, pn, n, p, weak_link in plug.links_to:
- link_name = "%s:%s" % (n.full_name, pn)
+ link_name = "{}:{}".format(n.full_name, pn)
links_to_dict[link_name] = weak_link
for nn, pn, n, p, weak_link in plug.links_from:
- link_name = "%s:%s" % (n.full_name, pn)
+ link_name = "{}:{}".format(n.full_name, pn)
links_from_dict[link_name] = weak_link
return result
@@ -1946,17 +1946,17 @@ def compare_to_state(self, pipeline_state):
def compare_dict(ref_dict, other_dict):
for ref_key, ref_value in ref_dict.items():
if ref_key not in other_dict:
- yield "%s = %s is missing" % (ref_key, repr(ref_value))
+ yield "{} = {} is missing".format(ref_key, repr(ref_value))
else:
other_value = other_dict.pop(ref_key)
if ref_value != other_value:
- yield "%s = %s differs from %s" % (
+ yield "{} = {} differs from {}".format(
ref_key,
repr(ref_value),
repr(other_value),
)
for other_key, other_value in other_dict.items():
- yield "%s=%s is new" % (other_key, repr(other_value))
+ yield "{}={} is new".format(other_key, repr(other_value))
pipeline_state = deepcopy(pipeline_state)
for node in self.all_nodes():
@@ -1967,7 +1967,7 @@ def compare_dict(ref_dict, other_dict):
else:
plugs_list = OrderedDict(node_dict.pop("plugs"))
result.extend(
- 'in node "%s": %s' % (node_name, i)
+ 'in node "{}": {}'.format(node_name, i)
for i in compare_dict(
dict(
name=node.name,
@@ -1999,7 +1999,7 @@ def compare_dict(ref_dict, other_dict):
links_to_dict = plug_dict.pop("links_to")
links_from_dict = plug_dict.pop("links_from")
result.extend(
- 'in plug "%s:%s": %s' % (node_name, plug_name, i)
+ 'in plug "{}:{}": {}'.format(node_name, plug_name, i)
for i in compare_dict(
dict(
enabled=plug.enabled,
@@ -2012,7 +2012,7 @@ def compare_dict(ref_dict, other_dict):
)
)
for nn, pn, n, p, weak_link in plug.links_to:
- link_name = "%s:%s" % (n.full_name, pn)
+ link_name = "{}:{}".format(n.full_name, pn)
if link_name not in links_to_dict:
result.append(
'in plug "%s:%s": missing link to %s'
@@ -2042,7 +2042,7 @@ def compare_dict(ref_dict, other_dict):
)
)
for nn, pn, n, p, weak_link in plug.links_from:
- link_name = "%s:%s" % (n.full_name, pn)
+ link_name = "{}:{}".format(n.full_name, pn)
if link_name not in links_from_dict:
result.append(
'in plug "%s:%s": missing link from '
@@ -2144,7 +2144,7 @@ def add_pipeline_step(self, step_name, nodes, enabled=True):
"logical order regarding the workflow streams. They are "
"different from sub-pipelines in that steps are purely "
"virtual groups, they do not have parameters. To activate "
- "or diasable a step, just do:\n"
+ "or disable a step, just do:\n"
"pipeline.steps.my_step = False\n"
"\n"
"To get the nodes list in a step:\n"
@@ -2313,7 +2313,7 @@ def check_requirements(self, environment="global", message_list=None):
A pipeline will return a list of unique configuration values.
"""
# start with pipeline-level requirements
- conf = super(Pipeline, self).check_requirements(
+ conf = super().check_requirements(
environment, message_list=message_list
)
if conf is None:
@@ -2527,7 +2527,7 @@ def get_linked_items(
not activated.
The result is a generator of pairs (node, plug_name).
- direction may be a sting, 'links_from', 'links_to', or a tuple
+ direction may be a string, 'links_from', 'links_to', or a tuple
('links_from', 'links_to').
"""
if plug_name is None:
diff --git a/capsul/pipeline/pipeline_nodes.py b/capsul/pipeline/pipeline_nodes.py
index d09e8da90..c6f92f04d 100644
--- a/capsul/pipeline/pipeline_nodes.py
+++ b/capsul/pipeline/pipeline_nodes.py
@@ -135,7 +135,7 @@ def __init__(
raise Exception(
"The Switch node input and output parameters "
"are inconsistent: expect list, "
- "got {0}, {1}".format(type(inputs), type(outputs))
+ "got {}, {}".format(type(inputs), type(outputs))
)
# private copy of outputs and inputs
@@ -147,7 +147,7 @@ def __init__(
for switch_name in inputs:
flat_inputs.extend(
[
- "{0}_switch_{1}".format(switch_name, plug_name)
+ "{}_switch_{}".format(switch_name, plug_name)
for plug_name in outputs
]
)
@@ -320,14 +320,14 @@ def _any_attribute_changed(self, new, old, name):
def __setstate__(self, state):
self.__block_output_propagation = True
- super(Switch, self).__setstate__(state)
+ super().__setstate__(state)
def get_connections_through(self, plug_name, single=False):
if not self.activated or not self.enabled:
return []
plug = self.plugs[plug_name]
if plug.output:
- connected_plug_name = "%s_switch_%s" % (self.switch, plug_name)
+ connected_plug_name = "{}_switch_{}".format(self.switch, plug_name)
else:
splitter = plug_name.split("_switch_")
if len(splitter) != 2:
diff --git a/capsul/pipeline/pipeline_tools.py b/capsul/pipeline/pipeline_tools.py
index 307b66cba..764ab3875 100644
--- a/capsul/pipeline/pipeline_tools.py
+++ b/capsul/pipeline/pipeline_tools.py
@@ -344,7 +344,7 @@ def _link_color(plug, link):
if use_nodes_pos:
pos = nodes_pos.get(id)
if pos is not None:
- node_props.update({"pos": "%f,%f" % (pos[0] * scale, -pos[1] * scale)})
+ node_props.update({"pos": "{:f},{:f}".format(pos[0] * scale, -pos[1] * scale)})
size = nodes_sizes.get(id)
if size is not None:
node_props.update(
@@ -375,7 +375,7 @@ def _link_color(plug, link):
edge = (id, dest)
old_edge = edges.get(edge)
if old_edge is not None:
- # use stongest color/style
+ # use strongest color/style
if not old_edge[2]:
weak = False
style = old_edge[0]["style"]
@@ -463,7 +463,7 @@ def dot_graph_from_workflow(
if use_nodes_pos:
pos = pipeline.node_position.get(n)
if pos is not None:
- node_props.update({"pos": "%f,%f" % (pos[0] * scale, -pos[1] * scale)})
+ node_props.update({"pos": "{:f},{:f}".format(pos[0] * scale, -pos[1] * scale)})
size = nodes_sizes.get(n)
if size is not None:
node_props.update(
@@ -524,13 +524,13 @@ def _str_repr(item):
)
if len(props) != 0:
attstr = " " + attstr
- fileobj.write(' %s [label="%s" style="filled"%s];\n' % (id, node, attstr))
+ fileobj.write(' {} [label="{}" style="filled"{}];\n'.format(id, node, attstr))
for edge, descr in dot_graph[1].items():
props = descr[0]
attstr = " ".join(
["=".join([aname, _str_repr(val)]) for aname, val in props.items()]
)
- fileobj.write(' "%s" -> "%s" [%s];\n' % (edge[0], edge[1], attstr))
+ fileobj.write(' "{}" -> "{}" [{}];\n'.format(edge[0], edge[1], attstr))
fileobj.write("}\n")
@@ -734,7 +734,7 @@ def nodes_with_existing_outputs(
process = node
if recursive and isinstance(process, Pipeline):
nodes += [
- ("%s.%s" % (node_name, new_name), new_node)
+ ("{}.{}".format(node_name, new_name), new_node)
for new_name, new_node in process.nodes.items()
if new_name != ""
]
@@ -810,7 +810,7 @@ def nodes_with_missing_inputs(pipeline, recursive=True):
process = node.process
if recursive and isinstance(process, Pipeline):
nodes += [
- ("%s.%s" % (node_name, new_name), new_node)
+ ("{}.{}".format(node_name, new_name), new_node)
for new_name, new_node in process.nodes.items()
if new_name != ""
]
@@ -1215,7 +1215,7 @@ def load_pipeline_parameters(filename, pipeline):
"""
if filename:
- with io.open(filename, "r", encoding="utf8") as file:
+ with open(filename, encoding="utf8") as file:
dic = json.load(file)
if "pipeline_parameters" not in dic:
@@ -1262,7 +1262,7 @@ def find_node(pipeline, node):
if sn is not n and isinstance(sn, Pipeline):
pipelines.append((sn, names + [sk]))
- raise KeyError("Node %s not found in the pipeline %s" % (node.name, pipeline.name))
+ raise KeyError("Node {} not found in the pipeline {}".format(node.name, pipeline.name))
def nodes_full_names(executable):
@@ -1378,7 +1378,7 @@ def __init__(self, **kwargs):
has_default = True
elif field.default_factory != dataclasses.MISSING:
# difficult/implssible to replicate...
- class def_fac(object):
+ class def_fac:
def __init__(self, value):
self.value = value
@@ -1392,11 +1392,11 @@ def __repr__(self):
meta["optional"] = True
meta_str = ""
if meta:
- meta_str = ", ".join("%s=%s" % (k, repr(v)) for k, v in meta.items())
+ meta_str = ", ".join("{}={}".format(k, repr(v)) for k, v in meta.items())
meta_str = ", " + meta_str
- f.write(' self.add_field("%s", %s%s)\n' % (name, t_str, meta_str))
+ f.write(' self.add_field("{}", {}{})\n'.format(name, t_str, meta_str))
if value is not undefined:
- f.write(" self.%s = %s\n" % (name, repr(value)))
+ f.write(" self.{} = {}\n".format(name, repr(value)))
f.write(
"""
diff --git a/capsul/pipeline/process_iteration.py b/capsul/pipeline/process_iteration.py
index 4653354a0..ac78f6899 100644
--- a/capsul/pipeline/process_iteration.py
+++ b/capsul/pipeline/process_iteration.py
@@ -26,7 +26,7 @@ def __init__(self, definition, process, iterative_parameters, context_name=None)
# Avoid circular import
from capsul.api import executable
- super(ProcessIteration, self).__init__(definition=definition)
+ super().__init__(definition=definition)
self.process = executable(process)
if context_name is not None:
self.process.context_name = context_name
@@ -133,7 +133,7 @@ def iteration_size(self):
raise ValueError(
"Iterative parameter values must be lists of the same size: %s"
% "\n".join(
- "%s=%s" % (n, len(getattr(self, n)))
+ "{}={}".format(n, len(getattr(self, n)))
for n in self.iterative_parameters
if getattr(self, n) is not undefined
)
diff --git a/capsul/pipeline/python_export.py b/capsul/pipeline/python_export.py
index 9ae3dce2a..139b6d34d 100644
--- a/capsul/pipeline/python_export.py
+++ b/capsul/pipeline/python_export.py
@@ -7,8 +7,6 @@
------------------------
"""
-from __future__ import print_function
-from __future__ import absolute_import
from soma.controller import Controller, undefined
import os
@@ -75,7 +73,7 @@ def _write_process(process, pyf, name, enabled, skip_invalid):
if skip_invalid:
node_options += ", skip_invalid=True"
print(
- ' self.add_process("%s", "%s"%s)' % (name, procname, node_options),
+ ' self.add_process("{}", "{}"{})'.format(name, procname, node_options),
file=pyf,
)
@@ -142,7 +140,7 @@ def _write_process(process, pyf, name, enabled, skip_invalid):
)
if isinstance(snode, Pipeline):
- sself_str = '%s.nodes["%s"]' % (self_str, "%s")
+ sself_str = '{}.nodes["{}"]'.format(self_str, "%s")
for node_name, snode in snode.nodes.items():
scnode = cnode.nodes[node_name]
@@ -179,9 +177,9 @@ def _write_custom_node(node, pyf, name, enabled):
nodename = ".".join((mod, classname))
if hasattr(node, "configured_controller"):
c = node.configured_controller()
- params = dict(
- (p, v) for p, v in c.asdict().items() if v not in (None, undefined)
- )
+ params = {
+ p: v for p, v in c.asdict().items() if v not in (None, undefined)
+ }
print(
' self.add_custom_node("%s", "%s", %s)'
% (name, nodename, get_repr_value(params)),
@@ -189,7 +187,7 @@ def _write_custom_node(node, pyf, name, enabled):
)
else:
print(
- ' self.add_custom_node("%s", "%s")' % (name, nodename), file=pyf
+ ' self.add_custom_node("{}", "{}")'.format(name, nodename), file=pyf
)
# optional plugs
for plug_name, plug in node.plugs.items():
@@ -360,14 +358,14 @@ def _write_links(pipeline, pyf):
exported_plug = _write_export(pipeline, pyf, src)
exported.add(src)
else:
- src = "%s.%s" % (node_name, plug_name)
+ src = "{}.{}".format(node_name, plug_name)
if link[0] == "":
dst = link[1]
if dst not in exported:
exported_plug = _write_export(pipeline, pyf, dst)
exported.add(dst)
else:
- dst = "%s.%s" % (link[0], link[1])
+ dst = "{}.{}".format(link[0], link[1])
if not exported_plug or ".".join(exported_plug) not in (
src,
dst,
@@ -422,7 +420,7 @@ def _write_nodes_positions(pipeline, pyf):
if not isinstance(pos, (list, tuple)):
# pos is probably a QPointF
pos = (pos.x(), pos.y())
- print(' "%s": %s,' % (node_name, repr(pos)), file=pyf)
+ print(' "{}": {},'.format(node_name, repr(pos)), file=pyf)
print(" }", file=pyf)
if hasattr(pipeline, "scene_scale_factor"):
print(
@@ -440,7 +438,7 @@ def _write_nodes_dimensions(pipeline, pyf):
for node_name, dim in pipeline.node_dimension.items():
if not isinstance(dim, (list, tuple)):
dim = (dim.width(), dim.height())
- print(' "%s": %s,' % (node_name, repr(dim)), file=pyf)
+ print(' "{}": {},'.format(node_name, repr(dim)), file=pyf)
print(" }", file=pyf)
######################################################
@@ -461,7 +459,7 @@ def _write_doc(pipeline, pyf):
for i in notepos:
if (
splitdoc[i + 2].find(
- "* Type '{0}.help()'".format(
+ "* Type '{}.help()'".format(
pipeline.__class__.__name__
)
)
@@ -500,7 +498,7 @@ def _write_values(pipeline, pyf):
if first:
first = False
print("\n # default and initial values", file=pyf)
- print(" self.%s = %s" % (param_name, value_repr), file=pyf)
+ print(" self.{} = {}".format(param_name, value_repr), file=pyf)
class_name = type(pipeline).__name__
if class_name == "Pipeline":
diff --git a/capsul/pipeline/test/fake_morphologist/acpcorientation.py b/capsul/pipeline/test/fake_morphologist/acpcorientation.py
index 192e316fd..744b60958 100644
--- a/capsul/pipeline/test/fake_morphologist/acpcorientation.py
+++ b/capsul/pipeline/test/fake_morphologist/acpcorientation.py
@@ -10,7 +10,7 @@
class AcpcOrientation(Process):
def __init__(self, **kwargs):
- super(AcpcOrientation, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "StandardACPC"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/aimsconverter.py b/capsul/pipeline/test/fake_morphologist/aimsconverter.py
index 79da03947..d9f330bd0 100644
--- a/capsul/pipeline/test/fake_morphologist/aimsconverter.py
+++ b/capsul/pipeline/test/fake_morphologist/aimsconverter.py
@@ -5,7 +5,7 @@
class AimsConverter(Process):
def __init__(self, **kwargs):
- super(AimsConverter, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "converter"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/baladinnormalizationtoaims.py b/capsul/pipeline/test/fake_morphologist/baladinnormalizationtoaims.py
index 2d10397d7..577ad1ff0 100644
--- a/capsul/pipeline/test/fake_morphologist/baladinnormalizationtoaims.py
+++ b/capsul/pipeline/test/fake_morphologist/baladinnormalizationtoaims.py
@@ -5,7 +5,7 @@
class BaladinNormalizationToAims(Process):
def __init__(self, **kwargs):
- super(BaladinNormalizationToAims, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "ConvertBaladinNormalizationToAIMS"
self.add_field("read", File, read=True, extensions=[".txt"], write=False)
diff --git a/capsul/pipeline/test/fake_morphologist/brainsegmentation.py b/capsul/pipeline/test/fake_morphologist/brainsegmentation.py
index e47efff21..9ecc360b4 100644
--- a/capsul/pipeline/test/fake_morphologist/brainsegmentation.py
+++ b/capsul/pipeline/test/fake_morphologist/brainsegmentation.py
@@ -5,7 +5,7 @@
class BrainSegmentation(Process):
def __init__(self, **kwargs):
- super(BrainSegmentation, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "BrainSegmentation"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/brainvolumes.py b/capsul/pipeline/test/fake_morphologist/brainvolumes.py
index 43cf56faa..bb27d2111 100644
--- a/capsul/pipeline/test/fake_morphologist/brainvolumes.py
+++ b/capsul/pipeline/test/fake_morphologist/brainvolumes.py
@@ -5,7 +5,7 @@
class brainvolumes(Process):
def __init__(self, **kwargs):
- super(brainvolumes, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "GlobalMorphometry"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/fslnormalizationtoaims.py b/capsul/pipeline/test/fake_morphologist/fslnormalizationtoaims.py
index bb5d729a2..743ccc8b0 100644
--- a/capsul/pipeline/test/fake_morphologist/fslnormalizationtoaims.py
+++ b/capsul/pipeline/test/fake_morphologist/fslnormalizationtoaims.py
@@ -5,7 +5,7 @@
class FSLnormalizationToAims(Process):
def __init__(self, **kwargs):
- super(FSLnormalizationToAims, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "ConvertFSLnormalizationToAIMS"
self.add_field("read", File, read=True, extensions=[".mat"], write=False)
diff --git a/capsul/pipeline/test/fake_morphologist/greywhiteclassificationhemi.py b/capsul/pipeline/test/fake_morphologist/greywhiteclassificationhemi.py
index bed86c03b..9cfbd8cb4 100644
--- a/capsul/pipeline/test/fake_morphologist/greywhiteclassificationhemi.py
+++ b/capsul/pipeline/test/fake_morphologist/greywhiteclassificationhemi.py
@@ -5,7 +5,7 @@
class GreyWhiteClassificationHemi(Process):
def __init__(self, **kwargs):
- super(GreyWhiteClassificationHemi, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "GreyWhiteClassification"
self.add_field("side", Literal["left", "right"], optional=True)
diff --git a/capsul/pipeline/test/fake_morphologist/greywhitemesh.py b/capsul/pipeline/test/fake_morphologist/greywhitemesh.py
index 6cb0e50e1..fec7e0467 100644
--- a/capsul/pipeline/test/fake_morphologist/greywhitemesh.py
+++ b/capsul/pipeline/test/fake_morphologist/greywhitemesh.py
@@ -5,7 +5,7 @@
class GreyWhiteMesh(Process):
def __init__(self, **kwargs):
- super(GreyWhiteMesh, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "GreyWhiteMesh"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/greywhitetopology.py b/capsul/pipeline/test/fake_morphologist/greywhitetopology.py
index ccb7b6453..d95a15c1c 100644
--- a/capsul/pipeline/test/fake_morphologist/greywhitetopology.py
+++ b/capsul/pipeline/test/fake_morphologist/greywhitetopology.py
@@ -5,7 +5,7 @@
class GreyWhiteTopology(Process):
def __init__(self, **kwargs):
- super(GreyWhiteTopology, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "GreyWhiteTopology"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/histoanalysis.py b/capsul/pipeline/test/fake_morphologist/histoanalysis.py
index 32f7b4e85..bf690b1ac 100644
--- a/capsul/pipeline/test/fake_morphologist/histoanalysis.py
+++ b/capsul/pipeline/test/fake_morphologist/histoanalysis.py
@@ -5,7 +5,7 @@
class HistoAnalysis(Process):
def __init__(self, **kwargs):
- super(HistoAnalysis, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "HistoAnalysis"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/importt1mri.py b/capsul/pipeline/test/fake_morphologist/importt1mri.py
index cb0412ae7..b1989a7ae 100644
--- a/capsul/pipeline/test/fake_morphologist/importt1mri.py
+++ b/capsul/pipeline/test/fake_morphologist/importt1mri.py
@@ -5,7 +5,7 @@
class ImportT1MRI(Process):
def __init__(self, **kwargs):
- super(ImportT1MRI, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "importation"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/morpho_report.py b/capsul/pipeline/test/fake_morphologist/morpho_report.py
index bc0e876e1..050894103 100644
--- a/capsul/pipeline/test/fake_morphologist/morpho_report.py
+++ b/capsul/pipeline/test/fake_morphologist/morpho_report.py
@@ -5,7 +5,7 @@
class morpho_report(Process):
def __init__(self, **kwargs):
- super(morpho_report, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "Report"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/normalization_aimsmiregister.py b/capsul/pipeline/test/fake_morphologist/normalization_aimsmiregister.py
index f8e639ee1..b0edc48b2 100644
--- a/capsul/pipeline/test/fake_morphologist/normalization_aimsmiregister.py
+++ b/capsul/pipeline/test/fake_morphologist/normalization_aimsmiregister.py
@@ -5,7 +5,7 @@
class normalization_aimsmiregister(Process):
def __init__(self, **kwargs):
- super(normalization_aimsmiregister, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "Normalization_AimsMIRegister"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/normalization_baladin.py b/capsul/pipeline/test/fake_morphologist/normalization_baladin.py
index cdbd39f33..6611598af 100644
--- a/capsul/pipeline/test/fake_morphologist/normalization_baladin.py
+++ b/capsul/pipeline/test/fake_morphologist/normalization_baladin.py
@@ -5,7 +5,7 @@
class Normalization_Baladin(Process):
def __init__(self, **kwargs):
- super(Normalization_Baladin, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "NormalizeBaladin"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/normalization_fsl_reinit.py b/capsul/pipeline/test/fake_morphologist/normalization_fsl_reinit.py
index 9840f91ae..cc841cad7 100644
--- a/capsul/pipeline/test/fake_morphologist/normalization_fsl_reinit.py
+++ b/capsul/pipeline/test/fake_morphologist/normalization_fsl_reinit.py
@@ -5,7 +5,7 @@
class Normalization_FSL_reinit(Process):
def __init__(self, **kwargs):
- super(Normalization_FSL_reinit, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "NormalizeFSL"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/normalization_t1_spm12_reinit.py b/capsul/pipeline/test/fake_morphologist/normalization_t1_spm12_reinit.py
index 09bfba6c5..7991b8272 100644
--- a/capsul/pipeline/test/fake_morphologist/normalization_t1_spm12_reinit.py
+++ b/capsul/pipeline/test/fake_morphologist/normalization_t1_spm12_reinit.py
@@ -5,7 +5,7 @@
class normalization_t1_spm12_reinit(Process):
def __init__(self, **kwargs):
- super(normalization_t1_spm12_reinit, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "normalization_t1_spm12_reinit"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/normalization_t1_spm8_reinit.py b/capsul/pipeline/test/fake_morphologist/normalization_t1_spm8_reinit.py
index 8b99f6488..cd7de8e30 100644
--- a/capsul/pipeline/test/fake_morphologist/normalization_t1_spm8_reinit.py
+++ b/capsul/pipeline/test/fake_morphologist/normalization_t1_spm8_reinit.py
@@ -5,7 +5,7 @@
class normalization_t1_spm8_reinit(Process):
def __init__(self, **kwargs):
- super(normalization_t1_spm8_reinit, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "normalization_t1_spm8_reinit"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/pialmesh.py b/capsul/pipeline/test/fake_morphologist/pialmesh.py
index afe2a649a..a42fda393 100644
--- a/capsul/pipeline/test/fake_morphologist/pialmesh.py
+++ b/capsul/pipeline/test/fake_morphologist/pialmesh.py
@@ -5,7 +5,7 @@
class PialMesh(Process):
def __init__(self, **kwargs):
- super(PialMesh, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "PialMesh"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/reorientanatomy.py b/capsul/pipeline/test/fake_morphologist/reorientanatomy.py
index 878e9889d..e3c4c6ecb 100644
--- a/capsul/pipeline/test/fake_morphologist/reorientanatomy.py
+++ b/capsul/pipeline/test/fake_morphologist/reorientanatomy.py
@@ -5,7 +5,7 @@
class ReorientAnatomy(Process):
def __init__(self, **kwargs):
- super(ReorientAnatomy, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "ReorientAnatomy"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/scalpmesh.py b/capsul/pipeline/test/fake_morphologist/scalpmesh.py
index 46f022db0..758241b31 100644
--- a/capsul/pipeline/test/fake_morphologist/scalpmesh.py
+++ b/capsul/pipeline/test/fake_morphologist/scalpmesh.py
@@ -5,7 +5,7 @@
class ScalpMesh(Process):
def __init__(self, **kwargs):
- super(ScalpMesh, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "HeadMesh"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/skullstripping.py b/capsul/pipeline/test/fake_morphologist/skullstripping.py
index be0ddc5e1..d2ab0f502 100644
--- a/capsul/pipeline/test/fake_morphologist/skullstripping.py
+++ b/capsul/pipeline/test/fake_morphologist/skullstripping.py
@@ -5,7 +5,7 @@
class skullstripping(Process):
def __init__(self, **kwargs):
- super(skullstripping, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "SkullStripping"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/splitbrain.py b/capsul/pipeline/test/fake_morphologist/splitbrain.py
index a81c41e8d..8e4b2c78f 100644
--- a/capsul/pipeline/test/fake_morphologist/splitbrain.py
+++ b/capsul/pipeline/test/fake_morphologist/splitbrain.py
@@ -5,7 +5,7 @@
class SplitBrain(Process):
def __init__(self, **kwargs):
- super(SplitBrain, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "SplitBrain"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/spmsn3dtoaims.py b/capsul/pipeline/test/fake_morphologist/spmsn3dtoaims.py
index f768a2745..7ede865ae 100644
--- a/capsul/pipeline/test/fake_morphologist/spmsn3dtoaims.py
+++ b/capsul/pipeline/test/fake_morphologist/spmsn3dtoaims.py
@@ -5,7 +5,7 @@
class SPMsn3dToAims(Process):
def __init__(self, **kwargs):
- super(SPMsn3dToAims, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "ConvertSPMnormalizationToAIMS"
self.add_field("read", File, read=True, extensions=[".mat"], write=False)
diff --git a/capsul/pipeline/test/fake_morphologist/sulcideeplabeling.py b/capsul/pipeline/test/fake_morphologist/sulcideeplabeling.py
index b227f6d49..eb77e2d93 100644
--- a/capsul/pipeline/test/fake_morphologist/sulcideeplabeling.py
+++ b/capsul/pipeline/test/fake_morphologist/sulcideeplabeling.py
@@ -5,7 +5,7 @@
class SulciDeepLabeling(Process):
def __init__(self, **kwargs):
- super(SulciDeepLabeling, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "CNN_recognition19"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/sulcigraph.py b/capsul/pipeline/test/fake_morphologist/sulcigraph.py
index 0f833c5ba..9adf9ff0e 100644
--- a/capsul/pipeline/test/fake_morphologist/sulcigraph.py
+++ b/capsul/pipeline/test/fake_morphologist/sulcigraph.py
@@ -5,7 +5,7 @@
class SulciGraph(Process):
def __init__(self, **kwargs):
- super(SulciGraph, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "CorticalFoldsGraph"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/sulcigraphmorphometrybysubject.py b/capsul/pipeline/test/fake_morphologist/sulcigraphmorphometrybysubject.py
index 198e37bdb..f719005a9 100644
--- a/capsul/pipeline/test/fake_morphologist/sulcigraphmorphometrybysubject.py
+++ b/capsul/pipeline/test/fake_morphologist/sulcigraphmorphometrybysubject.py
@@ -5,7 +5,7 @@
class sulcigraphmorphometrybysubject(Process):
def __init__(self, **kwargs):
- super(sulcigraphmorphometrybysubject, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "SulcalMorphometry"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/sulcilabellingann.py b/capsul/pipeline/test/fake_morphologist/sulcilabellingann.py
index 30443ff8a..650e854a7 100644
--- a/capsul/pipeline/test/fake_morphologist/sulcilabellingann.py
+++ b/capsul/pipeline/test/fake_morphologist/sulcilabellingann.py
@@ -5,7 +5,7 @@
class SulciLabellingANN(Process):
def __init__(self, **kwargs):
- super(SulciLabellingANN, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "recognition2000"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/sulcilabellingspamglobal.py b/capsul/pipeline/test/fake_morphologist/sulcilabellingspamglobal.py
index 9999d56f6..8db7d8f52 100644
--- a/capsul/pipeline/test/fake_morphologist/sulcilabellingspamglobal.py
+++ b/capsul/pipeline/test/fake_morphologist/sulcilabellingspamglobal.py
@@ -5,7 +5,7 @@
class SulciLabellingSPAMGlobal(Process):
def __init__(self, **kwargs):
- super(SulciLabellingSPAMGlobal, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "global_recognition"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/sulcilabellingspamlocal.py b/capsul/pipeline/test/fake_morphologist/sulcilabellingspamlocal.py
index 4eb18e26e..06cd701a1 100644
--- a/capsul/pipeline/test/fake_morphologist/sulcilabellingspamlocal.py
+++ b/capsul/pipeline/test/fake_morphologist/sulcilabellingspamlocal.py
@@ -5,7 +5,7 @@
class SulciLabellingSPAMLocal(Process):
def __init__(self, **kwargs):
- super(SulciLabellingSPAMLocal, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "local_recognition"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/sulcilabellingspammarkov.py b/capsul/pipeline/test/fake_morphologist/sulcilabellingspammarkov.py
index 043a75a66..38a82b575 100644
--- a/capsul/pipeline/test/fake_morphologist/sulcilabellingspammarkov.py
+++ b/capsul/pipeline/test/fake_morphologist/sulcilabellingspammarkov.py
@@ -5,7 +5,7 @@
class SulciLabellingSPAMMarkov(Process):
def __init__(self, **kwargs):
- super(SulciLabellingSPAMMarkov, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "markovian_recognition"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/sulciskeleton.py b/capsul/pipeline/test/fake_morphologist/sulciskeleton.py
index 648134c12..d280bb810 100644
--- a/capsul/pipeline/test/fake_morphologist/sulciskeleton.py
+++ b/capsul/pipeline/test/fake_morphologist/sulciskeleton.py
@@ -5,7 +5,7 @@
class SulciSkeleton(Process):
def __init__(self, **kwargs):
- super(SulciSkeleton, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "SulciSkeleton"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/t1biascorrection.py b/capsul/pipeline/test/fake_morphologist/t1biascorrection.py
index d529de5dd..b74a5e584 100644
--- a/capsul/pipeline/test/fake_morphologist/t1biascorrection.py
+++ b/capsul/pipeline/test/fake_morphologist/t1biascorrection.py
@@ -5,7 +5,7 @@
class T1BiasCorrection(Process):
def __init__(self, **kwargs):
- super(T1BiasCorrection, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "BiasCorrection"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/talairachtransformation.py b/capsul/pipeline/test/fake_morphologist/talairachtransformation.py
index 2621b3641..a0c99c28c 100644
--- a/capsul/pipeline/test/fake_morphologist/talairachtransformation.py
+++ b/capsul/pipeline/test/fake_morphologist/talairachtransformation.py
@@ -5,7 +5,7 @@
class TalairachTransformation(Process):
def __init__(self, **kwargs):
- super(TalairachTransformation, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "TalairachTransformation"
self.add_field(
diff --git a/capsul/pipeline/test/fake_morphologist/talairachtransformationfromnormalization.py b/capsul/pipeline/test/fake_morphologist/talairachtransformationfromnormalization.py
index e3f54732b..74a2ba0f4 100644
--- a/capsul/pipeline/test/fake_morphologist/talairachtransformationfromnormalization.py
+++ b/capsul/pipeline/test/fake_morphologist/talairachtransformationfromnormalization.py
@@ -5,7 +5,7 @@
class TalairachTransformationFromNormalization(Process):
def __init__(self, **kwargs):
- super(TalairachTransformationFromNormalization, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = "TalairachFromNormalization"
self.add_field(
diff --git a/capsul/pipeline/test/test_activation.py b/capsul/pipeline/test/test_activation.py
index de8cca2ca..94496da7d 100644
--- a/capsul/pipeline/test/test_activation.py
+++ b/capsul/pipeline/test/test_activation.py
@@ -9,7 +9,7 @@ class DummyProcess(Process):
"""Dummy Test Process"""
def __init__(self, definition):
- super(DummyProcess, self).__init__(
+ super().__init__(
"capsul.pipeline.test.test_activation.DummyProcess"
)
diff --git a/capsul/pipeline/test/test_complex_pipeline_activations.py b/capsul/pipeline/test/test_complex_pipeline_activations.py
index 309724afc..82af2294a 100644
--- a/capsul/pipeline/test/test_complex_pipeline_activations.py
+++ b/capsul/pipeline/test/test_complex_pipeline_activations.py
@@ -859,19 +859,19 @@ def test_complex_activations(self):
node = node_pipeline.nodes[node_name]
except KeyError:
raise KeyError(
- "Pipeline {0} has no node named {1}".format(
+ "Pipeline {} has no node named {}".format(
node_pipeline.pipeline, node_name
)
)
try:
- what = "activation of node {0}".format(
+ what = "activation of node {}".format(
full_node_name or "main pipeline node"
)
expected = node_activations.get("_activated")
if expected is not None:
got = node.activated
self.assertEqual(expected, got)
- what = "enabled for node {0}".format(
+ what = "enabled for node {}".format(
full_node_name or "main pipeline node"
)
expected = node_activations.get("_enabled")
@@ -880,7 +880,7 @@ def test_complex_activations(self):
self.assertEqual(expected, got)
except AssertionError:
raise AssertionError(
- "Wrong activation within ComplexPipeline with parameters {0}: {1} is supposed to be {2} but is {3}".format(
+ "Wrong activation within ComplexPipeline with parameters {}: {} is supposed to be {} but is {}".format(
kwargs, what, expected, got
)
)
diff --git a/capsul/pipeline/test/test_custom_nodes.py b/capsul/pipeline/test/test_custom_nodes.py
index 7306ac53f..044139267 100644
--- a/capsul/pipeline/test/test_custom_nodes.py
+++ b/capsul/pipeline/test/test_custom_nodes.py
@@ -26,11 +26,11 @@ def execute(self, context):
with open(self.out1, "w") as f:
print("test: %s" % os.path.basename(self.out1), file=f)
print("##############", file=f)
- with open(self.in1, "r") as ff:
+ with open(self.in1) as ff:
f.write(ff.read())
print("model: %s" % os.path.basename(self.model), file=f)
print("##############", file=f)
- with open(self.model, "r") as ff:
+ with open(self.model) as ff:
f.write(ff.read())
# TODO FIXME: this should be automatic
output_dict = {"out1": self.out1}
@@ -275,12 +275,10 @@ def pipeline_definition(self):
self.add_iterative_process(
"test_it",
"capsul.pipeline.test.test_custom_nodes.CVtest",
- non_iterative_plugs=set(
- [
+ non_iterative_plugs={
"model",
"base",
- ]
- ),
+ },
make_optional=["out1"],
)
self.add_custom_node(
@@ -534,7 +532,7 @@ def add_py_tmpfile(self, pyfname):
# print('cache_dir:', cache_dir)
cpver = "cpython-%d%d.pyc" % sys.version_info[:2]
pyfname_we = osp.basename(pyfname[: pyfname.rfind(".")])
- pycfname = osp.join(cache_dir, "%s.%s" % (pyfname_we, cpver))
+ pycfname = osp.join(cache_dir, "{}.{}".format(pyfname_we, cpver))
self.temp_files.append(pycfname)
# print('added py tmpfile:', pyfname, pycfname)
diff --git a/capsul/pipeline/test/test_double_switch.py b/capsul/pipeline/test/test_double_switch.py
index f3e316419..0cf02edb1 100644
--- a/capsul/pipeline/test/test_double_switch.py
+++ b/capsul/pipeline/test/test_double_switch.py
@@ -7,7 +7,7 @@ class DummyProcess(Process):
"""Dummy Test Process"""
def __init__(self, definition=None):
- super(DummyProcess, self).__init__(
+ super().__init__(
"capsul.pipeline.test.test_double_switch.DummyProcess"
)
diff --git a/capsul/pipeline/test/test_iterative_process.py b/capsul/pipeline/test/test_iterative_process.py
index 93b62e6c3..c45264523 100644
--- a/capsul/pipeline/test/test_iterative_process.py
+++ b/capsul/pipeline/test/test_iterative_process.py
@@ -279,7 +279,7 @@ def test_iterative_big_pipeline_workflow(self):
subject = workflow.parameters_values[proxy[1]]
subjects.add(subject)
self.assertIn(subject, ["toto", "tutu", "tata", "titi", "tete"])
- self.assertEqual(subjects, set(["toto", "tutu", "tata", "titi", "tete"]))
+ self.assertEqual(subjects, {"toto", "tutu", "tata", "titi", "tete"})
def test_iterative_pipeline_workflow_run(self):
self.small_pipeline.output_image = [
diff --git a/capsul/pipeline/test/test_optional_output_switch.py b/capsul/pipeline/test/test_optional_output_switch.py
index 9bead6e53..759823582 100644
--- a/capsul/pipeline/test/test_optional_output_switch.py
+++ b/capsul/pipeline/test/test_optional_output_switch.py
@@ -10,7 +10,7 @@ class DummyProcess(Process):
"""Dummy Test Process"""
def __init__(self, definition=None):
- super(DummyProcess, self).__init__(
+ super().__init__(
"capsul.pipeline.test.test_optional_output_switch"
)
diff --git a/capsul/pipeline/test/test_pipeline.py b/capsul/pipeline/test/test_pipeline.py
index 50095742e..666869c62 100644
--- a/capsul/pipeline/test/test_pipeline.py
+++ b/capsul/pipeline/test/test_pipeline.py
@@ -20,7 +20,7 @@ class DummyProcess(Process):
def __init__(self, definition=None):
if definition is None:
definition = "capsul.pipeline.test.test_pipeline.DummyProcess"
- super(DummyProcess, self).__init__(definition)
+ super().__init__(definition)
# inputs
self.add_field("input_image", File, optional=False)
@@ -103,7 +103,7 @@ def add_py_tmpfile(self, pyfname):
# print('cache_dir:', cache_dir)
cpver = "cpython-%d%d.pyc" % sys.version_info[:2]
pyfname_we = osp.basename(pyfname[: pyfname.rfind(".")])
- pycfname = osp.join(cache_dir, "%s.%s" % (pyfname_we, cpver))
+ pycfname = osp.join(cache_dir, "{}.{}".format(pyfname_we, cpver))
self.temp_files.append(pycfname)
# print('added py tmpfile:', pyfname, pycfname)
diff --git a/capsul/pipeline/test/test_pipeline_parameters.py b/capsul/pipeline/test/test_pipeline_parameters.py
index 29210bc91..ba830e3d5 100644
--- a/capsul/pipeline/test/test_pipeline_parameters.py
+++ b/capsul/pipeline/test/test_pipeline_parameters.py
@@ -23,7 +23,7 @@ def load_pipeline_dictionary(filename):
:param filename: the json filename
"""
if filename:
- with open(filename, "r", encoding="utf8") as file:
+ with open(filename, encoding="utf8") as file:
return json.load(file)
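A self-contained sketch of the default-read-mode pattern above, assuming only the standard library (the temporary file is illustrative):

    import json, os, tempfile

    fd, fname = tempfile.mkstemp(suffix=".json")
    with os.fdopen(fd, "w", encoding="utf8") as f:
        json.dump({"pipeline": "demo"}, f)
    with open(fname, encoding="utf8") as f:   # mode "r" is implicit
        assert json.load(f) == {"pipeline": "demo"}
    os.unlink(fname)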
diff --git a/capsul/pipeline/test/test_pipeline_workflow.py b/capsul/pipeline/test/test_pipeline_workflow.py
index 3410ef119..6e8cd0848 100644
--- a/capsul/pipeline/test/test_pipeline_workflow.py
+++ b/capsul/pipeline/test/test_pipeline_workflow.py
@@ -11,7 +11,7 @@ class DummyProcess(Process):
"""Dummy Test Process"""
def __init__(self, definition):
- super(DummyProcess, self).__init__(definition)
+ super().__init__(definition)
# inputs
self.add_field("input", File, optional=False)
@@ -36,7 +36,7 @@ class DummyProcessSPM(DummyProcess):
class DummyListProcess(Process):
def __init__(self, definition):
- super(DummyListProcess, self).__init__(definition)
+ super().__init__(definition)
# inputs
self.add_field("inputs", list[File], optional=False)
diff --git a/capsul/pipeline/test/test_switch_pipeline.py b/capsul/pipeline/test/test_switch_pipeline.py
index 067431608..b4667f321 100644
--- a/capsul/pipeline/test/test_switch_pipeline.py
+++ b/capsul/pipeline/test/test_switch_pipeline.py
@@ -11,7 +11,7 @@ class DummyProcess(Process):
"""Dummy Test Process"""
def __init__(self, definition=None):
- super(DummyProcess, self).__init__("capsul.pipeline.test.test_switch_pipeline")
+ super().__init__("capsul.pipeline.test.test_switch_pipeline")
# inputs
self.add_field("input_image", str, optional=False)
diff --git a/capsul/pipeline/test/test_switch_subpipeline.py b/capsul/pipeline/test/test_switch_subpipeline.py
index 72d6b0f1b..87d87e156 100644
--- a/capsul/pipeline/test/test_switch_subpipeline.py
+++ b/capsul/pipeline/test/test_switch_subpipeline.py
@@ -10,7 +10,7 @@ class DummyProcess(Process):
"""Dummy Test Process"""
def __init__(self, definition=None):
- super(DummyProcess, self).__init__(
+ super().__init__(
"capsul.pipeline.test.test_switch_subpipeline.DummyProcess"
)
@@ -31,7 +31,7 @@ class DummyProcess1_1(Process):
"""Dummy Test Process with 1 input and one output"""
def __init__(self, definition=None):
- super(DummyProcess1_1, self).__init__(
+ super().__init__(
"capsul.pipeline.test.test_switch_subpipeline.DummyProcess1_1"
)
@@ -49,7 +49,7 @@ class DummyProcess2_1(Process):
"""Dummy Test Process with 2 inputs and one output"""
def __init__(self, definition=None):
- super(DummyProcess2_1, self).__init__(
+ super().__init__(
"capsul.pipeline.test.test_switch_subpipeline.DummyProcess2_1"
)
@@ -68,7 +68,7 @@ class DummyProcess4_1(Process):
"""Dummy Test Process with 4 inputs and one output"""
def __init__(self, definition=None):
- super(DummyProcess4_1, self).__init__(
+ super().__init__(
"capsul.pipeline.test.test_switch_subpipeline.DummyProcess4_1"
)
diff --git a/capsul/pipeline/test/test_temporary.py b/capsul/pipeline/test/test_temporary.py
index f773132a9..ee6964b23 100644
--- a/capsul/pipeline/test/test_temporary.py
+++ b/capsul/pipeline/test/test_temporary.py
@@ -6,7 +6,6 @@
from soma.controller import File, field
from capsul.api import Process, Pipeline, Capsul
import shutil
-from six.moves import zip
class DummyProcess1(Process):
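Dropping six.moves is safe because the builtin zip is already a lazy iterator on Python 3, matching what six.moves.zip provided:

    pairs = zip(["a", "b"], [1, 2])
    assert not isinstance(pairs, list)           # lazy, not a materialized list
    assert list(pairs) == [("a", 1), ("b", 2)]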
diff --git a/capsul/pipeline/topological_sort.py b/capsul/pipeline/topological_sort.py
index f1265772a..fee6445b7 100644
--- a/capsul/pipeline/topological_sort.py
+++ b/capsul/pipeline/topological_sort.py
@@ -8,7 +8,7 @@
"""
-class GraphNode(object):
+class GraphNode:
"""Simple Graph Node Structure
Attributes
@@ -103,7 +103,7 @@ def remove_link_from(self, node):
self.links_from_degree -= 1
-class Graph(object):
+class Graph:
"""Simple Graph Structure on which we want to perform a
topological tree (no cycle).
@@ -139,10 +139,10 @@ def add_node(self, node):
the node to insert
"""
if not isinstance(node, GraphNode):
- raise Exception("Expect a GraphNode, got {0}".format(node))
+ raise Exception("Expect a GraphNode, got {}".format(node))
if node.name in self._nodes:
raise Exception(
- "Expect a GraphNode with a unique name, " "got {0}".format(node)
+ "Expect a GraphNode with a unique name, " "got {}".format(node)
)
self._nodes[node.name] = node
@@ -170,12 +170,12 @@ def add_link(self, from_node, to_node):
"""
if from_node not in self._nodes:
raise Exception(
- "Node {0} is not defined in the Graph."
+ "Node {} is not defined in the Graph."
"Use add_node() method".format(from_node)
)
if to_node not in self._nodes:
raise Exception(
- "Node {0} is not defined in the Graph."
+ "Node {} is not defined in the Graph."
"Use add_node() method".format(to_node)
)
if (from_node, to_node) not in self._links:
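The two modernizations in this file in miniature (the Node class here is hypothetical): Python 3 classes inherit from object implicitly, and "{}" fields auto-number, so the explicit "{0}" index is redundant when arguments are used in order:

    class Node:          # no need to spell out (object)
        pass

    msg = "Node {} is not defined in the Graph. Use add_node() method".format("n1")
    assert msg == "Node n1 is not defined in the Graph. Use add_node() method"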
diff --git a/capsul/process/nipype_process.py b/capsul/process/nipype_process.py
index 1fc8fcaf4..b59bfeb1e 100644
--- a/capsul/process/nipype_process.py
+++ b/capsul/process/nipype_process.py
@@ -192,8 +192,8 @@ def sync_process_output_traits(process_instance):
ex_type, ex, tb = sys.exc_info()
logging.debug(
"Something wrong in the nipype output trait "
- "synchronization:\n\n\tError: {0} - {1}\n"
- "\tTraceback:\n{2}".format(
+ "synchronization:\n\n\tError: {} - {}\n"
+ "\tTraceback:\n{}".format(
ex_type, ex, "".join(traceback.format_tb(tb))
)
)
@@ -232,8 +232,8 @@ def sync_process_output_traits(process_instance):
ex_type, ex, tb = sys.exc_info()
logging.debug(
"Something wrong in the nipype output trait "
- "synchronization:\n\n\tError: {0} - {1}\n"
- "\tTraceback:\n{2}".format(
+ "synchronization:\n\n\tError: {} - {}\n"
+ "\tTraceback:\n{}".format(
ex_type, ex, "".join(traceback.format_tb(tb))
)
)
@@ -263,8 +263,8 @@ def sync_process_output_traits(process_instance):
ex_type, ex, tb = sys.exc_info()
logging.debug(
"Something wrong in the nipype output trait "
- "synchronization:\n\n\tError: {0} - {1}\n"
- "\tTraceback:\n{2}".format(
+ "synchronization:\n\n\tError: {} - {}\n"
+ "\tTraceback:\n{}".format(
ex_type, ex, "".join(traceback.format_tb(tb))
)
)
@@ -399,7 +399,7 @@ def _make_matlab_command(self, content):
if process_instance.field(field_name) is not None:
field_name = "nipype_" + field_name
- # Relax nipye exists trait contrain
+ # Relax nipype exists trait constraint
relax_exists_constraint(trait)
# Clone the nipype trait
@@ -561,7 +561,7 @@ def relax_exists_constraint(trait):
trait: trait
a trait that will be relaxed from the exist constraint
"""
- # If we have a single trait, just modify the 'exists' contrain
+ # If we have a single trait, just modify the 'exists' constraint
# if specified
if hasattr(trait.handler, "exists"):
trait.handler.exists = False
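For the repeated traceback logging above, the standard library also allows a shorter spelling via logging's exc_info flag; a hedged sketch of the equivalence, not a change this patch makes:

    import logging, sys, traceback

    try:
        raise ValueError("demo")
    except ValueError:
        ex_type, ex, tb = sys.exc_info()
        logging.debug(
            "Error: {} - {}\nTraceback:\n{}".format(
                ex_type, ex, "".join(traceback.format_tb(tb))
            )
        )
        logging.debug("synchronization failed", exc_info=True)  # same information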
diff --git a/capsul/process/node.py b/capsul/process/node.py
index 90f9cd55b..24c5c9c14 100644
--- a/capsul/process/node.py
+++ b/capsul/process/node.py
@@ -252,7 +252,7 @@ def __init__(
if "name" not in parameter:
raise Exception(
"Can't create parameter with unknown"
- "identifier and parameter {0}".format(parameter)
+ "identifier and parameter {}".format(parameter)
)
parameter = parameter.copy()
# force the parameter type
@@ -263,7 +263,7 @@ def __init__(
raise Exception(
"Can't create Node. Expect a dict structure "
"to initialize the Node, "
- "got {0}: {1}".format(type(parameter), parameter)
+ "got {}: {}".format(type(parameter), parameter)
)
def __del__(self):
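Adjacent string literals concatenate with no separator, which is why the second fragment of the message above needs its leading space; for instance:

    msg = ("Can't create parameter with unknown"
           " identifier and parameter {}".format({"name": "x"}))
    assert "unknown identifier" in msg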
diff --git a/capsul/process/process.py b/capsul/process/process.py
index 537b9062f..1e5e4032a 100644
--- a/capsul/process/process.py
+++ b/capsul/process/process.py
@@ -152,11 +152,11 @@ def json(self, include_parameters=True):
return result
def json_parameters(self):
- return super(Process, self).json()
+ return super().json()
def before_execute(self, context):
"""This method is called by CapsulEngine before calling
- execute(). By default it does nothing but can be overriden
+ execute(). By default it does nothing but can be overridden
in derived classes.
"""
pass
@@ -292,7 +292,7 @@ def get_help(self, returnhelp=False, use_labels=False):
# Update the documentation with a description of the pipeline
# when the xml to pipeline wrapper has been used
if returnhelp and hasattr(self, "_pipeline_desc"):
- str_desc = "".join([" {0}".format(line) for line in self._pipeline_desc])
+ str_desc = "".join([" {}".format(line) for line in self._pipeline_desc])
doctring += [
".. hidden-code-block:: python",
" :starthidden: True",
@@ -309,26 +309,26 @@ def get_help(self, returnhelp=False, use_labels=False):
# when the function to process wrapper has been used
if hasattr(self, "_func_name") and hasattr(self, "_func_module"):
doctring += [
- "This process has been wrapped from {0}.{1}.".format(
+ "This process has been wrapped from {}.{}.".format(
self._func_module, self._func_name
),
"",
]
if returnhelp:
doctring += [
- ".. currentmodule:: {0}".format(self._func_module),
+ ".. currentmodule:: {}".format(self._func_module),
"",
".. autosummary::",
" :toctree: ./",
"",
- " {0}".format(self._func_name),
+ " {}".format(self._func_name),
"",
]
# Append the input and output fields help
if use_labels:
- in_label = [".. _%s.%s_inputs:\n\n" % (self.__module__, self.name)]
- out_label = [".. _%s.%s_outputs:\n\n" % (self.__module__, self.name)]
+ in_label = [".. _{}.{}_inputs:\n\n".format(self.__module__, self.name)]
+ out_label = [".. _{}.{}_outputs:\n\n".format(self.__module__, self.name)]
else:
in_label = []
out_label = []
@@ -594,7 +594,7 @@ def after_execute(self, exec_result, context):
# restore initial values, keeping outputs
# The situation here is that:
- # * output_directory should drive "final" output valules
+ # * output_directory should drive "final" output values
# * we may have been using a temporary output directory, thus output
# values are already set to this temp dir, not the final one.
# (at least when use_temp_output_dir is set).
@@ -659,7 +659,7 @@ def _move_outputs(self):
shutil.rmtree(tmp_output)
del self._destination
- self.destinaton = self._former_output_directory
+ self.destination = self._former_output_directory
if hasattr(self, "output_directory"):
self.output_directory = self._former_output_directory
del self._former_output_directory
@@ -914,7 +914,7 @@ def init_with_skip(self, *args, **kwargs):
if len(stack) >= 2:
s2 = stack[-2]
if s2[2] == "nipype_factory":
- instance = super(NipypeProcess, cls).__new__(cls, *args, **kwargs)
+ instance = super().__new__(cls, *args, **kwargs)
setattr(instance, "__%s_np_init_done__" % cls.__name__, False)
return instance
nipype_class = getattr(cls, "_nipype_class_type", None)
@@ -947,7 +947,7 @@ def init_with_skip(self, *args, **kwargs):
instance.id = instance.__class__.__module__ + "." + instance.name
instance.__postinit__(*nargs, **nkwargs)
else:
- instance = super(NipypeProcess, cls).__new__(cls, *args, **kwargs)
+ instance = super().__new__(cls, *args, **kwargs)
setattr(instance, "__%s_np_init_done__" % cls.__name__, False)
return instance
@@ -1076,7 +1076,7 @@ class Smooth(NipypeProcess):
]
if use_temp_output_dir is None:
use_temp_output_dir = True
- super(NipypeProcess, self).__init__(
+ super().__init__(
definition=definition,
activate_copy=True,
inputs_to_copy=inputs_to_copy,
@@ -1089,7 +1089,7 @@ class Smooth(NipypeProcess):
else:
if use_temp_output_dir is None:
use_temp_output_dir = False
- super(NipypeProcess, self).__init__(
+ super().__init__(
definition=definition,
activate_copy=False,
use_temp_output_dir=use_temp_output_dir,
@@ -1220,4 +1220,4 @@ def after_execute(self, exec_result, context):
)
if os.path.exists(script_file):
shutil.move(script_file, getattr(self, script_tname))
- return super(NipypeProcess, self).after_execute(exec_result, context)
+ return super().after_execute(exec_result, context)
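Zero-argument super() also works inside __new__, which the NipypeProcess changes rely on; a minimal hypothetical sketch:

    class Tracked:
        def __new__(cls):
            # same resolution as super(Tracked, cls).__new__(cls)
            instance = super().__new__(cls)
            instance.created = True
            return instance

    assert Tracked().created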
diff --git a/capsul/process/test/test_load_from_description.py b/capsul/process/test/test_load_from_description.py
index 27cce2e68..08c60e9f5 100644
--- a/capsul/process/test/test_load_from_description.py
+++ b/capsul/process/test/test_load_from_description.py
@@ -41,7 +41,7 @@ def to_warp_func(
#
Link:
-%s
-%s
+{}
+{}