From c94c46500acc1f792858ed9550edbebdfc851e32 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Fri, 10 Nov 2023 14:55:37 +0200 Subject: [PATCH] Fix misspellings --- capsul/application.py | 6 +- capsul/config/configuration.py | 2 +- capsul/database/__init__.py | 4 +- capsul/database/redis.py | 2 +- capsul/database/sqlite.py | 2 +- capsul/dataset.py | 6 +- capsul/engine/__init__.py | 2 +- capsul/info.py | 23 ++- capsul/pipeline/custom_nodes/cv_node.py | 2 +- capsul/pipeline/custom_nodes/loo_node.py | 3 +- capsul/pipeline/custom_nodes/map_node.py | 3 +- capsul/pipeline/custom_nodes/reduce_node.py | 2 +- capsul/pipeline/custom_nodes/strcat_node.py | 3 +- capsul/pipeline/custom_nodes/strconv.py | 2 +- capsul/pipeline/pipeline.py | 58 +++--- capsul/pipeline/pipeline_nodes.py | 8 +- capsul/pipeline/pipeline_tools.py | 26 +-- capsul/pipeline/process_iteration.py | 4 +- capsul/pipeline/python_export.py | 26 ++- .../test/fake_morphologist/acpcorientation.py | 2 +- .../test/fake_morphologist/aimsconverter.py | 2 +- .../baladinnormalizationtoaims.py | 2 +- .../fake_morphologist/brainsegmentation.py | 2 +- .../test/fake_morphologist/brainvolumes.py | 2 +- .../fslnormalizationtoaims.py | 2 +- .../greywhiteclassificationhemi.py | 2 +- .../test/fake_morphologist/greywhitemesh.py | 2 +- .../fake_morphologist/greywhitetopology.py | 2 +- .../test/fake_morphologist/histoanalysis.py | 2 +- .../test/fake_morphologist/importt1mri.py | 2 +- .../test/fake_morphologist/morpho_report.py | 2 +- .../normalization_aimsmiregister.py | 2 +- .../normalization_baladin.py | 2 +- .../normalization_fsl_reinit.py | 2 +- .../normalization_t1_spm12_reinit.py | 2 +- .../normalization_t1_spm8_reinit.py | 2 +- .../test/fake_morphologist/pialmesh.py | 2 +- .../test/fake_morphologist/reorientanatomy.py | 2 +- .../test/fake_morphologist/scalpmesh.py | 2 +- .../test/fake_morphologist/skullstripping.py | 2 +- .../test/fake_morphologist/splitbrain.py | 2 +- .../test/fake_morphologist/spmsn3dtoaims.py | 2 +- .../fake_morphologist/sulcideeplabeling.py | 2 +- .../test/fake_morphologist/sulcigraph.py | 2 +- .../sulcigraphmorphometrybysubject.py | 2 +- .../fake_morphologist/sulcilabellingann.py | 2 +- .../sulcilabellingspamglobal.py | 2 +- .../sulcilabellingspamlocal.py | 2 +- .../sulcilabellingspammarkov.py | 2 +- .../test/fake_morphologist/sulciskeleton.py | 2 +- .../fake_morphologist/t1biascorrection.py | 2 +- .../talairachtransformation.py | 2 +- ...alairachtransformationfromnormalization.py | 2 +- capsul/pipeline/test/test_activation.py | 2 +- .../test/test_complex_pipeline_activations.py | 8 +- capsul/pipeline/test/test_custom_nodes.py | 12 +- capsul/pipeline/test/test_double_switch.py | 2 +- .../pipeline/test/test_iterative_process.py | 2 +- .../test/test_optional_output_switch.py | 2 +- capsul/pipeline/test/test_pipeline.py | 4 +- .../pipeline/test/test_pipeline_parameters.py | 2 +- .../pipeline/test/test_pipeline_workflow.py | 4 +- capsul/pipeline/test/test_switch_pipeline.py | 2 +- .../pipeline/test/test_switch_subpipeline.py | 8 +- capsul/pipeline/test/test_temporary.py | 1 - capsul/pipeline/topological_sort.py | 12 +- capsul/process/nipype_process.py | 16 +- capsul/process/node.py | 4 +- capsul/process/process.py | 30 +-- .../test/test_load_from_description.py | 12 +- capsul/process/test/test_metadata_schema.py | 4 +- capsul/process/test/test_runprocess.py | 2 +- capsul/qt_apps/pipeline_viewer_app.py | 3 +- capsul/qt_apps/resources/icones.py | 1 - 
capsul/qt_apps/utils/application.py | 3 +- capsul/qt_apps/utils/fill_treectrl.py | 5 +- capsul/qt_apps/utils/find_pipelines.py | 18 +- capsul/qt_apps/utils/window.py | 3 +- capsul/qt_gui/widgets/activation_inspector.py | 24 +-- .../widgets/attributed_process_widget.py | 2 +- capsul/qt_gui/widgets/config_gui.py | 4 +- capsul/qt_gui/widgets/links_debugger.py | 6 +- .../qt_gui/widgets/pipeline_developer_view.py | 184 +++++++++--------- .../widgets/pipeline_file_warning_widget.py | 2 +- capsul/qt_gui/widgets/pipeline_user_view.py | 12 +- capsul/qt_gui/widgets/settings_editor.py | 10 +- capsul/qt_gui/widgets/viewer_widget.py | 2 +- capsul/run.py | 2 +- capsul/sphinxext/__init__.py | 2 +- capsul/sphinxext/capsul_pipeline_rst.py | 22 +-- capsul/sphinxext/capsul_pipeline_view.py | 19 +- capsul/sphinxext/capsul_sphinx_layout.py | 19 +- capsul/sphinxext/capsul_usecases_rst.py | 14 +- capsul/sphinxext/layoutdocgen.py | 35 ++-- capsul/sphinxext/load_pilots.py | 3 +- capsul/sphinxext/pipelinedocgen.py | 32 +-- .../resources/custom_ext/hidden_code_block.py | 3 +- .../custom_ext/hidden_technical_block.py | 13 +- .../resources/custom_ext/link_to_block.py | 7 +- capsul/sphinxext/resources/installation.rst | 2 +- .../resources/numpy_ext/docscrape.py | 29 ++- .../resources/numpy_ext/docscrape_sphinx.py | 16 +- .../sphinxext/resources/numpy_ext/numpydoc.py | 9 +- capsul/sphinxext/test/test_usercases_doc.py | 4 - capsul/sphinxext/usecasesdocgen.py | 22 +-- capsul/test/test_completion.py | 6 +- capsul/test/test_fake_morphologist.py | 6 +- capsul/test/test_tiny_morphologist.py | 6 +- capsul/ui/static/engine.html | 2 +- completion.md | 6 +- doc/source/conf.py | 9 +- doc/source/installation.rst | 2 +- doc/source/sphinxext/numpy_ext/docscrape.py | 29 ++- .../sphinxext/numpy_ext/docscrape_sphinx.py | 16 +- doc/source/sphinxext/numpy_ext/numpydoc.py | 7 +- doc/source/status.rst | 2 +- doc/source/user_guide_tree/advanced_usage.rst | 4 +- doc/source/user_guide_tree/xml_spec.rst | 10 +- readme.md | 2 +- 119 files changed, 485 insertions(+), 535 deletions(-) diff --git a/capsul/application.py b/capsul/application.py index fb43d6285..801557353 100644 --- a/capsul/application.py +++ b/capsul/application.py @@ -55,7 +55,7 @@ def _is_nipype_interface_subclass(obj): class Capsul: """User entry point to Capsul features. This objects reads Capsul configuration in site and user environments. - It allows configuration customization and instanciation of a + It allows configuration customization and instantiation of a CapsulEngine instance to reach an execution environment. 
If database_path is given, it replaces @@ -255,13 +255,13 @@ def executable(definition, **kwargs): except ImportError as e: raise TypeError( f"Class {definition} cannot be used to create a Process " - "beacause its module cannot be imported : {e}" + f"because its module cannot be imported: {e}" ) cls = getattr(module, object_name, None) if cls is not definition: raise TypeError( f"Class {definition} cannot be used to create a Process " - f"beacause variable {object_name} of module {module_name} " + f"because variable {object_name} of module {module_name} " f"contains {cls}" ) result = definition(definition=f"{module_name}.{object_name}") diff --git a/capsul/config/configuration.py b/capsul/config/configuration.py index 2f38c0f23..a8d5b1a4d 100644 --- a/capsul/config/configuration.py +++ b/capsul/config/configuration.py @@ -168,7 +168,7 @@ class EngineConfiguration(Controller): ) def add_module(self, module_name, allow_existing=False): - """Loads a modle and adds it in the engine configuration. + """Loads a module and adds it in the engine configuration. This operation is performed automatically, thus should not need to be called manually. diff --git a/capsul/database/__init__.py b/capsul/database/__init__.py index 948073eb4..e9e00e69d 100644 --- a/capsul/database/__init__.py +++ b/capsul/database/__init__.py @@ -499,7 +499,7 @@ def worker_ended(self, engine_id, worker_id): def persistent(self, engine_id): """ - Return wether an engine is persistent or not. + Return whether an engine is persistent or not. """ raise NotImplementedError @@ -512,7 +512,7 @@ def set_persistent(self, engine_id, persistent): def dispose_engine(self, engine_id): """ Tell Capsul that this engine will not be used anymore by any client. - The ressource it uses must be freed as soon as possible. If no + The resource it uses must be freed as soon as possible. If no execution is running, engine is destroyed. Otherwise, workers will process ongoing executions and cleanup when done. """ diff --git a/capsul/database/redis.py b/capsul/database/redis.py index 2e2b4626a..b5f5d1f45 100644 --- a/capsul/database/redis.py +++ b/capsul/database/redis.py @@ -559,7 +559,7 @@ def dispose_engine(self, engine_id): # Removes association between label and engine_id self.redis.hdel("capsul:engine", label) self.redis.hdel(f"capsul:{engine_id}", "label") - # Check if some executions had been submited or are ongoing + # Check if some executions had been submitted or are ongoing # An empty list modified with Redis Lua scripts may be encoded as empty dict executions = json.loads( self.redis.hget(f"capsul:{engine_id}", "executions") diff --git a/capsul/database/sqlite.py b/capsul/database/sqlite.py index 458877f3f..04eb7b69e 100644 --- a/capsul/database/sqlite.py +++ b/capsul/database/sqlite.py @@ -471,7 +471,7 @@ def job_finished_json( sql = "SELECT ready, ongoing, failed, waiting, done FROM capsul_execution WHERE engine_id=? AND execution_id=?" row = sqlite.execute(sql, [engine_id, execution_id]).fetchone() - ready, ongoing, failed, waiting, done = [json.loads(i) for i in row] + ready, ongoing, failed, waiting, done = (json.loads(i) for i in row) ongoing.remove(job_id) if return_code != 0: failed.append(job_id) diff --git a/capsul/dataset.py b/capsul/dataset.py index 351c528bc..8a9cc7166 100644 --- a/capsul/dataset.py +++ b/capsul/dataset.py @@ -29,7 +29,7 @@ class Dataset(Controller): Dataset representation.
You don't need to define or instantiate this class yourself, it will be done automatically and internally in the path generation system. - Instead, users need to define datsets in the Capsul config. See :func:`generate_paths`. + Instead, users need to define datasets in the Capsul config. See :func:`generate_paths`. """ path: Directory @@ -676,7 +676,7 @@ def apply(self, metadata, process, parameter, initial_meta): continue if callable(v): if debug: - print("call modifier funciton for", k) + print("call modifier function for", k) print( ":", v( @@ -700,7 +700,7 @@ def apply(self, metadata, process, parameter, initial_meta): setattr(metadata, k, v) else: if debug: - print("call modifier funciton") + print("call modifier function") modifier(metadata, process, parameter, initial_meta=initial_meta) diff --git a/capsul/engine/__init__.py b/capsul/engine/__init__.py index 36493ead0..ddf7149f6 100644 --- a/capsul/engine/__init__.py +++ b/capsul/engine/__init__.py @@ -19,7 +19,7 @@ def execution_context(engine_label, engine_config, executable): # {'spm': {'spm12-standalone': {...}, 'spm8': {...}} # whereas EXecutionContext expects an execution-side single, filtered # config: {'spm': {...}} - # Thie filtering is done here in this function, but later after the context + # This filtering is done here in this function, but later after the context # is built. # So for now, give it only the dataset and config_modules part, removing # all modules config. diff --git a/capsul/info.py b/capsul/info.py index 55c653b6b..3c01c1e28 100644 --- a/capsul/info.py +++ b/capsul/info.py @@ -1,4 +1,3 @@ -from __future__ import absolute_import import os.path import sys @@ -19,7 +18,7 @@ _version_extra = version_extra # Expected by setup.py: string of form "X.Y.Z" -__version__ = "{0}.{1}.{2}".format(version_major, version_minor, version_micro) +__version__ = "{}.{}.{}".format(version_major, version_minor, version_micro) brainvisa_dependencies = [ "soma-base", @@ -84,25 +83,25 @@ PROVIDES = ["capsul"] REQUIRES = [ "redis <4.5.0", - "pydantic >={0}".format(PYDANTIC_MIN_VERSION), - "soma-base >={0}".format(SOMA_MIN_VERSION), - "soma-workflow >={0}".format(SOMA_WORKFLOW_MIN_VERSION), - "populse-db >={0}".format(POPULSE_DB_MIN_VERSION), + "pydantic >={}".format(PYDANTIC_MIN_VERSION), + "soma-base >={}".format(SOMA_MIN_VERSION), + "soma-workflow >={}".format(SOMA_WORKFLOW_MIN_VERSION), + "populse-db >={}".format(POPULSE_DB_MIN_VERSION), "PyYAML", ] EXTRA_REQUIRES = { "test": ["pytest", "jupyter"], "doc": [ "sphinx >=1.0", - "numpy >={0}".format(NUMPY_MIN_VERSION), + "numpy >={}".format(NUMPY_MIN_VERSION), ], "nipype": [ "traits >={}".format(TRAITS_MIN_VERSION), - "numpy >={0}".format(NUMPY_MIN_VERSION), - "scipy >={0}".format(SCIPY_MIN_VERSION), - "nibabel >={0}".format(NIBABEL_MIN_VERSION), - "networkx >={0}".format(NETWORKX_MIN_VERSION), - "nipype =={0}".format(NIPYPE_VERSION), + "numpy >={}".format(NUMPY_MIN_VERSION), + "scipy >={}".format(SCIPY_MIN_VERSION), + "nibabel >={}".format(NIBABEL_MIN_VERSION), + "networkx >={}".format(NETWORKX_MIN_VERSION), + "nipype =={}".format(NIPYPE_VERSION), ], } diff --git a/capsul/pipeline/custom_nodes/cv_node.py b/capsul/pipeline/custom_nodes/cv_node.py index 896a01104..ed07ba0de 100644 --- a/capsul/pipeline/custom_nodes/cv_node.py +++ b/capsul/pipeline/custom_nodes/cv_node.py @@ -26,7 +26,7 @@ def __init__(self, pipeline, name, input_type=None): in_fields.append({"name": tr, "optional": True}) for tr in out_fieldsl: out_fields.append({"name": tr, "optional": True}) - 
super(CrossValidationFoldNode, self).__init__( + super().__init__( None, pipeline, name, in_fields, out_fields ) if input_type: diff --git a/capsul/pipeline/custom_nodes/loo_node.py b/capsul/pipeline/custom_nodes/loo_node.py index 12f61d2b3..143fd87e8 100644 --- a/capsul/pipeline/custom_nodes/loo_node.py +++ b/capsul/pipeline/custom_nodes/loo_node.py @@ -4,7 +4,6 @@ """ -from __future__ import absolute_import from capsul.process.node import Node from soma.controller import Controller, Any, type_from_str @@ -47,7 +46,7 @@ def __init__( in_fields.append({"name": tr, "optional": True}) for tr in out_fieldsl: out_fields.append({"name": tr, "optional": True}) - super(LeaveOneOutNode, self).__init__( + super().__init__( None, pipeline, name, in_fields, out_fields ) if input_type: diff --git a/capsul/pipeline/custom_nodes/map_node.py b/capsul/pipeline/custom_nodes/map_node.py index 0b7f4ac0c..55fafae4a 100644 --- a/capsul/pipeline/custom_nodes/map_node.py +++ b/capsul/pipeline/custom_nodes/map_node.py @@ -4,7 +4,6 @@ """ -from __future__ import absolute_import from capsul.process.node import Node, Plug from soma.controller import Controller, File, undefined, field, type_from_str @@ -54,7 +53,7 @@ def __init__( for tr in input_names: in_fields.append({"name": tr, "optional": False}) - super(MapNode, self).__init__(None, pipeline, name, in_fields, out_fields) + super().__init__(None, pipeline, name, in_fields, out_fields) for tr, ptype in zip(input_names, ptypes): self.add_field(tr, list[ptype], output=False, default_factory=list) diff --git a/capsul/pipeline/custom_nodes/reduce_node.py b/capsul/pipeline/custom_nodes/reduce_node.py index 8d49044fc..1b7df4ae4 100644 --- a/capsul/pipeline/custom_nodes/reduce_node.py +++ b/capsul/pipeline/custom_nodes/reduce_node.py @@ -51,7 +51,7 @@ def __init__( for tr in output_names: out_fields.append({"name": tr, "optional": False}) - super(ReduceNode, self).__init__(None, pipeline, name, in_fields, out_fields) + super().__init__(None, pipeline, name, in_fields, out_fields) for tr, ptype in zip(output_names, ptypes): self.add_field( diff --git a/capsul/pipeline/custom_nodes/strcat_node.py b/capsul/pipeline/custom_nodes/strcat_node.py index 8c7eb7202..150c58483 100644 --- a/capsul/pipeline/custom_nodes/strcat_node.py +++ b/capsul/pipeline/custom_nodes/strcat_node.py @@ -3,7 +3,6 @@ ------------------- """ -from __future__ import absolute_import from capsul.process.node import Node from soma.controller import Controller, Any, type_from_str @@ -62,7 +61,7 @@ def __init__( node_inputs.append( {"name": concat_plug, "optional": concat_plug in make_optional} ) - super(StrCatNode, self).__init__( + super().__init__( None, pipeline, name, node_inputs, node_outputs ) self._concat_sequence = params diff --git a/capsul/pipeline/custom_nodes/strconv.py b/capsul/pipeline/custom_nodes/strconv.py index c8b6a0a74..a47267d0a 100644 --- a/capsul/pipeline/custom_nodes/strconv.py +++ b/capsul/pipeline/custom_nodes/strconv.py @@ -24,7 +24,7 @@ def __init__(self, pipeline, name, input_type=None): in_fields.append({"name": tr, "optional": True}) for tr in out_fieldsl: out_fields.append({"name": tr, "optional": True}) - super(StrConvNode, self).__init__(None, pipeline, name, in_fields, out_fields) + super().__init__(None, pipeline, name, in_fields, out_fields) if input_type: ptype = input_type else: diff --git a/capsul/pipeline/pipeline.py b/capsul/pipeline/pipeline.py index e484aa508..de8101148 100644 --- a/capsul/pipeline/pipeline.py +++ b/capsul/pipeline/pipeline.py @@ -143,7 +143,7 
@@ def pipeline_definition(self): Attributes ---------- nodes: dict {node_name: node} - a dictionary containing the pipline nodes and where the pipeline node + a dictionary containing the pipeline nodes and where the pipeline node name is '' """ @@ -217,8 +217,8 @@ def __init__(self, autoexport_nodes_parameters=None, **kwargs): raise TypeError("No definition string given to Pipeline constructor") # Inheritance - super(Pipeline, self).__init__(**kwargs) - super(Pipeline, self).add_field( + super().__init__(**kwargs) + super().add_field( "nodes_activation", Controller, hidden=self.hide_nodes_activation ) @@ -400,7 +400,7 @@ def add_process( if name in self.nodes: raise ValueError( "Pipeline cannot have two nodes with the " - "same name : {0}".format(name) + "same name : {}".format(name) ) if skip_invalid: @@ -464,7 +464,7 @@ def remove_node(self, node_name): if not plug.output: for link_def in list(plug.links_from): src_node, src_plug = link_def[:2] - link_descr = "%s.%s->%s.%s" % ( + link_descr = "{}.{}->{}.{}".format( src_node, src_plug, node_name, @@ -474,7 +474,7 @@ def remove_node(self, node_name): else: for link_def in list(plug.links_to): dst_node, dst_plug = link_def[:2] - link_descr = "%s.%s->%s.%s" % ( + link_descr = "{}.{}->{}.{}".format( node_name, plug_name, dst_node, @@ -574,7 +574,7 @@ def create_switch( Each key of this dictionary is a possible value for the switch parameter. The corresponding dictionary value contains all the links between other nodes plugs and switch outputs that are - activated when the value is selected. Theses links are given as + activated when the value is selected. These links are given as a dictionary whose items are (output, source) where output is the name of an output parameter and source is a string containing a node name and a parameter name separated by a dot (or just a @@ -589,7 +589,7 @@ def create_switch( default the value is taken from the first connected source. switch_value: str (optional) Initial value of the switch parameter (one of the inputs names). - Defaults to fisrt possible switch value. + Defaults to first possible switch value. Examples -------- @@ -669,13 +669,13 @@ def add_switch( output_types=None, switch_value=None, ): - """Obsolete. May create a non functionnal switch. Use create_switch() + """Obsolete. May create a non functional switch. Use create_switch() instead. 
""" # Check the unicity of the name we want to insert if name in self.nodes: raise ValueError( - "Pipeline cannot have two nodes with the same " "name: {0}".format(name) + "Pipeline cannot have two nodes with the same " "name: {}".format(name) ) # Create the node @@ -864,7 +864,7 @@ def parse_parameter(self, name, check=True): node = None plug = None else: - raise ValueError("{0} is not a valid node name".format(node_name)) + raise ValueError("{} is not a valid node name".format(node_name)) plug_name = name[dot + 1 :] # Check if plug nexists @@ -885,8 +885,8 @@ def parse_parameter(self, name, check=True): break if err and check: raise ValueError( - "'{0}' is not a valid parameter name for " - "node '{1}'".format( + "'{}' is not a valid parameter name for " + "node '{}'".format( plug_name, (node_name if node_name else "pipeline") ) ) @@ -968,16 +968,16 @@ def add_link(self, link, weak_link=False, allow_export=False): # Assure that pipeline plugs are not linked if not source_plug.output and source_node is not self: - raise ValueError("Cannot link from an input plug: {0}".format(link)) + raise ValueError("Cannot link from an input plug: {}".format(link)) if source_plug.output and source_node is self: raise ValueError( - "Cannot link from a pipeline output " "plug: {0}".format(link) + "Cannot link from a pipeline output " "plug: {}".format(link) ) if dest_plug.output and dest_node is not self: - raise ValueError("Cannot link to an output plug: {0}".format(link)) + raise ValueError("Cannot link to an output plug: {}".format(link)) if not dest_plug.output and dest_node is self: raise ValueError( - "Cannot link to a pipeline input " "plug: {0}".format(link) + "Cannot link to a pipeline input " "plug: {}".format(link) ) # Propagate the plug value from source to destination @@ -1924,10 +1924,10 @@ def pipeline_state(self): ) plugs_list.append((plug_name, plug_dict)) for nn, pn, n, p, weak_link in plug.links_to: - link_name = "%s:%s" % (n.full_name, pn) + link_name = "{}:{}".format(n.full_name, pn) links_to_dict[link_name] = weak_link for nn, pn, n, p, weak_link in plug.links_from: - link_name = "%s:%s" % (n.full_name, pn) + link_name = "{}:{}".format(n.full_name, pn) links_from_dict[link_name] = weak_link return result @@ -1946,17 +1946,17 @@ def compare_to_state(self, pipeline_state): def compare_dict(ref_dict, other_dict): for ref_key, ref_value in ref_dict.items(): if ref_key not in other_dict: - yield "%s = %s is missing" % (ref_key, repr(ref_value)) + yield "{} = {} is missing".format(ref_key, repr(ref_value)) else: other_value = other_dict.pop(ref_key) if ref_value != other_value: - yield "%s = %s differs from %s" % ( + yield "{} = {} differs from {}".format( ref_key, repr(ref_value), repr(other_value), ) for other_key, other_value in other_dict.items(): - yield "%s=%s is new" % (other_key, repr(other_value)) + yield "{}={} is new".format(other_key, repr(other_value)) pipeline_state = deepcopy(pipeline_state) for node in self.all_nodes(): @@ -1967,7 +1967,7 @@ def compare_dict(ref_dict, other_dict): else: plugs_list = OrderedDict(node_dict.pop("plugs")) result.extend( - 'in node "%s": %s' % (node_name, i) + 'in node "{}": {}'.format(node_name, i) for i in compare_dict( dict( name=node.name, @@ -1999,7 +1999,7 @@ def compare_dict(ref_dict, other_dict): links_to_dict = plug_dict.pop("links_to") links_from_dict = plug_dict.pop("links_from") result.extend( - 'in plug "%s:%s": %s' % (node_name, plug_name, i) + 'in plug "{}:{}": {}'.format(node_name, plug_name, i) for i in compare_dict( dict( 
enabled=plug.enabled, @@ -2012,7 +2012,7 @@ def compare_dict(ref_dict, other_dict): ) ) for nn, pn, n, p, weak_link in plug.links_to: - link_name = "%s:%s" % (n.full_name, pn) + link_name = "{}:{}".format(n.full_name, pn) if link_name not in links_to_dict: result.append( 'in plug "%s:%s": missing link to %s' @@ -2042,7 +2042,7 @@ def compare_dict(ref_dict, other_dict): ) ) for nn, pn, n, p, weak_link in plug.links_from: - link_name = "%s:%s" % (n.full_name, pn) + link_name = "{}:{}".format(n.full_name, pn) if link_name not in links_from_dict: result.append( 'in plug "%s:%s": missing link from ' @@ -2144,7 +2144,7 @@ def add_pipeline_step(self, step_name, nodes, enabled=True): "logical order regarding the workflow streams. They are " "different from sub-pipelines in that steps are purely " "virtual groups, they do not have parameters. To activate " - "or diasable a step, just do:\n" + "or disable a step, just do:\n" "pipeline.steps.my_step = False\n" "\n" "To get the nodes list in a step:\n" @@ -2313,7 +2313,7 @@ def check_requirements(self, environment="global", message_list=None): A pipeline will return a list of unique configuration values. """ # start with pipeline-level requirements - conf = super(Pipeline, self).check_requirements( + conf = super().check_requirements( environment, message_list=message_list ) if conf is None: @@ -2527,7 +2527,7 @@ def get_linked_items( not activated. The result is a generator of pairs (node, plug_name). - direction may be a sting, 'links_from', 'links_to', or a tuple + direction may be a string, 'links_from', 'links_to', or a tuple ('links_from', 'links_to'). """ if plug_name is None: diff --git a/capsul/pipeline/pipeline_nodes.py b/capsul/pipeline/pipeline_nodes.py index d09e8da90..c6f92f04d 100644 --- a/capsul/pipeline/pipeline_nodes.py +++ b/capsul/pipeline/pipeline_nodes.py @@ -135,7 +135,7 @@ def __init__( raise Exception( "The Switch node input and output parameters " "are inconsistent: expect list, " - "got {0}, {1}".format(type(inputs), type(outputs)) + "got {}, {}".format(type(inputs), type(outputs)) ) # private copy of outputs and inputs @@ -147,7 +147,7 @@ def __init__( for switch_name in inputs: flat_inputs.extend( [ - "{0}_switch_{1}".format(switch_name, plug_name) + "{}_switch_{}".format(switch_name, plug_name) for plug_name in outputs ] ) @@ -320,14 +320,14 @@ def _any_attribute_changed(self, new, old, name): def __setstate__(self, state): self.__block_output_propagation = True - super(Switch, self).__setstate__(state) + super().__setstate__(state) def get_connections_through(self, plug_name, single=False): if not self.activated or not self.enabled: return [] plug = self.plugs[plug_name] if plug.output: - connected_plug_name = "%s_switch_%s" % (self.switch, plug_name) + connected_plug_name = "{}_switch_{}".format(self.switch, plug_name) else: splitter = plug_name.split("_switch_") if len(splitter) != 2: diff --git a/capsul/pipeline/pipeline_tools.py b/capsul/pipeline/pipeline_tools.py index 307b66cba..764ab3875 100644 --- a/capsul/pipeline/pipeline_tools.py +++ b/capsul/pipeline/pipeline_tools.py @@ -344,7 +344,7 @@ def _link_color(plug, link): if use_nodes_pos: pos = nodes_pos.get(id) if pos is not None: - node_props.update({"pos": "%f,%f" % (pos[0] * scale, -pos[1] * scale)}) + node_props.update({"pos": "{:f},{:f}".format(pos[0] * scale, -pos[1] * scale)}) size = nodes_sizes.get(id) if size is not None: node_props.update( @@ -375,7 +375,7 @@ def _link_color(plug, link): edge = (id, dest) old_edge = edges.get(edge) if old_edge is not 
None: - # use stongest color/style + # use strongest color/style if not old_edge[2]: weak = False style = old_edge[0]["style"] @@ -463,7 +463,7 @@ def dot_graph_from_workflow( if use_nodes_pos: pos = pipeline.node_position.get(n) if pos is not None: - node_props.update({"pos": "%f,%f" % (pos[0] * scale, -pos[1] * scale)}) + node_props.update({"pos": "{:f},{:f}".format(pos[0] * scale, -pos[1] * scale)}) size = nodes_sizes.get(n) if size is not None: node_props.update( @@ -524,13 +524,13 @@ def _str_repr(item): ) if len(props) != 0: attstr = " " + attstr - fileobj.write(' %s [label="%s" style="filled"%s];\n' % (id, node, attstr)) + fileobj.write(' {} [label="{}" style="filled"{}];\n'.format(id, node, attstr)) for edge, descr in dot_graph[1].items(): props = descr[0] attstr = " ".join( ["=".join([aname, _str_repr(val)]) for aname, val in props.items()] ) - fileobj.write(' "%s" -> "%s" [%s];\n' % (edge[0], edge[1], attstr)) + fileobj.write(' "{}" -> "{}" [{}];\n'.format(edge[0], edge[1], attstr)) fileobj.write("}\n") @@ -734,7 +734,7 @@ def nodes_with_existing_outputs( process = node if recursive and isinstance(process, Pipeline): nodes += [ - ("%s.%s" % (node_name, new_name), new_node) + ("{}.{}".format(node_name, new_name), new_node) for new_name, new_node in process.nodes.items() if new_name != "" ] @@ -810,7 +810,7 @@ def nodes_with_missing_inputs(pipeline, recursive=True): process = node.process if recursive and isinstance(process, Pipeline): nodes += [ - ("%s.%s" % (node_name, new_name), new_node) + ("{}.{}".format(node_name, new_name), new_node) for new_name, new_node in process.nodes.items() if new_name != "" ] @@ -1215,7 +1215,7 @@ def load_pipeline_parameters(filename, pipeline): """ if filename: - with io.open(filename, "r", encoding="utf8") as file: + with open(filename, encoding="utf8") as file: dic = json.load(file) if "pipeline_parameters" not in dic: @@ -1262,7 +1262,7 @@ def find_node(pipeline, node): if sn is not n and isinstance(sn, Pipeline): pipelines.append((sn, names + [sk])) - raise KeyError("Node %s not found in the pipeline %s" % (node.name, pipeline.name)) + raise KeyError("Node {} not found in the pipeline {}".format(node.name, pipeline.name)) def nodes_full_names(executable): @@ -1378,7 +1378,7 @@ def __init__(self, **kwargs): has_default = True elif field.default_factory != dataclasses.MISSING: # difficult/implssible to replicate... 
- class def_fac(object): + class def_fac: def __init__(self, value): self.value = value @@ -1392,11 +1392,11 @@ def __repr__(self): meta["optional"] = True meta_str = "" if meta: - meta_str = ", ".join("%s=%s" % (k, repr(v)) for k, v in meta.items()) + meta_str = ", ".join("{}={}".format(k, repr(v)) for k, v in meta.items()) meta_str = ", " + meta_str - f.write(' self.add_field("%s", %s%s)\n' % (name, t_str, meta_str)) + f.write(' self.add_field("{}", {}{})\n'.format(name, t_str, meta_str)) if value is not undefined: - f.write(" self.%s = %s\n" % (name, repr(value))) + f.write(" self.{} = {}\n".format(name, repr(value))) f.write( """ diff --git a/capsul/pipeline/process_iteration.py b/capsul/pipeline/process_iteration.py index 4653354a0..ac78f6899 100644 --- a/capsul/pipeline/process_iteration.py +++ b/capsul/pipeline/process_iteration.py @@ -26,7 +26,7 @@ def __init__(self, definition, process, iterative_parameters, context_name=None) # Avoid circular import from capsul.api import executable - super(ProcessIteration, self).__init__(definition=definition) + super().__init__(definition=definition) self.process = executable(process) if context_name is not None: self.process.context_name = context_name @@ -133,7 +133,7 @@ def iteration_size(self): raise ValueError( "Iterative parameter values must be lists of the same size: %s" % "\n".join( - "%s=%s" % (n, len(getattr(self, n))) + "{}={}".format(n, len(getattr(self, n))) for n in self.iterative_parameters if getattr(self, n) is not undefined ) diff --git a/capsul/pipeline/python_export.py b/capsul/pipeline/python_export.py index 9ae3dce2a..139b6d34d 100644 --- a/capsul/pipeline/python_export.py +++ b/capsul/pipeline/python_export.py @@ -7,8 +7,6 @@ ------------------------ """ -from __future__ import print_function -from __future__ import absolute_import from soma.controller import Controller, undefined import os @@ -75,7 +73,7 @@ def _write_process(process, pyf, name, enabled, skip_invalid): if skip_invalid: node_options += ", skip_invalid=True" print( - ' self.add_process("%s", "%s"%s)' % (name, procname, node_options), + ' self.add_process("{}", "{}"{})'.format(name, procname, node_options), file=pyf, ) @@ -142,7 +140,7 @@ def _write_process(process, pyf, name, enabled, skip_invalid): ) if isinstance(snode, Pipeline): - sself_str = '%s.nodes["%s"]' % (self_str, "%s") + sself_str = '{}.nodes["{}"]'.format(self_str, "%s") for node_name, snode in snode.nodes.items(): scnode = cnode.nodes[node_name] @@ -179,9 +177,9 @@ def _write_custom_node(node, pyf, name, enabled): nodename = ".".join((mod, classname)) if hasattr(node, "configured_controller"): c = node.configured_controller() - params = dict( - (p, v) for p, v in c.asdict().items() if v not in (None, undefined) - ) + params = { + p: v for p, v in c.asdict().items() if v not in (None, undefined) + } print( ' self.add_custom_node("%s", "%s", %s)' % (name, nodename, get_repr_value(params)), @@ -189,7 +187,7 @@ def _write_custom_node(node, pyf, name, enabled): ) else: print( - ' self.add_custom_node("%s", "%s")' % (name, nodename), file=pyf + ' self.add_custom_node("{}", "{}")'.format(name, nodename), file=pyf ) # optional plugs for plug_name, plug in node.plugs.items(): @@ -360,14 +358,14 @@ def _write_links(pipeline, pyf): exported_plug = _write_export(pipeline, pyf, src) exported.add(src) else: - src = "%s.%s" % (node_name, plug_name) + src = "{}.{}".format(node_name, plug_name) if link[0] == "": dst = link[1] if dst not in exported: exported_plug = _write_export(pipeline, pyf, dst) 
exported.add(dst) else: - dst = "%s.%s" % (link[0], link[1]) + dst = "{}.{}".format(link[0], link[1]) if not exported_plug or ".".join(exported_plug) not in ( src, dst, @@ -422,7 +420,7 @@ def _write_nodes_positions(pipeline, pyf): if not isinstance(pos, (list, tuple)): # pos is probably a QPointF pos = (pos.x(), pos.y()) - print(' "%s": %s,' % (node_name, repr(pos)), file=pyf) + print(' "{}": {},'.format(node_name, repr(pos)), file=pyf) print(" }", file=pyf) if hasattr(pipeline, "scene_scale_factor"): print( @@ -440,7 +438,7 @@ def _write_nodes_dimensions(pipeline, pyf): for node_name, dim in pipeline.node_dimension.items(): if not isinstance(dim, (list, tuple)): dim = (dim.width(), dim.height()) - print(' "%s": %s,' % (node_name, repr(dim)), file=pyf) + print(' "{}": {},'.format(node_name, repr(dim)), file=pyf) print(" }", file=pyf) ###################################################### @@ -461,7 +459,7 @@ def _write_doc(pipeline, pyf): for i in notepos: if ( splitdoc[i + 2].find( - "* Type '{0}.help()'".format( + "* Type '{}.help()'".format( pipeline.__class__.__name__ ) ) @@ -500,7 +498,7 @@ def _write_values(pipeline, pyf): if first: first = False print("\n # default and initial values", file=pyf) - print(" self.%s = %s" % (param_name, value_repr), file=pyf) + print(" self.{} = {}".format(param_name, value_repr), file=pyf) class_name = type(pipeline).__name__ if class_name == "Pipeline": diff --git a/capsul/pipeline/test/fake_morphologist/acpcorientation.py b/capsul/pipeline/test/fake_morphologist/acpcorientation.py index 192e316fd..744b60958 100644 --- a/capsul/pipeline/test/fake_morphologist/acpcorientation.py +++ b/capsul/pipeline/test/fake_morphologist/acpcorientation.py @@ -10,7 +10,7 @@ class AcpcOrientation(Process): def __init__(self, **kwargs): - super(AcpcOrientation, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "StandardACPC" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/aimsconverter.py b/capsul/pipeline/test/fake_morphologist/aimsconverter.py index 79da03947..d9f330bd0 100644 --- a/capsul/pipeline/test/fake_morphologist/aimsconverter.py +++ b/capsul/pipeline/test/fake_morphologist/aimsconverter.py @@ -5,7 +5,7 @@ class AimsConverter(Process): def __init__(self, **kwargs): - super(AimsConverter, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "converter" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/baladinnormalizationtoaims.py b/capsul/pipeline/test/fake_morphologist/baladinnormalizationtoaims.py index 2d10397d7..577ad1ff0 100644 --- a/capsul/pipeline/test/fake_morphologist/baladinnormalizationtoaims.py +++ b/capsul/pipeline/test/fake_morphologist/baladinnormalizationtoaims.py @@ -5,7 +5,7 @@ class BaladinNormalizationToAims(Process): def __init__(self, **kwargs): - super(BaladinNormalizationToAims, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "ConvertBaladinNormalizationToAIMS" self.add_field("read", File, read=True, extensions=[".txt"], write=False) diff --git a/capsul/pipeline/test/fake_morphologist/brainsegmentation.py b/capsul/pipeline/test/fake_morphologist/brainsegmentation.py index e47efff21..9ecc360b4 100644 --- a/capsul/pipeline/test/fake_morphologist/brainsegmentation.py +++ b/capsul/pipeline/test/fake_morphologist/brainsegmentation.py @@ -5,7 +5,7 @@ class BrainSegmentation(Process): def __init__(self, **kwargs): - super(BrainSegmentation, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "BrainSegmentation" self.add_field( diff --git 
a/capsul/pipeline/test/fake_morphologist/brainvolumes.py b/capsul/pipeline/test/fake_morphologist/brainvolumes.py index 43cf56faa..bb27d2111 100644 --- a/capsul/pipeline/test/fake_morphologist/brainvolumes.py +++ b/capsul/pipeline/test/fake_morphologist/brainvolumes.py @@ -5,7 +5,7 @@ class brainvolumes(Process): def __init__(self, **kwargs): - super(brainvolumes, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "GlobalMorphometry" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/fslnormalizationtoaims.py b/capsul/pipeline/test/fake_morphologist/fslnormalizationtoaims.py index bb5d729a2..743ccc8b0 100644 --- a/capsul/pipeline/test/fake_morphologist/fslnormalizationtoaims.py +++ b/capsul/pipeline/test/fake_morphologist/fslnormalizationtoaims.py @@ -5,7 +5,7 @@ class FSLnormalizationToAims(Process): def __init__(self, **kwargs): - super(FSLnormalizationToAims, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "ConvertFSLnormalizationToAIMS" self.add_field("read", File, read=True, extensions=[".mat"], write=False) diff --git a/capsul/pipeline/test/fake_morphologist/greywhiteclassificationhemi.py b/capsul/pipeline/test/fake_morphologist/greywhiteclassificationhemi.py index bed86c03b..9cfbd8cb4 100644 --- a/capsul/pipeline/test/fake_morphologist/greywhiteclassificationhemi.py +++ b/capsul/pipeline/test/fake_morphologist/greywhiteclassificationhemi.py @@ -5,7 +5,7 @@ class GreyWhiteClassificationHemi(Process): def __init__(self, **kwargs): - super(GreyWhiteClassificationHemi, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "GreyWhiteClassification" self.add_field("side", Literal["left", "right"], optional=True) diff --git a/capsul/pipeline/test/fake_morphologist/greywhitemesh.py b/capsul/pipeline/test/fake_morphologist/greywhitemesh.py index 6cb0e50e1..fec7e0467 100644 --- a/capsul/pipeline/test/fake_morphologist/greywhitemesh.py +++ b/capsul/pipeline/test/fake_morphologist/greywhitemesh.py @@ -5,7 +5,7 @@ class GreyWhiteMesh(Process): def __init__(self, **kwargs): - super(GreyWhiteMesh, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "GreyWhiteMesh" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/greywhitetopology.py b/capsul/pipeline/test/fake_morphologist/greywhitetopology.py index ccb7b6453..d95a15c1c 100644 --- a/capsul/pipeline/test/fake_morphologist/greywhitetopology.py +++ b/capsul/pipeline/test/fake_morphologist/greywhitetopology.py @@ -5,7 +5,7 @@ class GreyWhiteTopology(Process): def __init__(self, **kwargs): - super(GreyWhiteTopology, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "GreyWhiteTopology" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/histoanalysis.py b/capsul/pipeline/test/fake_morphologist/histoanalysis.py index 32f7b4e85..bf690b1ac 100644 --- a/capsul/pipeline/test/fake_morphologist/histoanalysis.py +++ b/capsul/pipeline/test/fake_morphologist/histoanalysis.py @@ -5,7 +5,7 @@ class HistoAnalysis(Process): def __init__(self, **kwargs): - super(HistoAnalysis, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "HistoAnalysis" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/importt1mri.py b/capsul/pipeline/test/fake_morphologist/importt1mri.py index cb0412ae7..b1989a7ae 100644 --- a/capsul/pipeline/test/fake_morphologist/importt1mri.py +++ b/capsul/pipeline/test/fake_morphologist/importt1mri.py @@ -5,7 +5,7 @@ class ImportT1MRI(Process): def __init__(self, **kwargs): - super(ImportT1MRI, 
self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "importation" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/morpho_report.py b/capsul/pipeline/test/fake_morphologist/morpho_report.py index bc0e876e1..050894103 100644 --- a/capsul/pipeline/test/fake_morphologist/morpho_report.py +++ b/capsul/pipeline/test/fake_morphologist/morpho_report.py @@ -5,7 +5,7 @@ class morpho_report(Process): def __init__(self, **kwargs): - super(morpho_report, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "Report" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/normalization_aimsmiregister.py b/capsul/pipeline/test/fake_morphologist/normalization_aimsmiregister.py index f8e639ee1..b0edc48b2 100644 --- a/capsul/pipeline/test/fake_morphologist/normalization_aimsmiregister.py +++ b/capsul/pipeline/test/fake_morphologist/normalization_aimsmiregister.py @@ -5,7 +5,7 @@ class normalization_aimsmiregister(Process): def __init__(self, **kwargs): - super(normalization_aimsmiregister, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "Normalization_AimsMIRegister" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/normalization_baladin.py b/capsul/pipeline/test/fake_morphologist/normalization_baladin.py index cdbd39f33..6611598af 100644 --- a/capsul/pipeline/test/fake_morphologist/normalization_baladin.py +++ b/capsul/pipeline/test/fake_morphologist/normalization_baladin.py @@ -5,7 +5,7 @@ class Normalization_Baladin(Process): def __init__(self, **kwargs): - super(Normalization_Baladin, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "NormalizeBaladin" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/normalization_fsl_reinit.py b/capsul/pipeline/test/fake_morphologist/normalization_fsl_reinit.py index 9840f91ae..cc841cad7 100644 --- a/capsul/pipeline/test/fake_morphologist/normalization_fsl_reinit.py +++ b/capsul/pipeline/test/fake_morphologist/normalization_fsl_reinit.py @@ -5,7 +5,7 @@ class Normalization_FSL_reinit(Process): def __init__(self, **kwargs): - super(Normalization_FSL_reinit, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "NormalizeFSL" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/normalization_t1_spm12_reinit.py b/capsul/pipeline/test/fake_morphologist/normalization_t1_spm12_reinit.py index 09bfba6c5..7991b8272 100644 --- a/capsul/pipeline/test/fake_morphologist/normalization_t1_spm12_reinit.py +++ b/capsul/pipeline/test/fake_morphologist/normalization_t1_spm12_reinit.py @@ -5,7 +5,7 @@ class normalization_t1_spm12_reinit(Process): def __init__(self, **kwargs): - super(normalization_t1_spm12_reinit, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "normalization_t1_spm12_reinit" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/normalization_t1_spm8_reinit.py b/capsul/pipeline/test/fake_morphologist/normalization_t1_spm8_reinit.py index 8b99f6488..cd7de8e30 100644 --- a/capsul/pipeline/test/fake_morphologist/normalization_t1_spm8_reinit.py +++ b/capsul/pipeline/test/fake_morphologist/normalization_t1_spm8_reinit.py @@ -5,7 +5,7 @@ class normalization_t1_spm8_reinit(Process): def __init__(self, **kwargs): - super(normalization_t1_spm8_reinit, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "normalization_t1_spm8_reinit" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/pialmesh.py b/capsul/pipeline/test/fake_morphologist/pialmesh.py index 
afe2a649a..a42fda393 100644 --- a/capsul/pipeline/test/fake_morphologist/pialmesh.py +++ b/capsul/pipeline/test/fake_morphologist/pialmesh.py @@ -5,7 +5,7 @@ class PialMesh(Process): def __init__(self, **kwargs): - super(PialMesh, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "PialMesh" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/reorientanatomy.py b/capsul/pipeline/test/fake_morphologist/reorientanatomy.py index 878e9889d..e3c4c6ecb 100644 --- a/capsul/pipeline/test/fake_morphologist/reorientanatomy.py +++ b/capsul/pipeline/test/fake_morphologist/reorientanatomy.py @@ -5,7 +5,7 @@ class ReorientAnatomy(Process): def __init__(self, **kwargs): - super(ReorientAnatomy, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "ReorientAnatomy" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/scalpmesh.py b/capsul/pipeline/test/fake_morphologist/scalpmesh.py index 46f022db0..758241b31 100644 --- a/capsul/pipeline/test/fake_morphologist/scalpmesh.py +++ b/capsul/pipeline/test/fake_morphologist/scalpmesh.py @@ -5,7 +5,7 @@ class ScalpMesh(Process): def __init__(self, **kwargs): - super(ScalpMesh, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "HeadMesh" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/skullstripping.py b/capsul/pipeline/test/fake_morphologist/skullstripping.py index be0ddc5e1..d2ab0f502 100644 --- a/capsul/pipeline/test/fake_morphologist/skullstripping.py +++ b/capsul/pipeline/test/fake_morphologist/skullstripping.py @@ -5,7 +5,7 @@ class skullstripping(Process): def __init__(self, **kwargs): - super(skullstripping, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "SkullStripping" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/splitbrain.py b/capsul/pipeline/test/fake_morphologist/splitbrain.py index a81c41e8d..8e4b2c78f 100644 --- a/capsul/pipeline/test/fake_morphologist/splitbrain.py +++ b/capsul/pipeline/test/fake_morphologist/splitbrain.py @@ -5,7 +5,7 @@ class SplitBrain(Process): def __init__(self, **kwargs): - super(SplitBrain, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "SplitBrain" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/spmsn3dtoaims.py b/capsul/pipeline/test/fake_morphologist/spmsn3dtoaims.py index f768a2745..7ede865ae 100644 --- a/capsul/pipeline/test/fake_morphologist/spmsn3dtoaims.py +++ b/capsul/pipeline/test/fake_morphologist/spmsn3dtoaims.py @@ -5,7 +5,7 @@ class SPMsn3dToAims(Process): def __init__(self, **kwargs): - super(SPMsn3dToAims, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "ConvertSPMnormalizationToAIMS" self.add_field("read", File, read=True, extensions=[".mat"], write=False) diff --git a/capsul/pipeline/test/fake_morphologist/sulcideeplabeling.py b/capsul/pipeline/test/fake_morphologist/sulcideeplabeling.py index b227f6d49..eb77e2d93 100644 --- a/capsul/pipeline/test/fake_morphologist/sulcideeplabeling.py +++ b/capsul/pipeline/test/fake_morphologist/sulcideeplabeling.py @@ -5,7 +5,7 @@ class SulciDeepLabeling(Process): def __init__(self, **kwargs): - super(SulciDeepLabeling, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "CNN_recognition19" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/sulcigraph.py b/capsul/pipeline/test/fake_morphologist/sulcigraph.py index 0f833c5ba..9adf9ff0e 100644 --- a/capsul/pipeline/test/fake_morphologist/sulcigraph.py +++ 
b/capsul/pipeline/test/fake_morphologist/sulcigraph.py @@ -5,7 +5,7 @@ class SulciGraph(Process): def __init__(self, **kwargs): - super(SulciGraph, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "CorticalFoldsGraph" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/sulcigraphmorphometrybysubject.py b/capsul/pipeline/test/fake_morphologist/sulcigraphmorphometrybysubject.py index 198e37bdb..f719005a9 100644 --- a/capsul/pipeline/test/fake_morphologist/sulcigraphmorphometrybysubject.py +++ b/capsul/pipeline/test/fake_morphologist/sulcigraphmorphometrybysubject.py @@ -5,7 +5,7 @@ class sulcigraphmorphometrybysubject(Process): def __init__(self, **kwargs): - super(sulcigraphmorphometrybysubject, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "SulcalMorphometry" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/sulcilabellingann.py b/capsul/pipeline/test/fake_morphologist/sulcilabellingann.py index 30443ff8a..650e854a7 100644 --- a/capsul/pipeline/test/fake_morphologist/sulcilabellingann.py +++ b/capsul/pipeline/test/fake_morphologist/sulcilabellingann.py @@ -5,7 +5,7 @@ class SulciLabellingANN(Process): def __init__(self, **kwargs): - super(SulciLabellingANN, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "recognition2000" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/sulcilabellingspamglobal.py b/capsul/pipeline/test/fake_morphologist/sulcilabellingspamglobal.py index 9999d56f6..8db7d8f52 100644 --- a/capsul/pipeline/test/fake_morphologist/sulcilabellingspamglobal.py +++ b/capsul/pipeline/test/fake_morphologist/sulcilabellingspamglobal.py @@ -5,7 +5,7 @@ class SulciLabellingSPAMGlobal(Process): def __init__(self, **kwargs): - super(SulciLabellingSPAMGlobal, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "global_recognition" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/sulcilabellingspamlocal.py b/capsul/pipeline/test/fake_morphologist/sulcilabellingspamlocal.py index 4eb18e26e..06cd701a1 100644 --- a/capsul/pipeline/test/fake_morphologist/sulcilabellingspamlocal.py +++ b/capsul/pipeline/test/fake_morphologist/sulcilabellingspamlocal.py @@ -5,7 +5,7 @@ class SulciLabellingSPAMLocal(Process): def __init__(self, **kwargs): - super(SulciLabellingSPAMLocal, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "local_recognition" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/sulcilabellingspammarkov.py b/capsul/pipeline/test/fake_morphologist/sulcilabellingspammarkov.py index 043a75a66..38a82b575 100644 --- a/capsul/pipeline/test/fake_morphologist/sulcilabellingspammarkov.py +++ b/capsul/pipeline/test/fake_morphologist/sulcilabellingspammarkov.py @@ -5,7 +5,7 @@ class SulciLabellingSPAMMarkov(Process): def __init__(self, **kwargs): - super(SulciLabellingSPAMMarkov, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "markovian_recognition" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/sulciskeleton.py b/capsul/pipeline/test/fake_morphologist/sulciskeleton.py index 648134c12..d280bb810 100644 --- a/capsul/pipeline/test/fake_morphologist/sulciskeleton.py +++ b/capsul/pipeline/test/fake_morphologist/sulciskeleton.py @@ -5,7 +5,7 @@ class SulciSkeleton(Process): def __init__(self, **kwargs): - super(SulciSkeleton, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "SulciSkeleton" self.add_field( diff --git 
a/capsul/pipeline/test/fake_morphologist/t1biascorrection.py b/capsul/pipeline/test/fake_morphologist/t1biascorrection.py index d529de5dd..b74a5e584 100644 --- a/capsul/pipeline/test/fake_morphologist/t1biascorrection.py +++ b/capsul/pipeline/test/fake_morphologist/t1biascorrection.py @@ -5,7 +5,7 @@ class T1BiasCorrection(Process): def __init__(self, **kwargs): - super(T1BiasCorrection, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "BiasCorrection" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/talairachtransformation.py b/capsul/pipeline/test/fake_morphologist/talairachtransformation.py index 2621b3641..a0c99c28c 100644 --- a/capsul/pipeline/test/fake_morphologist/talairachtransformation.py +++ b/capsul/pipeline/test/fake_morphologist/talairachtransformation.py @@ -5,7 +5,7 @@ class TalairachTransformation(Process): def __init__(self, **kwargs): - super(TalairachTransformation, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "TalairachTransformation" self.add_field( diff --git a/capsul/pipeline/test/fake_morphologist/talairachtransformationfromnormalization.py b/capsul/pipeline/test/fake_morphologist/talairachtransformationfromnormalization.py index e3f54732b..74a2ba0f4 100644 --- a/capsul/pipeline/test/fake_morphologist/talairachtransformationfromnormalization.py +++ b/capsul/pipeline/test/fake_morphologist/talairachtransformationfromnormalization.py @@ -5,7 +5,7 @@ class TalairachTransformationFromNormalization(Process): def __init__(self, **kwargs): - super(TalairachTransformationFromNormalization, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = "TalairachFromNormalization" self.add_field( diff --git a/capsul/pipeline/test/test_activation.py b/capsul/pipeline/test/test_activation.py index de8cca2ca..94496da7d 100644 --- a/capsul/pipeline/test/test_activation.py +++ b/capsul/pipeline/test/test_activation.py @@ -9,7 +9,7 @@ class DummyProcess(Process): """Dummy Test Process""" def __init__(self, definition): - super(DummyProcess, self).__init__( + super().__init__( "capsul.pipeline.test.test_activation.DummyProcess" ) diff --git a/capsul/pipeline/test/test_complex_pipeline_activations.py b/capsul/pipeline/test/test_complex_pipeline_activations.py index 309724afc..82af2294a 100644 --- a/capsul/pipeline/test/test_complex_pipeline_activations.py +++ b/capsul/pipeline/test/test_complex_pipeline_activations.py @@ -859,19 +859,19 @@ def test_complex_activations(self): node = node_pipeline.nodes[node_name] except KeyError: raise KeyError( - "Pipeline {0} has no node named {1}".format( + "Pipeline {} has no node named {}".format( node_pipeline.pipeline, node_name ) ) try: - what = "activation of node {0}".format( + what = "activation of node {}".format( full_node_name or "main pipeline node" ) expected = node_activations.get("_activated") if expected is not None: got = node.activated self.assertEqual(expected, got) - what = "enabled for node {0}".format( + what = "enabled for node {}".format( full_node_name or "main pipeline node" ) expected = node_activations.get("_enabled") @@ -880,7 +880,7 @@ def test_complex_activations(self): self.assertEqual(expected, got) except AssertionError: raise AssertionError( - "Wrong activation within ComplexPipeline with parameters {0}: {1} is supposed to be {2} but is {3}".format( + "Wrong activation within ComplexPipeline with parameters {}: {} is supposed to be {} but is {}".format( kwargs, what, expected, got ) ) diff --git a/capsul/pipeline/test/test_custom_nodes.py 
b/capsul/pipeline/test/test_custom_nodes.py index 7306ac53f..044139267 100644 --- a/capsul/pipeline/test/test_custom_nodes.py +++ b/capsul/pipeline/test/test_custom_nodes.py @@ -26,11 +26,11 @@ def execute(self, context): with open(self.out1, "w") as f: print("test: %s" % os.path.basename(self.out1), file=f) print("##############", file=f) - with open(self.in1, "r") as ff: + with open(self.in1) as ff: f.write(ff.read()) print("model: %s" % os.path.basename(self.model), file=f) print("##############", file=f) - with open(self.model, "r") as ff: + with open(self.model) as ff: f.write(ff.read()) # TODO FIXME: this should be automatic output_dict = {"out1": self.out1} @@ -275,12 +275,10 @@ def pipeline_definition(self): self.add_iterative_process( "test_it", "capsul.pipeline.test.test_custom_nodes.CVtest", - non_iterative_plugs=set( - [ + non_iterative_plugs={ "model", "base", - ] - ), + }, make_optional=["out1"], ) self.add_custom_node( @@ -534,7 +532,7 @@ def add_py_tmpfile(self, pyfname): # print('cache_dir:', cache_dir) cpver = "cpython-%d%d.pyc" % sys.version_info[:2] pyfname_we = osp.basename(pyfname[: pyfname.rfind(".")]) - pycfname = osp.join(cache_dir, "%s.%s" % (pyfname_we, cpver)) + pycfname = osp.join(cache_dir, "{}.{}".format(pyfname_we, cpver)) self.temp_files.append(pycfname) # print('added py tmpfile:', pyfname, pycfname) diff --git a/capsul/pipeline/test/test_double_switch.py b/capsul/pipeline/test/test_double_switch.py index f3e316419..0cf02edb1 100644 --- a/capsul/pipeline/test/test_double_switch.py +++ b/capsul/pipeline/test/test_double_switch.py @@ -7,7 +7,7 @@ class DummyProcess(Process): """Dummy Test Process""" def __init__(self, definition=None): - super(DummyProcess, self).__init__( + super().__init__( "capsul.pipeline.test.test_double_switch.DummyProcess" ) diff --git a/capsul/pipeline/test/test_iterative_process.py b/capsul/pipeline/test/test_iterative_process.py index 93b62e6c3..c45264523 100644 --- a/capsul/pipeline/test/test_iterative_process.py +++ b/capsul/pipeline/test/test_iterative_process.py @@ -279,7 +279,7 @@ def test_iterative_big_pipeline_workflow(self): subject = workflow.parameters_values[proxy[1]] subjects.add(subject) self.assertIn(subject, ["toto", "tutu", "tata", "titi", "tete"]) - self.assertEqual(subjects, set(["toto", "tutu", "tata", "titi", "tete"])) + self.assertEqual(subjects, {"toto", "tutu", "tata", "titi", "tete"}) def test_iterative_pipeline_workflow_run(self): self.small_pipeline.output_image = [ diff --git a/capsul/pipeline/test/test_optional_output_switch.py b/capsul/pipeline/test/test_optional_output_switch.py index 9bead6e53..759823582 100644 --- a/capsul/pipeline/test/test_optional_output_switch.py +++ b/capsul/pipeline/test/test_optional_output_switch.py @@ -10,7 +10,7 @@ class DummyProcess(Process): """Dummy Test Process""" def __init__(self, definition=None): - super(DummyProcess, self).__init__( + super().__init__( "capsul.pipeline.test.test_optional_output_switch" ) diff --git a/capsul/pipeline/test/test_pipeline.py b/capsul/pipeline/test/test_pipeline.py index 50095742e..666869c62 100644 --- a/capsul/pipeline/test/test_pipeline.py +++ b/capsul/pipeline/test/test_pipeline.py @@ -20,7 +20,7 @@ class DummyProcess(Process): def __init__(self, definition=None): if definition is None: definition = "capsul.pipeline.test.test_pipeline.DummyProcess" - super(DummyProcess, self).__init__(definition) + super().__init__(definition) # inputs self.add_field("input_image", File, optional=False) @@ -103,7 +103,7 @@ def add_py_tmpfile(self, 
pyfname): # print('cache_dir:', cache_dir) cpver = "cpython-%d%d.pyc" % sys.version_info[:2] pyfname_we = osp.basename(pyfname[: pyfname.rfind(".")]) - pycfname = osp.join(cache_dir, "%s.%s" % (pyfname_we, cpver)) + pycfname = osp.join(cache_dir, "{}.{}".format(pyfname_we, cpver)) self.temp_files.append(pycfname) # print('added py tmpfile:', pyfname, pycfname) diff --git a/capsul/pipeline/test/test_pipeline_parameters.py b/capsul/pipeline/test/test_pipeline_parameters.py index 29210bc91..ba830e3d5 100644 --- a/capsul/pipeline/test/test_pipeline_parameters.py +++ b/capsul/pipeline/test/test_pipeline_parameters.py @@ -23,7 +23,7 @@ def load_pipeline_dictionary(filename): :param filename: the json filename """ if filename: - with open(filename, "r", encoding="utf8") as file: + with open(filename, encoding="utf8") as file: return json.load(file) diff --git a/capsul/pipeline/test/test_pipeline_workflow.py b/capsul/pipeline/test/test_pipeline_workflow.py index 3410ef119..6e8cd0848 100644 --- a/capsul/pipeline/test/test_pipeline_workflow.py +++ b/capsul/pipeline/test/test_pipeline_workflow.py @@ -11,7 +11,7 @@ class DummyProcess(Process): """Dummy Test Process""" def __init__(self, definition): - super(DummyProcess, self).__init__(definition) + super().__init__(definition) # inputs self.add_field("input", File, optional=False) @@ -36,7 +36,7 @@ class DummyProcessSPM(DummyProcess): class DummyListProcess(Process): def __init__(self, definition): - super(DummyListProcess, self).__init__(definition) + super().__init__(definition) # inputs self.add_field("inputs", list[File], optional=False) diff --git a/capsul/pipeline/test/test_switch_pipeline.py b/capsul/pipeline/test/test_switch_pipeline.py index 067431608..b4667f321 100644 --- a/capsul/pipeline/test/test_switch_pipeline.py +++ b/capsul/pipeline/test/test_switch_pipeline.py @@ -11,7 +11,7 @@ class DummyProcess(Process): """Dummy Test Process""" def __init__(self, definition=None): - super(DummyProcess, self).__init__("capsul.pipeline.test.test_switch_pipeline") + super().__init__("capsul.pipeline.test.test_switch_pipeline") # inputs self.add_field("input_image", str, optional=False) diff --git a/capsul/pipeline/test/test_switch_subpipeline.py b/capsul/pipeline/test/test_switch_subpipeline.py index 72d6b0f1b..87d87e156 100644 --- a/capsul/pipeline/test/test_switch_subpipeline.py +++ b/capsul/pipeline/test/test_switch_subpipeline.py @@ -10,7 +10,7 @@ class DummyProcess(Process): """Dummy Test Process""" def __init__(self, definition=None): - super(DummyProcess, self).__init__( + super().__init__( "capsul.pipeline.test.test_switch_subpipeline.DummyProcess" ) @@ -31,7 +31,7 @@ class DummyProcess1_1(Process): """Dummy Test Process with 1 input and one output""" def __init__(self, definition=None): - super(DummyProcess1_1, self).__init__( + super().__init__( "capsul.pipeline.test.test_switch_subpipeline.DummyProcess1_1" ) @@ -49,7 +49,7 @@ class DummyProcess2_1(Process): """Dummy Test Process with 2 inputs and one output""" def __init__(self, definition=None): - super(DummyProcess2_1, self).__init__( + super().__init__( "capsul.pipeline.test.test_switch_subpipeline.DummyProcess2_1" ) @@ -68,7 +68,7 @@ class DummyProcess4_1(Process): """Dummy Test Process with 4 inputs and one output""" def __init__(self, definition=None): - super(DummyProcess4_1, self).__init__( + super().__init__( "capsul.pipeline.test.test_switch_subpipeline.DummyProcess4_1" ) diff --git a/capsul/pipeline/test/test_temporary.py b/capsul/pipeline/test/test_temporary.py index 
f773132a9..ee6964b23 100644 --- a/capsul/pipeline/test/test_temporary.py +++ b/capsul/pipeline/test/test_temporary.py @@ -6,7 +6,6 @@ from soma.controller import File, field from capsul.api import Process, Pipeline, Capsul import shutil -from six.moves import zip class DummyProcess1(Process): diff --git a/capsul/pipeline/topological_sort.py b/capsul/pipeline/topological_sort.py index f1265772a..fee6445b7 100644 --- a/capsul/pipeline/topological_sort.py +++ b/capsul/pipeline/topological_sort.py @@ -8,7 +8,7 @@ """ -class GraphNode(object): +class GraphNode: """Simple Graph Node Structure Attributes @@ -103,7 +103,7 @@ def remove_link_from(self, node): self.links_from_degree -= 1 -class Graph(object): +class Graph: """Simple Graph Structure on which we want to perform a topological tree (no cycle). @@ -139,10 +139,10 @@ def add_node(self, node): the node to insert """ if not isinstance(node, GraphNode): - raise Exception("Expect a GraphNode, got {0}".format(node)) + raise Exception("Expect a GraphNode, got {}".format(node)) if node.name in self._nodes: raise Exception( - "Expect a GraphNode with a unique name, " "got {0}".format(node) + "Expect a GraphNode with a unique name, " "got {}".format(node) ) self._nodes[node.name] = node @@ -170,12 +170,12 @@ def add_link(self, from_node, to_node): """ if from_node not in self._nodes: raise Exception( - "Node {0} is not defined in the Graph." + "Node {} is not defined in the Graph. " "Use add_node() method".format(from_node) ) if to_node not in self._nodes: raise Exception( - "Node {0} is not defined in the Graph." + "Node {} is not defined in the Graph. " "Use add_node() method".format(to_node) ) if (from_node, to_node) not in self._links: diff --git a/capsul/process/nipype_process.py b/capsul/process/nipype_process.py index 1fc8fcaf4..b59bfeb1e 100644 --- a/capsul/process/nipype_process.py +++ b/capsul/process/nipype_process.py @@ -192,8 +192,8 @@ def sync_process_output_traits(process_instance): ex_type, ex, tb = sys.exc_info() logging.debug( "Something wrong in the nipype output trait " - "synchronization:\n\n\tError: {0} - {1}\n" - "\tTraceback:\n{2}".format( + "synchronization:\n\n\tError: {} - {}\n" + "\tTraceback:\n{}".format( ex_type, ex, "".join(traceback.format_tb(tb)) ) ) @@ -232,8 +232,8 @@ def sync_process_output_traits(process_instance): ex_type, ex, tb = sys.exc_info() logging.debug( "Something wrong in the nipype output trait " - "synchronization:\n\n\tError: {0} - {1}\n" - "\tTraceback:\n{2}".format( + "synchronization:\n\n\tError: {} - {}\n" + "\tTraceback:\n{}".format( ex_type, ex, "".join(traceback.format_tb(tb)) ) ) @@ -263,8 +263,8 @@ def sync_process_output_traits(process_instance): ex_type, ex, tb = sys.exc_info() logging.debug( "Something wrong in the nipype output trait " - "synchronization:\n\n\tError: {0} - {1}\n" - "\tTraceback:\n{2}".format( + "synchronization:\n\n\tError: {} - {}\n" + "\tTraceback:\n{}".format( ex_type, ex, "".join(traceback.format_tb(tb)) ) ) @@ -399,7 +399,7 @@ def _make_matlab_command(self, content): if process_instance.field(field_name) is not None: field_name = "nipype_" + field_name - # Relax nipye exists trait contrain + # Relax nipype exists trait constraint relax_exists_constraint(trait) # Clone the nipype trait @@ -561,7 +561,7 @@ def relax_exists_constraint(trait): trait: trait a trait that will be relaxed from the exist constraint """ - # If we have a single trait, just modify the 'exists' contrain + # If we have a single trait, just modify the 'exists' constraint # if specified if
hasattr(trait.handler, "exists"): trait.handler.exists = False diff --git a/capsul/process/node.py b/capsul/process/node.py index 90f9cd55b..24c5c9c14 100644 --- a/capsul/process/node.py +++ b/capsul/process/node.py @@ -252,7 +252,7 @@ def __init__( if "name" not in parameter: raise Exception( "Can't create parameter with unknown" - "identifier and parameter {0}".format(parameter) + " identifier and parameter {}".format(parameter) ) parameter = parameter.copy() # force the parameter type @@ -263,7 +263,7 @@ raise Exception( "Can't create Node. Expect a dict structure " "to initialize the Node, " - "got {0}: {1}".format(type(parameter), parameter) + "got {}: {}".format(type(parameter), parameter) ) def __del__(self): diff --git a/capsul/process/process.py b/capsul/process/process.py index 537b9062f..1e5e4032a 100644 --- a/capsul/process/process.py +++ b/capsul/process/process.py @@ -152,11 +152,11 @@ def json(self, include_parameters=True): return result def json_parameters(self): - return super(Process, self).json() + return super().json() def before_execute(self, context): """This method is called by CapsulEngine before calling - execute(). By default it does nothing but can be overriden + execute(). By default it does nothing but can be overridden in derived classes. """ pass @@ -292,7 +292,7 @@ def get_help(self, returnhelp=False, use_labels=False): # Update the documentation with a description of the pipeline # when the xml to pipeline wrapper has been used if returnhelp and hasattr(self, "_pipeline_desc"): - str_desc = "".join([" {0}".format(line) for line in self._pipeline_desc]) + str_desc = "".join([" {}".format(line) for line in self._pipeline_desc]) doctring += [ ".. hidden-code-block:: python", " :starthidden: True", @@ -309,26 +309,26 @@ def get_help(self, returnhelp=False, use_labels=False): # when the function to process wrapper has been used if hasattr(self, "_func_name") and hasattr(self, "_func_module"): doctring += [ - "This process has been wrapped from {0}.{1}.".format( + "This process has been wrapped from {}.{}.".format( self._func_module, self._func_name ), "", ] if returnhelp: doctring += [ - ".. currentmodule:: {0}".format(self._func_module), + ".. currentmodule:: {}".format(self._func_module), "", ".. autosummary::", " :toctree: ./", "", - " {0}".format(self._func_name), + " {}".format(self._func_name), "", ] # Append the input and output fields help if use_labels: - in_label = [".. _%s.%s_inputs:\n\n" % (self.__module__, self.name)] - out_label = [".. _%s.%s_outputs:\n\n" % (self.__module__, self.name)] + in_label = [".. _{}.{}_inputs:\n\n".format(self.__module__, self.name)] + out_label = [".. _{}.{}_outputs:\n\n".format(self.__module__, self.name)] else: in_label = [] out_label = [] @@ -594,7 +594,7 @@ def after_execute(self, exec_result, context): # restore initial values, keeping outputs # The situation here is that: - # * output_directory should drive "final" output valules + # * output_directory should drive "final" output values # * we may have been using a temporary output directory, thus output # values are already set to this temp dir, not the final one. # (at least when use_temp_output_dir is set).
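The next hunk fixes more than spelling: assigning to the misspelled attribute "destinaton" was a silent bug, because Python happily creates a new attribute instead of updating the intended one. A minimal sketch of that failure mode (illustrative only, not Capsul code):

    # Assigning to a typo'd attribute name never raises: Python simply
    # creates a second attribute, and the intended one keeps its stale value.
    class Mover:
        def __init__(self):
            self.destination = "/tmp/work"

        def restore(self, former_dir):
            self.destinaton = former_dir  # typo: silently makes a new attribute

    m = Mover()
    m.restore("/data/final")
    assert m.destination == "/tmp/work"  # stale: the typo hid the update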
@@ -659,7 +659,7 @@ def _move_outputs(self): shutil.rmtree(tmp_output) del self._destination - self.destinaton = self._former_output_directory + self.destination = self._former_output_directory if hasattr(self, "output_directory"): self.output_directory = self._former_output_directory del self._former_output_directory @@ -914,7 +914,7 @@ def init_with_skip(self, *args, **kwargs): if len(stack) >= 2: s2 = stack[-2] if s2[2] == "nipype_factory": - instance = super(NipypeProcess, cls).__new__(cls, *args, **kwargs) + instance = super().__new__(cls, *args, **kwargs) setattr(instance, "__%s_np_init_done__" % cls.__name__, False) return instance nipype_class = getattr(cls, "_nipype_class_type", None) @@ -947,7 +947,7 @@ def init_with_skip(self, *args, **kwargs): instance.id = instance.__class__.__module__ + "." + instance.name instance.__postinit__(*nargs, **nkwargs) else: - instance = super(NipypeProcess, cls).__new__(cls, *args, **kwargs) + instance = super().__new__(cls, *args, **kwargs) setattr(instance, "__%s_np_init_done__" % cls.__name__, False) return instance @@ -1076,7 +1076,7 @@ class Smooth(NipypeProcess): ] if use_temp_output_dir is None: use_temp_output_dir = True - super(NipypeProcess, self).__init__( + super().__init__( definition=definition, activate_copy=True, inputs_to_copy=inputs_to_copy, @@ -1089,7 +1089,7 @@ class Smooth(NipypeProcess): else: if use_temp_output_dir is None: use_temp_output_dir = False - super(NipypeProcess, self).__init__( + super().__init__( definition=definition, activate_copy=False, use_temp_output_dir=use_temp_output_dir, @@ -1220,4 +1220,4 @@ def after_execute(self, exec_result, context): ) if os.path.exists(script_file): shutil.move(script_file, getattr(self, script_tname)) - return super(NipypeProcess, self).after_execute(exec_result, context) + return super().after_execute(exec_result, context) diff --git a/capsul/process/test/test_load_from_description.py b/capsul/process/test/test_load_from_description.py index 27cce2e68..08c60e9f5 100644 --- a/capsul/process/test/test_load_from_description.py +++ b/capsul/process/test/test_load_from_description.py @@ -41,7 +41,7 @@ def to_warp_func( # # # -# doc="Mehod for thresolding."/> +# doc="Method for thresholding."/> # # # @@ -57,7 +57,7 @@ def threshold( method: field( type_=Literal["gt", "ge", "lt", "le"], default="gt", - doc="Mehod for thresolding.", + doc="Method for thresholding.", ), threshold: field(type_=float, default=0), ): @@ -108,8 +108,8 @@ def tearDown(self): except OSError: pass - def test_process_warpping(self): - """Method to test the function to process on the fly warpping.""" + def test_process_wrapping(self): + """Method to test the function to process on the fly wrapping.""" capsul = Capsul(database_path="") process = capsul.executable( @@ -131,8 +131,8 @@ def test_process_warpping(self): ce.run(process, timeout=5) self.assertEqual(process.result, (1, "done")) - def test_pipeline_warpping(self): - """Method to test the xml description to pipeline on the fly warpping.""" + def test_pipeline_wrapping(self): + """Method to test the xml description to pipeline on the fly wrapping.""" pipeline_file = os.path.join(os.path.dirname(__file__), "pipeline.json") capsul = Capsul(database_path="") pipeline = capsul.executable(pipeline_file) diff --git a/capsul/process/test/test_metadata_schema.py b/capsul/process/test/test_metadata_schema.py index 75d9a92e6..088a127bd 100644 --- a/capsul/process/test/test_metadata_schema.py +++ b/capsul/process/test/test_metadata_schema.py @@ -16,7 +16,7 @@ class DummyProcess(Process): f: float =
field(output=False) def __init__(self, definition): - super(DummyProcess, self).__init__(definition) + super().__init__(definition) self.add_field("truc", type_=File, write=False) self.add_field("bidule", type_=File, write=True) @@ -33,7 +33,7 @@ class DummyListProcess(Process): def execute(self, context): with open(self.result, "w") as f: - f.write("{\n truc=%s,\n bidule=%s\n}" % (self.truc, self.bidule)) + f.write("{{\n truc={},\n bidule={}\n}}".format(self.truc, self.bidule)) class CustomMetadataSchema(MetadataSchema): diff --git a/capsul/process/test/test_runprocess.py b/capsul/process/test/test_runprocess.py index a02ceb398..c8ee4e8c4 100644 --- a/capsul/process/test/test_runprocess.py +++ b/capsul/process/test/test_runprocess.py @@ -15,7 +15,7 @@ class DummyProcess(Process): f: field(type_=float, doc="help for parameter f") def execute(self, context=None): - print("DummyProcess exec, f={0}".format(self.f)) + print("DummyProcess exec, f={}".format(self.f)) class TestRunProcess(unittest.TestCase): diff --git a/capsul/qt_apps/pipeline_viewer_app.py b/capsul/qt_apps/pipeline_viewer_app.py index a501d0cc7..ce9478989 100644 --- a/capsul/qt_apps/pipeline_viewer_app.py +++ b/capsul/qt_apps/pipeline_viewer_app.py @@ -6,7 +6,6 @@ """ # System import -from __future__ import absolute_import import os import logging @@ -32,7 +31,7 @@ class PipelineViewerApp(Application): def __init__(self, *args, **kwargs): """Method to initialize the PipelineViewerApp class.""" # Inhetritance - super(PipelineViewerApp, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) # Initialize the application self.window = None diff --git a/capsul/qt_apps/resources/icones.py b/capsul/qt_apps/resources/icones.py index b7567e745..7716ff56e 100644 --- a/capsul/qt_apps/resources/icones.py +++ b/capsul/qt_apps/resources/icones.py @@ -5,7 +5,6 @@ # # WARNING! All changes made in this file will be lost! 
-from __future__ import absolute_import from soma.qt_gui.qt_backend import QtCore import sys diff --git a/capsul/qt_apps/utils/application.py b/capsul/qt_apps/utils/application.py index 76f14396c..810a10cd2 100644 --- a/capsul/qt_apps/utils/application.py +++ b/capsul/qt_apps/utils/application.py @@ -8,7 +8,6 @@ """ # System import -from __future__ import absolute_import import sys import optparse import logging @@ -117,7 +116,7 @@ def __init__(self, extra_options=None): # If a no valid logging level is found raise an Exception if level is None: raise Exception( - "Warning : unknown logging level " "{0}".format(self.options.debug) + "Warning : unknown logging level " "{}".format(self.options.debug) ) # Configure the logging module diff --git a/capsul/qt_apps/utils/fill_treectrl.py b/capsul/qt_apps/utils/fill_treectrl.py index 224ae3704..deb102125 100644 --- a/capsul/qt_apps/utils/fill_treectrl.py +++ b/capsul/qt_apps/utils/fill_treectrl.py @@ -9,7 +9,6 @@ ---------------------- """ -from __future__ import absolute_import import six # Soma import @@ -61,7 +60,7 @@ def add_tree_nodes(parent_item, menu, match, parent_module=""): the parent module string description ('module.sub_module') """ # Go through the current module sub modules - for module_name, child_modules in six.iteritems(menu): + for module_name, child_modules in menu.items(): # Filtering: check if we need to add this module in the tree if ( match == "" @@ -111,7 +110,7 @@ def search_in_menu(menu, match): return is_included # Go through the current module sub modules - for module_name, child_modules in six.iteritems(menu): + for module_name, child_modules in menu.items(): # Stop criteria if isinstance(child_modules, list): return is_included or match in module_name.lower() diff --git a/capsul/qt_apps/utils/find_pipelines.py b/capsul/qt_apps/utils/find_pipelines.py index ee09cf1c1..fe870b16e 100644 --- a/capsul/qt_apps/utils/find_pipelines.py +++ b/capsul/qt_apps/utils/find_pipelines.py @@ -49,7 +49,7 @@ def find_pipelines_from_description(module_name, url=None): try: __import__(module_name) except ImportError: - logger.error("Can't load module {0}".format(module_name)) + logger.error("Can't load module {}".format(module_name)) return {}, [] # Get the module path @@ -57,7 +57,7 @@ def find_pipelines_from_description(module_name, url=None): module_path = module.__path__[0] # Build the expected pipeline description file - description_file = os.path.join(module_path, "{0}.capsul".format(module_name)) + description_file = os.path.join(module_path, "{}.capsul".format(module_name)) # Load the description file if os.path.isfile(description_file): @@ -98,7 +98,7 @@ def find_pipeline_and_process(module_name): try: __import__(module_name) except ImportError: - logger.error("Can't load module {0}".format(module_name)) + logger.error("Can't load module {}".format(module_name)) return {}, [] # Get the module path @@ -112,7 +112,7 @@ def find_pipeline_and_process(module_name): sub_modules = find_packages(where=module_path, exclude=("doc",)) sub_modules = [module_name + "." 
+ x for x in sub_modules] sub_modules.insert(0, module_name) - logger.debug("Modules found with setuptools: '{0}'.".format(sub_modules)) + logger.debug("Modules found with setuptools: '{}'.".format(sub_modules)) # Shift shift = len(module_name.split(".")) @@ -123,7 +123,7 @@ # Get the sub module path sub_module_path = os.path.join(module_path, *sub_module.split(".")[shift:]) - # List all the mdule in sub module path + # List all the modules in the sub module path sub_sub_module_names = [ sub_module + "." + x[:-3] for x in os.listdir(sub_module_path) @@ -137,7 +137,7 @@ except ImportError: exc_info = sys.exc_info() logger.error("".join(traceback.format_exception(*exc_info))) - logger.error("Can't load module " "{0}".format(sub_sub_module_name)) + logger.error("Can't load module " "{}".format(sub_sub_module_name)) continue # Get the module @@ -166,13 +166,13 @@ def find_pipeline_and_process(module_name): def lists2dict(list_of_pipeline_description, url, d): - """Convert a list of splited module names to a hierachic dictionary with - list leafs that contain the url to the module docuementation. + """Convert a list of split module names to a hierarchic dictionary with + list leaves that contain the url to the module documentation. Parameters ---------- list_of_pipeline_description: list of list of str (mandatory) - the splited module names to organize bu modules + the split module names to organize by modules url: str (mandatory) the url to the module documentation diff --git a/capsul/qt_apps/utils/window.py b/capsul/qt_apps/utils/window.py index 940dfb4f7..b6d6c78aa 100644 --- a/capsul/qt_apps/utils/window.py +++ b/capsul/qt_apps/utils/window.py @@ -6,14 +6,13 @@ """ # Soma import -from __future__ import absolute_import from soma.qt_gui import qt_backend # Capsul import from capsul.qt_apps.resources.icones import * -class MyQUiLoader(object): +class MyQUiLoader: """Base window class based on ui file description.""" def __init__(self, uifile): diff --git a/capsul/qt_gui/widgets/activation_inspector.py b/capsul/qt_gui/widgets/activation_inspector.py index 29eaf2582..a4598ec29 100644 --- a/capsul/qt_gui/widgets/activation_inspector.py +++ b/capsul/qt_gui/widgets/activation_inspector.py @@ -52,7 +52,7 @@ def __init__(self, pipeline_path, record_file=None, *args, **kwargs): a file where the pipeline activation steps are stored.
""" # Inhetritance - super(ActivationInspectorApp, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) # Load the pipeline self.pipeline = executable(pipeline_path) @@ -135,7 +135,7 @@ def __init__( os.close(record_file_s[0]) print("temporary record file:", record_file) - class AutoDeleteFile(object): + class AutoDeleteFile: def __init__(self, record_file): self.record_file = record_file @@ -161,7 +161,7 @@ def __del__(self): self.pipeline._debug_activations = self.record_file else: raise ValueError( - "The record file '{0}' can't be created since the " + "The record file '{}' can't be created since the " "base directory does not exists.".format(self.record_file) ) @@ -192,7 +192,7 @@ def add_controls_to_ui(self): error_message = "{0} has no attribute '{1}'" # Got through the class dynamic controls - for control_type, control_item in six.iteritems(self.controls): + for control_type, control_item in self.controls.items(): # Get the dynamic control name for control_name in control_item: # Try to set the control value to the ui class parameter @@ -217,8 +217,8 @@ def refresh_activation_from_record(self): record_pipeline_id = openrecord.readline().strip() if record_pipeline_id != str(self.pipeline.definition): raise ValueError( - "'{0}' recorded activations for pipeline '{1}' but not for " - "'{2}'".format( + "'{}' recorded activations for pipeline '{}' but not for " + "'{}'".format( self.record_file, record_pipeline_id, self.pipeline.definition ) ) @@ -244,14 +244,14 @@ def refresh_activation_from_record(self): # > Store the current activation stack if activation == "+": - current_activations["{0}:{1}".format(node, plug)] = True + current_activations["{}:{}".format(node, plug)] = True else: - del current_activations["{0}:{1}".format(node, plug)] + del current_activations["{}:{}".format(node, plug)] self.activations.append(current_activations.copy()) # > Add a line to the activation display self.ui.events.addItem( - "{0}{1} {2}:{3}".format(iteration, activation, node, plug) + "{}{} {}:{}".format(iteration, activation, node, plug) ) # Select the last activation step so the pipeline will be @@ -271,11 +271,11 @@ def update_pipeline_activation(self, index): for node in self.pipeline.all_nodes(): # Restore the plugs and nodes activations node_name = node.full_name - for plug_name, plug in six.iteritems(node.plugs): + for plug_name, plug in node.plugs.items(): plug.activated = activations.get( - "{0}:{1}".format(node_name, plug_name), False + "{}:{}".format(node_name, plug_name), False ) - node.activated = activations.get("{0}:".format(node_name), False) + node.activated = activations.get("{}:".format(node_name), False) # Refresh views relying on plugs and nodes selection for node in self.pipeline.all_nodes(): diff --git a/capsul/qt_gui/widgets/attributed_process_widget.py b/capsul/qt_gui/widgets/attributed_process_widget.py index e0f22065a..08b38319d 100644 --- a/capsul/qt_gui/widgets/attributed_process_widget.py +++ b/capsul/qt_gui/widgets/attributed_process_widget.py @@ -50,7 +50,7 @@ def __init__( user_level, and will only be visible if the ControllerWidget userl_evel is more than (or equal) the field level. 
""" - super(AttributedProcessWidget, self).__init__() + super().__init__() self.setLayout(QtGui.QVBoxLayout()) self.layout().setContentsMargins(0, 0, 0, 0) if exec_meta is None: diff --git a/capsul/qt_gui/widgets/config_gui.py b/capsul/qt_gui/widgets/config_gui.py index e1606a461..7ec65de9f 100644 --- a/capsul/qt_gui/widgets/config_gui.py +++ b/capsul/qt_gui/widgets/config_gui.py @@ -5,7 +5,7 @@ ControllerWidgetFactory, ControllerSubwidget, ) -from soma.qt_gui.collapsable import CollapsableWidget +from soma.qt_gui.collapsible import CollapsibleWidget from soma.controller import undefined from soma.qt_gui.qt_backend import Qt from functools import partial @@ -25,7 +25,7 @@ def create_widgets(self): buttons = ["+"] else: buttons = [] - self.widget = CollapsableWidget( + self.widget = CollapsibleWidget( self.inner_widget, label=label, expanded=(self.parent_interaction.depth == 0), diff --git a/capsul/qt_gui/widgets/links_debugger.py b/capsul/qt_gui/widgets/links_debugger.py index b0143f191..7a8778bc7 100644 --- a/capsul/qt_gui/widgets/links_debugger.py +++ b/capsul/qt_gui/widgets/links_debugger.py @@ -7,10 +7,8 @@ ------------------------------- """ -from __future__ import print_function # System import -from __future__ import absolute_import import os import tempfile import re @@ -31,7 +29,7 @@ class CapsulLinkDebuggerView(QtGui.QWidget): VALUE = 4 def __init__(self, pipeline, ui_file=None, record_file=None, parent=None): - super(CapsulLinkDebuggerView, self).__init__(parent) + super().__init__(parent) # load the user interface window if ui_file is None: @@ -52,7 +50,7 @@ def __init__(self, pipeline, ui_file=None, record_file=None, parent=None): os.close(record_file_s[0]) print("temporary record file:", record_file) - class AutoDeleteFile(object): + class AutoDeleteFile: def __init__(self, record_file): self.record_file = record_file diff --git a/capsul/qt_gui/widgets/pipeline_developer_view.py b/capsul/qt_gui/widgets/pipeline_developer_view.py index 79e353df0..3dec93c81 100644 --- a/capsul/qt_gui/widgets/pipeline_developer_view.py +++ b/capsul/qt_gui/widgets/pipeline_developer_view.py @@ -104,7 +104,7 @@ # ----------------------------------------------------------------------------- -class ColorType(object): +class ColorType: def __init__(self): pass @@ -135,7 +135,7 @@ class Plug(Qt.QGraphicsPolygonItem): def __init__( self, color, name, height, width, activated=True, optional=False, parent=None ): - super(Plug, self).__init__(parent) + super().__init__(parent) self.name = name # self.color = self._color(activated, optional) self.color = color @@ -198,7 +198,7 @@ def get_plug_point(self): return self.mapToParent(point) def mousePressEvent(self, event): - super(Plug, self).mousePressEvent(event) + super().mousePressEvent(event) if event.button() == QtCore.Qt.LeftButton: self.scene().plug_clicked.emit(self.name) event.accept() @@ -214,7 +214,7 @@ class EmbeddedSubPipelineItem(Qt.QGraphicsProxyWidget): """ def __init__(self, sub_pipeline_wid): - super(EmbeddedSubPipelineItem, self).__init__() + super().__init__() old_height = sub_pipeline_wid.sizeHint().height() sizegrip = QtGui.QSizeGrip(None) new_height = old_height + sub_pipeline_wid.horizontalScrollBar().height() @@ -226,7 +226,7 @@ def __init__(self, sub_pipeline_wid): class boxItem(QtGui.QGraphicsRectItem): def __init__(self, parent=None): - super(boxItem, self).__init__(parent) + super().__init__(parent) # self.setFlags(self.ItemIsFocusable) self.penBox = 0 self.name = "" @@ -244,7 +244,7 @@ def keyPressEvent(self, event): 
self.scene()._node_keydelete_clicked(self) event.accept() else: - super(boxItem, self).keyPressEvent(event) + super().keyPressEvent(event) class NodeGWidget(QtGui.QGraphicsItem): @@ -263,7 +263,7 @@ def __init__( show_opt_outputs=True, userlevel=0, ): - super(NodeGWidget, self).__init__(parent) + super().__init__(parent) self.infoActived = QtGui.QGraphicsTextItem("", self) self.colType = ColorType() @@ -377,7 +377,7 @@ def get_title(self): if self.sub_pipeline is None: return self.name else: - return "[{0}]".format(self.name) + return "[{}]".format(self.name) def update_parameters(self): self._update_param_timer.start(20) @@ -415,7 +415,7 @@ def update_labels(self, labels): self._create_label_marks() def _get_label(self, label, register=True): - class Label(object): + class Label: def __init__(self, label, color): self.text = label self.color = color @@ -622,7 +622,7 @@ def _colored_text_item(self, label, text=None, margin=2): label2 = QtGui.QLabel(text) label2.setObjectName("label") label2.setStyleSheet( - "background: rgba({0}, {1}, {2}, 255); " + "background: rgba({}, {}, {}, 255); " "border-radius: 7px; border: 0px solid; " "padding: 1px;".format(*color) ) @@ -653,7 +653,7 @@ def _build_regular_view_plugs(self): param_name = QtGui.QGraphicsTextItem(self) param_name.setHtml(param_text) - plug_name = "%s:%s" % (self.name, in_param) + plug_name = "{}:{}".format(self.name, in_param) try: # color = self.colorLink(field) @@ -697,7 +697,7 @@ def _build_regular_view_plugs(self): param_name = QtGui.QGraphicsTextItem(self) param_name.setHtml(param_text) - plug_name = "%s:%s" % (self.name, out_param) + plug_name = "{}:{}".format(self.name, out_param) try: # color = self.colorLink(field_type_str) @@ -941,7 +941,7 @@ def _create_parameter(self, param_name, pipeline_plug): else: param_name_item = QtGui.QGraphicsTextItem(self) param_name_item.setHtml(param_text) - plug_name = "%s:%s" % (self.name, param_name) + plug_name = "{}:{}".format(self.name, param_name) color = QtCore.Qt.black @@ -1384,7 +1384,7 @@ def mousePressEvent(self, event): if isinstance(item, Plug): item.mousePressEvent(event) return - super(NodeGWidget, self).mousePressEvent(event) + super().mousePressEvent(event) process = get_ref(self.process) if event.button() == QtCore.Qt.RightButton and process is not None: self.scene().node_right_clicked.emit(self.name, process) @@ -1407,7 +1407,7 @@ def mousePressEvent(self, event): event.accept() def keyPressEvent(self, event): - super(NodeGWidget, self).keyPressEvent(event) + super().keyPressEvent(event) if event.key() == QtCore.Qt.Key_Up: self.setPos(self.x(), self.y() - 1) @@ -1426,7 +1426,7 @@ class HandleItem(QtGui.QGraphicsRectItem): """A handle that can be moved by the mouse""" def __init__(self, parent=None): - super(HandleItem, self).__init__(Qt.QRectF(-10.0, -10.0, 10.0, 10.0), parent) + super().__init__(Qt.QRectF(-10.0, -10.0, 10.0, 10.0), parent) # self.setRect(Qt.QRectF(-4.0,-4.0,4.0,4.0)) self.posChangeCallbacks = [] self.setPen(QtGui.QPen(QtCore.Qt.NoPen)) @@ -1466,7 +1466,7 @@ def itemChange(self, change, value): return value # Call superclass method: - return super(HandleItem, self).itemChange(change, value) + return super().itemChange(change, value) def mouseReleaseEvent(self, mouseEvent): self.setSelected(False) @@ -1476,7 +1476,7 @@ def mouseReleaseEvent(self, mouseEvent): class Link(QtGui.QGraphicsPathItem): def __init__(self, origin, target, active, weak, color, parent=None): - super(Link, self).__init__(parent) + super().__init__(parent) self._set_pen(active, weak, 
color) @@ -1553,7 +1553,7 @@ def mousePressEvent(self, event): # self: the scene has to help us. self.scene()._link_right_clicked(self) else: - super(Link, self).mousePressEvent(event) + super().mousePressEvent(event) event.accept() def focusInEvent(self, event): @@ -1571,7 +1571,7 @@ def keyPressEvent(self, event): self.scene()._link_keydelete_clicked(self) event.accept() else: - super(Link, self).keyPressEvent(event) + super().keyPressEvent(event) class PipelineScene(QtGui.QGraphicsScene): @@ -1600,7 +1600,7 @@ class PipelineScene(QtGui.QGraphicsScene): node_keydelete_clicked = QtCore.Signal(str) def __init__(self, parent=None, userlevel=0): - super(PipelineScene, self).__init__(parent) + super().__init__(parent) self.gnodes = {} self.glinks = {} @@ -2248,7 +2248,7 @@ def edition_enabled(self): return self._enable_edition def keyPressEvent(self, event): - super(PipelineScene, self).keyPressEvent(event) + super().keyPressEvent(event) if not event.isAccepted(): if event.key() == QtCore.Qt.Key_P: # print position of boxes @@ -2276,7 +2276,7 @@ def link_tooltip_text(self, source_dest): Parameters ---------- - source_dest: tupe (2 tuples of 2 strings) + source_dest: tuple (2 tuples of 2 strings) link description: ((source_node, source_param), (dest_node, dest_param)) """ @@ -2311,26 +2311,26 @@ def link_tooltip_text(self, source_dest): field_type_str = field.type_str() inst_type = self.get_instance_type_string(value) typestr = ( - ("%s (%s)" % (inst_type, field_type_str)).replace("<", "").replace(">", "") + ("{} ({})".format(inst_type, field_type_str)).replace("<", "").replace(">", "") ) - msg = """

[HTML tooltip template, markup lost in extraction: the removed lines render a "Link:" title and active/weak value cells with %s placeholders] -""" % ( + msg = """ [matching HTML template with {} placeholders] +""".format( source_dest[0][1], active, weak, @@ -2411,30 +2411,30 @@ def plug_tooltip_text(self, node, name): if field.metadata("output", False) and field.metadata("write", None) is False: field_type_str += ", output filename" typestr = ( - ("%s (%s)" % (self.get_instance_type_string(value), field_type_str)) + ("{} ({})".format(self.get_instance_type_string(value), field_type_str)) .replace("<", "") .replace(">", "") ) - msg = """

[HTML tooltip template, markup lost in extraction: the removed lines render a "Plug:" title with output/optional cells plus type: and value: rows, all with %s placeholders] -""" % ( + msg = """ [matching HTML template with {} placeholders] +""".format( name, output, optional, @@ -2486,7 +2486,7 @@ def helpEvent(self, event): """ if self.logical_view: event.setAccepted(False) - super(PipelineScene, self).helpEvent(event) + super().helpEvent(event) return item = self.itemAt(event.scenePos(), Qt.QTransform()) if isinstance(item, Link): @@ -2526,7 +2526,7 @@ def helpEvent(self, event): # secondarily helpEvent() is protected. event.setAccepted(False) - super(PipelineScene, self).helpEvent(event) + super().helpEvent(event) def remove_node(self, node_name): print(self.gnodes) @@ -2705,7 +2705,7 @@ class ProcessNameEdit(Qt.QLineEdit): """A specialized QLineEdit with completion for process name""" def __init__(self, parent=None, class_type_check=is_executable): - super(PipelineDeveloperView.ProcessNameEdit, self).__init__(parent) + super().__init__(parent) self.compl = QtGui.QCompleter([]) self.setCompleter(self.compl) self.textEdited.connect(self.on_text_edited) @@ -2851,7 +2851,7 @@ def __init__( specified, then edition will be activated anyway. """ - super(PipelineDeveloperView, self).__init__(parent) + super().__init__(parent) # self.setAlignment(QtCore.Qt.AlignTop | QtCore.Qt.AlignLeft) self.setAlignment(QtCore.Qt.AlignCenter) @@ -2927,7 +2927,7 @@ def ensure_pipeline(self, pipeline): else: raise Exception( "Expect a Pipeline or a Process, not a " - "'{0}'.".format(repr(pipeline)) + "'{}'.".format(repr(pipeline)) ) return pipeline @@ -3151,10 +3151,10 @@ def wheelEvent(self, event): self.zoom_in() event.accept() if not done: - super(PipelineDeveloperView, self).wheelEvent(event) + super().wheelEvent(event) def mousePressEvent(self, event): - super(PipelineDeveloperView, self).mousePressEvent(event) + super().mousePressEvent(event) if not event.isAccepted(): if event.button() == QtCore.Qt.RightButton: self.open_background_menu() @@ -3179,7 +3179,7 @@ def mouseReleaseEvent(self, event): print("source to destination types are not compatible") print(e) - super(PipelineDeveloperView, self).mouseReleaseEvent(event) + super().mouseReleaseEvent(event) self.scene.update() def mouseMoveEvent(self, event): @@ -3197,7 +3197,7 @@ def mouseMoveEvent(self, event): self._move_grab_link(event) event.accept() else: - super(PipelineDeveloperView, self).mouseMoveEvent(event) + super().mouseMoveEvent(event) def dragEnterEvent(self, event): """Event handler when the mouse enters the widget.
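The recurring rewrites of super(SomeClass, self).method(...) into super().method(...) throughout these Qt widget classes rely on Python 3's zero-argument super(), which fills in the enclosing class and instance automatically and dispatches along the same MRO. A minimal sketch of the equivalence (illustrative only, not Capsul code):

    # Python 3: super() with no arguments inside a method is equivalent
    # to super(EnclosingClass, self); both dispatch to the next class
    # in the method resolution order.
    class Base:
        def close(self):
            return "base closed"

    class View(Base):
        def close(self):
            # same result as super(View, self).close()
            return super().close() + " + view cleanup"

    assert View().close() == "base closed + view cleanup"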
@@ -3314,7 +3314,7 @@ def window(self): if hasattr(self, "_graphics_item"): return self._graphics_item.scene().views()[0].window() else: - return super(PipelineDeveloperView, self).window() + return super().window() def onOpenProcessController(self, node_name, process): """Event to open a sub-process/sub-pipeline controller""" @@ -3354,7 +3354,7 @@ def open_node_menu(self, node_name, process): if isinstance(process, Switch): node_type = "switch" menu = QtGui.QMenu("Node: %s" % node_name, None) - title = menu.addAction("Node: %s (%s)" % (node_name, node_type)) + title = menu.addAction("Node: {} ({})".format(node_name, node_type)) title.setEnabled(False) menu.addSeparator() @@ -3741,7 +3741,7 @@ def enable_step(self, step_name, state): setattr(self.scene.pipeline.pipeline_steps, step_name, state) def disable_preceding_steps(self, step_name, dummy): - # don't know why we get this additionall dummy parameter (False) + # don't know why we get this additional dummy parameter (False) steps = self.scene.pipeline.pipeline_steps for field in steps.fields(): step = field.name @@ -3819,12 +3819,10 @@ def auto_dot_node_positions(self): """ scene = self.scene scale = 67.0 # dpi - nodes_sizes = dict( - [ - (name, (gnode.boundingRect().width(), gnode.boundingRect().height())) + nodes_sizes = { + name: (gnode.boundingRect().width(), gnode.boundingRect().height()) for name, gnode in scene.gnodes.items() - ] - ) + } dgraph = pipeline_tools.dot_graph_from_pipeline( scene.pipeline, nodes_sizes=nodes_sizes ) @@ -3843,24 +3841,20 @@ def auto_dot_node_positions(self): nodes_pos = self._read_dot_pos(toutfile_name) - rects = dict( - [(name, node.boundingRect()) for name, node in scene.gnodes.items()] - ) - pos = dict( - [ - ( - name, + rects = { + name: node.boundingRect() for name, node in scene.gnodes.items() + } + pos = { + name: ( -rects[name].width() / 2 + pos[0] * scale, -rects[name].height() / 2 - pos[1] * scale, - ), - ) + ) for id, name, pos in nodes_pos - ] - ) + } minx = min([x[0] for x in pos.values()]) miny = min([x[1] for x in pos.values()]) - pos = dict([(name, (p[0] - minx, p[1] - miny)) for name, p in pos.items()]) + pos = {name: (p[0] - minx, p[1] - miny) for name, p in pos.items()} # print('pos:') # print(pos) scene.pos = pos @@ -3975,9 +3969,9 @@ def conv_pos(p): return (p.x(), p.y()) return p - posdict = dict( - [(key, conv_pos(value)) for key, value in self.scene.pos.items()] - ) + posdict = { + key: conv_pos(value) for key, value in self.scene.pos.items() + } pprint(posdict) def del_node(self, node_name=None): @@ -4222,7 +4216,7 @@ class ProcessModuleInput(QtGui.QDialog): def __init__( self, display_str="process module/name", class_type_check=is_executable ): - super(PipelineDeveloperView.ProcessModuleInput, self).__init__() + super().__init__() self.setWindowTitle("%s:" % display_str) layout = QtGui.QGridLayout(self) layout.addWidget(QtGui.QLabel("module/process:"), 0, 0) @@ -4378,7 +4372,7 @@ def get_node_instance(class_str, pipeline): class IterativeProcessInput(ProcessModuleInput): def __init__(self, engine): - super(PipelineDeveloperView.IterativeProcessInput, self).__init__() + super().__init__() # hlay = Qt.QHBoxLayout() # self.layout().addLayout(hlay) lay = self.layout() @@ -4449,7 +4443,7 @@ def add_switch(self): class SwitchInput(QtGui.QDialog): def __init__(self): - super(SwitchInput, self).__init__() + super().__init__() self.setWindowTitle("switch parameters/name:") layout = QtGui.QGridLayout(self) layout.addWidget(QtGui.QLabel("inputs:"), 0, 0) @@ -4605,12 +4599,12 @@ def 
_release_grab_link(self, event, ret=False): # if (src != dst) and ("inputs."+src != dst) and not self.isInputYet(dst) : if (src != dst) and ("inputs." + src != dst): - self.scene.pipeline.add_link("%s->%s" % (src, dst)) + self.scene.pipeline.add_link("{}->{}".format(src, dst)) self.scene.update_pipeline() if ret: self._grabbed_plug = None - return "%s->%s" % (src, dst) + return "{}->{}".format(src, dst) self._grabbed_plug = None # def isInputYet(self,dest):##################################################################### add by OM @@ -4639,15 +4633,15 @@ def _link_delete_clicked(self, src_node, src_plug, dst_node, dst_plug): src = src_plug snode = self.scene.pipeline else: - src = "%s.%s" % (src_node, src_plug) + src = "{}.{}".format(src_node, src_plug) snode = self.scene.pipeline.nodes[src_node] if dst_node in ("", "outputs"): dst = dst_plug dnode = self.scene.pipeline else: - dst = "%s.%s" % (dst_node, dst_plug) + dst = "{}.{}".format(dst_node, dst_plug) dnode = self.scene.pipeline.nodes[dst_node] - name = "%s->%s" % (src, dst) + name = "{}->{}".format(src, dst) self._current_link = name # (src_node, src_plug, dst_node, dst_plug) self._del_link() del self._current_link @@ -4664,15 +4658,15 @@ def _link_clicked(self, src_node, src_plug, dst_node, dst_plug): src = src_plug snode = self.scene.pipeline else: - src = "%s.%s" % (src_node, src_plug) + src = "{}.{}".format(src_node, src_plug) snode = self.scene.pipeline.nodes[src_node] if dst_node in ("", "outputs"): dst = dst_plug dnode = self.scene.pipeline else: - dst = "%s.%s" % (dst_node, dst_plug) + dst = "{}.{}".format(dst_node, dst_plug) dnode = self.scene.pipeline.nodes[dst_node] - name = "%s->%s" % (src, dst) + name = "{}->{}".format(src, dst) self._current_link = name # (src_node, src_plug, dst_node, dst_plug) self._current_link_def = (src_node, src_plug, dst_node, dst_plug) @@ -4731,14 +4725,14 @@ def get_doc_browser(self, create=False): class DocBrowser(QWebEngineView): def __init__(self, pview, *args, **kwargs): - super(DocBrowser, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.setAttribute(Qt.Qt.WA_DeleteOnClose) self.pview = pview def closeEvent(self, event): self.pview.doc_browser = None event.accept() - super(DocBrowser, self).closeEvent(event) + super().closeEvent(event) doc_browser = DocBrowser(pv) # QWebEngineView() pv.doc_browser = doc_browser @@ -4948,7 +4942,7 @@ def _plug_right_clicked(self, name): class _PlugEdit(QtGui.QDialog): def __init__(self, show_weak=True, parent=None): - super(PipelineDeveloperView._PlugEdit, self).__init__(parent) + super().__init__(parent) layout = QtGui.QVBoxLayout(self) hlay1 = QtGui.QHBoxLayout() layout.addLayout(hlay1) @@ -5102,7 +5096,7 @@ def new_pipeline(self): def load_pipeline(self, filename="", load_pipeline=True): class LoadProcessUi(Qt.QDialog): def __init__(self, parent=None, old_filename=""): - super(LoadProcessUi, self).__init__(parent) + super().__init__(parent) self.old_filename = old_filename lay = Qt.QVBoxLayout() self.setLayout(lay) @@ -5241,27 +5235,27 @@ def hinted_tuple_hook(obj): ) if filename: - with io.open(filename, "r", encoding="utf8") as fileJson: + with open(filename, encoding="utf8") as fileJson: dic = json.load(fileJson) dic = json.loads(dic, object_hook=hinted_tuple_hook) if "pipeline_parameters" not in list(dic.keys()): raise KeyError( - 'No "pipeline_parameters" key found in {0}.'.format(filename) + 'No "pipeline_parameters" key found in {}.'.format(filename) ) for field_name, field_value in dic["pipeline_parameters"].items(): 
if field_name not in [ field.name for field in self.scene.pipeline.fields() ]: - print('No "{0}" parameter in pipeline.'.format(field_name)) + print('No "{}" parameter in pipeline.'.format(field_name)) try: setattr(self.scene.pipeline, field_name, field_value) except dataclasses.ValidationError: - print("Error for the plug {0}".format(field_name)) + print("Error for the plug {}".format(field_name)) self.scene.pipeline.update_nodes_and_plugs_activation() @@ -5284,14 +5278,14 @@ def hint_tuples(item): return [hint_tuples(e) for e in item] if isinstance(item, dict): - return dict( - (key, hint_tuples(value)) for key, value in item.items() - ) + return { + key: hint_tuples(value) for key, value in item.items() + } else: return item - return super(MultiDimensionalArrayEncoder, self).encode( + return super().encode( hint_tuples(obj) ) @@ -5312,7 +5306,7 @@ def hint_tuples(item): msg.setIcon(QMessageBox.Warning) msg.setText( 'The parameters must be saved in the ".json" format, ' - 'not the "{0}" format'.format(os.path.splitext(filename)[1]) + 'not the "{}" format'.format(os.path.splitext(filename)[1]) ) msg.setWindowTitle("Warning") msg.setStandardButtons(QMessageBox.Ok) diff --git a/capsul/qt_gui/widgets/pipeline_file_warning_widget.py b/capsul/qt_gui/widgets/pipeline_file_warning_widget.py index 9cff6d9ac..9dc945cd8 100644 --- a/capsul/qt_gui/widgets/pipeline_file_warning_widget.py +++ b/capsul/qt_gui/widgets/pipeline_file_warning_widget.py @@ -28,7 +28,7 @@ class PipelineFileWarningWidget(Qt.QSplitter): """ def __init__(self, missing_inputs, overwritten_outputs, parent=None): - super(PipelineFileWarningWidget, self).__init__(QtCore.Qt.Vertical, parent) + super().__init__(QtCore.Qt.Vertical, parent) """ Builds the check widget. diff --git a/capsul/qt_gui/widgets/pipeline_user_view.py b/capsul/qt_gui/widgets/pipeline_user_view.py index e1584ec11..de42003f1 100644 --- a/capsul/qt_gui/widgets/pipeline_user_view.py +++ b/capsul/qt_gui/widgets/pipeline_user_view.py @@ -7,10 +7,8 @@ ------------------------- """ -from __future__ import print_function # System import -from __future__ import absolute_import import sys import tempfile from soma.subprocess import check_call @@ -28,7 +26,7 @@ class PipelineUserView(QtGui.QWidget): def __init__(self, pipeline): """Initialize the WorkflowViewer class""" # Inheritance - super(PipelineUserView, self).__init__() + super().__init__() # Class attributets self.pipeline = pipeline @@ -52,12 +50,12 @@ def update(self): def write(self, out=sys.stdout): graph = self.pipeline.workflow_graph() - out.write("digraph workflow {\n".encode()) + out.write(b"digraph workflow {\n") ids = {} for n in graph._nodes: id = str(len(ids)) ids[n] = id - out.write((' %s [label="%s"];\n' % (id, n)).encode()) + out.write((' {} [label="{}"];\n'.format(id, n)).encode()) for n, v in graph._links: - out.write((" %s -> %s;\n" % (ids[n], ids[v])).encode()) - out.write("}\n".encode()) + out.write((" {} -> {};\n".format(ids[n], ids[v])).encode()) + out.write(b"}\n") diff --git a/capsul/qt_gui/widgets/settings_editor.py b/capsul/qt_gui/widgets/settings_editor.py index aa41a3f34..99891120d 100644 --- a/capsul/qt_gui/widgets/settings_editor.py +++ b/capsul/qt_gui/widgets/settings_editor.py @@ -6,7 +6,7 @@ class SettingsEditor(Qt.QDialog): def __init__(self, config, parent=None): - super(SettingsEditor, self).__init__(parent) + super().__init__(parent) self.config = config @@ -47,16 +47,14 @@ def update_gui(self): self.tab_wid.clear() self.module_tabs = {} resource = 
self.resource_combo.currentText() - non_modules = set( - [ "dataset", "config_modules", "python_modules", "database", "persistent", "start_workers", - ] - ) + non_modules = { "dataset", "config_modules", "python_modules", "database", "persistent", "start_workers", } mod_map = [ f.name for f in getattr(self.config, resource).fields() @@ -79,6 +77,6 @@ def change_resource(self, index): self.update_gui() def accept(self): - super(SettingsEditor, self).accept() + super().accept() for module_name, tab in self.module_tabs.items(): tab.accept() diff --git a/capsul/qt_gui/widgets/viewer_widget.py b/capsul/qt_gui/widgets/viewer_widget.py index 872e0ba48..32230616e 100644 --- a/capsul/qt_gui/widgets/viewer_widget.py +++ b/capsul/qt_gui/widgets/viewer_widget.py @@ -38,7 +38,7 @@ def __init__(self, viewer_node_name, pipeline, study_config): since the viewer node is unactivated """ # Inheritance - super(ViewerWidget, self).__init__() + super().__init__() # Default parameters self.viewer_node_name = viewer_node_name diff --git a/capsul/run.py b/capsul/run.py index 9983adf28..d6e22f488 100644 --- a/capsul/run.py +++ b/capsul/run.py @@ -105,7 +105,7 @@ def execute_job(database, engine_id, execution_id, job_uuid, debug=False): if __name__ == "__main__": if len(sys.argv) != 3: print( - "Wrong number of paramaters, 2 expected:" f"command={sys.argv}", + "Wrong number of parameters, 2 expected: " f"command={sys.argv}", file=sys.stderr, ) sys.exit(1) diff --git a/capsul/sphinxext/__init__.py b/capsul/sphinxext/__init__.py index 89c502a38..10aca90e1 100644 --- a/capsul/sphinxext/__init__.py +++ b/capsul/sphinxext/__init__.py @@ -1,7 +1,7 @@ """ Extension to sphinx to document Capsul processes -This moduls allows to make sphinx source to automatically document Capsul processes and pipelines. The module can be used as a commandline: +This module allows making sphinx sources to automatically document Capsul processes and pipelines. The module can be used from the command line: .. code-block:: bash diff --git a/capsul/sphinxext/capsul_pipeline_rst.py b/capsul/sphinxext/capsul_pipeline_rst.py index 9011d9340..93d393987 100644 --- a/capsul/sphinxext/capsul_pipeline_rst.py +++ b/capsul/sphinxext/capsul_pipeline_rst.py @@ -35,7 +35,7 @@ default=default_output_dir, help="output base directory. Docs will be generated in " "sub-directories there, named by their module names. 
" - "default: {0}".format(default_output_dir), + "default: {}".format(default_output_dir), ) parser.add_option( "-s", @@ -62,12 +62,12 @@ if options.verbose: logging.basicConfig( level=logging.DEBUG, - format="{0}::%(asctime)s::%(levelname)s::%(message)s".format(logger.name), + format="{}::%(asctime)s::%(levelname)s::%(message)s".format(logger.name), ) else: logging.basicConfig( level=logging.INFO, - format="{0}::%(asctime)s::%(levelname)s::%(message)s".format(logger.name), + format="{}::%(asctime)s::%(levelname)s::%(message)s".format(logger.name), ) base_outdir = options.outdir @@ -79,7 +79,7 @@ from capsul.sphinxext.pipelinedocgen import PipelineHelpWriter ############################################################################### -# Generate shemas first +# Generate schemas first ############################################################################### if schema and shutil.which("dot"): @@ -108,8 +108,8 @@ descriptions = find_pipeline_and_process(os.path.basename(options.module)) pipelines = descriptions["pipeline_descs"] processes = descriptions["process_descs"] -logger.info("Found '{0}' pipeline(s) in '{1}'.".format(len(pipelines), options.module)) -logger.info("Found '{0}' process(es) in '{1}'.".format(len(processes), options.module)) +logger.info("Found '{}' pipeline(s) in '{}'.".format(len(pipelines), options.module)) +logger.info("Found '{}' process(es) in '{}'.".format(len(processes), options.module)) ############################################################################### # Sort pipelines and processes by module names @@ -124,7 +124,7 @@ ): # From the modules full path 'm1.m2.pipeline/process' get the module # name 'm2' - module_names = set([x.split(".")[1] for x in modules]) + module_names = {x.split(".")[1] for x in modules} # Sort each item according to its module name. # The result is a dict of the form 'd[m2] = [pipeline/process1, ...]'. @@ -166,8 +166,8 @@ # Just print a summary logger.info( - "{0}: '{1}' files written for module '{2}' at location " - "{3}.".format( + "{}: '{}' files written for module '{}' at location " + "{}.".format( dtype, len(docwriter.written_modules), module_name, @@ -196,6 +196,6 @@ outdir, module_name, options.module.rsplit(".", 1)[0], have_usecases=False ) logger.info( - "Index: an index has been written for module '{0}' at " - "location {1}.".format(module_name, os.path.abspath(outdir)) + "Index: an index has been written for module '{}' at " + "location {}.".format(module_name, os.path.abspath(outdir)) ) diff --git a/capsul/sphinxext/capsul_pipeline_view.py b/capsul/sphinxext/capsul_pipeline_view.py index 4363f05ca..3577a4a94 100644 --- a/capsul/sphinxext/capsul_pipeline_view.py +++ b/capsul/sphinxext/capsul_pipeline_view.py @@ -2,7 +2,6 @@ """ # System import -from __future__ import absolute_import import os from optparse import OptionParser import logging @@ -36,7 +35,7 @@ default=default_output_dir, help="output base directory. Docs will be generated in " "sub-directories there, named by their module names. 
" - "default: {0}".format(default_output_dir), + "default: {}".format(default_output_dir), ) parser.add_option( "-s", @@ -57,12 +56,12 @@ if options.verbose: logging.basicConfig( level=logging.DEBUG, - format="{0}::%(asctime)s::%(levelname)s::%(message)s".format(logger.name), + format="{}::%(asctime)s::%(levelname)s::%(message)s".format(logger.name), ) else: logging.basicConfig( level=logging.INFO, - format="{0}::%(asctime)s::%(levelname)s::%(message)s".format(logger.name), + format="{}::%(asctime)s::%(levelname)s::%(message)s".format(logger.name), ) base_outdir = options.outdir @@ -78,14 +77,14 @@ pipelines = find_pipeline_and_process(os.path.basename(options.module))[ "pipeline_descs" ] -logger.info("Found '{0}' pipeline(s) in '{1}'.".format(len(pipelines), options.module)) +logger.info("Found '{}' pipeline(s) in '{}'.".format(len(pipelines), options.module)) # Sort pipelines processes # From the pipelines full path 'm1.m2.pipeline' get there module names 'm2' -module_names = set([x.split(".")[1] for x in pipelines]) +module_names = {x.split(".")[1] for x in pipelines} # Sort each pipeline according to its module name. # The result is a dict of the form 'd[m2] = [pipeline1, pipeline2, ...]'. -sorted_pipelines = dict((x, []) for x in module_names) +sorted_pipelines = {x: [] for x in module_names} for pipeline in pipelines: module_name = pipeline.split(".")[1] sorted_pipelines[module_name].append(pipeline) @@ -114,13 +113,13 @@ pipeline_instance, image_name, nodesep=0.1, include_io=False, rankdir="TB" ) logger.info( - "Pipeline '{0}' representation has been written at " - "location '{1}'.".format(module_pipeline, os.path.abspath(image_name)) + "Pipeline '{}' representation has been written at " + "location '{}'.".format(module_pipeline, os.path.abspath(image_name)) ) # Just print a summary logger.info( - "Summary: '{0}' files written for module '{1}'.".format( + "Summary: '{}' files written for module '{}'.".format( len(module_pipelines), module_name ) ) diff --git a/capsul/sphinxext/capsul_sphinx_layout.py b/capsul/sphinxext/capsul_sphinx_layout.py index 098ab506a..027fd02b4 100644 --- a/capsul/sphinxext/capsul_sphinx_layout.py +++ b/capsul/sphinxext/capsul_sphinx_layout.py @@ -11,7 +11,6 @@ """ # System import -from __future__ import absolute_import import os from optparse import OptionParser import logging @@ -43,7 +42,7 @@ default=default_output_dir, help="output base directory. Docs will be generated in " "sub-directories there, named by their module names. 
" - "default: {0}".format(default_output_dir), + "default: {}".format(default_output_dir), ) (options, args) = parser.parse_args() if options.module is None: @@ -54,12 +53,12 @@ if options.verbose: logging.basicConfig( level=logging.DEBUG, - format="{0}::%(asctime)s::%(levelname)s::%(message)s".format(logger.name), + format="{}::%(asctime)s::%(levelname)s::%(message)s".format(logger.name), ) else: logging.basicConfig( level=logging.INFO, - format="{0}::%(asctime)s::%(levelname)s::%(message)s".format(logger.name), + format="{}::%(asctime)s::%(levelname)s::%(message)s".format(logger.name), ) # Capsul import @@ -71,14 +70,14 @@ descriptions = find_pipeline_and_process(os.path.basename(options.module)) pipelines = descriptions["pipeline_descs"] processes = descriptions["process_descs"] -logger.info("Found '{0}' pipeline(s) in '{1}'.".format(len(pipelines), options.module)) -logger.info("Found '{0}' process(es) in '{1}'.".format(len(processes), options.module)) +logger.info("Found '{}' pipeline(s) in '{}'.".format(len(pipelines), options.module)) +logger.info("Found '{}' process(es) in '{}'.".format(len(processes), options.module)) # Get all the modules involved module_names = [x.split(".")[1] for x in pipelines] module_names.extend([x.split(".")[1] for x in processes]) module_names = set(module_names) -logger.info("Module names for layout generation '{0}'.".format(module_names)) +logger.info("Module names for layout generation '{}'.".format(module_names)) # Create object to write the sphinx template elements docwriter = LayoutHelperWriter(module_names, options.module) @@ -88,19 +87,19 @@ # Generate the sphinx main index ############################################################################### -logger.info("Generating documentation index in '{0}'.".format(os.path.abspath(outdir))) +logger.info("Generating documentation index in '{}'.".format(os.path.abspath(outdir))) docwriter.write_index(outdir, froot="documentation") ############################################################################### # Generate installation recommendation ############################################################################### -logger.info("Generating installation index in '{0}'.".format(os.path.abspath(outdir))) +logger.info("Generating installation index in '{}'.".format(os.path.abspath(outdir))) docwriter.write_installation(outdir) ############################################################################### # Generate the layout ############################################################################### -logger.info("Generating layout index in '{0}'.".format(os.path.abspath(outdir))) +logger.info("Generating layout index in '{}'.".format(os.path.abspath(outdir))) docwriter.write_layout(os.path.join(outdir, "_templates")) diff --git a/capsul/sphinxext/capsul_usecases_rst.py b/capsul/sphinxext/capsul_usecases_rst.py index 611b1eaf3..2e3dcc456 100644 --- a/capsul/sphinxext/capsul_usecases_rst.py +++ b/capsul/sphinxext/capsul_usecases_rst.py @@ -8,10 +8,8 @@ """Script to auto-generate use cases rst documentation. """ -from __future__ import print_function # System import -from __future__ import absolute_import import os import sys from optparse import OptionParser @@ -44,7 +42,7 @@ default=default_output_dir, help="output base directory. Docs will be generated in " "sub-directories there, named by their module names. 
" - "default: {0}".format(default_output_dir), + "default: {}".format(default_output_dir), ) (options, args) = parser.parse_args() if options.module is None: @@ -55,12 +53,12 @@ if options.verbose: logging.basicConfig( level=logging.DEBUG, - format="{0}::%(asctime)s::%(levelname)s::%(message)s".format(logger.name), + format="{}::%(asctime)s::%(levelname)s::%(message)s".format(logger.name), ) else: logging.basicConfig( level=logging.INFO, - format="{0}::%(asctime)s::%(levelname)s::%(message)s".format(logger.name), + format="{}::%(asctime)s::%(levelname)s::%(message)s".format(logger.name), ) base_outdir = options.outdir @@ -75,7 +73,7 @@ try: __import__(options.module) except ImportError: - logging.error("Can't load module {0}".format(options.module)) + logging.error("Can't load module {}".format(options.module)) exit(2) module = sys.modules[options.module] module_path = module.__path__[0] @@ -84,7 +82,7 @@ # Sort all the pilots # > from the pilots full path 'm1.m2.pipeline' get the module name 'm2' -module_names = set([x.split(".")[1] for x in pilots]) +module_names = {x.split(".")[1] for x in pilots} # > sort each pilot according to its module name. # > the result is a dict of the form 'd[m2] = [pilot1, ...]' sorted_pilots = {} @@ -114,7 +112,7 @@ # Just print a summary logger.info( - "'{0}' files written for module '{1}'.".format( + "'{}' files written for module '{}'.".format( len(docwriter.written_usecases), module_name ) ) diff --git a/capsul/sphinxext/layoutdocgen.py b/capsul/sphinxext/layoutdocgen.py index 6c7b93c3f..1388caa45 100644 --- a/capsul/sphinxext/layoutdocgen.py +++ b/capsul/sphinxext/layoutdocgen.py @@ -7,7 +7,6 @@ ########################################################################## # System import -from __future__ import absolute_import import os import sys import six @@ -16,7 +15,7 @@ import traceback -class LayoutHelperWriter(object): +class LayoutHelperWriter: """ A basic class to create sphinx layout and associated index. """ def __init__(self, module_names, root_module_name, rst_extension=".rst"): @@ -52,26 +51,26 @@ def generate_index_entry(self, module_name, indent=4): the reST formatted index description. """ # Try to get the module description - full_module_name = "{0}.{1}".format(self.root_module_name, module_name) + full_module_name = "{}.{}".format(self.root_module_name, module_name) try: __import__(full_module_name) except ImportError: exc_info = sys.exc_info() logging.error("".join(traceback.format_exception(*exc_info))) logging.error( - "Can't load module {0}".format(full_module_name)) + "Can't load module {}".format(full_module_name)) module = sys.modules[full_module_name] description = module.__doc__ # Then reST formatting spacer = " " * 4 ad = spacer + "
\n" - ad += spacer + "

\n".format(module_name) - ad += spacer + "{0} module\n".format(module_name) + ad += spacer + "

\n".format(module_name) + ad += spacer + "{} module\n".format(module_name) ad += spacer + "

\n" ad += spacer + "
\n" if description is not None: - ad += spacer + "{0}\n".format(("\n" + spacer).join( + ad += spacer + "{}\n".format(("\n" + spacer).join( self.rst2html(description).splitlines())) ad += spacer + "
\n" ad += spacer + "

\n" @@ -138,7 +137,7 @@ def write_layout(self, outdir): exc_info = sys.exc_info() logging.error("".join(traceback.format_exception(*exc_info))) logging.error( - "Can't load module {0}".format(self.root_module_name)) + "Can't load module {}".format(self.root_module_name)) module = sys.modules[self.root_module_name] release_info = {} exec(compile(open(os.path.join(module.__path__[0], "info.py"), "rb").read(), os.path.join(module.__path__[0], "info.py"), 'exec'), release_info) @@ -162,7 +161,7 @@ def write_layout(self, outdir): "" % item) else: indicators.append( - "
  • ".format(cnt)) images.append( "
    " @@ -184,14 +183,14 @@ def write_layout(self, outdir): path = os.path.join(outdir, "layout.html") # Start writing the index - idx = open(path, "wt") + idx = open(path, "w") w = idx.write # Edit the template with open(layout_file) as open_file: s = "".join(open_file.readlines()) - for key, value in six.iteritems(layout_info): - s = s.replace("%({0})s".format(key), value) + for key, value in layout_info.items(): + s = s.replace("%({})s".format(key), value) w(s) # Close the open file @@ -214,7 +213,7 @@ def write_installation(self, outdir): os.path.dirname(__file__), "resources", "installation.rst") # Generate title - title = "Installing `{0}`".format(self.root_module_name.upper()) + title = "Installing `{}`".format(self.root_module_name.upper()) title = [self.rst_section_levels[1] * len(title), title, self.rst_section_levels[1] * len(title)] @@ -229,14 +228,14 @@ def write_installation(self, outdir): path = os.path.join(outdir, "installation.rst") # Start writing the index - idx = open(path, "wt") + idx = open(path, "w") w = idx.write # Edit the template with open(layout_file) as open_file: s = "".join(open_file.readlines()) - for key, value in six.iteritems(layout_info): - s = s.replace("%({0})s".format(key), value) + for key, value in layout_info.items(): + s = s.replace("%({})s".format(key), value) w(s) # Close the open file @@ -263,14 +262,14 @@ def write_index(self, outdir, froot="index", rst_extension=".rst"): path = os.path.join(outdir, froot + rst_extension) # Start writing the index - idx = open(path, "wt") + idx = open(path, "w") w = idx.write # Header w(".. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n") w(".. raw:: html\n\n") w("
    \n\n") - title = "Documentation of the {0} Pipelines\n".format( + title = "Documentation of the {} Pipelines\n".format( self.root_module_name.upper()) w(title) w(self.rst_section_levels[1] * len(title) + "\n\n") diff --git a/capsul/sphinxext/load_pilots.py b/capsul/sphinxext/load_pilots.py index 7ed0cc588..6a89b9e0c 100644 --- a/capsul/sphinxext/load_pilots.py +++ b/capsul/sphinxext/load_pilots.py @@ -7,7 +7,6 @@ ########################################################################## # System import -from __future__ import absolute_import import os import sys import logging @@ -79,7 +78,7 @@ def load_pilots(root, path, root_module_name): # An api exists, but it cannot be imported except ImportError as e: - logging.debug("Could not import {0}: {1}".format(module_name, e)) + logging.debug("Could not import {}: {}".format(module_name, e)) raise return pilots diff --git a/capsul/sphinxext/pipelinedocgen.py b/capsul/sphinxext/pipelinedocgen.py index 935d381e3..35641cbcf 100644 --- a/capsul/sphinxext/pipelinedocgen.py +++ b/capsul/sphinxext/pipelinedocgen.py @@ -3,7 +3,7 @@ from capsul.api import executable -class PipelineHelpWriter(object): +class PipelineHelpWriter: """ Class for automatic generation of pipeline API documentations in Sphinx-parsable reST format. """ @@ -62,12 +62,12 @@ def generate_api_doc(self, pipeline, schema): # Set the current module currentmodule = ".".join(pipeline_instance.definition.split(".")[:-1]) - ad += ".. currentmodule:: {0}\n\n".format(currentmodule) + ad += ".. currentmodule:: {}\n\n".format(currentmodule) # Generate a bookmark (for cross references) pipeline_name = pipeline_instance.__class__.__name__ label = pipeline + ":" - ad += "\n.. _{0}\n\n".format(label) + ad += "\n.. _{}\n\n".format(label) chap_title = pipeline ad += (chap_title + "\n" + @@ -86,7 +86,7 @@ def generate_api_doc(self, pipeline, schema): schama_title = "Pipeline schema" ad += ("\n" + schama_title + "\n" + "~" * len(schama_title) + "\n\n") - ad += ".. image:: {0}\n".format(schema) + ad += ".. image:: {}\n".format(schema) ad += " :height: 400px\n" ad += " :align: center\n\n" @@ -141,7 +141,7 @@ def write_api_docs(self, outdir=None, returnrst=False): if returnrst is False: outfile = os.path.join(outdir, pipeline_short + self.rst_extension) - fileobj = open(outfile, "wt") + fileobj = open(outfile, "w") fileobj.write(api_str) fileobj.close() else: @@ -208,7 +208,7 @@ def write_index(self, outdir, froot="index", relative_to=None, print('relpath:', relpath) # Edit the index file - idx = open(path, "wt") + idx = open(path, "w") w = idx.write # Add header to tell us that this documentation must not be edited @@ -235,9 +235,9 @@ def write_index(self, outdir, froot="index", relative_to=None, print('ref:', ref) table.append("
  • ") table.append( - "\n".format(ref, relative_pipeline)) - table.append("".format(title_str)) + "\n".format(ref, relative_pipeline)) + table.append("".format(title_str)) table.append("") # Close divs @@ -274,7 +274,7 @@ def write_main_index(self, outdir, module_name, root_module_name, path = os.path.join(outdir, froot + rst_extension) # Open the result index file - idx = open(path, "wt") + idx = open(path, "w") # Stat writing w = idx.write @@ -290,11 +290,11 @@ def write_main_index(self, outdir, module_name, root_module_name, # Generate a markup label = module_name - w(".. _{0}:\n\n".format(label)) + w(".. _{}:\n\n".format(label)) # Page use cases # # Generate a title - chap_title = ":mod:`{0}.{1}`: User Guide".format( + chap_title = ":mod:`{}.{}`: User Guide".format( root_module_name, module_name) w(chap_title + "\n" + self.rst_section_levels[1] * len(chap_title) + "\n\n") @@ -302,7 +302,7 @@ def write_main_index(self, outdir, module_name, root_module_name, if have_usecases: # # Generate a markup label = module_name + "_ug" - w(".. _{0}:\n\n".format(label)) + w(".. _{}:\n\n".format(label)) # # Some text description w("Some live examples containing snippets of codes.\n\n") # # Include user guide index @@ -310,18 +310,18 @@ def write_main_index(self, outdir, module_name, root_module_name, # API page # # Generate a title - chap_title = ":mod:`{0}.{1}`: API".format( + chap_title = ":mod:`{}.{}`: API".format( root_module_name, module_name) w(chap_title + "\n" + self.rst_section_levels[1] * len(chap_title) + "\n\n") # # Generate a markup label = module_name + "_api" - w(".. _{0}:\n\n".format(label)) + w(".. _{}:\n\n".format(label)) # # Some text description w("The API of functions and classes, as given by the " "docstrings.") if have_usecases: - w(" For the *user guide* see the {0}_ug_ " + w(" For the *user guide* see the {}_ug_ " "section for further details.\n\n".format(module_name)) else: w("\n\n") diff --git a/capsul/sphinxext/resources/custom_ext/hidden_code_block.py b/capsul/sphinxext/resources/custom_ext/hidden_code_block.py index 7925841b4..08cf70d8d 100644 --- a/capsul/sphinxext/resources/custom_ext/hidden_code_block.py +++ b/capsul/sphinxext/resources/custom_ext/hidden_code_block.py @@ -32,7 +32,6 @@ Released under the WTFPL (http://sam.zoy.org/wtfpl/). """ -from __future__ import absolute_import from docutils import nodes from docutils.parsers.rst import directives from sphinx.directives.code import CodeBlock @@ -102,7 +101,7 @@ def visit_hcb_html(self, node): code_block = self.body[-1] fill_header = { - "divname": "hiddencodeblock{0}".format(HCB_COUNTER), + "divname": "hiddencodeblock{}".format(HCB_COUNTER), "startdisplay": "none" if node["starthidden"] else "block", "label": node.get("label"), } diff --git a/capsul/sphinxext/resources/custom_ext/hidden_technical_block.py b/capsul/sphinxext/resources/custom_ext/hidden_technical_block.py index 716ec150a..2efcf31a7 100644 --- a/capsul/sphinxext/resources/custom_ext/hidden_technical_block.py +++ b/capsul/sphinxext/resources/custom_ext/hidden_technical_block.py @@ -1,5 +1,4 @@ # System import -from __future__ import absolute_import import logging # Docutils import @@ -71,16 +70,16 @@ def run(self): resource_path = item.replace(".. 
include:: ", "") # Try to open the file try: - fo = open(resource_path, "r") + fo = open(resource_path) # Item content is a string or buffer item_content = [x.replace("\n", "") for x in fo.readlines()] for string_content in item_content: new_content.append( - six.text_type(string_content), source=self.content + str(string_content), source=self.content ) fo.close() except MyError as e: - item_content = "Can't open the resource file " "'{0}'".format( + item_content = "Can't open the resource file " "'{}'".format( resource_path ) logging.error(item_content + e.value) @@ -92,7 +91,7 @@ def run(self): # Replace old content item self.content = new_content # Call the parent class method - return super(HiddenTechnicalBlock, self).run() + return super().run() # Add html writer @@ -102,7 +101,7 @@ def visit_htb_html(self, node): global HTB_COUNTER HTB_COUNTER += 1 - # Vist the node + # Visit the node self.visit_admonition(node) # Get the last element of the html body @@ -111,7 +110,7 @@ def visit_htb_html(self, node): # Get the node options fill_header = { - "divname": "hiddencodeblock{0}".format(HTB_COUNTER), + "divname": "hiddencodeblock{}".format(HTB_COUNTER), "startdisplay": "none" if node["starthidden"] else "block", "label": node.get("label", "[+ show/hide technical details]"), } diff --git a/capsul/sphinxext/resources/custom_ext/link_to_block.py b/capsul/sphinxext/resources/custom_ext/link_to_block.py index 8585ac1f1..a71389490 100644 --- a/capsul/sphinxext/resources/custom_ext/link_to_block.py +++ b/capsul/sphinxext/resources/custom_ext/link_to_block.py @@ -1,5 +1,4 @@ # System import -from __future__ import absolute_import import os # Docutils import @@ -29,12 +28,12 @@ class LinkToBlock(BaseAdmonition): def run(self): # Construct an empty node new_content = ViewList() - ref = ":ref:`{0} <{1}>`".format( + ref = ":ref:`{} <{}>`".format( self.options.get("label", "Link To"), "".join(self.arguments) ) new_content.append(ref, source=self.content) self.content = new_content - return super(LinkToBlock, self).run() + return super().run() # Add html writer @@ -43,7 +42,7 @@ def visit_ltb_html(self, node): # Generate the html div position = node.get("right-side", True) self.body.append( - "
    ".format("buttonNext" if position else "buttonPrevious") + "
    ".format("buttonNext" if position else "buttonPrevious") ) diff --git a/capsul/sphinxext/resources/installation.rst b/capsul/sphinxext/resources/installation.rst index bc9d8c1c0..a464491a5 100644 --- a/capsul/sphinxext/resources/installation.rst +++ b/capsul/sphinxext/resources/installation.rst @@ -4,7 +4,7 @@ %(TITLE)s -This tutorial will walk you through the process of intalling %(NAME_UPPER)s... +This tutorial will walk you through the process of installing %(NAME_UPPER)s... * :ref:`Install an official release `. This is the best approach for users who want a stable version. diff --git a/capsul/sphinxext/resources/numpy_ext/docscrape.py b/capsul/sphinxext/resources/numpy_ext/docscrape.py index 2b1719db5..9f17ffee8 100644 --- a/capsul/sphinxext/resources/numpy_ext/docscrape.py +++ b/capsul/sphinxext/resources/numpy_ext/docscrape.py @@ -1,7 +1,6 @@ """Extract reference documentation from the NumPy source tree. """ -from __future__ import division, absolute_import, print_function import inspect import textwrap @@ -12,7 +11,7 @@ import sys -class Reader(object): +class Reader: """A line-based string reader. """ @@ -86,7 +85,7 @@ def is_empty(self): return not ''.join(self._str).strip() -class NumpyDocString(object): +class NumpyDocString: def __init__(self, docstring, config={}): docstring = textwrap.dedent(docstring).split('\n') @@ -272,7 +271,7 @@ def _parse_summary(self): while True: summary = self._doc.read_to_next_empty_line() summary_str = " ".join([s.strip() for s in summary]).strip() - if re.compile('^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str): + if re.compile(r'^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str): self['Signature'] = summary_str if not self._is_at_section(): continue @@ -314,7 +313,7 @@ def _str_indent(self, doc, indent=4): def _str_signature(self): if self['Signature']: - return [self['Signature'].replace('*','\*')] + [''] + return [self['Signature'].replace('*',r'\*')] + [''] else: return [''] @@ -336,7 +335,7 @@ def _str_param_list(self, name): out += self._str_header(name) for param,param_type,desc in self[name]: if param_type: - out += ['%s : %s' % (param, param_type)] + out += ['{} : {}'.format(param, param_type)] else: out += [param] out += self._str_indent(desc) @@ -358,9 +357,9 @@ def _str_see_also(self, func_role): last_had_desc = True for func, desc, role in self['See Also']: if role: - link = ':%s:`%s`' % (role, func) + link = ':{}:`{}`'.format(role, func) elif func_role: - link = ':%s:`%s`' % (func_role, func) + link = ':{}:`{}`'.format(func_role, func) else: link = "`%s`_" % func if desc or last_had_desc: @@ -383,7 +382,7 @@ def _str_index(self): for section, references in idx.items(): if section == 'default': continue - out += [' :%s: %s' % (section, ', '.join(references))] + out += [' :{}: {}'.format(section, ', '.join(references))] return out def __str__(self, func_role=''): @@ -439,8 +438,8 @@ def __init__(self, func, role='func', doc=None, config={}): else: argspec = inspect.getargspec(func) argspec = inspect.formatargspec(*argspec) - argspec = argspec.replace('*','\*') - signature = '%s%s' % (func_name, argspec) + argspec = argspec.replace('*',r'\*') + signature = '{}{}'.format(func_name, argspec) except TypeError as e: signature = '%s()' % func_name self['Signature'] = signature @@ -457,7 +456,7 @@ def __str__(self): out = '' func, func_name = self.get_func() - signature = self['Signature'].replace('*', '\*') + signature = self['Signature'].replace('*', r'\*') roles = {'func': 'function', 'meth': 'method'} @@ -465,10 +464,10 @@ def 
__str__(self): if self._role: if self._role not in roles: print("Warning: invalid role %s" % self._role) - out += '.. %s:: %s\n \n\n' % (roles.get(self._role,''), + out += '.. {}:: {}\n \n\n'.format(roles.get(self._role,''), func_name) - out += super(FunctionDoc, self).__str__(func_role=self._role) + out += super().__str__(func_role=self._role) return out @@ -519,7 +518,7 @@ def methods(self): return [name for name,func in inspect.getmembers(self._cls) if ((not name.startswith('_') or name in self.extra_public_methods) - and isinstance(func, collections.Callable))] + and isinstance(func, collections.abc.Callable))] @property def properties(self): diff --git a/capsul/sphinxext/resources/numpy_ext/docscrape_sphinx.py b/capsul/sphinxext/resources/numpy_ext/docscrape_sphinx.py index 747507dd8..6b4c7cc32 100644 --- a/capsul/sphinxext/resources/numpy_ext/docscrape_sphinx.py +++ b/capsul/sphinxext/resources/numpy_ext/docscrape_sphinx.py @@ -1,5 +1,3 @@ -from __future__ import division, absolute_import, print_function - import sys, re, inspect, textwrap, pydoc import sphinx import collections @@ -51,7 +49,7 @@ def _str_returns(self): out += [''] for param, param_type, desc in self['Returns']: if param_type: - out += self._str_indent(['**%s** : %s' % (param.strip(), + out += self._str_indent(['**{}** : {}'.format(param.strip(), param_type)]) else: out += self._str_indent([param.strip()]) @@ -68,7 +66,7 @@ def _str_param_list(self, name): out += [''] for param, param_type, desc in self[name]: if param_type: - out += self._str_indent(['**%s** : %s' % (param.strip(), + out += self._str_indent(['**{}** : {}'.format(param.strip(), param_type)]) else: out += self._str_indent(['**%s**' % param.strip()]) @@ -114,7 +112,7 @@ def _str_member_list(self, name): if param_obj and (pydoc.getdoc(param_obj) or not desc): # Referenced object has a docstring - autosum += [" %s%s" % (prefix, param)] + autosum += [" {}{}".format(prefix, param)] else: others.append((param, param_type, desc)) @@ -132,7 +130,7 @@ def _str_member_list(self, name): for param, param_type, desc in others: desc = sixu(" ").join(x.strip() for x in desc).strip() if param_type: - desc = "(%s) %s" % (param_type, desc) + desc = "({}) {}".format(param_type, desc) out += [fmt % (param.strip(), desc)] out += [hdr] out += [''] @@ -151,7 +149,7 @@ def _str_section(self, name): def _str_see_also(self, func_role): out = [] if self['See Also']: - see_also = super(SphinxDocString, self)._str_see_also(func_role) + see_also = super()._str_see_also(func_role) out = ['.. seealso::', ''] out += self._str_indent(see_also[2:]) return out @@ -176,7 +174,7 @@ def _str_index(self): elif section == 'refguide': out += [' single: %s' % (', '.join(references))] else: - out += [' %s: %s' % (section, ','.join(references))] + out += [' {}: {}'.format(section, ','.join(references))] return out def _str_references(self): @@ -257,7 +255,7 @@ def get_doc_object(obj, what=None, doc=None, config={}): what = 'class' elif inspect.ismodule(obj): what = 'module' - elif isinstance(obj, collections.Callable): + elif isinstance(obj, collections.abc.Callable): what = 'function' else: what = 'object' diff --git a/capsul/sphinxext/resources/numpy_ext/numpydoc.py b/capsul/sphinxext/resources/numpy_ext/numpydoc.py index 66896a2c8..ce51c2b0a 100644 --- a/capsul/sphinxext/resources/numpy_ext/numpydoc.py +++ b/capsul/sphinxext/resources/numpy_ext/numpydoc.py @@ -15,7 +15,6 @@ .. 
[1] https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt """ -from __future__ import division, absolute_import, print_function import os, sys, re, pydoc import sphinx @@ -50,7 +49,7 @@ def mangle_docstrings(app, what, name, obj, options, lines, if sys.version_info[0] >= 3: doc = str(doc) else: - doc = six.text_type(doc) + doc = str(doc) lines[:] = doc.split(sixu("\n")) if app.config.numpydoc_edit_link and hasattr(obj, '__name__') and \ @@ -94,7 +93,7 @@ def mangle_signature(app, what, name, obj, options, sig, retann): 'initializes x; see ' in pydoc.getdoc(obj.__init__))): return '', '' - if not (isinstance(obj, collections.Callable) or hasattr(obj, '__argspec_is_invalid_')): return + if not (isinstance(obj, collections.abc.Callable) or hasattr(obj, '__argspec_is_invalid_')): return if not hasattr(obj, '__doc__'): return doc = SphinxDocString(pydoc.getdoc(obj)) @@ -128,11 +127,11 @@ def setup(app, get_doc_object_=get_doc_object): from sphinx.domains.c import CDomain from sphinx.domains.python import PythonDomain -class ManglingDomainBase(object): +class ManglingDomainBase: directive_mangling_map = {} def __init__(self, *a, **kw): - super(ManglingDomainBase, self).__init__(*a, **kw) + super().__init__(*a, **kw) self.wrap_mangling_directives() def wrap_mangling_directives(self): diff --git a/capsul/sphinxext/test/test_usercases_doc.py b/capsul/sphinxext/test/test_usercases_doc.py index e5912612b..fdda53536 100644 --- a/capsul/sphinxext/test/test_usercases_doc.py +++ b/capsul/sphinxext/test/test_usercases_doc.py @@ -1,8 +1,4 @@ -from __future__ import with_statement - # System import -from __future__ import absolute_import -from __future__ import print_function import unittest # Capsul import diff --git a/capsul/sphinxext/usecasesdocgen.py b/capsul/sphinxext/usecasesdocgen.py index 194201bcc..aaa1e0fb6 100644 --- a/capsul/sphinxext/usecasesdocgen.py +++ b/capsul/sphinxext/usecasesdocgen.py @@ -7,19 +7,17 @@ ########################################################################## # System import -from __future__ import absolute_import import inspect import ast import os import logging import six -from six.moves import range # Define logger logger = logging.getLogger(__file__) -class UseCasesHelperWriter(object): +class UseCasesHelperWriter: """ A basic class to convert the pilot codes to rst use cases """ def __init__(self, pilots, rst_extension=".rst"): @@ -72,12 +70,12 @@ def generate_usecases_doc(self, src_code, module_name): nb_lines = len(lines) ad = ".. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n" ad += ":orphan:\n\n" - ad += ".. _example_{0} :\n\n".format(module_name) + ad += ".. 
_example_{} :\n\n".format(module_name) line_start_code = 0 line_end_code = 0 is_header = True - full_code = "# The full use case code: {0}\n".format(module_name) + full_code = "# The full use case code: {}\n".format(module_name) for code_item in pilot_tree: if (isinstance(code_item, ast.Expr) and isinstance(code_item.value, ast.Str)): @@ -146,7 +144,7 @@ def write_usecases_docs(self, outdir=None, returnrst=False): """ # Check output directory if returnrst is False: - if not isinstance(outdir, six.string_types): + if not isinstance(outdir, str): raise Exception("If 'returnrst' is False, need a valid output " "directory.") if not os.path.exists(outdir): @@ -159,7 +157,7 @@ def write_usecases_docs(self, outdir=None, returnrst=False): for pilot in self.pilots: # Information message - logger.info("Processing pilot '{0}' in module '{1}'...".format( + logger.info("Processing pilot '{}' in module '{}'...".format( pilot.__name__, pilot.__module__)) # Generate reST @@ -173,7 +171,7 @@ def write_usecases_docs(self, outdir=None, returnrst=False): # Write to file if returnrst is False: outfile = os.path.join(outdir, uid + self.rst_extension) - fileobj = open(outfile, "wt") + fileobj = open(outfile, "w") fileobj.write(use_case_str) fileobj.close() else: @@ -216,7 +214,7 @@ def write_index(self, outdir, froot="index", relative_to=None, relpath = outdir.replace(relative_to + os.path.sep, "") else: relpath = outdir - idx = open(path, "wt") + idx = open(path, "w") w = idx.write w(".. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n") w(".. raw:: html\n\n") @@ -235,9 +233,9 @@ def write_index(self, outdir, froot="index", relative_to=None, ref = os.path.join(relpath, f + ".html") table.append("
    ") table.append( - "\n".format(ref, relative_uid)) - table.append("".format(title_str)) + "\n".format(ref, relative_uid)) + table.append("".format(title_str)) table.append("") table.append("\n\n") diff --git a/capsul/test/test_completion.py b/capsul/test/test_completion.py index 21b06b9f6..a53db68a4 100644 --- a/capsul/test/test_completion.py +++ b/capsul/test/test_completion.py @@ -211,8 +211,8 @@ def test_pipeline_completion(self): metadata.bids = input_metadata metadata.generate_paths(pipeline) - params = dict( - (i, getattr(pipeline, i, undefined)) + params = { + i: getattr(pipeline, i, undefined) for i in ( "input", "template", @@ -223,7 +223,7 @@ def test_pipeline_completion(self): "right_gw_classif", "right_gw_mesh", ) - ) + } expected = { "input": "!{dataset.input.path}/rawdata/sub-aleksander/ses-m0/anat/sub-aleksander_ses-m0_T1w.nii", diff --git a/capsul/test/test_fake_morphologist.py b/capsul/test/test_fake_morphologist.py index 2b7ae91c2..59d72ce05 100644 --- a/capsul/test/test_fake_morphologist.py +++ b/capsul/test/test_fake_morphologist.py @@ -630,8 +630,8 @@ def test_path_generation(self): pv.show() app.exec_() - params = dict( - (i, getattr(morphologist, i, undefined)) + params = { + i: getattr(morphologist, i, undefined) for i in ( "PrepareSubject_Normalization_Normalization_AimsMIRegister_anatomical_template", "imported_t1mri", @@ -640,7 +640,7 @@ def test_path_generation(self): "left_labelled_graph", "right_labelled_graph", ) - ) + } self.maxDiff = None self.assertEqual(params, expected[normalization]) # for field in morphologist.fields(): diff --git a/capsul/test/test_tiny_morphologist.py b/capsul/test/test_tiny_morphologist.py index e6be192a1..300da5b31 100644 --- a/capsul/test/test_tiny_morphologist.py +++ b/capsul/test/test_tiny_morphologist.py @@ -638,8 +638,8 @@ def test_tiny_path_generation(self): }, ) metadata.generate_paths(tiny_morphologist) - params = dict( - (i, getattr(tiny_morphologist, i, undefined)) + params = { + i: getattr(tiny_morphologist, i, undefined) for i in ( "template", "nobias", @@ -647,7 +647,7 @@ def test_tiny_path_generation(self): "right_hemisphere", "left_hemisphere", ) - ) + } self.maxDiff = 2000 self.assertEqual(params, expected[normalization]) diff --git a/capsul/ui/static/engine.html b/capsul/ui/static/engine.html index 5aad13ec9..0e00e9b09 100644 --- a/capsul/ui/static/engine.html +++ b/capsul/ui/static/engine.html @@ -20,7 +20,7 @@

    Engine

    Executions

    type:%s{}
    value:%s{}
    " - "{1}{0}" + "{}{}
    " - "{1}{0}" + "{}{}
-
+
diff --git a/completion.md b/completion.md
index 71de0a791..6f914ff24 100644
--- a/completion.md
+++ b/completion.md
@@ -9,13 +9,13 @@ The path generation system is designed to be **fully automatic**. Path generatio

 Path generation is done using **metadata and metadata schema**. The metadata contains the values that are used to build various part of the path. For instance, the subject code is often used in path names as well as an extension defining the file type. These two values are included in the metadata. There is no single way to create a path given metadata. There are many possible layouts for path names using various metadata. For instance, BrainVISA has defined a path organisation layout. BIDS is another path organisation layout that is the actual standard for neuroimaging. Capsul can support many different systems; each one beign defined in a `MetadataSchema` class (see below).

-Metadata given to create a path name can have **various origins**. For instance, the extension of the file is most often dependent on the process. An image parameter uses an image extension (such as `.nii`) whereas a mesh parameter uses a mesh format (such as `.gii`). This kind of metatada, called **process metadata**, is **defined globally for a process**, usually by the process developer. On the other hand, metadata such a subject identifier depends on the usage context of the process. This kind of metadata is called **user metadata** because it is **given at runtime** as the result of a user action (manual, entry, database selection, etc.).
+Metadata given to create a path name can have **various origins**. For instance, the extension of the file is most often dependent on the process. An image parameter uses an image extension (such as `.nii`) whereas a mesh parameter uses a mesh format (such as `.gii`). This kind of metadata, called **process metadata**, is **defined globally for a process**, usually by the process developer. On the other hand, metadata such as a subject identifier depends on the usage context of the process. This kind of metadata is called **user metadata** because it is **given at runtime** as the result of a user action (manual, entry, database selection, etc.).

-The path generation system must be able to deal with **several metadata schemas for a single process**. If not it woul mean that all process parameters must be in the same schema. This is the case if all input and output data are following the BIDS standard. However, in many cases a process will have to deal with several metadata schemas. For instance, there could be one schema for input data (i.e. BIDS), another schema for output data (for use cases not covered by BIDS) and other schemas for third party data (for instance template images in SPM software directory). To support several metadata schema, Capsul make a link between a dataset (i.e. a directory) and the metadata schema used throughout that directory using he `Dataset` class.
+The path generation system must be able to deal with **several metadata schemas for a single process**. If not it would mean that all process parameters must be in the same schema. This is the case if all input and output data are following the BIDS standard. However, in many cases a process will have to deal with several metadata schemas. For instance, there could be one schema for input data (i.e. BIDS), another schema for output data (for use cases not covered by BIDS) and other schemas for third party data (for instance template images in SPM software directory). To support several metadata schemas, Capsul makes a link between a dataset (i.e. a directory) and the metadata schema used throughout that directory using the `Dataset` class.

 ### Using path generation

 Path generation is done for all path parameters of a process. The following diagram illustrates the case of a process with one `input` parameter supposed to follow the BIDS schema and one `output` parameter following another schema called BrainVISA. In order to use path generation, the user must create a `ProcessMetadata` instance that allows setting user metadata for all schemas used by the process. These user metadata are combined with process metadata in order to generate values for all path parameters.

 ```mermaid
 graph LR
diff --git a/doc/source/conf.py b/doc/source/conf.py
index 2b3a1a99b..3723aae88 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -9,7 +9,6 @@
 # All configuration values have a default; values that are commented out
 # serve to show the default.
-from __future__ import print_function
 import sys, os
 import time
 import shutil
@@ -126,9 +125,9 @@ def null(*args, **kwargs):
 master_doc = 'index'

 # General information about the project.
-project = u'CAPSUL'
+project = 'CAPSUL'
 release_info['COPYRIGHT_YEAR'] = time.strftime('%Y')
-copyright = u'%(COPYRIGHT_YEAR)s, %(AUTHOR)s <%(AUTHOR_EMAIL)s>' % release_info
+copyright = '%(COPYRIGHT_YEAR)s, %(AUTHOR)s <%(AUTHOR_EMAIL)s>' % release_info

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
@@ -275,8 +274,8 @@ def null(*args, **kwargs):
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
-    ('index', 'capsul.tex', u'CAPSUL Documentation',
-     u'CATI', 'manual'),
+    ('index', 'capsul.tex', 'CAPSUL Documentation',
+     'CATI', 'manual'),
 ]

 autoclass_content = "both"
diff --git a/doc/source/installation.rst b/doc/source/installation.rst
index 8cce2a1ff..403569417 100644
--- a/doc/source/installation.rst
+++ b/doc/source/installation.rst
@@ -5,7 +5,7 @@
 `CAPSUL` installation
 =====================

-.. This tutorial will walk you through the process of intalling CAPSUL.
+.. This tutorial will walk you through the process of installing CAPSUL.
 ..
 .. * :ref:`Install an official release `. This
 ..   is the best approach for users who want a stable version.
diff --git a/doc/source/sphinxext/numpy_ext/docscrape.py b/doc/source/sphinxext/numpy_ext/docscrape.py
index 2b1719db5..9f17ffee8 100644
--- a/doc/source/sphinxext/numpy_ext/docscrape.py
+++ b/doc/source/sphinxext/numpy_ext/docscrape.py
@@ -1,7 +1,6 @@
 """Extract reference documentation from the NumPy source tree.

 """
-from __future__ import division, absolute_import, print_function

 import inspect
 import textwrap
@@ -12,7 +11,7 @@
 import sys


-class Reader(object):
+class Reader:
     """A line-based string reader.
""" @@ -86,7 +85,7 @@ def is_empty(self): return not ''.join(self._str).strip() -class NumpyDocString(object): +class NumpyDocString: def __init__(self, docstring, config={}): docstring = textwrap.dedent(docstring).split('\n') @@ -272,7 +271,7 @@ def _parse_summary(self): while True: summary = self._doc.read_to_next_empty_line() summary_str = " ".join([s.strip() for s in summary]).strip() - if re.compile('^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str): + if re.compile(r'^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str): self['Signature'] = summary_str if not self._is_at_section(): continue @@ -314,7 +313,7 @@ def _str_indent(self, doc, indent=4): def _str_signature(self): if self['Signature']: - return [self['Signature'].replace('*','\*')] + [''] + return [self['Signature'].replace('*',r'\*')] + [''] else: return [''] @@ -336,7 +335,7 @@ def _str_param_list(self, name): out += self._str_header(name) for param,param_type,desc in self[name]: if param_type: - out += ['%s : %s' % (param, param_type)] + out += ['{} : {}'.format(param, param_type)] else: out += [param] out += self._str_indent(desc) @@ -358,9 +357,9 @@ def _str_see_also(self, func_role): last_had_desc = True for func, desc, role in self['See Also']: if role: - link = ':%s:`%s`' % (role, func) + link = ':{}:`{}`'.format(role, func) elif func_role: - link = ':%s:`%s`' % (func_role, func) + link = ':{}:`{}`'.format(func_role, func) else: link = "`%s`_" % func if desc or last_had_desc: @@ -383,7 +382,7 @@ def _str_index(self): for section, references in idx.items(): if section == 'default': continue - out += [' :%s: %s' % (section, ', '.join(references))] + out += [' :{}: {}'.format(section, ', '.join(references))] return out def __str__(self, func_role=''): @@ -439,8 +438,8 @@ def __init__(self, func, role='func', doc=None, config={}): else: argspec = inspect.getargspec(func) argspec = inspect.formatargspec(*argspec) - argspec = argspec.replace('*','\*') - signature = '%s%s' % (func_name, argspec) + argspec = argspec.replace('*',r'\*') + signature = '{}{}'.format(func_name, argspec) except TypeError as e: signature = '%s()' % func_name self['Signature'] = signature @@ -457,7 +456,7 @@ def __str__(self): out = '' func, func_name = self.get_func() - signature = self['Signature'].replace('*', '\*') + signature = self['Signature'].replace('*', r'\*') roles = {'func': 'function', 'meth': 'method'} @@ -465,10 +464,10 @@ def __str__(self): if self._role: if self._role not in roles: print("Warning: invalid role %s" % self._role) - out += '.. %s:: %s\n \n\n' % (roles.get(self._role,''), + out += '.. 
{}:: {}\n \n\n'.format(roles.get(self._role,''), func_name) - out += super(FunctionDoc, self).__str__(func_role=self._role) + out += super().__str__(func_role=self._role) return out @@ -519,7 +518,7 @@ def methods(self): return [name for name,func in inspect.getmembers(self._cls) if ((not name.startswith('_') or name in self.extra_public_methods) - and isinstance(func, collections.Callable))] + and isinstance(func, collections.abc.Callable))] @property def properties(self): diff --git a/doc/source/sphinxext/numpy_ext/docscrape_sphinx.py b/doc/source/sphinxext/numpy_ext/docscrape_sphinx.py index cdc2a37d1..0b89f8475 100644 --- a/doc/source/sphinxext/numpy_ext/docscrape_sphinx.py +++ b/doc/source/sphinxext/numpy_ext/docscrape_sphinx.py @@ -1,5 +1,3 @@ -from __future__ import division, absolute_import, print_function - import sys, re, inspect, textwrap, pydoc import sphinx import collections @@ -53,7 +51,7 @@ def _str_returns(self): out += [''] for param, param_type, desc in self['Returns']: if param_type: - out += self._str_indent(['**%s** : %s' % (param.strip(), + out += self._str_indent(['**{}** : {}'.format(param.strip(), param_type)]) else: out += self._str_indent([param.strip()]) @@ -70,7 +68,7 @@ def _str_param_list(self, name): out += [''] for param, param_type, desc in self[name]: if param_type: - out += self._str_indent(['**%s** : %s' % (param.strip(), + out += self._str_indent(['**{}** : {}'.format(param.strip(), param_type)]) else: out += self._str_indent(['**%s**' % param.strip()]) @@ -116,7 +114,7 @@ def _str_member_list(self, name): if param_obj and (pydoc.getdoc(param_obj) or not desc): # Referenced object has a docstring - autosum += [" %s%s" % (prefix, param)] + autosum += [" {}{}".format(prefix, param)] else: others.append((param, param_type, desc)) @@ -134,7 +132,7 @@ def _str_member_list(self, name): for param, param_type, desc in others: desc = sixu(" ").join(x.strip() for x in desc).strip() if param_type: - desc = "(%s) %s" % (param_type, desc) + desc = "({}) {}".format(param_type, desc) out += [fmt % (param.strip(), desc)] out += [hdr] out += [''] @@ -153,7 +151,7 @@ def _str_section(self, name): def _str_see_also(self, func_role): out = [] if self['See Also']: - see_also = super(SphinxDocString, self)._str_see_also(func_role) + see_also = super()._str_see_also(func_role) out = ['.. seealso::', ''] out += self._str_indent(see_also[2:]) return out @@ -178,7 +176,7 @@ def _str_index(self): elif section == 'refguide': out += [' single: %s' % (', '.join(references))] else: - out += [' %s: %s' % (section, ','.join(references))] + out += [' {}: {}'.format(section, ','.join(references))] return out def _str_references(self): @@ -259,7 +257,7 @@ def get_doc_object(obj, what=None, doc=None, config={}): what = 'class' elif inspect.ismodule(obj): what = 'module' - elif isinstance(obj, collections.Callable): + elif isinstance(obj, collections.abc.Callable): what = 'function' else: what = 'object' diff --git a/doc/source/sphinxext/numpy_ext/numpydoc.py b/doc/source/sphinxext/numpy_ext/numpydoc.py index 2bc2d1e91..0f71168aa 100644 --- a/doc/source/sphinxext/numpy_ext/numpydoc.py +++ b/doc/source/sphinxext/numpy_ext/numpydoc.py @@ -15,7 +15,6 @@ .. 
[1] https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt """ -from __future__ import division, absolute_import, print_function import os, sys, re, pydoc import sphinx @@ -96,7 +95,7 @@ def mangle_signature(app, what, name, obj, options, sig, retann): 'initializes x; see ' in pydoc.getdoc(obj.__init__))): return '', '' - if not (isinstance(obj, collections.Callable) or hasattr(obj, '__argspec_is_invalid_')): return + if not (isinstance(obj, collections.abc.Callable) or hasattr(obj, '__argspec_is_invalid_')): return if not hasattr(obj, '__doc__'): return doc = SphinxDocString(pydoc.getdoc(obj)) @@ -130,11 +129,11 @@ def setup(app, get_doc_object_=get_doc_object): from sphinx.domains.c import CDomain from sphinx.domains.python import PythonDomain -class ManglingDomainBase(object): +class ManglingDomainBase: directive_mangling_map = {} def __init__(self, *a, **kw): - super(ManglingDomainBase, self).__init__(*a, **kw) + super().__init__(*a, **kw) self.wrap_mangling_directives() def wrap_mangling_directives(self): diff --git a/doc/source/status.rst b/doc/source/status.rst index 413f2cfae..347755b88 100644 --- a/doc/source/status.rst +++ b/doc/source/status.rst @@ -19,7 +19,7 @@ Pipeline execution: * sequential execution: |OK| * via Soma-Workflow: |OK| Switch nodes: |OK| -Iteraton nodes: |OK| +Iteration nodes: |OK| StudyConfig: |OK| Workflow transformation: |OK| Attributes and parameters completion: |OK| diff --git a/doc/source/user_guide_tree/advanced_usage.rst b/doc/source/user_guide_tree/advanced_usage.rst index 6a5d28d33..d61e46d44 100644 --- a/doc/source/user_guide_tree/advanced_usage.rst +++ b/doc/source/user_guide_tree/advanced_usage.rst @@ -8,7 +8,7 @@ Parameters completion Completion in Capsul v3 ======================= -This is not a doc yet, I write down thnigs that I seem to understand at the time I read them in the code. +This is not a doc yet, I write down things that I seem to understand at the time I read them in the code. Process parameters completion for filenames is working using attributes (or "metadata" assigned to a process or to its parameters. For instance a given data organization may organize data by study, center, subject, ... These study, center, subject elements can be seen as attributes. @@ -49,7 +49,7 @@ It defines metadata for a given schema name (like ``shared``), and the way to bu If a process uses several datasets with different schemas for different parameters (for instance input, output, shared datasets), several :class:`~capsul.dataset.ProcessSchema` subclasses may be declared for the same process class. -As the used shema is specified in the :class:`~capsul.dataset.ProcessSchema` subclass declaration, several subclasses may be declared for the same process parameters with different schemas. The schema selection will be done, for each dataset, at the time of data selection. +As the used schema is specified in the :class:`~capsul.dataset.ProcessSchema` subclass declaration, several subclasses may be declared for the same process parameters with different schemas. The schema selection will be done, for each dataset, at the time of data selection. * set this in the Capsul config:: diff --git a/doc/source/user_guide_tree/xml_spec.rst b/doc/source/user_guide_tree/xml_spec.rst index 60bddeb00..f0c993127 100644 --- a/doc/source/user_guide_tree/xml_spec.rst +++ b/doc/source/user_guide_tree/xml_spec.rst @@ -192,7 +192,7 @@ define the XML string associated to the function. 
 Here is an example :

     @xml_process('''
-
+
@@ -211,7 +211,7 @@ Example :

     '''
-
+
@@ -230,7 +230,7 @@ Processes examples
+    doc="Method for thresholding."/>
@@ -357,7 +357,7 @@ Attributes:

 - **name**: node name in the pipeline (as in process elements)
 - **switch\_value** (optional): value of the "switch" parameter: name of the active input
-- **enabed** (optional): as in process elements
+- **enabled** (optional): as in process elements

 Children:

@@ -392,7 +392,7 @@ temporary values inside the pipeline if they are left undefined.

 Attributes:

 - **name**: node name in the pipeline (as in process elements)
-- **enabed** (optional): as in process elements
+- **enabled** (optional): as in process elements

 Children:

diff --git a/readme.md b/readme.md
index c76e7afe5..0b0f28af7 100644
--- a/readme.md
+++ b/readme.md
@@ -24,7 +24,7 @@ The simplest is to use a [casa-distro](https://github.com/brainvisa/casa-distro>
 singularity run -B capsul3:/casa/setup casa-dev-5.3-6.sif distro=opensource
 ```

-* change the `bv_maker.cfg` file for a ligher one, which switches to the expected branches:
+* change the `bv_maker.cfg` file for a lighter one, which switches to the expected branches:

 ```
 cat > capsul3/conf/bv_maker.cfg << EOF
 [ source \$CASA_SRC ]
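To make the path-generation description in `completion.md` above concrete, here is a small self-contained sketch of what a metadata schema does. It is plain Python, not the Capsul API: the helper name and the path layout are illustrative assumptions, while the resulting path matches the expected value used in `test_completion.py` in this patch. In Capsul itself a `MetadataSchema` subclass plays this role, and, as the tests above show, user metadata are assigned on a `ProcessMetadata` instance (e.g. `metadata.bids = ...`) before calling `metadata.generate_paths(pipeline)`.

```python
# Illustrative sketch only, not the Capsul API: a metadata schema is a
# recipe composing metadata values into a path. The helper name and the
# layout below are hypothetical; the output matches the expected path
# used in test_completion.py above.
def bids_like_path(folder, sub, ses, suffix, extension):
    """Compose a BIDS-style anatomical path from metadata values."""
    return (f"{folder}/sub-{sub}/ses-{ses}/anat/"
            f"sub-{sub}_ses-{ses}_{suffix}.{extension}")

print(bids_like_path("rawdata", "aleksander", "m0", "T1w", "nii"))
# rawdata/sub-aleksander/ses-m0/anat/sub-aleksander_ses-m0_T1w.nii
```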