diff --git a/folder.py b/folder.py
index 6cb88a9dd..519069e18 100644
--- a/folder.py
+++ b/folder.py
@@ -205,7 +205,7 @@ def precheck_folder_secure(ctx, coll):
     found, last_run = get_last_run_time(ctx, coll)
     if (not correct_copytovault_start_status(ctx, coll)
             or not correct_copytovault_start_location(coll)
-            or not misc.last_run_time_acceptable(coll, found, last_run, config.vault_copy_backoff_time)):
+            or not misc.last_run_time_acceptable(found, last_run, config.vault_copy_backoff_time)):
         return False
 
     return True
diff --git a/groups_import.py b/groups_import.py
index 34d71d8b1..c1c876d68 100644
--- a/groups_import.py
+++ b/groups_import.py
@@ -142,7 +142,7 @@ def parse_csv_file(ctx):
     # Start processing the actual group data rows
     for line in lines:
         row_number += 1
-        rowdata, error = process_csv_line(line)
+        rowdata, error = process_csv_line(ctx, line)
 
         if error is None:
             extracted_data.append(rowdata)
diff --git a/integration_tests.py b/integration_tests.py
index abfa859c9..f345b5c58 100644
--- a/integration_tests.py
+++ b/integration_tests.py
@@ -350,16 +350,16 @@ def _test_folder_secure_func(ctx, func):
      "test": lambda ctx: _call_msvc_json_arrayops(ctx, '["a", "b", "c"]', "", "size", 0, 3),
      "check": lambda x: x == 3},
     {"name": "msvc.json_objops.add_notexist_empty",
-     "test": lambda ctx: _call_msvc_json_objops(ctx, '', msi.kvpair(ctx, "e", "f"), 'add', 0), 
+     "test": lambda ctx: _call_msvc_json_objops(ctx, '', msi.kvpair(ctx, "e", "f"), 'add', 0),
      "check": lambda x: x == '{"e": "f"}'},
     {"name": "msvc.json_objops.add_notexist_nonempty",
-     "test": lambda ctx: _call_msvc_json_objops(ctx, '{"a": "b"}', msi.kvpair(ctx, "e", "f"), 'add', 0), 
+     "test": lambda ctx: _call_msvc_json_objops(ctx, '{"a": "b"}', msi.kvpair(ctx, "e", "f"), 'add', 0),
      "check": lambda x: x == '{"a": "b", "e": "f"}'},
     {"name": "msvc.json_objops.add_exist_nonempty",
-     "test": lambda ctx: _call_msvc_json_objops(ctx, '{"a": "b"}', msi.kvpair(ctx, "e", "g"), 'add', 0), 
+     "test": lambda ctx: _call_msvc_json_objops(ctx, '{"a": "b"}', msi.kvpair(ctx, "e", "g"), 'add', 0),
      "check": lambda x: x == '{"a": "b", "e": "g"}'},
     {"name": "msvc.json_objops.get_exist",
-     "test": lambda ctx: _call_msvc_json_objops(ctx, '{"a": "b", "c": "d"}', msi.kvpair(ctx, "c", ""), 'get', 1), 
+     "test": lambda ctx: _call_msvc_json_objops(ctx, '{"a": "b", "c": "d"}', msi.kvpair(ctx, "c", ""), 'get', 1),
      "check": lambda x: str(x) == "(['c'], ['d'])"},
     {"name": "msvc.json_objops.get_notexist",
      "test": lambda ctx: _call_msvc_json_objops(ctx, '{"a": "b", "c": "d"}', msi.kvpair(ctx, "e", ""), 'get', 1),
diff --git a/meta.py b/meta.py
index 829f60dfa..887401958 100644
--- a/meta.py
+++ b/meta.py
@@ -164,7 +164,7 @@ def is_json_metadata_valid(callback,
     :param metadata: Pre-parsed JSON object
     :param ignore_required: Ignore required fields
 
-    :returns: Boolean indicating if JSON metadata us valid
+    :returns: Boolean indicating if JSON metadata is valid
     """
     try:
         return len(get_json_metadata_errors(callback,
@@ -271,7 +271,7 @@ def collection_has_cloneable_metadata(callback, coll):
 
 @api.make()
 def api_meta_remove(ctx, coll):
-    """Remove a collection's metadata JSON, if it exist."""
+    """Remove a collection's metadata JSON, if it exists."""
     log.write(ctx, 'Remove metadata of coll {}'.format(coll))
 
     try:
diff --git a/schema_transformation.py b/schema_transformation.py
index 35bc35dd9..d7f7cc947 100644
--- a/schema_transformation.py
+++ b/schema_transformation.py
@@ -66,6 +66,7 @@ def api_transform_metadata(ctx, coll, keep_metadata_backup=True):
         execute_transformation(ctx, metadata_path, transform, keep_metadata_backup)
     else:
         return api.Error('no_metadata', 'No metadata file found')
+    return None
 
 
 def get(ctx, metadata_path, metadata=None):
@@ -197,7 +198,7 @@ def rule_batch_transform_vault_metadata(rule_args, callback, rei):
             vault_package = '/'.join(path_parts[:5])
             metadata_path = meta.get_latest_vault_metadata_path(callback, vault_package)
             log.write(callback, "[METADATA] Checking whether metadata needs to be transformed: " + metadata_path)
-            if metadata_path != '': 
+            if metadata_path != '':
                 transform = get(callback, metadata_path)
                 if transform is not None:
                     log.write(callback, "[METADATA] Executing transformation for: " + metadata_path)
@@ -376,9 +377,7 @@ def html(f):
                                 re.split('\n{2,}', f.__doc__)))
 
     # Remove docstring.
-    description = re.sub('((:param).*)|((:returns:).*)', ' ', description)
-
-    return description
+    return re.sub('((:param).*)|((:returns:).*)', ' ', description)
 
 
 @rule.make(inputs=[], outputs=[0])
@@ -394,8 +393,8 @@ def rule_batch_vault_metadata_schema_report(ctx):
     the metadata matches the JSON schema). match_schema only has a meaning
     if a metadata schema could be found.
     """
-    results = dict()
-    schema_cache = dict()
+    results = {}
+    schema_cache = {}
 
     # Find all vault collections
     iter = genquery.row_iterator(
diff --git a/tests/conftest.py b/tests/conftest.py
index 70a2520b6..a7c983b15 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -228,7 +228,7 @@ def api_request(user, request, data, timeout=10):
     # Retrieve user cookies.
     csrf, session = user_cookies[user]
 
-    # Disable unsecure connection warning.
+    # Disable insecure connection warning.
     urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
 
     # Replace zone name with zone name from environment configuration.
@@ -286,7 +286,7 @@ def post_form_data(user, request, files):
     # Retrieve user cookies.
     csrf, session = user_cookies[user]
 
-    # Disable unsecure connection warning.
+    # Disable insecure connection warning.
     urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
 
     # Make POST request.
diff --git a/tools/arb-update-resources.py b/tools/arb-update-resources.py
index 20874af8f..48c93ecf7 100644
--- a/tools/arb-update-resources.py
+++ b/tools/arb-update-resources.py
@@ -53,7 +53,7 @@ def parse_args():
 
 def parse_cs_values(input):
     """Parses a comma-separated list of key:value pairs as a dict."""
-    result = dict()
+    result = {}
     for kv_pair in input.split(","):
         if kv_pair == "":
             continue
@@ -96,7 +96,7 @@ def setup_session(irods_environment_config, ca_file="/etc/pki/tls/certs/chain.cr
                     'encryption_num_hash_rounds': 16,
                     'encryption_salt_size': 8,
                     'ssl_context': ssl_context}
-    settings = dict()
+    settings = {}
    settings.update(irods_environment_config)
    settings.update(ssl_settings)
    settings["password"] = password
@@ -160,8 +160,8 @@ def call_rule(session, rulename, params, number_outputs, rule_engine='irods_rule
        output=output_params,
        **re_config)

-    outArray = myrule.execute()
-    buf = outArray.MsParam_PI[0].inOutStruct.stdoutBuf.buf.decode(
+    out_array = myrule.execute()
+    buf = out_array.MsParam_PI[0].inOutStruct.stdoutBuf.buf.decode(
        'utf-8').splitlines()

    return buf[:number_outputs]
@@ -208,7 +208,7 @@ def main():
        print("Updating misc resources ...")
        call_rule_update_misc(session)
    except NetworkException:
-        print("Could not connect to iRODS sever ...")
+        print("Could not connect to iRODS server ...")


if __name__ == '__main__':
diff --git a/unit-tests/test_util_misc.py b/unit-tests/test_util_misc.py
index cddbe5fcd..aa03ef2c2 100644
--- a/unit-tests/test_util_misc.py
+++ b/unit-tests/test_util_misc.py
@@ -18,23 +18,23 @@ class UtilMiscTest(TestCase):
 
     def test_last_run_time_acceptable(self):
        """Test the last run time for copy to vault"""
-        # No last run time (job hasn't be tried before)
+        # No last run time (job hasn't been tried before)
        found = False
        last_run = 1
-        self.assertEqual(last_run_time_acceptable("b", found, last_run, 300), True)
+        self.assertEqual(last_run_time_acceptable(found, last_run, 300), True)

        # Last run time greater than the backoff, so can run
        now = int(time.time())
        found = True
        copy_backoff_time = 300
        last_run = now - copy_backoff_time - 1
-        self.assertEqual(last_run_time_acceptable("b", found, last_run, copy_backoff_time), True)
+        self.assertEqual(last_run_time_acceptable(found, last_run, copy_backoff_time), True)

        # Last run time more recent than the backoff, so should not run
        found = True
        copy_backoff_time = 300
        last_run = now
-        self.assertEqual(last_run_time_acceptable("b", found, int(time.time()), copy_backoff_time), False)
+        self.assertEqual(last_run_time_acceptable(found, int(time.time()), copy_backoff_time), False)

    def test_human_readable_size(self):
        output = human_readable_size(0)
diff --git a/util/misc.py b/util/misc.py
index 12df2a0af..a7d1c4471 100644
--- a/util/misc.py
+++ b/util/misc.py
@@ -9,7 +9,7 @@
 from collections import OrderedDict
 
 
-def last_run_time_acceptable(coll, found, last_run, config_backoff_time):
+def last_run_time_acceptable(found, last_run, config_backoff_time):
     """Return whether the last run time is acceptable to continue with task."""
     now = int(time.time())
 
@@ -48,5 +48,5 @@ def remove_empty_objects(d):
         # Clean lists by filtering out empty objects.
         return [remove_empty_objects(item) for item in d if remove_empty_objects(item) not in (None, '', {}, [])]
     else:
-        # Return the value abecause it is not a dict or list.
+        # Return the value because it is not a dict or list.
         return d
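
Reviewer note: the standalone sketch below illustrates the new three-argument call pattern of misc.last_run_time_acceptable(found, last_run, config_backoff_time) as exercised by the updated tests in unit-tests/test_util_misc.py. The helper name _acceptable, its body, and the >= comparison are assumptions made for illustration; they are not copied from the implementation in util/misc.py.

# Illustrative sketch only (assumption): approximates the backoff semantics
# checked by the updated unit tests; not the actual util/misc.py code.
import time


def _acceptable(found, last_run, backoff_time):
    """Return True when no previous run is recorded or the backoff has expired."""
    now = int(time.time())
    return (not found) or (now - last_run >= backoff_time)


if __name__ == '__main__':
    now = int(time.time())
    assert _acceptable(False, 1, 300)            # no previous run: may run
    assert _acceptable(True, now - 301, 300)     # backoff expired: may run
    assert not _acceptable(True, now, 300)       # within backoff: must wait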