Minor fixes, typos #527

Merged: 7 commits, merged on Oct 17, 2024
Changes from all commits
folder.py (2 changes: 1 addition & 1 deletion)
@@ -205,7 +205,7 @@ def precheck_folder_secure(ctx, coll):
found, last_run = get_last_run_time(ctx, coll)
if (not correct_copytovault_start_status(ctx, coll)
or not correct_copytovault_start_location(coll)
- or not misc.last_run_time_acceptable(coll, found, last_run, config.vault_copy_backoff_time)):
+ or not misc.last_run_time_acceptable(found, last_run, config.vault_copy_backoff_time)):
return False

return True
groups_import.py (2 changes: 1 addition & 1 deletion)
@@ -142,7 +142,7 @@ def parse_csv_file(ctx):
# Start processing the actual group data rows
for line in lines:
row_number += 1
- rowdata, error = process_csv_line(line)
+ rowdata, error = process_csv_line(ctx, line)

if error is None:
extracted_data.append(rowdata)
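The only change here is that parse_csv_file now passes ctx through to process_csv_line. For context, a standalone sketch of the row/error accumulation pattern this loop follows; parse_line is a hypothetical stand-in for process_csv_line(ctx, line), which is defined elsewhere and not shown in this diff:

```python
def parse_rows(lines, parse_line):
    """Collect parsed rows and per-row error messages, mirroring parse_csv_file."""
    extracted_data = []
    errors = []
    for row_number, line in enumerate(lines, start=1):
        rowdata, error = parse_line(line)  # stand-in for process_csv_line(ctx, line)
        if error is None:
            extracted_data.append(rowdata)
        else:
            errors.append("Row {}: {}".format(row_number, error))
    return extracted_data, errors
```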
integration_tests.py (8 changes: 4 additions & 4 deletions)
@@ -350,16 +350,16 @@ def _test_folder_secure_func(ctx, func):
"test": lambda ctx: _call_msvc_json_arrayops(ctx, '["a", "b", "c"]', "", "size", 0, 3),
"check": lambda x: x == 3},
{"name": "msvc.json_objops.add_notexist_empty",
"test": lambda ctx: _call_msvc_json_objops(ctx, '', msi.kvpair(ctx, "e", "f"), 'add', 0),
"test": lambda ctx: _call_msvc_json_objops(ctx, '', msi.kvpair(ctx, "e", "f"), 'add', 0),
"check": lambda x: x == '{"e": "f"}'},
{"name": "msvc.json_objops.add_notexist_nonempty",
"test": lambda ctx: _call_msvc_json_objops(ctx, '{"a": "b"}', msi.kvpair(ctx, "e", "f"), 'add', 0),
"test": lambda ctx: _call_msvc_json_objops(ctx, '{"a": "b"}', msi.kvpair(ctx, "e", "f"), 'add', 0),
"check": lambda x: x == '{"a": "b", "e": "f"}'},
{"name": "msvc.json_objops.add_exist_nonempty",
"test": lambda ctx: _call_msvc_json_objops(ctx, '{"a": "b"}', msi.kvpair(ctx, "e", "g"), 'add', 0),
"test": lambda ctx: _call_msvc_json_objops(ctx, '{"a": "b"}', msi.kvpair(ctx, "e", "g"), 'add', 0),
"check": lambda x: x == '{"a": "b", "e": "g"}'},
{"name": "msvc.json_objops.get_exist",
"test": lambda ctx: _call_msvc_json_objops(ctx, '{"a": "b", "c": "d"}', msi.kvpair(ctx, "c", ""), 'get', 1),
"test": lambda ctx: _call_msvc_json_objops(ctx, '{"a": "b", "c": "d"}', msi.kvpair(ctx, "c", ""), 'get', 1),
"check": lambda x: str(x) == "(['c'], ['d'])"},
{"name": "msvc.json_objops.get_notexist",
"test": lambda ctx: _call_msvc_json_objops(ctx, '{"a": "b", "c": "d"}', msi.kvpair(ctx, "e", ""), 'get', 1),
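The changed lines above look identical because the edits are whitespace-only (presumably alignment of the test table). Each entry pairs a "test" callable with a "check" predicate; a minimal, hypothetical runner for a table of this shape could look as follows (the repository's actual integration test harness may differ):

```python
def run_test_table(ctx, tests):
    """Run each {"name", "test", "check"} case and report pass/fail."""
    results = []
    for case in tests:
        try:
            outcome = case["test"](ctx)        # invoke the wrapped microservice call
            passed = bool(case["check"](outcome))
        except Exception:
            passed = False                     # treat unexpected exceptions as failures
        results.append((case["name"], passed))
    return results
```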
meta.py (4 changes: 2 additions & 2 deletions)
@@ -164,7 +164,7 @@ def is_json_metadata_valid(callback,
:param metadata: Pre-parsed JSON object
:param ignore_required: Ignore required fields

- :returns: Boolean indicating if JSON metadata us valid
+ :returns: Boolean indicating if JSON metadata is valid
"""
try:
return len(get_json_metadata_errors(callback,
@@ -271,7 +271,7 @@ def collection_has_cloneable_metadata(callback, coll):

@api.make()
def api_meta_remove(ctx, coll):
"""Remove a collection's metadata JSON, if it exist."""
"""Remove a collection's metadata JSON, if it exists."""
log.write(ctx, 'Remove metadata of coll {}'.format(coll))

try:
schema_transformation.py (11 changes: 5 additions & 6 deletions)
@@ -66,6 +66,7 @@ def api_transform_metadata(ctx, coll, keep_metadata_backup=True):
execute_transformation(ctx, metadata_path, transform, keep_metadata_backup)
else:
return api.Error('no_metadata', 'No metadata file found')
+ return None


def get(ctx, metadata_path, metadata=None):
@@ -197,7 +198,7 @@ def rule_batch_transform_vault_metadata(rule_args, callback, rei):
vault_package = '/'.join(path_parts[:5])
metadata_path = meta.get_latest_vault_metadata_path(callback, vault_package)
log.write(callback, "[METADATA] Checking whether metadata needs to be transformed: " + metadata_path)
- if metadata_path != '':
+ if metadata_path != '':
transform = get(callback, metadata_path)
if transform is not None:
log.write(callback, "[METADATA] Executing transformation for: " + metadata_path)
@@ -376,9 +377,7 @@ def html(f):
re.split('\n{2,}', f.__doc__)))

# Remove docstring.
- description = re.sub('((:param).*)|((:returns:).*)', ' ', description)
-
- return description
+ return re.sub('((:param).*)|((:returns:).*)', ' ', description)


@rule.make(inputs=[], outputs=[0])
@@ -394,8 +393,8 @@ def rule_batch_vault_metadata_schema_report(ctx):
the metadata matches the JSON schema). match_schema only has a meaning if a metadata schema
could be found.
"""
- results = dict()
- schema_cache = dict()
+ results = {}
+ schema_cache = {}

# Find all vault collections
iter = genquery.row_iterator(
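The html() change returns the cleaned description directly instead of assigning it first; the regex behaviour is unchanged. A small standalone illustration of what that substitution does (the docstring below is invented for demonstration):

```python
import re

description = (
    "Transform a metadata file to the active schema.\n"
    ":param coll: Collection with the metadata file\n"
    ":returns: API status"
)
# Blank out Sphinx-style field lines, as html() does before rendering.
cleaned = re.sub('((:param).*)|((:returns:).*)', ' ', description)
print(repr(cleaned))
# 'Transform a metadata file to the active schema.\n \n '
```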
tests/conftest.py (4 changes: 2 additions & 2 deletions)
@@ -228,7 +228,7 @@ def api_request(user, request, data, timeout=10):
# Retrieve user cookies.
csrf, session = user_cookies[user]

- # Disable unsecure connection warning.
+ # Disable insecure connection warning.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Replace zone name with zone name from environment configuration.
@@ -286,7 +286,7 @@ def post_form_data(user, request, files):
# Retrieve user cookies.
csrf, session = user_cookies[user]

- # Disable unsecure connection warning.
+ # Disable insecure connection warning.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Make POST request.
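Both comment fixes concern the line that silences urllib3's InsecureRequestWarning, which these test fixtures disable because requests are made without certificate verification. A sketch of that pattern in isolation; the URL and payload are placeholders, not the project's actual endpoints:

```python
import requests
import urllib3

# Silence the warning that urllib3 emits for unverified HTTPS requests.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

response = requests.post(
    "https://portal.example.test/api/ping",  # placeholder URL
    data={"data": "{}"},
    verify=False,  # acceptable in a local test environment only
    timeout=10,
)
print(response.status_code)
```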
tools/arb-update-resources.py (10 changes: 5 additions & 5 deletions)
@@ -53,7 +53,7 @@ def parse_args():

def parse_cs_values(input):
"""Parses a comma-separated list of key:value pairs as a dict."""
- result = dict()
+ result = {}
for kv_pair in input.split(","):
if kv_pair == "":
continue
@@ -96,7 +96,7 @@ def setup_session(irods_environment_config, ca_file="/etc/pki/tls/certs/chain.cr
'encryption_num_hash_rounds': 16,
'encryption_salt_size': 8,
'ssl_context': ssl_context}
- settings = dict()
+ settings = {}
settings.update(irods_environment_config)
settings.update(ssl_settings)
settings["password"] = password
@@ -160,8 +160,8 @@ def call_rule(session, rulename, params, number_outputs, rule_engine='irods_rule
output=output_params,
**re_config)

- outArray = myrule.execute()
- buf = outArray.MsParam_PI[0].inOutStruct.stdoutBuf.buf.decode(
+ out_array = myrule.execute()
+ buf = out_array.MsParam_PI[0].inOutStruct.stdoutBuf.buf.decode(
'utf-8').splitlines()

return buf[:number_outputs]
@@ -208,7 +208,7 @@ def main():
print("Updating misc resources ...")
call_rule_update_misc(session)
except NetworkException:
print("Could not connect to iRODS sever ...")
print("Could not connect to iRODS server ...")


if __name__ == '__main__':
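For reference, parse_cs_values (only partially visible above) turns a comma-separated list of key:value pairs into a dict. A hedged reconstruction for illustration; the splitting of each pair is not shown in this diff, so details such as error handling may differ:

```python
def parse_cs_values(input):
    """Parses a comma-separated list of key:value pairs as a dict."""
    result = {}
    for kv_pair in input.split(","):
        if kv_pair == "":
            continue
        key, value = kv_pair.split(":", 1)  # assumed split; not visible in the diff
        result[key] = value
    return result


print(parse_cs_values("resc1:100,resc2:200"))  # {'resc1': '100', 'resc2': '200'}
```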
unit-tests/test_util_misc.py (8 changes: 4 additions & 4 deletions)
@@ -18,23 +18,23 @@ class UtilMiscTest(TestCase):

def test_last_run_time_acceptable(self):
"""Test the last run time for copy to vault"""
- # No last run time (job hasn't be tried before)
+ # No last run time (job hasn't been tried before)
found = False
last_run = 1
self.assertEqual(last_run_time_acceptable("b", found, last_run, 300), True)
self.assertEqual(last_run_time_acceptable(found, last_run, 300), True)

# Last run time greater than the backoff, so can run
now = int(time.time())
found = True
copy_backoff_time = 300
last_run = now - copy_backoff_time - 1
self.assertEqual(last_run_time_acceptable("b", found, last_run, copy_backoff_time), True)
self.assertEqual(last_run_time_acceptable(found, last_run, copy_backoff_time), True)

# Last run time more recent than the backoff, so should not run
found = True
copy_backoff_time = 300
last_run = now
self.assertEqual(last_run_time_acceptable("b", found, int(time.time()), copy_backoff_time), False)
self.assertEqual(last_run_time_acceptable(found, int(time.time()), copy_backoff_time), False)

def test_human_readable_size(self):
output = human_readable_size(0)
util/misc.py (4 changes: 2 additions & 2 deletions)
@@ -9,7 +9,7 @@
from collections import OrderedDict


- def last_run_time_acceptable(coll, found, last_run, config_backoff_time):
+ def last_run_time_acceptable(found, last_run, config_backoff_time):
"""Return whether the last run time is acceptable to continue with task."""
now = int(time.time())

@@ -48,5 +48,5 @@ def remove_empty_objects(d):
# Clean lists by filtering out empty objects.
return [remove_empty_objects(item) for item in d if remove_empty_objects(item) not in (None, '', {}, [])]
else:
- # Return the value abecause it is not a dict or list.
+ # Return the value because it is not a dict or list.
return d
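The signature change removes the unused coll parameter; the call site in folder.py and the unit tests above were updated to match. Based on the docstring and the expectations in unit-tests/test_util_misc.py, the function behaves roughly as sketched below (the exact comparison operator is an assumption):

```python
import time


def last_run_time_acceptable(found, last_run, config_backoff_time):
    """Return whether the last run time is acceptable to continue with task."""
    now = int(time.time())
    if not found:
        return True  # no previous run recorded, so the task may start
    return now - last_run >= config_backoff_time  # backoff period has elapsed
```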