Commit
minor changes after testing requests.get
claravox committed Oct 1, 2024
1 parent 28bafbe commit 6f23319
Showing 3 changed files with 12 additions and 5 deletions.
meta.py (2 changes: 1 addition & 1 deletion)
@@ -165,7 +165,7 @@ def is_json_metadata_valid(callback,
     :param metadata: Pre-parsed JSON object
     :param ignore_required: Ignore required fields
 
-    :returns: Boolean indicating if JSON metadata us valid
+    :returns: Boolean indicating if JSON metadata is valid
     """
     try:
         return len(get_json_metadata_errors(callback,
schema_transformations.py (2 changes: 1 addition & 1 deletion)
@@ -168,7 +168,7 @@ def _default2_default3(ctx, m):
 # Check for incorrect ORCID format.
 if not re.search("^(https://orcid.org/)[0-9]{4}-[0-9]{4}-[0-9]{4}-[0-9]{3}[0-9X]$", person_identifier.get('Name_Identifier', None)):
     corrected_orcid = correctify_orcid(person_identifier['Name_Identifier'])
-    # Only it an actual correction took place change the value and mark this data as 'changed'.
+    # Only if an actual correction took place change the value and mark this data as 'changed'.
     if corrected_orcid is None:
         log.write(ctx, "Warning: could not correct ORCID %s during schema transformation. It needs to be fixed manually."
                   % (person_identifier['Name_Identifier']))
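For readers of the hunk above: correctify_orcid is defined elsewhere in schema_transformations.py and its implementation is not part of this diff. Judging only from the call site shown, it returns a corrected ORCID URL or None when no correction is possible. A hypothetical stand-in with that contract, reusing the format check from the surrounding code, could look like the sketch below; the helper name and normalization steps are illustrative assumptions, not the repository's actual logic.

import re

# Same ORCID format check as used in the hunk above.
ORCID_FORMAT = "^(https://orcid.org/)[0-9]{4}-[0-9]{4}-[0-9]{4}-[0-9]{3}[0-9X]$"


def correctify_orcid_sketch(name_identifier):
    # Illustrative only: trim whitespace, ensure the canonical URL prefix,
    # then accept the value only if it matches the expected ORCID format.
    candidate = name_identifier.strip()
    if not candidate.startswith("https://orcid.org/"):
        candidate = "https://orcid.org/" + candidate
    return candidate if re.search(ORCID_FORMAT, candidate) else None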
troubleshoot_data.py (13 changes: 10 additions & 3 deletions)
@@ -11,6 +11,7 @@
 
 import genquery
 import requests
+import urllib3
 
 import datacite
 from meta import vault_metadata_matches_schema
@@ -225,6 +226,8 @@ def compare_local_remote_landingpage(ctx, file_path, url, offline):
     if offline:
         return len(local_data) > 0
 
+    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+
     try:
         response = requests.get(url, verify=False)
     except requests.exceptions.ConnectionError as e:
@@ -236,20 +239,22 @@ def compare_local_remote_landingpage(ctx, file_path, url, offline):
         log.write_stdout(ctx, "compare_local_remote_landingpage: Error {} when connecting to <{}>.".format(response.status_code, url))
         return False
 
+    # Set encoding to utf-8 for the response text (otherwise will not match local_data)
+    response.encoding = 'utf-8'
+
     if local_data == response.text:
         return True
 
     log.write_stdout(ctx, "compare_local_remote_landingpage: File contents at irods path <{}> and remote landing page <{}> do not match.".format(file_path, url))
     return False
 
 
-def check_landingpage(ctx, data_package, publication_config, offline):
+def check_landingpage(ctx, data_package, offline):
     """
     Checks the integrity of landing page by comparing the contents
 
     :param ctx: Combined type of a callback and rei struct
     :param data_package: String representing the data package collection path.
-    :param publication_config: Dictionary of publication config
     :param offline: Whether to skip any checks that require external server access
 
     :returns: A tuple containing boolean results of checking
@@ -286,6 +291,8 @@ def check_combi_json(ctx, data_package, publication_config, offline):
     if offline:
         return True
 
+    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+
     # Get the version doi
     version_doi = ''
     try:
@@ -399,7 +406,7 @@ def rule_batch_troubleshoot_published_data_packages(ctx, requested_package, log_
         no_missing_avus_check, no_unexpected_avus_check = check_data_package_system_avus(ctx, data_package)
         version_doi_check, base_doi_check = check_datacite_doi_registration(ctx, data_package)
         publication_config = get_publication_config(ctx)
-        landing_page_check = check_landingpage(ctx, data_package, publication_config, offline)
+        landing_page_check = check_landingpage(ctx, data_package, offline)
         combi_json_check = check_combi_json(ctx, data_package, publication_config, offline)
 
         # Collect results for current data package
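For context on the requests.get changes above, here is a minimal standalone sketch of the pattern the commit settles on in compare_local_remote_landingpage: disable certificate verification, silence the InsecureRequestWarning that verify=False would otherwise trigger, and force UTF-8 decoding before comparing the response body with the locally stored landing page. The function name and parameters below are illustrative, not part of the repository.

import io

import requests
import urllib3


def landing_page_matches_sketch(local_path, url):
    # Read the locally stored landing page as UTF-8 text.
    with io.open(local_path, encoding='utf-8') as f:
        local_data = f.read()

    # Suppress the warning urllib3 emits for unverified HTTPS requests.
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    try:
        response = requests.get(url, verify=False)
    except requests.exceptions.ConnectionError:
        return False

    if response.status_code != 200:
        return False

    # Force UTF-8 so response.text matches the UTF-8 file contents exactly;
    # otherwise requests may guess a different encoding from the HTTP headers.
    response.encoding = 'utf-8'
    return local_data == response.text

Note that verify=False trades away TLS certificate checking for the fetch; the explicit disable_warnings call only hides the warning that choice produces, it does not change the security trade-off.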
