remove all but one pkg_resources import
ocefpaf committed Oct 4, 2023
1 parent 8e5687c commit af3a188
Showing 8 changed files with 40 additions and 47 deletions.
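The change that repeats across every file below is the swap from the deprecated, setuptools-based `pkg_resources.resource_filename` to the standard-library `importlib.resources.files` (available since Python 3.9). The key difference is that the old call returned a `str`, while the new one returns a path-like object, which is what motivates the `str()` and `Path()` coercions further down. A minimal before/after sketch, assuming `compliance_checker` is installed as a normal directory package:

```python
import importlib.resources

# old (deprecated, requires setuptools at runtime):
#   from pkg_resources import resource_filename
#   path = resource_filename("compliance_checker", "data/seanames.csv")

# new (stdlib): returns a Traversable, which is an ordinary pathlib.Path
# for on-disk installs
path = importlib.resources.files("compliance_checker") / "data/seanames.csv"
print(path.is_file())
```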
8 changes: 4 additions & 4 deletions compliance_checker/cf/util.py
@@ -1,3 +1,4 @@
import importlib.resources
import itertools
import os
import sys
@@ -7,7 +8,6 @@
from cf_units import Unit
from lxml import etree
from netCDF4 import Dataset
from pkg_resources import resource_filename

# copied from paegan
# paegan may depend on these later
@@ -284,9 +284,9 @@ def download_cf_standard_name_table(version, location=None):
if (
location is None
): # This case occurs when updating the packaged version from command line
location = resource_filename(
"compliance_checker",
"data/cf-standard-name-table.xml",
location = (
importlib.resources.files("compliance_checker")
/ "data/cf-standard-name-table.xml"
)

if version == "latest":
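Note that `location` is now a path object rather than a string. For the common on-disk install it behaves like a `pathlib.Path`; if zip-safety ever mattered, a hedged alternative (not part of this commit) would be to materialize the resource with `importlib.resources.as_file`:

```python
import importlib.resources

ref = (
    importlib.resources.files("compliance_checker")
    / "data/cf-standard-name-table.xml"
)
with importlib.resources.as_file(ref) as location:
    # within this block, `location` is a concrete pathlib.Path,
    # extracted to a temporary file if the package were zipped
    print(location.name)
```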
4 changes: 2 additions & 2 deletions compliance_checker/cfutil.py
@@ -3,13 +3,13 @@
compliance_checker/cfutil.py
"""
import csv
import importlib.resources
import re
import warnings
from collections import defaultdict
from functools import lru_cache, partial

from cf_units import Unit
from pkg_resources import resource_filename

_UNITLESS_DB = None
_SEA_NAMES = None
@@ -128,7 +128,7 @@ def get_sea_names():
if _SEA_NAMES is None:
buf = {}
with open(
resource_filename("compliance_checker", "data/seanames.csv"),
importlib.resources.files("compliance_checker") / "data/seanames.csv",
) as f:
reader = csv.reader(f)
for code, sea_name in reader:
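The built-in `open()` works here because, for a regular install, `files(...) / "data/seanames.csv"` is an ordinary `pathlib.Path`. A hedged, zip-safe variant (not what the commit uses) would call the Traversable's own `open()` method instead:

```python
import csv
import importlib.resources

resource = importlib.resources.files("compliance_checker") / "data/seanames.csv"
with resource.open() as f:  # Traversable.open() also works for zipped packages
    sea_names = {code: sea_name for code, sea_name in csv.reader(f)}
```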
1 change: 1 addition & 0 deletions compliance_checker/protocols/netcdf.py
@@ -17,6 +17,7 @@ def is_netcdf(url):
:param str url: Location of file on the file system
"""
# Try an obvious exclusion of remote resources
url = str(url)
if url.startswith("http"):
return False

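The `str()` coercion is needed because callers can now pass a `pathlib.Path` (or other path object) produced by `importlib.resources.files`, and those objects have no `startswith` method. A tiny illustration:

```python
from pathlib import Path

p = Path("tests/data/2dim-grid.nc")
# p.startswith("http")            -> AttributeError on a Path object
print(str(p).startswith("http"))  # False, so local paths fall through as before
```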
9 changes: 3 additions & 6 deletions compliance_checker/suite.py
@@ -765,12 +765,9 @@ def generate_dataset(self, cdl_path):
:param str cdl_path: Absolute path to cdl file that is used to generate netCDF file
"""
if (
".cdl" in cdl_path
): # it's possible the filename doesn't have the .cdl extension
ds_str = cdl_path.replace(".cdl", ".nc")
else:
ds_str = cdl_path + ".nc"
if isinstance(cdl_path, str):
cdl_path = Path(cdl_path)
ds_str = cdl_path.with_suffix(".nc")

# generate netCDF-4 file
iostat = subprocess.run(
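`Path.with_suffix` covers both of the old branches: it swaps an existing `.cdl` extension and appends `.nc` when the name has no extension at all, and unlike `str.replace` it only touches the final suffix. A quick sketch:

```python
from pathlib import Path

print(Path("tests/data/test_cdl.cdl").with_suffix(".nc"))  # tests/data/test_cdl.nc
print(Path("tests/data/test_cdl").with_suffix(".nc"))      # tests/data/test_cdl.nc
```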
5 changes: 3 additions & 2 deletions compliance_checker/tests/__init__.py
@@ -1,4 +1,5 @@
import unittest
from pathlib import Path

from netCDF4 import Dataset

@@ -25,8 +26,8 @@ def load_dataset(self, nc_dataset):
"""
Return a loaded NC Dataset for the given path
"""
if not isinstance(nc_dataset, str):
raise ValueError("nc_dataset should be a string")
if not isinstance(nc_dataset, (str, Path)):
raise ValueError("nc_dataset should be a valid path")

nc_dataset = Dataset(nc_dataset, "r")
self.addCleanup(nc_dataset.close)
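Accepting `Path` alongside `str` matches the new path-object fixtures. A slightly broader check (a sketch, not what the commit does) could test against `os.PathLike`, which covers `pathlib.Path` and other filesystem path types:

```python
import os

def _validate(nc_dataset):
    # str or anything implementing __fspath__ (e.g. pathlib.Path)
    if not isinstance(nc_dataset, (str, os.PathLike)):
        raise ValueError("nc_dataset should be a valid path")
    return nc_dataset
```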
7 changes: 4 additions & 3 deletions compliance_checker/tests/conftest.py
@@ -1,11 +1,10 @@
import importlib.resources
import os
import subprocess
from itertools import chain
from pathlib import Path

import pytest
from netCDF4 import Dataset
from pkg_resources import resource_filename

from compliance_checker.cf import util
from compliance_checker.suite import CheckSuite
@@ -27,7 +26,9 @@ def static_files(cdl_stem):
Returns the Path to a valid nc dataset\n
replaces the old STATIC_FILES dict
"""
datadir = Path(resource_filename("compliance_checker", "tests/data")).resolve()
datadir = (
importlib.resources.files("compliance_checker").joinpath("tests/data").resolve()
)
assert datadir.exists(), f"{datadir} not found"

cdl_paths = glob_down(datadir, f"{cdl_stem}.cdl", 3)
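`joinpath()` is part of the Traversable API, but `.resolve()` is a `pathlib.Path` method, so this fixture relies on the package living on disk as a plain directory (true for the test suite). The equivalent lookup, spelled out under that assumption:

```python
import importlib.resources

# For a regular directory install, files() yields a pathlib.Path, so
# Path-only methods such as resolve() are available on the result.
datadir = importlib.resources.files("compliance_checker").joinpath("tests/data").resolve()
print(datadir, datadir.exists())
```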
10 changes: 4 additions & 6 deletions compliance_checker/tests/resources.py
@@ -1,16 +1,14 @@
import os
import importlib.resources
import subprocess

from pkg_resources import resource_filename


def get_filename(path):
"""
Returns the path to a valid dataset
"""
filename = resource_filename("compliance_checker", path)
nc_path = filename.replace(".cdl", ".nc")
if not os.path.exists(nc_path):
filename = importlib.resources.files("compliance_checker") / path
nc_path = filename.with_suffix(".nc")
if not nc_path.exists():
generate_dataset(filename, nc_path)
return nc_path

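Because `files(...) / path` yields a path object, `get_filename` now returns a `pathlib.Path` rather than a `str`, which is what the `str(url)` coercion in `protocols/netcdf.py` above accommodates. A hedged usage sketch (assumes `ncgen` is available, since the helper generates the netCDF file when it is missing):

```python
from compliance_checker.tests.resources import get_filename

nc_path = get_filename("tests/data/test_cdl.cdl")
print(type(nc_path).__name__)  # PosixPath / WindowsPath, not str
print(nc_path.suffix)          # .nc
```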
43 changes: 19 additions & 24 deletions compliance_checker/tests/test_suite.py
@@ -1,35 +1,30 @@
import importlib.resources
import os
import unittest
from pathlib import Path

import numpy as np
from pkg_resources import resource_filename

from compliance_checker.acdd import ACDDBaseCheck
from compliance_checker.base import BaseCheck, GenericFile, Result
from compliance_checker.suite import CheckSuite

static_files = {
"2dim": resource_filename("compliance_checker", "tests/data/2dim-grid.nc"),
"bad_region": resource_filename("compliance_checker", "tests/data/bad_region.nc"),
"bad_data_type": resource_filename(
"compliance_checker",
"tests/data/bad_data_type.nc",
),
"test_cdl": resource_filename("compliance_checker", "tests/data/test_cdl.cdl"),
"test_cdl_nc": resource_filename(
"compliance_checker",
"tests/data/test_cdl_nc_file.nc",
),
"empty": resource_filename("compliance_checker", "tests/data/non-comp/empty.file"),
"ru07": resource_filename(
"compliance_checker",
"tests/data/ru07-20130824T170228_rt0.nc",
),
"netCDF4": resource_filename(
"compliance_checker",
"tests/data/test_cdl_nc4_file.cdl",
),
"2dim": importlib.resources.files("compliance_checker") / "tests/data/2dim-grid.nc",
"bad_region": importlib.resources.files("compliance_checker")
/ "tests/data/bad_region.nc",
"bad_data_type": importlib.resources.files("compliance_checker")
/ "tests/data/bad_data_type.nc",
"test_cdl": importlib.resources.files("compliance_checker")
/ "tests/data/test_cdl.cdl",
"test_cdl_nc": importlib.resources.files("compliance_checker")
/ "tests/data/test_cdl_nc_file.nc",
"empty": importlib.resources.files("compliance_checker")
/ "tests/data/non-comp/empty.file",
"ru07": importlib.resources.files("compliance_checker")
/ "tests/data/ru07-20130824T170228_rt0.nc",
"netCDF4": importlib.resources.files("compliance_checker")
/ "tests/data/test_cdl_nc4_file.cdl",
}


@@ -95,9 +90,9 @@ def test_generate_dataset_netCDF4(self):
# create netCDF4 file
ds_name = self.cs.generate_dataset(static_files["netCDF4"])
# check if the correct name is returned
assert ds_name == static_files["netCDF4"].replace(".cdl", ".nc")
assert ds_name == static_files["netCDF4"].with_suffix(".nc")
# check if netCDF4 file was created
assert os.path.isfile(static_files["netCDF4"].replace(".cdl", ".nc"))
assert os.path.isfile(static_files["netCDF4"].with_suffix(".nc"))

def test_include_checks(self):
ds = self.cs.load_dataset(static_files["bad_data_type"])
@@ -242,7 +237,7 @@ def test_cdl_file(self):
)
ds.close()

nc_file_path = static_files["test_cdl"].replace(".cdl", ".nc")
nc_file_path = static_files["test_cdl"].with_suffix(".nc")
self.addCleanup(os.remove, nc_file_path)

# Ok the scores should be equal!
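With the `static_files` values now being path objects instead of strings, the assertions move from `str.replace` to `Path.with_suffix`, and `os.path.isfile` keeps working because it accepts any `os.PathLike`. A condensed sketch of the updated pattern, using the same fixture path as the dict above:

```python
import os
import importlib.resources

cdl = importlib.resources.files("compliance_checker") / "tests/data/test_cdl_nc4_file.cdl"
expected_nc = cdl.with_suffix(".nc")  # Path-style suffix swap (on-disk install assumed)
print(os.path.isfile(expected_nc))    # os.path functions accept os.PathLike objects
```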
