awt#218
John Kyrre Hansen committed Jul 6, 2023
1 parent e0fafd4 commit a40093f
Showing 9 changed files with 126 additions and 43 deletions.
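
In short, this commit threads a new get_status flag through both layers: IMSClient.__init__ accepts it, _read_single_tag forwards it when fetching from the cache, and SmartCache.key_path, store and fetch take it when building cache keys, presumably so that status-annotated reads and plain value reads do not share a cache entry. A minimal usage sketch, assuming the flag is simply set at construction time; the datasource name is a placeholder, not part of this commit:

from tagreader.clients import IMSClient

# "myds" is a placeholder datasource; imstype and connect() follow the test fixtures below.
c = IMSClient(datasource="myds", imstype="piwebapi", get_status=False)
c.connect()
# Reads made through the client are expected to forward get_status to the cache
# layer, so store() and fetch() agree on the same key for a given tag and interval.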
9 changes: 7 additions & 2 deletions tagreader/cache.py
@@ -313,6 +313,7 @@ def key_path(
df: Union[str, pd.DataFrame],
read_type: ReaderType,
ts: Optional[Union[int, timedelta]] = None,
get_status: bool = False,
) -> str:
"""Return a string on the form
XXX$sYY$ZZZ where XXX is the ReadType, YY is the interval between samples
@@ -340,8 +341,9 @@ def store(
df: pd.DataFrame,
read_type: ReaderType,
ts: Optional[Union[int, timedelta]] = None,
get_status: bool = False,
) -> None:
key = self.key_path(df=df, read_type=read_type, ts=ts)
key = self.key_path(df=df, read_type=read_type, ts=ts, get_status=get_status)
if df.empty:
return # Weirdness ensues when using empty df in select statement below
if key in self:
@@ -363,8 +365,11 @@ def fetch(
ts: Optional[Union[int, timedelta]] = None,
start: Optional[datetime] = None,
end: Optional[datetime] = None,
get_status: bool = False,
) -> pd.DataFrame:
key = self.key_path(df=tagname, read_type=read_type, ts=ts)
key = self.key_path(
df=tagname, read_type=read_type, ts=ts, get_status=get_status
)
df = cast(Optional[pd.DataFrame], self.get(key=key))
if df is None:
return pd.DataFrame()
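key_path builds keys of the form XXX$sYY$ZZZ, as its docstring above describes. The sketch below illustrates one way the new get_status flag could be folded into such a key so that status-annotated reads are cached separately; the "$status" suffix is an assumption for illustration, not the actual encoding used by cache.py.

from enum import Enum

class ReaderType(Enum):  # minimal stand-in for the ReaderType enum used by tagreader
    INT = "INT"

def make_key(tagname: str, read_type: ReaderType, ts: int, get_status: bool) -> str:
    # Mirror the XXX$sYY$ZZZ layout from the key_path docstring.
    key = f"{read_type.name}$s{ts}${tagname}"
    if get_status:
        key += "$status"  # assumed marker; key_path may encode the flag differently
    return key

make_key("tag1", ReaderType.INT, 60, get_status=True)   # 'INT$s60$tag1$status'
make_key("tag1", ReaderType.INT, 60, get_status=False)  # 'INT$s60$tag1'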
2 changes: 2 additions & 0 deletions tagreader/clients.py
@@ -311,6 +311,7 @@ def __init__(
verifySSL: bool = True,
auth: Optional[Any] = None,
cache: Optional[Union[SmartCache, BucketCache]] = None,
get_status: bool = False,
):
if isinstance(imstype, str):
try:
@@ -402,6 +403,7 @@ def _read_single_tag(
ts=ts,
start=time_slice[0],
end=time_slice[1],
get_status=get_status,
)
missing_intervals = get_missing_intervals(
df=df,
2 changes: 1 addition & 1 deletion tests/test_AspenHandlerODBC_connect.py
@@ -30,7 +30,7 @@

@pytest.fixture # type: ignore[misc]
def client() -> Generator[IMSClient, None, None]:
c = IMSClient(datasource=SOURCE, imstype="ip21")
c = IMSClient(datasource=SOURCE, imstype="ip21", get_status=False)
c.cache = None # type: ignore[assignment]
c.connect()
yield c
7 changes: 6 additions & 1 deletion tests/test_AspenHandlerREST_connect.py
@@ -33,7 +33,12 @@

@pytest.fixture # type: ignore[misc]
def client() -> Generator[IMSClient, None, None]:
c = IMSClient(datasource=SOURCE, imstype="aspenone", verifySSL=bool(VERIFY_SSL))
c = IMSClient(
datasource=SOURCE,
imstype="aspenone",
verifySSL=bool(VERIFY_SSL),
get_status=False,
)
c.cache = None # type: ignore[assignment]
c.connect()
yield c
2 changes: 1 addition & 1 deletion tests/test_PIHandlerODBC_connect.py
@@ -37,7 +37,7 @@

@pytest.fixture # type: ignore[misc]
def client() -> Generator[IMSClient, None, None]:
c = IMSClient(datasource=SOURCE, imstype="pi")
c = IMSClient(datasource=SOURCE, imstype="pi", get_status=False)
c.cache = None # type: ignore[assignment]
c.connect()
yield c
7 changes: 6 additions & 1 deletion tests/test_PIHandlerREST_connect.py
@@ -32,7 +32,12 @@

@pytest.fixture # type: ignore[misc]
def client() -> Generator[IMSClient, None, None]:
c = IMSClient(datasource=SOURCE, imstype="piwebapi", verifySSL=bool(verifySSL))
c = IMSClient(
datasource=SOURCE,
imstype="piwebapi",
verifySSL=bool(verifySSL),
get_status=False,
)
c.cache = None # type: ignore[assignment]
c.connect()
c.handler._max_rows = 1000 # For the long raw test
71 changes: 52 additions & 19 deletions tests/test_cache.py
@@ -52,14 +52,18 @@ def test_key_path(cache: SmartCache) -> None:
pass


def test_cache_single_store_and_fetch(cache: SmartCache, data: pd.DataFrame) -> None:
cache.store(df=data, read_type=ReaderType.INT)
df_read = cache.fetch(tagname="tag1", read_type=ReaderType.INT, ts=60)
def test_cache_single_store_and_fetch(
cache: SmartCache, data: pd.DataFrame, get_status: bool = False
) -> None:
cache.store(df=data, read_type=ReaderType.INT, get_status=get_status)
df_read = cache.fetch(
tagname="tag1", read_type=ReaderType.INT, ts=60, get_status=get_status
)
pd.testing.assert_frame_equal(data, df_read)


def test_cache_multiple_store_single_fetch(
cache: SmartCache, data: pd.DataFrame
cache: SmartCache, data: pd.DataFrame, get_status: bool = False
) -> None:
df1 = data[0:3]
df2 = data[2:10]
@@ -69,41 +73,66 @@ def test_cache_multiple_store_single_fetch(
pd.testing.assert_frame_equal(df_read, data)


def test_interval_reads(cache: SmartCache, data: pd.DataFrame) -> None:
cache.store(df=data, read_type=ReaderType.INT)
def test_interval_reads(
cache: SmartCache, data: pd.DataFrame, get_status: bool = False
) -> None:
cache.store(df=data, read_type=ReaderType.INT, get_status=get_status)
start_oob = pd.to_datetime("2018-01-18 04:55:00")
start = pd.to_datetime("2018-01-18 05:05:00")
end = pd.to_datetime("2018-01-18 05:08:00")
end_oob = pd.to_datetime("2018-01-18 06:00:00")

df_read = cache.fetch(tagname="tag1", read_type=ReaderType.INT, ts=60, start=start)
df_read = cache.fetch(
tagname="tag1",
read_type=ReaderType.INT,
ts=60,
start=start,
get_status=get_status,
)
pd.testing.assert_frame_equal(data[start:], df_read) # type: ignore[misc]
df_read = cache.fetch(tagname="tag1", read_type=ReaderType.INT, ts=60, end=end)
df_read = cache.fetch(
tagname="tag1", read_type=ReaderType.INT, ts=60, end=end, get_status=get_status
)
pd.testing.assert_frame_equal(data[:end], df_read) # type: ignore[misc]
df_read = cache.fetch(
tagname="tag1", read_type=ReaderType.INT, ts=60, start=start_oob
tagname="tag1",
read_type=ReaderType.INT,
ts=60,
start=start_oob,
get_status=get_status,
)
pd.testing.assert_frame_equal(data, df_read)
df_read = cache.fetch(tagname="tag1", read_type=ReaderType.INT, ts=60, end=end_oob)
df_read = cache.fetch(
tagname="tag1",
read_type=ReaderType.INT,
ts=60,
end=end_oob,
get_status=get_status,
)
pd.testing.assert_frame_equal(data, df_read)
df_read = cache.fetch(
tagname="tag1",
read_type=ReaderType.INT,
ts=60,
start=start,
end=end,
get_status=get_status,
)
pd.testing.assert_frame_equal(data[start:end], df_read) # type: ignore[misc]


def test_store_empty_df(cache: SmartCache, data: pd.DataFrame) -> None:
def test_store_empty_df(
cache: SmartCache, data: pd.DataFrame, get_status: bool = False
) -> None:
# Empty dataframes should not be stored (note: df full of NaN is not empty!)
cache.store(df=data, read_type=ReaderType.INT)
cache.store(df=data, read_type=ReaderType.INT, get_status=get_status)
df = pd.DataFrame({"tag1": []})
cache.store(
df=df, read_type=ReaderType.INT, ts=60
) # Specify ts to ensure correct key /if/ stored
df_read = cache.fetch(tagname="tag1", read_type=ReaderType.INT, ts=60)
df_read = cache.fetch(
tagname="tag1", read_type=ReaderType.INT, ts=60, get_status=get_status
)
pd.testing.assert_frame_equal(data, df_read)


@@ -120,7 +149,7 @@ def test_store_metadata(cache: SmartCache) -> None:
assert "noworky" not in r


def test_to_dst_skips_time(cache: SmartCache) -> None:
def test_to_dst_skips_time(cache: SmartCache, get_status: bool = False) -> None:
index = pd.date_range(
start="2018-03-25 01:50:00",
end="2018-03-25 03:30:00",
@@ -133,12 +162,14 @@ def test_to_dst_skips_time(cache: SmartCache) -> None:
assert (
df.loc["2018-03-25 01:50:00":"2018-03-25 03:10:00"].size == (2 + 1 * 6 + 1) - 6 # type: ignore[misc]
)
cache.store(df=df, read_type=ReaderType.INT)
df_read = cache.fetch(tagname="tag1", read_type=ReaderType.INT, ts=600)
cache.store(df=df, read_type=ReaderType.INT, get_status=get_status)
df_read = cache.fetch(
tagname="tag1", read_type=ReaderType.INT, ts=600, get_status=get_status
)
pd.testing.assert_frame_equal(df_read, df)


def test_from_dst_folds_time(cache: SmartCache) -> None:
def test_from_dst_folds_time(cache: SmartCache, get_status: bool = False) -> None:
index = pd.date_range(
start="2017-10-29 00:30:00",
end="2017-10-29 04:30:00",
@@ -158,6 +189,8 @@ def test_from_dst_folds_time(cache: SmartCache) -> None:
assert (
df.loc["2017-10-29 01:50:00":"2017-10-29 03:10:00"].size == 2 + (1 + 1) * 6 + 1 # type: ignore[misc]
)
cache.store(df=df, read_type=ReaderType.INT)
df_read = cache.fetch(tagname="tag1", read_type=ReaderType.INT, ts=600)
cache.store(df=df, read_type=ReaderType.INT, get_status=get_status)
df_read = cache.fetch(
tagname="tag1", read_type=ReaderType.INT, ts=600, get_status=get_status
)
pd.testing.assert_frame_equal(df_read, df)
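
Note that the tests above receive get_status as a defaulted function argument, so pytest only ever runs the False path. A possible alternative, sketched here on the assumption that the existing cache and data fixtures and the ReaderType import in tests/test_cache.py are reused, would be to parametrize the flag so both values are exercised:

import pandas as pd
import pytest

# Relies on the `cache` and `data` fixtures and the ReaderType import already
# defined in tests/test_cache.py.
@pytest.mark.parametrize("get_status", [False, True])
def test_cache_single_store_and_fetch(cache, data: pd.DataFrame, get_status: bool) -> None:
    cache.store(df=data, read_type=ReaderType.INT, get_status=get_status)
    df_read = cache.fetch(
        tagname="tag1", read_type=ReaderType.INT, ts=60, get_status=get_status
    )
    pd.testing.assert_frame_equal(data, df_read)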
49 changes: 36 additions & 13 deletions tests/test_clients.py
@@ -58,55 +58,78 @@ class TestODBC:
def test_pi_init_odbc_client_with_host_port(self) -> None:
host = "thehostname"
port = 999
c = IMSClient(datasource="whatever", imstype="pi", host=host)
get_status = False
c = IMSClient(
datasource="whatever", imstype="pi", host=host, get_status=get_status
)
assert c.handler.host == host
assert c.handler.port == 5450
c = IMSClient(datasource="whatever", imstype="pi", host=host, port=port)
c = IMSClient(
datasource="whatever",
imstype="pi",
host=host,
port=port,
get_status=get_status,
)
assert c.handler.host == host
assert c.handler.port == port

def test_ip21_init_odbc_client_with_host_port(self) -> None:
host = "thehostname"
port = 999
c = IMSClient(datasource="whatever", imstype="ip21", host=host)
get_status = False
c = IMSClient(
datasource="whatever", imstype="ip21", host=host, get_status=get_status
)
assert c.handler.host == host
assert c.handler.port == 10014
c = IMSClient(datasource="whatever", imstype="ip21", host=host, port=port)
c = IMSClient(
datasource="whatever",
imstype="ip21",
host=host,
port=port,
get_status=get_status,
)
assert c.handler.host == host
assert c.handler.port == port

def test_pi_connection_string_override(self) -> None:
connstr = "someuserspecifiedconnectionstring"
get_status = False
c = IMSClient(
datasource="whatever",
host="host",
imstype="pi",
host="host",
handler_options={"connection_string": connstr},
get_status=get_status,
)
assert c.handler.generate_connection_string() == connstr

def test_ip21_connection_string_override(self) -> None:
connstr = "someuserspecifiedconnectionstring"
get_status = False
c = IMSClient(
datasource="whatever",
host="host",
imstype="ip21",
host="host",
handler_options={"connection_string": connstr},
get_status=get_status,
)
assert c.handler.generate_connection_string() == connstr

def test_init_odbc_clients(self) -> None:
get_status = False
with pytest.raises(ValueError):
_ = IMSClient(datasource="xyz")
_ = IMSClient(datasource="xyz", get_status=get_status)
with pytest.raises(ValueError):
_ = IMSClient(datasource="sNa", imstype="pi")
_ = IMSClient(datasource="sNa", imstype="pi", get_status=get_status)
with pytest.raises(ValueError):
_ = IMSClient(datasource="Ono-imS", imstype="aspen")
_ = IMSClient(datasource="Ono-imS", imstype="aspen", get_status=get_status)
with pytest.raises(ValueError):
_ = IMSClient(datasource="ono-ims", imstype="aspen")
_ = IMSClient(datasource="ono-ims", imstype="aspen", get_status=get_status)
with pytest.raises(ValueError):
_ = IMSClient(datasource="sna", imstype="pi")
c = IMSClient(datasource="onO-iMs", imstype="pi")
_ = IMSClient(datasource="sna", imstype="pi", get_status=get_status)
c = IMSClient(datasource="onO-iMs", imstype="pi", get_status=get_status)
assert isinstance(c.handler, PIHandlerODBC)
c = IMSClient(datasource="snA", imstype="aspen")
c = IMSClient(datasource="snA", imstype="aspen", get_status=get_status)
assert isinstance(c.handler, AspenHandlerODBC)
20 changes: 15 additions & 5 deletions tests/test_data_integrity.py
@@ -40,7 +40,7 @@

@pytest.fixture # type: ignore[misc]
def pi_client_odbc() -> Generator[IMSClient, None, None]:
c = IMSClient(datasource=PI_DS, imstype="pi")
c = IMSClient(datasource=PI_DS, imstype="pi", get_status=False)
if os.path.exists(PI_DS + ".h5"):
os.remove(PI_DS + ".h5")
c.cache = None # type: ignore[assignment]
@@ -52,7 +52,9 @@ def pi_client_odbc() -> Generator[IMSClient, None, None]:

@pytest.fixture # type: ignore[misc]
def pi_client_web() -> Generator[IMSClient, None, None]:
c = IMSClient(datasource=PI_DS, imstype="piwebapi", verifySSL=verifySSL)
c = IMSClient(
datasource=PI_DS, imstype="piwebapi", verifySSL=verifySSL, get_status=False
)
if os.path.exists(PI_DS + ".h5"):
os.remove(PI_DS + ".h5")
c.cache = None # type: ignore[assignment]
@@ -64,7 +66,7 @@ def pi_client_web() -> Generator[IMSClient, None, None]:

@pytest.fixture # type: ignore[misc]
def aspen_client_odbc() -> Generator[IMSClient, None, None]:
c = IMSClient(datasource=ASPEN_DS, imstype="ip21")
c = IMSClient(datasource=ASPEN_DS, imstype="ip21", get_status=False)
if os.path.exists(ASPEN_DS + ".h5"):
os.remove(ASPEN_DS + ".h5")
c.cache = None # type: ignore[assignment]
@@ -76,7 +78,12 @@ def aspen_client_odbc() -> Generator[IMSClient, None, None]:

@pytest.fixture # type: ignore[misc]
def aspen_client_web() -> Generator[IMSClient, None, None]:
c = IMSClient(datasource=ASPEN_DS, imstype="aspenone", verifySSL=bool(verifySSL))
c = IMSClient(
datasource=ASPEN_DS,
imstype="aspenone",
verifySSL=bool(verifySSL),
get_status=False,
)
if os.path.exists(ASPEN_DS + ".h5"):
os.remove(ASPEN_DS + ".h5")
c.cache = None # type: ignore[assignment]
@@ -234,7 +241,9 @@ def test_concat_proper_fill_up(pi_client_web: IMSClient) -> None:
pi_client_web.handler._max_rows = max_rows_backup


def test_cache_proper_fill_up(pi_client_web: IMSClient, tmp_path: Path) -> None:
def test_cache_proper_fill_up(
pi_client_web: IMSClient, tmp_path: Path, get_status: bool = False
) -> None:
pi_client_web.cache = SmartCache(directory=tmp_path)
df_int_1 = pi_client_web.read(
tags=PI_TAG,
@@ -258,5 +267,6 @@ def test_cache_proper_fill_up(pi_client_web: IMSClient, tmp_path: Path) -> None:
ts=TS,
start=ensure_datetime_with_tz(PI_START_TIME),
end=ensure_datetime_with_tz(PI_END_TIME_2),
get_status=get_status,
)
assert len(df_cached) == 32
